diff --git a/.github/workflows/build-compiler.yml b/.github/workflows/build-compiler.yml new file mode 100644 index 000000000..e7881720b --- /dev/null +++ b/.github/workflows/build-compiler.yml @@ -0,0 +1,28 @@ +name: Compiler Build + +on: + workflow_dispatch: + +env: + COMPILER_IMAGE: ghcr.io/stashapp/compiler:13 + +jobs: + build-compiler: + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v6 + - uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + - uses: docker/setup-buildx-action@v3 + - uses: docker/build-push-action@v6 + with: + push: true + context: "{{defaultContext}}:docker/compiler" + tags: | + ${{ env.COMPILER_IMAGE }} + ghcr.io/stashapp/compiler:latest + cache-from: type=gha,scope=all,mode=max + cache-to: type=gha,scope=all,mode=max \ No newline at end of file diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 1e46ecd69..c068b46f0 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -2,7 +2,7 @@ name: Build on: push: - branches: + branches: - develop - master - 'releases/**' @@ -15,50 +15,163 @@ concurrency: cancel-in-progress: true env: - COMPILER_IMAGE: stashapp/compiler:12 + COMPILER_IMAGE: ghcr.io/stashapp/compiler:13 jobs: - build: - runs-on: ubuntu-22.04 + # Job 1: Generate code and build UI + # Runs natively (no Docker) — go generate/gqlgen and node don't need cross-compilers. + # Produces artifacts (generated Go files + UI build) consumed by test and build jobs. 
+ generate: + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + fetch-tags: true + - name: Setup Go + uses: actions/setup-go@v6 - - name: Checkout - run: git fetch --prune --unshallow --tags + # pnpm version is read from the packageManager field in package.json + # very broken (4.3, 4.4) + - name: Install pnpm + uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 + with: + package_json_file: ui/v2.5/package.json + + - name: Setup Node.js + uses: actions/setup-node@v6 + with: + node-version: '20' + cache: 'pnpm' + cache-dependency-path: ui/v2.5/pnpm-lock.yaml + + - name: Install UI dependencies + run: cd ui/v2.5 && pnpm install --frozen-lockfile + + - name: Generate + run: make generate + + - name: Cache UI build + uses: actions/cache@v5 + id: cache-ui + with: + path: ui/v2.5/build + key: ${{ runner.os }}-ui-build-${{ hashFiles('ui/v2.5/pnpm-lock.yaml', 'ui/v2.5/public/**', 'ui/v2.5/src/**', 'graphql/**/*.graphql') }} + + - name: Validate UI + # skip UI validation for pull requests if UI is unchanged + if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }} + run: make validate-ui + + - name: Build UI + # skip UI build for pull requests if UI is unchanged (UI was cached) + if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }} + run: make ui + + # Bundle generated Go files + UI build for downstream jobs (test + build) + - name: Upload generated artifacts + uses: actions/upload-artifact@v7 + with: + name: generated + retention-days: 1 + path: | + internal/api/generated_exec.go + internal/api/generated_models.go + ui/v2.5/build/ + ui/login/locales/ + + # Job 2: Integration tests + # Runs natively (no Docker) — only needs Go + GCC (for CGO/SQLite), both on ubuntu-22.04. + # Runs in parallel with the build matrix jobs. 
+ test: + needs: generate + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v6 - name: Setup Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: go-version-file: 'go.mod' - - name: Pull compiler image - run: docker pull $COMPILER_IMAGE - - - name: Cache node modules - uses: actions/cache@v3 - env: - cache-name: cache-node_modules + # Places generated Go files + UI build into the working tree so the build compiles + - name: Download generated artifacts + uses: actions/download-artifact@v8 with: - path: ui/v2.5/node_modules - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/pnpm-lock.yaml') }} + name: generated - - name: Cache UI build - uses: actions/cache@v3 - id: cache-ui - env: - cache-name: cache-ui + - name: Test Backend + run: make it + + # Job 3: Cross-compile for all platforms + # Each platform gets its own runner and Docker container (ghcr.io/stashapp/compiler:13). + # Each build-cc-* make target is self-contained (sets its own GOOS/GOARCH/CC), + # so running them in separate containers is functionally identical to one container. + # Runs in parallel with the test job. 
+ build: + needs: generate + runs-on: ubuntu-24.04 + strategy: + fail-fast: false + matrix: + include: + - platform: windows + make-target: build-cc-windows + artifact-paths: | + dist/stash-win.exe + tag: win + - platform: macos + make-target: build-cc-macos + artifact-paths: | + dist/stash-macos + dist/Stash.app.zip + tag: osx + - platform: linux + make-target: build-cc-linux + artifact-paths: | + dist/stash-linux + tag: linux + - platform: linux-arm64v8 + make-target: build-cc-linux-arm64v8 + artifact-paths: | + dist/stash-linux-arm64v8 + tag: arm + - platform: linux-arm32v7 + make-target: build-cc-linux-arm32v7 + artifact-paths: | + dist/stash-linux-arm32v7 + tag: arm + - platform: linux-arm32v6 + make-target: build-cc-linux-arm32v6 + artifact-paths: | + dist/stash-linux-arm32v6 + tag: arm + - platform: freebsd + make-target: build-cc-freebsd + artifact-paths: | + dist/stash-freebsd + tag: freebsd + + steps: + - uses: actions/checkout@v6 with: - path: ui/v2.5/build - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/pnpm-lock.yaml', 'ui/v2.5/public/**', 'ui/v2.5/src/**', 'graphql/**/*.graphql') }} + fetch-depth: 0 + fetch-tags: true - - name: Cache go build - uses: actions/cache@v3 - env: - # increment the number suffix to bump the cache - cache-name: cache-go-cache-1 + - name: Download generated artifacts + uses: actions/download-artifact@v8 + with: + name: generated + + - name: Cache Go build + uses: actions/cache@v5 with: path: .go-cache - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('go.mod', '**/go.sum') }} + key: ${{ runner.os }}-go-cache-${{ matrix.platform }}-${{ hashFiles('go.mod', '**/go.sum') }} + + # kept seperate to test timings + - name: pull compiler image + run: docker pull $COMPILER_IMAGE - name: Start build container env: @@ -67,45 +180,50 @@ jobs: mkdir -p .go-cache docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount 
type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated --env OFFICIAL_BUILD=${{ env.official-build }} -w /stash $COMPILER_IMAGE tail -f /dev/null - - name: Pre-install - run: docker exec -t build /bin/bash -c "make CI=1 pre-ui" - - - name: Generate - run: docker exec -t build /bin/bash -c "make generate" - - - name: Validate UI - # skip UI validation for pull requests if UI is unchanged - if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }} - run: docker exec -t build /bin/bash -c "make validate-ui" - - # Static validation happens in the linter workflow in parallel to this workflow - # Run Dynamic validation here, to make sure we pass all the projects integration tests - - name: Test Backend - run: docker exec -t build /bin/bash -c "make it" - - - name: Build UI - # skip UI build for pull requests if UI is unchanged (UI was cached) - # this means that the build version/time may be incorrect if the UI is - # not changed in a pull request - if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }} - run: docker exec -t build /bin/bash -c "make ui" - - - name: Compile for all supported platforms - run: | - docker exec -t build /bin/bash -c "make build-cc-windows" - docker exec -t build /bin/bash -c "make build-cc-macos" - docker exec -t build /bin/bash -c "make build-cc-linux" - docker exec -t build /bin/bash -c "make build-cc-linux-arm64v8" - docker exec -t build /bin/bash -c "make build-cc-linux-arm32v7" - docker exec -t build /bin/bash -c "make build-cc-linux-arm32v6" - docker exec -t build /bin/bash -c "make build-cc-freebsd" - - - name: Zip UI - run: docker exec -t build /bin/bash -c "make zip-ui" + - name: Build (${{ matrix.platform }}) + run: docker exec -t build /bin/bash -c "make ${{ matrix.make-target }}" - name: Cleanup build container run: docker rm -f -v build + - name: Upload build artifact + uses: actions/upload-artifact@v7 + with: + name: 
build-${{ matrix.platform }} + retention-days: 1 + path: ${{ matrix.artifact-paths }} + + # Job 4: Release + # Waits for both test and build to pass, then collects all platform artifacts + # into dist/ for checksums, GitHub releases, and multi-arch Docker push. + release: + needs: [test, build] + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + fetch-tags: true + + # Downloads all artifacts (generated + 7 platform builds) into artifacts/ subdirectories + - name: Download all build artifacts + uses: actions/download-artifact@v8 + with: + path: artifacts + + # Reassemble platform binaries from matrix job artifacts into a single dist/ directory + # make sure that artifacts have executable bit set + # upload-artifact@v4 strips the common path prefix (dist/), so files are at the artifact root + - name: Collect binaries + run: | + mkdir -p dist + cp artifacts/build-*/* dist/ + chmod +x dist/* + + - name: Zip UI + run: | + cd artifacts/generated/ui/v2.5/build && zip -r ../../../../../dist/stash-ui.zip . 
+ - name: Generate checksums run: | git describe --tags --exclude latest_develop | tee CHECKSUMS_SHA1 @@ -116,7 +234,7 @@ jobs: - name: Upload Windows binary # only upload binaries for pull requests if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}} - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 with: name: stash-win.exe path: dist/stash-win.exe @@ -124,15 +242,23 @@ jobs: - name: Upload macOS binary # only upload binaries for pull requests if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}} - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 with: name: stash-macos path: dist/stash-macos + - name: Upload macOS bundle + # only upload binaries for pull requests + if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}} + uses: actions/upload-artifact@v7 + with: + name: Stash.app.zip + path: dist/Stash.app.zip + - name: Upload Linux binary # only upload binaries for pull requests if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}} - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 with: name: stash-linux path: dist/stash-linux @@ -140,14 +266,14 @@ jobs: - name: Upload UI # only upload for pull requests if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}} - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 with: name: stash-ui.zip path: dist/stash-ui.zip - name: Update latest_develop tag if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }} - run : git tag -f latest_develop; git push -f --tags + run: git tag -f latest_develop; git push -f --tags - name: Development Release if: ${{ 
github.event_name == 'push' && github.ref == 'refs/heads/develop' }} @@ -197,7 +323,7 @@ jobs: DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} run: | - docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64 + docker run --rm --privileged tonistiigi/binfmt docker info docker buildx create --name builder --use docker buildx inspect --bootstrap @@ -213,7 +339,7 @@ jobs: DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} run: | - docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64 + docker run --rm --privileged tonistiigi/binfmt docker info docker buildx create --name builder --use docker buildx inspect --bootstrap diff --git a/.github/workflows/golangci-lint.yml b/.github/workflows/golangci-lint.yml index 71c743ced..19a6d62bd 100644 --- a/.github/workflows/golangci-lint.yml +++ b/.github/workflows/golangci-lint.yml @@ -9,65 +9,20 @@ on: - 'releases/**' pull_request: -env: - COMPILER_IMAGE: stashapp/compiler:12 - jobs: golangci: name: lint runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - - name: Checkout - run: git fetch --prune --unshallow --tags - - - name: Setup Go - uses: actions/setup-go@v5 - with: - go-version-file: 'go.mod' - - - name: Pull compiler image - run: docker pull $COMPILER_IMAGE - - - name: Start build container - run: | - mkdir -p .go-cache - docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated -w /stash $COMPILER_IMAGE tail -f /dev/null + # no tags or depth needed for lint + - uses: actions/checkout@v6 + - uses: actions/setup-go@v6 + # generate-backend runs natively (just go generate + touch-ui) — no Docker needed - name: Generate Backend - run: docker exec -t build /bin/bash -c "make generate-backend" + run: make generate-backend + ## WARN + ## 
using v1, update in a later PR - name: Run golangci-lint - uses: golangci/golangci-lint-action@v6 - with: - # Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version - version: latest - - # Optional: working directory, useful for monorepos - # working-directory: somedir - - # Optional: golangci-lint command line arguments. - # - # Note: By default, the `.golangci.yml` file should be at the root of the repository. - # The location of the configuration file can be changed by using `--config=` - args: --timeout=5m - - # Optional: show only new issues if it's a pull request. The default value is `false`. - # only-new-issues: true - - # Optional: if set to true, then all caching functionality will be completely disabled, - # takes precedence over all other caching options. - # skip-cache: true - - # Optional: if set to true, then the action won't cache or restore ~/go/pkg. - # skip-pkg-cache: true - - # Optional: if set to true, then the action won't cache or restore ~/.cache/go-build. - # skip-build-cache: true - - # Optional: The mode to install golangci-lint. It can be 'binary' or 'goinstall'. 
- # install-mode: "goinstall" - - - name: Cleanup build container - run: docker rm -f -v build + uses: golangci/golangci-lint-action@v6 \ No newline at end of file diff --git a/Makefile b/Makefile index 7e19063a3..d9caf0ee5 100644 --- a/Makefile +++ b/Makefile @@ -50,7 +50,7 @@ export CGO_ENABLED := 1 # define COMPILER_IMAGE for cross-compilation docker container ifndef COMPILER_IMAGE - COMPILER_IMAGE := stashapp/compiler:latest + COMPILER_IMAGE := ghcr.io/stashapp/compiler:latest endif .PHONY: release @@ -129,7 +129,7 @@ phasher: build-flags # builds dynamically-linked debug binaries .PHONY: build -build: stash phasher +build: stash # builds dynamically-linked PIE release binaries .PHONY: build-release @@ -187,8 +187,6 @@ build-cc-macos: # Combine into universal binaries lipo -create -output dist/stash-macos dist/stash-macos-intel dist/stash-macos-arm rm dist/stash-macos-intel dist/stash-macos-arm - lipo -create -output dist/phasher-macos dist/phasher-macos-intel dist/phasher-macos-arm - rm dist/phasher-macos-intel dist/phasher-macos-arm # Place into bundle and zip up rm -rf dist/Stash.app @@ -198,6 +196,16 @@ build-cc-macos: cd dist && rm -f Stash.app.zip && zip -r Stash.app.zip Stash.app rm -rf dist/Stash.app +.PHONY: build-cc-macos-phasher +build-cc-macos-phasher: + make build-cc-macos-arm + make build-cc-macos-intel + + # Combine into universal binaries + lipo -create -output dist/phasher-macos dist/phasher-macos-intel dist/phasher-macos-arm + rm dist/phasher-macos-intel dist/phasher-macos-arm + # do not bundle phasher + .PHONY: build-cc-freebsd build-cc-freebsd: export GOOS := freebsd build-cc-freebsd: export GOARCH := amd64 diff --git a/README.md b/README.md index 5ccefe4bc..2d90a76ea 100644 --- a/README.md +++ b/README.md @@ -13,10 +13,10 @@ ![Screenshot of Stash web application interface](docs/readme_assets/demo_image.png) -* Stash gathers information about videos in your collection from the internet, and is extensible through the use of community-built 
plugins for a large number of content producers and sites. -* Stash supports a wide variety of both video and image formats. -* You can tag videos and find them later. -* Stash provides statistics about performers, tags, studios and more. +- Stash gathers information about videos in your collection from the internet, and is extensible through the use of community-built plugins for a large number of content producers and sites. +- Stash supports a wide variety of both video and image formats. +- You can tag videos and find them later. +- Stash provides statistics about performers, tags, studios and more. You can [watch a SFW demo video](https://vimeo.com/545323354) to see it in action. @@ -24,17 +24,19 @@ For further information you can consult the [documentation](https://docs.stashap # Installing Stash +> [!tip] Step-by-step instructions are available at [docs.stashapp.cc/installation](https://docs.stashapp.cc/installation/). -#### Windows Users: - -As of version 0.27.0, Stash no longer supports _Windows 7, 8, Server 2008 and Server 2012._ -At least Windows 10 or Server 2016 is required. - -#### Mac Users: - -As of version 0.29.0, Stash requires _macOS 11 Big Sur_ or later. -Stash can still be run through docker on older versions of macOS. +> [!important] +>**Windows Users** +> +>As of version 0.27.0, Stash no longer supports _Windows 7, 8, Server 2008 and Server 2012._ +>At least Windows 10 or Server 2016 is required. +> +>**macOS Users** +> +> As of version 0.29.0, Stash requires _macOS 11 Big Sur_ or later. +> Stash can still be run through docker on older versions of macOS. Windows | macOS | Linux | Docker :---:|:---:|:---:|:---: @@ -85,23 +87,23 @@ The badge below shows the current translation status of Stash across all support Need help or want to get involved? Start with the documentation, then reach out to the community if you need further assistance. -- Documentation - - Official docs: https://docs.stashapp.cc - official guides guides and troubleshooting. 
- - In-app manual: press Shift + ? in the app or view the manual online: https://docs.stashapp.cc/in-app-manual. - - FAQ: https://discourse.stashapp.cc/c/support/faq/28 - common questions and answers. - - Community wiki: https://discourse.stashapp.cc/tags/c/community-wiki/22/stash - guides, how-to’s and tips. +### Documentation +- [Official documentation](https://docs.stashapp.cc) - official guides and troubleshooting. +- [In-app manual](https://docs.stashapp.cc/in-app-manual) - press Shift + ? in the app or view the manual online. +- [FAQ](https://discourse.stashapp.cc/c/support/faq/28) - common questions and answers. +- [Community wiki](https://discourse.stashapp.cc/tags/c/community-wiki/22/stash) - guides, how-to’s and tips. -- Community & discussion - - Community forum: https://discourse.stashapp.cc - community support, feature requests and discussions. - - Discord: https://discord.gg/2TsNFKt - real-time chat and community support. - - GitHub discussions: https://github.com/stashapp/stash/discussions - community support and feature discussions. - - Lemmy community: https://discuss.online/c/stashapp - Reddit-style community space. +### Community & discussion +- [Community forum](https://discourse.stashapp.cc) - community support, feature requests and discussions. +- [Discord](https://discord.gg/2TsNFKt) - real-time chat and community support. +- [GitHub discussions](https://github.com/stashapp/stash/discussions) - community support and feature discussions. +- [Lemmy community](https://discuss.online/c/stashapp) - board-style community space. 
-- Community scrapers & plugins - - Metadata sources: https://docs.stashapp.cc/metadata-sources/ - - Plugins: https://docs.stashapp.cc/plugins/ - - Themes: https://docs.stashapp.cc/themes/ - - Other projects: https://docs.stashapp.cc/other-projects/ +### Community scrapers & plugins +- [Metadata sources](https://docs.stashapp.cc/metadata-sources/) +- [Plugins](https://docs.stashapp.cc/plugins/) +- [Themes](https://docs.stashapp.cc/themes/) +- [Other projects](https://docs.stashapp.cc/other-projects/) # For Developers diff --git a/cmd/phasher/main.go b/cmd/phasher/main.go index 864195631..be2053784 100644 --- a/cmd/phasher/main.go +++ b/cmd/phasher/main.go @@ -5,20 +5,39 @@ import ( "fmt" "os" "os/exec" + "path/filepath" flag "github.com/spf13/pflag" "github.com/stashapp/stash/pkg/ffmpeg" + "github.com/stashapp/stash/pkg/hash/imagephash" "github.com/stashapp/stash/pkg/hash/videophash" "github.com/stashapp/stash/pkg/models" ) func customUsage() { fmt.Fprintf(os.Stderr, "Usage:\n") - fmt.Fprintf(os.Stderr, "%s [OPTIONS] VIDEOFILE...\n\nOptions:\n", os.Args[0]) + fmt.Fprintf(os.Stderr, "%s [OPTIONS] FILE...\n\nOptions:\n", os.Args[0]) flag.PrintDefaults() } func printPhash(ff *ffmpeg.FFMpeg, ffp *ffmpeg.FFProbe, inputfile string, quiet *bool) error { + // Determine if this is a video or image file based on extension + ext := filepath.Ext(inputfile) + ext = ext[1:] // remove the leading dot + + // Common image extensions + imageExts := map[string]bool{ + "jpg": true, "jpeg": true, "png": true, "gif": true, "webp": true, "bmp": true, "avif": true, + } + + if imageExts[ext] { + return printImagePhash(ff, inputfile, quiet) + } + + return printVideoPhash(ff, ffp, inputfile, quiet) +} + +func printVideoPhash(ff *ffmpeg.FFMpeg, ffp *ffmpeg.FFProbe, inputfile string, quiet *bool) error { ffvideoFile, err := ffp.NewVideoFile(inputfile) if err != nil { return err @@ -46,6 +65,24 @@ func printPhash(ff *ffmpeg.FFMpeg, ffp *ffmpeg.FFProbe, inputfile string, quiet return nil } +func 
printImagePhash(ff *ffmpeg.FFMpeg, inputfile string, quiet *bool) error { + imgFile := &models.ImageFile{ + BaseFile: &models.BaseFile{Path: inputfile}, + } + + phash, err := imagephash.Generate(ff, imgFile) + if err != nil { + return err + } + + if *quiet { + fmt.Printf("%x\n", *phash) + } else { + fmt.Printf("%x %v\n", *phash, imgFile.Path) + } + return nil +} + func getPaths() (string, string) { ffmpegPath, _ := exec.LookPath("ffmpeg") ffprobePath, _ := exec.LookPath("ffprobe") @@ -67,7 +104,7 @@ func main() { args := flag.Args() if len(args) < 1 { - fmt.Fprintf(os.Stderr, "Missing VIDEOFILE argument.\n") + fmt.Fprintf(os.Stderr, "Missing FILE argument.\n") flag.Usage() os.Exit(2) } @@ -87,4 +124,5 @@ func main() { fmt.Fprintln(os.Stderr, err) } } + } diff --git a/docker/ci/x86_64/Dockerfile b/docker/ci/x86_64/Dockerfile index 6a9c6b76d..2161cb6af 100644 --- a/docker/ci/x86_64/Dockerfile +++ b/docker/ci/x86_64/Dockerfile @@ -12,7 +12,7 @@ RUN if [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then BIN=stash-linux-arm32v6; \ FROM --platform=$TARGETPLATFORM alpine:latest AS app COPY --from=binary /stash /usr/bin/ -RUN apk add --no-cache ca-certificates python3 py3-requests py3-requests-toolbelt py3-lxml py3-pip ffmpeg tzdata vips vips-tools \ +RUN apk add --no-cache ca-certificates python3 py3-requests py3-requests-toolbelt py3-lxml py3-pip ffmpeg tzdata vips vips-tools vips-heif \ && pip install --break-system-packages mechanicalsoup cloudscraper stashapp-tools ENV STASH_CONFIG_FILE=/root/.stash/config.yml diff --git a/docker/compiler/.gitignore b/docker/compiler/.gitignore deleted file mode 100644 index 7012bfd63..000000000 --- a/docker/compiler/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.sdk.tar.* \ No newline at end of file diff --git a/docker/compiler/Dockerfile b/docker/compiler/Dockerfile index 0154d7e61..c9dfb9c7c 100644 --- a/docker/compiler/Dockerfile +++ b/docker/compiler/Dockerfile @@ -1,82 +1,86 @@ -FROM golang:1.24.3 +### OSXCROSS +FROM debian:bookworm AS 
osxcross +# add osxcross +WORKDIR /tmp/osxcross +ARG OSXCROSS_REVISION=5e1b71fcceb23952f3229995edca1b6231525b5b +ADD --checksum=sha256:d3f771bbc20612fea577b18a71be3af2eb5ad2dd44624196cf55de866d008647 https://codeload.github.com/tpoechtrager/osxcross/tar.gz/${OSXCROSS_REVISION} /tmp/osxcross.tar.gz -LABEL maintainer="https://discord.gg/2TsNFKt" +ARG OSX_SDK_VERSION=11.3 +ARG OSX_SDK_DOWNLOAD_FILE=MacOSX${OSX_SDK_VERSION}.sdk.tar.xz +ARG OSX_SDK_DOWNLOAD_URL=https://github.com/phracker/MacOSX-SDKs/releases/download/${OSX_SDK_VERSION}/${OSX_SDK_DOWNLOAD_FILE} +ADD --checksum=sha256:cd4f08a75577145b8f05245a2975f7c81401d75e9535dcffbb879ee1deefcbf4 ${OSX_SDK_DOWNLOAD_URL} /tmp/osxcross/tarballs/${OSX_SDK_DOWNLOAD_FILE} -RUN apt-get update && apt-get install -y apt-transport-https ca-certificates gnupg +ENV UNATTENDED=yes \ + SDK_VERSION=${OSX_SDK_VERSION} \ + OSX_VERSION_MIN=10.10 +RUN apt update && \ + apt install -y --no-install-recommends \ + bash ca-certificates clang cmake git patch libssl-dev bzip2 cpio libbz2-dev libxml2-dev make python3 xz-utils zlib1g-dev +# lzma-dev libxml2-dev xz +RUN tar --strip=1 -C /tmp/osxcross -xf /tmp/osxcross.tar.gz +RUN ./build.sh -RUN mkdir -p /etc/apt/keyrings +### FREEBSD cross-compilation stage +# use alpine for cacheable image since apt is notorous for not caching +FROM alpine:3 AS freebsd +# match golang latest +# https://go.dev/wiki/FreeBSD +ARG FREEBSD_VERSION=12.4 +ADD --checksum=sha256:581c7edacfd2fca2bdf5791f667402d22fccd8a5e184635e0cac075564d57aa8 \ + http://ftp-archive.freebsd.org/mirror/FreeBSD-Archive/old-releases/amd64/${FREEBSD_VERSION}-RELEASE/base.txz \ + /tmp/base.txz -ADD https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key nodesource.gpg.key -RUN cat nodesource.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && rm nodesource.gpg.key -RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_24.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list +WORKDIR 
/opt/cross-freebsd +RUN apk add --no-cache tar xz +RUN tar -xf /tmp/base.txz --strip-components=1 ./usr/lib ./usr/include ./lib +RUN cd /opt/cross-freebsd/usr/lib && \ + find . -type l -exec sh -c ' \ + for link; do \ + target=$(readlink "$link"); \ + case "$target" in \ + /lib/*) ln -sf "/opt/cross-freebsd$target" "$link";; \ + esac; \ + done \ + ' sh {} + && \ + ln -s libc++.a libstdc++.a && \ + ln -s libc++.so libstdc++.so -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - git make tar bash nodejs zip \ - clang llvm-dev cmake patch libxml2-dev uuid-dev libssl-dev xz-utils \ - bzip2 gzip sed cpio libbz2-dev zlib1g-dev \ - gcc-mingw-w64 \ - gcc-arm-linux-gnueabi libc-dev-armel-cross linux-libc-dev-armel-cross \ - gcc-aarch64-linux-gnu libc-dev-arm64-cross && \ - rm -rf /var/lib/apt/lists/*; +### BUILDER +FROM golang:1.24.3 AS builder +ENV PATH=/opt/osx-ndk-x86/bin:$PATH + +# copy in nodejs instead of using nodesource :thumbsup: +COPY --from=docker.io/library/node:24-bookworm /usr/local /usr/local +# copy in osxcross +COPY --from=osxcross /tmp/osxcross/target/lib /usr/lib +COPY --from=osxcross /tmp/osxcross/target /opt/osx-ndk-x86 +# copy in cross-freebsd +COPY --from=freebsd /opt/cross-freebsd /opt/cross-freebsd # pnpm install with npm RUN npm install -g pnpm -# FreeBSD cross-compilation setup -# https://github.com/smartmontools/docker-build/blob/6b8c92560d17d325310ba02d9f5a4b250cb0764a/Dockerfile#L66 -ENV FREEBSD_VERSION 13.4 -ENV FREEBSD_DOWNLOAD_URL http://ftp.plusline.de/FreeBSD/releases/amd64/${FREEBSD_VERSION}-RELEASE/base.txz -ENV FREEBSD_SHA 8e13b0a93daba349b8d28ad246d7beb327659b2ef4fe44d89f447392daec5a7c +# git for getting hash +# make and bash for building -RUN cd /tmp && \ - curl -o base.txz $FREEBSD_DOWNLOAD_URL && \ - echo "$FREEBSD_SHA base.txz" | sha256sum -c - && \ - mkdir -p /opt/cross-freebsd && \ - cd /opt/cross-freebsd && \ - tar -xf /tmp/base.txz ./lib/ ./usr/lib/ ./usr/include/ && \ - rm -f /tmp/base.txz && \ - cd 
/opt/cross-freebsd/usr/lib && \ - find . -xtype l | xargs ls -l | grep ' /lib/' | awk '{print "ln -sf /opt/cross-freebsd"$11 " " $9}' | /bin/sh && \ - ln -s libc++.a libstdc++.a && \ - ln -s libc++.so libstdc++.so - -# macOS cross-compilation setup -ENV OSX_SDK_VERSION 11.3 -ENV OSX_SDK_DOWNLOAD_FILE MacOSX${OSX_SDK_VERSION}.sdk.tar.xz -ENV OSX_SDK_DOWNLOAD_URL https://github.com/phracker/MacOSX-SDKs/releases/download/${OSX_SDK_VERSION}/${OSX_SDK_DOWNLOAD_FILE} -ENV OSX_SDK_SHA cd4f08a75577145b8f05245a2975f7c81401d75e9535dcffbb879ee1deefcbf4 -ENV OSXCROSS_REVISION 5e1b71fcceb23952f3229995edca1b6231525b5b -ENV OSXCROSS_DOWNLOAD_URL https://codeload.github.com/tpoechtrager/osxcross/tar.gz/${OSXCROSS_REVISION} -ENV OSXCROSS_SHA d3f771bbc20612fea577b18a71be3af2eb5ad2dd44624196cf55de866d008647 - -RUN cd /tmp && \ - curl -o osxcross.tar.gz $OSXCROSS_DOWNLOAD_URL && \ - echo "$OSXCROSS_SHA osxcross.tar.gz" | sha256sum -c - && \ - mkdir osxcross && \ - tar --strip=1 -C osxcross -xf osxcross.tar.gz && \ - rm -f osxcross.tar.gz && \ - curl -Lo $OSX_SDK_DOWNLOAD_FILE $OSX_SDK_DOWNLOAD_URL && \ - echo "$OSX_SDK_SHA $OSX_SDK_DOWNLOAD_FILE" | sha256sum -c - && \ - mv $OSX_SDK_DOWNLOAD_FILE osxcross/tarballs/ && \ - UNATTENDED=yes SDK_VERSION=$OSX_SDK_VERSION OSX_VERSION_MIN=10.10 osxcross/build.sh && \ - cp osxcross/target/lib/* /usr/lib/ && \ - mv osxcross/target /opt/osx-ndk-x86 && \ - rm -rf /tmp/osxcross - -ENV PATH /opt/osx-ndk-x86/bin:$PATH - -RUN mkdir -p /root/.ssh && \ - chmod 0700 /root/.ssh && \ - ssh-keyscan github.com > /root/.ssh/known_hosts - -# ignore "dubious ownership" errors +# clang for macos +# zip for stashapp.zip +# gcc-extensions for cross-arch build +# we still target arm soft float? 
+RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + git make bash \ + clang zip \ + gcc-mingw-w64 \ + gcc-arm-linux-gnueabi \ + libc-dev-armel-cross linux-libc-dev-armel-cross \ + gcc-aarch64-linux-gnu libc-dev-arm64-cross && \ + rm -rf /var/lib/apt/lists/*; RUN git config --global safe.directory '*' - # To test locally: # make generate # make ui # cd docker/compiler -# make build -# docker run --rm -v /PATH_TO_STASH:/stash -w /stash -i -t stashapp/compiler:latest make build-cc-all -# # binaries will show up in /dist +# docker build . -t ghcr.io/stashapp/compiler:latest +# docker run --rm -v /PATH_TO_STASH:/stash -w /stash -i -t ghcr.io/stashapp/compiler:latest make build-cc-all +# # binaries will show up in /dist \ No newline at end of file diff --git a/docker/compiler/Makefile b/docker/compiler/Makefile index ed6a9a285..66f19f5d6 100644 --- a/docker/compiler/Makefile +++ b/docker/compiler/Makefile @@ -1,16 +1,22 @@ +host=ghcr.io user=stashapp repo=compiler -version=12 +version=13 + +VERSION_IMAGE = ${host}/${user}/${repo}:${version} +LATEST_IMAGE = ${host}/${user}/${repo}:latest latest: - docker build -t ${user}/${repo}:latest . + docker build -t ${LATEST_IMAGE} . build: - docker build -t ${user}/${repo}:${version} -t ${user}/${repo}:latest . + docker build -t ${VERSION_IMAGE} -t ${LATEST_IMAGE} . build-no-cache: - docker build --no-cache -t ${user}/${repo}:${version} -t ${user}/${repo}:latest . + docker build --no-cache -t ${VERSION_IMAGE} -t ${LATEST_IMAGE} . 
-install: build - docker push ${user}/${repo}:${version} - docker push ${user}/${repo}:latest +# requires docker login ghcr.io +# echo $CR_PAT | docker login ghcr.io -u USERNAME --password-stdin +push: + docker push ${VERSION_IMAGE} + docker push ${LATEST_IMAGE} \ No newline at end of file diff --git a/docker/compiler/README.md b/docker/compiler/README.md index 6bb7d8d99..c7b4840f9 100644 --- a/docker/compiler/README.md +++ b/docker/compiler/README.md @@ -1,3 +1,3 @@ Modified from https://github.com/bep/dockerfiles/tree/master/ci-goreleaser -When the Dockerfile is changed, the version number should be incremented in the Makefile and the new version tag should be pushed to Docker Hub. The GitHub workflow files also need to be updated to pull the correct image tag. +When the Dockerfile is changed, the version number should be incremented in [.github/workflows/build-compiler.yml](../../.github/workflows/build-compiler.yml) and the workflow run manually. `env: COMPILER_IMAGE` in [.github/workflows/build.yml](../../.github/workflows/build.yml) also needs to be updated to pull the correct image tag. \ No newline at end of file diff --git a/docs/DEVELOPMENT.md b/docs/DEVELOPMENT.md index 85c2f6f23..a26ce6817 100644 --- a/docs/DEVELOPMENT.md +++ b/docs/DEVELOPMENT.md @@ -118,8 +118,8 @@ This project uses a modification of the [CI-GoReleaser](https://github.com/bep/d To cross-compile the app yourself: 1. Run `make pre-ui`, `make generate` and `make ui` outside the container, to generate files and build the UI. -2. Pull the latest compiler image from Docker Hub: `docker pull stashapp/compiler` -3. Run `docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -it stashapp/compiler /bin/bash` to open a shell inside the container. +2. Pull the latest compiler image from GHCR: `docker pull ghcr.io/stashapp/compiler` +3. 
Run `docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -it ghcr.io/stashapp/compiler /bin/bash` to open a shell inside the container. 4. From inside the container, run `make build-cc-all` to build for all platforms, or run `make build-cc-{platform}` to build for a specific platform (have a look at the `Makefile` for the list of targets). 5. You will find the compiled binaries in `dist/`. diff --git a/go.mod b/go.mod index 0cf02fa0d..348036710 100644 --- a/go.mod +++ b/go.mod @@ -7,10 +7,10 @@ require ( github.com/WithoutPants/sortorder v0.0.0-20230616003020-921c9ef69552 github.com/Yamashou/gqlgenc v0.32.1 github.com/anacrolix/dms v1.2.2 - github.com/antchfx/htmlquery v1.3.0 + github.com/antchfx/htmlquery v1.3.5 github.com/asticode/go-astisub v0.25.1 - github.com/chromedp/cdproto v0.0.0-20231007061347-18b01cd81617 - github.com/chromedp/chromedp v0.9.2 + github.com/chromedp/cdproto v0.0.0-20250803210736-d308e07a266d + github.com/chromedp/chromedp v0.14.2 github.com/corona10/goimagehash v1.1.0 github.com/disintegration/imaging v1.6.2 github.com/dop251/goja v0.0.0-20231027120936-b396bb4c349d @@ -44,6 +44,7 @@ require ( github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 github.com/remeh/sizedwaitgroup v1.0.0 github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd + github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 github.com/sirupsen/logrus v1.9.3 github.com/spf13/cast v1.6.0 github.com/spf13/pflag v1.0.6 @@ -69,20 +70,21 @@ require ( require ( github.com/agnivade/levenshtein v1.2.1 // indirect - github.com/antchfx/xpath v1.2.3 // indirect + github.com/antchfx/xpath v1.3.5 // indirect github.com/asticode/go-astikit v0.20.0 // indirect github.com/asticode/go-astits v1.8.0 // indirect - github.com/chromedp/sysutil v1.0.0 // indirect + github.com/chromedp/sysutil v1.1.0 // indirect github.com/coder/websocket v1.8.12 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.7 // indirect github.com/davecgh/go-spew v1.1.1 // 
indirect github.com/dlclark/regexp2 v1.7.0 // indirect github.com/fsnotify/fsnotify v1.9.0 // indirect + github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2 // indirect github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect github.com/go-viper/mapstructure/v2 v2.4.0 // indirect github.com/gobwas/httphead v0.1.0 // indirect github.com/gobwas/pool v0.2.1 // indirect - github.com/gobwas/ws v1.3.0 // indirect + github.com/gobwas/ws v1.4.0 // indirect github.com/goccy/go-yaml v1.18.0 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/google/pprof v0.0.0-20230207041349-798e818bf904 // indirect @@ -90,10 +92,8 @@ require ( github.com/hashicorp/go-multierror v1.1.1 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/josharian/intern v1.0.0 // indirect github.com/knadh/koanf/maps v0.1.2 // indirect github.com/magiconair/properties v1.8.7 // indirect - github.com/mailru/easyjson v0.7.7 // indirect github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect diff --git a/go.sum b/go.sum index fc731b705..4e19720f5 100644 --- a/go.sum +++ b/go.sum @@ -85,10 +85,10 @@ github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/ github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM= github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA= -github.com/antchfx/htmlquery v1.3.0 h1:5I5yNFOVI+egyia5F2s/5Do2nFWxJz41Tr3DyfKD25E= -github.com/antchfx/htmlquery v1.3.0/go.mod h1:zKPDVTMhfOmcwxheXUsx4rKJy8KEY/PU6eXr/2SebQ8= -github.com/antchfx/xpath v1.2.3 h1:CCZWOzv5bAqjVv0offZ2LVgVYFbeldKQVuLNbViZdes= -github.com/antchfx/xpath v1.2.3/go.mod 
h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= +github.com/antchfx/htmlquery v1.3.5 h1:aYthDDClnG2a2xePf6tys/UyyM/kRcsFRm+ifhFKoU0= +github.com/antchfx/htmlquery v1.3.5/go.mod h1:5oyIPIa3ovYGtLqMPNjBF2Uf25NPCKsMjCnQ8lvjaoA= +github.com/antchfx/xpath v1.3.5 h1:PqbXLC3TkfeZyakF5eeh3NTWEbYl4VHNVeufANzDbKQ= +github.com/antchfx/xpath v1.3.5/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= @@ -116,13 +116,12 @@ github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/chromedp/cdproto v0.0.0-20230802225258-3cf4e6d46a89/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs= -github.com/chromedp/cdproto v0.0.0-20231007061347-18b01cd81617 h1:/5dwcyi5WOawM1Iz6MjrYqB90TRIdZv3O0fVHEJb86w= -github.com/chromedp/cdproto v0.0.0-20231007061347-18b01cd81617/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs= -github.com/chromedp/chromedp v0.9.2 h1:dKtNz4kApb06KuSXoTQIyUC2TrA0fhGDwNZf3bcgfKw= -github.com/chromedp/chromedp v0.9.2/go.mod h1:LkSXJKONWTCHAfQasKFUZI+mxqS4tZqhmtGzzhLsnLs= -github.com/chromedp/sysutil v1.0.0 h1:+ZxhTpfpZlmchB58ih/LBHX52ky7w2VhQVKQMucy3Ic= -github.com/chromedp/sysutil v1.0.0/go.mod h1:kgWmDdq8fTzXYcKIBqIYvRRTnYb9aNS9moAV0xufSww= +github.com/chromedp/cdproto v0.0.0-20250803210736-d308e07a266d h1:ZtA1sedVbEW7EW80Iz2GR3Ye6PwbJAJXjv7D74xG6HU= +github.com/chromedp/cdproto v0.0.0-20250803210736-d308e07a266d/go.mod 
h1:NItd7aLkcfOA/dcMXvl8p1u+lQqioRMq/SqDp71Pb/k= +github.com/chromedp/chromedp v0.14.2 h1:r3b/WtwM50RsBZHMUm9fsNhhzRStTHrKdr2zmwbZSzM= +github.com/chromedp/chromedp v0.14.2/go.mod h1:rHzAv60xDE7VNy/MYtTUrYreSc0ujt2O1/C3bzctYBo= +github.com/chromedp/sysutil v1.1.0 h1:PUFNv5EcprjqXZD9nJb9b/c9ibAbxiYo4exNWZyipwM= +github.com/chromedp/sysutil v1.1.0/go.mod h1:WiThHUdltqCNKGc4gaU50XgYjwjYIhKWoHGPTUfWTJ8= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/logex v1.2.0/go.mod h1:9+9sk7u7pGNWYMkh0hdiL++6OeibzJccyQU4p4MedaY= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= @@ -206,6 +205,8 @@ github.com/go-chi/httplog v0.3.1/go.mod h1:UoiQQ/MTZH5V6JbNB2FzF0DynTh5okpXxlhsy github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2 h1:iizUGZ9pEquQS5jTGkh4AqeeHCMbfbjeb0zMt0aEFzs= +github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2/go.mod h1:TiCD2a1pcmjd7YnhGH0f/zKNcCD06B029pHhzV23c2M= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= @@ -224,9 +225,8 @@ github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM= github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og= github.com/gobwas/pool v0.2.1/go.mod 
h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= -github.com/gobwas/ws v1.2.1/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY= -github.com/gobwas/ws v1.3.0 h1:sbeU3Y4Qzlb+MOzIe6mQGf7QR4Hkv6ZD0qhGkBFL2O0= -github.com/gobwas/ws v1.3.0/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY= +github.com/gobwas/ws v1.4.0 h1:CTaoG1tojrh4ucGPcoJFiAQUAsEWekEWvLy7GsVNqGs= +github.com/gobwas/ws v1.4.0/go.mod h1:G3gNqMNtPppf5XUz7O4shetPpcZ1VJ7zt18dlUeakrc= github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw= github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= @@ -286,6 +286,7 @@ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= @@ -379,8 +380,6 @@ github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8= github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg= github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY= -github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= -github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/json-iterator/go v1.1.6/go.mod 
h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= @@ -432,8 +431,6 @@ github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc8 github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= -github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= -github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= @@ -540,6 +537,8 @@ github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd h1:CmH9+J6ZSsIjUK github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8= +github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 h1:OkMGxebDjyw0ULyrTYWeN0UNCCkmCWfjPnIA2W6oviI= +github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06/go.mod h1:+ePHsJ1keEjQtpvf9HHw0f4ZeJ0TLRsxhunSI2hYJSs= github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig= github.com/sagikazarmark/crypt v0.4.0/go.mod h1:ALv2SRj7GxYV4HO9elxH9nS6M9gW+xDNxqmyJ6RfDFM= github.com/sean-/seed 
v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= @@ -664,6 +663,10 @@ golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.0.0-20211215165025-cf75a172585e/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= +golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -707,6 +710,10 @@ golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA= golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w= golang.org/x/net 
v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -757,7 +764,12 @@ golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qx golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -789,6 +801,11 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.6.0/go.mod 
h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -869,14 +886,25 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= 
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= +golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -889,7 +917,12 @@ golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= golang.org/x/text v0.31.0/go.mod 
h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -956,6 +989,9 @@ golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/gqlgen.yml b/gqlgen.yml index b949d44dc..4a3d73d51 100644 --- a/gqlgen.yml +++ b/gqlgen.yml @@ -140,4 +140,8 @@ models: fields: plugins: resolver: true + Performer: + fields: + career_length: + resolver: true diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql index 8936b8a34..7f07e4579 100644 --- a/graphql/schema/schema.graphql +++ b/graphql/schema/schema.graphql @@ -373,6 +373,7 @@ type Mutation { performerDestroy(input: PerformerDestroyInput!): Boolean! performersDestroy(ids: [ID!]!): Boolean! bulkPerformerUpdate(input: BulkPerformerUpdateInput!): [Performer!] + performerMerge(input: PerformerMergeInput!): Performer! studioCreate(input: StudioCreateInput!): Studio studioUpdate(input: StudioUpdateInput!): Studio @@ -421,8 +422,14 @@ type Mutation { """ moveFiles(input: MoveFilesInput!): Boolean! deleteFiles(ids: [ID!]!): Boolean! 
+ "Deletes file entries from the database without deleting the files from the filesystem" + destroyFiles(ids: [ID!]!): Boolean! fileSetFingerprints(input: FileSetFingerprintsInput!): Boolean! + "Reveal the file in the system file manager" + revealFileInFileManager(id: ID!): Boolean! + "Reveal the folder in the system file manager" + revealFolderInFileManager(id: ID!): Boolean! # Saved filters saveFilter(input: SaveFilterInput!): SavedFilter! @@ -576,6 +583,8 @@ type Mutation { stashBoxBatchPerformerTag(input: StashBoxBatchTagInput!): String! "Run batch studio tag task. Returns the job ID." stashBoxBatchStudioTag(input: StashBoxBatchTagInput!): String! + "Run batch tag tag task. Returns the job ID." + stashBoxBatchTagTag(input: StashBoxBatchTagInput!): String! "Enables DLNA for an optional duration. Has no effect if DLNA is enabled by default" enableDLNA(input: EnableDLNAInput!): Boolean! diff --git a/graphql/schema/types/config.graphql b/graphql/schema/types/config.graphql index b6f52091b..5ab7fdfea 100644 --- a/graphql/schema/types/config.graphql +++ b/graphql/schema/types/config.graphql @@ -184,6 +184,18 @@ input ConfigGeneralInput { scraperPackageSources: [PackageSourceInput!] "Source of plugin packages" pluginPackageSources: [PackageSourceInput!] + + "Size of the longest dimension for each sprite in pixels" + spriteScreenshotSize: Int + + "True if sprite generation should use the sprite interval and min/max sprites settings instead of the default" + useCustomSpriteInterval: Boolean + "Time between two different scrubber sprites in seconds - only used if useCustomSpriteInterval is true" + spriteInterval: Float + "Minimum number of sprites to be generated - only used if useCustomSpriteInterval is true" + minimumSprites: Int + "Minimum number of sprites to be generated - only used if useCustomSpriteInterval is true" + maximumSprites: Int } type ConfigGeneralResult { @@ -287,6 +299,16 @@ type ConfigGeneralResult { logAccess: Boolean! 
"Maximum log size" logFileMaxSize: Int! + "True if sprite generation should use the sprite interval and min/max sprites settings instead of the default" + useCustomSpriteInterval: Boolean! + "Time between two different scrubber sprites in seconds - only used if useCustomSpriteInterval is true" + spriteInterval: Float! + "Minimum number of sprites to be generated - only used if useCustomSpriteInterval is true" + minimumSprites: Int! + "Maximum number of sprites to be generated - only used if useCustomSpriteInterval is true" + maximumSprites: Int! + "Size of the longest dimension for each sprite in pixels" + spriteScreenshotSize: Int! "Array of video file extensions" videoExtensions: [String!]! "Array of image file extensions" @@ -395,6 +417,9 @@ input ConfigInterfaceInput { customLocales: String customLocalesEnabled: Boolean + "When true, disables all customizations (plugins, CSS, JavaScript, locales) for troubleshooting" + disableCustomizations: Boolean + "Interface language" language: String @@ -469,6 +494,9 @@ type ConfigInterfaceResult { customLocales: String customLocalesEnabled: Boolean + "When true, disables all customizations (plugins, CSS, JavaScript, locales) for troubleshooting" + disableCustomizations: Boolean + "Interface language" language: String diff --git a/graphql/schema/types/file.graphql b/graphql/schema/types/file.graphql index 835479fad..fcc2a58c8 100644 --- a/graphql/schema/types/file.graphql +++ b/graphql/schema/types/file.graphql @@ -6,13 +6,19 @@ type Fingerprint { type Folder { id: ID! path: String! + basename: String! parent_folder_id: ID @deprecated(reason: "Use parent_folder instead") zip_file_id: ID @deprecated(reason: "Use zip_file instead") parent_folder: Folder + "Returns all parent folders in order from immediate parent to top-level" + parent_folders: [Folder!]! zip_file: BasicFile + "Returns direct sub-folders" + sub_folders: [Folder!]! + mod_time: Time! created_at: Time! 
@@ -153,7 +159,7 @@ input MoveFilesInput { input SetFingerprintsInput { type: String! - "an null value will remove the fingerprint" + "a null value will remove the fingerprint" value: String } diff --git a/graphql/schema/types/filters.graphql b/graphql/schema/types/filters.graphql index bb312e31d..c7d880266 100644 --- a/graphql/schema/types/filters.graphql +++ b/graphql/schema/types/filters.graphql @@ -75,22 +75,48 @@ input OrientationCriterionInput { value: [OrientationEnum!]! } -input PHashDuplicationCriterionInput { - duplicated: Boolean - "Currently unimplemented" +input DuplicationCriterionInput { + duplicated: Boolean @deprecated(reason: "Use phash field instead") + "Currently unimplemented. Intended for phash distance matching." distance: Int + "Filter by phash duplication" + phash: Boolean + "Filter by URL duplication" + url: Boolean + "Filter by Stash ID duplication" + stash_id: Boolean + "Filter by title duplication" + title: Boolean +} + +input FileDuplicationCriterionInput { + duplicated: Boolean @deprecated(reason: "Use phash field instead") + "Currently unimplemented. Intended for phash distance matching." + distance: Int + "Filter by phash duplication" + phash: Boolean } input StashIDCriterionInput { """ If present, this value is treated as a predicate. - That is, it will filter based on stash_ids with the matching endpoint + That is, it will filter based on stash_id with the matching endpoint """ endpoint: String stash_id: String modifier: CriterionModifier! } +input StashIDsCriterionInput { + """ + If present, this value is treated as a predicate. + That is, it will filter based on stash_ids with the matching endpoint + """ + endpoint: String + stash_ids: [String] + modifier: CriterionModifier! +} + input CustomFieldCriterionInput { field: String! value: [Any!] 
@@ -126,10 +152,15 @@ input PerformerFilterType { fake_tits: StringCriterionInput "Filter by penis length value" penis_length: FloatCriterionInput - "Filter by ciricumcision" + "Filter by circumcision" circumcised: CircumcisionCriterionInput - "Filter by career length" + "Deprecated: use career_start and career_end. This filter is non-functional." career_length: StringCriterionInput + @deprecated(reason: "Use career_start and career_end") + "Filter by career start" + career_start: DateCriterionInput + "Filter by career end" + career_end: DateCriterionInput "Filter by tattoos" tattoos: StringCriterionInput "Filter by piercings" @@ -146,6 +177,8 @@ input PerformerFilterType { tag_count: IntCriterionInput "Filter by scene count" scene_count: IntCriterionInput + "Filter by marker count (via scene)" + marker_count: IntCriterionInput "Filter by image count" image_count: IntCriterionInput "Filter by gallery count" @@ -156,6 +189,9 @@ input PerformerFilterType { o_counter: IntCriterionInput "Filter by StashID" stash_id_endpoint: StashIDCriterionInput + @deprecated(reason: "use stash_ids_endpoint instead") + "Filter by StashIDs" + stash_ids_endpoint: StashIDsCriterionInput # rating expressed as 1-100 rating100: IntCriterionInput "Filter by url" @@ -186,6 +222,8 @@ input PerformerFilterType { galleries_filter: GalleryFilterType "Filter by related tags that meet this criteria" tags_filter: TagFilterType + "Filter by related scene markers (via scene) that meet this criteria" + markers_filter: SceneMarkerFilterType "Filter by creation time" created_at: TimestampCriterionInput "Filter by last update time" @@ -211,9 +249,9 @@ input SceneMarkerFilterType { updated_at: TimestampCriterionInput "Filter by scene date" scene_date: DateCriterionInput - "Filter by cscene reation time" + "Filter by scene creation time" scene_created_at: TimestampCriterionInput - "Filter by lscene ast update time" + "Filter by scene last update time" scene_updated_at: TimestampCriterionInput "Filter by 
related scenes that meet this criteria" scene_filter: SceneFilterType @@ -248,8 +286,8 @@ input SceneFilterType { organized: Boolean "Filter by o-counter" o_counter: IntCriterionInput - "Filter Scenes that have an exact phash match available" - duplicated: PHashDuplicationCriterionInput + "Filter Scenes by duplication criteria" + duplicated: DuplicationCriterionInput "Filter by resolution" resolution: ResolutionCriterionInput "Filter by orientation" @@ -292,6 +330,11 @@ input SceneFilterType { performer_count: IntCriterionInput "Filter by StashID" stash_id_endpoint: StashIDCriterionInput + @deprecated(reason: "use stash_ids_endpoint instead") + "Filter by StashIDs" + stash_ids_endpoint: StashIDsCriterionInput + "Filter by StashID count" + stash_id_count: IntCriterionInput "Filter by url" url: StringCriterionInput "Filter by interactive" @@ -332,6 +375,8 @@ input SceneFilterType { markers_filter: SceneMarkerFilterType "Filter by related files that meet this criteria" files_filter: FileFilterType + + custom_fields: [CustomFieldCriterionInput!] } input MovieFilterType { @@ -414,11 +459,16 @@ input GroupFilterType { containing_group_count: IntCriterionInput "Filter by number of sub-groups the group has" sub_group_count: IntCriterionInput + "Filter by number of scenes the group has" + scene_count: IntCriterionInput "Filter by related scenes that meet this criteria" scenes_filter: SceneFilterType "Filter by related studios that meet this criteria" studios_filter: StudioFilterType + + "Filter by custom fields" + custom_fields: [CustomFieldCriterionInput!] 
} input StudioFilterType { @@ -432,6 +482,9 @@ input StudioFilterType { parents: MultiCriterionInput "Filter by StashID" stash_id_endpoint: StashIDCriterionInput + @deprecated(reason: "use stash_ids_endpoint instead") + "Filter by StashIDs" + stash_ids_endpoint: StashIDsCriterionInput "Filter to only include studios with these tags" tags: HierarchicalMultiCriterionInput "Filter to only include studios missing this property" @@ -446,6 +499,8 @@ input StudioFilterType { image_count: IntCriterionInput "Filter by gallery count" gallery_count: IntCriterionInput + "Filter by group count" + group_count: IntCriterionInput "Filter by tag count" tag_count: IntCriterionInput "Filter by url" @@ -456,16 +511,22 @@ input StudioFilterType { child_count: IntCriterionInput "Filter by autotag ignore value" ignore_auto_tag: Boolean + "Filter by organized" + organized: Boolean "Filter by related scenes that meet this criteria" scenes_filter: SceneFilterType "Filter by related images that meet this criteria" images_filter: ImageFilterType "Filter by related galleries that meet this criteria" galleries_filter: GalleryFilterType + "Filter by related groups that meet this criteria" + groups_filter: GroupFilterType "Filter by creation time" created_at: TimestampCriterionInput "Filter by last update time" updated_at: TimestampCriterionInput + + custom_fields: [CustomFieldCriterionInput!] } input GalleryFilterType { @@ -542,6 +603,10 @@ input GalleryFilterType { files_filter: FileFilterType "Filter by related folders that meet this criteria" folders_filter: FolderFilterType + "Filter by parent folder of the zip or folder the gallery is in" + parent_folder: HierarchicalMultiCriterionInput + + custom_fields: [CustomFieldCriterionInput!] 
} input TagFilterType { @@ -600,7 +665,7 @@ input TagFilterType { "Filter by number of parent tags the tag has" parent_count: IntCriterionInput - "Filter by number f child tags the tag has" + "Filter by number of child tags the tag has" child_count: IntCriterionInput "Filter by autotag ignore value" @@ -608,6 +673,10 @@ input TagFilterType { "Filter by StashID" stash_id_endpoint: StashIDCriterionInput + @deprecated(reason: "use stash_ids_endpoint instead") + + "Filter by StashID" + stash_ids_endpoint: StashIDsCriterionInput "Filter by related scenes that meet this criteria" scenes_filter: SceneFilterType @@ -615,12 +684,22 @@ input TagFilterType { images_filter: ImageFilterType "Filter by related galleries that meet this criteria" galleries_filter: GalleryFilterType + "Filter by related groups that meet this criteria" + groups_filter: GroupFilterType + "Filter by related performers that meet this criteria" + performers_filter: PerformerFilterType + "Filter by related studios that meet this criteria" + studios_filter: StudioFilterType + "Filter by related scene markers that meet this criteria" + markers_filter: SceneMarkerFilterType "Filter by creation time" created_at: TimestampCriterionInput "Filter by last update time" updated_at: TimestampCriterionInput + + custom_fields: [CustomFieldCriterionInput!] } input ImageFilterType { @@ -635,6 +714,8 @@ input ImageFilterType { id: IntCriterionInput "Filter by file checksum" checksum: StringCriterionInput + "Filter by file phash distance" + phash_distance: PhashDistanceCriterionInput "Filter by path" path: StringCriterionInput "Filter by file count" @@ -692,6 +773,8 @@ input ImageFilterType { tags_filter: TagFilterType "Filter by related files that meet this criteria" files_filter: FileFilterType + "Filter by custom fields" + custom_fields: [CustomFieldCriterionInput!] 
} input FileFilterType { @@ -709,8 +792,8 @@ input FileFilterType { "Filter by modification time" mod_time: TimestampCriterionInput - "Filter files that have an exact match available" - duplicated: PHashDuplicationCriterionInput + "Filter files by duplication criteria (only phash applies to files)" + duplicated: FileDuplicationCriterionInput "find files based on hash" hashes: [FingerprintFilterInput!] @@ -741,6 +824,7 @@ input FolderFilterType { NOT: FolderFilterType path: StringCriterionInput + basename: StringCriterionInput parent_folder: HierarchicalMultiCriterionInput zip_file: MultiCriterionInput @@ -849,7 +933,7 @@ input GenderCriterionInput { } input CircumcisionCriterionInput { - value: [CircumisedEnum!] + value: [CircumcisedEnum!] modifier: CriterionModifier! } diff --git a/graphql/schema/types/gallery.graphql b/graphql/schema/types/gallery.graphql index 999a743f7..e28c3802b 100644 --- a/graphql/schema/types/gallery.graphql +++ b/graphql/schema/types/gallery.graphql @@ -32,6 +32,7 @@ type Gallery { cover: Image paths: GalleryPathsType! # Resolver + custom_fields: Map! image(index: Int!): Image! } @@ -50,6 +51,8 @@ input GalleryCreateInput { studio_id: ID tag_ids: [ID!] performer_ids: [ID!] + + custom_fields: Map } input GalleryUpdateInput { @@ -71,6 +74,8 @@ input GalleryUpdateInput { performer_ids: [ID!] 
primary_file_id: ID + + custom_fields: CustomFieldsInput } input BulkGalleryUpdateInput { @@ -89,6 +94,8 @@ input BulkGalleryUpdateInput { studio_id: ID tag_ids: BulkUpdateIds performer_ids: BulkUpdateIds + + custom_fields: CustomFieldsInput } input GalleryDestroyInput { @@ -100,6 +107,8 @@ input GalleryDestroyInput { """ delete_file: Boolean delete_generated: Boolean + "If true, delete the file entry from the database if the file is not assigned to any other objects" + destroy_file_entry: Boolean } type FindGalleriesResultType { diff --git a/graphql/schema/types/group.graphql b/graphql/schema/types/group.graphql index a46932054..8610f39dc 100644 --- a/graphql/schema/types/group.graphql +++ b/graphql/schema/types/group.graphql @@ -31,6 +31,7 @@ type Group { sub_group_count(depth: Int): Int! # Resolver scenes: [Scene!]! o_counter: Int # Resolver + custom_fields: Map! } input GroupDescriptionInput { @@ -59,6 +60,8 @@ input GroupCreateInput { front_image: String "This should be a URL or a base64 encoded data URL" back_image: String + + custom_fields: Map } input GroupUpdateInput { @@ -82,6 +85,8 @@ input GroupUpdateInput { front_image: String "This should be a URL or a base64 encoded data URL" back_image: String + + custom_fields: CustomFieldsInput } input BulkUpdateGroupDescriptionsInput { @@ -94,6 +99,8 @@ input BulkGroupUpdateInput { ids: [ID!] # rating expressed as 1-100 rating100: Int + date: String + synopsis: String studio_id: ID director: String urls: BulkUpdateStrings @@ -101,6 +108,8 @@ input BulkGroupUpdateInput { containing_groups: BulkUpdateGroupDescriptionsInput sub_groups: BulkUpdateGroupDescriptionsInput + + custom_fields: CustomFieldsInput } input GroupDestroyInput { diff --git a/graphql/schema/types/image.graphql b/graphql/schema/types/image.graphql index fb95556f5..ccc414542 100644 --- a/graphql/schema/types/image.graphql +++ b/graphql/schema/types/image.graphql @@ -21,6 +21,7 @@ type Image { studio: Studio tags: [Tag!]! performers: [Performer!]! 
+ custom_fields: Map! } type ImageFileType { @@ -56,6 +57,7 @@ input ImageUpdateInput { gallery_ids: [ID!] primary_file_id: ID + custom_fields: CustomFieldsInput } input BulkImageUpdateInput { @@ -76,18 +78,23 @@ input BulkImageUpdateInput { performer_ids: BulkUpdateIds tag_ids: BulkUpdateIds gallery_ids: BulkUpdateIds + custom_fields: CustomFieldsInput } input ImageDestroyInput { id: ID! delete_file: Boolean delete_generated: Boolean + "If true, delete the file entry from the database if the file is not assigned to any other objects" + destroy_file_entry: Boolean } input ImagesDestroyInput { ids: [ID!]! delete_file: Boolean delete_generated: Boolean + "If true, delete the file entry from the database if the file is not assigned to any other objects" + destroy_file_entry: Boolean } type FindImagesResultType { diff --git a/graphql/schema/types/metadata.graphql b/graphql/schema/types/metadata.graphql index c01858f64..6ad620dbe 100644 --- a/graphql/schema/types/metadata.graphql +++ b/graphql/schema/types/metadata.graphql @@ -10,8 +10,11 @@ input GenerateMetadataInput { transcodes: Boolean "Generate transcodes even if not required" forceTranscodes: Boolean + "Generate video phashes during scan" phashes: Boolean interactiveHeatmapsSpeeds: Boolean + "Generate image phashes during scan" + imagePhashes: Boolean imageThumbnails: Boolean clipPreviews: Boolean @@ -19,6 +22,12 @@ input GenerateMetadataInput { sceneIDs: [ID!] "marker ids to generate for" markerIDs: [ID!] + "image ids to generate for" + imageIDs: [ID!] + "gallery ids to generate for" + galleryIDs: [ID!] + "paths to run generate on, in addition to the other ID lists" + paths: [String!] 
"overwrite existing media" overwrite: Boolean @@ -85,8 +94,10 @@ input ScanMetadataInput { scanGenerateImagePreviews: Boolean "Generate sprites during scan" scanGenerateSprites: Boolean - "Generate phashes during scan" + "Generate video phashes during scan" scanGeneratePhashes: Boolean + "Generate image phashes during scan" + scanGenerateImagePhashes: Boolean "Generate image thumbnails during scan" scanGenerateThumbnails: Boolean "Generate image clip previews during scan" @@ -107,8 +118,10 @@ type ScanMetadataOptions { scanGenerateImagePreviews: Boolean! "Generate sprites during scan" scanGenerateSprites: Boolean! - "Generate phashes during scan" + "Generate video phashes during scan" scanGeneratePhashes: Boolean! + "Generate image phashes during scan" + scanGenerateImagePhashes: Boolean "Generate image thumbnails during scan" scanGenerateThumbnails: Boolean! "Generate image clip previews during scan" @@ -118,6 +131,14 @@ type ScanMetadataOptions { input CleanMetadataInput { paths: [String!] + """ + Don't check zip file contents when determining whether to clean a file. + This can significantly speed up the clean process, but will potentially miss removed files within zip files. + Where users do not modify zip files contents directly, this should be safe to use. + Defaults to false. + """ + ignoreZipFileContents: Boolean + "Do a dry run. Don't delete any files" dryRun: Boolean! } @@ -204,7 +225,9 @@ input IdentifyMetadataOptionsInput { setCoverImage: Boolean setOrganized: Boolean "defaults to true if not provided" - includeMalePerformers: Boolean + includeMalePerformers: Boolean @deprecated(reason: "Use performerGenders") + "Filter to only include performers with these genders. If not provided, all genders are included." + performerGenders: [GenderEnum!] 
"defaults to true if not provided" skipMultipleMatches: Boolean "tag to tag skipped multiple matches with" @@ -249,7 +272,9 @@ type IdentifyMetadataOptions { setCoverImage: Boolean setOrganized: Boolean "defaults to true if not provided" - includeMalePerformers: Boolean + includeMalePerformers: Boolean @deprecated(reason: "Use performerGenders") + "Filter to only include performers with these genders. If not provided, all genders are included." + performerGenders: [GenderEnum!] "defaults to true if not provided" skipMultipleMatches: Boolean "tag to tag skipped multiple matches with" @@ -310,6 +335,8 @@ input ImportObjectsInput { input BackupDatabaseInput { download: Boolean + "If true, blob files will be included in the backup. This can significantly increase the size of the backup and the time it takes to create it, but allows for a complete backup of the system that can be restored without needing access to the original media files." + includeBlobs: Boolean } input AnonymiseDatabaseInput { diff --git a/graphql/schema/types/performer.graphql b/graphql/schema/types/performer.graphql index fbb67ce8f..bf17298da 100644 --- a/graphql/schema/types/performer.graphql +++ b/graphql/schema/types/performer.graphql @@ -7,7 +7,7 @@ enum GenderEnum { NON_BINARY } -enum CircumisedEnum { +enum CircumcisedEnum { CUT UNCUT } @@ -29,8 +29,10 @@ type Performer { measurements: String fake_tits: String penis_length: Float - circumcised: CircumisedEnum - career_length: String + circumcised: CircumcisedEnum + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: String + career_end: String tattoos: String piercings: String alias_list: [String!]! 
@@ -76,10 +78,13 @@ input PerformerCreateInput { measurements: String fake_tits: String penis_length: Float - circumcised: CircumisedEnum - career_length: String + circumcised: CircumcisedEnum + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: String + career_end: String tattoos: String piercings: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" alias_list: [String!] twitter: String @deprecated(reason: "Use urls") instagram: String @deprecated(reason: "Use urls") @@ -114,10 +119,13 @@ input PerformerUpdateInput { measurements: String fake_tits: String penis_length: Float - circumcised: CircumisedEnum - career_length: String + circumcised: CircumcisedEnum + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: String + career_end: String tattoos: String piercings: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" alias_list: [String!] twitter: String @deprecated(reason: "Use urls") instagram: String @deprecated(reason: "Use urls") @@ -157,10 +165,13 @@ input BulkPerformerUpdateInput { measurements: String fake_tits: String penis_length: Float - circumcised: CircumisedEnum - career_length: String + circumcised: CircumcisedEnum + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: String + career_end: String tattoos: String piercings: String + "Duplicate aliases and those equal to name will result in an error (case-insensitive)" alias_list: BulkUpdateStrings twitter: String @deprecated(reason: "Use urls") instagram: String @deprecated(reason: "Use urls") @@ -185,3 +196,10 @@ type FindPerformersResultType { count: Int! performers: [Performer!]! } + +input PerformerMergeInput { + source: [ID!]! + destination: ID! 
+ # values defined here will override values in the destination + values: PerformerUpdateInput +} diff --git a/graphql/schema/types/scene.graphql b/graphql/schema/types/scene.graphql index eca01d15e..4d99e0a21 100644 --- a/graphql/schema/types/scene.graphql +++ b/graphql/schema/types/scene.graphql @@ -79,6 +79,8 @@ type Scene { performers: [Performer!]! stash_ids: [StashID!]! + custom_fields: Map! + "Return valid stream paths" sceneStreams: [SceneStreamEndpoint!]! } @@ -120,6 +122,8 @@ input SceneCreateInput { Files must not already be primary for another scene. """ file_ids: [ID!] + + custom_fields: Map } input SceneUpdateInput { @@ -158,6 +162,8 @@ input SceneUpdateInput { ) primary_file_id: ID + + custom_fields: CustomFieldsInput } enum BulkUpdateIdMode { @@ -190,18 +196,24 @@ input BulkSceneUpdateInput { tag_ids: BulkUpdateIds group_ids: BulkUpdateIds movie_ids: BulkUpdateIds @deprecated(reason: "Use group_ids") + + custom_fields: CustomFieldsInput } input SceneDestroyInput { id: ID! delete_file: Boolean delete_generated: Boolean + "If true, delete the file entry from the database if the file is not assigned to any other objects" + destroy_file_entry: Boolean } input ScenesDestroyInput { ids: [ID!]! 
delete_file: Boolean delete_generated: Boolean + "If true, delete the file entry from the database if the file is not assigned to any other objects" + destroy_file_entry: Boolean } type FindScenesResultType { diff --git a/graphql/schema/types/scraped-performer.graphql b/graphql/schema/types/scraped-performer.graphql index 487c89516..799b5cd6e 100644 --- a/graphql/schema/types/scraped-performer.graphql +++ b/graphql/schema/types/scraped-performer.graphql @@ -18,7 +18,9 @@ type ScrapedPerformer { fake_tits: String penis_length: String circumcised: String - career_length: String + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: String + career_end: String tattoos: String piercings: String # aliases must be comma-delimited to be parsed correctly @@ -54,7 +56,9 @@ input ScrapedPerformerInput { fake_tits: String penis_length: String circumcised: String - career_length: String + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: String + career_end: String tattoos: String piercings: String aliases: String diff --git a/graphql/schema/types/scraper.graphql b/graphql/schema/types/scraper.graphql index 9c0e33fdf..fafd928f7 100644 --- a/graphql/schema/types/scraper.graphql +++ b/graphql/schema/types/scraper.graphql @@ -71,6 +71,9 @@ type ScrapedTag { "Set if tag matched" stored_id: ID name: String! + description: String + alias_list: [String!] + parent: ScrapedTag "Remote site ID, if applicable" remote_site_id: String } diff --git a/graphql/schema/types/studio.graphql b/graphql/schema/types/studio.graphql index 4c5778c5b..51a87bf4f 100644 --- a/graphql/schema/types/studio.graphql +++ b/graphql/schema/types/studio.graphql @@ -8,6 +8,7 @@ type Studio { aliases: [String!]! tags: [Tag!]! ignore_auto_tag: Boolean! + organized: Boolean! image_path: String # Resolver scene_count(depth: Int): Int! # Resolver @@ -26,6 +27,8 @@ type Studio { groups: [Group!]! movies: [Movie!]! 
@deprecated(reason: "use groups instead") o_counter: Int + + custom_fields: Map! } input StudioCreateInput { @@ -40,9 +43,13 @@ input StudioCreateInput { rating100: Int favorite: Boolean details: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" aliases: [String!] tag_ids: [ID!] ignore_auto_tag: Boolean + organized: Boolean + + custom_fields: Map } input StudioUpdateInput { @@ -58,9 +65,13 @@ input StudioUpdateInput { rating100: Int favorite: Boolean details: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" aliases: [String!] tag_ids: [ID!] ignore_auto_tag: Boolean + organized: Boolean + + custom_fields: CustomFieldsInput } input BulkStudioUpdateInput { @@ -74,6 +85,7 @@ input BulkStudioUpdateInput { details: String tag_ids: BulkUpdateIds ignore_auto_tag: Boolean + organized: Boolean } input StudioDestroyInput { diff --git a/graphql/schema/types/tag.graphql b/graphql/schema/types/tag.graphql index 8424ab92a..0acbc927f 100644 --- a/graphql/schema/types/tag.graphql +++ b/graphql/schema/types/tag.graphql @@ -24,6 +24,7 @@ type Tag { parent_count: Int! # Resolver child_count: Int! # Resolver + custom_fields: Map! } input TagCreateInput { @@ -31,6 +32,7 @@ input TagCreateInput { "Value that does not appear in the UI but overrides name for sorting" sort_name: String description: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" aliases: [String!] ignore_auto_tag: Boolean favorite: Boolean @@ -40,6 +42,8 @@ input TagCreateInput { parent_ids: [ID!] child_ids: [ID!] + + custom_fields: Map } input TagUpdateInput { @@ -48,6 +52,7 @@ input TagUpdateInput { "Value that does not appear in the UI but overrides name for sorting" sort_name: String description: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" aliases: [String!] ignore_auto_tag: Boolean favorite: Boolean @@ -57,6 +62,8 @@ input TagUpdateInput { parent_ids: [ID!] 
child_ids: [ID!] + + custom_fields: CustomFieldsInput } input TagDestroyInput { @@ -71,11 +78,14 @@ type FindTagsResultType { input TagsMergeInput { source: [ID!]! destination: ID! + # values defined here will override values in the destination + values: TagUpdateInput } input BulkTagUpdateInput { ids: [ID!] description: String + "Duplicate aliases and those equal to name will result in an error (case-insensitive)" aliases: BulkUpdateStrings ignore_auto_tag: Boolean favorite: Boolean diff --git a/graphql/stash-box/query.graphql b/graphql/stash-box/query.graphql index 2367e85cf..ebaf05648 100644 --- a/graphql/stash-box/query.graphql +++ b/graphql/stash-box/query.graphql @@ -29,6 +29,13 @@ fragment StudioFragment on Studio { fragment TagFragment on Tag { name id + description + aliases + category { + id + name + description + } } fragment MeasurementsFragment on Measurements { @@ -120,18 +127,6 @@ fragment SceneFragment on Scene { } } -query FindSceneByFingerprint($fingerprint: FingerprintQueryInput!) { - findSceneByFingerprint(fingerprint: $fingerprint) { - ...SceneFragment - } -} - -query FindScenesByFullFingerprints($fingerprints: [FingerprintQueryInput!]!) { - findScenesByFullFingerprints(fingerprints: $fingerprints) { - ...SceneFragment - } -} - query FindScenesBySceneFingerprints( $fingerprints: [[FingerprintQueryInput!]!]! 
) { diff --git a/internal/api/authentication.go b/internal/api/authentication.go index 6ad7117a1..be399d222 100644 --- a/internal/api/authentication.go +++ b/internal/api/authentication.go @@ -40,6 +40,8 @@ func authenticateHandler() func(http.Handler) http.Handler { return } + r = session.SetLocalRequest(r) + userID, err := manager.GetInstance().SessionStore.Authenticate(w, r) if err != nil { if !errors.Is(err, session.ErrUnauthorized) { diff --git a/internal/api/loaders/dataloaders.go b/internal/api/loaders/dataloaders.go index 38f72b0a1..c1faf61ed 100644 --- a/internal/api/loaders/dataloaders.go +++ b/internal/api/loaders/dataloaders.go @@ -11,6 +11,7 @@ //go:generate go run github.com/vektah/dataloaden GroupLoader int *github.com/stashapp/stash/pkg/models.Group //go:generate go run github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/models.FileID github.com/stashapp/stash/pkg/models.File //go:generate go run github.com/vektah/dataloaden FolderLoader github.com/stashapp/stash/pkg/models.FolderID *github.com/stashapp/stash/pkg/models.Folder +//go:generate go run github.com/vektah/dataloaden FolderRelatedFolderIDsLoader github.com/stashapp/stash/pkg/models.FolderID []github.com/stashapp/stash/pkg/models.FolderID //go:generate go run github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID //go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID //go:generate go run github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID @@ -42,28 +43,40 @@ const ( ) type Loaders struct { - SceneByID *SceneLoader - SceneFiles *SceneFileIDsLoader - ScenePlayCount *ScenePlayCountLoader - SceneOCount *SceneOCountLoader - ScenePlayHistory *ScenePlayHistoryLoader - SceneOHistory *SceneOHistoryLoader - SceneLastPlayed *SceneLastPlayedLoader + SceneByID *SceneLoader + SceneFiles *SceneFileIDsLoader + ScenePlayCount 
*ScenePlayCountLoader + SceneOCount *SceneOCountLoader + ScenePlayHistory *ScenePlayHistoryLoader + SceneOHistory *SceneOHistoryLoader + SceneLastPlayed *SceneLastPlayedLoader + SceneCustomFields *CustomFieldsLoader ImageFiles *ImageFileIDsLoader GalleryFiles *GalleryFileIDsLoader - GalleryByID *GalleryLoader - ImageByID *ImageLoader + GalleryByID *GalleryLoader + GalleryCustomFields *CustomFieldsLoader + ImageByID *ImageLoader + ImageCustomFields *CustomFieldsLoader PerformerByID *PerformerLoader PerformerCustomFields *CustomFieldsLoader - StudioByID *StudioLoader - TagByID *TagLoader - GroupByID *GroupLoader - FileByID *FileLoader - FolderByID *FolderLoader + StudioByID *StudioLoader + StudioCustomFields *CustomFieldsLoader + + TagByID *TagLoader + TagCustomFields *CustomFieldsLoader + + GroupByID *GroupLoader + GroupCustomFields *CustomFieldsLoader + + FileByID *FileLoader + + FolderByID *FolderLoader + FolderParentFolderIDs *FolderRelatedFolderIDsLoader + FolderSubFolderIDs *FolderRelatedFolderIDsLoader } type Middleware struct { @@ -84,11 +97,21 @@ func (m Middleware) Middleware(next http.Handler) http.Handler { maxBatch: maxBatch, fetch: m.fetchGalleries(ctx), }, + GalleryCustomFields: &CustomFieldsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchGalleryCustomFields(ctx), + }, ImageByID: &ImageLoader{ wait: wait, maxBatch: maxBatch, fetch: m.fetchImages(ctx), }, + ImageCustomFields: &CustomFieldsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchImageCustomFields(ctx), + }, PerformerByID: &PerformerLoader{ wait: wait, maxBatch: maxBatch, @@ -99,6 +122,16 @@ func (m Middleware) Middleware(next http.Handler) http.Handler { maxBatch: maxBatch, fetch: m.fetchPerformerCustomFields(ctx), }, + StudioCustomFields: &CustomFieldsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchStudioCustomFields(ctx), + }, + SceneCustomFields: &CustomFieldsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchSceneCustomFields(ctx), + }, StudioByID: 
&StudioLoader{ wait: wait, maxBatch: maxBatch, @@ -109,11 +142,21 @@ func (m Middleware) Middleware(next http.Handler) http.Handler { maxBatch: maxBatch, fetch: m.fetchTags(ctx), }, + TagCustomFields: &CustomFieldsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchTagCustomFields(ctx), + }, GroupByID: &GroupLoader{ wait: wait, maxBatch: maxBatch, fetch: m.fetchGroups(ctx), }, + GroupCustomFields: &CustomFieldsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchGroupCustomFields(ctx), + }, FileByID: &FileLoader{ wait: wait, maxBatch: maxBatch, @@ -124,6 +167,16 @@ func (m Middleware) Middleware(next http.Handler) http.Handler { maxBatch: maxBatch, fetch: m.fetchFolders(ctx), }, + FolderParentFolderIDs: &FolderRelatedFolderIDsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchFoldersParentFolderIDs(ctx), + }, + FolderSubFolderIDs: &FolderRelatedFolderIDsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchFoldersSubFolderIDs(ctx), + }, SceneFiles: &SceneFileIDsLoader{ wait: wait, maxBatch: maxBatch, @@ -194,6 +247,18 @@ func (m Middleware) fetchScenes(ctx context.Context) func(keys []int) ([]*models } } +func (m Middleware) fetchSceneCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) { + return func(keys []int) (ret []models.CustomFieldMap, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Scene.GetCustomFieldsBulk(ctx, keys) + return err + }) + + return ret, toErrorSlice(err) + } +} + func (m Middleware) fetchImages(ctx context.Context) func(keys []int) ([]*models.Image, []error) { return func(keys []int) (ret []*models.Image, errs []error) { err := m.Repository.WithDB(ctx, func(ctx context.Context) error { @@ -206,6 +271,18 @@ func (m Middleware) fetchImages(ctx context.Context) func(keys []int) ([]*models } } +func (m Middleware) fetchImageCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, 
[]error) { + return func(keys []int) (ret []models.CustomFieldMap, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Image.GetCustomFieldsBulk(ctx, keys) + return err + }) + + return ret, toErrorSlice(err) + } +} + func (m Middleware) fetchGalleries(ctx context.Context) func(keys []int) ([]*models.Gallery, []error) { return func(keys []int) (ret []*models.Gallery, errs []error) { err := m.Repository.WithDB(ctx, func(ctx context.Context) error { @@ -253,6 +330,18 @@ func (m Middleware) fetchStudios(ctx context.Context) func(keys []int) ([]*model } } +func (m Middleware) fetchStudioCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) { + return func(keys []int) (ret []models.CustomFieldMap, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Studio.GetCustomFieldsBulk(ctx, keys) + return err + }) + + return ret, toErrorSlice(err) + } +} + func (m Middleware) fetchTags(ctx context.Context) func(keys []int) ([]*models.Tag, []error) { return func(keys []int) (ret []*models.Tag, errs []error) { err := m.Repository.WithDB(ctx, func(ctx context.Context) error { @@ -264,6 +353,42 @@ func (m Middleware) fetchTags(ctx context.Context) func(keys []int) ([]*models.T } } +func (m Middleware) fetchTagCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) { + return func(keys []int) (ret []models.CustomFieldMap, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Tag.GetCustomFieldsBulk(ctx, keys) + return err + }) + + return ret, toErrorSlice(err) + } +} + +func (m Middleware) fetchGroupCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) { + return func(keys []int) (ret []models.CustomFieldMap, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx 
context.Context) error { + var err error + ret, err = m.Repository.Group.GetCustomFieldsBulk(ctx, keys) + return err + }) + + return ret, toErrorSlice(err) + } +} + +func (m Middleware) fetchGalleryCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) { + return func(keys []int) (ret []models.CustomFieldMap, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Gallery.GetCustomFieldsBulk(ctx, keys) + return err + }) + + return ret, toErrorSlice(err) + } +} + func (m Middleware) fetchGroups(ctx context.Context) func(keys []int) ([]*models.Group, []error) { return func(keys []int) (ret []*models.Group, errs []error) { err := m.Repository.WithDB(ctx, func(ctx context.Context) error { @@ -297,6 +422,28 @@ func (m Middleware) fetchFolders(ctx context.Context) func(keys []models.FolderI } } +func (m Middleware) fetchFoldersParentFolderIDs(ctx context.Context) func(keys []models.FolderID) ([][]models.FolderID, []error) { + return func(keys []models.FolderID) (ret [][]models.FolderID, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Folder.GetManyParentFolderIDs(ctx, keys) + return err + }) + return ret, toErrorSlice(err) + } +} + +func (m Middleware) fetchFoldersSubFolderIDs(ctx context.Context) func(keys []models.FolderID) ([][]models.FolderID, []error) { + return func(keys []models.FolderID) (ret [][]models.FolderID, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Folder.GetManySubFolderIDs(ctx, keys) + return err + }) + return ret, toErrorSlice(err) + } +} + func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([][]models.FileID, []error) { return func(keys []int) (ret [][]models.FileID, errs []error) { err := m.Repository.WithDB(ctx, func(ctx context.Context) error { diff --git 
a/internal/api/loaders/folderrelatedfolderidsloader_gen.go b/internal/api/loaders/folderrelatedfolderidsloader_gen.go new file mode 100644 index 000000000..d0edb92f4 --- /dev/null +++ b/internal/api/loaders/folderrelatedfolderidsloader_gen.go @@ -0,0 +1,225 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. + +package loaders + +import ( + "sync" + "time" + + "github.com/stashapp/stash/pkg/models" +) + +// FolderParentFolderIDsLoaderConfig captures the config to create a new FolderParentFolderIDsLoader +type FolderParentFolderIDsLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []models.FolderID) ([][]models.FolderID, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewFolderParentFolderIDsLoader creates a new FolderParentFolderIDsLoader given a fetch, wait, and maxBatch +func NewFolderParentFolderIDsLoader(config FolderParentFolderIDsLoaderConfig) *FolderRelatedFolderIDsLoader { + return &FolderRelatedFolderIDsLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// FolderRelatedFolderIDsLoader batches and caches requests +type FolderRelatedFolderIDsLoader struct { + // this method provides the data for the loader + fetch func(keys []models.FolderID) ([][]models.FolderID, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[models.FolderID][]models.FolderID + + // the current batch. 
keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *folderParentFolderIDsLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type folderParentFolderIDsLoaderBatch struct { + keys []models.FolderID + data [][]models.FolderID + error []error + closing bool + done chan struct{} +} + +// Load a FolderID by key, batching and caching will be applied automatically +func (l *FolderRelatedFolderIDsLoader) Load(key models.FolderID) ([]models.FolderID, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a FolderID. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *FolderRelatedFolderIDsLoader) LoadThunk(key models.FolderID) func() ([]models.FolderID, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() ([]models.FolderID, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &folderParentFolderIDsLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() ([]models.FolderID, error) { + <-batch.done + + var data []models.FolderID + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *FolderRelatedFolderIDsLoader) LoadAll(keys []models.FolderID) ([][]models.FolderID, []error) { + results := make([]func() ([]models.FolderID, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + folderIDs := make([][]models.FolderID, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + folderIDs[i], errors[i] = thunk() + } + return folderIDs, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a FolderIDs. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *FolderRelatedFolderIDsLoader) LoadAllThunk(keys []models.FolderID) func() ([][]models.FolderID, []error) { + results := make([]func() ([]models.FolderID, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([][]models.FolderID, []error) { + folderIDs := make([][]models.FolderID, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + folderIDs[i], errors[i] = thunk() + } + return folderIDs, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *FolderRelatedFolderIDsLoader) Prime(key models.FolderID, value []models.FolderID) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := make([]models.FolderID, len(value)) + copy(cpy, value) + l.unsafeSet(key, cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *FolderRelatedFolderIDsLoader) Clear(key models.FolderID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *FolderRelatedFolderIDsLoader) unsafeSet(key models.FolderID, value []models.FolderID) { + if l.cache == nil { + l.cache = map[models.FolderID][]models.FolderID{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *folderParentFolderIDsLoaderBatch) keyIndex(l *FolderRelatedFolderIDsLoader, key models.FolderID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *folderParentFolderIDsLoaderBatch) startTimer(l *FolderRelatedFolderIDsLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *folderParentFolderIDsLoaderBatch) end(l *FolderRelatedFolderIDsLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/api/resolver.go b/internal/api/resolver.go index 061d0e1a9..b1cec1c9d 100644 --- a/internal/api/resolver.go +++ b/internal/api/resolver.go @@ -7,6 +7,7 @@ import ( "sort" "strconv" + "github.com/99designs/gqlgen/graphql" "github.com/stashapp/stash/internal/build" "github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/pkg/logger" @@ -145,6 +146,13 @@ func (r *Resolver) withReadTxn(ctx context.Context, fn func(ctx context.Context) return 
r.repository.WithReadTxn(ctx, fn) } +// idOnly returns true if the query is only asking for the id field. +// This can be used to optimize certain queries where we don't need to load the full object if we're only getting the id. +func (r *Resolver) idOnly(ctx context.Context) bool { + fields := graphql.CollectAllFields(ctx) + return len(fields) == 1 && fields[0] == "id" +} + func (r *queryResolver) MarkerWall(ctx context.Context, q *string) (ret []*models.SceneMarker, err error) { if err := r.withReadTxn(ctx, func(ctx context.Context) error { ret, err = r.repository.SceneMarker.Wall(ctx, q) diff --git a/internal/api/resolver_model_folder.go b/internal/api/resolver_model_folder.go index ee6bbfd05..725ca34f8 100644 --- a/internal/api/resolver_model_folder.go +++ b/internal/api/resolver_model_folder.go @@ -2,19 +2,77 @@ package api import ( "context" + "path/filepath" "github.com/stashapp/stash/internal/api/loaders" "github.com/stashapp/stash/pkg/models" ) +func (r *folderResolver) Basename(ctx context.Context, obj *models.Folder) (string, error) { + return filepath.Base(obj.Path), nil +} + func (r *folderResolver) ParentFolder(ctx context.Context, obj *models.Folder) (*models.Folder, error) { if obj.ParentFolderID == nil { return nil, nil } + if r.idOnly(ctx) { + return &models.Folder{ID: *obj.ParentFolderID}, nil + } + return loaders.From(ctx).FolderByID.Load(*obj.ParentFolderID) } +func foldersFromIDs(ids []models.FolderID) []*models.Folder { + ret := make([]*models.Folder, len(ids)) + for i, id := range ids { + ret[i] = &models.Folder{ID: id} + } + return ret +} + +func (r *folderResolver) ParentFolders(ctx context.Context, obj *models.Folder) ([]*models.Folder, error) { + ids, err := loaders.From(ctx).FolderParentFolderIDs.Load(obj.ID) + if err != nil { + return nil, err + } + + if r.idOnly(ctx) { + return foldersFromIDs(ids), nil + } + + var errs []error + ret, errs := loaders.From(ctx).FolderByID.LoadAll(ids) + return ret, firstError(errs) +} + +func (r 
*folderResolver) SubFolders(ctx context.Context, obj *models.Folder) ([]*models.Folder, error) { + ids, err := loaders.From(ctx).FolderSubFolderIDs.Load(obj.ID) + if err != nil { + return nil, err + } + + if r.idOnly(ctx) { + return foldersFromIDs(ids), nil + } + + var errs []error + ret, errs := loaders.From(ctx).FolderByID.LoadAll(ids) + return ret, firstError(errs) +} + func (r *folderResolver) ZipFile(ctx context.Context, obj *models.Folder) (*BasicFile, error) { + // shortcut for id only queries + if r.idOnly(ctx) { + if obj.ZipFileID == nil { + return nil, nil + } + + return &BasicFile{ + BaseFile: &models.BaseFile{ID: *obj.ZipFileID}, + }, nil + } + return zipFileResolver(ctx, obj.ZipFileID) } diff --git a/internal/api/resolver_model_gallery.go b/internal/api/resolver_model_gallery.go index 9dc68b4c4..773a831d8 100644 --- a/internal/api/resolver_model_gallery.go +++ b/internal/api/resolver_model_gallery.go @@ -216,3 +216,16 @@ func (r *galleryResolver) Image(ctx context.Context, obj *models.Gallery, index return } + +func (r *galleryResolver) CustomFields(ctx context.Context, obj *models.Gallery) (map[string]interface{}, error) { + m, err := loaders.From(ctx).GalleryCustomFields.Load(obj.ID) + if err != nil { + return nil, err + } + + if m == nil { + return make(map[string]interface{}), nil + } + + return m, nil +} diff --git a/internal/api/resolver_model_image.go b/internal/api/resolver_model_image.go index 0886bea40..4a95ae1f4 100644 --- a/internal/api/resolver_model_image.go +++ b/internal/api/resolver_model_image.go @@ -161,3 +161,12 @@ func (r *imageResolver) Urls(ctx context.Context, obj *models.Image) ([]string, return obj.URLs.List(), nil } + +func (r *imageResolver) CustomFields(ctx context.Context, obj *models.Image) (map[string]interface{}, error) { + customFields, err := loaders.From(ctx).ImageCustomFields.Load(obj.ID) + if err != nil { + return nil, err + } + + return customFields, nil +} diff --git a/internal/api/resolver_model_movie.go 
b/internal/api/resolver_model_movie.go index 317123c6e..287d5d51a 100644 --- a/internal/api/resolver_model_movie.go +++ b/internal/api/resolver_model_movie.go @@ -215,3 +215,16 @@ func (r *groupResolver) OCounter(ctx context.Context, obj *models.Group) (ret *i } return &count, nil } + +func (r *groupResolver) CustomFields(ctx context.Context, obj *models.Group) (map[string]interface{}, error) { + m, err := loaders.From(ctx).GroupCustomFields.Load(obj.ID) + if err != nil { + return nil, err + } + + if m == nil { + return make(map[string]interface{}), nil + } + + return m, nil +} diff --git a/internal/api/resolver_model_performer.go b/internal/api/resolver_model_performer.go index 94da62932..261a98ff3 100644 --- a/internal/api/resolver_model_performer.go +++ b/internal/api/resolver_model_performer.go @@ -109,6 +109,31 @@ func (r *performerResolver) HeightCm(ctx context.Context, obj *models.Performer) return obj.Height, nil } +func (r *performerResolver) CareerStart(ctx context.Context, obj *models.Performer) (*string, error) { + if obj.CareerStart != nil { + ret := obj.CareerStart.String() + return &ret, nil + } + return nil, nil +} + +func (r *performerResolver) CareerEnd(ctx context.Context, obj *models.Performer) (*string, error) { + if obj.CareerEnd != nil { + ret := obj.CareerEnd.String() + return &ret, nil + } + return nil, nil +} + +func (r *performerResolver) CareerLength(ctx context.Context, obj *models.Performer) (*string, error) { + if obj.CareerStart == nil && obj.CareerEnd == nil { + return nil, nil + } + + ret := models.FormatYearRange(obj.CareerStart, obj.CareerEnd) + return &ret, nil +} + func (r *performerResolver) Birthdate(ctx context.Context, obj *models.Performer) (*string, error) { if obj.Birthdate != nil { ret := obj.Birthdate.String() diff --git a/internal/api/resolver_model_scene.go b/internal/api/resolver_model_scene.go index 2600c9538..81113d858 100644 --- a/internal/api/resolver_model_scene.go +++ b/internal/api/resolver_model_scene.go @@ 
-410,3 +410,16 @@ func (r *sceneResolver) OHistory(ctx context.Context, obj *models.Scene) ([]*tim return ptrRet, nil } + +func (r *sceneResolver) CustomFields(ctx context.Context, obj *models.Scene) (map[string]interface{}, error) { + m, err := loaders.From(ctx).SceneCustomFields.Load(obj.ID) + if err != nil { + return nil, err + } + + if m == nil { + return make(map[string]interface{}), nil + } + + return m, nil +} diff --git a/internal/api/resolver_model_studio.go b/internal/api/resolver_model_studio.go index fabcf38bd..b54455920 100644 --- a/internal/api/resolver_model_studio.go +++ b/internal/api/resolver_model_studio.go @@ -207,6 +207,19 @@ func (r *studioResolver) Groups(ctx context.Context, obj *models.Studio) (ret [] return ret, nil } +func (r *studioResolver) CustomFields(ctx context.Context, obj *models.Studio) (map[string]interface{}, error) { + m, err := loaders.From(ctx).StudioCustomFields.Load(obj.ID) + if err != nil { + return nil, err + } + + if m == nil { + return make(map[string]interface{}), nil + } + + return m, nil +} + // deprecated func (r *studioResolver) Movies(ctx context.Context, obj *models.Studio) (ret []*models.Group, err error) { return r.Groups(ctx, obj) diff --git a/internal/api/resolver_model_tag.go b/internal/api/resolver_model_tag.go index deae41f21..7518036b0 100644 --- a/internal/api/resolver_model_tag.go +++ b/internal/api/resolver_model_tag.go @@ -181,3 +181,16 @@ func (r *tagResolver) ChildCount(ctx context.Context, obj *models.Tag) (ret int, return ret, nil } + +func (r *tagResolver) CustomFields(ctx context.Context, obj *models.Tag) (map[string]interface{}, error) { + m, err := loaders.From(ctx).TagCustomFields.Load(obj.ID) + if err != nil { + return nil, err + } + + if m == nil { + return make(map[string]interface{}), nil + } + + return m, nil +} diff --git a/internal/api/resolver_mutation_configure.go b/internal/api/resolver_mutation_configure.go index daed0b5b7..3df1c9114 100644 --- 
a/internal/api/resolver_mutation_configure.go +++ b/internal/api/resolver_mutation_configure.go @@ -5,6 +5,7 @@ import ( "encoding/json" "errors" "fmt" + "io/fs" "path/filepath" "regexp" "strconv" @@ -85,6 +86,8 @@ func (r *mutationResolver) setConfigFloat(key string, value *float64) { func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGeneralInput) (*ConfigGeneralResult, error) { c := config.GetInstance() + // #4709 - allow stash paths even if they do not exist, so that users may configure stash + // for disconnected drives or network storage. existingPaths := c.GetStashPaths() if input.Stashes != nil { for _, s := range input.Stashes { @@ -97,8 +100,12 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen } } if isNew { + s.Path = filepath.Clean(s.Path) + + // if it exists, it must be directory exists, err := fsutil.DirExists(s.Path) - if !exists { + // allow it to not exist but if it does exist it must be a directory + if !exists && !errors.Is(err, fs.ErrNotExist) { return makeConfigGeneralResult(), err } } @@ -287,6 +294,11 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen if input.PreviewPreset != nil { c.SetString(config.PreviewPreset, input.PreviewPreset.String()) } + r.setConfigBool(config.UseCustomSpriteInterval, input.UseCustomSpriteInterval) + r.setConfigFloat(config.SpriteInterval, input.SpriteInterval) + r.setConfigInt(config.MinimumSprites, input.MinimumSprites) + r.setConfigInt(config.MaximumSprites, input.MaximumSprites) + r.setConfigInt(config.SpriteScreenshotSize, input.SpriteScreenshotSize) r.setConfigBool(config.TranscodeHardwareAcceleration, input.TranscodeHardwareAcceleration) if input.MaxTranscodeSize != nil { @@ -515,6 +527,8 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input ConfigI r.setConfigBool(config.CustomLocalesEnabled, input.CustomLocalesEnabled) + r.setConfigBool(config.DisableCustomizations, 
input.DisableCustomizations) + if input.DisableDropdownCreate != nil { ddc := input.DisableDropdownCreate r.setConfigBool(config.DisableDropdownCreatePerformer, ddc.Performer) diff --git a/internal/api/resolver_mutation_file.go b/internal/api/resolver_mutation_file.go index c5e5e3530..b9e36aa76 100644 --- a/internal/api/resolver_mutation_file.go +++ b/internal/api/resolver_mutation_file.go @@ -5,10 +5,14 @@ import ( "fmt" "strconv" + "github.com/stashapp/stash/internal/desktop" "github.com/stashapp/stash/internal/manager" + "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/session" "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) @@ -16,7 +20,7 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput) if err := r.withTxn(ctx, func(ctx context.Context) error { fileStore := r.repository.File folderStore := r.repository.Folder - mover := file.NewMover(fileStore, folderStore) + mover := file.NewMover(fileStore, folderStore, manager.GetInstance().Config.GetStashPaths().Paths()) mover.RegisterHooks(ctx) var ( @@ -54,13 +58,14 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput) folderPath := *input.DestinationFolder // ensure folder path is within the library - if err := r.validateFolderPath(folderPath); err != nil { + stashPaths := manager.GetInstance().Config.GetStashPaths() + if err := r.validateFolderPath(stashPaths, folderPath); err != nil { return err } // get or create folder hierarchy var err error - folder, err = file.GetOrCreateFolderHierarchy(ctx, folderStore, folderPath) + folder, err = file.GetOrCreateFolderHierarchy(ctx, folderStore, folderPath, stashPaths.Paths()) if err != nil { return fmt.Errorf("getting or creating folder hierarchy: %w", err) } @@ -109,8 +114,7 @@ func (r *mutationResolver) MoveFiles(ctx 
context.Context, input MoveFilesInput) return true, nil } -func (r *mutationResolver) validateFolderPath(folderPath string) error { - paths := manager.GetInstance().Config.GetStashPaths() +func (r *mutationResolver) validateFolderPath(paths config.StashConfigs, folderPath string) error { if l := paths.GetStashFromDirPath(folderPath); l == nil { return fmt.Errorf("folder path %s must be within a stash library path", folderPath) } @@ -210,6 +214,58 @@ func (r *mutationResolver) DeleteFiles(ctx context.Context, ids []string) (ret b return true, nil } +func (r *mutationResolver) DestroyFiles(ctx context.Context, ids []string) (ret bool, err error) { + fileIDs, err := stringslice.StringSliceToIntSlice(ids) + if err != nil { + return false, fmt.Errorf("converting ids: %w", err) + } + + destroyer := &file.ZipDestroyer{ + FileDestroyer: r.repository.File, + FolderDestroyer: r.repository.Folder, + } + + if err := r.withTxn(ctx, func(ctx context.Context) error { + qb := r.repository.File + + for _, fileIDInt := range fileIDs { + fileID := models.FileID(fileIDInt) + f, err := qb.Find(ctx, fileID) + if err != nil { + return err + } + + if len(f) == 0 { + return fmt.Errorf("file with id %d not found", fileID) + } + + path := f[0].Base().Path + + // ensure not a primary file + isPrimary, err := qb.IsPrimary(ctx, fileID) + if err != nil { + return fmt.Errorf("checking if file %s is primary: %w", path, err) + } + + if isPrimary { + return fmt.Errorf("cannot destroy primary file entry %s", path) + } + + // destroy DB entries only (no filesystem deletion) + const deleteFile = false + if err := destroyer.DestroyZip(ctx, f[0], nil, deleteFile); err != nil { + return fmt.Errorf("destroying file entry %s: %w", path, err) + } + } + + return nil + }); err != nil { + return false, err + } + + return true, nil +} + func (r *mutationResolver) FileSetFingerprints(ctx context.Context, input FileSetFingerprintsInput) (bool, error) { fileIDInt, err := strconv.Atoi(input.ID) if err != nil { @@ 
-274,3 +330,71 @@ func (r *mutationResolver) FileSetFingerprints(ctx context.Context, input FileSe return true, nil } + +func (r *mutationResolver) RevealFileInFileManager(ctx context.Context, id string) (bool, error) { + // disallow if request did not come from localhost + if !session.IsLocalRequest(ctx) { + logger.Warnf("Attempt to reveal file in file manager from non-local request") + return false, fmt.Errorf("access denied") + } + + fileIDInt, err := strconv.Atoi(id) + if err != nil { + return false, fmt.Errorf("converting id: %w", err) + } + + var filePath string + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + files, err := r.repository.File.Find(ctx, models.FileID(fileIDInt)) + if err != nil { + return fmt.Errorf("finding file: %w", err) + } + if len(files) == 0 { + return fmt.Errorf("file with id %d not found", fileIDInt) + } + filePath = files[0].Base().Path + return nil + }); err != nil { + return false, err + } + + if err := desktop.RevealInFileManager(filePath); err != nil { + return false, err + } + + return true, nil +} + +func (r *mutationResolver) RevealFolderInFileManager(ctx context.Context, id string) (bool, error) { + // disallow if request did not come from localhost + if !session.IsLocalRequest(ctx) { + logger.Warnf("Attempt to reveal folder in file manager from non-local request") + return false, fmt.Errorf("access denied") + } + + folderIDInt, err := strconv.Atoi(id) + if err != nil { + return false, fmt.Errorf("converting id: %w", err) + } + + var folderPath string + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + folder, err := r.repository.Folder.Find(ctx, models.FolderID(folderIDInt)) + if err != nil { + return fmt.Errorf("finding folder: %w", err) + } + if folder == nil { + return fmt.Errorf("folder with id %d not found", folderIDInt) + } + folderPath = folder.Path + return nil + }); err != nil { + return false, err + } + + if err := desktop.RevealInFileManager(folderPath); err != nil { + return false, 
err + } + + return true, nil +} diff --git a/internal/api/resolver_mutation_gallery.go b/internal/api/resolver_mutation_gallery.go index 8f4863c6d..2cd80b1ff 100644 --- a/internal/api/resolver_mutation_gallery.go +++ b/internal/api/resolver_mutation_gallery.go @@ -42,7 +42,10 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat } // Populate a new gallery from the input - newGallery := models.NewGallery() + newGallery := models.CreateGalleryInput{ + Gallery: &models.Gallery{}, + } + *newGallery.Gallery = models.NewGallery() newGallery.Title = strings.TrimSpace(input.Title) newGallery.Code = translator.string(input.Code) @@ -81,10 +84,12 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat newGallery.URLs = models.NewRelatedStrings([]string{strings.TrimSpace(*input.URL)}) } + newGallery.CustomFields = convertMapJSONNumbers(input.CustomFields) + // Start the transaction and save the gallery if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Gallery - if err := qb.Create(ctx, &newGallery, nil); err != nil { + if err := qb.Create(ctx, &newGallery); err != nil { return err } @@ -241,6 +246,10 @@ func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.Galle return nil, fmt.Errorf("converting scene ids: %w", err) } + if input.CustomFields != nil { + updatedGallery.CustomFields = handleUpdateCustomFields(*input.CustomFields) + } + // gallery scene is set from the scene only gallery, err := qb.UpdatePartial(ctx, galleryID, updatedGallery) @@ -293,6 +302,10 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGall return nil, fmt.Errorf("converting scene ids: %w", err) } + if input.CustomFields != nil { + updatedGallery.CustomFields = handleUpdateCustomFields(*input.CustomFields) + } + ret := []*models.Gallery{} // Start the transaction and save the galleries @@ -346,6 +359,7 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, 
input models.Gall deleteGenerated := utils.IsTrue(input.DeleteGenerated) deleteFile := utils.IsTrue(input.DeleteFile) + destroyFileEntry := utils.IsTrue(input.DestroyFileEntry) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Gallery @@ -366,7 +380,7 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall galleries = append(galleries, gallery) - imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile) + imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry) if err != nil { return err } diff --git a/internal/api/resolver_mutation_group.go b/internal/api/resolver_mutation_group.go index 14dc817b9..6c986c4da 100644 --- a/internal/api/resolver_mutation_group.go +++ b/internal/api/resolver_mutation_group.go @@ -14,13 +14,17 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -func groupFromGroupCreateInput(ctx context.Context, input GroupCreateInput) (*models.Group, error) { +func groupFromGroupCreateInput(ctx context.Context, input GroupCreateInput) (*models.CreateGroupInput, error) { translator := changesetTranslator{ inputMap: getUpdateInputMap(ctx), } // Populate a new group from the input - newGroup := models.NewGroup() + newGroupInput := &models.CreateGroupInput{ + Group: &models.Group{}, + } + *newGroupInput.Group = models.NewGroup() + newGroup := newGroupInput.Group newGroup.Name = strings.TrimSpace(input.Name) newGroup.Aliases = translator.string(input.Aliases) @@ -59,28 +63,19 @@ func groupFromGroupCreateInput(ctx context.Context, input GroupCreateInput) (*mo newGroup.URLs = models.NewRelatedStrings(stringslice.TrimSpace(input.Urls)) } - return &newGroup, nil -} - -func (r *mutationResolver) GroupCreate(ctx context.Context, input GroupCreateInput) (*models.Group, error) { - newGroup, err := groupFromGroupCreateInput(ctx, input) - if err != nil { - return nil, err - } + newGroupInput.CustomFields 
= convertMapJSONNumbers(input.CustomFields) // Process the base 64 encoded image string - var frontimageData []byte if input.FrontImage != nil { - frontimageData, err = utils.ProcessImageInput(ctx, *input.FrontImage) + newGroupInput.FrontImageData, err = utils.ProcessImageInput(ctx, *input.FrontImage) if err != nil { return nil, fmt.Errorf("processing front image: %w", err) } } // Process the base 64 encoded image string - var backimageData []byte if input.BackImage != nil { - backimageData, err = utils.ProcessImageInput(ctx, *input.BackImage) + newGroupInput.BackImageData, err = utils.ProcessImageInput(ctx, *input.BackImage) if err != nil { return nil, fmt.Errorf("processing back image: %w", err) } @@ -88,13 +83,22 @@ func (r *mutationResolver) GroupCreate(ctx context.Context, input GroupCreateInp // HACK: if back image is being set, set the front image to the default. // This is because we can't have a null front image with a non-null back image. - if len(frontimageData) == 0 && len(backimageData) != 0 { - frontimageData = static.ReadAll(static.DefaultGroupImage) + if len(newGroupInput.FrontImageData) == 0 && len(newGroupInput.BackImageData) != 0 { + newGroupInput.FrontImageData = static.ReadAll(static.DefaultGroupImage) + } + + return newGroupInput, nil +} + +func (r *mutationResolver) GroupCreate(ctx context.Context, input GroupCreateInput) (*models.Group, error) { + createGroupInput, err := groupFromGroupCreateInput(ctx, input) + if err != nil { + return nil, err } // Start the transaction and save the group if err := r.withTxn(ctx, func(ctx context.Context) error { - if err = r.groupService.Create(ctx, newGroup, frontimageData, backimageData); err != nil { + if err = r.groupService.Create(ctx, createGroupInput); err != nil { return err } @@ -104,9 +108,9 @@ func (r *mutationResolver) GroupCreate(ctx context.Context, input GroupCreateInp } // for backwards compatibility - run both movie and group hooks - r.hookExecutor.ExecutePostHooks(ctx, newGroup.ID, 
hook.GroupCreatePost, input, nil) - r.hookExecutor.ExecutePostHooks(ctx, newGroup.ID, hook.MovieCreatePost, input, nil) - return r.getGroup(ctx, newGroup.ID) + r.hookExecutor.ExecutePostHooks(ctx, createGroupInput.Group.ID, hook.GroupCreatePost, input, nil) + r.hookExecutor.ExecutePostHooks(ctx, createGroupInput.Group.ID, hook.MovieCreatePost, input, nil) + return r.getGroup(ctx, createGroupInput.Group.ID) } func groupPartialFromGroupUpdateInput(translator changesetTranslator, input GroupUpdateInput) (ret models.GroupPartial, err error) { @@ -150,6 +154,12 @@ func groupPartialFromGroupUpdateInput(translator changesetTranslator, input Grou } updatedGroup.URLs = translator.updateStrings(input.Urls, "urls") + if input.CustomFields != nil { + updatedGroup.CustomFields = *input.CustomFields + // convert json.Numbers to int/float + updatedGroup.CustomFields.Full = convertMapJSONNumbers(updatedGroup.CustomFields.Full) + updatedGroup.CustomFields.Partial = convertMapJSONNumbers(updatedGroup.CustomFields.Partial) + } return updatedGroup, nil } @@ -217,6 +227,12 @@ func (r *mutationResolver) GroupUpdate(ctx context.Context, input GroupUpdateInp func groupPartialFromBulkGroupUpdateInput(translator changesetTranslator, input BulkGroupUpdateInput) (ret models.GroupPartial, err error) { updatedGroup := models.NewGroupPartial() + updatedGroup.Date, err = translator.optionalDate(input.Date, "date") + if err != nil { + err = fmt.Errorf("converting date: %w", err) + return + } + updatedGroup.Synopsis = translator.optionalString(input.Synopsis, "synopsis") updatedGroup.Rating = translator.optionalInt(input.Rating100, "rating100") updatedGroup.Director = translator.optionalString(input.Director, "director") @@ -246,6 +262,13 @@ func groupPartialFromBulkGroupUpdateInput(translator changesetTranslator, input updatedGroup.URLs = translator.optionalURLsBulk(input.Urls, nil) + if input.CustomFields != nil { + updatedGroup.CustomFields = *input.CustomFields + // convert json.Numbers to 
int/float + updatedGroup.CustomFields.Full = convertMapJSONNumbers(updatedGroup.CustomFields.Full) + updatedGroup.CustomFields.Partial = convertMapJSONNumbers(updatedGroup.CustomFields.Partial) + } + return updatedGroup, nil } diff --git a/internal/api/resolver_mutation_image.go b/internal/api/resolver_mutation_image.go index 82d9be4cd..cc03c5286 100644 --- a/internal/api/resolver_mutation_image.go +++ b/internal/api/resolver_mutation_image.go @@ -177,6 +177,13 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input models.ImageUp return nil, fmt.Errorf("converting tag ids: %w", err) } + if input.CustomFields != nil { + updatedImage.CustomFields = *input.CustomFields + // convert json.Numbers to int/float + updatedImage.CustomFields.Full = convertMapJSONNumbers(updatedImage.CustomFields.Full) + updatedImage.CustomFields.Partial = convertMapJSONNumbers(updatedImage.CustomFields.Partial) + } + qb := r.repository.Image image, err := qb.UpdatePartial(ctx, imageID, updatedImage) if err != nil { @@ -237,6 +244,13 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU return nil, fmt.Errorf("converting tag ids: %w", err) } + if input.CustomFields != nil { + updatedImage.CustomFields = *input.CustomFields + // convert json.Numbers to int/float + updatedImage.CustomFields.Full = convertMapJSONNumbers(updatedImage.CustomFields.Full) + updatedImage.CustomFields.Partial = convertMapJSONNumbers(updatedImage.CustomFields.Partial) + } + // Start the transaction and save the images if err := r.withTxn(ctx, func(ctx context.Context) error { var updatedGalleryIDs []int @@ -325,7 +339,7 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD return fmt.Errorf("image with id %d not found", imageID) } - return r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)) + return r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), 
utils.IsTrue(input.DeleteFile), utils.IsTrue(input.DestroyFileEntry)) }); err != nil { fileDeleter.Rollback() return false, err @@ -372,7 +386,7 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image images = append(images, i) - if err := r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)); err != nil { + if err := r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile), utils.IsTrue(input.DestroyFileEntry)); err != nil { return err } } diff --git a/internal/api/resolver_mutation_metadata.go b/internal/api/resolver_mutation_metadata.go index 8120e2d31..ea6496800 100644 --- a/internal/api/resolver_mutation_metadata.go +++ b/internal/api/resolver_mutation_metadata.go @@ -122,9 +122,10 @@ func (r *mutationResolver) MigrateHashNaming(ctx context.Context) (string, error func (r *mutationResolver) BackupDatabase(ctx context.Context, input BackupDatabaseInput) (*string, error) { // if download is true, then backup to temporary file and return a link download := input.Download != nil && *input.Download + includeBlobs := input.IncludeBlobs != nil && *input.IncludeBlobs mgr := manager.GetInstance() - backupPath, backupName, err := mgr.BackupDatabase(download) + backupPath, backupName, err := mgr.BackupDatabase(download, includeBlobs) if err != nil { logger.Errorf("Error backing up database: %v", err) return nil, err diff --git a/internal/api/resolver_mutation_performer.go b/internal/api/resolver_mutation_performer.go index c54e3ca93..6f88c54ca 100644 --- a/internal/api/resolver_mutation_performer.go +++ b/internal/api/resolver_mutation_performer.go @@ -2,13 +2,16 @@ package api import ( "context" + "errors" "fmt" + "slices" "strconv" "strings" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/plugin/hook" + "github.com/stashapp/stash/pkg/sliceutil" 
"github.com/stashapp/stash/pkg/sliceutil/stringslice" "github.com/stashapp/stash/pkg/utils" ) @@ -40,7 +43,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per newPerformer.Name = strings.TrimSpace(input.Name) newPerformer.Disambiguation = translator.string(input.Disambiguation) - newPerformer.Aliases = models.NewRelatedStrings(stringslice.TrimSpace(input.AliasList)) + newPerformer.Aliases = models.NewRelatedStrings(stringslice.UniqueExcludeFold(stringslice.TrimSpace(input.AliasList), newPerformer.Name)) newPerformer.Gender = input.Gender newPerformer.Ethnicity = translator.string(input.Ethnicity) newPerformer.Country = translator.string(input.Country) @@ -49,7 +52,6 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per newPerformer.FakeTits = translator.string(input.FakeTits) newPerformer.PenisLength = input.PenisLength newPerformer.Circumcised = input.Circumcised - newPerformer.CareerLength = translator.string(input.CareerLength) newPerformer.Tattoos = translator.string(input.Tattoos) newPerformer.Piercings = translator.string(input.Piercings) newPerformer.Favorite = translator.bool(input.Favorite) @@ -87,6 +89,25 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per return nil, fmt.Errorf("converting death date: %w", err) } + newPerformer.CareerStart, err = translator.datePtr(input.CareerStart) + if err != nil { + return nil, fmt.Errorf("converting career start: %w", err) + } + newPerformer.CareerEnd, err = translator.datePtr(input.CareerEnd) + if err != nil { + return nil, fmt.Errorf("converting career end: %w", err) + } + + // if career_start/career_end not provided, parse deprecated career_length + if newPerformer.CareerStart == nil && newPerformer.CareerEnd == nil && input.CareerLength != nil { + start, end, err := models.ParseYearRangeString(*input.CareerLength) + if err != nil { + return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, 
err) + } + newPerformer.CareerStart = start + newPerformer.CareerEnd = end + } + newPerformer.TagIDs, err = translator.relatedIds(input.TagIds) if err != nil { return nil, fmt.Errorf("converting tag ids: %w", err) @@ -136,7 +157,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per return r.getPerformer(ctx, newPerformer.ID) } -func (r *mutationResolver) validateNoLegacyURLs(translator changesetTranslator) error { +func validateNoLegacyURLs(translator changesetTranslator) error { // ensure url/twitter/instagram are not included in the input if translator.hasField("url") { return fmt.Errorf("url field must not be included if urls is included") @@ -151,7 +172,7 @@ func (r *mutationResolver) validateNoLegacyURLs(translator changesetTranslator) return nil } -func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int, legacyURL, legacyTwitter, legacyInstagram models.OptionalString, updatedPerformer *models.PerformerPartial) error { +func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int, legacyURLs legacyPerformerURLs, updatedPerformer *models.PerformerPartial) error { qb := r.repository.Performer // we need to be careful with URL/Twitter/Instagram @@ -170,23 +191,23 @@ func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int existingURLs := p.URLs.List() // performer partial URLs should be empty - if legacyURL.Set { + if legacyURLs.URL.Set { replaced := false for i, url := range existingURLs { if !performer.IsTwitterURL(url) && !performer.IsInstagramURL(url) { - existingURLs[i] = legacyURL.Value + existingURLs[i] = legacyURLs.URL.Value replaced = true break } } if !replaced { - existingURLs = append(existingURLs, legacyURL.Value) + existingURLs = append(existingURLs, legacyURLs.URL.Value) } } - if legacyTwitter.Set { - value := utils.URLFromHandle(legacyTwitter.Value, twitterURL) + if legacyURLs.Twitter.Set { + value := utils.URLFromHandle(legacyURLs.Twitter.Value, 
twitterURL) found := false // find and replace the first twitter URL for i, url := range existingURLs { @@ -201,9 +222,9 @@ func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int existingURLs = append(existingURLs, value) } } - if legacyInstagram.Set { + if legacyURLs.Instagram.Set { found := false - value := utils.URLFromHandle(legacyInstagram.Value, instagramURL) + value := utils.URLFromHandle(legacyURLs.Instagram.Value, instagramURL) // find and replace the first instagram URL for i, url := range existingURLs { if performer.IsInstagramURL(url) { @@ -226,16 +247,25 @@ func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int return nil } -func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.PerformerUpdateInput) (*models.Performer, error) { - performerID, err := strconv.Atoi(input.ID) - if err != nil { - return nil, fmt.Errorf("converting id: %w", err) - } +type legacyPerformerURLs struct { + URL models.OptionalString + Twitter models.OptionalString + Instagram models.OptionalString +} - translator := changesetTranslator{ - inputMap: getUpdateInputMap(ctx), - } +func (u *legacyPerformerURLs) AnySet() bool { + return u.URL.Set || u.Twitter.Set || u.Instagram.Set +} +func legacyPerformerURLsFromInput(input models.PerformerUpdateInput, translator changesetTranslator) legacyPerformerURLs { + return legacyPerformerURLs{ + URL: translator.optionalString(input.URL, "url"), + Twitter: translator.optionalString(input.Twitter, "twitter"), + Instagram: translator.optionalString(input.Instagram, "instagram"), + } +} + +func performerPartialFromInput(input models.PerformerUpdateInput, translator changesetTranslator) (*models.PerformerPartial, error) { // Populate performer from the input updatedPerformer := models.NewPerformerPartial() @@ -249,7 +279,29 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per updatedPerformer.FakeTits = translator.optionalString(input.FakeTits, 
"fake_tits") updatedPerformer.PenisLength = translator.optionalFloat64(input.PenisLength, "penis_length") updatedPerformer.Circumcised = translator.optionalString((*string)(input.Circumcised), "circumcised") - updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length") + // prefer career_start/career_end over deprecated career_length + if translator.hasField("career_start") || translator.hasField("career_end") { + var err error + updatedPerformer.CareerStart, err = translator.optionalDate(input.CareerStart, "career_start") + if err != nil { + return nil, fmt.Errorf("converting career start: %w", err) + } + updatedPerformer.CareerEnd, err = translator.optionalDate(input.CareerEnd, "career_end") + if err != nil { + return nil, fmt.Errorf("converting career end: %w", err) + } + } else if translator.hasField("career_length") && input.CareerLength != nil { + start, end, err := models.ParseYearRangeString(*input.CareerLength) + if err != nil { + return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err) + } + if start != nil { + updatedPerformer.CareerStart = models.NewOptionalDate(*start) + } + if end != nil { + updatedPerformer.CareerEnd = models.NewOptionalDate(*end) + } + } updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos") updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings") updatedPerformer.Favorite = translator.optionalBool(input.Favorite, "favorite") @@ -260,19 +312,17 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per updatedPerformer.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") updatedPerformer.StashIDs = translator.updateStashIDs(input.StashIds, "stash_ids") + var err error + if translator.hasField("urls") { // ensure url/twitter/instagram are not included in the input - if err := r.validateNoLegacyURLs(translator); err != nil { + if err := 
validateNoLegacyURLs(translator); err != nil { return nil, err } updatedPerformer.URLs = translator.updateStrings(input.Urls, "urls") } - legacyURL := translator.optionalString(input.URL, "url") - legacyTwitter := translator.optionalString(input.Twitter, "twitter") - legacyInstagram := translator.optionalString(input.Instagram, "instagram") - updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate") if err != nil { return nil, fmt.Errorf("converting birthdate: %w", err) @@ -299,6 +349,26 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per updatedPerformer.CustomFields = handleUpdateCustomFields(input.CustomFields) + return &updatedPerformer, nil +} + +func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.PerformerUpdateInput) (*models.Performer, error) { + performerID, err := strconv.Atoi(input.ID) + if err != nil { + return nil, fmt.Errorf("converting id: %w", err) + } + + translator := changesetTranslator{ + inputMap: getUpdateInputMap(ctx), + } + + updatedPerformer, err := performerPartialFromInput(input, translator) + if err != nil { + return nil, err + } + + legacyURLs := legacyPerformerURLsFromInput(input, translator) + var imageData []byte imageIncluded := translator.hasField("image") if input.Image != nil { @@ -312,17 +382,38 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Performer - if legacyURL.Set || legacyTwitter.Set || legacyInstagram.Set { - if err := r.handleLegacyURLs(ctx, performerID, legacyURL, legacyTwitter, legacyInstagram, &updatedPerformer); err != nil { + if legacyURLs.AnySet() { + if err := r.handleLegacyURLs(ctx, performerID, legacyURLs, updatedPerformer); err != nil { return err } } - if err := performer.ValidateUpdate(ctx, performerID, updatedPerformer, qb); err != nil { + if updatedPerformer.Aliases != nil { + p, err := qb.Find(ctx, 
performerID) + if err != nil { + return err + } + if p != nil { + if err := p.LoadAliases(ctx, qb); err != nil { + return err + } + + effectiveAliases := updatedPerformer.Aliases.Apply(p.Aliases.List()) + name := p.Name + if updatedPerformer.Name.Set { + name = updatedPerformer.Name.Value + } + + sanitized := stringslice.UniqueExcludeFold(effectiveAliases, name) + updatedPerformer.Aliases.Values = sanitized + updatedPerformer.Aliases.Mode = models.RelationshipUpdateModeSet + } + } + if err := performer.ValidateUpdate(ctx, performerID, *updatedPerformer, qb); err != nil { return err } - _, err = qb.UpdatePartial(ctx, performerID, updatedPerformer) + _, err = qb.UpdatePartial(ctx, performerID, *updatedPerformer) if err != nil { return err } @@ -366,7 +457,28 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe updatedPerformer.FakeTits = translator.optionalString(input.FakeTits, "fake_tits") updatedPerformer.PenisLength = translator.optionalFloat64(input.PenisLength, "penis_length") updatedPerformer.Circumcised = translator.optionalString((*string)(input.Circumcised), "circumcised") - updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length") + // prefer career_start/career_end over deprecated career_length + if translator.hasField("career_start") || translator.hasField("career_end") { + updatedPerformer.CareerStart, err = translator.optionalDate(input.CareerStart, "career_start") + if err != nil { + return nil, fmt.Errorf("converting career start: %w", err) + } + updatedPerformer.CareerEnd, err = translator.optionalDate(input.CareerEnd, "career_end") + if err != nil { + return nil, fmt.Errorf("converting career end: %w", err) + } + } else if translator.hasField("career_length") && input.CareerLength != nil { + start, end, err := models.ParseYearRangeString(*input.CareerLength) + if err != nil { + return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err) + } + if start 
!= nil { + updatedPerformer.CareerStart = models.NewOptionalDate(*start) + } + if end != nil { + updatedPerformer.CareerEnd = models.NewOptionalDate(*end) + } + } updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos") updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings") @@ -379,16 +491,18 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe if translator.hasField("urls") { // ensure url/twitter/instagram are not included in the input - if err := r.validateNoLegacyURLs(translator); err != nil { + if err := validateNoLegacyURLs(translator); err != nil { return nil, err } updatedPerformer.URLs = translator.updateStringsBulk(input.Urls, "urls") } - legacyURL := translator.optionalString(input.URL, "url") - legacyTwitter := translator.optionalString(input.Twitter, "twitter") - legacyInstagram := translator.optionalString(input.Instagram, "instagram") + legacyURLs := legacyPerformerURLs{ + URL: translator.optionalString(input.URL, "url"), + Twitter: translator.optionalString(input.Twitter, "twitter"), + Instagram: translator.optionalString(input.Instagram, "instagram"), + } updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate") if err != nil { @@ -425,8 +539,8 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe qb := r.repository.Performer for _, performerID := range performerIDs { - if legacyURL.Set || legacyTwitter.Set || legacyInstagram.Set { - if err := r.handleLegacyURLs(ctx, performerID, legacyURL, legacyTwitter, legacyInstagram, &updatedPerformer); err != nil { + if legacyURLs.AnySet() { + if err := r.handleLegacyURLs(ctx, performerID, legacyURLs, &updatedPerformer); err != nil { return err } } @@ -506,3 +620,87 @@ func (r *mutationResolver) PerformersDestroy(ctx context.Context, performerIDs [ return true, nil } + +func (r *mutationResolver) PerformerMerge(ctx context.Context, input PerformerMergeInput) 
(*models.Performer, error) { + srcIDs, err := stringslice.StringSliceToIntSlice(input.Source) + if err != nil { + return nil, fmt.Errorf("converting source ids: %w", err) + } + + // ensure source ids are unique + srcIDs = sliceutil.AppendUniques(nil, srcIDs) + + destID, err := strconv.Atoi(input.Destination) + if err != nil { + return nil, fmt.Errorf("converting destination id: %w", err) + } + + // ensure destination is not in source list + if slices.Contains(srcIDs, destID) { + return nil, errors.New("destination performer cannot be in source list") + } + + var values *models.PerformerPartial + var imageData []byte + + if input.Values != nil { + translator := changesetTranslator{ + inputMap: getNamedUpdateInputMap(ctx, "input.values"), + } + + values, err = performerPartialFromInput(*input.Values, translator) + if err != nil { + return nil, err + } + legacyURLs := legacyPerformerURLsFromInput(*input.Values, translator) + if legacyURLs.AnySet() { + return nil, errors.New("Merging legacy performer URLs is not supported") + } + + if input.Values.Image != nil { + var err error + imageData, err = utils.ProcessImageInput(ctx, *input.Values.Image) + if err != nil { + return nil, fmt.Errorf("processing cover image: %w", err) + } + } + } else { + v := models.NewPerformerPartial() + values = &v + } + + var dest *models.Performer + if err := r.withTxn(ctx, func(ctx context.Context) error { + qb := r.repository.Performer + + dest, err = qb.Find(ctx, destID) + if err != nil { + return fmt.Errorf("finding destination performer ID %d: %w", destID, err) + } + + // ensure source performers exist + if _, err := qb.FindMany(ctx, srcIDs); err != nil { + return fmt.Errorf("finding source performers: %w", err) + } + + if _, err := qb.UpdatePartial(ctx, destID, *values); err != nil { + return fmt.Errorf("updating performer: %w", err) + } + + if err := qb.Merge(ctx, srcIDs, destID); err != nil { + return fmt.Errorf("merging performers: %w", err) + } + + if len(imageData) > 0 { + if err 
:= qb.UpdateImage(ctx, destID, imageData); err != nil { + return err + } + } + + return nil + }); err != nil { + return nil, err + } + + return dest, nil +} diff --git a/internal/api/resolver_mutation_scene.go b/internal/api/resolver_mutation_scene.go index c08184add..70158fc6f 100644 --- a/internal/api/resolver_mutation_scene.go +++ b/internal/api/resolver_mutation_scene.go @@ -103,8 +103,15 @@ func (r *mutationResolver) SceneCreate(ctx context.Context, input models.SceneCr } } + customFields := convertMapJSONNumbers(input.CustomFields) + if err := r.withTxn(ctx, func(ctx context.Context) error { - ret, err = r.Resolver.sceneService.Create(ctx, &newScene, fileIDs, coverImageData) + ret, err = r.Resolver.sceneService.Create(ctx, models.CreateSceneInput{ + Scene: &newScene, + FileIDs: fileIDs, + CoverImage: coverImageData, + CustomFields: customFields, + }) return err }); err != nil { return nil, err @@ -297,6 +304,7 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp } var coverImageData []byte + coverImageIncluded := translator.hasField("cover_image") if input.CoverImage != nil { var err error coverImageData, err = utils.ProcessImageInput(ctx, *input.CoverImage) @@ -305,26 +313,41 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp } } + var customFields *models.CustomFieldsInput + if input.CustomFields != nil { + cfCopy := *input.CustomFields + customFields = &cfCopy + // convert json.Numbers to int/float + customFields.Full = convertMapJSONNumbers(customFields.Full) + customFields.Partial = convertMapJSONNumbers(customFields.Partial) + } + scene, err := qb.UpdatePartial(ctx, sceneID, *updatedScene) if err != nil { return nil, err } - if err := r.sceneUpdateCoverImage(ctx, scene, coverImageData); err != nil { - return nil, err + if coverImageIncluded { + if err := r.sceneUpdateCoverImage(ctx, scene, coverImageData); err != nil { + return nil, err + } + } + + if customFields != nil { + if err := 
qb.SetCustomFields(ctx, scene.ID, *customFields); err != nil { + return nil, err + } } return scene, nil } func (r *mutationResolver) sceneUpdateCoverImage(ctx context.Context, s *models.Scene, coverImageData []byte) error { - if len(coverImageData) > 0 { - qb := r.repository.Scene + qb := r.repository.Scene - // update cover table - if err := qb.UpdateCover(ctx, s.ID, coverImageData); err != nil { - return err - } + // update cover table - empty data will clear the cover + if err := qb.UpdateCover(ctx, s.ID, coverImageData); err != nil { + return err } return nil @@ -386,6 +409,12 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU } } + var customFields *models.CustomFieldsInput + if input.CustomFields != nil { + cf := handleUpdateCustomFields(*input.CustomFields) + customFields = &cf + } + ret := []*models.Scene{} // Start the transaction and save the scenes @@ -398,6 +427,12 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU return err } + if customFields != nil { + if err := qb.SetCustomFields(ctx, scene.ID, *customFields); err != nil { + return err + } + } + ret = append(ret, scene) } @@ -440,6 +475,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD deleteGenerated := utils.IsTrue(input.DeleteGenerated) deleteFile := utils.IsTrue(input.DeleteFile) + destroyFileEntry := utils.IsTrue(input.DestroyFileEntry) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Scene @@ -456,7 +492,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD // kill any running encoders manager.KillRunningStreams(s, fileNamingAlgo) - return r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, deleteFile) + return r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry) }); err != nil { fileDeleter.Rollback() return false, err @@ -494,6 +530,7 @@ func (r *mutationResolver) ScenesDestroy(ctx 
context.Context, input models.Scene deleteGenerated := utils.IsTrue(input.DeleteGenerated) deleteFile := utils.IsTrue(input.DeleteFile) + destroyFileEntry := utils.IsTrue(input.DestroyFileEntry) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Scene @@ -512,7 +549,7 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene // kill any running encoders manager.KillRunningStreams(scene, fileNamingAlgo) - if err := r.sceneService.Destroy(ctx, scene, fileDeleter, deleteGenerated, deleteFile); err != nil { + if err := r.sceneService.Destroy(ctx, scene, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil { return err } } @@ -572,6 +609,7 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput var values *models.ScenePartial var coverImageData []byte + var customFields *models.CustomFieldsInput if input.Values != nil { translator := changesetTranslator{ @@ -590,6 +628,11 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput return nil, fmt.Errorf("processing cover image: %w", err) } } + + if input.Values.CustomFields != nil { + cf := handleUpdateCustomFields(*input.Values.CustomFields) + customFields = &cf + } } else { v := models.NewScenePartial() values = &v @@ -621,7 +664,20 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput return fmt.Errorf("scene with id %d not found", destID) } - return r.sceneUpdateCoverImage(ctx, ret, coverImageData) + // only update cover image if one was provided + if len(coverImageData) > 0 { + if err := r.sceneUpdateCoverImage(ctx, ret, coverImageData); err != nil { + return err + } + } + + if customFields != nil { + if err := r.Resolver.repository.Scene.SetCustomFields(ctx, ret.ID, *customFields); err != nil { + return err + } + } + + return nil }); err != nil { return nil, err } diff --git a/internal/api/resolver_mutation_stash_box.go 
b/internal/api/resolver_mutation_stash_box.go index 436937511..6d2ab84fd 100644 --- a/internal/api/resolver_mutation_stash_box.go +++ b/internal/api/resolver_mutation_stash_box.go @@ -58,6 +58,16 @@ func (r *mutationResolver) StashBoxBatchStudioTag(ctx context.Context, input man return strconv.Itoa(jobID), nil } +func (r *mutationResolver) StashBoxBatchTagTag(ctx context.Context, input manager.StashBoxBatchTagInput) (string, error) { + b, err := resolveStashBoxBatchTagInput(input.Endpoint, input.StashBoxEndpoint) //nolint:staticcheck + if err != nil { + return "", err + } + + jobID := manager.GetInstance().StashBoxBatchTagTag(ctx, b, input) + return strconv.Itoa(jobID), nil +} + func (r *mutationResolver) SubmitStashBoxSceneDraft(ctx context.Context, input StashBoxDraftSubmissionInput) (*string, error) { b, err := resolveStashBox(input.StashBoxIndex, input.StashBoxEndpoint) if err != nil { diff --git a/internal/api/resolver_mutation_studio.go b/internal/api/resolver_mutation_studio.go index 4b3316111..c7af918a1 100644 --- a/internal/api/resolver_mutation_studio.go +++ b/internal/api/resolver_mutation_studio.go @@ -31,14 +31,15 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio } // Populate a new studio from the input - newStudio := models.NewStudio() + newStudio := models.NewCreateStudioInput() newStudio.Name = strings.TrimSpace(input.Name) newStudio.Rating = input.Rating100 newStudio.Favorite = translator.bool(input.Favorite) newStudio.Details = translator.string(input.Details) newStudio.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag) - newStudio.Aliases = models.NewRelatedStrings(stringslice.TrimSpace(input.Aliases)) + newStudio.Organized = translator.bool(input.Organized) + newStudio.Aliases = models.NewRelatedStrings(stringslice.UniqueExcludeFold(stringslice.TrimSpace(input.Aliases), newStudio.Name)) newStudio.StashIDs = models.NewRelatedStashIDs(models.StashIDInputs(input.StashIds).ToStashIDs()) var err error @@ -61,6 
+62,7 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio if err != nil { return nil, fmt.Errorf("converting tag ids: %w", err) } + newStudio.CustomFields = convertMapJSONNumbers(input.CustomFields) // Process the base 64 encoded image string var imageData []byte @@ -119,6 +121,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio updatedStudio.Rating = translator.optionalInt(input.Rating100, "rating100") updatedStudio.Favorite = translator.optionalBool(input.Favorite, "favorite") updatedStudio.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") + updatedStudio.Organized = translator.optionalBool(input.Organized, "organized") updatedStudio.Aliases = translator.updateStrings(input.Aliases, "aliases") updatedStudio.StashIDs = translator.updateStashIDs(input.StashIds, "stash_ids") @@ -134,7 +137,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio if translator.hasField("urls") { // ensure url not included in the input - if err := r.validateNoLegacyURLs(translator); err != nil { + if err := validateNoLegacyURLs(translator); err != nil { return nil, err } @@ -152,6 +155,11 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio } } + updatedStudio.CustomFields = input.CustomFields + // convert json.Numbers to int/float + updatedStudio.CustomFields.Full = convertMapJSONNumbers(updatedStudio.CustomFields.Full) + updatedStudio.CustomFields.Partial = convertMapJSONNumbers(updatedStudio.CustomFields.Partial) + // Process the base 64 encoded image string var imageData []byte imageIncluded := translator.hasField("image") @@ -167,6 +175,28 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Studio + if updatedStudio.Aliases != nil { + s, err := qb.Find(ctx, studioID) + if err != nil { + return err + } + if s != nil { + 
if err := s.LoadAliases(ctx, qb); err != nil { + return err + } + + effectiveAliases := updatedStudio.Aliases.Apply(s.Aliases.List()) + name := s.Name + if updatedStudio.Name.Set { + name = updatedStudio.Name.Value + } + + sanitized := stringslice.UniqueExcludeFold(effectiveAliases, name) + updatedStudio.Aliases.Values = sanitized + updatedStudio.Aliases.Mode = models.RelationshipUpdateModeSet + } + } + if err := studio.ValidateModify(ctx, updatedStudio, qb); err != nil { return err } @@ -211,7 +241,7 @@ func (r *mutationResolver) BulkStudioUpdate(ctx context.Context, input BulkStudi if translator.hasField("urls") { // ensure url/twitter/instagram are not included in the input - if err := r.validateNoLegacyURLs(translator); err != nil { + if err := validateNoLegacyURLs(translator); err != nil { return nil, err } @@ -233,6 +263,7 @@ func (r *mutationResolver) BulkStudioUpdate(ctx context.Context, input BulkStudi partial.Rating = translator.optionalInt(input.Rating100, "rating100") partial.Details = translator.optionalString(input.Details, "details") partial.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") + partial.Organized = translator.optionalBool(input.Organized, "organized") partial.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids") if err != nil { diff --git a/internal/api/resolver_mutation_tag.go b/internal/api/resolver_mutation_tag.go index f8d4943be..ac0183b74 100644 --- a/internal/api/resolver_mutation_tag.go +++ b/internal/api/resolver_mutation_tag.go @@ -6,7 +6,6 @@ import ( "strconv" "strings" - "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin/hook" "github.com/stashapp/stash/pkg/sliceutil/stringslice" @@ -31,11 +30,14 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) } // Populate a new tag from the input - newTag := models.NewTag() + newTag := models.CreateTagInput{ + Tag: &models.Tag{}, + } + *newTag.Tag = 
models.NewTag() newTag.Name = strings.TrimSpace(input.Name) newTag.SortName = translator.string(input.SortName) - newTag.Aliases = models.NewRelatedStrings(stringslice.TrimSpace(input.Aliases)) + newTag.Aliases = models.NewRelatedStrings(stringslice.UniqueExcludeFold(stringslice.TrimSpace(input.Aliases), newTag.Name)) newTag.Favorite = translator.bool(input.Favorite) newTag.Description = translator.string(input.Description) newTag.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag) @@ -60,6 +62,8 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) return nil, fmt.Errorf("converting child tag ids: %w", err) } + newTag.CustomFields = convertMapJSONNumbers(input.CustomFields) + // Process the base 64 encoded image string var imageData []byte if input.Image != nil { @@ -73,7 +77,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Tag - if err := tag.ValidateCreate(ctx, newTag, qb); err != nil { + if err := tag.ValidateCreate(ctx, *newTag.Tag, qb); err != nil { return err } @@ -98,17 +102,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) return r.getTag(ctx, newTag.ID) } -func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) (*models.Tag, error) { - tagID, err := strconv.Atoi(input.ID) - if err != nil { - return nil, fmt.Errorf("converting id: %w", err) - } - - translator := changesetTranslator{ - inputMap: getUpdateInputMap(ctx), - } - - // Populate tag from the input +func tagPartialFromInput(input TagUpdateInput, translator changesetTranslator) (*models.TagPartial, error) { updatedTag := models.NewTagPartial() updatedTag.Name = translator.optionalString(input.Name, "name") @@ -127,6 +121,7 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) } updatedTag.StashIDs = translator.updateStashIDs(updateStashIDInputs, "stash_ids") + var err 
error updatedTag.ParentIDs, err = translator.updateIds(input.ParentIds, "parent_ids") if err != nil { return nil, fmt.Errorf("converting parent tag ids: %w", err) @@ -137,6 +132,32 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) return nil, fmt.Errorf("converting child tag ids: %w", err) } + if input.CustomFields != nil { + updatedTag.CustomFields = *input.CustomFields + // convert json.Numbers to int/float + updatedTag.CustomFields.Full = convertMapJSONNumbers(updatedTag.CustomFields.Full) + updatedTag.CustomFields.Partial = convertMapJSONNumbers(updatedTag.CustomFields.Partial) + } + + return &updatedTag, nil +} + +func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) (*models.Tag, error) { + tagID, err := strconv.Atoi(input.ID) + if err != nil { + return nil, fmt.Errorf("converting id: %w", err) + } + + translator := changesetTranslator{ + inputMap: getUpdateInputMap(ctx), + } + + // Populate tag from the input + updatedTag, err := tagPartialFromInput(input, translator) + if err != nil { + return nil, err + } + var imageData []byte imageIncluded := translator.hasField("image") if input.Image != nil { @@ -151,11 +172,33 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Tag - if err := tag.ValidateUpdate(ctx, tagID, updatedTag, qb); err != nil { + if updatedTag.Aliases != nil { + t, err := qb.Find(ctx, tagID) + if err != nil { + return err + } + if t != nil { + if err := t.LoadAliases(ctx, qb); err != nil { + return err + } + + newAliases := updatedTag.Aliases.Apply(t.Aliases.List()) + name := t.Name + if updatedTag.Name.Set { + name = updatedTag.Name.Value + } + + sanitized := stringslice.UniqueExcludeFold(newAliases, name) + updatedTag.Aliases.Values = sanitized + updatedTag.Aliases.Mode = models.RelationshipUpdateModeSet + } + } + + if err := tag.ValidateUpdate(ctx, tagID, *updatedTag, qb); 
err != nil { return err } - t, err = qb.UpdatePartial(ctx, tagID, updatedTag) + t, err = qb.UpdatePartial(ctx, tagID, *updatedTag) if err != nil { return err } @@ -303,6 +346,31 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input TagsMergeInput) return nil, nil } + var values *models.TagPartial + var imageData []byte + + if input.Values != nil { + translator := changesetTranslator{ + inputMap: getNamedUpdateInputMap(ctx, "input.values"), + } + + values, err = tagPartialFromInput(*input.Values, translator) + if err != nil { + return nil, err + } + + if input.Values.Image != nil { + var err error + imageData, err = utils.ProcessImageInput(ctx, *input.Values.Image) + if err != nil { + return nil, fmt.Errorf("processing cover image: %w", err) + } + } + } else { + v := models.NewTagPartial() + values = &v + } + var t *models.Tag if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Tag @@ -317,28 +385,22 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input TagsMergeInput) return fmt.Errorf("tag with id %d not found", destination) } - parents, children, err := tag.MergeHierarchy(ctx, destination, source, qb) - if err != nil { - return err - } - if err = qb.Merge(ctx, source, destination); err != nil { return err } - err = qb.UpdateParentTags(ctx, destination, parents) - if err != nil { - return err - } - err = qb.UpdateChildTags(ctx, destination, children) - if err != nil { + if err := tag.ValidateUpdate(ctx, destination, *values, qb); err != nil { return err } - err = tag.ValidateHierarchyExisting(ctx, t, parents, children, qb) - if err != nil { - logger.Errorf("Error merging tag: %s", err) - return err + if _, err := qb.UpdatePartial(ctx, destination, *values); err != nil { + return fmt.Errorf("updating tag: %w", err) + } + + if len(imageData) > 0 { + if err := qb.UpdateImage(ctx, destination, imageData); err != nil { + return err + } } return nil diff --git a/internal/api/resolver_query_configuration.go 
b/internal/api/resolver_query_configuration.go index 8a20fcad1..cf2c0e3cc 100644 --- a/internal/api/resolver_query_configuration.go +++ b/internal/api/resolver_query_configuration.go @@ -96,6 +96,11 @@ func makeConfigGeneralResult() *ConfigGeneralResult { CalculateMd5: config.IsCalculateMD5(), VideoFileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(), ParallelTasks: config.GetParallelTasks(), + UseCustomSpriteInterval: config.GetUseCustomSpriteInterval(), + SpriteInterval: config.GetSpriteInterval(), + SpriteScreenshotSize: config.GetSpriteScreenshotSize(), + MinimumSprites: config.GetMinimumSprites(), + MaximumSprites: config.GetMaximumSprites(), PreviewAudio: config.GetPreviewAudio(), PreviewSegments: config.GetPreviewSegments(), PreviewSegmentDuration: config.GetPreviewSegmentDuration(), @@ -156,6 +161,7 @@ func makeConfigInterfaceResult() *ConfigInterfaceResult { javascriptEnabled := config.GetJavascriptEnabled() customLocales := config.GetCustomLocales() customLocalesEnabled := config.GetCustomLocalesEnabled() + disableCustomizations := config.GetDisableCustomizations() language := config.GetLanguage() handyKey := config.GetHandyKey() scriptOffset := config.GetFunscriptOffset() @@ -183,6 +189,7 @@ func makeConfigInterfaceResult() *ConfigInterfaceResult { JavascriptEnabled: &javascriptEnabled, CustomLocales: &customLocales, CustomLocalesEnabled: &customLocalesEnabled, + DisableCustomizations: &disableCustomizations, Language: &language, ImageLightbox: &imageLightboxOptions, diff --git a/internal/api/resolver_query_scraper.go b/internal/api/resolver_query_scraper.go index 86d449921..353bb1a32 100644 --- a/internal/api/resolver_query_scraper.go +++ b/internal/api/resolver_query_scraper.go @@ -6,6 +6,7 @@ import ( "fmt" "slices" "strconv" + "strings" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" @@ -363,7 +364,8 @@ func (r *queryResolver) ScrapeSingleTag(ctx context.Context, source scraper.Sour client := r.newStashBoxClient(*b) 
var ret []*models.ScrapedTag - out, err := client.QueryTag(ctx, *input.Query) + query := *input.Query + out, err := client.QueryTag(ctx, query) if err != nil { return nil, err @@ -383,6 +385,22 @@ func (r *queryResolver) ScrapeSingleTag(ctx context.Context, source scraper.Sour }); err != nil { return nil, err } + + // tag name query returns results that may not match the query exactly. + // if there is an exact match, it should be first + if query != "" { + for i, result := range ret { + if strings.EqualFold(result.Name, query) { + // prepend exact match to the front of the slice + if i != 0 { + ret = append([]*models.ScrapedTag{result}, append(ret[:i], ret[i+1:]...)...) + } + + break + } + } + } + return ret, nil } diff --git a/internal/api/routes_scene.go b/internal/api/routes_scene.go index 95e7c9d44..2905bd53a 100644 --- a/internal/api/routes_scene.go +++ b/internal/api/routes_scene.go @@ -12,6 +12,7 @@ import ( "github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager/config" + "github.com/stashapp/stash/internal/static" "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/file/video" "github.com/stashapp/stash/pkg/fsutil" @@ -243,6 +244,12 @@ func (rs sceneRoutes) streamSegment(w http.ResponseWriter, r *http.Request, stre } func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) { + // if default flag is set, return the default image + if r.URL.Query().Get("default") == "true" { + utils.ServeImage(w, r, static.ReadAll(static.DefaultSceneImage)) + return + } + scene := r.Context().Value(sceneKey).(*models.Scene) ss := manager.SceneServer{ diff --git a/internal/api/server.go b/internal/api/server.go index 9290c6512..a7516da52 100644 --- a/internal/api/server.go +++ b/internal/api/server.go @@ -11,6 +11,7 @@ import ( "net/http" "os" "path" + "path/filepath" "runtime/debug" "strconv" "strings" @@ -255,6 +256,9 @@ func Initialize() (*Server, error) { staticUI = 
statigz.FileServer(ui.UIBox.(fs.ReadDirFS)) } + // handle favicon override + r.HandleFunc("/favicon.ico", handleFavicon(staticUI)) + // Serve the web app r.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) { ext := path.Ext(r.URL.Path) @@ -295,6 +299,31 @@ func Initialize() (*Server, error) { return server, nil } +func handleFavicon(staticUI *statigz.Server) func(w http.ResponseWriter, r *http.Request) { + mgr := manager.GetInstance() + cfg := mgr.Config + + // check if favicon.ico exists in the config directory + // if so, use that + // otherwise, use the embedded one + iconPath := filepath.Join(cfg.GetConfigPath(), "favicon.ico") + exists, _ := fsutil.FileExists(iconPath) + + if exists { + logger.Debugf("Using custom favicon at %s", iconPath) + } + + return func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Cache-Control", "no-cache") + + if exists { + http.ServeFile(w, r, iconPath) + } else { + staticUI.ServeHTTP(w, r) + } + } +} + // Start starts the server. It listens on the configured address and port. // It calls ListenAndServeTLS if TLS is configured, otherwise it calls ListenAndServe. // Calls to Start are blocked until the server is shutdown. 
@@ -421,7 +450,7 @@ func cssHandler(c *config.Config) func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) { var paths []string - if c.GetCSSEnabled() { + if c.GetCSSEnabled() && !c.GetDisableCustomizations() { // search for custom.css in current directory, then $HOME/.stash fn := c.GetCSSPath() exists, _ := fsutil.FileExists(fn) @@ -439,7 +468,7 @@ func javascriptHandler(c *config.Config) func(w http.ResponseWriter, r *http.Req return func(w http.ResponseWriter, r *http.Request) { var paths []string - if c.GetJavascriptEnabled() { + if c.GetJavascriptEnabled() && !c.GetDisableCustomizations() { // search for custom.js in current directory, then $HOME/.stash fn := c.GetJavascriptPath() exists, _ := fsutil.FileExists(fn) @@ -457,7 +486,7 @@ func customLocalesHandler(c *config.Config) func(w http.ResponseWriter, r *http. return func(w http.ResponseWriter, r *http.Request) { buffer := bytes.Buffer{} - if c.GetCustomLocalesEnabled() { + if c.GetCustomLocalesEnabled() && !c.GetDisableCustomizations() { // search for custom-locales.json in current directory, then $HOME/.stash path := c.GetCustomLocalesPath() exists, _ := fsutil.FileExists(path) diff --git a/internal/autotag/integration_test.go b/internal/autotag/integration_test.go index fc83df848..f537ecfe7 100644 --- a/internal/autotag/integration_test.go +++ b/internal/autotag/integration_test.go @@ -101,16 +101,15 @@ func createPerformer(ctx context.Context, pqb models.PerformerWriter) error { func createStudio(ctx context.Context, qb models.StudioWriter, name string) (*models.Studio, error) { // create the studio - studio := models.Studio{ - Name: name, - } + studio := models.NewCreateStudioInput() + studio.Name = name err := qb.Create(ctx, &studio) if err != nil { return nil, err } - return &studio, nil + return studio.Studio, nil } func createTag(ctx context.Context, qb models.TagWriter) error { @@ -119,7 +118,7 @@ func createTag(ctx context.Context, qb 
models.TagWriter) error { Name: testName, } - err := qb.Create(ctx, &tag) + err := qb.Create(ctx, &models.CreateTagInput{Tag: &tag}) if err != nil { return err } @@ -366,7 +365,10 @@ func makeImage(expectedResult bool) *models.Image { } func createImage(ctx context.Context, w models.ImageWriter, o *models.Image, f *models.ImageFile) error { - err := w.Create(ctx, o, []models.FileID{f.ID}) + err := w.Create(ctx, &models.CreateImageInput{ + Image: o, + FileIDs: []models.FileID{f.ID}, + }) if err != nil { return fmt.Errorf("Failed to create image with path '%s': %s", f.Path, err.Error()) @@ -469,7 +471,10 @@ func makeGallery(expectedResult bool) *models.Gallery { } func createGallery(ctx context.Context, w models.GalleryWriter, o *models.Gallery, f *models.BaseFile) error { - err := w.Create(ctx, o, []models.FileID{f.ID}) + err := w.Create(ctx, &models.CreateGalleryInput{ + Gallery: o, + FileIDs: []models.FileID{f.ID}, + }) if err != nil { return fmt.Errorf("Failed to create gallery with path '%s': %s", f.Path, err.Error()) } diff --git a/internal/desktop/desktop.go b/internal/desktop/desktop.go index 06d400793..f1ca9bc92 100644 --- a/internal/desktop/desktop.go +++ b/internal/desktop/desktop.go @@ -2,6 +2,7 @@ package desktop import ( + "fmt" "os" "path" "path/filepath" @@ -155,15 +156,17 @@ func getIconPath() string { return path.Join(config.GetInstance().GetConfigPath(), "icon.png") } -func RevealInFileManager(path string) { - exists, err := fsutil.FileExists(path) +func RevealInFileManager(path string) error { + info, err := os.Stat(path) if err != nil { - logger.Errorf("Error checking file: %s", err) - return + return fmt.Errorf("error checking path: %w", err) } - if exists && IsDesktop() { - revealInFileManager(path) + + absPath, err := filepath.Abs(path) + if err != nil { + return fmt.Errorf("error getting absolute path: %w", err) } + return revealInFileManager(absPath, info) } func getServerURL(path string) string { diff --git 
a/internal/desktop/desktop_platform_darwin.go b/internal/desktop/desktop_platform_darwin.go index 593e9516f..732009007 100644 --- a/internal/desktop/desktop_platform_darwin.go +++ b/internal/desktop/desktop_platform_darwin.go @@ -4,9 +4,11 @@ package desktop import ( + "fmt" + "os" "os/exec" - "github.com/kermieisinthehouse/gosx-notifier" + gosxnotifier "github.com/kermieisinthehouse/gosx-notifier" "github.com/stashapp/stash/pkg/logger" ) @@ -32,8 +34,11 @@ func sendNotification(notificationTitle string, notificationText string) { } } -func revealInFileManager(path string) { - exec.Command(`open`, `-R`, path) +func revealInFileManager(path string, _ os.FileInfo) error { + if err := exec.Command(`open`, `-R`, path).Run(); err != nil { + return fmt.Errorf("error revealing path in Finder: %w", err) + } + return nil } func isDoubleClickLaunched() bool { diff --git a/internal/desktop/desktop_platform_nixes.go b/internal/desktop/desktop_platform_nixes.go index 69c780d3c..f5ab13384 100644 --- a/internal/desktop/desktop_platform_nixes.go +++ b/internal/desktop/desktop_platform_nixes.go @@ -4,8 +4,10 @@ package desktop import ( + "fmt" "os" "os/exec" + "path/filepath" "strings" "github.com/stashapp/stash/pkg/logger" @@ -33,8 +35,15 @@ func sendNotification(notificationTitle string, notificationText string) { } } -func revealInFileManager(path string) { - +func revealInFileManager(path string, info os.FileInfo) error { + dir := path + if !info.IsDir() { + dir = filepath.Dir(path) + } + if err := exec.Command("xdg-open", dir).Run(); err != nil { + return fmt.Errorf("error opening directory in file manager: %w", err) + } + return nil } func isDoubleClickLaunched() bool { diff --git a/internal/desktop/desktop_platform_windows.go b/internal/desktop/desktop_platform_windows.go index ecb4060e6..48feabed5 100644 --- a/internal/desktop/desktop_platform_windows.go +++ b/internal/desktop/desktop_platform_windows.go @@ -4,6 +4,7 @@ package desktop import ( + "os" "os/exec" "syscall" 
"unsafe" @@ -83,6 +84,10 @@ func sendNotification(notificationTitle string, notificationText string) { } } -func revealInFileManager(path string) { - exec.Command(`explorer`, `\select`, path) +func revealInFileManager(path string, _ os.FileInfo) error { + c := exec.Command(`explorer`, `/select,`, path) + logger.Debugf("Running: %s", c.String()) + // explorer seems to return an error code even when it works, so ignore the error + _ = c.Run() + return nil } diff --git a/internal/desktop/systray_nonlinux.go b/internal/desktop/systray_nonlinux.go index dab6d4dc2..6b6055f11 100644 --- a/internal/desktop/systray_nonlinux.go +++ b/internal/desktop/systray_nonlinux.go @@ -3,6 +3,7 @@ package desktop import ( + "fmt" "runtime" "strings" @@ -58,12 +59,12 @@ func startSystray(exit chan int, faviconProvider FaviconProvider) { func systrayInitialize(exit chan<- int, faviconProvider FaviconProvider) { favicon := faviconProvider.GetFavicon() systray.SetTemplateIcon(favicon, favicon) - systray.SetTooltip("🟢 Stash is Running.") + c := config.GetInstance() + systray.SetTooltip(fmt.Sprintf("🟢 Stash is Running on port %d.", c.GetPort())) openStashButton := systray.AddMenuItem("Open Stash", "Open a browser window to Stash") var menuItems []string systray.AddSeparator() - c := config.GetInstance() if !c.IsNewSystem() { menuItems = c.GetMenuItems() for _, item := range menuItems { diff --git a/internal/dlna/activity.go b/internal/dlna/activity.go new file mode 100644 index 000000000..a9a5d9b2d --- /dev/null +++ b/internal/dlna/activity.go @@ -0,0 +1,333 @@ +package dlna + +import ( + "context" + "fmt" + "sync" + "time" + + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/txn" +) + +const ( + // DefaultSessionTimeout is the time after which a session is considered complete + // if no new requests are received. 
+ // This is set high (5 minutes) because DLNA clients buffer aggressively and may not + // send any HTTP requests for extended periods while the user is still watching. + DefaultSessionTimeout = 5 * time.Minute + + // monitorInterval is how often we check for expired sessions. + monitorInterval = 10 * time.Second +) + +// ActivityConfig provides configuration options for DLNA activity tracking. +type ActivityConfig interface { + // GetDLNAActivityTrackingEnabled returns true if activity tracking should be enabled. + // If not implemented, defaults to true. + GetDLNAActivityTrackingEnabled() bool + + // GetMinimumPlayPercent returns the minimum percentage of a video that must be + // watched before incrementing the play count. Uses UI setting if available. + GetMinimumPlayPercent() int +} + +// SceneActivityWriter provides methods for saving scene activity. +type SceneActivityWriter interface { + SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) + AddViews(ctx context.Context, sceneID int, dates []time.Time) ([]time.Time, error) +} + +// streamSession represents an active DLNA streaming session. +type streamSession struct { + SceneID int + ClientIP string + StartTime time.Time + LastActivity time.Time + VideoDuration float64 + PlayCountAdded bool +} + +// sessionKey generates a unique key for a session based on client IP and scene ID. +func sessionKey(clientIP string, sceneID int) string { + return fmt.Sprintf("%s:%d", clientIP, sceneID) +} + +// percentWatched calculates the estimated percentage of video watched. +// Uses a time-based approach since DLNA clients buffer aggressively and byte +// positions don't correlate with actual playback position. +// +// The key insight: you cannot have watched more of the video than time has elapsed. +// If the video is 30 minutes and only 1 minute has passed, maximum watched is ~3.3%. 
+func (s *streamSession) percentWatched() float64 { + if s.VideoDuration <= 0 { + return 0 + } + + // Calculate elapsed time from session start to last activity + elapsed := s.LastActivity.Sub(s.StartTime).Seconds() + if elapsed <= 0 { + return 0 + } + + // Maximum possible percent is based on elapsed time + // You can't watch more of the video than time has passed + timeBasedPercent := (elapsed / s.VideoDuration) * 100 + + // Cap at 100% + if timeBasedPercent > 100 { + return 100 + } + + return timeBasedPercent +} + +// estimatedResumeTime calculates the estimated resume time based on elapsed time. +// Since DLNA clients buffer aggressively, byte positions don't correlate with playback. +// Instead, we estimate based on how long the session has been active. +// Returns the time in seconds, or 0 if the video is nearly complete (>=98%). +func (s *streamSession) estimatedResumeTime() float64 { + if s.VideoDuration <= 0 { + return 0 + } + + // Calculate elapsed time from session start + elapsed := s.LastActivity.Sub(s.StartTime).Seconds() + if elapsed <= 0 { + return 0 + } + + // If elapsed time exceeds 98% of video duration, reset resume time (matches frontend behavior) + if elapsed >= s.VideoDuration*0.98 { + return 0 + } + + // Resume time is approximately where the user was watching + // Capped by video duration + if elapsed > s.VideoDuration { + elapsed = s.VideoDuration + } + + return elapsed +} + +// ActivityTracker tracks DLNA streaming activity and saves it to the database. +type ActivityTracker struct { + txnManager txn.Manager + sceneWriter SceneActivityWriter + config ActivityConfig + sessionTimeout time.Duration + + sessions map[string]*streamSession + mutex sync.RWMutex + + ctx context.Context + cancelFunc context.CancelFunc + wg sync.WaitGroup +} + +// NewActivityTracker creates a new ActivityTracker. 
+func NewActivityTracker( + txnManager txn.Manager, + sceneWriter SceneActivityWriter, + config ActivityConfig, +) *ActivityTracker { + ctx, cancel := context.WithCancel(context.Background()) + + tracker := &ActivityTracker{ + txnManager: txnManager, + sceneWriter: sceneWriter, + config: config, + sessionTimeout: DefaultSessionTimeout, + sessions: make(map[string]*streamSession), + ctx: ctx, + cancelFunc: cancel, + } + + // Start the session monitor goroutine + tracker.wg.Add(1) + go tracker.monitorSessions() + + return tracker +} + +// Stop stops the activity tracker and processes any remaining sessions. +func (t *ActivityTracker) Stop() { + t.cancelFunc() + t.wg.Wait() + + // Process any remaining sessions + t.mutex.Lock() + sessions := make([]*streamSession, 0, len(t.sessions)) + for _, session := range t.sessions { + sessions = append(sessions, session) + } + t.sessions = make(map[string]*streamSession) + t.mutex.Unlock() + + for _, session := range sessions { + t.processCompletedSession(session) + } +} + +// RecordRequest records a streaming request for activity tracking. +// Each request updates the session's LastActivity time, which is used for +// time-based tracking of watch progress. +func (t *ActivityTracker) RecordRequest(sceneID int, clientIP string, videoDuration float64) { + if !t.isEnabled() { + return + } + + key := sessionKey(clientIP, sceneID) + now := time.Now() + + t.mutex.Lock() + defer t.mutex.Unlock() + + session, exists := t.sessions[key] + if !exists { + session = &streamSession{ + SceneID: sceneID, + ClientIP: clientIP, + StartTime: now, + VideoDuration: videoDuration, + } + t.sessions[key] = session + logger.Debugf("[DLNA Activity] New session started: scene=%d, client=%s", sceneID, clientIP) + } + + session.LastActivity = now +} + +// monitorSessions periodically checks for expired sessions and processes them. 
+func (t *ActivityTracker) monitorSessions() { + defer t.wg.Done() + + ticker := time.NewTicker(monitorInterval) + defer ticker.Stop() + + for { + select { + case <-t.ctx.Done(): + return + case <-ticker.C: + t.processExpiredSessions() + } + } +} + +// processExpiredSessions finds and processes sessions that have timed out. +func (t *ActivityTracker) processExpiredSessions() { + now := time.Now() + var expiredSessions []*streamSession + + t.mutex.Lock() + for key, session := range t.sessions { + timeSinceStart := now.Sub(session.StartTime) + timeSinceActivity := now.Sub(session.LastActivity) + + // Must have no HTTP activity for the full timeout period + if timeSinceActivity <= t.sessionTimeout { + continue + } + + // DLNA clients buffer aggressively - they fetch most/all of the video quickly, + // then play from cache with NO further HTTP requests. + // + // Two scenarios: + // 1. User watched the whole video: timeSinceStart >= videoDuration + // -> Set LastActivity to when timeout began (they finished watching) + // 2. User stopped early: timeSinceStart < videoDuration + // -> Keep LastActivity as-is (best estimate of when they stopped) + + videoDuration := time.Duration(session.VideoDuration) * time.Second + if timeSinceStart >= videoDuration && videoDuration > 0 { + // User likely watched the whole video, then it timed out + // Estimate they watched until the timeout period started + session.LastActivity = now.Add(-t.sessionTimeout) + } + // else: User stopped early - LastActivity is already our best estimate + + expiredSessions = append(expiredSessions, session) + delete(t.sessions, key) + } + t.mutex.Unlock() + + for _, session := range expiredSessions { + t.processCompletedSession(session) + } +} + +// processCompletedSession saves activity data for a completed streaming session. 
+func (t *ActivityTracker) processCompletedSession(session *streamSession) { + percentWatched := session.percentWatched() + resumeTime := session.estimatedResumeTime() + + logger.Debugf("[DLNA Activity] Session completed: scene=%d, client=%s, videoDuration=%.1fs, percent=%.1f%%, resume=%.1fs", + session.SceneID, session.ClientIP, session.VideoDuration, percentWatched, resumeTime) + + // Only save if there was meaningful activity (at least 1% watched) + if percentWatched < 1 { + logger.Debugf("[DLNA Activity] Session too short, skipping save") + return + } + + // Skip DB operations if txnManager is nil (for testing) + if t.txnManager == nil { + logger.Debugf("[DLNA Activity] No transaction manager, skipping DB save") + return + } + + // Determine what needs to be saved + shouldSaveResume := resumeTime > 0 + shouldAddView := !session.PlayCountAdded && percentWatched >= float64(t.getMinimumPlayPercent()) + + // Nothing to save + if !shouldSaveResume && !shouldAddView { + return + } + + // Save everything in a single transaction + ctx := context.Background() + if err := txn.WithTxn(ctx, t.txnManager, func(ctx context.Context) error { + // Save resume time only. DLNA clients buffer aggressively and don't report + // playback position, so we can't accurately track play duration - saving + // guesses would corrupt analytics. Resume time is still useful as a + // "continue watching" hint even if imprecise. 
+ if shouldSaveResume { + if _, err := t.sceneWriter.SaveActivity(ctx, session.SceneID, &resumeTime, nil); err != nil { + return fmt.Errorf("save resume time: %w", err) + } + } + + // Increment play count (also updates last_played_at via view date) + if shouldAddView { + if _, err := t.sceneWriter.AddViews(ctx, session.SceneID, []time.Time{time.Now()}); err != nil { + return fmt.Errorf("add view: %w", err) + } + session.PlayCountAdded = true + logger.Debugf("[DLNA Activity] Incremented play count for scene %d (%.1f%% watched)", + session.SceneID, percentWatched) + } + + return nil + }); err != nil { + logger.Warnf("[DLNA Activity] Failed to save activity for scene %d: %v", session.SceneID, err) + } +} + +// isEnabled returns true if activity tracking is enabled. +func (t *ActivityTracker) isEnabled() bool { + if t.config == nil { + return true // Default to enabled + } + return t.config.GetDLNAActivityTrackingEnabled() +} + +// getMinimumPlayPercent returns the minimum play percentage for incrementing play count. 
+func (t *ActivityTracker) getMinimumPlayPercent() int { + if t.config == nil { + return 0 // Default: any play increments count (matches frontend default) + } + return t.config.GetMinimumPlayPercent() +} diff --git a/internal/dlna/activity_test.go b/internal/dlna/activity_test.go new file mode 100644 index 000000000..19ae7ebb8 --- /dev/null +++ b/internal/dlna/activity_test.go @@ -0,0 +1,420 @@ +package dlna + +import ( + "context" + "sync" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +// mockSceneWriter is a mock implementation of SceneActivityWriter +type mockSceneWriter struct { + mu sync.Mutex + saveActivityCalls []saveActivityCall + addViewsCalls []addViewsCall +} + +type saveActivityCall struct { + sceneID int + resumeTime *float64 + playDuration *float64 +} + +type addViewsCall struct { + sceneID int + dates []time.Time +} + +func (m *mockSceneWriter) SaveActivity(_ context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) { + m.mu.Lock() + m.saveActivityCalls = append(m.saveActivityCalls, saveActivityCall{ + sceneID: sceneID, + resumeTime: resumeTime, + playDuration: playDuration, + }) + m.mu.Unlock() + return true, nil +} + +func (m *mockSceneWriter) AddViews(_ context.Context, sceneID int, dates []time.Time) ([]time.Time, error) { + m.mu.Lock() + m.addViewsCalls = append(m.addViewsCalls, addViewsCall{ + sceneID: sceneID, + dates: dates, + }) + m.mu.Unlock() + return dates, nil +} + +// mockConfig is a mock implementation of ActivityConfig +type mockConfig struct { + enabled bool + minPlayPercent int +} + +func (c *mockConfig) GetDLNAActivityTrackingEnabled() bool { + return c.enabled +} + +func (c *mockConfig) GetMinimumPlayPercent() int { + return c.minPlayPercent +} + +func TestStreamSession_PercentWatched(t *testing.T) { + now := time.Now() + + tests := []struct { + name string + startTime time.Time + lastActivity time.Time + videoDuration float64 + expected float64 + }{ + { + name: "no video 
duration", + startTime: now.Add(-60 * time.Second), + lastActivity: now, + videoDuration: 0, + expected: 0, + }, + { + name: "half watched", + startTime: now.Add(-60 * time.Second), + lastActivity: now, + videoDuration: 120.0, // 2 minutes, watched for 1 minute = 50% + expected: 50.0, + }, + { + name: "fully watched", + startTime: now.Add(-120 * time.Second), + lastActivity: now, + videoDuration: 120.0, // 2 minutes, watched for 2 minutes = 100% + expected: 100.0, + }, + { + name: "quarter watched", + startTime: now.Add(-30 * time.Second), + lastActivity: now, + videoDuration: 120.0, // 2 minutes, watched for 30 seconds = 25% + expected: 25.0, + }, + { + name: "elapsed exceeds duration - capped at 100%", + startTime: now.Add(-180 * time.Second), + lastActivity: now, + videoDuration: 120.0, // 2 minutes, but 3 minutes elapsed = capped at 100% + expected: 100.0, + }, + { + name: "no elapsed time", + startTime: now, + lastActivity: now, + videoDuration: 120.0, + expected: 0, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + session := &streamSession{ + StartTime: tt.startTime, + LastActivity: tt.lastActivity, + VideoDuration: tt.videoDuration, + } + result := session.percentWatched() + assert.InDelta(t, tt.expected, result, 0.01) + }) + } +} + +func TestStreamSession_EstimatedResumeTime(t *testing.T) { + now := time.Now() + + tests := []struct { + name string + startTime time.Time + lastActivity time.Time + videoDuration float64 + expected float64 + }{ + { + name: "no elapsed time", + startTime: now, + lastActivity: now, + videoDuration: 120.0, + expected: 0, + }, + { + name: "half way through", + startTime: now.Add(-60 * time.Second), + lastActivity: now, + videoDuration: 120.0, // 2 minutes, watched for 1 minute = resume at 60s + expected: 60.0, + }, + { + name: "quarter way through", + startTime: now.Add(-30 * time.Second), + lastActivity: now, + videoDuration: 120.0, // 2 minutes, watched for 30 seconds = resume at 30s + expected: 
30.0, + }, + { + name: "98% complete - should reset to 0", + startTime: now.Add(-118 * time.Second), + lastActivity: now, + videoDuration: 120.0, // 98.3% elapsed, should reset + expected: 0, + }, + { + name: "100% complete - should reset to 0", + startTime: now.Add(-120 * time.Second), + lastActivity: now, + videoDuration: 120.0, + expected: 0, + }, + { + name: "elapsed exceeds duration - capped and reset to 0", + startTime: now.Add(-180 * time.Second), + lastActivity: now, + videoDuration: 120.0, // 150% elapsed, capped at 100%, reset to 0 + expected: 0, + }, + { + name: "no video duration", + startTime: now.Add(-60 * time.Second), + lastActivity: now, + videoDuration: 0, + expected: 0, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + session := &streamSession{ + StartTime: tt.startTime, + LastActivity: tt.lastActivity, + VideoDuration: tt.videoDuration, + } + result := session.estimatedResumeTime() + assert.InDelta(t, tt.expected, result, 1.0) // Allow 1 second tolerance + }) + } +} + +func TestSessionKey(t *testing.T) { + key := sessionKey("192.168.1.100", 42) + assert.Equal(t, "192.168.1.100:42", key) +} + +func TestActivityTracker_RecordRequest(t *testing.T) { + config := &mockConfig{enabled: true, minPlayPercent: 50} + + // Create tracker without starting the goroutine (for unit testing) + tracker := &ActivityTracker{ + txnManager: nil, // Don't need DB for this test + sceneWriter: nil, + config: config, + sessionTimeout: DefaultSessionTimeout, + sessions: make(map[string]*streamSession), + } + + // Record first request - should create new session + tracker.RecordRequest(42, "192.168.1.100", 120.0) + + tracker.mutex.RLock() + session := tracker.sessions["192.168.1.100:42"] + tracker.mutex.RUnlock() + + assert.NotNil(t, session) + assert.Equal(t, 42, session.SceneID) + assert.Equal(t, "192.168.1.100", session.ClientIP) + assert.Equal(t, 120.0, session.VideoDuration) + assert.False(t, session.StartTime.IsZero()) + assert.False(t, 
session.LastActivity.IsZero()) + + // Record second request - should update LastActivity + firstActivity := session.LastActivity + time.Sleep(10 * time.Millisecond) + tracker.RecordRequest(42, "192.168.1.100", 120.0) + + tracker.mutex.RLock() + session = tracker.sessions["192.168.1.100:42"] + tracker.mutex.RUnlock() + + assert.True(t, session.LastActivity.After(firstActivity)) +} + +func TestActivityTracker_DisabledTracking(t *testing.T) { + config := &mockConfig{enabled: false, minPlayPercent: 50} + + // Create tracker without starting the goroutine (for unit testing) + tracker := &ActivityTracker{ + txnManager: nil, + sceneWriter: nil, + config: config, + sessionTimeout: DefaultSessionTimeout, + sessions: make(map[string]*streamSession), + } + + // Record request - should be ignored when tracking is disabled + tracker.RecordRequest(42, "192.168.1.100", 120.0) + + tracker.mutex.RLock() + sessionCount := len(tracker.sessions) + tracker.mutex.RUnlock() + + assert.Equal(t, 0, sessionCount) +} + +func TestActivityTracker_SessionExpiration(t *testing.T) { + // For this test, we'll test the session expiration logic directly + // without the full transaction manager integration + + sceneWriter := &mockSceneWriter{} + config := &mockConfig{enabled: true, minPlayPercent: 10} + + // Create a tracker with nil txnManager - we'll test processCompletedSession separately + // Here we just verify the session management logic + tracker := &ActivityTracker{ + txnManager: nil, // Skip DB calls for this test + sceneWriter: sceneWriter, + config: config, + sessionTimeout: 100 * time.Millisecond, + sessions: make(map[string]*streamSession), + } + + // Manually add a session + // Use a short video duration (1 second) so the test can verify expiration quickly. 
+ now := time.Now() + tracker.sessions["192.168.1.100:42"] = &streamSession{ + SceneID: 42, + ClientIP: "192.168.1.100", + StartTime: now.Add(-5 * time.Second), // Started 5 seconds ago + LastActivity: now.Add(-200 * time.Millisecond), // Last activity 200ms ago (> 100ms timeout) + VideoDuration: 1.0, // Short video so timeSinceStart > videoDuration + } + + // Verify session exists + assert.Len(t, tracker.sessions, 1) + + // Process expired sessions - this will try to save activity but txnManager is nil + // so it will skip the DB calls but still remove the session + tracker.processExpiredSessions() + + // Verify session was removed (even though DB calls were skipped) + assert.Len(t, tracker.sessions, 0) +} + +func TestActivityTracker_SessionExpiration_StoppedEarly(t *testing.T) { + // Test that sessions expire when user stops watching early (before video ends) + // This was a bug where sessions wouldn't expire until video duration passed + + config := &mockConfig{enabled: true, minPlayPercent: 10} + tracker := &ActivityTracker{ + txnManager: nil, + sceneWriter: nil, + config: config, + sessionTimeout: 100 * time.Millisecond, + sessions: make(map[string]*streamSession), + } + + // User started watching a 30-minute video but stopped after 5 seconds + now := time.Now() + tracker.sessions["192.168.1.100:42"] = &streamSession{ + SceneID: 42, + ClientIP: "192.168.1.100", + StartTime: now.Add(-5 * time.Second), // Started 5 seconds ago + LastActivity: now.Add(-200 * time.Millisecond), // Last activity 200ms ago (> 100ms timeout) + VideoDuration: 1800.0, // 30 minute video - much longer than elapsed time + } + + assert.Len(t, tracker.sessions, 1) + + // Session should expire because timeSinceActivity > timeout + // Even though the video is 30 minutes and only 5 seconds have passed + tracker.processExpiredSessions() + + // Verify session was expired + assert.Len(t, tracker.sessions, 0, "Session should expire when user stops early, not wait for video duration") +} + +func 
TestActivityTracker_MinimumPlayPercentThreshold(t *testing.T) { + // Test the threshold logic without full transaction integration + config := &mockConfig{enabled: true, minPlayPercent: 75} // High threshold + + tracker := &ActivityTracker{ + txnManager: nil, + sceneWriter: nil, + config: config, + sessionTimeout: 50 * time.Millisecond, + sessions: make(map[string]*streamSession), + } + + // Test that getMinimumPlayPercent returns the configured value + assert.Equal(t, 75, tracker.getMinimumPlayPercent()) + + // Create a session with 30% watched (36 seconds of a 120 second video) + now := time.Now() + session := &streamSession{ + SceneID: 42, + StartTime: now.Add(-36 * time.Second), + LastActivity: now, + VideoDuration: 120.0, + } + + // 30% is below 75% threshold + percentWatched := session.percentWatched() + assert.InDelta(t, 30.0, percentWatched, 0.1) + assert.False(t, percentWatched >= float64(tracker.getMinimumPlayPercent())) +} + +func TestActivityTracker_MultipleSessions(t *testing.T) { + config := &mockConfig{enabled: true, minPlayPercent: 50} + + // Create tracker without starting the goroutine (for unit testing) + tracker := &ActivityTracker{ + txnManager: nil, + sceneWriter: nil, + config: config, + sessionTimeout: DefaultSessionTimeout, + sessions: make(map[string]*streamSession), + } + + // Different clients watching same scene + tracker.RecordRequest(42, "192.168.1.100", 120.0) + tracker.RecordRequest(42, "192.168.1.101", 120.0) + + // Same client watching different scenes + tracker.RecordRequest(43, "192.168.1.100", 180.0) + + tracker.mutex.RLock() + assert.Len(t, tracker.sessions, 3) + tracker.mutex.RUnlock() +} + +func TestActivityTracker_ShortSessionIgnored(t *testing.T) { + // Test that short sessions are ignored + // Create a session with only ~0.8% watched (1 second of a 120 second video) + now := time.Now() + session := &streamSession{ + SceneID: 42, + ClientIP: "192.168.1.100", + StartTime: now.Add(-1 * time.Second), // Only 1 second + 
LastActivity: now, + VideoDuration: 120.0, // 2 minutes + } + + // Verify percent watched is below threshold (1s / 120s = 0.83%) + assert.InDelta(t, 0.83, session.percentWatched(), 0.1) + + // Verify elapsed time is short + elapsed := session.LastActivity.Sub(session.StartTime).Seconds() + assert.InDelta(t, 1.0, elapsed, 0.5) + + // Both are below the minimum thresholds (1% and 5 seconds) + percentWatched := session.percentWatched() + shouldSkip := percentWatched < 1 && elapsed < 5 + assert.True(t, shouldSkip, "Short session should be skipped") +} diff --git a/internal/dlna/dms.go b/internal/dlna/dms.go index 3b27d607b..d68705f74 100644 --- a/internal/dlna/dms.go +++ b/internal/dlna/dms.go @@ -278,6 +278,7 @@ type Server struct { repository Repository sceneServer sceneServer ipWhitelistManager *ipWhitelistManager + activityTracker *ActivityTracker VideoSortOrder string subscribeLock sync.Mutex @@ -596,6 +597,7 @@ func (me *Server) initMux(mux *http.ServeMux) { mux.HandleFunc(resPath, func(w http.ResponseWriter, r *http.Request) { sceneId := r.URL.Query().Get("scene") var scene *models.Scene + var videoDuration float64 repo := me.repository err := repo.WithReadTxn(r.Context(), func(ctx context.Context) error { sceneIdInt, err := strconv.Atoi(sceneId) @@ -603,6 +605,15 @@ func (me *Server) initMux(mux *http.ServeMux) { return nil } scene, _ = repo.SceneFinder.Find(ctx, sceneIdInt) + if scene != nil { + // Load primary file to get duration for activity tracking + if err := scene.LoadPrimaryFile(ctx, repo.FileGetter); err != nil { + logger.Debugf("failed to load primary file for scene %d: %v", sceneIdInt, err) + } + if f := scene.Files.Primary(); f != nil { + videoDuration = f.Duration + } + } return nil }) if err != nil { @@ -615,6 +626,14 @@ func (me *Server) initMux(mux *http.ServeMux) { w.Header().Set("transferMode.dlna.org", "Streaming") w.Header().Set("contentFeatures.dlna.org", "DLNA.ORG_OP=01;DLNA.ORG_CI=0;DLNA.ORG_FLAGS=01500000000000000000000000000000") + + 
// Track activity - uses time-based tracking, updated on each request + if me.activityTracker != nil { + sceneIdInt, _ := strconv.Atoi(sceneId) + clientIP, _, _ := net.SplitHostPort(r.RemoteAddr) + me.activityTracker.RecordRequest(sceneIdInt, clientIP, videoDuration) + } + me.sceneServer.StreamSceneDirect(scene, w, r) }) mux.HandleFunc(rootDescPath, func(w http.ResponseWriter, r *http.Request) { diff --git a/internal/dlna/service.go b/internal/dlna/service.go index 6ef825bac..98715b1e6 100644 --- a/internal/dlna/service.go +++ b/internal/dlna/service.go @@ -77,13 +77,29 @@ type Config interface { GetDLNADefaultIPWhitelist() []string GetVideoSortOrder() string GetDLNAPortAsString() string + GetDLNAActivityTrackingEnabled() bool +} + +// activityConfig wraps Config to implement ActivityConfig. +type activityConfig struct { + config Config + minPlayPercent int // cached from UI config +} + +func (c *activityConfig) GetDLNAActivityTrackingEnabled() bool { + return c.config.GetDLNAActivityTrackingEnabled() +} + +func (c *activityConfig) GetMinimumPlayPercent() int { + return c.minPlayPercent } type Service struct { - repository Repository - config Config - sceneServer sceneServer - ipWhitelistMgr *ipWhitelistManager + repository Repository + config Config + sceneServer sceneServer + ipWhitelistMgr *ipWhitelistManager + activityTracker *ActivityTracker server *Server running bool @@ -155,6 +171,7 @@ func (s *Service) init() error { repository: s.repository, sceneServer: s.sceneServer, ipWhitelistManager: s.ipWhitelistMgr, + activityTracker: s.activityTracker, Interfaces: interfaces, HTTPConn: func() net.Listener { conn, err := net.Listen("tcp", dmsConfig.Http) @@ -215,7 +232,14 @@ func (s *Service) init() error { // } // NewService initialises and returns a new DLNA service. -func NewService(repo Repository, cfg Config, sceneServer sceneServer) *Service { +// The sceneWriter parameter should implement SceneActivityWriter (typically models.SceneReaderWriter). 
+// The minPlayPercent parameter is the minimum percentage of video that must be played to increment play count. +func NewService(repo Repository, cfg Config, sceneServer sceneServer, sceneWriter SceneActivityWriter, minPlayPercent int) *Service { + activityCfg := &activityConfig{ + config: cfg, + minPlayPercent: minPlayPercent, + } + ret := &Service{ repository: repo, sceneServer: sceneServer, @@ -223,7 +247,8 @@ func NewService(repo Repository, cfg Config, sceneServer sceneServer) *Service { ipWhitelistMgr: &ipWhitelistManager{ config: cfg, }, - mutex: sync.Mutex{}, + activityTracker: NewActivityTracker(repo.TxnManager, sceneWriter, activityCfg), + mutex: sync.Mutex{}, } return ret @@ -283,6 +308,12 @@ func (s *Service) Stop(duration *time.Duration) { if s.running { logger.Info("Stopping DLNA") + + // Stop activity tracker first to process any pending sessions + if s.activityTracker != nil { + s.activityTracker.Stop() + } + err := s.server.Close() if err != nil { logger.Error(err) diff --git a/internal/identify/identify.go b/internal/identify/identify.go index 3d4c94467..6dc67dac3 100644 --- a/internal/identify/identify.go +++ b/internal/identify/identify.go @@ -147,6 +147,9 @@ func (t *SceneIdentifier) getOptions(source ScraperSource) MetadataOptions { if source.Options.IncludeMalePerformers != nil { options.IncludeMalePerformers = source.Options.IncludeMalePerformers } + if source.Options.PerformerGenders != nil { + options.PerformerGenders = source.Options.PerformerGenders + } if source.Options.SkipMultipleMatches != nil { options.SkipMultipleMatches = source.Options.SkipMultipleMatches } @@ -204,13 +207,23 @@ func (t *SceneIdentifier) getSceneUpdater(ctx context.Context, s *models.Scene, ret.Partial.StudioID = models.NewOptionalInt(*studioID) } - includeMalePerformers := true - if options.IncludeMalePerformers != nil { - includeMalePerformers = *options.IncludeMalePerformers + // Determine allowed genders for performer filtering + var allowedGenders 
[]models.GenderEnum + if options.PerformerGenders != nil { + // New field takes precedence + allowedGenders = options.PerformerGenders + } else if options.IncludeMalePerformers != nil && !*options.IncludeMalePerformers { + // Legacy: if includeMalePerformers is false, include all genders except male + for _, g := range models.AllGenderEnum { + if g != models.GenderEnumMale { + allowedGenders = append(allowedGenders, g) + } + } } + // nil allowedGenders means include all performers addSkipSingleNamePerformerTag := false - performerIDs, err := rel.performers(ctx, !includeMalePerformers) + performerIDs, err := rel.performers(ctx, allowedGenders) if err != nil { if errors.Is(err, ErrSkipSingleNamePerformer) { addSkipSingleNamePerformerTag = true diff --git a/internal/identify/identify_test.go b/internal/identify/identify_test.go index eb646c305..35ad2006d 100644 --- a/internal/identify/identify_test.go +++ b/internal/identify/identify_test.go @@ -60,9 +60,15 @@ func TestSceneIdentifier_Identify(t *testing.T) { ) defaultOptions := &MetadataOptions{ - SetOrganized: &boolFalse, - SetCoverImage: &boolFalse, - IncludeMalePerformers: &boolFalse, + SetOrganized: &boolFalse, + SetCoverImage: &boolFalse, + PerformerGenders: []models.GenderEnum{ + models.GenderEnumFemale, + models.GenderEnumTransgenderFemale, + models.GenderEnumTransgenderMale, + models.GenderEnumIntersex, + models.GenderEnumNonBinary, + }, SkipSingleNamePerformers: &boolFalse, } sources := []ScraperSource{ @@ -216,9 +222,15 @@ func TestSceneIdentifier_modifyScene(t *testing.T) { boolFalse := false defaultOptions := &MetadataOptions{ - SetOrganized: &boolFalse, - SetCoverImage: &boolFalse, - IncludeMalePerformers: &boolFalse, + SetOrganized: &boolFalse, + SetCoverImage: &boolFalse, + PerformerGenders: []models.GenderEnum{ + models.GenderEnumFemale, + models.GenderEnumTransgenderFemale, + models.GenderEnumTransgenderMale, + models.GenderEnumIntersex, + models.GenderEnumNonBinary, + }, SkipSingleNamePerformers: 
&boolFalse, } tr := &SceneIdentifier{ diff --git a/internal/identify/options.go b/internal/identify/options.go index b4954a1f1..9e27a3e39 100644 --- a/internal/identify/options.go +++ b/internal/identify/options.go @@ -5,6 +5,7 @@ import ( "io" "strconv" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scraper" ) @@ -32,7 +33,10 @@ type MetadataOptions struct { SetCoverImage *bool `json:"setCoverImage"` SetOrganized *bool `json:"setOrganized"` // defaults to true if not provided + // Deprecated: use PerformerGenders instead IncludeMalePerformers *bool `json:"includeMalePerformers"` + // Filter to only include performers with these genders. If not provided, all genders are included. + PerformerGenders []models.GenderEnum `json:"performerGenders"` // defaults to true if not provided SkipMultipleMatches *bool `json:"skipMultipleMatches"` // ID of tag to tag skipped multiple matches with diff --git a/internal/identify/scene.go b/internal/identify/scene.go index 789674693..00d387c41 100644 --- a/internal/identify/scene.go +++ b/internal/identify/scene.go @@ -5,6 +5,7 @@ import ( "context" "errors" "fmt" + "slices" "strconv" "strings" "time" @@ -69,7 +70,7 @@ func (g sceneRelationships) studio(ctx context.Context) (*int, error) { return nil, nil } -func (g sceneRelationships) performers(ctx context.Context, ignoreMale bool) ([]int, error) { +func (g sceneRelationships) performers(ctx context.Context, allowedGenders []models.GenderEnum) ([]int, error) { fieldStrategy := g.fieldOptions["performers"] scraped := g.result.result.Performers @@ -97,8 +98,11 @@ func (g sceneRelationships) performers(ctx context.Context, ignoreMale bool) ([] singleNamePerformerSkipped := false for _, p := range scraped { - if ignoreMale && p.Gender != nil && strings.EqualFold(*p.Gender, models.GenderEnumMale.String()) { - continue + if allowedGenders != nil && p.Gender != nil { + gender := models.GenderEnum(strings.ToUpper(*p.Gender)) + if !slices.Contains(allowedGenders, 
gender) { + continue + } } performerID, err := getPerformerID(ctx, endpoint, g.performerCreator, p, createMissing, g.skipSingleNamePerformers) @@ -167,7 +171,9 @@ func (g sceneRelationships) tags(ctx context.Context) ([]int, error) { } else if createMissing { newTag := t.ToTag(endpoint, nil) - err := g.tagCreator.Create(ctx, newTag) + err := g.tagCreator.Create(ctx, &models.CreateTagInput{ + Tag: newTag, + }) if err != nil { return nil, fmt.Errorf("error creating tag: %w", err) } diff --git a/internal/identify/scene_test.go b/internal/identify/scene_test.go index a76aef516..9a3fcf025 100644 --- a/internal/identify/scene_test.go +++ b/internal/identify/scene_test.go @@ -27,7 +27,7 @@ func Test_sceneRelationships_studio(t *testing.T) { db := mocks.NewDatabase() db.Studio.On("Create", testCtx, mock.Anything).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) + s := args.Get(1).(*models.CreateStudioInput) s.ID = validStoredIDInt }).Return(nil) @@ -183,13 +183,13 @@ func Test_sceneRelationships_performers(t *testing.T) { } tests := []struct { - name string - scene *models.Scene - fieldOptions *FieldOptions - scraped []*models.ScrapedPerformer - ignoreMale bool - want []int - wantErr bool + name string + scene *models.Scene + fieldOptions *FieldOptions + scraped []*models.ScrapedPerformer + allowedGenders []models.GenderEnum + want []int + wantErr bool }{ { "ignore", @@ -202,7 +202,7 @@ func Test_sceneRelationships_performers(t *testing.T) { StoredID: &validStoredID, }, }, - false, + nil, nil, false, }, @@ -211,7 +211,7 @@ func Test_sceneRelationships_performers(t *testing.T) { emptyScene, defaultOptions, []*models.ScrapedPerformer{}, - false, + nil, nil, false, }, @@ -225,7 +225,7 @@ func Test_sceneRelationships_performers(t *testing.T) { StoredID: &existingPerformerStr, }, }, - false, + nil, nil, false, }, @@ -239,7 +239,7 @@ func Test_sceneRelationships_performers(t *testing.T) { StoredID: &validStoredID, }, }, - false, + nil, 
[]int{existingPerformerID, validStoredIDInt}, false, }, @@ -254,7 +254,7 @@ func Test_sceneRelationships_performers(t *testing.T) { Gender: &male, }, }, - true, + []models.GenderEnum{models.GenderEnumFemale, models.GenderEnumTransgenderMale, models.GenderEnumTransgenderFemale, models.GenderEnumIntersex, models.GenderEnumNonBinary}, nil, false, }, @@ -270,7 +270,7 @@ func Test_sceneRelationships_performers(t *testing.T) { StoredID: &validStoredID, }, }, - false, + nil, []int{validStoredIDInt}, false, }, @@ -287,7 +287,7 @@ func Test_sceneRelationships_performers(t *testing.T) { Gender: &female, }, }, - true, + []models.GenderEnum{models.GenderEnumFemale, models.GenderEnumTransgenderMale, models.GenderEnumTransgenderFemale, models.GenderEnumIntersex, models.GenderEnumNonBinary}, []int{validStoredIDInt}, false, }, @@ -304,7 +304,7 @@ func Test_sceneRelationships_performers(t *testing.T) { StoredID: &invalidStoredID, }, }, - false, + nil, nil, true, }, @@ -319,7 +319,7 @@ func Test_sceneRelationships_performers(t *testing.T) { }, } - got, err := tr.performers(testCtx, tt.ignoreMale) + got, err := tr.performers(testCtx, tt.allowedGenders) if (err != nil) != tt.wantErr { t.Errorf("sceneRelationships.performers() error = %v, wantErr %v", err, tt.wantErr) return @@ -368,14 +368,14 @@ func Test_sceneRelationships_tags(t *testing.T) { db := mocks.NewDatabase() - db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.Tag) bool { - return p.Name == validName + db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateTagInput) bool { + return p.Tag.Name == validName })).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = validStoredIDInt + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = validStoredIDInt }).Return(nil) - db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.Tag) bool { - return p.Name == invalidName + db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateTagInput) bool { + return p.Tag.Name == invalidName 
})).Return(errors.New("error creating tag")) tr := sceneRelationships{ diff --git a/internal/identify/studio_test.go b/internal/identify/studio_test.go index 5424a6a93..083675650 100644 --- a/internal/identify/studio_test.go +++ b/internal/identify/studio_test.go @@ -21,13 +21,13 @@ func Test_createMissingStudio(t *testing.T) { db := mocks.NewDatabase() - db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.Studio) bool { + db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateStudioInput) bool { return p.Name == validName })).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) + s := args.Get(1).(*models.CreateStudioInput) s.ID = createdID }).Return(nil) - db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.Studio) bool { + db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateStudioInput) bool { return p.Name == invalidName })).Return(errors.New("error creating studio")) diff --git a/internal/manager/backup.go b/internal/manager/backup.go new file mode 100644 index 000000000..4a41b263b --- /dev/null +++ b/internal/manager/backup.go @@ -0,0 +1,185 @@ +package manager + +import ( + "archive/zip" + "fmt" + "io" + "io/fs" + "os" + "path/filepath" + "strings" + + "github.com/stashapp/stash/internal/manager/config" + "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/logger" +) + +type databaseBackupZip struct { + *zip.Writer +} + +func (z *databaseBackupZip) zipFileRename(fn, outDir, outFn string) error { + p := filepath.Join(outDir, outFn) + p = filepath.ToSlash(p) + + f, err := z.Create(p) + if err != nil { + return fmt.Errorf("error creating zip entry for %s: %v", fn, err) + } + + i, err := os.Open(fn) + if err != nil { + return fmt.Errorf("error opening %s: %v", fn, err) + } + + defer i.Close() + + if _, err := io.Copy(f, i); err != nil { + return fmt.Errorf("error writing %s to zip: %v", fn, err) + } + + return nil +} + +func (z *databaseBackupZip) zipFile(fn, outDir string) error 
{ + return z.zipFileRename(fn, outDir, filepath.Base(fn)) +} + +func (s *Manager) BackupDatabase(download bool, includeBlobs bool) (string, string, error) { + var backupPath string + var backupName string + + // if we include blobs, then the output is a zip file + // if not, using the same backup logic as before, which creates a sqlite file + if !includeBlobs || s.Config.GetBlobsStorage() != config.BlobStorageTypeFilesystem { + return s.backupDatabaseOnly(download) + } + + // use tmp directory for the backup + backupDir := s.Paths.Generated.Tmp + if err := fsutil.EnsureDir(backupDir); err != nil { + return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err) + } + f, err := os.CreateTemp(backupDir, "backup*.sqlite") + if err != nil { + return "", "", err + } + + backupPath = f.Name() + backupName = s.Database.DatabaseBackupPath("") + f.Close() + + // delete the temp file so that the backup operation can create it + if err := os.Remove(backupPath); err != nil { + return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err) + } + + if err := s.Database.Backup(backupPath); err != nil { + return "", "", err + } + + // create a zip file + zipFileDir := s.Paths.Generated.Downloads + if !download { + zipFileDir = s.Config.GetBackupDirectoryPathOrDefault() + if zipFileDir != "" { + if err := fsutil.EnsureDir(zipFileDir); err != nil { + return "", "", fmt.Errorf("could not create backup directory %v: %w", zipFileDir, err) + } + } + } + + zipFileName := backupName + ".zip" + zipFilePath := filepath.Join(zipFileDir, zipFileName) + + logger.Debugf("Preparing zip file for database backup at %v", zipFilePath) + + zf, err := os.Create(zipFilePath) + if err != nil { + return "", "", fmt.Errorf("could not create zip file %v: %w", zipFilePath, err) + } + defer zf.Close() + + z := databaseBackupZip{ + Writer: zip.NewWriter(zf), + } + + defer z.Close() + + // move the database file into the zip + dbFn := 
filepath.Base(s.Config.GetDatabasePath()) + if err := z.zipFileRename(backupPath, "", dbFn); err != nil { + return "", "", fmt.Errorf("could not add database backup to zip file: %w", err) + } + + if err := os.Remove(backupPath); err != nil { + return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err) + } + + // walk the blobs directory and add files to the zip + blobsDir := s.Config.GetBlobsPath() + err = filepath.WalkDir(blobsDir, func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + + if d.IsDir() { + return nil + } + + // calculate out dir by removing the blobsDir prefix from the path + outDir := filepath.Join("blobs", strings.TrimPrefix(filepath.Dir(path), blobsDir)) + if err := z.zipFile(path, outDir); err != nil { + return fmt.Errorf("could not add blob %v to zip file: %w", path, err) + } + + return nil + }) + + if err != nil { + return "", "", fmt.Errorf("error walking blobs directory: %w", err) + } + + return zipFilePath, zipFileName, nil +} + +func (s *Manager) backupDatabaseOnly(download bool) (string, string, error) { + var backupPath string + var backupName string + + if download { + backupDir := s.Paths.Generated.Downloads + if err := fsutil.EnsureDir(backupDir); err != nil { + return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err) + } + f, err := os.CreateTemp(backupDir, "backup*.sqlite") + if err != nil { + return "", "", err + } + + backupPath = f.Name() + backupName = s.Database.DatabaseBackupPath("") + f.Close() + + // delete the temp file so that the backup operation can create it + if err := os.Remove(backupPath); err != nil { + return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err) + } + } else { + backupDir := s.Config.GetBackupDirectoryPathOrDefault() + if backupDir != "" { + if err := fsutil.EnsureDir(backupDir); err != nil { + return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, 
err) + } + } + backupPath = s.Database.DatabaseBackupPath(backupDir) + backupName = filepath.Base(backupPath) + } + + err := s.Database.Backup(backupPath) + if err != nil { + return "", "", err + } + + return backupPath, backupName, nil +} diff --git a/internal/manager/config/config.go b/internal/manager/config/config.go index 2cc3994f4..19e263810 100644 --- a/internal/manager/config/config.go +++ b/internal/manager/config/config.go @@ -83,6 +83,21 @@ const ( ParallelTasks = "parallel_tasks" parallelTasksDefault = 1 + UseCustomSpriteInterval = "use_custom_sprite_interval" + UseCustomSpriteIntervalDefault = false + + SpriteInterval = "sprite_interval" + SpriteIntervalDefault = 30 + + MinimumSprites = "minimum_sprites" + MinimumSpritesDefault = 10 + + MaximumSprites = "maximum_sprites" + MaximumSpritesDefault = 500 + + SpriteScreenshotSize = "sprite_screenshot_width" + spriteScreenshotSizeDefault = 160 + PreviewPreset = "preview_preset" TranscodeHardwareAcceleration = "ffmpeg.hardware_acceleration" @@ -194,6 +209,7 @@ const ( CSSEnabled = "cssenabled" JavascriptEnabled = "javascriptenabled" CustomLocalesEnabled = "customlocalesenabled" + DisableCustomizations = "disable_customizations" ShowScrubber = "show_scrubber" showScrubberDefault = true @@ -974,6 +990,50 @@ func (i *Config) GetParallelTasksWithAutoDetection() int { return parallelTasks } +// GetUseCustomSpriteInterval returns true if the sprite minimum, maximum, and interval settings +// should be used instead of the default +func (i *Config) GetUseCustomSpriteInterval() bool { + value := i.getBool(UseCustomSpriteInterval) + return value +} + +// GetSpriteInterval returns the time (in seconds) to be between each scrubber sprite +// A value of 0 indicates that the sprite interval should be automatically determined +// based on the minimum sprite setting. 
+func (i *Config) GetSpriteInterval() float64 { + value := i.getFloat64(SpriteInterval) + return value +} + +// GetMinimumSprites returns the minimum number of sprites that have to be generated +// A value of 0 will be overridden with the default of 10. +func (i *Config) GetMinimumSprites() int { + value := i.getInt(MinimumSprites) + if value <= 0 { + return MinimumSpritesDefault + } + return value +} + +// GetMaximumSprites returns the maximum number of sprites that can be generated +// A value of 0 indicates no maximum. +func (i *Config) GetMaximumSprites() int { + value := i.getInt(MaximumSprites) + return value +} + +// GetSpriteScreenshotSize returns the required size of the screenshots to be taken +// during sprite generation in pixels. This will be the width for landscape scenes +// and the height for portrait scenes, with the other dimension being scaled to maintain +// the aspect ratio. If the value is less than or equal to 0, the default will be used. +func (i *Config) GetSpriteScreenshotSize() int { + value := i.getInt(SpriteScreenshotSize) + if value <= 0 { + return spriteScreenshotSizeDefault + } + return value +} + func (i *Config) GetPreviewAudio() bool { return i.getBool(PreviewAudio) } @@ -1323,6 +1383,26 @@ func (i *Config) GetUIConfiguration() map[string]interface{} { return i.forKey(UI).Cut(UI).Raw() } +// GetMinimumPlayPercent returns the minimum percentage of a video that must be +// watched before incrementing the play count. Returns 0 if not configured. 
+func (i *Config) GetMinimumPlayPercent() int { + uiConfig := i.GetUIConfiguration() + if uiConfig == nil { + return 0 + } + if val, ok := uiConfig["minimumPlayPercent"]; ok { + switch v := val.(type) { + case int: + return v + case float64: + return int(v) + case int64: + return int(v) + } + } + return 0 +} + func (i *Config) SetUIConfiguration(v map[string]interface{}) { i.Lock() defer i.Unlock() @@ -1459,6 +1539,13 @@ func (i *Config) GetCustomLocalesEnabled() bool { return i.getBool(CustomLocalesEnabled) } +// GetDisableCustomizations returns true if all customizations (plugins, custom CSS, +// custom JavaScript, and custom locales) should be disabled. This is useful for +// troubleshooting issues without permanently disabling individual customizations. +func (i *Config) GetDisableCustomizations() bool { + return i.getBool(DisableCustomizations) +} + func (i *Config) GetHandyKey() string { return i.getString(HandyKey) } @@ -1615,6 +1702,22 @@ func (i *Config) GetDLNAPortAsString() string { return ":" + strconv.Itoa(i.GetDLNAPort()) } +// GetDLNAActivityTrackingEnabled returns true if DLNA activity tracking is enabled. +// This uses the same "trackActivity" UI setting that controls frontend play history tracking. +// When enabled, scenes played via DLNA will have their play count and duration tracked. +func (i *Config) GetDLNAActivityTrackingEnabled() bool { + uiConfig := i.GetUIConfiguration() + if uiConfig == nil { + return true // Default to enabled + } + if val, ok := uiConfig["trackActivity"]; ok { + if v, ok := val.(bool); ok { + return v + } + } + return true // Default to enabled +} + // GetVideoSortOrder returns the sort order to display videos. If // empty, videos will be sorted by titles. 
func (i *Config) GetVideoSortOrder() string { @@ -1817,6 +1920,12 @@ func (i *Config) setDefaultValues() { i.setDefault(PreviewAudio, previewAudioDefault) i.setDefault(SoundOnPreview, false) + i.setDefault(UseCustomSpriteInterval, UseCustomSpriteIntervalDefault) + i.setDefault(SpriteInterval, SpriteIntervalDefault) + i.setDefault(MinimumSprites, MinimumSpritesDefault) + i.setDefault(MaximumSprites, MaximumSpritesDefault) + i.setDefault(SpriteScreenshotSize, spriteScreenshotSizeDefault) + i.setDefault(ThemeColor, DefaultThemeColor) i.setDefault(WriteImageThumbnails, writeImageThumbnailsDefault) diff --git a/internal/manager/config/stash_config.go b/internal/manager/config/stash_config.go index 4a2cc7d60..7a103631c 100644 --- a/internal/manager/config/stash_config.go +++ b/internal/manager/config/stash_config.go @@ -38,3 +38,12 @@ func (s StashConfigs) GetStashFromDirPath(dirPath string) *StashConfig { } return nil } + +func (s StashConfigs) Paths() []string { + paths := make([]string, len(s)) + for i, c := range s { + // #6618 - clean the path to ensure comparison works correctly + paths[i] = filepath.Clean(c.Path) + } + return paths +} diff --git a/internal/manager/config/tasks.go b/internal/manager/config/tasks.go index 0cfabef30..af7d5f674 100644 --- a/internal/manager/config/tasks.go +++ b/internal/manager/config/tasks.go @@ -11,8 +11,10 @@ type ScanMetadataOptions struct { ScanGenerateImagePreviews bool `json:"scanGenerateImagePreviews"` // Generate sprites during scan ScanGenerateSprites bool `json:"scanGenerateSprites"` - // Generate phashes during scan + // Generate video phashes during scan ScanGeneratePhashes bool `json:"scanGeneratePhashes"` + // Generate image phashes during scan + ScanGenerateImagePhashes bool `json:"scanGenerateImagePhashes"` // Generate image thumbnails during scan ScanGenerateThumbnails bool `json:"scanGenerateThumbnails"` // Generate image thumbnails during scan diff --git a/internal/manager/generator_sprite.go 
b/internal/manager/generator_sprite.go index c28d28674..dc56fde88 100644 --- a/internal/manager/generator_sprite.go +++ b/internal/manager/generator_sprite.go @@ -21,8 +21,7 @@ type SpriteGenerator struct { VideoChecksum string ImageOutputPath string VTTOutputPath string - Rows int - Columns int + Config SpriteGeneratorConfig SlowSeek bool // use alternate seek function, very slow! Overwrite bool @@ -30,13 +29,81 @@ type SpriteGenerator struct { g *generate.Generator } -func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageOutputPath string, vttOutputPath string, rows int, cols int) (*SpriteGenerator, error) { +// SpriteGeneratorConfig holds configuration for the SpriteGenerator +type SpriteGeneratorConfig struct { + // MinimumSprites is the minimum number of sprites to generate, even if the video duration is short + // SpriteInterval will be adjusted accordingly to ensure at least this many sprites are generated. + // A value of 0 means no minimum, and the generator will use the provided SpriteInterval or + // calculate it based on the video duration and MaximumSprites + MinimumSprites int + + // MaximumSprites is the maximum number of sprites to generate, even if the video duration is long + // SpriteInterval will be adjusted accordingly to ensure no more than this many sprites are generated + // A value of 0 means no maximum, and the generator will use the provided SpriteInterval or + // calculate it based on the video duration and MinimumSprites + MaximumSprites int + + // SpriteInterval is the default interval in seconds between each sprite. 
+ // If MinimumSprites or MaximumSprites are set, this value will be adjusted accordingly + // to ensure the desired number of sprites are generated + // A value of 0 means the generator will calculate the interval based on the video duration and + // the provided MinimumSprites and MaximumSprites + SpriteInterval float64 + + // SpriteSize is the size in pixels of the longest dimension of each sprite image. + // The other dimension will be automatically calculated to maintain the aspect ratio of the video + SpriteSize int +} + +const ( + // DefaultSpriteAmount is the default number of sprites to generate if no configuration is provided + // This corresponds to the legacy behavior of the generator, which generates 81 sprites at equal + // intervals across the video duration + DefaultSpriteAmount = 81 + + // DefaultSpriteSize is the default size in pixels of the longest dimension of each sprite image + // if no configuration is provided. This corresponds to the legacy behavior of the generator. 
+ DefaultSpriteSize = 160 +) + +var DefaultSpriteGeneratorConfig = SpriteGeneratorConfig{ + MinimumSprites: DefaultSpriteAmount, + MaximumSprites: DefaultSpriteAmount, + SpriteInterval: 0, + SpriteSize: DefaultSpriteSize, +} + +// NewSpriteGenerator creates a new SpriteGenerator for the given video file and configuration +// It calculates the appropriate sprite interval and count based on the video duration and the provided configuration +func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageOutputPath string, vttOutputPath string, config SpriteGeneratorConfig) (*SpriteGenerator, error) { exists, err := fsutil.FileExists(videoFile.Path) if !exists { return nil, err } + + if videoFile.VideoStreamDuration <= 0 { + s := fmt.Sprintf("video %s: duration(%.3f)/frame count(%d) invalid, skipping sprite creation", videoFile.Path, videoFile.VideoStreamDuration, videoFile.FrameCount) + return nil, errors.New(s) + } + + config.SpriteInterval = calculateSpriteInterval(videoFile, config) + chunkCount := int(math.Ceil(videoFile.VideoStreamDuration / config.SpriteInterval)) + + // adjust the chunk count to the next highest perfect square, to ensure the sprite image + // is completely filled (no empty space in the grid) and the grid is as square as possible (minimizing the number of rows/columns) + gridSize := generate.GetSpriteGridSize(chunkCount) + newChunkCount := gridSize * gridSize + + if newChunkCount != chunkCount { + logger.Debugf("[generator] adjusting chunk count from %d to %d to fit a %dx%d grid", chunkCount, newChunkCount, gridSize, gridSize) + chunkCount = newChunkCount + } + + if config.SpriteSize <= 0 { + config.SpriteSize = DefaultSpriteSize + } + slowSeek := false - chunkCount := rows * cols // For files with small duration / low frame count try to seek using frame number intead of seconds if videoFile.VideoStreamDuration < 5 || (0 < videoFile.FrameCount && videoFile.FrameCount <= int64(chunkCount)) { // some files can have FrameCount == 0, 
only use SlowSeek if duration < 5 @@ -71,9 +138,8 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageO VideoChecksum: videoChecksum, ImageOutputPath: imageOutputPath, VTTOutputPath: vttOutputPath, - Rows: rows, + Config: config, SlowSeek: slowSeek, - Columns: cols, g: &generate.Generator{ Encoder: instance.FFMpeg, FFMpegConfig: instance.Config, @@ -83,6 +149,40 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageO }, nil } +func calculateSpriteInterval(videoFile ffmpeg.VideoFile, config SpriteGeneratorConfig) float64 { + // If a custom sprite interval is provided, start with that + spriteInterval := config.SpriteInterval + + // If no custom interval is provided, calculate the interval based on the + // video duration and minimum sprite count + if spriteInterval <= 0 { + minSprites := config.MinimumSprites + if minSprites <= 0 { + panic("invalid configuration: MinimumSprites must be greater than 0 if SpriteInterval is not set") + } + + logger.Debugf("[generator] calculating sprite interval for video duration %.3fs with minimum sprites %d", videoFile.VideoStreamDuration, minSprites) + return videoFile.VideoStreamDuration / float64(minSprites) + } + + // Calculate the number of sprites that would be generated with the provided interval + spriteCount := int(math.Ceil(videoFile.VideoStreamDuration / spriteInterval)) + + // If the calculated sprite count is greater than the maximum, adjust the interval to meet the maximum + if config.MaximumSprites > 0 && spriteCount > int(config.MaximumSprites) { + spriteInterval = videoFile.VideoStreamDuration / float64(config.MaximumSprites) + logger.Debugf("[generator] provided sprite interval %.1fs results in %d sprites, which exceeds the maximum of %d, adjusting interval to %.1fs", config.SpriteInterval, spriteCount, config.MaximumSprites, spriteInterval) + } + + // If the calculated sprite count is less than the minimum, adjust the interval to meet the minimum + if 
config.MinimumSprites > 0 && spriteCount < int(config.MinimumSprites) { + spriteInterval = videoFile.VideoStreamDuration / float64(config.MinimumSprites) + logger.Debugf("[generator] provided sprite interval %.1fs results in %d sprites, which is less than the minimum of %d, adjusting interval to %.1fs", config.SpriteInterval, spriteCount, config.MinimumSprites, spriteInterval) + } + + return spriteInterval +} + func (g *SpriteGenerator) Generate() error { if err := g.generateSpriteImage(); err != nil { return err @@ -100,6 +200,8 @@ func (g *SpriteGenerator) generateSpriteImage() error { var images []image.Image + isPortrait := g.Info.VideoFile.Height > g.Info.VideoFile.Width + if !g.SlowSeek { logger.Infof("[generator] generating sprite image for %s", g.Info.VideoFile.Path) // generate `ChunkCount` thumbnails @@ -107,8 +209,7 @@ func (g *SpriteGenerator) generateSpriteImage() error { for i := 0; i < g.Info.ChunkCount; i++ { time := float64(i) * stepSize - - img, err := g.g.SpriteScreenshot(context.TODO(), g.Info.VideoFile.Path, time) + img, err := g.g.SpriteScreenshot(context.TODO(), g.Info.VideoFile.Path, time, g.Config.SpriteSize, isPortrait) if err != nil { return err } @@ -126,7 +227,7 @@ func (g *SpriteGenerator) generateSpriteImage() error { return errors.New("invalid frame number conversion") } - img, err := g.g.SpriteScreenshotSlow(context.TODO(), g.Info.VideoFile.Path, int(frame)) + img, err := g.g.SpriteScreenshotSlow(context.TODO(), g.Info.VideoFile.Path, int(frame), g.Config.SpriteSize) if err != nil { return err } @@ -158,7 +259,7 @@ func (g *SpriteGenerator) generateSpriteVTT() error { stepSize /= g.Info.FrameRate } - return g.g.SpriteVTT(context.TODO(), g.VTTOutputPath, g.ImageOutputPath, stepSize) + return g.g.SpriteVTT(context.TODO(), g.VTTOutputPath, g.ImageOutputPath, stepSize, g.Info.ChunkCount) } func (g *SpriteGenerator) imageExists() bool { diff --git a/internal/manager/init.go b/internal/manager/init.go index b388bd15c..b4af5eab7 100644 --- 
a/internal/manager/init.go +++ b/internal/manager/init.go @@ -78,7 +78,7 @@ func Initialize(cfg *config.Config, l *log.Logger) (*Manager, error) { } dlnaRepository := dlna.NewRepository(repo) - dlnaService := dlna.NewService(dlnaRepository, cfg, sceneServer) + dlnaService := dlna.NewService(dlnaRepository, cfg, sceneServer, repo.Scene, cfg.GetMinimumPlayPercent()) mgr := &Manager{ Config: cfg, diff --git a/internal/manager/manager.go b/internal/manager/manager.go index f4f3fa636..d3b91ec29 100644 --- a/internal/manager/manager.go +++ b/internal/manager/manager.go @@ -313,46 +313,6 @@ func (s *Manager) validateFFmpeg() error { return nil } -func (s *Manager) BackupDatabase(download bool) (string, string, error) { - var backupPath string - var backupName string - if download { - backupDir := s.Paths.Generated.Downloads - if err := fsutil.EnsureDir(backupDir); err != nil { - return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err) - } - f, err := os.CreateTemp(backupDir, "backup*.sqlite") - if err != nil { - return "", "", err - } - - backupPath = f.Name() - backupName = s.Database.DatabaseBackupPath("") - f.Close() - - // delete the temp file so that the backup operation can create it - if err := os.Remove(backupPath); err != nil { - return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err) - } - } else { - backupDir := s.Config.GetBackupDirectoryPathOrDefault() - if backupDir != "" { - if err := fsutil.EnsureDir(backupDir); err != nil { - return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err) - } - } - backupPath = s.Database.DatabaseBackupPath(backupDir) - backupName = filepath.Base(backupPath) - } - - err := s.Database.Backup(backupPath) - if err != nil { - return "", "", err - } - - return backupPath, backupName, nil -} - func (s *Manager) AnonymiseDatabase(download bool) (string, string, error) { var outPath string var outName string diff --git 
a/internal/manager/manager_tasks.go b/internal/manager/manager_tasks.go index 1e66433be..76938e9ff 100644 --- a/internal/manager/manager_tasks.go +++ b/internal/manager/manager_tasks.go @@ -74,6 +74,28 @@ func getScanPaths(inputPaths []string) []*config.StashConfig { return ret } +// Filters the input array for paths that are within the paths managed by stash +func filterStashPaths(inputPaths []string) []string { + if len(inputPaths) == 0 { + return inputPaths + } + + stashPaths := config.GetInstance().GetStashPaths() + + var ret []string + for _, p := range inputPaths { + s := stashPaths.GetStashFromDirPath(p) + if s == nil { + logger.Warnf("%s is not in the configured stash paths", p) + continue + } + + ret = append(ret, p) + } + + return ret +} + // ScanSubscribe subscribes to a notification that is triggered when a // scan or clean is complete. func (s *Manager) ScanSubscribe(ctx context.Context) <-chan bool { @@ -100,6 +122,8 @@ func (s *Manager) Scan(ctx context.Context, input ScanMetadataInput) (int, error return 0, err } + cfg := config.GetInstance() + scanner := &file.Scanner{ Repository: file.NewRepository(s.Repository), FileDecorators: []file.Decorator{ @@ -118,6 +142,11 @@ func (s *Manager) Scan(ctx context.Context, input ScanMetadataInput) (int, error }, FingerprintCalculator: &fingerprintCalculator{s.Config}, FS: &file.OsFS{}, + ZipFileExtensions: cfg.GetGalleryExtensions(), + // ScanFilters is set in ScanJob.Execute + // HandlerRequiredFilters is set in ScanJob.Execute + RootPaths: cfg.GetStashPaths().Paths(), + Rescan: input.Rescan, } scanJob := ScanJob{ @@ -285,6 +314,8 @@ type CleanMetadataInput struct { Paths []string `json:"paths"` // Do a dry run. 
Don't delete any files DryRun bool `json:"dryRun"` + + IgnoreZipFileContents bool `json:"ignoreZipFileContents"` } func (s *Manager) Clean(ctx context.Context, input CleanMetadataInput) int { @@ -402,7 +433,7 @@ type StashBoxBatchTagInput struct { ExcludeFields []string `json:"exclude_fields"` // Refresh items already tagged by StashBox if true. Only tag items with no StashBox tagging if false Refresh bool `json:"refresh"` - // If batch adding studios, should their parent studios also be created? + // If batch adding studios or tags, should their parent entities also be created? CreateParent bool `json:"createParent"` // IDs in stash of the items to update. // If set, names and stash_ids fields will be ignored. @@ -698,3 +729,137 @@ func (s *Manager) StashBoxBatchStudioTag(ctx context.Context, box *models.StashB return s.JobManager.Add(ctx, "Batch stash-box studio tag...", j) } + +func (s *Manager) batchTagTagsByIds(ctx context.Context, input StashBoxBatchTagInput, box *models.StashBox) ([]Task, error) { + var tasks []Task + + err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { + tagQuery := s.Repository.Tag + + for _, tagID := range input.Ids { + if id, err := strconv.Atoi(tagID); err == nil { + t, err := tagQuery.Find(ctx, id) + if err != nil { + return err + } + + if err := t.LoadStashIDs(ctx, tagQuery); err != nil { + return fmt.Errorf("loading tag stash ids: %w", err) + } + + hasStashID := t.StashIDs.ForEndpoint(box.Endpoint) != nil + if (input.Refresh && hasStashID) || (!input.Refresh && !hasStashID) { + tasks = append(tasks, &stashBoxBatchTagTagTask{ + tag: t, + createParent: input.CreateParent, + box: box, + excludedFields: input.ExcludeFields, + }) + } + } + } + return nil + }) + + return tasks, err +} + +func (s *Manager) batchTagTagsByNamesOrStashIds(input StashBoxBatchTagInput, box *models.StashBox) []Task { + var tasks []Task + + for i := range input.StashIDs { + stashID := input.StashIDs[i] + if len(stashID) > 0 { + tasks = 
append(tasks, &stashBoxBatchTagTagTask{ + stashID: &stashID, + createParent: input.CreateParent, + box: box, + excludedFields: input.ExcludeFields, + }) + } + } + + for i := range input.Names { + name := input.Names[i] + if len(name) > 0 { + tasks = append(tasks, &stashBoxBatchTagTagTask{ + name: &name, + createParent: input.CreateParent, + box: box, + excludedFields: input.ExcludeFields, + }) + } + } + + return tasks +} + +func (s *Manager) batchTagAllTags(ctx context.Context, input StashBoxBatchTagInput, box *models.StashBox) ([]Task, error) { + var tasks []Task + + err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { + tagQuery := s.Repository.Tag + var tags []*models.Tag + var err error + + tags, err = tagQuery.FindByStashIDStatus(ctx, input.Refresh, box.Endpoint) + + if err != nil { + return fmt.Errorf("error querying tags: %v", err) + } + + for _, t := range tags { + tasks = append(tasks, &stashBoxBatchTagTagTask{ + tag: t, + createParent: input.CreateParent, + box: box, + excludedFields: input.ExcludeFields, + }) + } + return nil + }) + + return tasks, err +} + +func (s *Manager) StashBoxBatchTagTag(ctx context.Context, box *models.StashBox, input StashBoxBatchTagInput) int { + j := job.MakeJobExec(func(ctx context.Context, progress *job.Progress) error { + logger.Infof("Initiating stash-box batch tag tag") + + var tasks []Task + var err error + + switch input.getBatchTagType(false) { + case batchTagByIds: + tasks, err = s.batchTagTagsByIds(ctx, input, box) + case batchTagByNamesOrStashIds: + tasks = s.batchTagTagsByNamesOrStashIds(input, box) + case batchTagAll: + tasks, err = s.batchTagAllTags(ctx, input, box) + } + + if err != nil { + return err + } + + if len(tasks) == 0 { + return nil + } + + progress.SetTotal(len(tasks)) + + logger.Infof("Starting stash-box batch operation for %d tags", len(tasks)) + + for _, task := range tasks { + progress.ExecuteTask(task.GetDescription(), func() { + task.Start(ctx) + }) + + progress.Increment() + } + 
+ return nil + }) + + return s.JobManager.Add(ctx, "Batch stash-box tag tag...", j) +} diff --git a/internal/manager/repository.go b/internal/manager/repository.go index 8d4ef1137..65514ed1d 100644 --- a/internal/manager/repository.go +++ b/internal/manager/repository.go @@ -10,17 +10,17 @@ import ( ) type SceneService interface { - Create(ctx context.Context, input *models.Scene, fileIDs []models.FileID, coverImage []byte) (*models.Scene, error) + Create(ctx context.Context, input models.CreateSceneInput) (*models.Scene, error) AssignFile(ctx context.Context, sceneID int, fileID models.FileID) error Merge(ctx context.Context, sourceIDs []int, destinationID int, fileDeleter *scene.FileDeleter, options scene.MergeOptions) error - Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile bool) error + Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error FindByIDs(ctx context.Context, ids []int, load ...scene.LoadRelationshipOption) ([]*models.Scene, error) sceneFingerprintGetter } type ImageService interface { - Destroy(ctx context.Context, image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error + Destroy(ctx context.Context, image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error DestroyZipImages(ctx context.Context, zipFile models.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) } @@ -31,7 +31,7 @@ type GalleryService interface { SetCover(ctx context.Context, g *models.Gallery, coverImageId int) error ResetCover(ctx context.Context, g *models.Gallery) error - Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error) + Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, 
deleteFile, destroyFileEntry bool) ([]*models.Image, error) ValidateImageGalleryChange(ctx context.Context, i *models.Image, updateIDs models.UpdateIDs) error @@ -39,7 +39,7 @@ type GalleryService interface { } type GroupService interface { - Create(ctx context.Context, group *models.Group, frontimageData []byte, backimageData []byte) error + Create(ctx context.Context, input *models.CreateGroupInput) error UpdatePartial(ctx context.Context, id int, updatedGroup models.GroupPartial, frontImage group.ImageInput, backImage group.ImageInput) (*models.Group, error) AddSubGroups(ctx context.Context, groupID int, subGroups []models.GroupIDDescription, insertIndex *int) error diff --git a/internal/manager/scan_stashignore_test.go b/internal/manager/scan_stashignore_test.go new file mode 100644 index 000000000..2745ff970 --- /dev/null +++ b/internal/manager/scan_stashignore_test.go @@ -0,0 +1,268 @@ +//go:build integration +// +build integration + +package manager + +import ( + "context" + "io/fs" + "os" + "path/filepath" + "testing" + + "github.com/stashapp/stash/pkg/file" + + // Necessary to register custom migrations. + _ "github.com/stashapp/stash/pkg/sqlite/migrations" +) + +// stashIgnorePathFilter wraps StashIgnoreFilter to implement PathFilter for testing. +// It provides a fixed library root for the filter. +type stashIgnorePathFilter struct { + filter *file.StashIgnoreFilter + libraryRoot string +} + +func (f *stashIgnorePathFilter) Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool { + return f.filter.Accept(ctx, path, info, f.libraryRoot, zipFilePath) +} + +// createTestFileOnDisk creates a file with some content. +func createTestFileOnDisk(t *testing.T, dir, name string) string { + t.Helper() + path := filepath.Join(dir, name) + if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil { + t.Fatalf("failed to create directory for %s: %v", path, err) + } + // Write some content so the file has a non-zero size. 
+ if err := os.WriteFile(path, []byte("test content for "+name), 0644); err != nil { + t.Fatalf("failed to create file %s: %v", path, err) + } + return path +} + +// createStashIgnoreFile creates a .stashignore file with the given content. +func createStashIgnoreFile(t *testing.T, dir, content string) { + t.Helper() + path := filepath.Join(dir, ".stashignore") + if err := os.WriteFile(path, []byte(content), 0644); err != nil { + t.Fatalf("failed to create .stashignore: %v", err) + } +} + +func TestScannerWithStashIgnore(t *testing.T) { + // Create temp directory structure. + tmpDir := t.TempDir() + + // Create test files. + createTestFileOnDisk(t, tmpDir, "video1.mp4") + createTestFileOnDisk(t, tmpDir, "video2.mp4") + createTestFileOnDisk(t, tmpDir, "ignore_me.mp4") + createTestFileOnDisk(t, tmpDir, "subdir/video3.mp4") + createTestFileOnDisk(t, tmpDir, "subdir/skip_this.mp4") + createTestFileOnDisk(t, tmpDir, "excluded_dir/video4.mp4") + createTestFileOnDisk(t, tmpDir, "temp/processing.mp4") + + // Create .stashignore file. + stashignore := `# Ignore specific files +ignore_me.mp4 +subdir/skip_this.mp4 + +# Ignore directories +excluded_dir/ +temp/ +` + createStashIgnoreFile(t, tmpDir, stashignore) + + // Create stashignore filter with library root. + stashIgnoreFilter := &stashIgnorePathFilter{ + filter: file.NewStashIgnoreFilter(), + libraryRoot: tmpDir, + } + + // Create scanner. 
+ scanner := &file.Scanner{ + ScanFilters: []file.PathFilter{stashIgnoreFilter}, + } + + testScenarios := []struct { + path string + accepted bool + }{ + {filepath.Join(tmpDir, "video1.mp4"), true}, + {filepath.Join(tmpDir, "video2.mp4"), true}, + {filepath.Join(tmpDir, "ignore_me.mp4"), false}, + {filepath.Join(tmpDir, "subdir/video3.mp4"), true}, + {filepath.Join(tmpDir, "subdir/skip_this.mp4"), false}, + {filepath.Join(tmpDir, "excluded_dir/video4.mp4"), false}, + {filepath.Join(tmpDir, "temp/processing.mp4"), false}, + } + + ctx := context.Background() + + for _, scenario := range testScenarios { + info, err := os.Stat(scenario.path) + if err != nil { + t.Fatalf("failed to stat file %s: %v", scenario.path, err) + } + accepted := scanner.AcceptEntry(ctx, scenario.path, info, "") + + if accepted != scenario.accepted { + t.Errorf("unexpected accept result for %s: expected %v, got %v", + scenario.path, scenario.accepted, accepted) + } + } +} + +func TestScannerWithNestedStashIgnore(t *testing.T) { + // Create temp directory structure. + tmpDir := t.TempDir() + + // Create test files. + createTestFileOnDisk(t, tmpDir, "root.mp4") + createTestFileOnDisk(t, tmpDir, "root.tmp") + createTestFileOnDisk(t, tmpDir, "subdir/sub.mp4") + createTestFileOnDisk(t, tmpDir, "subdir/sub.log") + createTestFileOnDisk(t, tmpDir, "subdir/sub.tmp") + + // Root .stashignore excludes *.tmp. + createStashIgnoreFile(t, tmpDir, "*.tmp\n") + + // Subdir .stashignore excludes *.log. + createStashIgnoreFile(t, filepath.Join(tmpDir, "subdir"), "*.log\n") + + // Create stashignore filter with library root. + stashIgnoreFilter := &stashIgnorePathFilter{ + filter: file.NewStashIgnoreFilter(), + libraryRoot: tmpDir, + } + + // Create scanner. 
+ scanner := &file.Scanner{ + ScanFilters: []file.PathFilter{stashIgnoreFilter}, + } + + testScenarios := []struct { + path string + accepted bool + }{ + {filepath.Join(tmpDir, "root.mp4"), true}, + {filepath.Join(tmpDir, "root.tmp"), false}, + {filepath.Join(tmpDir, "subdir/sub.mp4"), true}, + {filepath.Join(tmpDir, "subdir/sub.log"), false}, + {filepath.Join(tmpDir, "subdir/sub.tmp"), false}, + } + + ctx := context.Background() + + for _, scenario := range testScenarios { + info, err := os.Stat(scenario.path) + if err != nil { + t.Fatalf("failed to stat file %s: %v", scenario.path, err) + } + accepted := scanner.AcceptEntry(ctx, scenario.path, info, "") + + if accepted != scenario.accepted { + t.Errorf("unexpected accept result for %s: expected %v, got %v", + scenario.path, scenario.accepted, accepted) + } + } +} + +func TestScannerWithoutStashIgnore(t *testing.T) { + // Create temp directory structure (no .stashignore). + tmpDir := t.TempDir() + + // Create test files. + createTestFileOnDisk(t, tmpDir, "video1.mp4") + createTestFileOnDisk(t, tmpDir, "video2.mp4") + createTestFileOnDisk(t, tmpDir, "subdir/video3.mp4") + + // Create stashignore filter with library root (but no .stashignore file exists). + stashIgnoreFilter := &stashIgnorePathFilter{ + filter: file.NewStashIgnoreFilter(), + libraryRoot: tmpDir, + } + + // Create scanner. 
+ scanner := &file.Scanner{ + ScanFilters: []file.PathFilter{stashIgnoreFilter}, + } + + testScenarios := []struct { + path string + accepted bool + }{ + {filepath.Join(tmpDir, "video1.mp4"), true}, + {filepath.Join(tmpDir, "video2.mp4"), true}, + {filepath.Join(tmpDir, "subdir/video3.mp4"), true}, + } + + ctx := context.Background() + + for _, scenario := range testScenarios { + info, err := os.Stat(scenario.path) + if err != nil { + t.Fatalf("failed to stat file %s: %v", scenario.path, err) + } + accepted := scanner.AcceptEntry(ctx, scenario.path, info, "") + + if accepted != scenario.accepted { + t.Errorf("unexpected accept result for %s: expected %v, got %v", + scenario.path, scenario.accepted, accepted) + } + } +} + +func TestScannerWithNegationPattern(t *testing.T) { + // Create temp directory structure. + tmpDir := t.TempDir() + + // Create test files. + createTestFileOnDisk(t, tmpDir, "file1.tmp") + createTestFileOnDisk(t, tmpDir, "file2.tmp") + createTestFileOnDisk(t, tmpDir, "keep_this.tmp") + createTestFileOnDisk(t, tmpDir, "video.mp4") + + // Create .stashignore with negation. + stashignore := `*.tmp +!keep_this.tmp +` + createStashIgnoreFile(t, tmpDir, stashignore) + + // Create stashignore filter with library root. + stashIgnoreFilter := &stashIgnorePathFilter{ + filter: file.NewStashIgnoreFilter(), + libraryRoot: tmpDir, + } + + // Create scanner. 
+ scanner := &file.Scanner{ + ScanFilters: []file.PathFilter{stashIgnoreFilter}, + } + + testScenarios := []struct { + path string + accepted bool + }{ + {filepath.Join(tmpDir, "file1.tmp"), false}, + {filepath.Join(tmpDir, "file2.tmp"), false}, + {filepath.Join(tmpDir, "keep_this.tmp"), true}, + {filepath.Join(tmpDir, "video.mp4"), true}, + } + + ctx := context.Background() + + for _, scenario := range testScenarios { + info, err := os.Stat(scenario.path) + if err != nil { + t.Fatalf("failed to stat file %s: %v", scenario.path, err) + } + accepted := scanner.AcceptEntry(ctx, scenario.path, info, "") + + if accepted != scenario.accepted { + t.Errorf("unexpected accept result for %s: expected %v, got %v", + scenario.path, scenario.accepted, accepted) + } + } +} diff --git a/internal/manager/task/clean_generated.go b/internal/manager/task/clean_generated.go index 902989046..a59bda6d1 100644 --- a/internal/manager/task/clean_generated.go +++ b/internal/manager/task/clean_generated.go @@ -565,6 +565,7 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job. j.setProgressFromFilename(sceneHash[0:2], progress) // check if the scene exists + var walkErr error if err := j.Repository.WithReadTxn(ctx, func(ctx context.Context) error { var err error scenes, err = j.getScenesWithHash(ctx, sceneHash) @@ -575,15 +576,18 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job. if len(scenes) == 0 { j.logDelete("deleting unused marker directory: %s", sceneHash) j.deleteDir(path) - } else { - // get the markers now - for _, scene := range scenes { - thisMarkers, err := j.Repository.SceneMarker.FindBySceneID(ctx, scene.ID) - if err != nil { - return fmt.Errorf("error getting markers for scene: %v", err) - } - markers = append(markers, thisMarkers...) 
+ // #5911 - we've just deleted the directory, so skip it in the walk to avoid errors + walkErr = fs.SkipDir + return nil + } + + // get the markers now + for _, scene := range scenes { + thisMarkers, err := j.Repository.SceneMarker.FindBySceneID(ctx, scene.ID) + if err != nil { + return fmt.Errorf("error getting markers for scene: %v", err) } + markers = append(markers, thisMarkers...) } return nil @@ -591,7 +595,7 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job. logger.Error(err.Error()) } - return nil + return walkErr } filename := info.Name() diff --git a/internal/manager/task_clean.go b/internal/manager/task_clean.go index 9690cf4c8..67b7038b6 100644 --- a/internal/manager/task_clean.go +++ b/internal/manager/task_clean.go @@ -40,9 +40,10 @@ func (j *cleanJob) Execute(ctx context.Context, progress *job.Progress) error { } j.cleaner.Clean(ctx, file.CleanOptions{ - Paths: j.input.Paths, - DryRun: j.input.DryRun, - PathFilter: newCleanFilter(instance.Config), + Paths: j.input.Paths, + DryRun: j.input.DryRun, + IgnoreZipFileContents: j.input.IgnoreZipFileContents, + PathFilter: newCleanFilter(instance.Config), }, progress) if job.IsCancelled(ctx) { @@ -154,11 +155,12 @@ func newCleanFilter(c *config.Config) *cleanFilter { generatedPath: c.GetGeneratedPath(), videoExcludeRegex: generateRegexps(c.GetExcludes()), imageExcludeRegex: generateRegexps(c.GetImageExcludes()), + stashIgnoreFilter: file.NewStashIgnoreFilter(), }, } } -func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool { +func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool { // #1102 - clean anything in generated path generatedPath := f.generatedPath @@ -173,12 +175,18 @@ func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) } if stash == nil { - logger.Infof("%s not in any stash library directories. 
Marking to clean: \"%s\"", fileOrFolder, path) + logger.Infof("%s not in any stash library directories. Marking to clean: %q", fileOrFolder, path) return false } if fsutil.IsPathInDir(generatedPath, path) { - logger.Infof("%s is in generated path. Marking to clean: \"%s\"", fileOrFolder, path) + logger.Infof("%s is in generated path. Marking to clean: %q", fileOrFolder, path) + return false + } + + // Check .stashignore files, bounded to the library root. + if !f.stashIgnoreFilter.Accept(ctx, path, info, stash.Path, zipFilePath) { + logger.Infof("%s is excluded due to .stashignore. Marking to clean: %q", fileOrFolder, path) return false } @@ -300,7 +308,10 @@ func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *fil // only delete if the scene has no other files if len(scene.Files.List()) <= 1 { logger.Infof("Deleting scene %q since it has no other related files", scene.DisplayName()) - if err := mgr.SceneService.Destroy(ctx, scene, sceneFileDeleter, true, false); err != nil { + const deleteGenerated = true + const deleteFile = false + const destroyFileEntry = false + if err := mgr.SceneService.Destroy(ctx, scene, sceneFileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil { return err } @@ -421,7 +432,10 @@ func (h *cleanHandler) handleRelatedImages(ctx context.Context, fileDeleter *fil if len(i.Files.List()) <= 1 { logger.Infof("Deleting image %q since it has no other related files", i.DisplayName()) - if err := mgr.ImageService.Destroy(ctx, i, imageFileDeleter, true, false); err != nil { + const deleteGenerated = true + const deleteFile = false + const destroyFileEntry = false + if err := mgr.ImageService.Destroy(ctx, i, imageFileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil { return err } diff --git a/internal/manager/task_export.go b/internal/manager/task_export.go index 5f2897670..01bab9430 100644 --- a/internal/manager/task_export.go +++ b/internal/manager/task_export.go @@ -651,6 +651,7 @@ func 
(t *ExportTask) exportImage(ctx context.Context, wg *sync.WaitGroup, jobCha galleryReader := r.Gallery performerReader := r.Performer tagReader := r.Tag + imageReader := r.Image for s := range jobChan { imageHash := s.Checksum @@ -665,14 +666,17 @@ func (t *ExportTask) exportImage(ctx context.Context, wg *sync.WaitGroup, jobCha continue } - newImageJSON := image.ToBasicJSON(s) + newImageJSON, err := image.ToBasicJSON(ctx, imageReader, s) + if err != nil { + logger.Errorf("[images] <%s> error converting image to JSON: %v", imageHash, err) + continue + } // export files for _, f := range s.Files.List() { t.exportFile(f) } - var err error newImageJSON.Studio, err = image.GetStudioName(ctx, studioReader, s) if err != nil { logger.Errorf("[images] <%s> error getting image studio name: %v", imageHash, err) @@ -779,6 +783,7 @@ func (t *ExportTask) exportGallery(ctx context.Context, wg *sync.WaitGroup, jobC studioReader := r.Studio performerReader := r.Performer tagReader := r.Tag + galleryReader := r.Gallery galleryChapterReader := r.GalleryChapter for g := range jobChan { @@ -847,6 +852,12 @@ func (t *ExportTask) exportGallery(ctx context.Context, wg *sync.WaitGroup, jobC newGalleryJSON.Tags = tag.GetNames(tags) + newGalleryJSON.CustomFields, err = galleryReader.GetCustomFields(ctx, g.ID) + if err != nil { + logger.Errorf("[galleries] <%s> error getting gallery custom fields: %v", g.DisplayName(), err) + continue + } + if t.includeDependencies { if g.StudioID != nil { t.studios.IDs = sliceutil.AppendUnique(t.studios.IDs, *g.StudioID) diff --git a/internal/manager/task_generate.go b/internal/manager/task_generate.go index c28ffe55b..f2aab2b3c 100644 --- a/internal/manager/task_generate.go +++ b/internal/manager/task_generate.go @@ -29,6 +29,7 @@ type GenerateMetadataInput struct { // Generate transcodes even if not required ForceTranscodes bool `json:"forceTranscodes"` Phashes bool `json:"phashes"` + ImagePhashes bool `json:"imagePhashes"` InteractiveHeatmapsSpeeds bool 
`json:"interactiveHeatmapsSpeeds"` ClipPreviews bool `json:"clipPreviews"` ImageThumbnails bool `json:"imageThumbnails"` @@ -36,8 +37,14 @@ type GenerateMetadataInput struct { SceneIDs []string `json:"sceneIDs"` // marker ids to generate for MarkerIDs []string `json:"markerIDs"` + // image ids to generate for + ImageIDs []string `json:"imageIDs"` + // gallery ids to generate for + GalleryIDs []string `json:"galleryIDs"` // overwrite existing media Overwrite bool `json:"overwrite"` + // paths to run generate on, in addition to the other ID lists + Paths []string `json:"paths"` } type GeneratePreviewOptionsInput struct { @@ -73,6 +80,7 @@ type totalsGenerate struct { markers int64 transcodes int64 phashes int64 + imagePhashes int64 interactiveHeatmapSpeeds int64 clipPreviews int64 imageThumbnails int64 @@ -82,8 +90,9 @@ type totalsGenerate struct { func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error { var scenes []*models.Scene - var err error var markers []*models.SceneMarker + var images []*models.Image + var err error j.overwrite = j.input.Overwrite j.fileNamingAlgo = config.GetInstance().GetVideoFileNamingAlgorithm() @@ -105,6 +114,14 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error if err != nil { logger.Error(err.Error()) } + imageIDs, err := stringslice.StringSliceToIntSlice(j.input.ImageIDs) + if err != nil { + logger.Error(err.Error()) + } + galleryIDs, err := stringslice.StringSliceToIntSlice(j.input.GalleryIDs) + if err != nil { + logger.Error(err.Error()) + } g := &generate.Generator{ Encoder: instance.FFMpeg, @@ -118,8 +135,13 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error r := j.repository if err := r.WithReadTxn(ctx, func(ctx context.Context) error { qb := r.Scene - if len(j.input.SceneIDs) == 0 && len(j.input.MarkerIDs) == 0 { - j.queueTasks(ctx, g, queue) + if len(j.input.SceneIDs) == 0 && + len(j.input.MarkerIDs) == 0 && + len(j.input.ImageIDs) == 0 
&& + len(j.input.GalleryIDs) == 0 && + len(j.input.Paths) == 0 { + + j.queueTasks(ctx, g, nil, queue) } else { if len(j.input.SceneIDs) > 0 { scenes, err = qb.FindMany(ctx, sceneIDs) @@ -141,6 +163,38 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error j.queueMarkerJob(g, m, queue) } } + + if len(j.input.ImageIDs) > 0 { + images, err = r.Image.FindMany(ctx, imageIDs) + for _, i := range images { + if err := i.LoadFiles(ctx, r.Image); err != nil { + return err + } + + j.queueImageJob(g, i, queue) + } + } + + if len(j.input.GalleryIDs) > 0 { + for _, galleryID := range galleryIDs { + imgs, err := r.Image.FindByGalleryID(ctx, galleryID) + if err != nil { + return err + } + for _, img := range imgs { + if err := img.LoadFiles(ctx, r.Image); err != nil { + return err + } + + j.queueImageJob(g, img, queue) + } + } + } + + if len(j.input.Paths) > 0 { + paths := filterStashPaths(j.input.Paths) + j.queueTasks(ctx, g, paths, queue) + } } return nil @@ -172,14 +226,17 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error if j.input.Phashes { logMsg += fmt.Sprintf(" %d phashes", totals.phashes) } + if j.input.ImagePhashes { + logMsg += fmt.Sprintf(" %d image phashes", totals.imagePhashes) + } if j.input.InteractiveHeatmapsSpeeds { logMsg += fmt.Sprintf(" %d heatmaps & speeds", totals.interactiveHeatmapSpeeds) } if j.input.ClipPreviews { - logMsg += fmt.Sprintf(" %d Image Clip Previews", totals.clipPreviews) + logMsg += fmt.Sprintf(" %d image clip previews", totals.clipPreviews) } if j.input.ImageThumbnails { - logMsg += fmt.Sprintf(" %d Image Thumbnails", totals.imageThumbnails) + logMsg += fmt.Sprintf(" %d image thumbnails", totals.imageThumbnails) } if logMsg == "Generating" { logMsg = "Nothing selected to generate" @@ -231,17 +288,18 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error return nil } -func (j *GenerateJob) queueTasks(ctx context.Context, g *generate.Generator, queue 
chan<- Task) { +func (j *GenerateJob) queueTasks(ctx context.Context, g *generate.Generator, paths []string, queue chan<- Task) { j.totals = totalsGenerate{} - j.queueScenesTasks(ctx, g, queue) - j.queueImagesTasks(ctx, g, queue) + j.queueScenesTasks(ctx, g, paths, queue) + j.queueImagesTasks(ctx, g, paths, queue) } -func (j *GenerateJob) queueScenesTasks(ctx context.Context, g *generate.Generator, queue chan<- Task) { +func (j *GenerateJob) queueScenesTasks(ctx context.Context, g *generate.Generator, paths []string, queue chan<- Task) { const batchSize = 1000 findFilter := models.BatchFindFilter(batchSize) + sceneFilter := scene.FilterFromPaths(paths) r := j.repository @@ -250,7 +308,7 @@ func (j *GenerateJob) queueScenesTasks(ctx context.Context, g *generate.Generato return } - scenes, err := scene.Query(ctx, r.Scene, nil, findFilter) + scenes, err := scene.Query(ctx, r.Scene, sceneFilter, findFilter) if err != nil { logger.Errorf("Error encountered queuing files to scan: %s", err.Error()) return @@ -277,19 +335,20 @@ func (j *GenerateJob) queueScenesTasks(ctx context.Context, g *generate.Generato } } -func (j *GenerateJob) queueImagesTasks(ctx context.Context, g *generate.Generator, queue chan<- Task) { +func (j *GenerateJob) queueImagesTasks(ctx context.Context, g *generate.Generator, paths []string, queue chan<- Task) { const batchSize = 1000 findFilter := models.BatchFindFilter(batchSize) + imageFilter := image.FilterFromPaths(paths) r := j.repository - for more := j.input.ClipPreviews || j.input.ImageThumbnails; more; { + for more := j.input.ClipPreviews || j.input.ImageThumbnails || j.input.ImagePhashes; more; { if job.IsCancelled(ctx) { return } - images, err := image.Query(ctx, r.Image, nil, findFilter) + images, err := image.Query(ctx, r.Image, imageFilter, findFilter) if err != nil { logger.Errorf("Error encountered queuing files to scan: %s", err.Error()) return @@ -411,12 +470,13 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g 
*generate.Generator, } } - if j.input.Markers { + if j.input.Markers || j.input.MarkerImagePreviews || j.input.MarkerScreenshots { task := &GenerateMarkersTask{ repository: r, Scene: scene, Overwrite: j.overwrite, fileNamingAlgorithm: j.fileNamingAlgo, + VideoPreview: j.input.Markers, ImagePreview: j.input.MarkerImagePreviews, Screenshot: j.input.MarkerScreenshots, @@ -488,6 +548,9 @@ func (j *GenerateJob) queueMarkerJob(g *generate.Generator, marker *models.Scene Marker: marker, Overwrite: j.overwrite, fileNamingAlgorithm: j.fileNamingAlgo, + VideoPreview: j.input.Markers, + ImagePreview: j.input.MarkerImagePreviews, + Screenshot: j.input.MarkerScreenshots, generator: g, } j.totals.markers++ @@ -521,4 +584,23 @@ func (j *GenerateJob) queueImageJob(g *generate.Generator, image *models.Image, queue <- task } } + + if j.input.ImagePhashes { + // generate for all files in image + for _, f := range image.Files.List() { + if imageFile, ok := f.(*models.ImageFile); ok { + task := &GenerateImagePhashTask{ + repository: j.repository, + File: imageFile, + Overwrite: j.overwrite, + } + + if task.required() { + j.totals.imagePhashes++ + j.totals.tasks++ + queue <- task + } + } + } + } } diff --git a/internal/manager/task_generate_image_phash.go b/internal/manager/task_generate_image_phash.go new file mode 100644 index 000000000..a5c764df0 --- /dev/null +++ b/internal/manager/task_generate_image_phash.go @@ -0,0 +1,103 @@ +package manager + +import ( + "context" + "fmt" + + "github.com/stashapp/stash/pkg/hash/imagephash" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" +) + +type GenerateImagePhashTask struct { + repository models.Repository + File *models.ImageFile + Overwrite bool +} + +func (t *GenerateImagePhashTask) GetDescription() string { + return fmt.Sprintf("Generating phash for %s", t.File.Path) +} + +func (t *GenerateImagePhashTask) Start(ctx context.Context) { + if !t.required() { + return + } + + var hash int64 + set := false + + 
// #4393 - if there is a file with the same md5, we can use the same phash + // only use this if we're not overwriting + if !t.Overwrite { + existing, err := t.findExistingPhash(ctx) + if err != nil { + logger.Warnf("Error finding existing phash: %v", err) + } else if existing != nil { + logger.Infof("Using existing phash for %s", t.File.Path) + hash = existing.(int64) + set = true + } + } + + if !set { + generated, err := imagephash.Generate(instance.FFMpeg, t.File) + if err != nil { + logger.Errorf("Error generating phash for %q: %v", t.File.Path, err) + logErrorOutput(err) + return + } + + hash = int64(*generated) + } + + r := t.repository + if err := r.WithTxn(ctx, func(ctx context.Context) error { + t.File.Fingerprints = t.File.Fingerprints.AppendUnique(models.Fingerprint{ + Type: models.FingerprintTypePhash, + Fingerprint: hash, + }) + + return r.File.Update(ctx, t.File) + }); err != nil && ctx.Err() == nil { + logger.Errorf("Error setting phash: %v", err) + } +} + +func (t *GenerateImagePhashTask) findExistingPhash(ctx context.Context) (interface{}, error) { + r := t.repository + var ret interface{} + if err := r.WithReadTxn(ctx, func(ctx context.Context) error { + md5 := t.File.Fingerprints.Get(models.FingerprintTypeMD5) + + // find other files with the same md5 + files, err := r.File.FindByFingerprint(ctx, models.Fingerprint{ + Type: models.FingerprintTypeMD5, + Fingerprint: md5, + }) + if err != nil { + return fmt.Errorf("finding files by md5: %w", err) + } + + // find the first file with a phash + for _, file := range files { + if phash := file.Base().Fingerprints.Get(models.FingerprintTypePhash); phash != nil { + ret = phash + return nil + } + } + return nil + }); err != nil { + return nil, err + } + + return ret, nil +} + +func (t *GenerateImagePhashTask) required() bool { + if t.Overwrite { + return true + } + + return t.File.Fingerprints.Get(models.FingerprintTypePhash) == nil +} diff --git a/internal/manager/task_generate_markers.go 
b/internal/manager/task_generate_markers.go index cfe17926c..1da458ba8 100644 --- a/internal/manager/task_generate_markers.go +++ b/internal/manager/task_generate_markers.go @@ -18,6 +18,7 @@ type GenerateMarkersTask struct { Overwrite bool fileNamingAlgorithm models.HashAlgorithm + VideoPreview bool ImagePreview bool Screenshot bool @@ -115,9 +116,11 @@ func (t *GenerateMarkersTask) generateMarker(videoFile *models.VideoFile, scene g := t.generator - if err := g.MarkerPreviewVideo(context.TODO(), videoFile.Path, sceneHash, seconds, sceneMarker.EndSeconds, instance.Config.GetPreviewAudio()); err != nil { - logger.Errorf("[generator] failed to generate marker video: %v", err) - logErrorOutput(err) + if t.VideoPreview { + if err := g.MarkerPreviewVideo(context.TODO(), videoFile.Path, sceneHash, seconds, sceneMarker.EndSeconds, instance.Config.GetPreviewAudio()); err != nil { + logger.Errorf("[generator] failed to generate marker video: %v", err) + logErrorOutput(err) + } } if t.ImagePreview { @@ -164,7 +167,7 @@ func (t *GenerateMarkersTask) markerExists(sceneChecksum string, seconds int) bo return false } - videoExists := t.videoExists(sceneChecksum, seconds) + videoExists := !t.VideoPreview || t.videoExists(sceneChecksum, seconds) imageExists := !t.ImagePreview || t.imageExists(sceneChecksum, seconds) screenshotExists := !t.Screenshot || t.screenshotExists(sceneChecksum, seconds) diff --git a/internal/manager/task_generate_phash.go b/internal/manager/task_generate_phash.go index 54dc1a10b..5d35a8738 100644 --- a/internal/manager/task_generate_phash.go +++ b/internal/manager/task_generate_phash.go @@ -44,7 +44,7 @@ func (t *GeneratePhashTask) Start(ctx context.Context) { if !set { generated, err := videophash.Generate(instance.FFMpeg, t.File) if err != nil { - logger.Errorf("Error generating phash: %v", err) + logger.Errorf("Error generating phash for %q: %v", t.File.Path, err) logErrorOutput(err) return } diff --git a/internal/manager/task_generate_sprite.go 
b/internal/manager/task_generate_sprite.go index 0275830ab..c173147cd 100644 --- a/internal/manager/task_generate_sprite.go +++ b/internal/manager/task_generate_sprite.go @@ -34,7 +34,17 @@ func (t *GenerateSpriteTask) Start(ctx context.Context) { sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm) imagePath := instance.Paths.Scene.GetSpriteImageFilePath(sceneHash) vttPath := instance.Paths.Scene.GetSpriteVttFilePath(sceneHash) - generator, err := NewSpriteGenerator(*videoFile, sceneHash, imagePath, vttPath, 9, 9) + + cfg := DefaultSpriteGeneratorConfig + cfg.SpriteSize = instance.Config.GetSpriteScreenshotSize() + + if instance.Config.GetUseCustomSpriteInterval() { + cfg.MinimumSprites = instance.Config.GetMinimumSprites() + cfg.MaximumSprites = instance.Config.GetMaximumSprites() + cfg.SpriteInterval = instance.Config.GetSpriteInterval() + } + + generator, err := NewSpriteGenerator(*videoFile, sceneHash, imagePath, vttPath, cfg) if err != nil { logger.Errorf("error creating sprite generator: %s", err.Error()) diff --git a/internal/manager/task_scan.go b/internal/manager/task_scan.go index 6f7f34b3c..22849124c 100644 --- a/internal/manager/task_scan.go +++ b/internal/manager/task_scan.go @@ -2,13 +2,17 @@ package manager import ( "context" + "errors" "fmt" "io/fs" "path/filepath" "regexp" + "runtime/debug" + "sync" "time" "github.com/99designs/gqlgen/graphql/handler/lru" + "github.com/remeh/sizedwaitgroup" "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file/video" @@ -22,16 +26,18 @@ import ( "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/scene/generate" "github.com/stashapp/stash/pkg/txn" + "github.com/stashapp/stash/pkg/utils" ) -type scanner interface { - Scan(ctx context.Context, handlers []file.Handler, options file.ScanOptions, progressReporter file.ProgressReporter) -} - type ScanJob struct { - scanner scanner + scanner *file.Scanner input ScanMetadataInput 
subscriptions *subscriptionManager + + fileQueue chan file.ScannedFile + count int + + unmatchedCaptionFiles utils.MutexField[[]string] } func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error { @@ -55,22 +61,24 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error { start := time.Now() + nTasks := cfg.GetParallelTasksWithAutoDetection() + const taskQueueSize = 200000 - taskQueue := job.NewTaskQueue(ctx, progress, taskQueueSize, cfg.GetParallelTasksWithAutoDetection()) + taskQueue := job.NewTaskQueue(ctx, progress, taskQueueSize, nTasks) var minModTime time.Time if j.input.Filter != nil && j.input.Filter.MinModTime != nil { minModTime = *j.input.Filter.MinModTime } - j.scanner.Scan(ctx, getScanHandlers(j.input, taskQueue, progress), file.ScanOptions{ - Paths: paths, - ScanFilters: []file.PathFilter{newScanFilter(c, repo, minModTime)}, - ZipFileExtensions: cfg.GetGalleryExtensions(), - ParallelTasks: cfg.GetParallelTasksWithAutoDetection(), - HandlerRequiredFilters: []file.Filter{newHandlerRequiredFilter(cfg, repo)}, - Rescan: j.input.Rescan, - }, progress) + // HACK - these should really be set in the scanner initialization + j.scanner.FileHandlers = getScanHandlers(j.input, taskQueue, progress) + j.scanner.ScanFilters = []file.PathFilter{newScanFilter(c, repo, minModTime)} + j.scanner.HandlerRequiredFilters = []file.Filter{newHandlerRequiredFilter(cfg, repo)} + + logger.Infof("Starting scan of %d paths with %d parallel tasks", len(paths), nTasks) + + j.runJob(ctx, paths, nTasks, progress) taskQueue.Close() @@ -80,12 +88,336 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error { } elapsed := time.Since(start) - logger.Info(fmt.Sprintf("Scan finished (%s)", elapsed)) + logger.Infof("Scan finished (%s)", elapsed) j.subscriptions.notify() return nil } +func (j *ScanJob) runJob(ctx context.Context, paths []string, nTasks int, progress *job.Progress) { + var wg sync.WaitGroup + wg.Add(1) + + 
j.fileQueue = make(chan file.ScannedFile, scanQueueSize) + + go func() { + defer func() { + wg.Done() + + // handle panics in goroutine + if p := recover(); p != nil { + logger.Errorf("panic while queuing files for scan: %v", p) + logger.Errorf(string(debug.Stack())) + } + }() + + if err := j.queueFiles(ctx, paths, progress); err != nil { + if errors.Is(err, context.Canceled) { + return + } + + logger.Errorf("error queuing files for scan: %v", err) + return + } + + logger.Infof("Finished adding files to queue. %d files queued", j.count) + }() + + defer wg.Wait() + + j.processQueue(ctx, nTasks, progress) +} + +const scanQueueSize = 200000 + +func (j *ScanJob) queueFiles(ctx context.Context, paths []string, progress *job.Progress) error { + fs := &file.OsFS{} + + defer func() { + close(j.fileQueue) + + progress.AddTotal(j.count) + progress.Definite() + }() + + var err error + progress.ExecuteTask("Walking directory tree", func() { + for _, p := range paths { + err = file.SymWalk(fs, p, j.queueFileFunc(ctx, fs, nil, progress)) + if err != nil { + return + } + } + }) + + return err +} + +func (j *ScanJob) queueFileFunc(ctx context.Context, f models.FS, zipFile *file.ScannedFile, progress *job.Progress) fs.WalkDirFunc { + return func(path string, d fs.DirEntry, err error) error { + if err != nil { + // don't let errors prevent scanning + logger.Errorf("error scanning %s: %v", path, err) + return nil + } + + if err = ctx.Err(); err != nil { + return err + } + + info, err := d.Info() + if err != nil { + logger.Errorf("reading info for %q: %v", path, err) + return nil + } + + zipFilePath := "" + if zipFile != nil { + zipFilePath = zipFile.Path + } + + if !j.scanner.AcceptEntry(ctx, path, info, zipFilePath) { + if info.IsDir() { + logger.Debugf("Skipping directory %s", path) + return fs.SkipDir + } + + // we don't include caption files in the file scan, but we do need + // to handle them + if fsutil.MatchExtension(path, video.CaptionExts) { + fileRepo := 
j.scanner.Repository.File + matched := video.AssociateCaptions(ctx, path, j.scanner.Repository.TxnManager, fileRepo, fileRepo) + + if !matched { + logger.Debugf("No matching video file found for caption file %s", path) + j.unmatchedCaptionFiles.SetFunc(func(files []string) []string { + return append(files, path) + }) + } + + return nil + } + + logger.Debugf("Skipping file %s", path) + return nil + } + + size, err := file.GetFileSize(f, path, info) + if err != nil { + return err + } + + ff := file.ScannedFile{ + BaseFile: &models.BaseFile{ + DirEntry: models.DirEntry{ + ModTime: file.ModTime(info), + }, + Path: path, + Basename: filepath.Base(path), + Size: size, + }, + FS: f, + Info: info, + } + + if zipFile != nil { + ff.ZipFileID = &zipFile.ID + ff.ZipFile = zipFile + } + + if info.IsDir() { + // handle folders immediately + if err := j.handleFolder(ctx, ff, progress); err != nil { + if !errors.Is(err, context.Canceled) { + logger.Errorf("error processing %q: %v", path, err) + } + + // skip the directory since we won't be able to process the files anyway + return fs.SkipDir + } + + return nil + } + + // if zip file is present, we handle immediately + if zipFile != nil { + progress.ExecuteTask("Scanning "+path, func() { + // don't increment progress in zip files + if err := j.handleFile(ctx, ff, nil); err != nil { + if !errors.Is(err, context.Canceled) { + logger.Errorf("error processing %q: %v", path, err) + } + // don't return an error, just skip the file + } + }) + + return nil + } + + logger.Tracef("Queueing file %s for scanning", path) + j.fileQueue <- ff + + j.count++ + + return nil + } +} + +func (j *ScanJob) processQueue(ctx context.Context, parallelTasks int, progress *job.Progress) { + if parallelTasks < 1 { + parallelTasks = 1 + } + + wg := sizedwaitgroup.New(parallelTasks) + + func() { + defer func() { + wg.Wait() + + // handle panics in goroutine + if p := recover(); p != nil { + logger.Errorf("panic while scanning files: %v", p) + 
logger.Errorf(string(debug.Stack())) + } + }() + + for f := range j.fileQueue { + logger.Tracef("Processing queued file %s", f.Path) + if err := ctx.Err(); err != nil { + return + } + + wg.Add() + ff := f + go func() { + defer wg.Done() + j.processQueueItem(ctx, ff, progress) + }() + } + }() +} + +func (j *ScanJob) processQueueItem(ctx context.Context, f file.ScannedFile, progress *job.Progress) { + progress.ExecuteTask("Scanning "+f.Path, func() { + var err error + if f.Info.IsDir() { + err = j.handleFolder(ctx, f, progress) + } else { + err = j.handleFile(ctx, f, progress) + } + + if err != nil && !errors.Is(err, context.Canceled) { + logger.Errorf("error processing %q: %v", f.Path, err) + } + }) +} + +func (j *ScanJob) handleFolder(ctx context.Context, f file.ScannedFile, progress *job.Progress) error { + if progress != nil { + defer progress.Increment() + } + + _, err := j.scanner.ScanFolder(ctx, f) + if err != nil { + return err + } + + return nil +} + +func (j *ScanJob) handleFile(ctx context.Context, f file.ScannedFile, progress *job.Progress) error { + if progress != nil { + defer progress.Increment() + } + + r, err := j.scanner.ScanFile(ctx, f) + if err != nil { + return err + } + + // if this is a new video file, match it with any unmatched caption files + if r.New && len(j.unmatchedCaptionFiles.Get()) > 0 { + videoFile, _ := r.File.(*models.VideoFile) + + if videoFile != nil { + // try to match any unmatched caption files to this video file + for _, captionPath := range j.unmatchedCaptionFiles.Get() { + if video.MatchesCaption(videoFile.Path, captionPath) { + video.AssociateCaptions(ctx, captionPath, j.scanner.Repository.TxnManager, j.scanner.Repository.File, j.scanner.Repository.File) + + // remove from the unmatched list + j.unmatchedCaptionFiles.SetFunc(func(files []string) []string { + newFiles := make([]string, 0, len(files)-1) + for _, f := range files { + if f != captionPath { + newFiles = append(newFiles, f) + } + } + return newFiles + }) + } + } 
+ } + } + + // clean captions - scene handler handles this as well, but + // unchanged files aren't processed by the scene handler + if r.IsUnchanged() { + videoFile, _ := r.File.(*models.VideoFile) + + if videoFile != nil { + txnMgr := j.scanner.Repository.TxnManager + fileRepo := j.scanner.Repository.File + if err := txn.WithDatabase(ctx, txnMgr, func(ctx context.Context) error { + return video.CleanCaptions(ctx, videoFile, txnMgr, fileRepo) + }); err != nil { + logger.Errorf("Error cleaning captions: %v", err) + } + } + } + + // handle rename should have already handled the contents of the zip file + // so shouldn't need to scan it again. + // Only scan zip contents if the file is new, the fingerprint changed, + // or if a force rescan was requested. + + if j.scanner.IsZipFile(f.Info.Name()) && (r.New || r.FingerprintChanged || j.scanner.Rescan) { + ff := r.File + f.BaseFile = ff.Base() + + // scan zip files with a different context that is not cancellable + // cancelling while scanning zip file contents results in the scan + // contents being partially completed + zipCtx := context.WithoutCancel(ctx) + + if err := j.scanZipFile(zipCtx, f, progress); err != nil { + logger.Errorf("Error scanning zip file %q: %v", f.Path, err) + } + } else if r.Updated && j.scanner.IsZipFile(f.Info.Name()) { + logger.Debugf("Skipping zip file scan for %q: fingerprint unchanged", f.Path) + } + + return nil +} + +func (j *ScanJob) scanZipFile(ctx context.Context, f file.ScannedFile, progress *job.Progress) error { + zipFS, err := f.FS.OpenZip(f.Path, f.Size) + if err != nil { + if errors.Is(err, file.ErrNotReaderAt) { + // can't walk the zip file + // just return + logger.Debugf("Skipping zip file %q as it cannot be opened for walking", f.Path) + return nil + } + + return err + } + + defer zipFS.Close() + + return file.SymWalk(zipFS, f.Path, j.queueFileFunc(ctx, zipFS, &f, progress)) +} + type extensionConfig struct { vidExt []string imgExt []string @@ -117,11 +449,10 @@ type 
sceneFinder interface { // handlerRequiredFilter returns true if a File's handler needs to be executed despite the file not being updated. type handlerRequiredFilter struct { extensionConfig - txnManager txn.Manager - SceneFinder sceneFinder - ImageFinder fileCounter - GalleryFinder galleryFinder - CaptionUpdater video.CaptionUpdater + txnManager txn.Manager + SceneFinder sceneFinder + ImageFinder fileCounter + GalleryFinder galleryFinder FolderCache *lru.LRU[bool] @@ -137,7 +468,6 @@ func newHandlerRequiredFilter(c *config.Config, repo models.Repository) *handler SceneFinder: repo.Scene, ImageFinder: repo.Image, GalleryFinder: repo.Gallery, - CaptionUpdater: repo.File, FolderCache: lru.New[bool](processes * 2), videoFileNamingAlgorithm: c.GetVideoFileNamingAlgorithm(), } @@ -212,65 +542,35 @@ func (f *handlerRequiredFilter) Accept(ctx context.Context, ff models.File) bool } } - if isVideoFile { - // TODO - check if the cover exists - // hash := scene.GetHash(ff, f.videoFileNamingAlgorithm) - // ssPath := instance.Paths.Scene.GetScreenshotPath(hash) - // if exists, _ := fsutil.FileExists(ssPath); !exists { - // // if not, check if the file is a primary file for a scene - // scenes, err := f.SceneFinder.FindByPrimaryFileID(ctx, ff.Base().ID) - // if err != nil { - // // just ignore - // return false - // } - - // if len(scenes) > 0 { - // // if it is, then it needs to be re-generated - // return true - // } - // } - - // clean captions - scene handler handles this as well, but - // unchanged files aren't processed by the scene handler - videoFile, _ := ff.(*models.VideoFile) - if videoFile != nil { - if err := video.CleanCaptions(ctx, videoFile, f.txnManager, f.CaptionUpdater); err != nil { - logger.Errorf("Error cleaning captions: %v", err) - } - } - } - return false } type scanFilter struct { extensionConfig - txnManager txn.Manager - FileFinder models.FileFinder - CaptionUpdater video.CaptionUpdater + txnManager txn.Manager stashPaths config.StashConfigs 
generatedPath string videoExcludeRegex []*regexp.Regexp imageExcludeRegex []*regexp.Regexp minModTime time.Time + stashIgnoreFilter *file.StashIgnoreFilter } func newScanFilter(c *config.Config, repo models.Repository, minModTime time.Time) *scanFilter { return &scanFilter{ extensionConfig: newExtensionConfig(c), txnManager: repo.TxnManager, - FileFinder: repo.File, - CaptionUpdater: repo.File, stashPaths: c.GetStashPaths(), generatedPath: c.GetGeneratedPath(), videoExcludeRegex: generateRegexps(c.GetExcludes()), imageExcludeRegex: generateRegexps(c.GetImageExcludes()), minModTime: minModTime, + stashIgnoreFilter: file.NewStashIgnoreFilter(), } } -func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool { +func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool { if fsutil.IsPathInDir(f.generatedPath, path) { logger.Warnf("Skipping %q as it overlaps with the generated folder", path) return false @@ -287,19 +587,16 @@ func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) return false } + // Check .stashignore files, bounded to the library root. 
+ if !f.stashIgnoreFilter.Accept(ctx, path, info, s.Path, zipFilePath) { + logger.Debugf("Skipping %s due to .stashignore", path) + return false + } + isVideoFile := useAsVideo(path) isImageFile := useAsImage(path) isZipFile := fsutil.MatchExtension(path, f.zipExt) - // handle caption files - if fsutil.MatchExtension(path, video.CaptionExts) { - // we don't include caption files in the file scan, but we do need - // to handle them - video.AssociateCaptions(ctx, path, f.txnManager, f.FileFinder, f.CaptionUpdater) - - return false - } - if !info.IsDir() && !isVideoFile && !isImageFile && !isZipFile { logger.Debugf("Skipping %s as it does not match any known file extensions", path) return false @@ -363,8 +660,9 @@ func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progre &file.FilteredHandler{ Filter: file.FilterFunc(imageFileFilter), Handler: &image.ScanHandler{ - CreatorUpdater: r.Image, - GalleryFinder: r.Gallery, + CreatorUpdater: r.Image, + GalleryFinder: r.Gallery, + SceneFinderUpdater: r.Scene, ScanGenerator: &imageGenerators{ input: options, taskQueue: taskQueue, @@ -393,9 +691,10 @@ func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progre &file.FilteredHandler{ Filter: file.FilterFunc(videoFileFilter), Handler: &scene.ScanHandler{ - CreatorUpdater: r.Scene, - CaptionUpdater: r.File, - PluginCache: pluginCache, + CreatorUpdater: r.Scene, + GalleryFinderUpdater: r.Gallery, + CaptionUpdater: r.File, + PluginCache: pluginCache, ScanGenerator: &sceneGenerators{ input: options, taskQueue: taskQueue, @@ -463,6 +762,29 @@ func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f model } } + if t.ScanGenerateImagePhashes { + progress.AddTotal(1) + phashFn := func(ctx context.Context) { + mgr := GetInstance() + // Only generate phash for image files, not video files + if imageFile, ok := f.(*models.ImageFile); ok { + taskPhash := GenerateImagePhashTask{ + repository: mgr.Repository, + File: imageFile, + 
Overwrite: overwrite, + } + taskPhash.Start(ctx) + } + progress.Increment() + } + + if g.sequentialScanning { + phashFn(ctx) + } else { + g.taskQueue.Add(fmt.Sprintf("Generating phash for %s", path), phashFn) + } + } + return nil } diff --git a/internal/manager/task_stash_box_tag.go b/internal/manager/task_stash_box_tag.go index 37859ba61..264e7e96c 100644 --- a/internal/manager/task_stash_box_tag.go +++ b/internal/manager/task_stash_box_tag.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "strconv" + "strings" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/match" @@ -12,6 +13,7 @@ import ( "github.com/stashapp/stash/pkg/sliceutil" "github.com/stashapp/stash/pkg/stashbox" "github.com/stashapp/stash/pkg/studio" + "github.com/stashapp/stash/pkg/tag" ) // stashBoxBatchPerformerTagTask is used to tag or create performers from stash-box. @@ -275,6 +277,12 @@ func (t *stashBoxBatchStudioTagTask) getName() string { } func (t *stashBoxBatchStudioTagTask) Start(ctx context.Context) { + // Skip organized studios + if t.studio != nil && t.studio.Organized { + logger.Infof("Skipping organized studio %s", t.studio.Name) + return + } + studio, err := t.findStashBoxStudio(ctx) if err != nil { logger.Errorf("Error fetching studio data from stash-box: %v", err) @@ -523,3 +531,235 @@ func (t *stashBoxBatchStudioTagTask) processParentStudio(ctx context.Context, pa return err } } + +// stashBoxBatchTagTagTask is used to tag or create tags from stash-box. 
+// +// Two modes of operation: +// - Update existing tag: set tag to update from stash-box data +// - Create new tag: set name or stashID to search stash-box and create locally +type stashBoxBatchTagTagTask struct { + box *models.StashBox + name *string + stashID *string + tag *models.Tag + createParent bool + excludedFields []string +} + +func (t *stashBoxBatchTagTagTask) getName() string { + switch { + case t.name != nil: + return *t.name + case t.stashID != nil: + return *t.stashID + case t.tag != nil: + return t.tag.Name + default: + return "" + } +} + +func (t *stashBoxBatchTagTagTask) Start(ctx context.Context) { + scrapedTag, err := t.findStashBoxTag(ctx) + if err != nil { + logger.Errorf("Error fetching tag data from stash-box: %v", err) + return + } + + excluded := map[string]bool{} + for _, field := range t.excludedFields { + excluded[field] = true + } + + if scrapedTag != nil { + t.processMatchedTag(ctx, scrapedTag, excluded) + } else { + logger.Infof("No match found for %s", t.getName()) + } +} + +func (t *stashBoxBatchTagTagTask) GetDescription() string { + return fmt.Sprintf("Tagging tag %s from stash-box", t.getName()) +} + +func (t *stashBoxBatchTagTagTask) findStashBoxTag(ctx context.Context) (*models.ScrapedTag, error) { + var results []*models.ScrapedTag + var err error + + r := instance.Repository + + client := stashbox.NewClient(*t.box, stashbox.ExcludeTagPatterns(instance.Config.GetScraperExcludeTagPatterns())) + + nameQuery := "" + + switch { + case t.name != nil: + nameQuery = *t.name + results, err = client.QueryTag(ctx, *t.name) + case t.stashID != nil: + results, err = client.QueryTag(ctx, *t.stashID) + case t.tag != nil: + var remoteID string + if err := r.WithReadTxn(ctx, func(ctx context.Context) error { + if !t.tag.StashIDs.Loaded() { + err = t.tag.LoadStashIDs(ctx, r.Tag) + if err != nil { + return err + } + } + for _, id := range t.tag.StashIDs.List() { + if id.Endpoint == t.box.Endpoint { + remoteID = id.StashID + } + } + return 
nil + }); err != nil { + return nil, err + } + + if remoteID != "" { + results, err = client.QueryTag(ctx, remoteID) + } else { + nameQuery = t.tag.Name + results, err = client.QueryTag(ctx, t.tag.Name) + } + } + + if err != nil { + return nil, err + } + + if len(results) == 0 { + return nil, nil + } + + var result *models.ScrapedTag + + // QueryTag returns tags that partially match the name, so find the exact match if searching by name + if nameQuery != "" { + for _, r := range results { + if strings.EqualFold(r.Name, nameQuery) { + result = r + break + } + } + } else { + result = results[0] + } + + if result == nil { + return nil, nil + } + + if err := r.WithReadTxn(ctx, func(ctx context.Context) error { + return match.ScrapedTagHierarchy(ctx, r.Tag, result, t.box.Endpoint) + }); err != nil { + return nil, err + } + + return result, nil +} + +func (t *stashBoxBatchTagTagTask) processParentTag(ctx context.Context, parent *models.ScrapedTag, excluded map[string]bool) error { + if parent.StoredID == nil { + // Create new parent tag + newParentTag := parent.ToTag(t.box.Endpoint, excluded) + + r := instance.Repository + err := r.WithTxn(ctx, func(ctx context.Context) error { + qb := r.Tag + + if err := tag.ValidateCreate(ctx, *newParentTag, qb); err != nil { + return err + } + + if err := qb.Create(ctx, &models.CreateTagInput{Tag: newParentTag}); err != nil { + return err + } + + storedID := strconv.Itoa(newParentTag.ID) + parent.StoredID = &storedID + return nil + }) + if err != nil { + logger.Errorf("Failed to create parent tag %s: %v", parent.Name, err) + } else { + logger.Infof("Created parent tag %s", parent.Name) + } + return err + } + + // Parent already exists — nothing to update for categories + return nil +} + +func (t *stashBoxBatchTagTagTask) processMatchedTag(ctx context.Context, s *models.ScrapedTag, excluded map[string]bool) { + // Determine the tag ID to update — either from the task's tag or from the + // StoredID set by match.ScrapedTag (when batch 
adding by name and the tag + // already exists locally). + tagID := 0 + if t.tag != nil { + tagID = t.tag.ID + } else if s.StoredID != nil { + tagID, _ = strconv.Atoi(*s.StoredID) + } + + if s.Parent != nil && t.createParent { + if err := t.processParentTag(ctx, s.Parent, excluded); err != nil { + return + } + } + + if tagID > 0 { + r := instance.Repository + err := r.WithTxn(ctx, func(ctx context.Context) error { + qb := r.Tag + + existingStashIDs, err := qb.GetStashIDs(ctx, tagID) + if err != nil { + return err + } + + storedID := strconv.Itoa(tagID) + partial := s.ToPartial(storedID, t.box.Endpoint, excluded, existingStashIDs) + + if err := tag.ValidateUpdate(ctx, tagID, partial, qb); err != nil { + return err + } + + if _, err := qb.UpdatePartial(ctx, tagID, partial); err != nil { + return err + } + + return nil + }) + if err != nil { + logger.Errorf("Failed to update tag %s: %v", s.Name, err) + } else { + logger.Infof("Updated tag %s", s.Name) + } + } else if s.Name != "" { + // no existing tag, create a new one + newTag := s.ToTag(t.box.Endpoint, excluded) + + r := instance.Repository + err := r.WithTxn(ctx, func(ctx context.Context) error { + qb := r.Tag + + if err := tag.ValidateCreate(ctx, *newTag, qb); err != nil { + return err + } + + if err := qb.Create(ctx, &models.CreateTagInput{Tag: newTag}); err != nil { + return err + } + + return nil + }) + if err != nil { + logger.Errorf("Failed to create tag %s: %v", s.Name, err) + } else { + logger.Infof("Created tag %s", s.Name) + } + } +} diff --git a/internal/static/performer/NoName01.png b/internal/static/performer/NoName01.png deleted file mode 100644 index cdcba1db9..000000000 Binary files a/internal/static/performer/NoName01.png and /dev/null differ diff --git a/internal/static/performer/NoName02.png b/internal/static/performer/NoName02.png deleted file mode 100644 index 4687adc08..000000000 Binary files a/internal/static/performer/NoName02.png and /dev/null differ diff --git 
a/internal/static/performer/NoName02.svg b/internal/static/performer/NoName02.svg new file mode 100644 index 000000000..b5dbaf2b9 --- /dev/null +++ b/internal/static/performer/NoName02.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName03.png b/internal/static/performer/NoName03.png deleted file mode 100644 index 8ac0d13b7..000000000 Binary files a/internal/static/performer/NoName03.png and /dev/null differ diff --git a/internal/static/performer/NoName04.png b/internal/static/performer/NoName04.png deleted file mode 100644 index 41b55b816..000000000 Binary files a/internal/static/performer/NoName04.png and /dev/null differ diff --git a/internal/static/performer/NoName05.png b/internal/static/performer/NoName05.png deleted file mode 100644 index 8a49ba6d3..000000000 Binary files a/internal/static/performer/NoName05.png and /dev/null differ diff --git a/internal/static/performer/NoName05.svg b/internal/static/performer/NoName05.svg new file mode 100644 index 000000000..5a26d98d8 --- /dev/null +++ b/internal/static/performer/NoName05.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName06.png b/internal/static/performer/NoName06.png index 4359911ae..f2a8016e2 100644 Binary files a/internal/static/performer/NoName06.png and b/internal/static/performer/NoName06.png differ diff --git a/internal/static/performer/NoName07.png b/internal/static/performer/NoName07.png deleted file mode 100644 index 1bb5f6f82..000000000 Binary files a/internal/static/performer/NoName07.png and /dev/null differ diff --git a/internal/static/performer/NoName07.svg b/internal/static/performer/NoName07.svg new file mode 100644 index 000000000..ac90cf6d1 --- /dev/null +++ b/internal/static/performer/NoName07.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName08.png b/internal/static/performer/NoName08.png deleted file mode 100644 index 8ff7ff734..000000000 Binary files 
a/internal/static/performer/NoName08.png and /dev/null differ diff --git a/internal/static/performer/NoName09.png b/internal/static/performer/NoName09.png deleted file mode 100644 index 49b54b725..000000000 Binary files a/internal/static/performer/NoName09.png and /dev/null differ diff --git a/internal/static/performer/NoName09.svg b/internal/static/performer/NoName09.svg new file mode 100644 index 000000000..6009133a4 --- /dev/null +++ b/internal/static/performer/NoName09.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName10.png b/internal/static/performer/NoName10.png deleted file mode 100644 index a2b72043a..000000000 Binary files a/internal/static/performer/NoName10.png and /dev/null differ diff --git a/internal/static/performer/NoName11.png b/internal/static/performer/NoName11.png index 01034c2b0..45158b094 100644 Binary files a/internal/static/performer/NoName11.png and b/internal/static/performer/NoName11.png differ diff --git a/internal/static/performer/NoName12.png b/internal/static/performer/NoName12.png deleted file mode 100644 index 7f48ba39a..000000000 Binary files a/internal/static/performer/NoName12.png and /dev/null differ diff --git a/internal/static/performer/NoName12.svg b/internal/static/performer/NoName12.svg new file mode 100644 index 000000000..89843a774 --- /dev/null +++ b/internal/static/performer/NoName12.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName13.png b/internal/static/performer/NoName13.png deleted file mode 100644 index fdefafb59..000000000 Binary files a/internal/static/performer/NoName13.png and /dev/null differ diff --git a/internal/static/performer/NoName13.svg b/internal/static/performer/NoName13.svg new file mode 100644 index 000000000..fbbacaacf --- /dev/null +++ b/internal/static/performer/NoName13.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName14.png b/internal/static/performer/NoName14.png 
deleted file mode 100644 index 20a20a209..000000000 Binary files a/internal/static/performer/NoName14.png and /dev/null differ diff --git a/internal/static/performer/NoName14.svg b/internal/static/performer/NoName14.svg new file mode 100644 index 000000000..1d0231ab3 --- /dev/null +++ b/internal/static/performer/NoName14.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName15.png b/internal/static/performer/NoName15.png deleted file mode 100644 index cfc9d3a8c..000000000 Binary files a/internal/static/performer/NoName15.png and /dev/null differ diff --git a/internal/static/performer/NoName16.png b/internal/static/performer/NoName16.png deleted file mode 100644 index f54744280..000000000 Binary files a/internal/static/performer/NoName16.png and /dev/null differ diff --git a/internal/static/performer/NoName17.png b/internal/static/performer/NoName17.png deleted file mode 100644 index 068d1cf73..000000000 Binary files a/internal/static/performer/NoName17.png and /dev/null differ diff --git a/internal/static/performer/NoName17.svg b/internal/static/performer/NoName17.svg new file mode 100644 index 000000000..8df98d6c4 --- /dev/null +++ b/internal/static/performer/NoName17.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName18.png b/internal/static/performer/NoName18.png deleted file mode 100644 index 179d1d323..000000000 Binary files a/internal/static/performer/NoName18.png and /dev/null differ diff --git a/internal/static/performer/NoName19.png b/internal/static/performer/NoName19.png deleted file mode 100644 index 7349c26b2..000000000 Binary files a/internal/static/performer/NoName19.png and /dev/null differ diff --git a/internal/static/performer/NoName19.svg b/internal/static/performer/NoName19.svg new file mode 100644 index 000000000..a35c979d6 --- /dev/null +++ b/internal/static/performer/NoName19.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git 
a/internal/static/performer/NoName20.png b/internal/static/performer/NoName20.png deleted file mode 100644 index 86dd404bc..000000000 Binary files a/internal/static/performer/NoName20.png and /dev/null differ diff --git a/internal/static/performer/NoName21.png b/internal/static/performer/NoName21.png deleted file mode 100644 index 7bee5cdb6..000000000 Binary files a/internal/static/performer/NoName21.png and /dev/null differ diff --git a/internal/static/performer/NoName21.svg b/internal/static/performer/NoName21.svg new file mode 100644 index 000000000..2d7647c1d --- /dev/null +++ b/internal/static/performer/NoName21.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName22.png b/internal/static/performer/NoName22.png deleted file mode 100644 index d92384f93..000000000 Binary files a/internal/static/performer/NoName22.png and /dev/null differ diff --git a/internal/static/performer/NoName22.svg b/internal/static/performer/NoName22.svg new file mode 100644 index 000000000..c81400587 --- /dev/null +++ b/internal/static/performer/NoName22.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName23.png b/internal/static/performer/NoName23.png deleted file mode 100644 index f28ca89c8..000000000 Binary files a/internal/static/performer/NoName23.png and /dev/null differ diff --git a/internal/static/performer/NoName23.svg b/internal/static/performer/NoName23.svg new file mode 100644 index 000000000..3156c267f --- /dev/null +++ b/internal/static/performer/NoName23.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName24.png b/internal/static/performer/NoName24.png deleted file mode 100644 index 7b9bb42a2..000000000 Binary files a/internal/static/performer/NoName24.png and /dev/null differ diff --git a/internal/static/performer/NoName24.svg b/internal/static/performer/NoName24.svg new file mode 100644 index 000000000..3afd26f25 --- /dev/null +++ 
b/internal/static/performer/NoName24.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName25.png b/internal/static/performer/NoName25.png deleted file mode 100644 index 1f4864eed..000000000 Binary files a/internal/static/performer/NoName25.png and /dev/null differ diff --git a/internal/static/performer/NoName25.svg b/internal/static/performer/NoName25.svg new file mode 100644 index 000000000..ab040b917 --- /dev/null +++ b/internal/static/performer/NoName25.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName26.png b/internal/static/performer/NoName26.png deleted file mode 100644 index b63c47ab5..000000000 Binary files a/internal/static/performer/NoName26.png and /dev/null differ diff --git a/internal/static/performer/NoName26.svg b/internal/static/performer/NoName26.svg new file mode 100644 index 000000000..0c1679e16 --- /dev/null +++ b/internal/static/performer/NoName26.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName27.png b/internal/static/performer/NoName27.png deleted file mode 100644 index eb57d9cf4..000000000 Binary files a/internal/static/performer/NoName27.png and /dev/null differ diff --git a/internal/static/performer/NoName27.svg b/internal/static/performer/NoName27.svg new file mode 100644 index 000000000..4bf73d04a --- /dev/null +++ b/internal/static/performer/NoName27.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName28.png b/internal/static/performer/NoName28.png deleted file mode 100644 index c00fb15b5..000000000 Binary files a/internal/static/performer/NoName28.png and /dev/null differ diff --git a/internal/static/performer/NoName28.svg b/internal/static/performer/NoName28.svg new file mode 100644 index 000000000..5af3dbc38 --- /dev/null +++ b/internal/static/performer/NoName28.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName29.png 
b/internal/static/performer/NoName29.png index 21e9e27fa..8a53967a5 100644 Binary files a/internal/static/performer/NoName29.png and b/internal/static/performer/NoName29.png differ diff --git a/internal/static/performer/NoName30.png b/internal/static/performer/NoName30.png deleted file mode 100644 index ba968026d..000000000 Binary files a/internal/static/performer/NoName30.png and /dev/null differ diff --git a/internal/static/performer/NoName30.svg b/internal/static/performer/NoName30.svg new file mode 100644 index 000000000..c77b1163f --- /dev/null +++ b/internal/static/performer/NoName30.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName31.png b/internal/static/performer/NoName31.png deleted file mode 100644 index a4003fa75..000000000 Binary files a/internal/static/performer/NoName31.png and /dev/null differ diff --git a/internal/static/performer/NoName31.svg b/internal/static/performer/NoName31.svg new file mode 100644 index 000000000..5504136d2 --- /dev/null +++ b/internal/static/performer/NoName31.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName32.png b/internal/static/performer/NoName32.png deleted file mode 100644 index 0ca4aca17..000000000 Binary files a/internal/static/performer/NoName32.png and /dev/null differ diff --git a/internal/static/performer/NoName32.svg b/internal/static/performer/NoName32.svg new file mode 100644 index 000000000..ec72d0836 --- /dev/null +++ b/internal/static/performer/NoName32.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName33.png b/internal/static/performer/NoName33.png index 38ae2116c..025a1ff7f 100644 Binary files a/internal/static/performer/NoName33.png and b/internal/static/performer/NoName33.png differ diff --git a/internal/static/performer/NoName34.png b/internal/static/performer/NoName34.png deleted file mode 100644 index c40683098..000000000 Binary files 
a/internal/static/performer/NoName34.png and /dev/null differ diff --git a/internal/static/performer/NoName34.svg b/internal/static/performer/NoName34.svg new file mode 100644 index 000000000..49086ca8a --- /dev/null +++ b/internal/static/performer/NoName34.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName35.png b/internal/static/performer/NoName35.png index 92d9ad784..70dc81443 100644 Binary files a/internal/static/performer/NoName35.png and b/internal/static/performer/NoName35.png differ diff --git a/internal/static/performer/NoName36.png b/internal/static/performer/NoName36.png deleted file mode 100644 index 7796c8b63..000000000 Binary files a/internal/static/performer/NoName36.png and /dev/null differ diff --git a/internal/static/performer/NoName36.svg b/internal/static/performer/NoName36.svg new file mode 100644 index 000000000..b69ce0aa3 --- /dev/null +++ b/internal/static/performer/NoName36.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName37.png b/internal/static/performer/NoName37.png deleted file mode 100644 index c47f0abac..000000000 Binary files a/internal/static/performer/NoName37.png and /dev/null differ diff --git a/internal/static/performer/NoName37.svg b/internal/static/performer/NoName37.svg new file mode 100644 index 000000000..d0053cb58 --- /dev/null +++ b/internal/static/performer/NoName37.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName38.png b/internal/static/performer/NoName38.png deleted file mode 100644 index da9fa37c9..000000000 Binary files a/internal/static/performer/NoName38.png and /dev/null differ diff --git a/internal/static/performer/NoName38.svg b/internal/static/performer/NoName38.svg new file mode 100644 index 000000000..0131c7efe --- /dev/null +++ b/internal/static/performer/NoName38.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName39.png 
b/internal/static/performer/NoName39.png deleted file mode 100644 index a7921d01d..000000000 Binary files a/internal/static/performer/NoName39.png and /dev/null differ diff --git a/internal/static/performer/NoName39.svg b/internal/static/performer/NoName39.svg new file mode 100644 index 000000000..6cc5080ac --- /dev/null +++ b/internal/static/performer/NoName39.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName40.png b/internal/static/performer/NoName40.png deleted file mode 100644 index 0214efad4..000000000 Binary files a/internal/static/performer/NoName40.png and /dev/null differ diff --git a/internal/static/performer/attribution.md b/internal/static/performer/attribution.md new file mode 100644 index 000000000..3cb40ca04 --- /dev/null +++ b/internal/static/performer/attribution.md @@ -0,0 +1,34 @@ +NoName02.svg - "[Exotic dancer silhouette](https://freesvg.org/exotic-dancer-silhouette)" by OpenClipart-Vectors under CC0 License +NoName05.svg - "[Fashion girl silhouette](https://creazilla.com/media/silhouette/76433/fashion-girl)" by Creazilla under CC0 License +NoName06.png - "[Woman, Female, Girl](https://pixabay.com/illustrations/woman-female-girl-lady-silhouette-163525/)" by No-longer-here under Pixabay License +NoName07.svg - "[Woman Silhouette 11](https://openclipart.org/detail/14083/woman-silhouette-11)" by nicubunu under CC0 License +NoName09.svg - "[Girl, Pose, Posing](https://pixabay.com/vectors/girl-pose-posing-female-woman-311535/)" by Clker-Free-Vector-Images under CC0 License +NoName11.png - "[Alpha Mask, Silhouette, Woman](https://pixabay.com/illustrations/alpha-mask-silhouette-woman-girl-3072470/)" by Wolfgang Eckert under Pixabay License +NoName12.svg - "[Dance, Dancer, Dancing](https://pixabay.com/vectors/dance-dancer-dancing-female-girl-2023863/)" by OpenClipart-Vectors under CC0 License +NoName13.svg - "[Dress, Silhouette, Woman](https://pixabay.com/vectors/dress-silhouette-woman-female-148745/)" by 
OpenClipart-Vectors under CC0 License +NoName14.svg - "[Woman in long dress silhouette](https://freesvg.org/woman-in-long-dress-silhouette)" by OpenClipart-Vectors under CC0 License +NoName17.svg - "[Female Model silhouette](https://creazilla.com/media/silhouette/2495/female-model)" by Natasha Sinegina under CC-BY-4.0 +NoName19.svg - "[Female, Girl, Heel](https://pixabay.com/vectors/female-girl-heel-silhouette-woman-2023898/)" by OpenClipart-Vectors under CC0 License +NoName21.svg - "[Lady, Silhouette, Woman](https://pixabay.com/vectors/lady-silhouette-woman-pink-296698/)" by Clker-Free-Vector-Images under CC0 License +NoName22.svg - "[Female, Girl, Heel](https://pixabay.com/vectors/female-girl-heel-silhouette-woman-2023856/)" by OpenClipart-Vectors under CC0 License +NoName23.svg - "[Woman, Female, Figure](https://pixabay.com/vectors/woman-female-figure-slender-slim-149723/)" by OpenClipart-Vectors under CC0 License +NoName24.svg - "[Silhouette, Woman, Bunny](https://pixabay.com/illustrations/silhouette-woman-bunny-girl-female-3196716/)" by Wolfgang Eckert under Pixabay License +NoName25.svg - "[Female, Girl, Silhouette](https://pixabay.com/vectors/female-girl-silhouette-woman-2023857/)" by OpenClipart-Vectors under CC0 License +NoName26.svg - "[Female, Girl, Silhouette](https://pixabay.com/vectors/female-girl-silhouette-woman-2024047/)" by OpenClipart-Vectors under CC0 License +NoName27.svg - "[Woman, School Clothes, Uniform](https://pixabay.com/illustrations/woman-school-clothes-uniform-644569/)" by Silvia under Pixabay License +NoName28.svg - "[Girl, Woman, Feminine](https://pixabay.com/illustrations/girl-woman-feminine-sensual-1369733/)" by Calzas under Pixabay License +NoName29.png - "[Alpha Mask, Silhouette, Woman](https://pixabay.com/illustrations/alpha-mask-silhouette-woman-girl-3066005/)" by Wolfgang Eckert under Pixabay License +NoName30.svg - "[Architetto](https://openclipart.org/detail/68047)" by Emilie Rollandin under CC0 License +NoName31.svg - 
"[Model silhouette](https://creazilla.com/media/silhouette/1785/model)" by Bob Comix under CC-BY-4.0 License +NoName32.svg - "[Fashion, Female, Girl](https://pixabay.com/vectors/fashion-female-girl-heel-model-2023859/)" by OpenClipart-Vectors under CC0 License +NoName33.png - "[Silhouette Donna 6](https://www.publicdomainpictures.net/view-image.php?image=82268)" by Tammy Sue under CC0 License +NoName34.svg - "[Donna in piedi 01](https://openclipart.org/detail/33139)" by Emilie Rollandin under CC0 License +NoName35.png - "[Silhouette, Woman, Young](https://pixabay.com/illustrations/silhouette-woman-young-move-female-3104942/)" by Wolfgang Eckert under Pixabay License +NoName36.svg - "[Fashion Model silhouette](https://creazilla.com/media/silhouette/2506/fashion-model)" by Natasha Sinegina under CC-BY-4.0 License +NoName37.svg - "[Female, Woman, Standing](https://pixabay.com/vectors/female-woman-standing-confident-2816234/)" by Mohamed Hassan under Pixabay License +NoName38.svg - "[Dress, Silhouette, Women](https://pixabay.com/vectors/dress-silhouette-women-dance-lady-3360422/)" by Mohamed Hassan under Pixabay License +NoName39.svg - "[Woman, Female, Lady](https://pixabay.com/illustrations/woman-female-lady-business-woman-220260/)" by No-longer-here under Pixabay License + +CC0 License: https://creativecommons.org/publicdomain/zero/1.0/ +CC-BY-4.0 License: https://creativecommons.org/licenses/by/4.0/ +Pixabay License: https://pixabay.com/service/license-summary/ \ No newline at end of file diff --git a/internal/static/performer_male/Male01.png b/internal/static/performer_male/Male01.png deleted file mode 100644 index 8a486299a..000000000 Binary files a/internal/static/performer_male/Male01.png and /dev/null differ diff --git a/internal/static/performer_male/Male01.svg b/internal/static/performer_male/Male01.svg new file mode 100644 index 000000000..72599423a --- /dev/null +++ b/internal/static/performer_male/Male01.svg @@ -0,0 +1 @@ + \ No newline at end of file diff 
--git a/internal/static/performer_male/Male02.png b/internal/static/performer_male/Male02.png deleted file mode 100644 index 673b120eb..000000000 Binary files a/internal/static/performer_male/Male02.png and /dev/null differ diff --git a/internal/static/performer_male/Male02.svg b/internal/static/performer_male/Male02.svg new file mode 100644 index 000000000..1f7f4072e --- /dev/null +++ b/internal/static/performer_male/Male02.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer_male/Male03.png b/internal/static/performer_male/Male03.png deleted file mode 100644 index 1814d05bb..000000000 Binary files a/internal/static/performer_male/Male03.png and /dev/null differ diff --git a/internal/static/performer_male/Male03.svg b/internal/static/performer_male/Male03.svg new file mode 100644 index 000000000..60e0857ce --- /dev/null +++ b/internal/static/performer_male/Male03.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer_male/Male04.png b/internal/static/performer_male/Male04.png deleted file mode 100644 index 9dd1f0bcc..000000000 Binary files a/internal/static/performer_male/Male04.png and /dev/null differ diff --git a/internal/static/performer_male/Male04.svg b/internal/static/performer_male/Male04.svg new file mode 100644 index 000000000..7e7e29fae --- /dev/null +++ b/internal/static/performer_male/Male04.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer_male/Male05.png b/internal/static/performer_male/Male05.png deleted file mode 100644 index 35231f914..000000000 Binary files a/internal/static/performer_male/Male05.png and /dev/null differ diff --git a/internal/static/performer_male/Male05.svg b/internal/static/performer_male/Male05.svg new file mode 100644 index 000000000..b41f8d1cd --- /dev/null +++ b/internal/static/performer_male/Male05.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer_male/Male06.png 
b/internal/static/performer_male/Male06.png deleted file mode 100644 index 9530d274a..000000000 Binary files a/internal/static/performer_male/Male06.png and /dev/null differ diff --git a/internal/static/performer_male/Male06.svg b/internal/static/performer_male/Male06.svg new file mode 100644 index 000000000..14578c380 --- /dev/null +++ b/internal/static/performer_male/Male06.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer_male/attribution.md b/internal/static/performer_male/attribution.md new file mode 100644 index 000000000..119d73757 --- /dev/null +++ b/internal/static/performer_male/attribution.md @@ -0,0 +1,8 @@ +Male01.svg - "[Man Silhouette](https://freesvg.org/1528398040)" by "OpenClipart" under CC0 License +Male02.svg - "[Male pose silhouette](https://freesvg.org/male-pose-silhouette)" by OpenClipart under CC0 License +Male03.svg - "[Bald man walking in a suit silhouette vector image](https://freesvg.org/bald-man-walking-in-a-suit-silhouette-vector-image)" by OpenClipart under CC0 License +Male04.svg - "[Man silhouette vector clip art](https://freesvg.org/man-silhouette-vector-clip-art) by OpenClipart under CC0 License +Male05.svg - "[Man, Walking, Confident](https://pixabay.com/vectors/man-walking-confident-silhouette-2759950/)" by Mohamed Hassan under Pixabay License + +CC0 Licence: https://creativecommons.org/public-domain/cc0/ +Pixabay License: https://pixabay.com/service/license-summary/ \ No newline at end of file diff --git a/pkg/ffmpeg/codec_hardware.go b/pkg/ffmpeg/codec_hardware.go index 86fe56bde..66480c5bb 100644 --- a/pkg/ffmpeg/codec_hardware.go +++ b/pkg/ffmpeg/codec_hardware.go @@ -185,6 +185,12 @@ func (f *FFMpeg) hwCanFullHWTranscode(ctx context.Context, codec VideoCodec, vf // Prepend input for hardware encoding only func (f *FFMpeg) hwDeviceInit(args Args, toCodec VideoCodec, fullhw bool) Args { + // check for custom /dev/dri device #6435 + driDevice := os.Getenv("STASH_HW_DRI_DEVICE") + if 
driDevice == "" { + driDevice = "/dev/dri/renderD128" + } + switch toCodec { case VideoCodecN264, VideoCodecN264H: @@ -201,7 +207,7 @@ func (f *FFMpeg) hwDeviceInit(args Args, toCodec VideoCodec, fullhw bool) Args { case VideoCodecV264, VideoCodecVVP9: args = append(args, "-vaapi_device") - args = append(args, "/dev/dri/renderD128") + args = append(args, driDevice) if fullhw { args = append(args, "-hwaccel") args = append(args, "vaapi") @@ -363,8 +369,11 @@ func (f *FFMpeg) hwApplyFullHWFilter(args VideoFilter, codec VideoCodec, fullhw args = args.Append("scale_qsv=format=nv12") } case VideoCodecRK264: - // For Rockchip, no extra mapping here. If there is no scale filter, - // leave frames in DRM_PRIME for the encoder. + // Full-hw decode on 10-bit sources often produces DRM_PRIME with sw_pix_fmt=nv15. + // h264_rkmpp does NOT accept nv15, so we must force a conversion to nv12 + if fullhw { + args = args.Append("scale_rkrga=w=iw:h=ih:format=nv12") + } } return args @@ -399,7 +408,7 @@ func (f *FFMpeg) hwApplyScaleTemplate(sargs string, codec VideoCodec, match []in // by downloading the scaled frame to system RAM and re-uploading it. // The filter chain below uses a zero-copy approach, passing the hardware-scaled // frame directly to the encoder. This is more efficient but may be less stable. - template = "scale_rkrga=$value" + template = "scale_rkrga=$value:format=nv12" default: return VideoFilter(sargs) } diff --git a/pkg/ffmpeg/transcoder/screenshot.go b/pkg/ffmpeg/transcoder/screenshot.go index c3343d594..c65f23941 100644 --- a/pkg/ffmpeg/transcoder/screenshot.go +++ b/pkg/ffmpeg/transcoder/screenshot.go @@ -9,7 +9,11 @@ type ScreenshotOptions struct { // Quality is the quality scale. See https://ffmpeg.org/ffmpeg.html#Main-options Quality int + // Width is the width to scale the screenshot to. If 0, no scaling will be applied. Width int + // Height is the height to scale the screenshot to. If 0, no scaling will be applied. + // Not used if Width is set. 
+ Height int // Verbosity is the logging verbosity. Defaults to LogLevelError if not set. Verbosity ffmpeg.LogLevel @@ -70,6 +74,9 @@ func ScreenshotTime(input string, t float64, options ScreenshotOptions) ffmpeg.A if options.Width > 0 { vf = vf.ScaleWidth(options.Width) args = args.VideoFilter(vf) + } else if options.Height > 0 { + vf = vf.ScaleHeight(options.Height) + args = args.VideoFilter(vf) } args = args.AppendArgs(options.OutputType) diff --git a/pkg/file/clean.go b/pkg/file/clean.go index 53b2e0612..369600f4c 100644 --- a/pkg/file/clean.go +++ b/pkg/file/clean.go @@ -33,6 +33,11 @@ type cleanJob struct { type CleanOptions struct { Paths []string + // IgnoreZipFileContents will skip checking the contents of zip files when determining whether to clean a file. + // This can significantly speed up the clean process, but will potentially miss removed files within zip files. + // Where users do not modify zip files contents directly, this should be safe to use. + IgnoreZipFileContents bool + // Do a dry run. 
Don't delete any files DryRun bool @@ -174,13 +179,16 @@ func (j *cleanJob) assessFiles(ctx context.Context, toDelete *deleteSet) error { more := true r := j.Repository + + includeZipContents := !j.options.IgnoreZipFileContents + if err := r.WithReadTxn(ctx, func(ctx context.Context) error { for more { if job.IsCancelled(ctx) { return nil } - files, err := r.File.FindAllInPaths(ctx, j.options.Paths, batchSize, offset) + files, err := r.File.FindAllInPaths(ctx, j.options.Paths, includeZipContents, batchSize, offset) if err != nil { return fmt.Errorf("error querying for files: %w", err) } @@ -258,6 +266,8 @@ func (j *cleanJob) assessFolders(ctx context.Context, toDelete *deleteSet) error offset := 0 progress := j.progress + includeZipContents := !j.options.IgnoreZipFileContents + more := true r := j.Repository if err := r.WithReadTxn(ctx, func(ctx context.Context) error { @@ -266,7 +276,7 @@ func (j *cleanJob) assessFolders(ctx context.Context, toDelete *deleteSet) error return nil } - folders, err := r.Folder.FindAllInPaths(ctx, j.options.Paths, batchSize, offset) + folders, err := r.Folder.FindAllInPaths(ctx, j.options.Paths, includeZipContents, batchSize, offset) if err != nil { return fmt.Errorf("error querying for folders: %w", err) } @@ -348,8 +358,14 @@ func (j *cleanJob) shouldClean(ctx context.Context, f models.File) bool { // run through path filter, if returns false then the file should be cleaned filter := j.options.PathFilter + // need to get the zip file path if present + zipFilePath := "" + if f.Base().ZipFile != nil { + zipFilePath = f.Base().ZipFile.Base().Path + } + // don't log anything - assume filter will have logged the reason - return !filter.Accept(ctx, path, info) + return !filter.Accept(ctx, path, info, zipFilePath) } func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *models.Folder) bool { @@ -387,8 +403,14 @@ func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *models.Folder) bool // run through path filter, if returns 
false then the file should be cleaned filter := j.options.PathFilter + // need to get the zip file path if present + zipFilePath := "" + if f.ZipFile != nil { + zipFilePath = f.ZipFile.Base().Path + } + // don't log anything - assume filter will have logged the reason - return !filter.Accept(ctx, path, info) + return !filter.Accept(ctx, path, info, zipFilePath) } func (j *cleanJob) deleteFile(ctx context.Context, fileID models.FileID, fn string) { diff --git a/pkg/file/file.go b/pkg/file/file.go index 407949ba1..b93083b35 100644 --- a/pkg/file/file.go +++ b/pkg/file/file.go @@ -3,6 +3,10 @@ package file import ( "context" + "fmt" + "io/fs" + "os" + "time" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" @@ -35,3 +39,23 @@ func (r *Repository) WithReadTxn(ctx context.Context, fn txn.TxnFunc) error { func (r *Repository) WithDB(ctx context.Context, fn txn.TxnFunc) error { return txn.WithDatabase(ctx, r.TxnManager, fn) } + +// ModTime returns the modification time truncated to seconds. +func ModTime(info fs.FileInfo) time.Time { + // truncate to seconds, since we don't store beyond that in the database + return info.ModTime().Truncate(time.Second) +} + +// GetFileSize gets the size of the file, taking into account symlinks. 
+func GetFileSize(f models.FS, path string, info fs.FileInfo) (int64, error) { + // #2196/#3042 - replace size with target size if file is a symlink + if info.Mode()&os.ModeSymlink == os.ModeSymlink { + targetInfo, err := f.Stat(path) + if err != nil { + return 0, fmt.Errorf("reading info for symlink %q: %w", path, err) + } + return targetInfo.Size(), nil + } + + return info.Size(), nil +} diff --git a/pkg/file/folder.go b/pkg/file/folder.go index fe260c155..249f73a7a 100644 --- a/pkg/file/folder.go +++ b/pkg/file/folder.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "path/filepath" + "slices" "strings" "time" @@ -12,8 +13,9 @@ import ( ) // GetOrCreateFolderHierarchy gets the folder for the given path, or creates a folder hierarchy for the given path if one if no existing folder is found. -// Does not create any folders in the file system -func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreator, path string) (*models.Folder, error) { +// Creates folder entries for each level of the hierarchy that doesn't already exist, up to the provided root paths. +// Does not create any folders in the file system. 
+func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreator, path string, rootPaths []string) (*models.Folder, error) { // get or create folder hierarchy // assume case sensitive when searching for the folder const caseSensitive = true @@ -23,17 +25,33 @@ func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreat } if folder == nil { - parentPath := filepath.Dir(path) - parent, err := GetOrCreateFolderHierarchy(ctx, fc, parentPath) - if err != nil { - return nil, err + var parentID *models.FolderID + + if !slices.Contains(rootPaths, path) { + parentPath := filepath.Dir(path) + + // safety check - don't allow parent path to be the same as the current path, + // otherwise we could end up in an infinite loop + if parentPath == path { + // #6618 - log a warning and return nil for the parent ID, + // which will cause the folder to be created with no parent + logger.Warnf("parent path is the same as the current path: %s", path) + return nil, nil + } + + parent, err := GetOrCreateFolderHierarchy(ctx, fc, parentPath, rootPaths) + if err != nil { + return nil, err + } + + parentID = &parent.ID } now := time.Now() folder = &models.Folder{ Path: path, - ParentFolderID: &parent.ID, + ParentFolderID: parentID, DirEntry: models.DirEntry{ // leave mod time empty for now - it will be updated when the folder is scanned }, @@ -41,6 +59,8 @@ func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreat UpdatedAt: now, } + logger.Infof("%s doesn't exist. 
Creating new folder entry...", path) + if err = fc.Create(ctx, folder); err != nil { return nil, fmt.Errorf("creating folder %s: %w", path, err) } @@ -49,12 +69,18 @@ func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreat return folder, nil } -func transferZipHierarchy(ctx context.Context, folderStore models.FolderReaderWriter, files models.FileFinderUpdater, zipFileID models.FileID, oldPath string, newPath string) error { - if err := transferZipFolderHierarchy(ctx, folderStore, zipFileID, oldPath, newPath); err != nil { +type zipHierarchyMover struct { + folderStore models.FolderReaderWriter + files models.FileFinderUpdater + rootPaths []string +} + +func (m zipHierarchyMover) transferZipHierarchy(ctx context.Context, zipFileID models.FileID, oldPath string, newPath string) error { + if err := m.transferZipFolderHierarchy(ctx, zipFileID, oldPath, newPath); err != nil { return fmt.Errorf("moving folder hierarchy for file %s: %w", oldPath, err) } - if err := transferZipFileEntries(ctx, folderStore, files, zipFileID, oldPath, newPath); err != nil { + if err := m.transferZipFileEntries(ctx, zipFileID, oldPath, newPath); err != nil { return fmt.Errorf("moving zip file contents for file %s: %w", oldPath, err) } @@ -63,8 +89,8 @@ func transferZipHierarchy(ctx context.Context, folderStore models.FolderReaderWr // transferZipFolderHierarchy creates the folder hierarchy for zipFileID under newPath, and removes // ZipFileID from folders under oldPath. 
-func transferZipFolderHierarchy(ctx context.Context, folderStore models.FolderReaderWriter, zipFileID models.FileID, oldPath string, newPath string) error { - zipFolders, err := folderStore.FindByZipFileID(ctx, zipFileID) +func (m zipHierarchyMover) transferZipFolderHierarchy(ctx context.Context, zipFileID models.FileID, oldPath string, newPath string) error { + zipFolders, err := m.folderStore.FindByZipFileID(ctx, zipFileID) if err != nil { return err } @@ -83,7 +109,7 @@ func transferZipFolderHierarchy(ctx context.Context, folderStore models.FolderRe } newZfPath := filepath.Join(newPath, relZfPath) - newFolder, err := GetOrCreateFolderHierarchy(ctx, folderStore, newZfPath) + newFolder, err := GetOrCreateFolderHierarchy(ctx, m.folderStore, newZfPath, m.rootPaths) if err != nil { return err } @@ -91,14 +117,14 @@ func transferZipFolderHierarchy(ctx context.Context, folderStore models.FolderRe // add ZipFileID to new folder logger.Debugf("adding zip file %s to folder %s", zipFileID, newFolder.Path) newFolder.ZipFileID = &zipFileID - if err = folderStore.Update(ctx, newFolder); err != nil { + if err = m.folderStore.Update(ctx, newFolder); err != nil { return err } // remove ZipFileID from old folder logger.Debugf("removing zip file %s from folder %s", zipFileID, oldFolder.Path) oldFolder.ZipFileID = nil - if err = folderStore.Update(ctx, oldFolder); err != nil { + if err = m.folderStore.Update(ctx, oldFolder); err != nil { return err } } @@ -106,9 +132,9 @@ func transferZipFolderHierarchy(ctx context.Context, folderStore models.FolderRe return nil } -func transferZipFileEntries(ctx context.Context, folders models.FolderFinderCreator, files models.FileFinderUpdater, zipFileID models.FileID, oldPath, newPath string) error { +func (m zipHierarchyMover) transferZipFileEntries(ctx context.Context, zipFileID models.FileID, oldPath, newPath string) error { // move contained files if file is a zip file - zipFiles, err := files.FindByZipFileID(ctx, zipFileID) + zipFiles, err 
:= m.files.FindByZipFileID(ctx, zipFileID) if err != nil { return fmt.Errorf("finding contained files in file %s: %w", oldPath, err) } @@ -129,7 +155,7 @@ func transferZipFileEntries(ctx context.Context, folders models.FolderFinderCrea newZfDir := filepath.Join(newPath, relZfDir) // folder should have been created by transferZipFolderHierarchy - newZfFolder, err := GetOrCreateFolderHierarchy(ctx, folders, newZfDir) + newZfFolder, err := GetOrCreateFolderHierarchy(ctx, m.folderStore, newZfDir, m.rootPaths) if err != nil { return fmt.Errorf("getting or creating folder hierarchy: %w", err) } @@ -137,7 +163,7 @@ func transferZipFileEntries(ctx context.Context, folders models.FolderFinderCrea // update file parent folder zfBase.ParentFolderID = newZfFolder.ID logger.Debugf("moving %s to folder %s", zfBase.Path, newZfFolder.Path) - if err := files.Update(ctx, zf); err != nil { + if err := m.files.Update(ctx, zf); err != nil { return fmt.Errorf("updating file %s: %w", oldZfPath, err) } } diff --git a/pkg/file/folder_rename_detect.go b/pkg/file/folder_rename_detect.go index 4c057461b..d45593b28 100644 --- a/pkg/file/folder_rename_detect.go +++ b/pkg/file/folder_rename_detect.go @@ -2,7 +2,6 @@ package file import ( "context" - "errors" "fmt" "io/fs" @@ -75,7 +74,7 @@ func (d *folderRenameDetector) bestCandidate() *models.Folder { return best.folder } -func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*models.Folder, error) { +func (s *Scanner) detectFolderMove(ctx context.Context, file ScannedFile) (*models.Folder, error) { // in order for a folder to be considered moved, the existing folder must be // missing, and the majority of the old folder's files must be present, unchanged, // in the new folder. @@ -88,7 +87,12 @@ func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*models. 
r := s.Repository - if err := symWalk(file.fs, file.Path, func(path string, d fs.DirEntry, err error) error { + zipFilePath := "" + if file.ZipFile != nil { + zipFilePath = file.ZipFile.Base().Path + } + + if err := SymWalk(file.FS, file.Path, func(path string, d fs.DirEntry, err error) error { if err != nil { // don't let errors prevent scanning logger.Errorf("error scanning %s: %v", path, err) @@ -111,11 +115,11 @@ func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*models. return nil } - if !s.acceptEntry(ctx, path, info) { + if !s.AcceptEntry(ctx, path, info, zipFilePath) { return nil } - size, err := getFileSize(file.fs, path, info) + size, err := GetFileSize(file.FS, path, info) if err != nil { return fmt.Errorf("getting file size for %q: %w", path, err) } @@ -154,16 +158,14 @@ func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*models. } // parent folder must be missing - _, err = file.fs.Lstat(pf.Path) + _, err = file.FS.Lstat(pf.Path) if err == nil { // parent folder exists, not a candidate detector.reject(parentFolderID) continue } - if !errors.Is(err, fs.ErrNotExist) { - return fmt.Errorf("checking for parent folder %q: %w", pf.Path, err) - } + // treat any error as missing folder // parent folder is missing, possible candidate // count the total number of files in the existing folder diff --git a/pkg/file/handler.go b/pkg/file/handler.go index 10616eefa..b4056f195 100644 --- a/pkg/file/handler.go +++ b/pkg/file/handler.go @@ -9,7 +9,7 @@ import ( // PathFilter provides a filter function for paths. 
type PathFilter interface { - Accept(ctx context.Context, path string, info fs.FileInfo) bool + Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool } type PathFilterFunc func(path string) bool diff --git a/pkg/file/move.go b/pkg/file/move.go index ba2a496bb..1f0a5012c 100644 --- a/pkg/file/move.go +++ b/pkg/file/move.go @@ -45,9 +45,12 @@ type Mover struct { moved map[string]string foldersCreated []string + + // needed for creating folder hierarchy when moving zip file entries + rootPaths []string } -func NewMover(fileStore models.FileFinderUpdater, folderStore models.FolderReaderWriter) *Mover { +func NewMover(fileStore models.FileFinderUpdater, folderStore models.FolderReaderWriter, rootPaths []string) *Mover { return &Mover{ Files: fileStore, Folders: folderStore, @@ -55,6 +58,7 @@ func NewMover(fileStore models.FileFinderUpdater, folderStore models.FolderReade renamerRemoverImpl: newRenamerRemoverImpl(), mkDirFn: os.Mkdir, }, + rootPaths: rootPaths, } } @@ -87,7 +91,13 @@ func (m *Mover) Move(ctx context.Context, f models.File, folder *models.Folder, return fmt.Errorf("file %s already exists", newPath) } - if err := transferZipHierarchy(ctx, m.Folders, m.Files, fBase.ID, oldPath, newPath); err != nil { + zipMover := zipHierarchyMover{ + folderStore: m.Folders, + files: m.Files, + rootPaths: m.rootPaths, + } + + if err := zipMover.transferZipHierarchy(ctx, fBase.ID, oldPath, newPath); err != nil { return fmt.Errorf("moving folder hierarchy for file %s: %w", fBase.Path, err) } @@ -195,6 +205,25 @@ func correctSubFolderHierarchy(ctx context.Context, rw models.FolderReaderWriter logger.Debugf("updating folder %s to %s", oldPath, correctPath) + // #6427 - ensure folder entry with new path doesn't already exist + const caseSensitive = true + existing, err := rw.FindByPath(ctx, correctPath, caseSensitive) + if err != nil { + return fmt.Errorf("finding folder by path %s: %w", correctPath, err) + } + + if existing != nil { + // this 
should no longer be possible, but if it does happen, log a warning + // and skip updating this folder and its subfolders + logger.Warnf("folder with path %s already exists, setting parent_folder_id of %s to NULL and skipping", correctPath, oldPath) + f.ParentFolderID = nil + if err := rw.Update(ctx, f); err != nil { + return fmt.Errorf("updating folder parent id to NULL for folder %s: %w", oldPath, err) + } + + continue + } + f.Path = correctPath if err := rw.Update(ctx, f); err != nil { return fmt.Errorf("updating folder path %s -> %s: %w", oldPath, f.Path, err) diff --git a/pkg/file/scan.go b/pkg/file/scan.go index 4018913b0..4cfcaf7ae 100644 --- a/pkg/file/scan.go +++ b/pkg/file/scan.go @@ -2,28 +2,19 @@ package file import ( "context" - "errors" "fmt" "io/fs" - "os" "path/filepath" + "slices" "strings" "sync" "time" - "github.com/remeh/sizedwaitgroup" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" ) -const ( - scanQueueSize = 200000 - // maximum number of times to retry in the event of a locked database - // use -1 to retry forever - maxRetries = -1 -) - // Scanner scans files into the database. // // The scan process works using two goroutines. The first walks through the provided paths @@ -54,8 +45,30 @@ type Scanner struct { Repository Repository FingerprintCalculator FingerprintCalculator + // ZipFileExtensions is a list of file extensions that are considered zip files. + // Extension does not include the . character. + ZipFileExtensions []string + + // ScanFilters are used to determine if a file should be scanned. + ScanFilters []PathFilter + + // HandlerRequiredFilters are used to determine if an unchanged file needs to be handled + HandlerRequiredFilters []Filter + // FileDecorators are applied to files as they are scanned. FileDecorators []Decorator + + // handlers are called after a file has been scanned. 
+ FileHandlers []Handler + + // RootPaths form the top-level paths for the library. + // Used to determine the root of the folder hierarchy when creating folders. + RootPaths []string + + // Rescan indicates whether files should be rescanned even if they haven't changed. + Rescan bool + + folderPathToID sync.Map } // FingerprintCalculator calculates a fingerprint for the provided file. @@ -90,248 +103,20 @@ func (d *FilteredDecorator) IsMissingMetadata(ctx context.Context, fs models.FS, return false } -// ProgressReporter is used to report progress of the scan. -type ProgressReporter interface { - AddTotal(total int) - Increment() - Definite() - ExecuteTask(description string, fn func()) -} - -type scanJob struct { - *Scanner - - // handlers are called after a file has been scanned. - handlers []Handler - - ProgressReports ProgressReporter - options ScanOptions - - startTime time.Time - fileQueue chan scanFile - retryList []scanFile - retrying bool - folderPathToID sync.Map - zipPathToID sync.Map - count int - - txnRetryer txn.Retryer -} - -// ScanOptions provides options for scanning files. -type ScanOptions struct { - Paths []string - - // ZipFileExtensions is a list of file extensions that are considered zip files. - // Extension does not include the . character. - ZipFileExtensions []string - - // ScanFilters are used to determine if a file should be scanned. - ScanFilters []PathFilter - - // HandlerRequiredFilters are used to determine if an unchanged file needs to be handled - HandlerRequiredFilters []Filter - - ParallelTasks int - - // When true files in path will be rescanned even if they haven't changed - Rescan bool -} - -// Scan starts the scanning process. 
-func (s *Scanner) Scan(ctx context.Context, handlers []Handler, options ScanOptions, progressReporter ProgressReporter) { - job := &scanJob{ - Scanner: s, - handlers: handlers, - ProgressReports: progressReporter, - options: options, - txnRetryer: txn.Retryer{ - Manager: s.Repository.TxnManager, - Retries: maxRetries, - }, - } - - job.execute(ctx) -} - -type scanFile struct { +// ScannedFile represents a file being scanned. +type ScannedFile struct { *models.BaseFile - fs models.FS - info fs.FileInfo + FS models.FS + Info fs.FileInfo } -func (s *scanJob) withTxn(ctx context.Context, fn func(ctx context.Context) error) error { - return s.txnRetryer.WithTxn(ctx, fn) -} - -func (s *scanJob) withDB(ctx context.Context, fn func(ctx context.Context) error) error { - return s.Repository.WithDB(ctx, fn) -} - -func (s *scanJob) execute(ctx context.Context) { - paths := s.options.Paths - logger.Infof("scanning %d paths", len(paths)) - s.startTime = time.Now() - - s.fileQueue = make(chan scanFile, scanQueueSize) - var wg sync.WaitGroup - wg.Add(1) - - go func() { - defer wg.Done() - if err := s.queueFiles(ctx, paths); err != nil { - if errors.Is(err, context.Canceled) { - return - } - - logger.Errorf("error queuing files for scan: %v", err) - return - } - - logger.Infof("Finished adding files to queue. 
%d files queued", s.count) - }() - - defer wg.Wait() - - if err := s.processQueue(ctx); err != nil { - if errors.Is(err, context.Canceled) { - return - } - - logger.Errorf("error scanning files: %v", err) - return - } -} - -func (s *scanJob) queueFiles(ctx context.Context, paths []string) error { - var err error - s.ProgressReports.ExecuteTask("Walking directory tree", func() { - for _, p := range paths { - err = symWalk(s.FS, p, s.queueFileFunc(ctx, s.FS, nil)) - if err != nil { - return - } - } - }) - - close(s.fileQueue) - - if s.ProgressReports != nil { - s.ProgressReports.AddTotal(s.count) - s.ProgressReports.Definite() - } - - return err -} - -func (s *scanJob) queueFileFunc(ctx context.Context, f models.FS, zipFile *scanFile) fs.WalkDirFunc { - return func(path string, d fs.DirEntry, err error) error { - if err != nil { - // don't let errors prevent scanning - logger.Errorf("error scanning %s: %v", path, err) - return nil - } - - if err = ctx.Err(); err != nil { - return err - } - - info, err := d.Info() - if err != nil { - logger.Errorf("reading info for %q: %v", path, err) - return nil - } - - if !s.acceptEntry(ctx, path, info) { - if info.IsDir() { - return fs.SkipDir - } - - return nil - } - - size, err := getFileSize(f, path, info) - if err != nil { - return err - } - - ff := scanFile{ - BaseFile: &models.BaseFile{ - DirEntry: models.DirEntry{ - ModTime: modTime(info), - }, - Path: path, - Basename: filepath.Base(path), - Size: size, - }, - fs: f, - info: info, - } - - if zipFile != nil { - zipFileID, err := s.getZipFileID(ctx, zipFile) - if err != nil { - return err - } - ff.ZipFileID = zipFileID - ff.ZipFile = zipFile - } - - if info.IsDir() { - // handle folders immediately - if err := s.handleFolder(ctx, ff); err != nil { - if !errors.Is(err, context.Canceled) { - logger.Errorf("error processing %q: %v", path, err) - } - - // skip the directory since we won't be able to process the files anyway - return fs.SkipDir - } - - return nil - } - - // if 
zip file is present, we handle immediately - if zipFile != nil { - s.ProgressReports.ExecuteTask("Scanning "+path, func() { - if err := s.handleFile(ctx, ff); err != nil { - if !errors.Is(err, context.Canceled) { - logger.Errorf("error processing %q: %v", path, err) - } - // don't return an error, just skip the file - } - }) - - return nil - } - - s.fileQueue <- ff - - s.count++ - - return nil - } -} - -func getFileSize(f models.FS, path string, info fs.FileInfo) (int64, error) { - // #2196/#3042 - replace size with target size if file is a symlink - if info.Mode()&os.ModeSymlink == os.ModeSymlink { - targetInfo, err := f.Stat(path) - if err != nil { - return 0, fmt.Errorf("reading info for symlink %q: %w", path, err) - } - return targetInfo.Size(), nil - } - - return info.Size(), nil -} - -func (s *scanJob) acceptEntry(ctx context.Context, path string, info fs.FileInfo) bool { +// AcceptEntry determines if the file entry should be accepted for scanning +func (s *Scanner) AcceptEntry(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool { // always accept if there's no filters - accept := len(s.options.ScanFilters) == 0 - for _, filter := range s.options.ScanFilters { + accept := len(s.ScanFilters) == 0 + for _, filter := range s.ScanFilters { // accept if any filter accepts the file - if filter.Accept(ctx, path, info) { + if filter.Accept(ctx, path, info, zipFilePath) { accept = true break } @@ -340,102 +125,7 @@ func (s *scanJob) acceptEntry(ctx context.Context, path string, info fs.FileInfo return accept } -func (s *scanJob) scanZipFile(ctx context.Context, f scanFile) error { - zipFS, err := f.fs.OpenZip(f.Path, f.Size) - if err != nil { - if errors.Is(err, errNotReaderAt) { - // can't walk the zip file - // just return - return nil - } - - return err - } - - defer zipFS.Close() - - return symWalk(zipFS, f.Path, s.queueFileFunc(ctx, zipFS, &f)) -} - -func (s *scanJob) processQueue(ctx context.Context) error { - parallelTasks := 
s.options.ParallelTasks - if parallelTasks < 1 { - parallelTasks = 1 - } - - wg := sizedwaitgroup.New(parallelTasks) - - if err := func() error { - defer wg.Wait() - - for f := range s.fileQueue { - if err := ctx.Err(); err != nil { - return err - } - - wg.Add() - ff := f - go func() { - defer wg.Done() - s.processQueueItem(ctx, ff) - }() - } - - return nil - }(); err != nil { - return err - } - - s.retrying = true - - if err := func() error { - defer wg.Wait() - - for _, f := range s.retryList { - if err := ctx.Err(); err != nil { - return err - } - - wg.Add() - ff := f - go func() { - defer wg.Done() - s.processQueueItem(ctx, ff) - }() - } - - return nil - }(); err != nil { - return err - } - - return nil -} - -func (s *scanJob) incrementProgress(f scanFile) { - // don't increment for files inside zip files since these aren't - // counted during the initial walking - if s.ProgressReports != nil && f.ZipFile == nil { - s.ProgressReports.Increment() - } -} - -func (s *scanJob) processQueueItem(ctx context.Context, f scanFile) { - s.ProgressReports.ExecuteTask("Scanning "+f.Path, func() { - var err error - if f.info.IsDir() { - err = s.handleFolder(ctx, f) - } else { - err = s.handleFile(ctx, f) - } - - if err != nil && !errors.Is(err, context.Canceled) { - logger.Errorf("error processing %q: %v", f.Path, err) - } - }) -} - -func (s *scanJob) getFolderID(ctx context.Context, path string) (*models.FolderID, error) { +func (s *Scanner) getFolderID(ctx context.Context, path string) (*models.FolderID, error) { // check the folder cache first if f, ok := s.folderPathToID.Load(path); ok { v := f.(models.FolderID) @@ -458,48 +148,17 @@ func (s *scanJob) getFolderID(ctx context.Context, path string) (*models.FolderI return &ret.ID, nil } -func (s *scanJob) getZipFileID(ctx context.Context, zipFile *scanFile) (*models.FileID, error) { - if zipFile == nil { - return nil, nil - } - - if zipFile.ID != 0 { - return &zipFile.ID, nil - } - - path := zipFile.Path - - // check the 
folder cache first - if f, ok := s.zipPathToID.Load(path); ok { - v := f.(models.FileID) - return &v, nil - } - - // assume case sensitive when searching for the zip file - const caseSensitive = true - - ret, err := s.Repository.File.FindByPath(ctx, path, caseSensitive) - if err != nil { - return nil, fmt.Errorf("getting zip file ID for %q: %w", path, err) - } - - if ret == nil { - return nil, fmt.Errorf("zip file %q doesn't exist in database", zipFile.Path) - } - - s.zipPathToID.Store(path, ret.Base().ID) - return &ret.Base().ID, nil -} - -func (s *scanJob) handleFolder(ctx context.Context, file scanFile) error { +// ScanFolder scans the provided folder into the database, returning the folder entry. +// If the folder already exists, it is updated if necessary. +func (s *Scanner) ScanFolder(ctx context.Context, file ScannedFile) (*models.Folder, error) { + var f *models.Folder + var err error path := file.Path - return s.withTxn(ctx, func(ctx context.Context) error { - defer s.incrementProgress(file) - + err = s.Repository.WithTxn(ctx, func(ctx context.Context) error { // determine if folder already exists in data store (by path) // assume case sensitive by default - f, err := s.Repository.Folder.FindByPath(ctx, path, true) + f, err = s.Repository.Folder.FindByPath(ctx, path, true) if err != nil { return fmt.Errorf("checking for existing folder %q: %w", path, err) } @@ -508,7 +167,7 @@ func (s *scanJob) handleFolder(ctx context.Context, file scanFile) error { // case insensitive searching // assume case sensitive if in zip if f == nil && file.ZipFileID == nil { - caseSensitive, _ := file.fs.IsPathCaseSensitive(file.Path) + caseSensitive, _ := file.FS.IsPathCaseSensitive(file.Path) if !caseSensitive { f, err = s.Repository.Folder.FindByPath(ctx, path, false) @@ -535,9 +194,15 @@ func (s *scanJob) handleFolder(ctx context.Context, file scanFile) error { return nil }) + + return f, err } -func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) 
(*models.Folder, error) { +func (s *Scanner) isRootPath(path string) bool { + return path == "." || slices.Contains(s.RootPaths, path) +} + +func (s *Scanner) onNewFolder(ctx context.Context, file ScannedFile) (*models.Folder, error) { renamed, err := s.handleFolderRename(ctx, file) if err != nil { return nil, err @@ -556,18 +221,16 @@ func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*models.Folde UpdatedAt: now, } - dir := filepath.Dir(file.Path) - if dir != "." { - parentFolderID, err := s.getFolderID(ctx, dir) + if !s.isRootPath(file.Path) { + dir := filepath.Dir(file.Path) + + // create full folder hierarchy if parent folder doesn't exist, and set parent folder ID + parentFolder, err := GetOrCreateFolderHierarchy(ctx, s.Repository.Folder, dir, s.RootPaths) if err != nil { return nil, fmt.Errorf("getting parent folder %q: %w", dir, err) } - // if parent folder doesn't exist, assume it's a top-level folder - // this may not be true if we're using multiple goroutines - if parentFolderID != nil { - toCreate.ParentFolderID = parentFolderID - } + toCreate.ParentFolderID = &parentFolder.ID } txn.AddPostCommitHook(ctx, func(ctx context.Context) { @@ -584,7 +247,7 @@ func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*models.Folde return toCreate, nil } -func (s *scanJob) handleFolderRename(ctx context.Context, file scanFile) (*models.Folder, error) { +func (s *Scanner) handleFolderRename(ctx context.Context, file ScannedFile) (*models.Folder, error) { // ignore folders in zip files if file.ZipFileID != nil { return nil, nil @@ -625,7 +288,7 @@ func (s *scanJob) handleFolderRename(ctx context.Context, file scanFile) (*model return renamedFrom, nil } -func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *models.Folder) (*models.Folder, error) { +func (s *Scanner) onExistingFolder(ctx context.Context, f ScannedFile, existing *models.Folder) (*models.Folder, error) { update := false // update if mod time is changed 
@@ -656,6 +319,19 @@ func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *mo } } + // handle case where parent folder was not previously set + if existing.ParentFolderID == nil && !s.isRootPath(existing.Path) { + logger.Infof("Existing folder entry %q has no parent folder. Creating folder hierarchy and setting parent ID...", existing.Path) + + // create full folder hierarchy if parent folder doesn't exist, and set parent folder ID + parentFolder, err := GetOrCreateFolderHierarchy(ctx, s.Repository.Folder, filepath.Dir(f.Path), s.RootPaths) + if err != nil { + return nil, fmt.Errorf("getting parent folder for %q: %w", f.Path, err) + } + existing.ParentFolderID = &parentFolder.ID + update = true + } + if update { var err error if err = s.Repository.Folder.Update(ctx, existing); err != nil { @@ -666,22 +342,27 @@ func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *mo return existing, nil } -func modTime(info fs.FileInfo) time.Time { - // truncate to seconds, since we don't store beyond that in the database - return info.ModTime().Truncate(time.Second) +type ScanFileResult struct { + File models.File + New bool + Renamed bool + Updated bool + FingerprintChanged bool } -func (s *scanJob) handleFile(ctx context.Context, f scanFile) error { - defer s.incrementProgress(f) +func (r ScanFileResult) IsUnchanged() bool { + return !r.New && !r.Renamed && !r.Updated +} - var ff models.File +// ScanFile scans the provided file into the database, returning the scan result. 
+func (s *Scanner) ScanFile(ctx context.Context, f ScannedFile) (*ScanFileResult, error) { + var r *ScanFileResult // don't use a transaction to check if new or existing - if err := s.withDB(ctx, func(ctx context.Context) error { + if err := s.Repository.WithDB(ctx, func(ctx context.Context) error { // determine if file already exists in data store // assume case sensitive when searching for the file to begin with - var err error - ff, err = s.Repository.File.FindByPath(ctx, f.Path, true) + ff, err := s.Repository.File.FindByPath(ctx, f.Path, true) if err != nil { return fmt.Errorf("checking for existing file %q: %w", f.Path, err) } @@ -690,7 +371,7 @@ func (s *scanJob) handleFile(ctx context.Context, f scanFile) error { // case insensitive search // assume case sensitive if in zip if ff == nil && f.ZipFileID != nil { - caseSensitive, _ := f.fs.IsPathCaseSensitive(f.Path) + caseSensitive, _ := f.FS.IsPathCaseSensitive(f.Path) if !caseSensitive { ff, err = s.Repository.File.FindByPath(ctx, f.Path, false) @@ -702,35 +383,23 @@ func (s *scanJob) handleFile(ctx context.Context, f scanFile) error { if ff == nil { // returns a file only if it is actually new - ff, err = s.onNewFile(ctx, f) + r, err = s.onNewFile(ctx, f) return err } - ff, err = s.onExistingFile(ctx, f, ff) + r, err = s.onExistingFile(ctx, f, ff) return err }); err != nil { - return err + return nil, err } - if ff != nil && s.isZipFile(f.info.Name()) { - f.BaseFile = ff.Base() - - // scan zip files with a different context that is not cancellable - // cancelling while scanning zip file contents results in the scan - // contents being partially completed - zipCtx := context.WithoutCancel(ctx) - - if err := s.scanZipFile(zipCtx, f); err != nil { - logger.Errorf("Error scanning zip file %q: %v", f.Path, err) - } - } - - return nil + return r, nil } -func (s *scanJob) isZipFile(path string) bool { +// IsZipFile determines if the provided path is a zip file based on its extension. 
+func (s *Scanner) IsZipFile(path string) bool { fExt := filepath.Ext(path) - for _, ext := range s.options.ZipFileExtensions { + for _, ext := range s.ZipFileExtensions { if strings.EqualFold(fExt, "."+ext) { return true } @@ -739,7 +408,7 @@ func (s *scanJob) isZipFile(path string) bool { return false } -func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (models.File, error) { +func (s *Scanner) onNewFile(ctx context.Context, f ScannedFile) (*ScanFileResult, error) { now := time.Now() baseFile := f.BaseFile @@ -749,54 +418,71 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (models.File, error baseFile.UpdatedAt = now // find the parent folder - parentFolderID, err := s.getFolderID(ctx, filepath.Dir(path)) + folderPath := filepath.Dir(path) + parentFolderID, err := s.getFolderID(ctx, folderPath) if err != nil { return nil, fmt.Errorf("getting parent folder for %q: %w", path, err) } if parentFolderID == nil { - // if parent folder doesn't exist, assume it's not yet created - // add this file to the queue to be created later - if s.retrying { - // if we're retrying and the folder still doesn't exist, then it's a problem - return nil, fmt.Errorf("parent folder for %q doesn't exist", path) - } + // parent folders should have been created before scanning this file in a recursive scan + // assume that we are scanning specifically and only this file, + // so we should create the parent folder hierarchy if it doesn't exist + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { + parentFolder, err := GetOrCreateFolderHierarchy(ctx, s.Repository.Folder, folderPath, s.RootPaths) + if err != nil { + return fmt.Errorf("getting parent folder for %q: %w", f.Path, err) + } - s.retryList = append(s.retryList, f) - return nil, nil + parentFolderID = &parentFolder.ID + return nil + }); err != nil { + return nil, err + } + } + if parentFolderID == nil { + // shouldn't happen + return nil, fmt.Errorf("parent folder ID is nil for %q", path) } 
baseFile.ParentFolderID = *parentFolderID const useExisting = false - fp, err := s.calculateFingerprints(f.fs, baseFile, path, useExisting) + fp, err := s.calculateFingerprints(f.FS, baseFile, path, useExisting) if err != nil { return nil, err } baseFile.SetFingerprints(fp) - file, err := s.fireDecorators(ctx, f.fs, baseFile) + file, err := s.fireDecorators(ctx, f.FS, baseFile) if err != nil { return nil, err } // determine if the file is renamed from an existing file in the store // do this after decoration so that missing fields can be populated - renamed, err := s.handleRename(ctx, file, fp) + zipFilePath := "" + if f.ZipFile != nil { + zipFilePath = f.ZipFile.Base().Path + } + renamed, err := s.handleRename(ctx, file, fp, zipFilePath) if err != nil { return nil, err } if renamed != nil { + return &ScanFileResult{ + File: renamed, + Renamed: true, + }, nil // handle rename should have already handled the contents of the zip file // so shouldn't need to scan it again // return nil so it doesn't - return nil, nil } // if not renamed, queue file for creation - if err := s.withTxn(ctx, func(ctx context.Context) error { + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.Repository.File.Create(ctx, file); err != nil { return fmt.Errorf("creating file %q: %w", path, err) } @@ -810,10 +496,13 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (models.File, error return nil, err } - return file, nil + return &ScanFileResult{ + File: file, + New: true, + }, nil } -func (s *scanJob) fireDecorators(ctx context.Context, fs models.FS, f models.File) (models.File, error) { +func (s *Scanner) fireDecorators(ctx context.Context, fs models.FS, f models.File) (models.File, error) { for _, h := range s.FileDecorators { var err error f, err = h.Decorate(ctx, fs, f) @@ -825,8 +514,8 @@ func (s *scanJob) fireDecorators(ctx context.Context, fs models.FS, f models.Fil return f, nil } -func (s *scanJob) fireHandlers(ctx context.Context, f 
models.File, oldFile models.File) error { - for _, h := range s.handlers { +func (s *Scanner) fireHandlers(ctx context.Context, f models.File, oldFile models.File) error { + for _, h := range s.FileHandlers { if err := h.Handle(ctx, f, oldFile); err != nil { return err } @@ -835,7 +524,7 @@ func (s *scanJob) fireHandlers(ctx context.Context, f models.File, oldFile model return nil } -func (s *scanJob) calculateFingerprints(fs models.FS, f *models.BaseFile, path string, useExisting bool) (models.Fingerprints, error) { +func (s *Scanner) calculateFingerprints(fs models.FS, f *models.BaseFile, path string, useExisting bool) (models.Fingerprints, error) { // only log if we're (re)calculating fingerprints if !useExisting { logger.Infof("Calculating fingerprints for %s ...", path) @@ -872,7 +561,7 @@ func appendFileUnique(v []models.File, toAdd []models.File) []models.File { return v } -func (s *scanJob) getFileFS(f *models.BaseFile) (models.FS, error) { +func (s *Scanner) getFileFS(f *models.BaseFile) (models.FS, error) { if f.ZipFile == nil { return s.FS, nil } @@ -883,10 +572,11 @@ func (s *scanJob) getFileFS(f *models.BaseFile) (models.FS, error) { } zipPath := f.ZipFile.Base().Path - return fs.OpenZip(zipPath, f.Size) + zipSize := f.ZipFile.Base().Size + return fs.OpenZip(zipPath, zipSize) } -func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.Fingerprint) (models.File, error) { +func (s *Scanner) handleRename(ctx context.Context, f models.File, fp []models.Fingerprint, zipFilePath string) (models.File, error) { var others []models.File for _, tfp := range fp { @@ -928,7 +618,7 @@ func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.F // treat as a move missing = append(missing, other) } - case !s.acceptEntry(ctx, other.Base().Path, info): + case !s.AcceptEntry(ctx, other.Base().Path, info, zipFilePath): // #4393 - if the file is no longer in the configured library paths, treat it as a move logger.Debugf("File %q 
no longer in library paths. Treating as a move.", other.Base().Path) missing = append(missing, other) @@ -961,13 +651,19 @@ func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.F fBaseCopy.Fingerprints = updatedBase.Fingerprints *updatedBase = fBaseCopy - if err := s.withTxn(ctx, func(ctx context.Context) error { + zipMover := zipHierarchyMover{ + folderStore: s.Repository.Folder, + files: s.Repository.File, + rootPaths: s.RootPaths, + } + + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.Repository.File.Update(ctx, updated); err != nil { return fmt.Errorf("updating file for rename %q: %w", newPath, err) } - if s.isZipFile(updatedBase.Basename) { - if err := transferZipHierarchy(ctx, s.Repository.Folder, s.Repository.File, updatedBase.ID, oldPath, newPath); err != nil { + if s.IsZipFile(updatedBase.Basename) { + if err := zipMover.transferZipHierarchy(ctx, updatedBase.ID, oldPath, newPath); err != nil { return fmt.Errorf("moving zip hierarchy for renamed zip file %q: %w", newPath, err) } } @@ -984,9 +680,9 @@ func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.F return updated, nil } -func (s *scanJob) isHandlerRequired(ctx context.Context, f models.File) bool { - accept := len(s.options.HandlerRequiredFilters) == 0 - for _, filter := range s.options.HandlerRequiredFilters { +func (s *Scanner) isHandlerRequired(ctx context.Context, f models.File) bool { + accept := len(s.HandlerRequiredFilters) == 0 + for _, filter := range s.HandlerRequiredFilters { // accept if any filter accepts the file if filter.Accept(ctx, f) { accept = true @@ -1005,9 +701,9 @@ func (s *scanJob) isHandlerRequired(ctx context.Context, f models.File) bool { // - file size // - image format, width or height // - video codec, audio codec, format, width, height, framerate or bitrate -func (s *scanJob) isMissingMetadata(ctx context.Context, f scanFile, existing models.File) bool { +func (s *Scanner) 
isMissingMetadata(ctx context.Context, f ScannedFile, existing models.File) bool { for _, h := range s.FileDecorators { - if h.IsMissingMetadata(ctx, f.fs, existing) { + if h.IsMissingMetadata(ctx, f.FS, existing) { return true } } @@ -1015,20 +711,20 @@ func (s *scanJob) isMissingMetadata(ctx context.Context, f scanFile, existing mo return false } -func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing models.File) (models.File, error) { +func (s *Scanner) setMissingMetadata(ctx context.Context, f ScannedFile, existing models.File) (models.File, error) { path := existing.Base().Path logger.Infof("Updating metadata for %s", path) existing.Base().Size = f.Size var err error - existing, err = s.fireDecorators(ctx, f.fs, existing) + existing, err = s.fireDecorators(ctx, f.FS, existing) if err != nil { return nil, err } // queue file for update - if err := s.withTxn(ctx, func(ctx context.Context) error { + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.Repository.File.Update(ctx, existing); err != nil { return fmt.Errorf("updating file %q: %w", path, err) } @@ -1041,9 +737,9 @@ func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing m return existing, nil } -func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existing models.File) (models.File, error) { +func (s *Scanner) setMissingFingerprints(ctx context.Context, f ScannedFile, existing models.File) (models.File, error) { const useExisting = true - fp, err := s.calculateFingerprints(f.fs, existing.Base(), f.Path, useExisting) + fp, err := s.calculateFingerprints(f.FS, existing.Base(), f.Path, useExisting) if err != nil { return nil, err } @@ -1051,7 +747,7 @@ func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existi if fp.ContentsChanged(existing.Base().Fingerprints) { existing.SetFingerprints(fp) - if err := s.withTxn(ctx, func(ctx context.Context) error { + if err := 
s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.Repository.File.Update(ctx, existing); err != nil { return fmt.Errorf("updating file %q: %w", f.Path, err) } @@ -1066,14 +762,14 @@ func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existi } // returns a file only if it was updated -func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing models.File) (models.File, error) { +func (s *Scanner) onExistingFile(ctx context.Context, f ScannedFile, existing models.File) (*ScanFileResult, error) { base := existing.Base() path := base.Path fileModTime := f.ModTime // #6326 - also force a rescan if the basename changed updated := !fileModTime.Equal(base.ModTime) || base.Basename != f.Basename - forceRescan := s.options.Rescan + forceRescan := s.Rescan if !updated && !forceRescan { return s.onUnchangedFile(ctx, f, existing) @@ -1095,21 +791,24 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing model // calculate and update fingerprints for the file const useExisting = false - fp, err := s.calculateFingerprints(f.fs, base, path, useExisting) + fp, err := s.calculateFingerprints(f.FS, base, path, useExisting) if err != nil { return nil, err } + oldFingerprints := existing.Base().Fingerprints + fingerprintChanged := fp.ContentsChanged(oldFingerprints) + s.removeOutdatedFingerprints(existing, fp) existing.SetFingerprints(fp) - existing, err = s.fireDecorators(ctx, f.fs, existing) + existing, err = s.fireDecorators(ctx, f.FS, existing) if err != nil { return nil, err } // queue file for update - if err := s.withTxn(ctx, func(ctx context.Context) error { + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.Repository.File.Update(ctx, existing); err != nil { return fmt.Errorf("updating file %q: %w", path, err) } @@ -1122,11 +821,14 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing model }); err != nil { return nil, err } - - return 
existing, nil + return &ScanFileResult{ + File: existing, + Updated: true, + FingerprintChanged: fingerprintChanged, + }, nil } -func (s *scanJob) removeOutdatedFingerprints(existing models.File, fp models.Fingerprints) { +func (s *Scanner) removeOutdatedFingerprints(existing models.File, fp models.Fingerprints) { // HACK - if no MD5 fingerprint was returned, and the oshash is changed // then remove the MD5 fingerprint oshash := fp.For(models.FingerprintTypeOshash) @@ -1154,7 +856,7 @@ func (s *scanJob) removeOutdatedFingerprints(existing models.File, fp models.Fin } // returns a file only if it was updated -func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing models.File) (models.File, error) { +func (s *Scanner) onUnchangedFile(ctx context.Context, f ScannedFile, existing models.File) (*ScanFileResult, error) { var err error isMissingMetdata := s.isMissingMetadata(ctx, f, existing) @@ -1173,7 +875,7 @@ func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing mode } handlerRequired := false - if err := s.withDB(ctx, func(ctx context.Context) error { + if err := s.Repository.WithDB(ctx, func(ctx context.Context) error { // check if the handler needs to be run handlerRequired = s.isHandlerRequired(ctx, existing) return nil @@ -1183,15 +885,20 @@ func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing mode if !handlerRequired { // if this file is a zip file, then we need to rescan the contents - // as well. We do this by returning the file, instead of nil. + // as well. We do this by indicating that the file is updated. 
if isMissingMetdata { - return existing, nil + return &ScanFileResult{ + File: existing, + Updated: true, + }, nil } - return nil, nil + return &ScanFileResult{ + File: existing, + }, nil } - if err := s.withTxn(ctx, func(ctx context.Context) error { + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.fireHandlers(ctx, existing, nil); err != nil { return err } @@ -1202,6 +909,9 @@ func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing mode } // if this file is a zip file, then we need to rescan the contents - // as well. We do this by returning the file, instead of nil. - return existing, nil + // as well. We do this by indicating that the file is updated. + return &ScanFileResult{ + File: existing, + Updated: true, + }, nil } diff --git a/pkg/file/stashignore.go b/pkg/file/stashignore.go new file mode 100644 index 000000000..681ccf795 --- /dev/null +++ b/pkg/file/stashignore.go @@ -0,0 +1,262 @@ +package file + +import ( + "context" + "io/fs" + "os" + "path/filepath" + "strings" + "sync" + + lru "github.com/hashicorp/golang-lru/v2" + ignore "github.com/sabhiram/go-gitignore" + "github.com/stashapp/stash/pkg/logger" +) + +const stashIgnoreFilename = ".stashignore" + +// entriesCacheSize is the size of the LRU cache for collected ignore entries. +// This cache stores the computed list of ignore entries per directory, avoiding +// repeated directory tree walks for files in the same directory. +const entriesCacheSize = 500 + +// StashIgnoreFilter implements PathFilter to exclude files/directories +// based on .stashignore files with gitignore-style patterns. +type StashIgnoreFilter struct { + // cache stores compiled ignore patterns per directory. + cache sync.Map // map[string]*ignoreEntry + // entriesCache stores collected ignore entries per (dir, libraryRoot) pair. + // This avoids recomputing the entry list for every file in the same directory. 
+ entriesCache *lru.Cache[string, []*ignoreEntry] +} + +// ignoreEntry holds the compiled ignore patterns for a directory. +type ignoreEntry struct { + // patterns is the compiled gitignore matcher for this directory. + patterns *ignore.GitIgnore + // dir is the directory this entry applies to. + dir string +} + +// NewStashIgnoreFilter creates a new StashIgnoreFilter. +func NewStashIgnoreFilter() *StashIgnoreFilter { + // Create the LRU cache for collected entries. + // Ignore error as it only fails if size <= 0. + entriesCache, _ := lru.New[string, []*ignoreEntry](entriesCacheSize) + return &StashIgnoreFilter{ + entriesCache: entriesCache, + } +} + +// Accept returns true if the path should be included in the scan. +// It checks for .stashignore files in the directory hierarchy and +// applies gitignore-style pattern matching. +// The libraryRoot parameter bounds the search for .stashignore files - +// only directories within the library root are checked. +// zipFilePath is the path of the zip file if the file is inside a zip. +// .stashignore files will not be read within zip files. +func (f *StashIgnoreFilter) Accept(ctx context.Context, path string, info fs.FileInfo, libraryRoot string, zipFilePath string) bool { + // If no library root provided, accept the file (safety fallback). + if libraryRoot == "" { + return true + } + + // Get the directory containing this path. + dir := filepath.Dir(path) + + // If the file is inside a zip, use the zip file's directory as the base for .stashignore lookup. + if zipFilePath != "" { + dir = filepath.Dir(zipFilePath) + } + + // Collect all applicable ignore entries from library root to this directory. + entries := f.collectIgnoreEntries(dir, libraryRoot) + + // If no .stashignore files found, accept the file. + if len(entries) == 0 { + return true + } + + // Check each ignore entry in order (from root to most specific). + // Later entries can override earlier ones with negation patterns. 
+ ignored := false + for _, entry := range entries { + // Get path relative to the ignore file's directory. + entryRelPath, err := filepath.Rel(entry.dir, path) + if err != nil { + continue + } + entryRelPath = filepath.ToSlash(entryRelPath) + if info.IsDir() { + entryRelPath += "/" + } + + if entry.patterns.MatchesPath(entryRelPath) { + ignored = true + } + } + + return !ignored +} + +// collectIgnoreEntries gathers all ignore entries from library root to the given directory. +// It walks up the directory tree from dir to libraryRoot and returns entries in order +// from root to most specific. Results are cached to avoid repeated computation for +// files in the same directory. +func (f *StashIgnoreFilter) collectIgnoreEntries(dir string, libraryRoot string) []*ignoreEntry { + // Clean paths for consistent comparison and cache key generation. + dir = filepath.Clean(dir) + libraryRoot = filepath.Clean(libraryRoot) + + // Build cache key from dir and libraryRoot. + cacheKey := dir + "\x00" + libraryRoot + + // Check the entries cache first. + if cached, ok := f.entriesCache.Get(cacheKey); ok { + return cached + } + + // Try subdirectory shortcut: if parent's entries are cached, extend them. + if dir != libraryRoot { + parent := filepath.Dir(dir) + if isPathInOrEqual(libraryRoot, parent) { + parentKey := parent + "\x00" + libraryRoot + if parentEntries, ok := f.entriesCache.Get(parentKey); ok { + // Parent is cached - just check if current dir has a .stashignore. + entries := parentEntries + if entry := f.getOrLoadIgnoreEntry(dir); entry != nil { + // Copy parent slice and append to avoid mutating cached slice. + entries = make([]*ignoreEntry, len(parentEntries), len(parentEntries)+1) + copy(entries, parentEntries) + entries = append(entries, entry) + } + f.entriesCache.Add(cacheKey, entries) + return entries + } + } + } + + // No cache hit - compute from scratch. + // Walk up from dir to library root, collecting directories. 
+ var dirs []string + current := dir + for { + // Check if we're still within the library root. + if !isPathInOrEqual(libraryRoot, current) { + break + } + + dirs = append(dirs, current) + + // Stop if we've reached the library root. + if current == libraryRoot { + break + } + + parent := filepath.Dir(current) + if parent == current { + // Reached filesystem root without finding library root. + break + } + current = parent + } + + // Reverse to get root-to-leaf order. + for i, j := 0, len(dirs)-1; i < j; i, j = i+1, j-1 { + dirs[i], dirs[j] = dirs[j], dirs[i] + } + + // Check each directory for .stashignore files. + var entries []*ignoreEntry + for _, d := range dirs { + if entry := f.getOrLoadIgnoreEntry(d); entry != nil { + entries = append(entries, entry) + } + } + + // Cache the result. + f.entriesCache.Add(cacheKey, entries) + + return entries +} + +// isPathInOrEqual checks if path is equal to or inside root. +func isPathInOrEqual(root, path string) bool { + if path == root { + return true + } + // Check if path starts with root + separator. + return strings.HasPrefix(path, root+string(filepath.Separator)) +} + +// getOrLoadIgnoreEntry returns the cached ignore entry for a directory, or loads it. +func (f *StashIgnoreFilter) getOrLoadIgnoreEntry(dir string) *ignoreEntry { + // Check cache first. + if cached, ok := f.cache.Load(dir); ok { + entry := cached.(*ignoreEntry) + if entry.patterns == nil { + return nil // Cached negative result. + } + return entry + } + + // Try to load .stashignore from this directory. + stashIgnorePath := filepath.Join(dir, stashIgnoreFilename) + patterns, err := f.loadIgnoreFile(stashIgnorePath) + if err != nil { + if !os.IsNotExist(err) { + logger.Warnf("Failed to load .stashignore from %s: %v", dir, err) + } + f.cache.Store(dir, &ignoreEntry{patterns: nil, dir: dir}) + return nil + } + if patterns == nil { + // File exists but has no patterns (empty or only comments). 
+ f.cache.Store(dir, &ignoreEntry{patterns: nil, dir: dir}) + return nil + } + + logger.Debugf("Loaded .stashignore from %s", dir) + + entry := &ignoreEntry{ + patterns: patterns, + dir: dir, + } + f.cache.Store(dir, entry) + return entry +} + +// loadIgnoreFile loads and compiles a .stashignore file. +func (f *StashIgnoreFilter) loadIgnoreFile(path string) (*ignore.GitIgnore, error) { + data, err := os.ReadFile(path) + if err != nil { + return nil, err + } + + lines := strings.Split(string(data), "\n") + var patterns []string + + for _, line := range lines { + // Trim trailing whitespace (but preserve leading for patterns). + line = strings.TrimRight(line, " \t\r") + + // Skip empty lines. + if line == "" { + continue + } + + // Skip comments (but not escaped #). + if strings.HasPrefix(line, "#") && !strings.HasPrefix(line, "\\#") { + continue + } + + patterns = append(patterns, line) + } + + if len(patterns) == 0 { + // File exists but has no patterns (e.g., only comments). + return nil, nil + } + + return ignore.CompileIgnoreLines(patterns...), nil +} diff --git a/pkg/file/stashignore_test.go b/pkg/file/stashignore_test.go new file mode 100644 index 000000000..41668b51b --- /dev/null +++ b/pkg/file/stashignore_test.go @@ -0,0 +1,523 @@ +package file + +import ( + "context" + "io/fs" + "os" + "path/filepath" + "sort" + "testing" +) + +// Helper to create an empty file. +func createTestFile(t *testing.T, dir, name string) { + t.Helper() + path := filepath.Join(dir, name) + if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil { + t.Fatalf("failed to create directory for %s: %v", path, err) + } + if err := os.WriteFile(path, []byte{}, 0644); err != nil { + t.Fatalf("failed to create file %s: %v", path, err) + } +} + +// Helper to create a file with content. 
+func createTestFileWithContent(t *testing.T, dir, name, content string) { + t.Helper() + path := filepath.Join(dir, name) + if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil { + t.Fatalf("failed to create directory for %s: %v", path, err) + } + if err := os.WriteFile(path, []byte(content), 0644); err != nil { + t.Fatalf("failed to create file %s: %v", path, err) + } +} + +// Helper to create a directory. +func createTestDir(t *testing.T, dir, name string) { + t.Helper() + path := filepath.Join(dir, name) + if err := os.MkdirAll(path, 0755); err != nil { + t.Fatalf("failed to create directory %s: %v", path, err) + } +} + +// walkAndFilter walks the directory tree and returns paths accepted by the filter. +// Returns paths relative to root for easier assertion. +func walkAndFilter(t *testing.T, root string, filter *StashIgnoreFilter) []string { + t.Helper() + var accepted []string + ctx := context.Background() + + err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + + // Skip the root directory itself. + if path == root { + return nil + } + + info, err := d.Info() + if err != nil { + return err + } + + if filter.Accept(ctx, path, info, root, "") { + relPath, _ := filepath.Rel(root, path) + accepted = append(accepted, relPath) + } else if info.IsDir() { + // If directory is rejected, skip it. + return filepath.SkipDir + } + + return nil + }) + + if err != nil { + t.Fatalf("walk failed: %v", err) + } + + sort.Strings(accepted) + return accepted +} + +// assertPathsEqual checks that the accepted paths match expected. 
+func assertPathsEqual(t *testing.T, expected, actual []string) { + t.Helper() + sort.Strings(expected) + + if len(expected) != len(actual) { + t.Errorf("path count mismatch:\nexpected %d: %v\nactual %d: %v", len(expected), expected, len(actual), actual) + return + } + + for i := range expected { + if expected[i] != actual[i] { + t.Errorf("path mismatch at index %d:\nexpected: %s\nactual: %s", i, expected[i], actual[i]) + } + } +} + +func TestStashIgnore_ExactFilename(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. + createTestFile(t, tmpDir, "video1.mp4") + createTestFile(t, tmpDir, "video2.mp4") + createTestFile(t, tmpDir, "ignore_me.mp4") + + // Create .stashignore that excludes exact filename. + createTestFileWithContent(t, tmpDir, ".stashignore", "ignore_me.mp4\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "video1.mp4", + "video2.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_WildcardPattern(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. + createTestFile(t, tmpDir, "video1.mp4") + createTestFile(t, tmpDir, "video2.mp4") + createTestFile(t, tmpDir, "temp1.tmp") + createTestFile(t, tmpDir, "temp2.tmp") + createTestFile(t, tmpDir, "notes.log") + + // Create .stashignore that excludes by extension. + createTestFileWithContent(t, tmpDir, ".stashignore", "*.tmp\n*.log\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "video1.mp4", + "video2.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_DirectoryExclusion(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. 
+ createTestFile(t, tmpDir, "video1.mp4") + createTestDir(t, tmpDir, "excluded_dir") + createTestFile(t, tmpDir, "excluded_dir/video2.mp4") + createTestFile(t, tmpDir, "excluded_dir/video3.mp4") + createTestDir(t, tmpDir, "included_dir") + createTestFile(t, tmpDir, "included_dir/video4.mp4") + + // Create .stashignore that excludes a directory. + createTestFileWithContent(t, tmpDir, ".stashignore", "excluded_dir/\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "included_dir", + "included_dir/video4.mp4", + "video1.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_NegationPattern(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. + createTestFile(t, tmpDir, "file1.tmp") + createTestFile(t, tmpDir, "file2.tmp") + createTestFile(t, tmpDir, "keep_this.tmp") + + // Create .stashignore that excludes *.tmp but keeps one. + createTestFileWithContent(t, tmpDir, ".stashignore", "*.tmp\n!keep_this.tmp\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "keep_this.tmp", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_CommentsAndEmptyLines(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. + createTestFile(t, tmpDir, "video1.mp4") + createTestFile(t, tmpDir, "ignore_me.mp4") + + // Create .stashignore with comments and empty lines. + stashignore := `# This is a comment +ignore_me.mp4 + +# Another comment + +` + createTestFileWithContent(t, tmpDir, ".stashignore", stashignore) + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "video1.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_NestedStashIgnoreFiles(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. 
+ createTestFile(t, tmpDir, "root_video.mp4") + createTestFile(t, tmpDir, "root_ignore.tmp") + createTestDir(t, tmpDir, "subdir") + createTestFile(t, tmpDir, "subdir/sub_video.mp4") + createTestFile(t, tmpDir, "subdir/sub_ignore.log") + createTestFile(t, tmpDir, "subdir/also_tmp.tmp") + + // Root .stashignore excludes *.tmp. + createTestFileWithContent(t, tmpDir, ".stashignore", "*.tmp\n") + + // Subdir .stashignore excludes *.log. + createTestFileWithContent(t, tmpDir, "subdir/.stashignore", "*.log\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + // *.tmp from root should apply everywhere. + // *.log from subdir should only apply in subdir. + expected := []string{ + ".stashignore", + "root_video.mp4", + "subdir", + "subdir/.stashignore", + "subdir/sub_video.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_PathPattern(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. + createTestFile(t, tmpDir, "video1.mp4") + createTestDir(t, tmpDir, "subdir") + createTestFile(t, tmpDir, "subdir/video2.mp4") + createTestFile(t, tmpDir, "subdir/skip_this.mp4") + + // Create .stashignore that excludes a specific path. + createTestFileWithContent(t, tmpDir, ".stashignore", "subdir/skip_this.mp4\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "subdir", + "subdir/video2.mp4", + "video1.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_DoubleStarPattern(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. 
+ createTestFile(t, tmpDir, "video1.mp4") + createTestDir(t, tmpDir, "a") + createTestFile(t, tmpDir, "a/video2.mp4") + createTestDir(t, tmpDir, "a/temp") + createTestFile(t, tmpDir, "a/temp/video3.mp4") + createTestDir(t, tmpDir, "a/b") + createTestDir(t, tmpDir, "a/b/temp") + createTestFile(t, tmpDir, "a/b/temp/video4.mp4") + + // Create .stashignore that excludes temp directories at any level. + createTestFileWithContent(t, tmpDir, ".stashignore", "**/temp/\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "a", + "a/b", + "a/video2.mp4", + "video1.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_LeadingSlashPattern(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. + createTestFile(t, tmpDir, "ignore.mp4") + createTestDir(t, tmpDir, "subdir") + createTestFile(t, tmpDir, "subdir/ignore.mp4") + + // Create .stashignore that excludes only at root level. + createTestFileWithContent(t, tmpDir, ".stashignore", "/ignore.mp4\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + // Only root ignore.mp4 should be excluded. + expected := []string{ + ".stashignore", + "subdir", + "subdir/ignore.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_NoStashIgnoreFile(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files without any .stashignore. + createTestFile(t, tmpDir, "video1.mp4") + createTestFile(t, tmpDir, "video2.mp4") + createTestDir(t, tmpDir, "subdir") + createTestFile(t, tmpDir, "subdir/video3.mp4") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + // All files should be accepted. 
+ expected := []string{ + "subdir", + "subdir/video3.mp4", + "video1.mp4", + "video2.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_HiddenDirectories(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files including hidden directory. + createTestFile(t, tmpDir, "video1.mp4") + createTestDir(t, tmpDir, ".hidden") + createTestFile(t, tmpDir, ".hidden/video2.mp4") + + // Create .stashignore that excludes hidden directories. + createTestFileWithContent(t, tmpDir, ".stashignore", ".*\n!.stashignore\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "video1.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_MultiplePatternsSameLine(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. + createTestFile(t, tmpDir, "video1.mp4") + createTestFile(t, tmpDir, "file.tmp") + createTestFile(t, tmpDir, "file.log") + createTestFile(t, tmpDir, "file.bak") + + // Each pattern should be on its own line. + createTestFileWithContent(t, tmpDir, ".stashignore", "*.tmp\n*.log\n*.bak\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "video1.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_TrailingSpaces(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. + createTestFile(t, tmpDir, "video1.mp4") + createTestFile(t, tmpDir, "ignore_me.mp4") + + // Pattern with trailing spaces (should be trimmed). + createTestFileWithContent(t, tmpDir, ".stashignore", "ignore_me.mp4 \n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "video1.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_EscapedHash(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. 
+ createTestFile(t, tmpDir, "video1.mp4") + createTestFile(t, tmpDir, "#filename.mp4") + + // Escaped hash should match literal # character. + createTestFileWithContent(t, tmpDir, ".stashignore", "\\#filename.mp4\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "video1.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_CaseSensitiveMatching(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files - use distinct names that work on all filesystems. + createTestFile(t, tmpDir, "video_lower.mp4") + createTestFile(t, tmpDir, "VIDEO_UPPER.mp4") + createTestFile(t, tmpDir, "other.avi") + + // Pattern should match exactly (case-sensitive). + createTestFileWithContent(t, tmpDir, ".stashignore", "video_lower.mp4\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + // Only exact match is excluded. + expected := []string{ + ".stashignore", + "VIDEO_UPPER.mp4", + "other.avi", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_ComplexScenario(t *testing.T) { + tmpDir := t.TempDir() + + // Create a complex directory structure. + createTestFile(t, tmpDir, "video1.mp4") + createTestFile(t, tmpDir, "video2.avi") + createTestFile(t, tmpDir, "thumbnail.jpg") + createTestFile(t, tmpDir, "metadata.nfo") + createTestDir(t, tmpDir, "movies") + createTestFile(t, tmpDir, "movies/movie1.mp4") + createTestFile(t, tmpDir, "movies/movie1.nfo") + createTestDir(t, tmpDir, "movies/.thumbnails") + createTestFile(t, tmpDir, "movies/.thumbnails/thumb1.jpg") + createTestDir(t, tmpDir, "temp") + createTestFile(t, tmpDir, "temp/processing.mp4") + createTestDir(t, tmpDir, "backup") + createTestFile(t, tmpDir, "backup/video1.mp4.bak") + + // Complex .stashignore. 
+ stashignore := `# Ignore metadata files +*.nfo + +# Ignore hidden directories +.* +!.stashignore + +# Ignore temp and backup directories +temp/ +backup/ + +# But keep thumbnails in specific location +!movies/.thumbnails/ +` + createTestFileWithContent(t, tmpDir, ".stashignore", stashignore) + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "movies", + "movies/.thumbnails", + "movies/.thumbnails/thumb1.jpg", + "movies/movie1.mp4", + "thumbnail.jpg", + "video1.mp4", + "video2.avi", + } + + assertPathsEqual(t, expected, accepted) +} diff --git a/pkg/file/video/caption.go b/pkg/file/video/caption.go index 43723864f..46317d90c 100644 --- a/pkg/file/video/caption.go +++ b/pkg/file/video/caption.go @@ -90,11 +90,20 @@ type CaptionUpdater interface { UpdateCaptions(ctx context.Context, fileID models.FileID, captions []*models.VideoCaption) error } +// MatchesCaption returns true if the caption file matches the video file based on the filename +func MatchesCaption(videoPath, captionPath string) bool { + captionPrefix := getCaptionPrefix(captionPath) + videoPrefix := strings.TrimSuffix(videoPath, filepath.Ext(videoPath)) + "." 
+ return captionPrefix == videoPrefix +} + // associates captions to scene/s with the same basename -func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manager, fqb models.FileFinder, w CaptionUpdater) { +// returns true if the caption file was matched to a video file and processed, false otherwise +func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manager, fqb models.FileFinder, w CaptionUpdater) bool { captionLang := getCaptionsLangFromPath(captionPath) captionPrefix := getCaptionPrefix(captionPath) + matched := false if err := txn.WithTxn(ctx, txnMgr, func(ctx context.Context) error { var err error files, er := fqb.FindAllByPath(ctx, captionPrefix+"*", true) @@ -117,28 +126,36 @@ func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manag path := f.Base().Path logger.Debugf("Matched captions to file %s", path) + matched = true + captions, er := w.GetCaptions(ctx, fileID) - if er == nil { - fileExt := filepath.Ext(captionPath) - ext := fileExt[1:] - if !IsLangInCaptions(captionLang, ext, captions) { // only update captions if language code is not present - newCaption := &models.VideoCaption{ - LanguageCode: captionLang, - Filename: filepath.Base(captionPath), - CaptionType: ext, - } - captions = append(captions, newCaption) - er = w.UpdateCaptions(ctx, fileID, captions) - if er == nil { - logger.Debugf("Updated captions for file %s. 
Added %s", path, captionLang) - } + if er != nil { + return fmt.Errorf("getting captions for file %s: %w", path, er) + } + + fileExt := filepath.Ext(captionPath) + ext := fileExt[1:] + if !IsLangInCaptions(captionLang, ext, captions) { // only update captions if language code is not present + newCaption := &models.VideoCaption{ + LanguageCode: captionLang, + Filename: filepath.Base(captionPath), + CaptionType: ext, } + captions = append(captions, newCaption) + er = w.UpdateCaptions(ctx, fileID, captions) + if er != nil { + return fmt.Errorf("updating captions for file %s: %w", path, er) + } + + logger.Debugf("Updated captions for file %s. Added %s", path, captionLang) } } return err }); err != nil { logger.Error(err.Error()) } + + return matched } // CleanCaptions removes non existent/accessible language codes from captions diff --git a/pkg/file/walk.go b/pkg/file/walk.go index 3c6a157b7..bd33f42c3 100644 --- a/pkg/file/walk.go +++ b/pkg/file/walk.go @@ -81,8 +81,8 @@ func walkSym(f models.FS, filename string, linkDirname string, walkFn fs.WalkDir return fsWalk(f, filename, symWalkFunc) } -// symWalk extends filepath.Walk to also follow symlinks -func symWalk(fs models.FS, path string, walkFn fs.WalkDirFunc) error { +// SymWalk extends filepath.Walk to also follow symlinks +func SymWalk(fs models.FS, path string, walkFn fs.WalkDirFunc) error { return walkSym(fs, path, path, walkFn) } diff --git a/pkg/file/zip.go b/pkg/file/zip.go index 4df2453dc..6d00c7e35 100644 --- a/pkg/file/zip.go +++ b/pkg/file/zip.go @@ -18,7 +18,7 @@ import ( ) var ( - errNotReaderAt = errors.New("not a ReaderAt") + ErrNotReaderAt = errors.New("invalid reader: does not implement io.ReaderAt") errZipFSOpenZip = errors.New("cannot open zip file inside zip file") ) @@ -38,7 +38,7 @@ func newZipFS(fs models.FS, path string, size int64) (*zipFS, error) { asReaderAt, _ := reader.(io.ReaderAt) if asReaderAt == nil { reader.Close() - return nil, errNotReaderAt + return nil, ErrNotReaderAt } 
zipReader, err := zip.NewReader(asReaderAt, size) @@ -99,7 +99,9 @@ func (f *zipFS) rel(name string) (string, error) { relName, err := filepath.Rel(f.zipPath, name) if err != nil { - return "", fmt.Errorf("internal error getting relative path: %w", err) + // if the path is not relative to the zip path, then it's not found in the zip file, + // so treat this as a file not found + return "", fs.ErrNotExist } // convert relName to use slash, since zip files do so regardless diff --git a/pkg/fsutil/file.go b/pkg/fsutil/file.go index 1d0c0c473..05a127129 100644 --- a/pkg/fsutil/file.go +++ b/pkg/fsutil/file.go @@ -148,7 +148,7 @@ func Touch(path string) error { var ( replaceCharsRE = regexp.MustCompile(`[&=\\/:*"?_ ]`) - removeCharsRE = regexp.MustCompile(`[^[:alnum:]-.]`) + removeCharsRE = regexp.MustCompile(`[^\p{L}\p{N}\-.]`) multiHyphenRE = regexp.MustCompile(`\-+`) ) diff --git a/pkg/fsutil/file_test.go b/pkg/fsutil/file_test.go index 4d84f8a47..df1077df2 100644 --- a/pkg/fsutil/file_test.go +++ b/pkg/fsutil/file_test.go @@ -15,6 +15,9 @@ func TestSanitiseBasename(t *testing.T) { {"multi-hyphen", `hyphened--name`, "hyphened-name-2da2a58f"}, {"replaced characters", `a&b=c\d/:e*"f?_ g`, "a-b-c-d-e-f-g-ffca6fb0"}, {"removed characters", `foo!!bar@@and, more`, "foobarand-more-7cee02ab"}, + {"unicode cjk", `テスト`, "テスト-63b560db"}, + {"unicode korean", `시험`, "시험-3fcc7beb"}, + {"mixed unicode", `Test テスト`, "Test-テスト-366aff1e"}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { diff --git a/pkg/fsutil/fs.go b/pkg/fsutil/fs.go index 2b5c37f62..032bec53c 100644 --- a/pkg/fsutil/fs.go +++ b/pkg/fsutil/fs.go @@ -5,7 +5,6 @@ import ( "fmt" "os" "path/filepath" - "strings" "unicode" ) @@ -27,23 +26,15 @@ func IsFsPathCaseSensitive(path string) (bool, error) { if err != nil { // cannot be case flipped return false, err } - i := strings.LastIndex(path, base) - if i < 0 { // shouldn't happen - return false, fmt.Errorf("could not case flip path %s", path) - } - flipped 
:= []byte(path) - for _, c := range []byte(fBase) { // replace base of path with the flipped one ( we need to flip the base or last dir part ) - flipped[i] = c - i++ - } + flippedPath := filepath.Join(filepath.Dir(path), fBase) - fiCase, err := os.Stat(string(flipped)) + fiCase, err := os.Stat(flippedPath) if err != nil { // cannot stat the case flipped path return true, nil // fs of path should be case sensitive } - if fiCase.ModTime() == fi.ModTime() { // file path exists and is the same + if fiCase.ModTime().Equal(fi.ModTime()) { // file path exists and is the same return false, nil // fs of path is not case sensitive } return false, fmt.Errorf("can not determine case sensitivity of path %s", path) diff --git a/pkg/fsutil/fs_test.go b/pkg/fsutil/fs_test.go new file mode 100644 index 000000000..155e76ba5 --- /dev/null +++ b/pkg/fsutil/fs_test.go @@ -0,0 +1,55 @@ +package fsutil + +import ( + "os" + "path/filepath" + "testing" +) + +func TestIsFsPathCaseSensitive_UnicodeByteLength(t *testing.T) { + // Ⱥ (U+023A) is 2 bytes in UTF-8 + // Its lowercase ⱥ (U+2C65) is 3 bytes in UTF-8 + + dir := t.TempDir() + makeDir := func(path string) { + // Create the directory so os.Stat succeeds + if err := os.Mkdir(path, 0755); err != nil { + t.Fatal(err) + } + } + + path := filepath.Join(dir, "Ⱥtest") + makeDir(path) + + // ensure the test does not panic due to byte length differences in the case flipped path + _, err := IsFsPathCaseSensitive(path) + if err != nil { + t.Fatal(err) + } + + // no guarantee about case sensitivity of the fs running the tests, + // so we just want to ensure the function works and does not panic + // assert.True(t, r, "expected fs to be case sensitive") + + // test regular ASCII paths still work + path2 := filepath.Join(dir, "Test") + makeDir(path2) + + _, err = IsFsPathCaseSensitive(path2) + if err != nil { + t.Fatal(err) + } + + // assert.True(t, r, "expected fs to be case sensitive") + + // Ensure that subfolders of a folder with multi-byte chars 
do not cause a panic
+ } else if destroyFileEntry { + // destroy file DB entry without deleting filesystem file + const deleteFileFromFS = false + if err := destroyer.DestroyZip(ctx, f, nil, deleteFileFromFS); err != nil { + return nil, err + } } } diff --git a/pkg/gallery/import.go b/pkg/gallery/import.go index 0068b3f1c..e33297bdb 100644 --- a/pkg/gallery/import.go +++ b/pkg/gallery/import.go @@ -28,8 +28,9 @@ type Importer struct { Input jsonschema.Gallery MissingRefBehaviour models.ImportMissingRefEnum - ID int - gallery models.Gallery + ID int + gallery models.Gallery + customFields map[string]interface{} } func (i *Importer) PreImport(ctx context.Context) error { @@ -51,6 +52,8 @@ func (i *Importer) PreImport(ctx context.Context) error { return err } + i.customFields = i.Input.CustomFields + return nil } @@ -126,7 +129,7 @@ func (i *Importer) populateStudio(ctx context.Context) error { } func (i *Importer) createStudio(ctx context.Context, name string) (int, error) { - newStudio := models.NewStudio() + newStudio := models.NewCreateStudioInput() newStudio.Name = name err := i.StudioWriter.Create(ctx, &newStudio) @@ -249,7 +252,9 @@ func (i *Importer) createTags(ctx context.Context, names []string) ([]*models.Ta newTag := models.NewTag() newTag.Name = name - err := i.TagWriter.Create(ctx, &newTag) + err := i.TagWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return nil, err } @@ -354,7 +359,11 @@ func (i *Importer) Create(ctx context.Context) (*int, error) { for _, f := range i.gallery.Files.List() { fileIDs = append(fileIDs, f.Base().ID) } - err := i.ReaderWriter.Create(ctx, &i.gallery, fileIDs) + err := i.ReaderWriter.Create(ctx, &models.CreateGalleryInput{ + Gallery: &i.gallery, + FileIDs: fileIDs, + CustomFields: i.customFields, + }) if err != nil { return nil, fmt.Errorf("error creating gallery: %v", err) } @@ -366,7 +375,12 @@ func (i *Importer) Create(ctx context.Context) (*int, error) { func (i *Importer) Update(ctx context.Context, id int) 
error { gallery := i.gallery gallery.ID = id - err := i.ReaderWriter.Update(ctx, &gallery) + err := i.ReaderWriter.Update(ctx, &models.UpdateGalleryInput{ + Gallery: &gallery, + CustomFields: models.CustomFieldsInput{ + Full: i.customFields, + }, + }) if err != nil { return fmt.Errorf("error updating existing gallery: %v", err) } diff --git a/pkg/gallery/import_test.go b/pkg/gallery/import_test.go index b64f80d8f..932f84d48 100644 --- a/pkg/gallery/import_test.go +++ b/pkg/gallery/import_test.go @@ -115,9 +115,9 @@ func TestImporterPreImportWithMissingStudio(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3) - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) - s.ID = existingStudioID + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Run(func(args mock.Arguments) { + s := args.Get(1).(*models.CreateStudioInput) + s.Studio.ID = existingStudioID }).Return(nil) err := i.PreImport(testCtx) @@ -147,7 +147,7 @@ func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once() - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Return(errors.New("Create error")) + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -289,9 +289,9 @@ func TestImporterPreImportWithMissingTag(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = existingTagID + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Run(func(args mock.Arguments) { + t := 
args.Get(1).(*models.CreateTagInput) + t.Tag.ID = existingTagID }).Return(nil) err := i.PreImport(testCtx) @@ -323,7 +323,7 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once() - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) diff --git a/pkg/gallery/scan.go b/pkg/gallery/scan.go index 9d0313b17..7689bb9b6 100644 --- a/pkg/gallery/scan.go +++ b/pkg/gallery/scan.go @@ -17,14 +17,13 @@ type ScanCreatorUpdater interface { FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Gallery, error) GetFiles(ctx context.Context, relatedID int) ([]models.File, error) - Create(ctx context.Context, newGallery *models.Gallery, fileIDs []models.FileID) error + models.GalleryCreator UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error) AddFileID(ctx context.Context, id int, fileID models.FileID) error } type ScanSceneFinderUpdater interface { FindByPath(ctx context.Context, p string) ([]*models.Scene, error) - Update(ctx context.Context, updatedScene *models.Scene) error AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error } @@ -80,7 +79,10 @@ func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models. logger.Infof("%s doesn't exist. 
Creating new gallery...", f.Base().Path) - if err := h.CreatorUpdater.Create(ctx, &newGallery, []models.FileID{baseFile.ID}); err != nil { + if err := h.CreatorUpdater.Create(ctx, &models.CreateGalleryInput{ + Gallery: &newGallery, + FileIDs: []models.FileID{baseFile.ID}, + }); err != nil { return fmt.Errorf("creating new gallery: %w", err) } @@ -132,13 +134,14 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. if err := h.CreatorUpdater.AddFileID(ctx, i.ID, f.Base().ID); err != nil { return fmt.Errorf("adding file to gallery: %w", err) } - // update updated_at time - if _, err := h.CreatorUpdater.UpdatePartial(ctx, i.ID, models.NewGalleryPartial()); err != nil { - return fmt.Errorf("updating gallery: %w", err) - } } if !found || updateExisting { + // update updated_at time when file association or content changes + if _, err := h.CreatorUpdater.UpdatePartial(ctx, i.ID, models.NewGalleryPartial()); err != nil { + return fmt.Errorf("updating gallery: %w", err) + } + h.PluginCache.RegisterPostHooks(ctx, i.ID, hook.GalleryUpdatePost, nil, nil) } } diff --git a/pkg/gallery/scan_test.go b/pkg/gallery/scan_test.go new file mode 100644 index 000000000..4a89206e3 --- /dev/null +++ b/pkg/gallery/scan_test.go @@ -0,0 +1,108 @@ +package gallery + +import ( + "context" + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/plugin" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestAssociateExisting_UpdatePartialOnContentChange(t *testing.T) { + const ( + testGalleryID = 1 + testFileID = 100 + ) + + existingFile := &models.BaseFile{ID: models.FileID(testFileID), Path: "test.zip"} + + makeGallery := func() *models.Gallery { + return &models.Gallery{ + ID: testGalleryID, + Files: models.NewRelatedFiles([]models.File{existingFile}), + } + } + + tests := []struct { + name string + updateExisting bool + expectUpdate bool + }{ + { + 
name: "calls UpdatePartial when file content changed", + updateExisting: true, + expectUpdate: true, + }, + { + name: "skips UpdatePartial when file unchanged and already associated", + updateExisting: false, + expectUpdate: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + db := mocks.NewDatabase() + db.Gallery.On("GetFiles", mock.Anything, testGalleryID).Return([]models.File{existingFile}, nil) + + if tt.expectUpdate { + db.Gallery.On("UpdatePartial", mock.Anything, testGalleryID, mock.Anything). + Return(&models.Gallery{ID: testGalleryID}, nil) + } + + h := &ScanHandler{ + CreatorUpdater: db.Gallery, + PluginCache: &plugin.Cache{}, + } + + db.WithTxnCtx(func(ctx context.Context) { + err := h.associateExisting(ctx, []*models.Gallery{makeGallery()}, existingFile, tt.updateExisting) + assert.NoError(t, err) + }) + + if tt.expectUpdate { + db.Gallery.AssertCalled(t, "UpdatePartial", mock.Anything, testGalleryID, mock.Anything) + } else { + db.Gallery.AssertNotCalled(t, "UpdatePartial", mock.Anything, mock.Anything, mock.Anything) + } + }) + } +} + +func TestAssociateExisting_UpdatePartialOnNewFile(t *testing.T) { + const ( + testGalleryID = 1 + existFileID = 100 + newFileID = 200 + ) + + existingFile := &models.BaseFile{ID: models.FileID(existFileID), Path: "existing.zip"} + newFile := &models.BaseFile{ID: models.FileID(newFileID), Path: "new.zip"} + + gallery := &models.Gallery{ + ID: testGalleryID, + Files: models.NewRelatedFiles([]models.File{existingFile}), + } + + db := mocks.NewDatabase() + db.Gallery.On("GetFiles", mock.Anything, testGalleryID).Return([]models.File{existingFile}, nil) + db.Gallery.On("AddFileID", mock.Anything, testGalleryID, models.FileID(newFileID)).Return(nil) + db.Gallery.On("UpdatePartial", mock.Anything, testGalleryID, mock.Anything). 
+ Return(&models.Gallery{ID: testGalleryID}, nil) + + h := &ScanHandler{ + CreatorUpdater: db.Gallery, + PluginCache: &plugin.Cache{}, + } + + db.WithTxnCtx(func(ctx context.Context) { + err := h.associateExisting(ctx, []*models.Gallery{gallery}, newFile, false) + assert.NoError(t, err) + }) + + db.Gallery.AssertCalled(t, "AddFileID", mock.Anything, testGalleryID, models.FileID(newFileID)) + db.Gallery.AssertCalled(t, "UpdatePartial", mock.Anything, testGalleryID, mock.Anything) +} diff --git a/pkg/gallery/service.go b/pkg/gallery/service.go index 62604e0c5..5b2678480 100644 --- a/pkg/gallery/service.go +++ b/pkg/gallery/service.go @@ -16,7 +16,7 @@ type ImageFinder interface { } type ImageService interface { - Destroy(ctx context.Context, i *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error + Destroy(ctx context.Context, i *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error DestroyZipImages(ctx context.Context, zipFile models.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) DestroyFolderImages(ctx context.Context, folderID models.FolderID, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error) } diff --git a/pkg/group/create.go b/pkg/group/create.go index 56d6b7a4e..9cc578b23 100644 --- a/pkg/group/create.go +++ b/pkg/group/create.go @@ -12,27 +12,37 @@ var ( ErrHierarchyLoop = errors.New("a group cannot be contained by one of its subgroups") ) -func (s *Service) Create(ctx context.Context, group *models.Group, frontimageData []byte, backimageData []byte) error { +func (s *Service) Create(ctx context.Context, input *models.CreateGroupInput) error { r := s.Repository + group := input.Group if err := s.validateCreate(ctx, group); err != nil { return err } - err := r.Create(ctx, group) + err := r.Create(ctx, input.Group) if err != nil { return err } - // update image table - if len(frontimageData) > 0 { - if 
err := r.UpdateFrontImage(ctx, group.ID, frontimageData); err != nil { + // set custom fields + if len(input.CustomFields) > 0 { + if err := r.SetCustomFields(ctx, group.ID, models.CustomFieldsInput{ + Full: input.CustomFields, + }); err != nil { return err } } - if len(backimageData) > 0 { - if err := r.UpdateBackImage(ctx, group.ID, backimageData); err != nil { + // update image table + if len(input.FrontImageData) > 0 { + if err := r.UpdateFrontImage(ctx, group.ID, input.FrontImageData); err != nil { + return err + } + } + + if len(input.BackImageData) > 0 { + if err := r.UpdateBackImage(ctx, group.ID, input.BackImageData); err != nil { return err } } diff --git a/pkg/group/export.go b/pkg/group/export.go index 418ce7bed..0a56fbdbb 100644 --- a/pkg/group/export.go +++ b/pkg/group/export.go @@ -11,61 +11,67 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -type ImageGetter interface { - GetFrontImage(ctx context.Context, movieID int) ([]byte, error) - GetBackImage(ctx context.Context, movieID int) ([]byte, error) +type GroupExportReader interface { + GetFrontImage(ctx context.Context, groupID int) ([]byte, error) + GetBackImage(ctx context.Context, groupID int) ([]byte, error) + GetCustomFields(ctx context.Context, groupID int) (map[string]interface{}, error) } -// ToJSON converts a Movie into its JSON equivalent. -func ToJSON(ctx context.Context, reader ImageGetter, studioReader models.StudioGetter, movie *models.Group) (*jsonschema.Group, error) { - newMovieJSON := jsonschema.Group{ - Name: movie.Name, - Aliases: movie.Aliases, - Director: movie.Director, - Synopsis: movie.Synopsis, - URLs: movie.URLs.List(), - CreatedAt: json.JSONTime{Time: movie.CreatedAt}, - UpdatedAt: json.JSONTime{Time: movie.UpdatedAt}, +// ToJSON converts a Group into its JSON equivalent. 
+func ToJSON(ctx context.Context, reader GroupExportReader, studioReader models.StudioGetter, group *models.Group) (*jsonschema.Group, error) { + newGroupJSON := jsonschema.Group{ + Name: group.Name, + Aliases: group.Aliases, + Director: group.Director, + Synopsis: group.Synopsis, + URLs: group.URLs.List(), + CreatedAt: json.JSONTime{Time: group.CreatedAt}, + UpdatedAt: json.JSONTime{Time: group.UpdatedAt}, } - if movie.Date != nil { - newMovieJSON.Date = movie.Date.String() + if group.Date != nil { + newGroupJSON.Date = group.Date.String() } - if movie.Rating != nil { - newMovieJSON.Rating = *movie.Rating + if group.Rating != nil { + newGroupJSON.Rating = *group.Rating } - if movie.Duration != nil { - newMovieJSON.Duration = *movie.Duration + if group.Duration != nil { + newGroupJSON.Duration = *group.Duration } - if movie.StudioID != nil { - studio, err := studioReader.Find(ctx, *movie.StudioID) + if group.StudioID != nil { + studio, err := studioReader.Find(ctx, *group.StudioID) if err != nil { return nil, fmt.Errorf("error getting movie studio: %v", err) } if studio != nil { - newMovieJSON.Studio = studio.Name + newGroupJSON.Studio = studio.Name } } - frontImage, err := reader.GetFrontImage(ctx, movie.ID) + frontImage, err := reader.GetFrontImage(ctx, group.ID) if err != nil { logger.Errorf("Error getting movie front image: %v", err) } if len(frontImage) > 0 { - newMovieJSON.FrontImage = utils.GetBase64StringFromData(frontImage) + newGroupJSON.FrontImage = utils.GetBase64StringFromData(frontImage) } - backImage, err := reader.GetBackImage(ctx, movie.ID) + backImage, err := reader.GetBackImage(ctx, group.ID) if err != nil { logger.Errorf("Error getting movie back image: %v", err) } if len(backImage) > 0 { - newMovieJSON.BackImage = utils.GetBase64StringFromData(backImage) + newGroupJSON.BackImage = utils.GetBase64StringFromData(backImage) } - return &newMovieJSON, nil + newGroupJSON.CustomFields, err = reader.GetCustomFields(ctx, group.ID) + if err != nil { + 
return nil, fmt.Errorf("getting group custom fields: %v", err) + } + + return &newGroupJSON, nil } diff --git a/pkg/group/export_test.go b/pkg/group/export_test.go index 5f8d9f7dc..bff50de5e 100644 --- a/pkg/group/export_test.go +++ b/pkg/group/export_test.go @@ -8,24 +8,26 @@ import ( "github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/models/mocks" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" "testing" "time" ) const ( - movieID = 1 - emptyID = 2 - errFrontImageID = 3 - errBackImageID = 4 - errStudioMovieID = 5 - missingStudioMovieID = 6 + movieID = iota + 1 + emptyID + errFrontImageID + errBackImageID + errStudioMovieID + missingStudioMovieID + errCustomFieldsID ) const ( - studioID = 1 - missingStudioID = 2 - errStudioID = 3 + studioID = iota + 1 + missingStudioID + errStudioID ) const movieName = "testMovie" @@ -51,6 +53,11 @@ const ( var ( frontImageBytes = []byte("frontImageBytes") backImageBytes = []byte("backImageBytes") + + emptyCustomFields = make(map[string]interface{}) + customFields = map[string]interface{}{ + "customField1": "customValue1", + } ) var movieStudio models.Studio = models.Studio{ @@ -88,7 +95,7 @@ func createEmptyMovie(id int) models.Group { } } -func createFullJSONMovie(studio, frontImage, backImage string) *jsonschema.Group { +func createFullJSONMovie(studio, frontImage, backImage string, customFields map[string]interface{}) *jsonschema.Group { return &jsonschema.Group{ Name: movieName, Aliases: movieAliases, @@ -107,6 +114,7 @@ func createFullJSONMovie(studio, frontImage, backImage string) *jsonschema.Group UpdatedAt: json.JSONTime{ Time: updateTime, }, + CustomFields: customFields, } } @@ -119,13 +127,15 @@ func createEmptyJSONMovie() *jsonschema.Group { UpdatedAt: json.JSONTime{ Time: updateTime, }, + CustomFields: emptyCustomFields, } } type testScenario struct { - movie models.Group - expected *jsonschema.Group - err bool + movie models.Group + customFields 
map[string]interface{} + expected *jsonschema.Group + err bool } var scenarios []testScenario @@ -134,36 +144,48 @@ func initTestTable() { scenarios = []testScenario{ { createFullMovie(movieID, studioID), - createFullJSONMovie(studioName, frontImage, backImage), + customFields, + createFullJSONMovie(studioName, frontImage, backImage, customFields), false, }, { createEmptyMovie(emptyID), + emptyCustomFields, createEmptyJSONMovie(), false, }, { createFullMovie(errFrontImageID, studioID), - createFullJSONMovie(studioName, "", backImage), + emptyCustomFields, + createFullJSONMovie(studioName, "", backImage, emptyCustomFields), // failure to get front image should not cause error false, }, { createFullMovie(errBackImageID, studioID), - createFullJSONMovie(studioName, frontImage, ""), + emptyCustomFields, + createFullJSONMovie(studioName, frontImage, "", emptyCustomFields), // failure to get back image should not cause error false, }, { createFullMovie(errStudioMovieID, errStudioID), + emptyCustomFields, nil, true, }, { createFullMovie(missingStudioMovieID, missingStudioID), - createFullJSONMovie("", frontImage, backImage), + emptyCustomFields, + createFullJSONMovie("", frontImage, backImage, emptyCustomFields), false, }, + { + createFullMovie(errCustomFieldsID, studioID), + customFields, + nil, + true, + }, } } @@ -179,6 +201,7 @@ func TestToJSON(t *testing.T) { db.Group.On("GetFrontImage", testCtx, emptyID).Return(nil, nil).Once().Maybe() db.Group.On("GetFrontImage", testCtx, errFrontImageID).Return(nil, imageErr).Once() db.Group.On("GetFrontImage", testCtx, errBackImageID).Return(frontImageBytes, nil).Once() + db.Group.On("GetFrontImage", testCtx, errCustomFieldsID).Return(nil, nil).Once() db.Group.On("GetBackImage", testCtx, movieID).Return(backImageBytes, nil).Once() db.Group.On("GetBackImage", testCtx, missingStudioMovieID).Return(backImageBytes, nil).Once() @@ -186,6 +209,11 @@ func TestToJSON(t *testing.T) { db.Group.On("GetBackImage", testCtx, 
errBackImageID).Return(nil, imageErr).Once() db.Group.On("GetBackImage", testCtx, errFrontImageID).Return(backImageBytes, nil).Maybe() db.Group.On("GetBackImage", testCtx, errStudioMovieID).Return(backImageBytes, nil).Maybe() + db.Group.On("GetBackImage", testCtx, errCustomFieldsID).Return(nil, nil).Once() + + db.Group.On("GetCustomFields", testCtx, movieID).Return(customFields, nil).Once() + db.Group.On("GetCustomFields", testCtx, errCustomFieldsID).Return(nil, errors.New("error getting custom fields")).Once() + db.Group.On("GetCustomFields", testCtx, mock.Anything).Return(emptyCustomFields, nil).Times(4) studioErr := errors.New("error getting studio") diff --git a/pkg/group/import.go b/pkg/group/import.go index 3fc7db8f1..1a332bac2 100644 --- a/pkg/group/import.go +++ b/pkg/group/import.go @@ -14,6 +14,7 @@ import ( type ImporterReaderWriter interface { models.GroupCreatorUpdater + models.CustomFieldsWriter FindByName(ctx context.Context, name string, nocase bool) (*models.Group, error) } @@ -126,7 +127,9 @@ func createTags(ctx context.Context, tagWriter models.TagFinderCreator, names [] newTag := models.NewTag() newTag.Name = name - err := tagWriter.Create(ctx, &newTag) + err := tagWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return nil, err } @@ -203,7 +206,7 @@ func (i *Importer) populateStudio(ctx context.Context) error { } func (i *Importer) createStudio(ctx context.Context, name string) (int, error) { - newStudio := models.NewStudio() + newStudio := models.NewCreateStudioInput() newStudio.Name = name err := i.StudioWriter.Create(ctx, &newStudio) @@ -231,6 +234,14 @@ func (i *Importer) PostImport(ctx context.Context, id int) error { } } + if len(i.Input.CustomFields) > 0 { + if err := i.ReaderWriter.SetCustomFields(ctx, id, models.CustomFieldsInput{ + Full: i.Input.CustomFields, + }); err != nil { + return fmt.Errorf("error setting custom fields: %v", err) + } + } + if len(i.frontImageData) > 0 { if err := 
i.ReaderWriter.UpdateFrontImage(ctx, id, i.frontImageData); err != nil { return fmt.Errorf("error setting group front image: %v", err) diff --git a/pkg/group/import_test.go b/pkg/group/import_test.go index c4ca47442..006c91327 100644 --- a/pkg/group/import_test.go +++ b/pkg/group/import_test.go @@ -121,9 +121,9 @@ func TestImporterPreImportWithMissingStudio(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3) - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) - s.ID = existingStudioID + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Run(func(args mock.Arguments) { + s := args.Get(1).(*models.CreateStudioInput) + s.Studio.ID = existingStudioID }).Return(nil) err := i.PreImport(testCtx) @@ -156,7 +156,7 @@ func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once() - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Return(errors.New("Create error")) + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -212,9 +212,9 @@ func TestImporterPreImportWithMissingTag(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = existingTagID + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = existingTagID }).Return(nil) err := i.PreImport(testCtx) @@ -247,7 +247,7 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { } db.Tag.On("FindByNames", testCtx, 
[]string{missingTagName}, false).Return(nil, nil).Once() - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -259,17 +259,29 @@ func TestImporterPostImport(t *testing.T) { db := mocks.NewDatabase() i := Importer{ - ReaderWriter: db.Group, - StudioWriter: db.Studio, + ReaderWriter: db.Group, + StudioWriter: db.Studio, + Input: jsonschema.Group{ + CustomFields: customFields, + }, frontImageData: frontImageBytes, backImageData: backImageBytes, } updateMovieImageErr := errors.New("UpdateImages error") + customFieldsErr := errors.New("SetCustomFields error") + + customFieldsInput := models.CustomFieldsInput{ + Full: customFields, + } db.Group.On("UpdateFrontImage", testCtx, movieID, frontImageBytes).Return(nil).Once() - db.Group.On("UpdateBackImage", testCtx, movieID, backImageBytes).Return(nil).Once() db.Group.On("UpdateFrontImage", testCtx, errImageID, frontImageBytes).Return(updateMovieImageErr).Once() + db.Group.On("UpdateBackImage", testCtx, movieID, backImageBytes).Return(nil).Once() + + db.Group.On("SetCustomFields", testCtx, movieID, customFieldsInput).Return(nil).Once() + db.Group.On("SetCustomFields", testCtx, errImageID, customFieldsInput).Return(nil).Once() + db.Group.On("SetCustomFields", testCtx, errCustomFieldsID, customFieldsInput).Return(customFieldsErr).Once() err := i.PostImport(testCtx, movieID) assert.Nil(t, err) @@ -277,6 +289,9 @@ func TestImporterPostImport(t *testing.T) { err = i.PostImport(testCtx, errImageID) assert.NotNil(t, err) + err = i.PostImport(testCtx, errCustomFieldsID) + assert.NotNil(t, err) + db.AssertExpectations(t) } diff --git a/pkg/group/service.go b/pkg/group/service.go index ff6e03541..37094665a 100644 --- a/pkg/group/service.go +++ b/pkg/group/service.go @@ -10,6 +10,7 @@ type CreatorUpdater interface { 
models.GroupGetter models.GroupCreator models.GroupUpdater + models.CustomFieldsWriter models.ContainingGroupLoader models.SubGroupLoader diff --git a/pkg/hash/imagephash/phash.go b/pkg/hash/imagephash/phash.go new file mode 100644 index 000000000..0af5adec9 --- /dev/null +++ b/pkg/hash/imagephash/phash.go @@ -0,0 +1,84 @@ +package imagephash + +import ( + "bytes" + "context" + "errors" + "fmt" + "image" + + "github.com/corona10/goimagehash" + "github.com/stashapp/stash/pkg/ffmpeg" + "github.com/stashapp/stash/pkg/ffmpeg/transcoder" + "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" +) + +// Generate computes a perceptual hash for an image file. +func Generate(encoder *ffmpeg.FFMpeg, imageFile *models.ImageFile) (*uint64, error) { + img, err := loadImage(encoder, imageFile) + if err != nil { + return nil, fmt.Errorf("loading image: %w", err) + } + + hash, err := goimagehash.PerceptionHash(img) + if err != nil { + return nil, fmt.Errorf("computing phash from image: %w", err) + } + + hashValue := hash.GetHash() + return &hashValue, nil +} + +// loadImage loads an image from disk and decodes it. +// Where Go has no built-in decoder for a specific format, ffmpeg is used to convert to BMP first. 
+func loadImage(encoder *ffmpeg.FFMpeg, imageFile *models.ImageFile) (image.Image, error) { + // try to load with Go's built-in decoders first for better performance + reader, err := imageFile.Open(&file.OsFS{}) + if err != nil { + return nil, err + } + defer reader.Close() + + buf := new(bytes.Buffer) + if _, err := buf.ReadFrom(reader); err != nil { + return nil, err + } + + img, _, err := image.Decode(buf) + if errors.Is(err, image.ErrFormat) { + // try ffmpeg as a fallback for unsupported formats + // ffmpeg cannot read files inside zips + if imageFile.Base().ZipFileID != nil { + return nil, fmt.Errorf("ffmpeg fallback unsupported for images in zip files") + } + return loadImageFFmpeg(encoder, imageFile.Path) + } + + if err != nil { + return nil, fmt.Errorf("decoding image: %w", err) + } + + return img, nil +} + +// loadImageFFmpeg uses ffmpeg to convert an image to BMP and then decodes it. +func loadImageFFmpeg(encoder *ffmpeg.FFMpeg, path string) (image.Image, error) { + options := transcoder.ScreenshotOptions{ + OutputPath: "-", + OutputType: transcoder.ScreenshotOutputTypeBMP, + } + + args := transcoder.ScreenshotTime(path, 0, options) + data, err := encoder.GenerateOutput(context.Background(), args, nil) + if err != nil { + return nil, fmt.Errorf("converting image with ffmpeg: %w", err) + } + + img, _, err := image.Decode(bytes.NewReader(data)) + if err != nil { + return nil, fmt.Errorf("decoding ffmpeg output: %w", err) + } + + return img, nil +} diff --git a/pkg/image/delete.go b/pkg/image/delete.go index aa3a9c1c8..28bb54a59 100644 --- a/pkg/image/delete.go +++ b/pkg/image/delete.go @@ -37,8 +37,8 @@ func (d *FileDeleter) MarkGeneratedFiles(image *models.Image) error { } // Destroy destroys an image, optionally marking the file and generated files for deletion. 
-func (s *Service) Destroy(ctx context.Context, i *models.Image, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error { - return s.destroyImage(ctx, i, fileDeleter, deleteGenerated, deleteFile) +func (s *Service) Destroy(ctx context.Context, i *models.Image, fileDeleter *FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error { + return s.destroyImage(ctx, i, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry) } // DestroyZipImages destroys all images in zip, optionally marking the files and generated files for deletion. @@ -75,7 +75,8 @@ func (s *Service) DestroyZipImages(ctx context.Context, zipFile models.File, fil } const deleteFileInZip = false - if err := s.destroyImage(ctx, img, fileDeleter, deleteGenerated, deleteFileInZip); err != nil { + const destroyFileEntry = false + if err := s.destroyImage(ctx, img, fileDeleter, deleteGenerated, deleteFileInZip, destroyFileEntry); err != nil { return nil, err } @@ -135,7 +136,8 @@ func (s *Service) DestroyFolderImages(ctx context.Context, folderID models.Folde continue } - if err := s.Destroy(ctx, img, fileDeleter, deleteGenerated, deleteFile); err != nil { + const destroyFileEntry = false + if err := s.Destroy(ctx, img, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil { return nil, err } @@ -146,11 +148,15 @@ func (s *Service) DestroyFolderImages(ctx context.Context, folderID models.Folde } // Destroy destroys an image, optionally marking the file and generated files for deletion. 
-func (s *Service) destroyImage(ctx context.Context, i *models.Image, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error { +func (s *Service) destroyImage(ctx context.Context, i *models.Image, fileDeleter *FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error { if deleteFile { if err := s.deleteFiles(ctx, i, fileDeleter); err != nil { return err } + } else if destroyFileEntry { + if err := s.destroyFileEntries(ctx, i); err != nil { + return err + } } if deleteGenerated { @@ -192,3 +198,35 @@ func (s *Service) deleteFiles(ctx context.Context, i *models.Image, fileDeleter return nil } + +// destroyFileEntries destroys file entries from the database without deleting +// the files from the filesystem +func (s *Service) destroyFileEntries(ctx context.Context, i *models.Image) error { + if err := i.LoadFiles(ctx, s.Repository); err != nil { + return err + } + + for _, f := range i.Files.List() { + // only destroy file entries where there is no other associated image + otherImages, err := s.Repository.FindByFileID(ctx, f.Base().ID) + if err != nil { + return err + } + + if len(otherImages) > 1 { + // other image associated, don't remove + continue + } + + // don't destroy files in zip archives + if f.Base().ZipFileID == nil { + const deleteFile = false + logger.Info("Destroying image file entry: ", f.Base().Path) + if err := file.Destroy(ctx, s.File, f, nil, deleteFile); err != nil { + return err + } + } + } + + return nil +} diff --git a/pkg/image/export.go b/pkg/image/export.go index fdba6165c..eb5d5da27 100644 --- a/pkg/image/export.go +++ b/pkg/image/export.go @@ -2,16 +2,21 @@ package image import ( "context" + "fmt" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" ) +type ExportReader interface { + models.CustomFieldsReader +} + // ToBasicJSON converts a image object into its JSON object equivalent. 
It // does not convert the relationships to other objects, with the exception // of cover image. -func ToBasicJSON(image *models.Image) *jsonschema.Image { +func ToBasicJSON(ctx context.Context, reader ExportReader, image *models.Image) (*jsonschema.Image, error) { newImageJSON := jsonschema.Image{ Title: image.Title, Code: image.Code, @@ -33,11 +38,17 @@ func ToBasicJSON(image *models.Image) *jsonschema.Image { newImageJSON.Organized = image.Organized newImageJSON.OCounter = image.OCounter + var err error + newImageJSON.CustomFields, err = reader.GetCustomFields(ctx, image.ID) + if err != nil { + return nil, fmt.Errorf("getting image custom fields: %v", err) + } + for _, f := range image.Files.List() { newImageJSON.Files = append(newImageJSON.Files, f.Base().Path) } - return &newImageJSON + return &newImageJSON, nil } // GetStudioName returns the name of the provided image's studio. It returns an diff --git a/pkg/image/export_test.go b/pkg/image/export_test.go index 6adaf1d33..d0d36afbb 100644 --- a/pkg/image/export_test.go +++ b/pkg/image/export_test.go @@ -29,6 +29,10 @@ var ( dateObj, _ = models.ParseDate(date) organized = true ocounter = 2 + + customFields = map[string]interface{}{ + "customField1": "customValue1", + } ) const ( @@ -60,7 +64,7 @@ func createFullImage(id int) models.Image { } } -func createFullJSONImage() *jsonschema.Image { +func createFullJSONImage(customFields map[string]interface{}) *jsonschema.Image { return &jsonschema.Image{ Title: title, OCounter: ocounter, @@ -75,28 +79,40 @@ func createFullJSONImage() *jsonschema.Image { UpdatedAt: json.JSONTime{ Time: updateTime, }, + CustomFields: customFields, } } type basicTestScenario struct { - input models.Image - expected *jsonschema.Image + input models.Image + customFields map[string]interface{} + expected *jsonschema.Image } var scenarios = []basicTestScenario{ { createFullImage(imageID), - createFullJSONImage(), + customFields, + createFullJSONImage(customFields), }, } func TestToJSON(t 
*testing.T) { + db := mocks.NewDatabase() + db.Image.On("GetCustomFields", testCtx, imageID).Return(customFields, nil).Once() + for i, s := range scenarios { image := s.input - json := ToBasicJSON(&image) + json, err := ToBasicJSON(testCtx, db.Image, &image) + if err != nil { + t.Errorf("[%d] unexpected error: %s", i, err.Error()) + continue + } assert.Equal(t, s.expected, json, "[%d]", i) } + + db.AssertExpectations(t) } func createStudioImage(studioID int) models.Image { diff --git a/pkg/image/import.go b/pkg/image/import.go index bf92a6ae8..d8dfa987f 100644 --- a/pkg/image/import.go +++ b/pkg/image/import.go @@ -31,8 +31,9 @@ type Importer struct { Input jsonschema.Image MissingRefBehaviour models.ImportMissingRefEnum - ID int - image models.Image + ID int + image models.Image + customFields map[string]interface{} } func (i *Importer) PreImport(ctx context.Context) error { @@ -58,6 +59,8 @@ func (i *Importer) PreImport(ctx context.Context) error { return err } + i.customFields = i.Input.CustomFields + return nil } @@ -159,7 +162,7 @@ func (i *Importer) populateStudio(ctx context.Context) error { } func (i *Importer) createStudio(ctx context.Context, name string) (int, error) { - newStudio := models.NewStudio() + newStudio := models.NewCreateStudioInput() newStudio.Name = name err := i.StudioWriter.Create(ctx, &newStudio) @@ -344,7 +347,11 @@ func (i *Importer) Create(ctx context.Context) (*int, error) { fileIDs = append(fileIDs, f.Base().ID) } - err := i.ReaderWriter.Create(ctx, &i.image, fileIDs) + err := i.ReaderWriter.Create(ctx, &models.CreateImageInput{ + Image: &i.image, + FileIDs: fileIDs, + CustomFields: i.customFields, + }) if err != nil { return nil, fmt.Errorf("error creating image: %v", err) } @@ -407,7 +414,9 @@ func createTags(ctx context.Context, tagWriter models.TagCreator, names []string newTag := models.NewTag() newTag.Name = name - err := tagWriter.Create(ctx, &newTag) + err := tagWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) 
if err != nil { return nil, err } diff --git a/pkg/image/import_test.go b/pkg/image/import_test.go index 286e51fe3..a693c4568 100644 --- a/pkg/image/import_test.go +++ b/pkg/image/import_test.go @@ -45,7 +45,8 @@ func TestImporterPreImportWithStudio(t *testing.T) { i := Importer{ StudioWriter: db.Studio, Input: jsonschema.Image{ - Studio: existingStudioName, + Studio: existingStudioName, + CustomFields: customFields, }, } @@ -57,6 +58,7 @@ func TestImporterPreImportWithStudio(t *testing.T) { err := i.PreImport(testCtx) assert.Nil(t, err) assert.Equal(t, existingStudioID, *i.image.StudioID) + assert.Equal(t, customFields, i.customFields) i.Input.Studio = existingStudioErr err = i.PreImport(testCtx) @@ -77,9 +79,9 @@ func TestImporterPreImportWithMissingStudio(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3) - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) - s.ID = existingStudioID + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Run(func(args mock.Arguments) { + s := args.Get(1).(*models.CreateStudioInput) + s.Studio.ID = existingStudioID }).Return(nil) err := i.PreImport(testCtx) @@ -109,7 +111,7 @@ func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once() - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Return(errors.New("Create error")) + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -251,9 +253,9 @@ func TestImporterPreImportWithMissingTag(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args 
mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = existingTagID + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = existingTagID }).Return(nil) err := i.PreImport(testCtx) @@ -285,7 +287,7 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once() - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) diff --git a/pkg/image/query.go b/pkg/image/query.go index b9b9e6628..958c9de9b 100644 --- a/pkg/image/query.go +++ b/pkg/image/query.go @@ -2,7 +2,9 @@ package image import ( "context" + "path/filepath" "strconv" + "strings" "github.com/stashapp/stash/pkg/models" ) @@ -46,6 +48,35 @@ func Query(ctx context.Context, qb Queryer, imageFilter *models.ImageFilterType, return images, nil } +// FilterFromPaths creates a ImageFilterType that filters using the provided +// paths. 
+func FilterFromPaths(paths []string) *models.ImageFilterType { + ret := &models.ImageFilterType{} + or := ret + sep := string(filepath.Separator) + + for _, p := range paths { + if !strings.HasSuffix(p, sep) { + p += sep + } + + if ret.Path == nil { + or = ret + } else { + newOr := &models.ImageFilterType{} + or.Or = newOr + or = newOr + } + + or.Path = &models.StringCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: p + "%", + } + } + + return ret +} + func CountByPerformerID(ctx context.Context, r QueryCounter, id int) (int, error) { filter := &models.ImageFilterType{ Performers: &models.MultiCriterionInput{ diff --git a/pkg/image/scan.go b/pkg/image/scan.go index a6002057f..682641e66 100644 --- a/pkg/image/scan.go +++ b/pkg/image/scan.go @@ -7,6 +7,7 @@ import ( "os" "path/filepath" "slices" + "strings" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -27,7 +28,7 @@ type ScanCreatorUpdater interface { GetFiles(ctx context.Context, relatedID int) ([]models.File, error) GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error) - Create(ctx context.Context, newImage *models.Image, fileIDs []models.FileID) error + Create(ctx context.Context, newImage *models.CreateImageInput) error UpdatePartial(ctx context.Context, id int, updatedImage models.ImagePartial) (*models.Image, error) AddFileID(ctx context.Context, id int, fileID models.FileID) error } @@ -35,10 +36,15 @@ type ScanCreatorUpdater interface { type GalleryFinderCreator interface { FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Gallery, error) FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Gallery, error) - Create(ctx context.Context, newObject *models.Gallery, fileIDs []models.FileID) error + models.GalleryCreator UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error) } +type ScanSceneFinderUpdater interface { + FindByPath(ctx context.Context, p string) 
([]*models.Scene, error) + AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error +} + type ScanConfig interface { GetCreateGalleriesFromFolders() bool } @@ -48,8 +54,9 @@ type ScanGenerator interface { } type ScanHandler struct { - CreatorUpdater ScanCreatorUpdater - GalleryFinder GalleryFinderCreator + CreatorUpdater ScanCreatorUpdater + GalleryFinder GalleryFinderCreator + SceneFinderUpdater ScanSceneFinderUpdater ScanGenerator ScanGenerator @@ -124,7 +131,10 @@ func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models. logger.Infof("Adding %s to gallery %s", f.Base().Path, g.Path) } - if err := h.CreatorUpdater.Create(ctx, &newImage, []models.FileID{imageFile.ID}); err != nil { + if err := h.CreatorUpdater.Create(ctx, &models.CreateImageInput{ + Image: &newImage, + FileIDs: []models.FileID{imageFile.ID}, + }); err != nil { return fmt.Errorf("creating new image: %w", err) } @@ -207,8 +217,8 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. changed = true } - if changed { - // always update updated_at time + if changed || updateExisting { + // update updated_at time when file association or content changes imagePartial := models.NewImagePartial() imagePartial.GalleryIDs = galleryIDs @@ -226,9 +236,7 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. 
return fmt.Errorf("updating gallery updated at timestamp: %w", err) } } - } - if changed || updateExisting { h.PluginCache.RegisterPostHooks(ctx, i.ID, hook.ImageUpdatePost, nil, nil) } } @@ -252,9 +260,13 @@ func (h *ScanHandler) getOrCreateFolderBasedGallery(ctx context.Context, f model newGallery := models.NewGallery() newGallery.FolderID = &folderID + input := models.CreateGalleryInput{ + Gallery: &newGallery, + } + logger.Infof("Creating folder-based gallery for %s", filepath.Dir(f.Base().Path)) - if err := h.GalleryFinder.Create(ctx, &newGallery, nil); err != nil { + if err := h.GalleryFinder.Create(ctx, &input); err != nil { return nil, fmt.Errorf("creating folder based gallery: %w", err) } @@ -308,15 +320,48 @@ func (h *ScanHandler) getOrCreateZipBasedGallery(ctx context.Context, zipFile mo logger.Infof("%s doesn't exist. Creating new gallery...", zipFile.Base().Path) - if err := h.GalleryFinder.Create(ctx, &newGallery, []models.FileID{zipFile.Base().ID}); err != nil { + input := models.CreateGalleryInput{ + Gallery: &newGallery, + FileIDs: []models.FileID{zipFile.Base().ID}, + } + + if err := h.GalleryFinder.Create(ctx, &input); err != nil { return nil, fmt.Errorf("creating zip-based gallery: %w", err) } + // try to associate with scene + if err := h.associateScene(ctx, &newGallery, zipFile); err != nil { + return nil, fmt.Errorf("associating scene: %w", err) + } + h.PluginCache.RegisterPostHooks(ctx, newGallery.ID, hook.GalleryCreatePost, nil, nil) return &newGallery, nil } +func (h *ScanHandler) associateScene(ctx context.Context, existing *models.Gallery, zipFile models.File) error { + galleryIDs := []int{existing.ID} + + path := zipFile.Base().Path + withoutExt := strings.TrimSuffix(path, filepath.Ext(path)) + ".*" + + // find scenes with a file that matches + scenes, err := h.SceneFinderUpdater.FindByPath(ctx, withoutExt) + if err != nil { + return err + } + + for _, scene := range scenes { + // found related Scene + logger.Infof("associate: Gallery 
%s is related to scene: %d", path, scene.ID) + if err := h.SceneFinderUpdater.AddGalleryIDs(ctx, scene.ID, galleryIDs); err != nil { + return err + } + } + + return nil +} + func (h *ScanHandler) getOrCreateGallery(ctx context.Context, f models.File) (*models.Gallery, error) { // don't create folder-based galleries for files in zip file if f.Base().ZipFile != nil { diff --git a/pkg/image/scan_test.go b/pkg/image/scan_test.go new file mode 100644 index 000000000..f48c188ee --- /dev/null +++ b/pkg/image/scan_test.go @@ -0,0 +1,120 @@ +package image + +import ( + "context" + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/plugin" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +type mockScanConfig struct{} + +func (m *mockScanConfig) GetCreateGalleriesFromFolders() bool { return false } + +func TestAssociateExisting_UpdatePartialOnContentChange(t *testing.T) { + const ( + testImageID = 1 + testFileID = 100 + ) + + existingFile := &models.BaseFile{ID: models.FileID(testFileID), Path: "/images/test.jpg"} + + makeImage := func() *models.Image { + return &models.Image{ + ID: testImageID, + Files: models.NewRelatedFiles([]models.File{existingFile}), + GalleryIDs: models.NewRelatedIDs([]int{}), + } + } + + tests := []struct { + name string + updateExisting bool + expectUpdate bool + }{ + { + name: "calls UpdatePartial when file content changed", + updateExisting: true, + expectUpdate: true, + }, + { + name: "skips UpdatePartial when file unchanged and already associated", + updateExisting: false, + expectUpdate: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + db := mocks.NewDatabase() + db.Image.On("GetFiles", mock.Anything, testImageID).Return([]models.File{existingFile}, nil) + db.Image.On("GetGalleryIDs", mock.Anything, testImageID).Return([]int{}, nil) + + if tt.expectUpdate { + db.Image.On("UpdatePartial", 
mock.Anything, testImageID, mock.Anything). + Return(&models.Image{ID: testImageID}, nil) + } + + h := &ScanHandler{ + CreatorUpdater: db.Image, + GalleryFinder: db.Gallery, + ScanConfig: &mockScanConfig{}, + PluginCache: &plugin.Cache{}, + } + + db.WithTxnCtx(func(ctx context.Context) { + err := h.associateExisting(ctx, []*models.Image{makeImage()}, existingFile, tt.updateExisting) + assert.NoError(t, err) + }) + + if tt.expectUpdate { + db.Image.AssertCalled(t, "UpdatePartial", mock.Anything, testImageID, mock.Anything) + } else { + db.Image.AssertNotCalled(t, "UpdatePartial", mock.Anything, mock.Anything, mock.Anything) + } + }) + } +} + +func TestAssociateExisting_UpdatePartialOnNewFile(t *testing.T) { + const ( + testImageID = 1 + existFileID = 100 + newFileID = 200 + ) + + existingFile := &models.BaseFile{ID: models.FileID(existFileID), Path: "/images/existing.jpg"} + newFile := &models.BaseFile{ID: models.FileID(newFileID), Path: "/images/new.jpg"} + + image := &models.Image{ + ID: testImageID, + Files: models.NewRelatedFiles([]models.File{existingFile}), + GalleryIDs: models.NewRelatedIDs([]int{}), + } + + db := mocks.NewDatabase() + db.Image.On("GetFiles", mock.Anything, testImageID).Return([]models.File{existingFile}, nil) + db.Image.On("GetGalleryIDs", mock.Anything, testImageID).Return([]int{}, nil) + db.Image.On("AddFileID", mock.Anything, testImageID, models.FileID(newFileID)).Return(nil) + db.Image.On("UpdatePartial", mock.Anything, testImageID, mock.Anything). 
+ Return(&models.Image{ID: testImageID}, nil) + + h := &ScanHandler{ + CreatorUpdater: db.Image, + GalleryFinder: db.Gallery, + ScanConfig: &mockScanConfig{}, + PluginCache: &plugin.Cache{}, + } + + db.WithTxnCtx(func(ctx context.Context) { + err := h.associateExisting(ctx, []*models.Image{image}, newFile, false) + assert.NoError(t, err) + }) + + db.Image.AssertCalled(t, "AddFileID", mock.Anything, testImageID, models.FileID(newFileID)) + db.Image.AssertCalled(t, "UpdatePartial", mock.Anything, testImageID, mock.Anything) +} diff --git a/pkg/match/scraped.go b/pkg/match/scraped.go index d3039f4c6..a6683ff52 100644 --- a/pkg/match/scraped.go +++ b/pkg/match/scraped.go @@ -188,6 +188,20 @@ func ScrapedGroup(ctx context.Context, qb GroupNamesFinder, storedID *string, na return } +// ScrapedTagHierarchy executes ScrapedTag for the provided tag and its parent. +func ScrapedTagHierarchy(ctx context.Context, qb models.TagQueryer, s *models.ScrapedTag, stashBoxEndpoint string) error { + if err := ScrapedTag(ctx, qb, s, stashBoxEndpoint); err != nil { + return err + } + + if s.Parent == nil { + return nil + } + + // Match parent by name only (categories don't have StashDB tag IDs) + return ScrapedTag(ctx, qb, s.Parent, "") +} + // ScrapedTag matches the provided tag with the tags // in the database and sets the ID field if one is found. 
func ScrapedTag(ctx context.Context, qb models.TagQueryer, s *models.ScrapedTag, stashBoxEndpoint string) error { diff --git a/pkg/models/custom_fields.go b/pkg/models/custom_fields.go index 5c3acd18b..3212d676f 100644 --- a/pkg/models/custom_fields.go +++ b/pkg/models/custom_fields.go @@ -17,3 +17,7 @@ type CustomFieldsReader interface { GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]CustomFieldMap, error) } + +type CustomFieldsWriter interface { + SetCustomFields(ctx context.Context, id int, fields CustomFieldsInput) error +} diff --git a/pkg/models/date.go b/pkg/models/date.go index dbd5c4ec6..912361507 100644 --- a/pkg/models/date.go +++ b/pkg/models/date.go @@ -2,6 +2,7 @@ package models import ( "fmt" + "strings" "time" "github.com/stashapp/stash/pkg/utils" @@ -61,3 +62,114 @@ func ParseDate(s string) (Date, error) { return Date{}, fmt.Errorf("failed to parse date %q: %v", s, errs) } + +func DateFromYear(year int) Date { + return Date{ + Time: time.Date(year, 1, 1, 0, 0, 0, 0, time.UTC), + Precision: DatePrecisionYear, + } +} + +func FormatYearRange(start *Date, end *Date) string { + var ( + startStr, endStr string + ) + + if start != nil { + startStr = start.Format(dateFormatPrecision[DatePrecisionYear]) + } + + if end != nil { + endStr = end.Format(dateFormatPrecision[DatePrecisionYear]) + } + + switch { + case startStr == "" && endStr == "": + return "" + case endStr == "": + return fmt.Sprintf("%s -", startStr) + case startStr == "": + return fmt.Sprintf("- %s", endStr) + default: + return fmt.Sprintf("%s - %s", startStr, endStr) + } +} + +func FormatYearRangeString(start *string, end *string) string { + switch { + case start == nil && end == nil: + return "" + case end == nil: + return fmt.Sprintf("%s -", *start) + case start == nil: + return fmt.Sprintf("- %s", *end) + default: + return fmt.Sprintf("%s - %s", *start, *end) + } +} + +// ParseYearRangeString parses a year 
range string into start and end year integers. +// Supported formats: "YYYY", "YYYY - YYYY", "YYYY-YYYY", "YYYY -", "- YYYY", "YYYY-present". +// Returns nil for start/end if not present in the string. +func ParseYearRangeString(s string) (start *Date, end *Date, err error) { + s = strings.TrimSpace(s) + if s == "" { + return nil, nil, fmt.Errorf("empty year range string") + } + + // normalize "present" to empty end + lower := strings.ToLower(s) + lower = strings.ReplaceAll(lower, "present", "") + + // split on "-" if it contains one + var parts []string + if strings.Contains(lower, "-") { + parts = strings.SplitN(lower, "-", 2) + } else { + // single value, treat as start year + year, err := parseYear(lower) + if err != nil { + return nil, nil, fmt.Errorf("invalid year range %q: %w", s, err) + } + return year, nil, nil + } + + startStr := strings.TrimSpace(parts[0]) + endStr := strings.TrimSpace(parts[1]) + + if startStr != "" { + y, err := parseYear(startStr) + if err != nil { + return nil, nil, fmt.Errorf("invalid start year in %q: %w", s, err) + } + start = y + } + + if endStr != "" { + y, err := parseYear(endStr) + if err != nil { + return nil, nil, fmt.Errorf("invalid end year in %q: %w", s, err) + } + end = y + } + + if start == nil && end == nil { + return nil, nil, fmt.Errorf("could not parse year range %q", s) + } + + return start, end, nil +} + +func parseYear(s string) (*Date, error) { + ret, err := ParseDate(s) + if err != nil { + return nil, fmt.Errorf("parsing year %q: %w", s, err) + } + + year := ret.Time.Year() + if year < 1900 || year > 2200 { + return nil, fmt.Errorf("year %d out of reasonable range", year) + } + + return &ret, nil +} diff --git a/pkg/models/date_test.go b/pkg/models/date_test.go index b6cca9ee1..3b2962e28 100644 --- a/pkg/models/date_test.go +++ b/pkg/models/date_test.go @@ -3,6 +3,8 @@ package models import ( "testing" "time" + + "github.com/stretchr/testify/assert" ) func TestParseDateStringAsTime(t *testing.T) { @@ -48,3 
+50,102 @@ func TestParseDateStringAsTime(t *testing.T) { }) } } + +func TestFormatYearRange(t *testing.T) { + datePtr := func(v int) *Date { + date := DateFromYear(v) + return &date + } + + tests := []struct { + name string + start *Date + end *Date + want string + }{ + {"both nil", nil, nil, ""}, + {"only start", datePtr(2005), nil, "2005 -"}, + {"only end", nil, datePtr(2010), "- 2010"}, + {"start and end", datePtr(2005), datePtr(2010), "2005 - 2010"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := FormatYearRange(tt.start, tt.end) + assert.Equal(t, tt.want, got) + }) + } +} + +func TestFormatYearRangeString(t *testing.T) { + stringPtr := func(v string) *string { return &v } + + tests := []struct { + name string + start *string + end *string + want string + }{ + {"both nil", nil, nil, ""}, + {"only start", stringPtr("2005"), nil, "2005 -"}, + {"only end", nil, stringPtr("2010"), "- 2010"}, + {"start and end", stringPtr("2005"), stringPtr("2010"), "2005 - 2010"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := FormatYearRangeString(tt.start, tt.end) + assert.Equal(t, tt.want, got) + }) + } +} + +func TestParseYearRangeString(t *testing.T) { + intPtr := func(v int) *int { return &v } + + tests := []struct { + name string + input string + wantStart *int + wantEnd *int + wantErr bool + }{ + {"single year", "2005", intPtr(2005), nil, false}, + {"year range with spaces", "2005 - 2010", intPtr(2005), intPtr(2010), false}, + {"year range no spaces", "2005-2010", intPtr(2005), intPtr(2010), false}, + {"year dash open", "2005 -", intPtr(2005), nil, false}, + {"year dash open no space", "2005-", intPtr(2005), nil, false}, + {"dash year", "- 2010", nil, intPtr(2010), false}, + {"year present", "2005-present", intPtr(2005), nil, false}, + {"year Present caps", "2005 - Present", intPtr(2005), nil, false}, + {"whitespace padding", " 2005 - 2010 ", intPtr(2005), intPtr(2010), false}, + {"empty string", "", 
nil, nil, true}, + {"garbage", "not a year", nil, nil, true}, + {"partial garbage start", "abc - 2010", nil, nil, true}, + {"partial garbage end", "2005 - abc", nil, nil, true}, + {"year out of range", "1800", nil, nil, true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + start, end, err := ParseYearRangeString(tt.input) + if tt.wantErr { + assert.Error(t, err) + return + } + assert.NoError(t, err) + if tt.wantStart != nil { + assert.NotNil(t, start) + assert.Equal(t, *tt.wantStart, start.Time.Year()) + } else { + assert.Nil(t, start) + } + if tt.wantEnd != nil { + assert.NotNil(t, end) + assert.Equal(t, *tt.wantEnd, end.Time.Year()) + } else { + assert.Nil(t, end) + } + }) + } +} diff --git a/pkg/models/file.go b/pkg/models/file.go index 63c30ba4d..32263319c 100644 --- a/pkg/models/file.go +++ b/pkg/models/file.go @@ -26,7 +26,7 @@ type FileFilterType struct { ParentFolder *HierarchicalMultiCriterionInput `json:"parent_folder"` ZipFile *MultiCriterionInput `json:"zip_file"` ModTime *TimestampCriterionInput `json:"mod_time"` - Duplicated *PHashDuplicationCriterionInput `json:"duplicated"` + Duplicated *FileDuplicationCriterionInput `json:"duplicated"` Hashes []*FingerprintFilterInput `json:"hashes"` VideoFileFilter *VideoFileFilterInput `json:"video_file_filter"` ImageFileFilter *ImageFileFilterInput `json:"image_file_filter"` diff --git a/pkg/models/folder.go b/pkg/models/folder.go index ada9e17b7..e9e9a3971 100644 --- a/pkg/models/folder.go +++ b/pkg/models/folder.go @@ -18,10 +18,8 @@ type FolderQueryOptions struct { type FolderFilterType struct { OperatorFilter[FolderFilterType] - Path *StringCriterionInput `json:"path,omitempty"` - Basename *StringCriterionInput `json:"basename,omitempty"` - // Filter by parent directory path - Dir *StringCriterionInput `json:"dir,omitempty"` + Path *StringCriterionInput `json:"path,omitempty"` + Basename *StringCriterionInput `json:"basename,omitempty"` ParentFolder 
*HierarchicalMultiCriterionInput `json:"parent_folder,omitempty"` ZipFile *MultiCriterionInput `json:"zip_file,omitempty"` // Filter by modification time diff --git a/pkg/models/gallery.go b/pkg/models/gallery.go index 5b75febc5..3bf70b754 100644 --- a/pkg/models/gallery.go +++ b/pkg/models/gallery.go @@ -11,6 +11,8 @@ type GalleryFilterType struct { Checksum *StringCriterionInput `json:"checksum"` // Filter by path Path *StringCriterionInput `json:"path"` + // Filter by parent folder + ParentFolder *HierarchicalMultiCriterionInput `json:"parent_folder,omitempty"` // Filter by zip file count FileCount *IntCriterionInput `json:"file_count"` // Filter to only include galleries missing this property @@ -67,6 +69,9 @@ type GalleryFilterType struct { CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` + + // Filter by custom fields + CustomFields []CustomFieldCriterionInput `json:"custom_fields"` } type GalleryUpdateInput struct { @@ -86,6 +91,8 @@ type GalleryUpdateInput struct { PerformerIds []string `json:"performer_ids"` PrimaryFileID *string `json:"primary_file_id"` + CustomFields *CustomFieldsInput `json:"custom_fields"` + // deprecated URL *string `json:"url"` } @@ -95,6 +102,7 @@ type GalleryDestroyInput struct { // If true, then the zip file will be deleted if the gallery is zip-file-based. // If gallery is folder-based, then any files not associated with other // galleries will be deleted, along with the folder, if it is not empty. 
- DeleteFile *bool `json:"delete_file"` - DeleteGenerated *bool `json:"delete_generated"` + DeleteFile *bool `json:"delete_file"` + DeleteGenerated *bool `json:"delete_generated"` + DestroyFileEntry *bool `json:"destroy_file_entry"` } diff --git a/pkg/models/group.go b/pkg/models/group.go index 6943b1055..396384b51 100644 --- a/pkg/models/group.go +++ b/pkg/models/group.go @@ -33,6 +33,8 @@ type GroupFilterType struct { ContainingGroupCount *IntCriterionInput `json:"containing_group_count"` // Filter by number of sub-groups the group has SubGroupCount *IntCriterionInput `json:"sub_group_count"` + // Filter by number of scenes the group has + SceneCount *IntCriterionInput `json:"scene_count"` // Filter by related scenes that meet this criteria ScenesFilter *SceneFilterType `json:"scenes_filter"` // Filter by related studios that meet this criteria @@ -41,4 +43,6 @@ type GroupFilterType struct { CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` + // Filter by custom fields + CustomFields []CustomFieldCriterionInput `json:"custom_fields"` } diff --git a/pkg/models/image.go b/pkg/models/image.go index 4ab10eabf..b99267e8c 100644 --- a/pkg/models/image.go +++ b/pkg/models/image.go @@ -1,6 +1,8 @@ package models -import "context" +import ( + "context" +) type ImageFilterType struct { OperatorFilter[ImageFilterType] @@ -11,6 +13,8 @@ type ImageFilterType struct { Photographer *StringCriterionInput `json:"photographer"` // Filter by file checksum Checksum *StringCriterionInput `json:"checksum"` + // Filter by phash distance + PhashDistance *PhashDistanceCriterionInput `json:"phash_distance"` // Filter by path Path *StringCriterionInput `json:"path"` // Filter by file count @@ -63,40 +67,45 @@ type ImageFilterType struct { CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` + // Filter by custom fields + 
CustomFields []CustomFieldCriterionInput `json:"custom_fields"` } type ImageUpdateInput struct { - ClientMutationID *string `json:"clientMutationId"` - ID string `json:"id"` - Title *string `json:"title"` - Code *string `json:"code"` - Urls []string `json:"urls"` - Date *string `json:"date"` - Details *string `json:"details"` - Photographer *string `json:"photographer"` - Rating100 *int `json:"rating100"` - Organized *bool `json:"organized"` - SceneIds []string `json:"scene_ids"` - StudioID *string `json:"studio_id"` - TagIds []string `json:"tag_ids"` - PerformerIds []string `json:"performer_ids"` - GalleryIds []string `json:"gallery_ids"` - PrimaryFileID *string `json:"primary_file_id"` + ClientMutationID *string `json:"clientMutationId"` + ID string `json:"id"` + Title *string `json:"title"` + Code *string `json:"code"` + Urls []string `json:"urls"` + Date *string `json:"date"` + Details *string `json:"details"` + Photographer *string `json:"photographer"` + Rating100 *int `json:"rating100"` + Organized *bool `json:"organized"` + SceneIds []string `json:"scene_ids"` + StudioID *string `json:"studio_id"` + TagIds []string `json:"tag_ids"` + PerformerIds []string `json:"performer_ids"` + GalleryIds []string `json:"gallery_ids"` + PrimaryFileID *string `json:"primary_file_id"` + CustomFields *CustomFieldsInput `json:"custom_fields"` // deprecated URL *string `json:"url"` } type ImageDestroyInput struct { - ID string `json:"id"` - DeleteFile *bool `json:"delete_file"` - DeleteGenerated *bool `json:"delete_generated"` + ID string `json:"id"` + DeleteFile *bool `json:"delete_file"` + DeleteGenerated *bool `json:"delete_generated"` + DestroyFileEntry *bool `json:"destroy_file_entry"` } type ImagesDestroyInput struct { - Ids []string `json:"ids"` - DeleteFile *bool `json:"delete_file"` - DeleteGenerated *bool `json:"delete_generated"` + Ids []string `json:"ids"` + DeleteFile *bool `json:"delete_file"` + DeleteGenerated *bool `json:"delete_generated"` + DestroyFileEntry 
*bool `json:"destroy_file_entry"` } type ImageQueryOptions struct { diff --git a/pkg/models/jsonschema/gallery.go b/pkg/models/jsonschema/gallery.go index 7323e37ba..5fb6e16ab 100644 --- a/pkg/models/jsonschema/gallery.go +++ b/pkg/models/jsonschema/gallery.go @@ -18,22 +18,23 @@ type GalleryChapter struct { } type Gallery struct { - ZipFiles []string `json:"zip_files,omitempty"` - FolderPath string `json:"folder_path,omitempty"` - Title string `json:"title,omitempty"` - Code string `json:"code,omitempty"` - URLs []string `json:"urls,omitempty"` - Date string `json:"date,omitempty"` - Details string `json:"details,omitempty"` - Photographer string `json:"photographer,omitempty"` - Rating int `json:"rating,omitempty"` - Organized bool `json:"organized,omitempty"` - Chapters []GalleryChapter `json:"chapters,omitempty"` - Studio string `json:"studio,omitempty"` - Performers []string `json:"performers,omitempty"` - Tags []string `json:"tags,omitempty"` - CreatedAt json.JSONTime `json:"created_at,omitempty"` - UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + ZipFiles []string `json:"zip_files,omitempty"` + FolderPath string `json:"folder_path,omitempty"` + Title string `json:"title,omitempty"` + Code string `json:"code,omitempty"` + URLs []string `json:"urls,omitempty"` + Date string `json:"date,omitempty"` + Details string `json:"details,omitempty"` + Photographer string `json:"photographer,omitempty"` + Rating int `json:"rating,omitempty"` + Organized bool `json:"organized,omitempty"` + Chapters []GalleryChapter `json:"chapters,omitempty"` + Studio string `json:"studio,omitempty"` + Performers []string `json:"performers,omitempty"` + Tags []string `json:"tags,omitempty"` + CreatedAt json.JSONTime `json:"created_at,omitempty"` + UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + CustomFields map[string]interface{} `json:"custom_fields,omitempty"` // deprecated - for import only URL string `json:"url,omitempty"` diff --git a/pkg/models/jsonschema/group.go 
b/pkg/models/jsonschema/group.go index b284dab6e..357ac70bc 100644 --- a/pkg/models/jsonschema/group.go +++ b/pkg/models/jsonschema/group.go @@ -33,6 +33,8 @@ type Group struct { CreatedAt json.JSONTime `json:"created_at,omitempty"` UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + CustomFields map[string]interface{} `json:"custom_fields,omitempty"` + // deprecated - for import only URL string `json:"url,omitempty"` } diff --git a/pkg/models/jsonschema/image.go b/pkg/models/jsonschema/image.go index 1bdac8770..168ea9eec 100644 --- a/pkg/models/jsonschema/image.go +++ b/pkg/models/jsonschema/image.go @@ -18,18 +18,19 @@ type Image struct { // deprecated - for import only URL string `json:"url,omitempty"` - URLs []string `json:"urls,omitempty"` - Date string `json:"date,omitempty"` - Details string `json:"details,omitempty"` - Photographer string `json:"photographer,omitempty"` - Organized bool `json:"organized,omitempty"` - OCounter int `json:"o_counter,omitempty"` - Galleries []GalleryRef `json:"galleries,omitempty"` - Performers []string `json:"performers,omitempty"` - Tags []string `json:"tags,omitempty"` - Files []string `json:"files,omitempty"` - CreatedAt json.JSONTime `json:"created_at,omitempty"` - UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + URLs []string `json:"urls,omitempty"` + Date string `json:"date,omitempty"` + Details string `json:"details,omitempty"` + Photographer string `json:"photographer,omitempty"` + Organized bool `json:"organized,omitempty"` + OCounter int `json:"o_counter,omitempty"` + Galleries []GalleryRef `json:"galleries,omitempty"` + Performers []string `json:"performers,omitempty"` + Tags []string `json:"tags,omitempty"` + Files []string `json:"files,omitempty"` + CreatedAt json.JSONTime `json:"created_at,omitempty"` + UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + CustomFields map[string]interface{} `json:"custom_fields,omitempty"` } func (s Image) Filename(basename string, hash string) string { diff --git 
a/pkg/models/jsonschema/performer.go b/pkg/models/jsonschema/performer.go index 5edd5724c..1a8acd5f3 100644 --- a/pkg/models/jsonschema/performer.go +++ b/pkg/models/jsonschema/performer.go @@ -48,7 +48,9 @@ type Performer struct { FakeTits string `json:"fake_tits,omitempty"` PenisLength float64 `json:"penis_length,omitempty"` Circumcised string `json:"circumcised,omitempty"` - CareerLength string `json:"career_length,omitempty"` + CareerLength string `json:"career_length,omitempty"` // deprecated - for import only + CareerStart string `json:"career_start,omitempty"` + CareerEnd string `json:"career_end,omitempty"` Tattoos string `json:"tattoos,omitempty"` Piercings string `json:"piercings,omitempty"` Aliases StringOrStringList `json:"aliases,omitempty"` diff --git a/pkg/models/jsonschema/scene.go b/pkg/models/jsonschema/scene.go index c2f266d5c..8f15b9c5d 100644 --- a/pkg/models/jsonschema/scene.go +++ b/pkg/models/jsonschema/scene.go @@ -80,6 +80,8 @@ type Scene struct { PlayDuration float64 `json:"play_duration,omitempty"` StashIDs []models.StashID `json:"stash_ids,omitempty"` + + CustomFields map[string]interface{} `json:"custom_fields,omitempty"` } func (s Scene) Filename(id int, basename string, hash string) string { diff --git a/pkg/models/jsonschema/studio.go b/pkg/models/jsonschema/studio.go index a3706df66..12a797c13 100644 --- a/pkg/models/jsonschema/studio.go +++ b/pkg/models/jsonschema/studio.go @@ -24,6 +24,9 @@ type Studio struct { StashIDs []models.StashID `json:"stash_ids,omitempty"` Tags []string `json:"tags,omitempty"` IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"` + Organized bool `json:"organized,omitempty"` + + CustomFields map[string]interface{} `json:"custom_fields,omitempty"` // deprecated - for import only URL string `json:"url,omitempty"` diff --git a/pkg/models/jsonschema/tag.go b/pkg/models/jsonschema/tag.go index faab1bfb2..e7b16b13f 100644 --- a/pkg/models/jsonschema/tag.go +++ b/pkg/models/jsonschema/tag.go @@ -11,17 +11,18 @@ 
import ( ) type Tag struct { - Name string `json:"name,omitempty"` - SortName string `json:"sort_name,omitempty"` - Description string `json:"description,omitempty"` - Favorite bool `json:"favorite,omitempty"` - Aliases []string `json:"aliases,omitempty"` - Image string `json:"image,omitempty"` - Parents []string `json:"parents,omitempty"` - IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"` - StashIDs []models.StashID `json:"stash_ids,omitempty"` - CreatedAt json.JSONTime `json:"created_at,omitempty"` - UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + Name string `json:"name,omitempty"` + SortName string `json:"sort_name,omitempty"` + Description string `json:"description,omitempty"` + Favorite bool `json:"favorite,omitempty"` + Aliases []string `json:"aliases,omitempty"` + Image string `json:"image,omitempty"` + Parents []string `json:"parents,omitempty"` + IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"` + StashIDs []models.StashID `json:"stash_ids,omitempty"` + CreatedAt json.JSONTime `json:"created_at,omitempty"` + UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + CustomFields map[string]interface{} `json:"custom_fields,omitempty"` } func (s Tag) Filename() string { diff --git a/pkg/models/mocks/FileReaderWriter.go b/pkg/models/mocks/FileReaderWriter.go index 97a0136e6..4b370459e 100644 --- a/pkg/models/mocks/FileReaderWriter.go +++ b/pkg/models/mocks/FileReaderWriter.go @@ -153,13 +153,13 @@ func (_m *FileReaderWriter) FindAllByPath(ctx context.Context, path string, case return r0, r1 } -// FindAllInPaths provides a mock function with given fields: ctx, p, limit, offset -func (_m *FileReaderWriter) FindAllInPaths(ctx context.Context, p []string, limit int, offset int) ([]models.File, error) { - ret := _m.Called(ctx, p, limit, offset) +// FindAllInPaths provides a mock function with given fields: ctx, p, includeZipContents, limit, offset +func (_m *FileReaderWriter) FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, 
limit int, offset int) ([]models.File, error) { + ret := _m.Called(ctx, p, includeZipContents, limit, offset) var r0 []models.File - if rf, ok := ret.Get(0).(func(context.Context, []string, int, int) []models.File); ok { - r0 = rf(ctx, p, limit, offset) + if rf, ok := ret.Get(0).(func(context.Context, []string, bool, int, int) []models.File); ok { + r0 = rf(ctx, p, includeZipContents, limit, offset) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]models.File) @@ -167,8 +167,8 @@ func (_m *FileReaderWriter) FindAllInPaths(ctx context.Context, p []string, limi } var r1 error - if rf, ok := ret.Get(1).(func(context.Context, []string, int, int) error); ok { - r1 = rf(ctx, p, limit, offset) + if rf, ok := ret.Get(1).(func(context.Context, []string, bool, int, int) error); ok { + r1 = rf(ctx, p, includeZipContents, limit, offset) } else { r1 = ret.Error(1) } diff --git a/pkg/models/mocks/FolderReaderWriter.go b/pkg/models/mocks/FolderReaderWriter.go index 7bca013fe..d2230c645 100644 --- a/pkg/models/mocks/FolderReaderWriter.go +++ b/pkg/models/mocks/FolderReaderWriter.go @@ -86,13 +86,13 @@ func (_m *FolderReaderWriter) Find(ctx context.Context, id models.FolderID) (*mo return r0, r1 } -// FindAllInPaths provides a mock function with given fields: ctx, p, limit, offset -func (_m *FolderReaderWriter) FindAllInPaths(ctx context.Context, p []string, limit int, offset int) ([]*models.Folder, error) { - ret := _m.Called(ctx, p, limit, offset) +// FindAllInPaths provides a mock function with given fields: ctx, p, includeZipContents, limit, offset +func (_m *FolderReaderWriter) FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit int, offset int) ([]*models.Folder, error) { + ret := _m.Called(ctx, p, includeZipContents, limit, offset) var r0 []*models.Folder - if rf, ok := ret.Get(0).(func(context.Context, []string, int, int) []*models.Folder); ok { - r0 = rf(ctx, p, limit, offset) + if rf, ok := ret.Get(0).(func(context.Context, []string, bool, 
int, int) []*models.Folder); ok { + r0 = rf(ctx, p, includeZipContents, limit, offset) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]*models.Folder) @@ -100,8 +100,8 @@ func (_m *FolderReaderWriter) FindAllInPaths(ctx context.Context, p []string, li } var r1 error - if rf, ok := ret.Get(1).(func(context.Context, []string, int, int) error); ok { - r1 = rf(ctx, p, limit, offset) + if rf, ok := ret.Get(1).(func(context.Context, []string, bool, int, int) error); ok { + r1 = rf(ctx, p, includeZipContents, limit, offset) } else { r1 = ret.Error(1) } @@ -201,6 +201,52 @@ func (_m *FolderReaderWriter) FindMany(ctx context.Context, id []models.FolderID return r0, r1 } +// GetManyParentFolderIDs provides a mock function with given fields: ctx, folderIDs +func (_m *FolderReaderWriter) GetManyParentFolderIDs(ctx context.Context, folderIDs []models.FolderID) ([][]models.FolderID, error) { + ret := _m.Called(ctx, folderIDs) + + var r0 [][]models.FolderID + if rf, ok := ret.Get(0).(func(context.Context, []models.FolderID) [][]models.FolderID); ok { + r0 = rf(ctx, folderIDs) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]models.FolderID) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []models.FolderID) error); ok { + r1 = rf(ctx, folderIDs) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetManySubFolderIDs provides a mock function with given fields: ctx, folderIDs +func (_m *FolderReaderWriter) GetManySubFolderIDs(ctx context.Context, folderIDs []models.FolderID) ([][]models.FolderID, error) { + ret := _m.Called(ctx, folderIDs) + + var r0 [][]models.FolderID + if rf, ok := ret.Get(0).(func(context.Context, []models.FolderID) [][]models.FolderID); ok { + r0 = rf(ctx, folderIDs) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]models.FolderID) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []models.FolderID) error); ok { + r1 = rf(ctx, folderIDs) + } else { + r1 = ret.Error(1) + } + + 
return r0, r1 +} + // Query provides a mock function with given fields: ctx, options func (_m *FolderReaderWriter) Query(ctx context.Context, options models.FolderQueryOptions) (*models.FolderQueryResult, error) { ret := _m.Called(ctx, options) diff --git a/pkg/models/mocks/GalleryReaderWriter.go b/pkg/models/mocks/GalleryReaderWriter.go index f07f8a7d9..e835ea2bc 100644 --- a/pkg/models/mocks/GalleryReaderWriter.go +++ b/pkg/models/mocks/GalleryReaderWriter.go @@ -49,6 +49,20 @@ func (_m *GalleryReaderWriter) AddImages(ctx context.Context, galleryID int, ima return r0 } +// AddSceneIDs provides a mock function with given fields: ctx, galleryID, sceneIDs +func (_m *GalleryReaderWriter) AddSceneIDs(ctx context.Context, galleryID int, sceneIDs []int) error { + ret := _m.Called(ctx, galleryID, sceneIDs) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, []int) error); ok { + r0 = rf(ctx, galleryID, sceneIDs) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // All provides a mock function with given fields: ctx func (_m *GalleryReaderWriter) All(ctx context.Context) ([]*models.Gallery, error) { ret := _m.Called(ctx) @@ -114,13 +128,13 @@ func (_m *GalleryReaderWriter) CountByFileID(ctx context.Context, fileID models. 
return r0, r1 } -// Create provides a mock function with given fields: ctx, newGallery, fileIDs -func (_m *GalleryReaderWriter) Create(ctx context.Context, newGallery *models.Gallery, fileIDs []models.FileID) error { - ret := _m.Called(ctx, newGallery, fileIDs) +// Create provides a mock function with given fields: ctx, newGallery +func (_m *GalleryReaderWriter) Create(ctx context.Context, newGallery *models.CreateGalleryInput) error { + ret := _m.Called(ctx, newGallery) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Gallery, []models.FileID) error); ok { - r0 = rf(ctx, newGallery, fileIDs) + if rf, ok := ret.Get(0).(func(context.Context, *models.CreateGalleryInput) error); ok { + r0 = rf(ctx, newGallery) } else { r0 = ret.Error(0) } @@ -395,6 +409,52 @@ func (_m *GalleryReaderWriter) FindUserGalleryByTitle(ctx context.Context, title return r0, r1 } +// GetCustomFields provides a mock function with given fields: ctx, id +func (_m *GalleryReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) { + ret := _m.Called(ctx, id) + + var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids +func (_m *GalleryReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + ret := _m.Called(ctx, ids) + + var r0 []models.CustomFieldMap + if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.CustomFieldMap) + } + } + + var r1 error + if rf, ok := 
ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetFiles provides a mock function with given fields: ctx, relatedID func (_m *GalleryReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]models.File, error) { ret := _m.Called(ctx, relatedID) @@ -656,12 +716,26 @@ func (_m *GalleryReaderWriter) SetCover(ctx context.Context, galleryID int, cove return r0 } +// SetCustomFields provides a mock function with given fields: ctx, id, fields +func (_m *GalleryReaderWriter) SetCustomFields(ctx context.Context, id int, fields models.CustomFieldsInput) error { + ret := _m.Called(ctx, id, fields) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.CustomFieldsInput) error); ok { + r0 = rf(ctx, id, fields) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Update provides a mock function with given fields: ctx, updatedGallery -func (_m *GalleryReaderWriter) Update(ctx context.Context, updatedGallery *models.Gallery) error { +func (_m *GalleryReaderWriter) Update(ctx context.Context, updatedGallery *models.UpdateGalleryInput) error { ret := _m.Called(ctx, updatedGallery) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Gallery) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.UpdateGalleryInput) error); ok { r0 = rf(ctx, updatedGallery) } else { r0 = ret.Error(0) diff --git a/pkg/models/mocks/GroupReaderWriter.go b/pkg/models/mocks/GroupReaderWriter.go index dc745d094..ac9e513f4 100644 --- a/pkg/models/mocks/GroupReaderWriter.go +++ b/pkg/models/mocks/GroupReaderWriter.go @@ -312,6 +312,52 @@ func (_m *GroupReaderWriter) GetContainingGroupDescriptions(ctx context.Context, return r0, r1 } +// GetCustomFields provides a mock function with given fields: ctx, id +func (_m *GroupReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) { + ret := _m.Called(ctx, id) + + var r0 
map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids +func (_m *GroupReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + ret := _m.Called(ctx, ids) + + var r0 []models.CustomFieldMap + if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.CustomFieldMap) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetFrontImage provides a mock function with given fields: ctx, groupID func (_m *GroupReaderWriter) GetFrontImage(ctx context.Context, groupID int) ([]byte, error) { ret := _m.Called(ctx, groupID) @@ -497,6 +543,20 @@ func (_m *GroupReaderWriter) QueryCount(ctx context.Context, groupFilter *models return r0, r1 } +// SetCustomFields provides a mock function with given fields: ctx, id, fields +func (_m *GroupReaderWriter) SetCustomFields(ctx context.Context, id int, fields models.CustomFieldsInput) error { + ret := _m.Called(ctx, id, fields) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.CustomFieldsInput) error); ok { + r0 = rf(ctx, id, fields) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Update provides a mock function with given fields: ctx, updatedGroup func (_m *GroupReaderWriter) Update(ctx context.Context, updatedGroup *models.Group) error { ret := _m.Called(ctx, updatedGroup) diff --git a/pkg/models/mocks/ImageReaderWriter.go 
b/pkg/models/mocks/ImageReaderWriter.go index afc5efdb7..f2c9934be 100644 --- a/pkg/models/mocks/ImageReaderWriter.go +++ b/pkg/models/mocks/ImageReaderWriter.go @@ -137,13 +137,13 @@ func (_m *ImageReaderWriter) CoverByGalleryID(ctx context.Context, galleryId int return r0, r1 } -// Create provides a mock function with given fields: ctx, newImage, fileIDs -func (_m *ImageReaderWriter) Create(ctx context.Context, newImage *models.Image, fileIDs []models.FileID) error { - ret := _m.Called(ctx, newImage, fileIDs) +// Create provides a mock function with given fields: ctx, newImage +func (_m *ImageReaderWriter) Create(ctx context.Context, newImage *models.CreateImageInput) error { + ret := _m.Called(ctx, newImage) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Image, []models.FileID) error); ok { - r0 = rf(ctx, newImage, fileIDs) + if rf, ok := ret.Get(0).(func(context.Context, *models.CreateImageInput) error); ok { + r0 = rf(ctx, newImage) } else { r0 = ret.Error(0) } @@ -393,6 +393,52 @@ func (_m *ImageReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models return r0, r1 } +// GetCustomFields provides a mock function with given fields: ctx, id +func (_m *ImageReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) { + ret := _m.Called(ctx, id) + + var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids +func (_m *ImageReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + ret := _m.Called(ctx, ids) + + var r0 []models.CustomFieldMap + if rf, ok := 
ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.CustomFieldMap) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetFiles provides a mock function with given fields: ctx, relatedID func (_m *ImageReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]models.File, error) { ret := _m.Called(ctx, relatedID) @@ -694,6 +740,20 @@ func (_m *ImageReaderWriter) ResetOCounter(ctx context.Context, id int) (int, er return r0, r1 } +// SetCustomFields provides a mock function with given fields: ctx, id, fields +func (_m *ImageReaderWriter) SetCustomFields(ctx context.Context, id int, fields models.CustomFieldsInput) error { + ret := _m.Called(ctx, id, fields) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.CustomFieldsInput) error); ok { + r0 = rf(ctx, id, fields) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Size provides a mock function with given fields: ctx func (_m *ImageReaderWriter) Size(ctx context.Context) (float64, error) { ret := _m.Called(ctx) diff --git a/pkg/models/mocks/PerformerReaderWriter.go b/pkg/models/mocks/PerformerReaderWriter.go index dbf19a3cd..6487bc5a5 100644 --- a/pkg/models/mocks/PerformerReaderWriter.go +++ b/pkg/models/mocks/PerformerReaderWriter.go @@ -473,6 +473,20 @@ func (_m *PerformerReaderWriter) HasImage(ctx context.Context, performerID int) return r0, r1 } +// Merge provides a mock function with given fields: ctx, source, destination +func (_m *PerformerReaderWriter) Merge(ctx context.Context, source []int, destination int) error { + ret := _m.Called(ctx, source, destination) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, []int, int) error); ok { + r0 = rf(ctx, source, destination) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Query 
provides a mock function with given fields: ctx, performerFilter, findFilter func (_m *PerformerReaderWriter) Query(ctx context.Context, performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) { ret := _m.Called(ctx, performerFilter, findFilter) diff --git a/pkg/models/mocks/SceneReaderWriter.go b/pkg/models/mocks/SceneReaderWriter.go index ef10c890d..0053ad6f8 100644 --- a/pkg/models/mocks/SceneReaderWriter.go +++ b/pkg/models/mocks/SceneReaderWriter.go @@ -754,6 +754,52 @@ func (_m *SceneReaderWriter) GetCover(ctx context.Context, sceneID int) ([]byte, return r0, r1 } +// GetCustomFields provides a mock function with given fields: ctx, id +func (_m *SceneReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) { + ret := _m.Called(ctx, id) + + var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids +func (_m *SceneReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + ret := _m.Called(ctx, ids) + + var r0 []models.CustomFieldMap + if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.CustomFieldMap) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetFiles provides a mock function with given fields: ctx, relatedID func (_m *SceneReaderWriter) GetFiles(ctx context.Context, relatedID int) 
([]*models.VideoFile, error) { ret := _m.Called(ctx, relatedID) @@ -1332,6 +1378,20 @@ func (_m *SceneReaderWriter) SaveActivity(ctx context.Context, sceneID int, resu return r0, r1 } +// SetCustomFields provides a mock function with given fields: ctx, id, fields +func (_m *SceneReaderWriter) SetCustomFields(ctx context.Context, id int, fields models.CustomFieldsInput) error { + ret := _m.Called(ctx, id, fields) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.CustomFieldsInput) error); ok { + r0 = rf(ctx, id, fields) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Size provides a mock function with given fields: ctx func (_m *SceneReaderWriter) Size(ctx context.Context) (float64, error) { ret := _m.Called(ctx) diff --git a/pkg/models/mocks/StudioReaderWriter.go b/pkg/models/mocks/StudioReaderWriter.go index 481565d6f..f57a73aa1 100644 --- a/pkg/models/mocks/StudioReaderWriter.go +++ b/pkg/models/mocks/StudioReaderWriter.go @@ -80,11 +80,11 @@ func (_m *StudioReaderWriter) CountByTagID(ctx context.Context, tagID int) (int, } // Create provides a mock function with given fields: ctx, newStudio -func (_m *StudioReaderWriter) Create(ctx context.Context, newStudio *models.Studio) error { +func (_m *StudioReaderWriter) Create(ctx context.Context, newStudio *models.CreateStudioInput) error { ret := _m.Called(ctx, newStudio) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Studio) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.CreateStudioInput) error); ok { r0 = rf(ctx, newStudio) } else { r0 = ret.Error(0) @@ -291,6 +291,52 @@ func (_m *StudioReaderWriter) GetAliases(ctx context.Context, relatedID int) ([] return r0, r1 } +// GetCustomFields provides a mock function with given fields: ctx, id +func (_m *StudioReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) { + ret := _m.Called(ctx, id) + + var r0 map[string]interface{} + if rf, ok := 
ret.Get(0).(func(context.Context, int) map[string]interface{}); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids +func (_m *StudioReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + ret := _m.Called(ctx, ids) + + var r0 []models.CustomFieldMap + if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.CustomFieldMap) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetImage provides a mock function with given fields: ctx, studioID func (_m *StudioReaderWriter) GetImage(ctx context.Context, studioID int) ([]byte, error) { ret := _m.Called(ctx, studioID) @@ -479,11 +525,11 @@ func (_m *StudioReaderWriter) QueryForAutoTag(ctx context.Context, words []strin } // Update provides a mock function with given fields: ctx, updatedStudio -func (_m *StudioReaderWriter) Update(ctx context.Context, updatedStudio *models.Studio) error { +func (_m *StudioReaderWriter) Update(ctx context.Context, updatedStudio *models.UpdateStudioInput) error { ret := _m.Called(ctx, updatedStudio) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Studio) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.UpdateStudioInput) error); ok { r0 = rf(ctx, updatedStudio) } else { r0 = ret.Error(0) diff --git a/pkg/models/mocks/TagReaderWriter.go b/pkg/models/mocks/TagReaderWriter.go index ac6b10584..c4423ee52 100644 --- a/pkg/models/mocks/TagReaderWriter.go +++ 
b/pkg/models/mocks/TagReaderWriter.go @@ -101,11 +101,11 @@ func (_m *TagReaderWriter) CountByParentTagID(ctx context.Context, parentID int) } // Create provides a mock function with given fields: ctx, newTag -func (_m *TagReaderWriter) Create(ctx context.Context, newTag *models.Tag) error { +func (_m *TagReaderWriter) Create(ctx context.Context, newTag *models.CreateTagInput) error { ret := _m.Called(ctx, newTag) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Tag) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.CreateTagInput) error); ok { r0 = rf(ctx, newTag) } else { r0 = ret.Error(0) @@ -450,6 +450,29 @@ func (_m *TagReaderWriter) FindByStashID(ctx context.Context, stashID models.Sta return r0, r1 } +// FindByStashIDStatus provides a mock function with given fields: ctx, hasStashID, stashboxEndpoint +func (_m *TagReaderWriter) FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*models.Tag, error) { + ret := _m.Called(ctx, hasStashID, stashboxEndpoint) + + var r0 []*models.Tag + if rf, ok := ret.Get(0).(func(context.Context, bool, string) []*models.Tag); ok { + r0 = rf(ctx, hasStashID, stashboxEndpoint) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Tag) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, bool, string) error); ok { + r1 = rf(ctx, hasStashID, stashboxEndpoint) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByStudioID provides a mock function with given fields: ctx, studioID func (_m *TagReaderWriter) FindByStudioID(ctx context.Context, studioID int) ([]*models.Tag, error) { ret := _m.Called(ctx, studioID) @@ -542,6 +565,52 @@ func (_m *TagReaderWriter) GetChildIDs(ctx context.Context, relatedID int) ([]in return r0, r1 } +// GetCustomFields provides a mock function with given fields: ctx, id +func (_m *TagReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) { + ret := 
_m.Called(ctx, id) + + var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids +func (_m *TagReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + ret := _m.Called(ctx, ids) + + var r0 []models.CustomFieldMap + if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.CustomFieldMap) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetImage provides a mock function with given fields: ctx, tagID func (_m *TagReaderWriter) GetImage(ctx context.Context, tagID int) ([]byte, error) { ret := _m.Called(ctx, tagID) @@ -699,12 +768,26 @@ func (_m *TagReaderWriter) QueryForAutoTag(ctx context.Context, words []string) return r0, r1 } +// SetCustomFields provides a mock function with given fields: ctx, id, fields +func (_m *TagReaderWriter) SetCustomFields(ctx context.Context, id int, fields models.CustomFieldsInput) error { + ret := _m.Called(ctx, id, fields) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.CustomFieldsInput) error); ok { + r0 = rf(ctx, id, fields) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Update provides a mock function with given fields: ctx, updatedTag -func (_m *TagReaderWriter) Update(ctx context.Context, updatedTag *models.Tag) error { +func (_m *TagReaderWriter) Update(ctx context.Context, updatedTag *models.UpdateTagInput) error { 
ret := _m.Called(ctx, updatedTag) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Tag) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.UpdateTagInput) error); ok { r0 = rf(ctx, updatedTag) } else { r0 = ret.Error(0) diff --git a/pkg/models/mocks/database.go b/pkg/models/mocks/database.go index ec4177b30..88f106e19 100644 --- a/pkg/models/mocks/database.go +++ b/pkg/models/mocks/database.go @@ -3,6 +3,7 @@ package mocks import ( "context" + "errors" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" @@ -89,6 +90,16 @@ func (db *Database) AssertExpectations(t mock.TestingT) { db.SavedFilter.AssertExpectations(t) } +// WithTxnCtx runs fn with a context that has a transaction hook manager registered, +// so code that calls txn.AddPostCommitHook (e.g. plugin cache) won't nil-panic. +// Always rolls back to avoid executing the registered hooks. +func (db *Database) WithTxnCtx(fn func(ctx context.Context)) { + _ = txn.WithTxn(context.Background(), db, func(ctx context.Context) error { + fn(ctx) + return errors.New("rollback") + }) +} + func (db *Database) Repository() models.Repository { return models.Repository{ TxnManager: db, diff --git a/pkg/models/model_gallery.go b/pkg/models/model_gallery.go index 4b6a3183d..bbdba46a6 100644 --- a/pkg/models/model_gallery.go +++ b/pkg/models/model_gallery.go @@ -46,6 +46,20 @@ func NewGallery() Gallery { } } +type CreateGalleryInput struct { + *Gallery + + FileIDs []FileID + CustomFields map[string]interface{} `json:"custom_fields"` +} + +type UpdateGalleryInput struct { + *Gallery + + FileIDs []FileID + CustomFields CustomFieldsInput `json:"custom_fields"` +} + // GalleryPartial represents part of a Gallery object. It is used to update // the database entry. Only non-nil fields will be updated. 
type GalleryPartial struct { @@ -70,6 +84,8 @@ type GalleryPartial struct { TagIDs *UpdateIDs PerformerIDs *UpdateIDs PrimaryFileID *FileID + + CustomFields CustomFieldsInput } func NewGalleryPartial() GalleryPartial { diff --git a/pkg/models/model_group.go b/pkg/models/model_group.go index 82c71996a..5bfb42c44 100644 --- a/pkg/models/model_group.go +++ b/pkg/models/model_group.go @@ -34,6 +34,14 @@ func NewGroup() Group { } } +type CreateGroupInput struct { + *Group + + CustomFields map[string]interface{} `json:"custom_fields"` + FrontImageData []byte + BackImageData []byte +} + func (m *Group) LoadURLs(ctx context.Context, l URLLoader) error { return m.URLs.load(func() ([]string, error) { return l.GetURLs(ctx, m.ID) @@ -74,6 +82,8 @@ type GroupPartial struct { SubGroups *UpdateGroupDescriptions CreatedAt OptionalTime UpdatedAt OptionalTime + + CustomFields CustomFieldsInput } func NewGroupPartial() GroupPartial { diff --git a/pkg/models/model_image.go b/pkg/models/model_image.go index 1d0993536..72ca61826 100644 --- a/pkg/models/model_image.go +++ b/pkg/models/model_image.go @@ -47,6 +47,13 @@ func NewImage() Image { } } +type CreateImageInput struct { + *Image + + FileIDs []FileID + CustomFields map[string]interface{} `json:"custom_fields"` +} + type ImagePartial struct { Title OptionalString Code OptionalString @@ -66,6 +73,7 @@ type ImagePartial struct { TagIDs *UpdateIDs PerformerIDs *UpdateIDs PrimaryFileID *FileID + CustomFields CustomFieldsInput } func NewImagePartial() ImagePartial { diff --git a/pkg/models/model_performer.go b/pkg/models/model_performer.go index 566dcae1e..7bc3b3174 100644 --- a/pkg/models/model_performer.go +++ b/pkg/models/model_performer.go @@ -6,25 +6,26 @@ import ( ) type Performer struct { - ID int `json:"id"` - Name string `json:"name"` - Disambiguation string `json:"disambiguation"` - Gender *GenderEnum `json:"gender"` - Birthdate *Date `json:"birthdate"` - Ethnicity string `json:"ethnicity"` - Country string `json:"country"` - 
EyeColor string `json:"eye_color"` - Height *int `json:"height"` - Measurements string `json:"measurements"` - FakeTits string `json:"fake_tits"` - PenisLength *float64 `json:"penis_length"` - Circumcised *CircumisedEnum `json:"circumcised"` - CareerLength string `json:"career_length"` - Tattoos string `json:"tattoos"` - Piercings string `json:"piercings"` - Favorite bool `json:"favorite"` - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` + ID int `json:"id"` + Name string `json:"name"` + Disambiguation string `json:"disambiguation"` + Gender *GenderEnum `json:"gender"` + Birthdate *Date `json:"birthdate"` + Ethnicity string `json:"ethnicity"` + Country string `json:"country"` + EyeColor string `json:"eye_color"` + Height *int `json:"height"` + Measurements string `json:"measurements"` + FakeTits string `json:"fake_tits"` + PenisLength *float64 `json:"penis_length"` + Circumcised *CircumcisedEnum `json:"circumcised"` + CareerStart *Date `json:"career_start"` + CareerEnd *Date `json:"career_end"` + Tattoos string `json:"tattoos"` + Piercings string `json:"piercings"` + Favorite bool `json:"favorite"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` // Rating expressed in 1-100 scale Rating *int `json:"rating"` Details string `json:"details"` @@ -75,7 +76,8 @@ type PerformerPartial struct { FakeTits OptionalString PenisLength OptionalFloat64 Circumcised OptionalString - CareerLength OptionalString + CareerStart OptionalDate + CareerEnd OptionalDate Tattoos OptionalString Piercings OptionalString Favorite OptionalBool diff --git a/pkg/models/model_scene.go b/pkg/models/model_scene.go index cf0499388..64ad34b9c 100644 --- a/pkg/models/model_scene.go +++ b/pkg/models/model_scene.go @@ -53,6 +53,20 @@ func NewScene() Scene { } } +type CreateSceneInput struct { + *Scene + + FileIDs []FileID + CoverImage []byte + CustomFields CustomFieldMap `json:"custom_fields"` +} + +type UpdateSceneInput struct { + 
*Scene + + CustomFields CustomFieldsInput `json:"custom_fields"` +} + // ScenePartial represents part of a Scene object. It is used to update // the database entry. type ScenePartial struct { diff --git a/pkg/models/model_scraped_item.go b/pkg/models/model_scraped_item.go index 4254a9876..d20fbd589 100644 --- a/pkg/models/model_scraped_item.go +++ b/pkg/models/model_scraped_item.go @@ -27,9 +27,9 @@ type ScrapedStudio struct { func (ScrapedStudio) IsScrapedContent() {} -func (s *ScrapedStudio) ToStudio(endpoint string, excluded map[string]bool) *Studio { +func (s *ScrapedStudio) ToStudio(endpoint string, excluded map[string]bool) *CreateStudioInput { // Populate a new studio from the input - ret := NewStudio() + ret := NewCreateStudioInput() ret.Name = strings.TrimSpace(s.Name) if s.RemoteSiteID != nil && endpoint != "" && *s.RemoteSiteID != "" { @@ -176,7 +176,9 @@ type ScrapedPerformer struct { FakeTits *string `json:"fake_tits"` PenisLength *string `json:"penis_length"` Circumcised *string `json:"circumcised"` - CareerLength *string `json:"career_length"` + CareerLength *string `json:"career_length"` // deprecated: use CareerStart/CareerEnd + CareerStart *string `json:"career_start"` + CareerEnd *string `json:"career_end"` Tattoos *string `json:"tattoos"` Piercings *string `json:"piercings"` Aliases *string `json:"aliases"` @@ -219,8 +221,20 @@ func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool ret.DeathDate = &date } } - if p.CareerLength != nil && !excluded["career_length"] { - ret.CareerLength = *p.CareerLength + + // assume that career length is _not_ populated in favour of start/end + + if p.CareerStart != nil && !excluded["career_start"] { + date, err := ParseDate(*p.CareerStart) + if err == nil { + ret.CareerStart = &date + } + } + if p.CareerEnd != nil && !excluded["career_end"] { + date, err := ParseDate(*p.CareerEnd) + if err == nil { + ret.CareerEnd = &date + } } if p.Country != nil && !excluded["country"] { ret.Country = 
*p.Country @@ -278,7 +292,7 @@ func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool } } if p.Circumcised != nil && !excluded["circumcised"] { - v := CircumisedEnum(*p.Circumcised) + v := CircumcisedEnum(*p.Circumcised) if v.IsValid() { ret.Circumcised = &v } @@ -356,7 +370,16 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool, } } if p.CareerLength != nil && !excluded["career_length"] { - ret.CareerLength = NewOptionalString(*p.CareerLength) + // parse career_length into career_start/career_end + start, end, err := ParseYearRangeString(*p.CareerLength) + if err == nil { + if start != nil { + ret.CareerStart = NewOptionalDate(*start) + } + if end != nil { + ret.CareerEnd = NewOptionalDate(*end) + } + } } if p.Country != nil && !excluded["country"] { ret.Country = NewOptionalString(*p.Country) @@ -452,9 +475,12 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool, type ScrapedTag struct { // Set if tag matched - StoredID *string `json:"stored_id"` - Name string `json:"name"` - RemoteSiteID *string `json:"remote_site_id"` + StoredID *string `json:"stored_id"` + Name string `json:"name"` + Description *string `json:"description"` + AliasList []string `json:"alias_list"` + RemoteSiteID *string `json:"remote_site_id"` + Parent *ScrapedTag `json:"parent"` } func (ScrapedTag) IsScrapedContent() {} @@ -463,6 +489,24 @@ func (t *ScrapedTag) ToTag(endpoint string, excluded map[string]bool) *Tag { currentTime := time.Now() ret := NewTag() ret.Name = t.Name + ret.ParentIDs = NewRelatedIDs([]int{}) + ret.ChildIDs = NewRelatedIDs([]int{}) + ret.Aliases = NewRelatedStrings([]string{}) + + if t.Description != nil && !excluded["description"] { + ret.Description = *t.Description + } + + if len(t.AliasList) > 0 && !excluded["aliases"] { + ret.Aliases = NewRelatedStrings(t.AliasList) + } + + if t.Parent != nil && t.Parent.StoredID != nil { + parentID, err := strconv.Atoi(*t.Parent.StoredID) + if err 
== nil && parentID > 0 { + ret.ParentIDs = NewRelatedIDs([]int{parentID}) + } + } if t.RemoteSiteID != nil && endpoint != "" && *t.RemoteSiteID != "" { ret.StashIDs = NewRelatedStashIDs([]StashID{ @@ -477,6 +521,49 @@ func (t *ScrapedTag) ToTag(endpoint string, excluded map[string]bool) *Tag { return &ret } +func (t *ScrapedTag) ToPartial(storedID string, endpoint string, excluded map[string]bool, existingStashIDs []StashID) TagPartial { + ret := NewTagPartial() + + if t.Name != "" && !excluded["name"] { + ret.Name = NewOptionalString(t.Name) + } + + if t.Description != nil && !excluded["description"] { + ret.Description = NewOptionalString(*t.Description) + } + + if len(t.AliasList) > 0 && !excluded["aliases"] { + ret.Aliases = &UpdateStrings{ + Values: t.AliasList, + Mode: RelationshipUpdateModeSet, + } + } + + if t.Parent != nil && t.Parent.StoredID != nil { + parentID, err := strconv.Atoi(*t.Parent.StoredID) + if err == nil && parentID > 0 { + ret.ParentIDs = &UpdateIDs{ + IDs: []int{parentID}, + Mode: RelationshipUpdateModeAdd, + } + } + } + + if t.RemoteSiteID != nil && endpoint != "" && *t.RemoteSiteID != "" { + ret.StashIDs = &UpdateStashIDs{ + StashIDs: existingStashIDs, + Mode: RelationshipUpdateModeSet, + } + ret.StashIDs.Set(StashID{ + Endpoint: endpoint, + StashID: *t.RemoteSiteID, + UpdatedAt: time.Now(), + }) + } + + return ret +} + func ScrapedTagSortFunction(a, b *ScrapedTag) int { return strings.Compare(strings.ToLower(a.Name), strings.ToLower(b.Name)) } diff --git a/pkg/models/model_scraped_item_test.go b/pkg/models/model_scraped_item_test.go index b6b44025f..1956d8a0b 100644 --- a/pkg/models/model_scraped_item_test.go +++ b/pkg/models/model_scraped_item_test.go @@ -113,7 +113,7 @@ func Test_scrapedToStudioInput(t *testing.T) { got.StashIDs.List()[stid].UpdatedAt = time.Time{} } } - assert.Equal(t, tt.want, got) + assert.Equal(t, tt.want, got.Studio) }) } } @@ -124,9 +124,10 @@ func Test_scrapedToPerformerInput(t *testing.T) { endpoint := 
"endpoint" remoteSiteID := "remoteSiteID" - var stringValues []string - for i := 0; i < 20; i++ { - stringValues = append(stringValues, strconv.Itoa(i)) + const nValues = 19 + stringValues := make([]string, nValues) + for i := 0; i < nValues; i++ { + stringValues[i] = strconv.Itoa(i) } upTo := 0 @@ -183,7 +184,8 @@ func Test_scrapedToPerformerInput(t *testing.T) { Weight: nextVal(), Measurements: nextVal(), FakeTits: nextVal(), - CareerLength: nextVal(), + CareerStart: dateStrFromInt(2005), + CareerEnd: dateStrFromInt(2015), Tattoos: nextVal(), Piercings: nextVal(), Aliases: nextVal(), @@ -208,8 +210,9 @@ func Test_scrapedToPerformerInput(t *testing.T) { Weight: nextIntVal(), Measurements: *nextVal(), FakeTits: *nextVal(), - CareerLength: *nextVal(), - Tattoos: *nextVal(), + CareerStart: dateFromInt(2005), + CareerEnd: dateFromInt(2015), + Tattoos: *nextVal(), // skip CareerLength counter slot Piercings: *nextVal(), Aliases: NewRelatedStrings([]string{*nextVal()}), URLs: NewRelatedStrings([]string{*nextVal(), *nextVal(), *nextVal()}), diff --git a/pkg/models/model_studio.go b/pkg/models/model_studio.go index 8c7a687af..ec81aac0e 100644 --- a/pkg/models/model_studio.go +++ b/pkg/models/model_studio.go @@ -16,6 +16,7 @@ type Studio struct { Favorite bool `json:"favorite"` Details string `json:"details"` IgnoreAutoTag bool `json:"ignore_auto_tag"` + Organized bool `json:"organized"` Aliases RelatedStrings `json:"aliases"` URLs RelatedStrings `json:"urls"` @@ -23,6 +24,18 @@ type Studio struct { StashIDs RelatedStashIDs `json:"stash_ids"` } +type CreateStudioInput struct { + *Studio + + CustomFields map[string]interface{} `json:"custom_fields"` +} + +type UpdateStudioInput struct { + *Studio + + CustomFields CustomFieldsInput `json:"custom_fields"` +} + func NewStudio() Studio { currentTime := time.Now() return Studio{ @@ -31,6 +44,13 @@ func NewStudio() Studio { } } +func NewCreateStudioInput() CreateStudioInput { + s := NewStudio() + return CreateStudioInput{ + 
Studio: &s, + } +} + // StudioPartial represents part of a Studio object. It is used to update the database entry. type StudioPartial struct { ID int @@ -43,11 +63,14 @@ type StudioPartial struct { CreatedAt OptionalTime UpdatedAt OptionalTime IgnoreAutoTag OptionalBool + Organized OptionalBool Aliases *UpdateStrings URLs *UpdateStrings TagIDs *UpdateIDs StashIDs *UpdateStashIDs + + CustomFields CustomFieldsInput } func NewStudioPartial() StudioPartial { diff --git a/pkg/models/model_tag.go b/pkg/models/model_tag.go index 4cd038f7e..aee468639 100644 --- a/pkg/models/model_tag.go +++ b/pkg/models/model_tag.go @@ -29,6 +29,18 @@ func NewTag() Tag { } } +type CreateTagInput struct { + *Tag + + CustomFields map[string]interface{} `json:"custom_fields"` +} + +type UpdateTagInput struct { + *Tag + + CustomFields CustomFieldsInput `json:"custom_fields"` +} + func (s *Tag) LoadAliases(ctx context.Context, l AliasLoader) error { return s.Aliases.load(func() ([]string, error) { return l.GetAliases(ctx, s.ID) @@ -66,6 +78,8 @@ type TagPartial struct { ParentIDs *UpdateIDs ChildIDs *UpdateIDs StashIDs *UpdateStashIDs + + CustomFields CustomFieldsInput } func NewTagPartial() TagPartial { diff --git a/pkg/models/performer.go b/pkg/models/performer.go index 239d8347f..606b87f9f 100644 --- a/pkg/models/performer.go +++ b/pkg/models/performer.go @@ -61,49 +61,49 @@ type GenderCriterionInput struct { Modifier CriterionModifier `json:"modifier"` } -type CircumisedEnum string +type CircumcisedEnum string const ( - CircumisedEnumCut CircumisedEnum = "CUT" - CircumisedEnumUncut CircumisedEnum = "UNCUT" + CircumcisedEnumCut CircumcisedEnum = "CUT" + CircumcisedEnumUncut CircumcisedEnum = "UNCUT" ) -var AllCircumcisionEnum = []CircumisedEnum{ - CircumisedEnumCut, - CircumisedEnumUncut, +var AllCircumcisionEnum = []CircumcisedEnum{ + CircumcisedEnumCut, + CircumcisedEnumUncut, } -func (e CircumisedEnum) IsValid() bool { +func (e CircumcisedEnum) IsValid() bool { switch e { - case 
CircumisedEnumCut, CircumisedEnumUncut: + case CircumcisedEnumCut, CircumcisedEnumUncut: return true } return false } -func (e CircumisedEnum) String() string { +func (e CircumcisedEnum) String() string { return string(e) } -func (e *CircumisedEnum) UnmarshalGQL(v interface{}) error { +func (e *CircumcisedEnum) UnmarshalGQL(v interface{}) error { str, ok := v.(string) if !ok { return fmt.Errorf("enums must be strings") } - *e = CircumisedEnum(str) + *e = CircumcisedEnum(str) if !e.IsValid() { - return fmt.Errorf("%s is not a valid CircumisedEnum", str) + return fmt.Errorf("%s is not a valid CircumcisedEnum", str) } return nil } -func (e CircumisedEnum) MarshalGQL(w io.Writer) { +func (e CircumcisedEnum) MarshalGQL(w io.Writer) { fmt.Fprint(w, strconv.Quote(e.String())) } type CircumcisionCriterionInput struct { - Value []CircumisedEnum `json:"value"` + Value []CircumcisedEnum `json:"value"` Modifier CriterionModifier `json:"modifier"` } @@ -137,7 +137,11 @@ type PerformerFilterType struct { // Filter by circumcision Circumcised *CircumcisionCriterionInput `json:"circumcised"` // Filter by career length - CareerLength *StringCriterionInput `json:"career_length"` + CareerLength *StringCriterionInput `json:"career_length"` // deprecated + // Filter by career start year + CareerStart *DateCriterionInput `json:"career_start"` + // Filter by career end year + CareerEnd *DateCriterionInput `json:"career_end"` // Filter by tattoos Tattoos *StringCriterionInput `json:"tattoos"` // Filter by piercings @@ -154,6 +158,8 @@ type PerformerFilterType struct { TagCount *IntCriterionInput `json:"tag_count"` // Filter by scene count SceneCount *IntCriterionInput `json:"scene_count"` + // Filter by scene marker count (via scene) + MarkerCount *IntCriterionInput `json:"marker_count"` // Filter by image count ImageCount *IntCriterionInput `json:"image_count"` // Filter by gallery count @@ -166,6 +172,8 @@ type PerformerFilterType struct { StashID *StringCriterionInput `json:"stash_id"` 
// Filter by StashID Endpoint StashIDEndpoint *StashIDCriterionInput `json:"stash_id_endpoint"` + // Filter by StashIDs Endpoint + StashIDsEndpoint *StashIDsCriterionInput `json:"stash_ids_endpoint"` // Filter by rating expressed as 1-100 Rating100 *IntCriterionInput `json:"rating100"` // Filter by url @@ -196,6 +204,8 @@ type PerformerFilterType struct { GalleriesFilter *GalleryFilterType `json:"galleries_filter"` // Filter by related tags that meet this criteria TagsFilter *TagFilterType `json:"tags_filter"` + // Filter by related scene markers (via scene) that meet this criteria + MarkersFilter *SceneMarkerFilterType `json:"markers_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at @@ -206,30 +216,32 @@ type PerformerFilterType struct { } type PerformerCreateInput struct { - Name string `json:"name"` - Disambiguation *string `json:"disambiguation"` - URL *string `json:"url"` // deprecated - Urls []string `json:"urls"` - Gender *GenderEnum `json:"gender"` - Birthdate *string `json:"birthdate"` - Ethnicity *string `json:"ethnicity"` - Country *string `json:"country"` - EyeColor *string `json:"eye_color"` - Height *string `json:"height"` - HeightCm *int `json:"height_cm"` - Measurements *string `json:"measurements"` - FakeTits *string `json:"fake_tits"` - PenisLength *float64 `json:"penis_length"` - Circumcised *CircumisedEnum `json:"circumcised"` - CareerLength *string `json:"career_length"` - Tattoos *string `json:"tattoos"` - Piercings *string `json:"piercings"` - Aliases *string `json:"aliases"` - AliasList []string `json:"alias_list"` - Twitter *string `json:"twitter"` // deprecated - Instagram *string `json:"instagram"` // deprecated - Favorite *bool `json:"favorite"` - TagIds []string `json:"tag_ids"` + Name string `json:"name"` + Disambiguation *string `json:"disambiguation"` + URL *string `json:"url"` // deprecated + Urls []string `json:"urls"` + Gender *GenderEnum `json:"gender"` + Birthdate 
*string `json:"birthdate"` + Ethnicity *string `json:"ethnicity"` + Country *string `json:"country"` + EyeColor *string `json:"eye_color"` + Height *string `json:"height"` + HeightCm *int `json:"height_cm"` + Measurements *string `json:"measurements"` + FakeTits *string `json:"fake_tits"` + PenisLength *float64 `json:"penis_length"` + Circumcised *CircumcisedEnum `json:"circumcised"` + CareerLength *string `json:"career_length"` + CareerStart *string `json:"career_start"` + CareerEnd *string `json:"career_end"` + Tattoos *string `json:"tattoos"` + Piercings *string `json:"piercings"` + Aliases *string `json:"aliases"` + AliasList []string `json:"alias_list"` + Twitter *string `json:"twitter"` // deprecated + Instagram *string `json:"instagram"` // deprecated + Favorite *bool `json:"favorite"` + TagIds []string `json:"tag_ids"` // This should be a URL or a base64 encoded data URL Image *string `json:"image"` StashIds []StashIDInput `json:"stash_ids"` @@ -244,31 +256,33 @@ type PerformerCreateInput struct { } type PerformerUpdateInput struct { - ID string `json:"id"` - Name *string `json:"name"` - Disambiguation *string `json:"disambiguation"` - URL *string `json:"url"` // deprecated - Urls []string `json:"urls"` - Gender *GenderEnum `json:"gender"` - Birthdate *string `json:"birthdate"` - Ethnicity *string `json:"ethnicity"` - Country *string `json:"country"` - EyeColor *string `json:"eye_color"` - Height *string `json:"height"` - HeightCm *int `json:"height_cm"` - Measurements *string `json:"measurements"` - FakeTits *string `json:"fake_tits"` - PenisLength *float64 `json:"penis_length"` - Circumcised *CircumisedEnum `json:"circumcised"` - CareerLength *string `json:"career_length"` - Tattoos *string `json:"tattoos"` - Piercings *string `json:"piercings"` - Aliases *string `json:"aliases"` - AliasList []string `json:"alias_list"` - Twitter *string `json:"twitter"` // deprecated - Instagram *string `json:"instagram"` // deprecated - Favorite *bool `json:"favorite"` 
- TagIds []string `json:"tag_ids"` + ID string `json:"id"` + Name *string `json:"name"` + Disambiguation *string `json:"disambiguation"` + URL *string `json:"url"` // deprecated + Urls []string `json:"urls"` + Gender *GenderEnum `json:"gender"` + Birthdate *string `json:"birthdate"` + Ethnicity *string `json:"ethnicity"` + Country *string `json:"country"` + EyeColor *string `json:"eye_color"` + Height *string `json:"height"` + HeightCm *int `json:"height_cm"` + Measurements *string `json:"measurements"` + FakeTits *string `json:"fake_tits"` + PenisLength *float64 `json:"penis_length"` + Circumcised *CircumcisedEnum `json:"circumcised"` + CareerLength *string `json:"career_length"` + CareerStart *string `json:"career_start"` + CareerEnd *string `json:"career_end"` + Tattoos *string `json:"tattoos"` + Piercings *string `json:"piercings"` + Aliases *string `json:"aliases"` + AliasList []string `json:"alias_list"` + Twitter *string `json:"twitter"` // deprecated + Instagram *string `json:"instagram"` // deprecated + Favorite *bool `json:"favorite"` + TagIds []string `json:"tag_ids"` // This should be a URL or a base64 encoded data URL Image *string `json:"image"` StashIds []StashIDInput `json:"stash_ids"` diff --git a/pkg/models/repository_file.go b/pkg/models/repository_file.go index c851ce08c..e1ac0b213 100644 --- a/pkg/models/repository_file.go +++ b/pkg/models/repository_file.go @@ -14,7 +14,7 @@ type FileGetter interface { type FileFinder interface { FileGetter FindAllByPath(ctx context.Context, path string, caseSensitive bool) ([]File, error) - FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]File, error) + FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit, offset int) ([]File, error) FindByPath(ctx context.Context, path string, caseSensitive bool) (File, error) FindByFingerprint(ctx context.Context, fp Fingerprint) ([]File, error) FindByZipFileID(ctx context.Context, zipFileID FileID) ([]File, error) diff --git 
a/pkg/models/repository_folder.go b/pkg/models/repository_folder.go index 3d0fdb822..1169e53ac 100644 --- a/pkg/models/repository_folder.go +++ b/pkg/models/repository_folder.go @@ -11,10 +11,12 @@ type FolderGetter interface { // FolderFinder provides methods to find folders. type FolderFinder interface { FolderGetter - FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*Folder, error) + FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit, offset int) ([]*Folder, error) FindByPath(ctx context.Context, path string, caseSensitive bool) (*Folder, error) FindByZipFileID(ctx context.Context, zipFileID FileID) ([]*Folder, error) FindByParentFolderID(ctx context.Context, parentFolderID FolderID) ([]*Folder, error) + GetManyParentFolderIDs(ctx context.Context, folderIDs []FolderID) ([][]FolderID, error) + GetManySubFolderIDs(ctx context.Context, folderIDs []FolderID) ([][]FolderID, error) } type FolderQueryer interface { diff --git a/pkg/models/repository_gallery.go b/pkg/models/repository_gallery.go index 0cfb9964f..8fc3b29d5 100644 --- a/pkg/models/repository_gallery.go +++ b/pkg/models/repository_gallery.go @@ -37,12 +37,12 @@ type GalleryCounter interface { // GalleryCreator provides methods to create galleries. type GalleryCreator interface { - Create(ctx context.Context, newGallery *Gallery, fileIDs []FileID) error + Create(ctx context.Context, newGallery *CreateGalleryInput) error } // GalleryUpdater provides methods to update galleries. 
type GalleryUpdater interface { - Update(ctx context.Context, updatedGallery *Gallery) error + Update(ctx context.Context, updatedGallery *UpdateGalleryInput) error UpdatePartial(ctx context.Context, id int, updatedGallery GalleryPartial) (*Gallery, error) UpdateImages(ctx context.Context, galleryID int, imageIDs []int) error } @@ -70,6 +70,7 @@ type GalleryReader interface { PerformerIDLoader TagIDLoader FileLoader + CustomFieldsReader All(ctx context.Context) ([]*Gallery, error) } @@ -80,6 +81,9 @@ type GalleryWriter interface { GalleryUpdater GalleryDestroyer + CustomFieldsWriter + + AddSceneIDs(ctx context.Context, galleryID int, sceneIDs []int) error AddFileID(ctx context.Context, id int, fileID FileID) error AddImages(ctx context.Context, galleryID int, imageIDs ...int) error RemoveImages(ctx context.Context, galleryID int, imageIDs ...int) error diff --git a/pkg/models/repository_group.go b/pkg/models/repository_group.go index 704390d77..d7f74de64 100644 --- a/pkg/models/repository_group.go +++ b/pkg/models/repository_group.go @@ -68,6 +68,7 @@ type GroupReader interface { TagIDLoader ContainingGroupLoader SubGroupLoader + CustomFieldsReader All(ctx context.Context) ([]*Group, error) GetFrontImage(ctx context.Context, groupID int) ([]byte, error) @@ -81,6 +82,7 @@ type GroupWriter interface { GroupCreator GroupUpdater GroupDestroyer + CustomFieldsWriter } // GroupReaderWriter provides all group methods. diff --git a/pkg/models/repository_image.go b/pkg/models/repository_image.go index 672ecd063..99dab3479 100644 --- a/pkg/models/repository_image.go +++ b/pkg/models/repository_image.go @@ -43,7 +43,7 @@ type ImageCounter interface { // ImageCreator provides methods to create images. type ImageCreator interface { - Create(ctx context.Context, newImage *Image, fileIDs []FileID) error + Create(ctx context.Context, newImage *CreateImageInput) error } // ImageUpdater provides methods to update images. 
@@ -78,6 +78,7 @@ type ImageReader interface { FileLoader GalleryCoverFinder + CustomFieldsReader All(ctx context.Context) ([]*Image, error) Size(ctx context.Context) (float64, error) @@ -88,6 +89,7 @@ type ImageWriter interface { ImageCreator ImageUpdater ImageDestroyer + CustomFieldsWriter AddFileID(ctx context.Context, id int, fileID FileID) error RemoveFileID(ctx context.Context, id int, fileID FileID) error diff --git a/pkg/models/repository_performer.go b/pkg/models/repository_performer.go index ad0b61da0..175208c9d 100644 --- a/pkg/models/repository_performer.go +++ b/pkg/models/repository_performer.go @@ -92,6 +92,8 @@ type PerformerWriter interface { PerformerCreator PerformerUpdater PerformerDestroyer + + Merge(ctx context.Context, source []int, destination int) error } // PerformerReaderWriter provides all performer methods. diff --git a/pkg/models/repository_scene.go b/pkg/models/repository_scene.go index 8c2833470..6b795c3af 100644 --- a/pkg/models/repository_scene.go +++ b/pkg/models/repository_scene.go @@ -104,6 +104,7 @@ type SceneReader interface { SceneGroupLoader StashIDLoader VideoFileLoader + CustomFieldsReader All(ctx context.Context) ([]*Scene, error) Wall(ctx context.Context, q *string) ([]*Scene, error) @@ -140,6 +141,7 @@ type SceneWriter interface { ViewHistoryWriter SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) ResetActivity(ctx context.Context, sceneID int, resetResume bool, resetDuration bool) (bool, error) + CustomFieldsWriter } // SceneReaderWriter provides all scene methods. diff --git a/pkg/models/repository_studio.go b/pkg/models/repository_studio.go index 99f98bffc..54fb6ed47 100644 --- a/pkg/models/repository_studio.go +++ b/pkg/models/repository_studio.go @@ -42,12 +42,12 @@ type StudioCounter interface { // StudioCreator provides methods to create studios. 
type StudioCreator interface { - Create(ctx context.Context, newStudio *Studio) error + Create(ctx context.Context, newStudio *CreateStudioInput) error } // StudioUpdater provides methods to update studios. type StudioUpdater interface { - Update(ctx context.Context, updatedStudio *Studio) error + Update(ctx context.Context, updatedStudio *UpdateStudioInput) error UpdatePartial(ctx context.Context, updatedStudio StudioPartial) (*Studio, error) UpdateImage(ctx context.Context, studioID int, image []byte) error } @@ -79,6 +79,8 @@ type StudioReader interface { TagIDLoader URLLoader + CustomFieldsReader + All(ctx context.Context) ([]*Studio, error) GetImage(ctx context.Context, studioID int) ([]byte, error) HasImage(ctx context.Context, studioID int) (bool, error) diff --git a/pkg/models/repository_tag.go b/pkg/models/repository_tag.go index a7f828f0b..02dfe0cb6 100644 --- a/pkg/models/repository_tag.go +++ b/pkg/models/repository_tag.go @@ -26,6 +26,7 @@ type TagFinder interface { FindByName(ctx context.Context, name string, nocase bool) (*Tag, error) FindByNames(ctx context.Context, names []string, nocase bool) ([]*Tag, error) FindByStashID(ctx context.Context, stashID StashID) ([]*Tag, error) + FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*Tag, error) } // TagQueryer provides methods to query tags. @@ -51,12 +52,12 @@ type TagCounter interface { // TagCreator provides methods to create tags. type TagCreator interface { - Create(ctx context.Context, newTag *Tag) error + Create(ctx context.Context, newTag *CreateTagInput) error } // TagUpdater provides methods to update tags. 
type TagUpdater interface { - Update(ctx context.Context, updatedTag *Tag) error + Update(ctx context.Context, updatedTag *UpdateTagInput) error UpdatePartial(ctx context.Context, id int, updateTag TagPartial) (*Tag, error) UpdateAliases(ctx context.Context, tagID int, aliases []string) error UpdateImage(ctx context.Context, tagID int, image []byte) error @@ -77,6 +78,7 @@ type TagFinderCreator interface { type TagCreatorUpdater interface { TagCreator TagUpdater + CustomFieldsWriter } // TagReader provides all methods to read tags. @@ -89,6 +91,7 @@ type TagReader interface { AliasLoader TagRelationLoader StashIDLoader + CustomFieldsReader All(ctx context.Context) ([]*Tag, error) GetImage(ctx context.Context, tagID int) ([]byte, error) @@ -100,6 +103,7 @@ type TagWriter interface { TagCreator TagUpdater TagDestroyer + CustomFieldsWriter Merge(ctx context.Context, source []int, destination int) error } diff --git a/pkg/models/scene.go b/pkg/models/scene.go index f0a863bf7..839452501 100644 --- a/pkg/models/scene.go +++ b/pkg/models/scene.go @@ -2,10 +2,28 @@ package models import "context" -type PHashDuplicationCriterionInput struct { +type DuplicationCriterionInput struct { + // Deprecated: Use Phash field instead. Kept for backwards compatibility. Duplicated *bool `json:"duplicated"` - // Currently unimplemented + // Currently unimplemented. Intended for phash distance matching. Distance *int `json:"distance"` + // Filter by phash duplication + Phash *bool `json:"phash"` + // Filter by URL duplication + URL *bool `json:"url"` + // Filter by Stash ID duplication + StashID *bool `json:"stash_id"` + // Filter by title duplication + Title *bool `json:"title"` +} + +type FileDuplicationCriterionInput struct { + // Deprecated: Use Phash field instead. Kept for backwards compatibility. + Duplicated *bool `json:"duplicated"` + // Currently unimplemented. Intended for phash distance matching. 
+ Distance *int `json:"distance"` + // Filter by phash duplication + Phash *bool `json:"phash"` } type SceneFilterType struct { @@ -33,8 +51,8 @@ type SceneFilterType struct { Organized *bool `json:"organized"` // Filter by o-counter OCounter *IntCriterionInput `json:"o_counter"` - // Filter Scenes that have an exact phash match available - Duplicated *PHashDuplicationCriterionInput `json:"duplicated"` + // Filter Scenes by duplication criteria + Duplicated *DuplicationCriterionInput `json:"duplicated"` // Filter by resolution Resolution *ResolutionCriterionInput `json:"resolution"` // Filter by orientation @@ -79,6 +97,10 @@ type SceneFilterType struct { StashID *StringCriterionInput `json:"stash_id"` // Filter by StashID Endpoint StashIDEndpoint *StashIDCriterionInput `json:"stash_id_endpoint"` + // Filter by StashIDs Endpoint + StashIDsEndpoint *StashIDsCriterionInput `json:"stash_ids_endpoint"` + // Filter by StashID count + StashIDCount *IntCriterionInput `json:"stash_id_count"` // Filter by url URL *StringCriterionInput `json:"url"` // Filter by interactive @@ -117,6 +139,9 @@ type SceneFilterType struct { CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` + + // Filter by custom fields + CustomFields []CustomFieldCriterionInput `json:"custom_fields"` } type SceneQueryOptions struct { @@ -170,7 +195,8 @@ type SceneCreateInput struct { // The first id will be assigned as primary. // Files will be reassigned from existing scenes if applicable. // Files must not already be primary for another scene. 
- FileIds []string `json:"file_ids"` + FileIds []string `json:"file_ids"` + CustomFields map[string]any `json:"custom_fields,omitempty"` } type SceneUpdateInput struct { @@ -199,18 +225,21 @@ type SceneUpdateInput struct { PlayDuration *float64 `json:"play_duration"` PlayCount *int `json:"play_count"` PrimaryFileID *string `json:"primary_file_id"` + CustomFields *CustomFieldsInput } type SceneDestroyInput struct { - ID string `json:"id"` - DeleteFile *bool `json:"delete_file"` - DeleteGenerated *bool `json:"delete_generated"` + ID string `json:"id"` + DeleteFile *bool `json:"delete_file"` + DeleteGenerated *bool `json:"delete_generated"` + DestroyFileEntry *bool `json:"destroy_file_entry"` } type ScenesDestroyInput struct { - Ids []string `json:"ids"` - DeleteFile *bool `json:"delete_file"` - DeleteGenerated *bool `json:"delete_generated"` + Ids []string `json:"ids"` + DeleteFile *bool `json:"delete_file"` + DeleteGenerated *bool `json:"delete_generated"` + DestroyFileEntry *bool `json:"destroy_file_entry"` } func NewSceneQueryResult(getter SceneGetter) *SceneQueryResult { diff --git a/pkg/models/stash_ids.go b/pkg/models/stash_ids.go index d761e959f..d73bfd880 100644 --- a/pkg/models/stash_ids.go +++ b/pkg/models/stash_ids.go @@ -129,8 +129,16 @@ func (u *UpdateStashIDs) Set(v StashID) { type StashIDCriterionInput struct { // If present, this value is treated as a predicate. - // That is, it will filter based on stash_ids with the matching endpoint + // That is, it will filter based on stash_id with the matching endpoint Endpoint *string `json:"endpoint"` StashID *string `json:"stash_id"` Modifier CriterionModifier `json:"modifier"` } + +type StashIDsCriterionInput struct { + // If present, this value is treated as a predicate. 
+ // That is, it will filter based on stash_ids with the matching endpoint + Endpoint *string `json:"endpoint"` + StashIDs []*string `json:"stash_ids"` + Modifier CriterionModifier `json:"modifier"` +} diff --git a/pkg/models/studio.go b/pkg/models/studio.go index 171168129..7ad8719ac 100644 --- a/pkg/models/studio.go +++ b/pkg/models/studio.go @@ -10,6 +10,8 @@ type StudioFilterType struct { StashID *StringCriterionInput `json:"stash_id"` // Filter by StashID Endpoint StashIDEndpoint *StashIDCriterionInput `json:"stash_id_endpoint"` + // Filter by StashIDs Endpoint + StashIDsEndpoint *StashIDsCriterionInput `json:"stash_ids_endpoint"` // Filter to only include studios missing this property IsMissing *string `json:"is_missing"` // Filter by rating expressed as 1-100 @@ -26,6 +28,8 @@ type StudioFilterType struct { ImageCount *IntCriterionInput `json:"image_count"` // Filter by gallery count GalleryCount *IntCriterionInput `json:"gallery_count"` + // Filter by group count + GroupCount *IntCriterionInput `json:"group_count"` // Filter by url URL *StringCriterionInput `json:"url"` // Filter by studio aliases @@ -34,16 +38,23 @@ type StudioFilterType struct { ChildCount *IntCriterionInput `json:"child_count"` // Filter by autotag ignore value IgnoreAutoTag *bool `json:"ignore_auto_tag"` + // Filter by organized + Organized *bool `json:"organized"` // Filter by related scenes that meet this criteria ScenesFilter *SceneFilterType `json:"scenes_filter"` // Filter by related images that meet this criteria ImagesFilter *ImageFilterType `json:"images_filter"` // Filter by related galleries that meet this criteria GalleriesFilter *GalleryFilterType `json:"galleries_filter"` + // Filter by related groups that meet this criteria + GroupsFilter *GroupFilterType `json:"groups_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` + + // Filter by custom fields + 
CustomFields []CustomFieldCriterionInput `json:"custom_fields"` } type StudioCreateInput struct { @@ -60,6 +71,9 @@ type StudioCreateInput struct { Aliases []string `json:"aliases"` TagIds []string `json:"tag_ids"` IgnoreAutoTag *bool `json:"ignore_auto_tag"` + Organized *bool `json:"organized"` + + CustomFields map[string]interface{} `json:"custom_fields"` } type StudioUpdateInput struct { @@ -77,4 +91,7 @@ type StudioUpdateInput struct { Aliases []string `json:"aliases"` TagIds []string `json:"tag_ids"` IgnoreAutoTag *bool `json:"ignore_auto_tag"` + Organized *bool `json:"organized"` + + CustomFields CustomFieldsInput `json:"custom_fields"` } diff --git a/pkg/models/tag.go b/pkg/models/tag.go index 29b7e9be3..b166e5a69 100644 --- a/pkg/models/tag.go +++ b/pkg/models/tag.go @@ -42,14 +42,27 @@ type TagFilterType struct { IgnoreAutoTag *bool `json:"ignore_auto_tag"` // Filter by StashID Endpoint StashIDEndpoint *StashIDCriterionInput `json:"stash_id_endpoint"` + // Filter by StashIDs Endpoint + StashIDsEndpoint *StashIDsCriterionInput `json:"stash_ids_endpoint"` // Filter by related scenes that meet this criteria ScenesFilter *SceneFilterType `json:"scenes_filter"` // Filter by related images that meet this criteria ImagesFilter *ImageFilterType `json:"images_filter"` // Filter by related galleries that meet this criteria GalleriesFilter *GalleryFilterType `json:"galleries_filter"` + // Filter by related groups that meet this criteria + GroupsFilter *GroupFilterType `json:"groups_filter"` + // Filter by related performers that meet this criteria + PerformersFilter *PerformerFilterType `json:"performers_filter"` + // Filter by related studios that meet this criteria + StudiosFilter *StudioFilterType `json:"studios_filter"` + // Filter by related scene markers that meet this criteria + MarkersFilter *SceneMarkerFilterType `json:"markers_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at UpdatedAt 
*TimestampCriterionInput `json:"updated_at"` + + // Filter by custom fields + CustomFields []CustomFieldCriterionInput `json:"custom_fields"` } diff --git a/pkg/performer/export.go b/pkg/performer/export.go index 1455fb7bf..d7807f651 100644 --- a/pkg/performer/export.go +++ b/pkg/performer/export.go @@ -30,7 +30,6 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode EyeColor: performer.EyeColor, Measurements: performer.Measurements, FakeTits: performer.FakeTits, - CareerLength: performer.CareerLength, Tattoos: performer.Tattoos, Piercings: performer.Piercings, Favorite: performer.Favorite, @@ -71,6 +70,13 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode newPerformerJSON.PenisLength = *performer.PenisLength } + if performer.CareerStart != nil { + newPerformerJSON.CareerStart = performer.CareerStart.String() + } + if performer.CareerEnd != nil { + newPerformerJSON.CareerEnd = performer.CareerEnd.String() + } + if err := performer.LoadAliases(ctx, reader); err != nil { return nil, fmt.Errorf("loading performer aliases: %w", err) } diff --git a/pkg/performer/export_test.go b/pkg/performer/export_test.go index e51049e14..2cf476321 100644 --- a/pkg/performer/export_test.go +++ b/pkg/performer/export_test.go @@ -26,7 +26,6 @@ const ( performerName = "testPerformer" disambiguation = "disambiguation" url = "url" - careerLength = "careerLength" country = "country" ethnicity = "ethnicity" eyeColor = "eyeColor" @@ -49,8 +48,10 @@ var ( rating = 5 height = 123 weight = 60 + careerStart, _ = models.ParseDate("2005") + careerEnd, _ = models.ParseDate("2015") penisLength = 1.23 - circumcisedEnum = models.CircumisedEnumCut + circumcisedEnum = models.CircumcisedEnumCut circumcised = circumcisedEnum.String() emptyCustomFields = make(map[string]interface{}) @@ -87,7 +88,8 @@ func createFullPerformer(id int, name string) *models.Performer { URLs: models.NewRelatedStrings([]string{url, twitter, instagram}), Aliases: 
models.NewRelatedStrings(aliases), Birthdate: &birthDate, - CareerLength: careerLength, + CareerStart: &careerStart, + CareerEnd: &careerEnd, Country: country, Ethnicity: ethnicity, EyeColor: eyeColor, @@ -132,7 +134,8 @@ func createFullJSONPerformer(name string, image string, withCustomFields bool) * URLs: []string{url, twitter, instagram}, Aliases: aliases, Birthdate: birthDate.String(), - CareerLength: careerLength, + CareerStart: careerStart.String(), + CareerEnd: careerEnd.String(), Country: country, Ethnicity: ethnicity, EyeColor: eyeColor, diff --git a/pkg/performer/import.go b/pkg/performer/import.go index 622af2b1a..62b4d87d0 100644 --- a/pkg/performer/import.go +++ b/pkg/performer/import.go @@ -32,14 +32,17 @@ type Importer struct { } func (i *Importer) PreImport(ctx context.Context) error { - i.performer = performerJSONToPerformer(i.Input) + var err error + i.performer, err = performerJSONToPerformer(i.Input) + if err != nil { + return err + } i.customFields = i.Input.CustomFields if err := i.populateTags(ctx); err != nil { return err } - var err error if len(i.Input.Image) > 0 { i.imageData, err = utils.ProcessBase64Image(i.Input.Image) if err != nil { @@ -107,7 +110,9 @@ func createTags(ctx context.Context, tagWriter models.TagFinderCreator, names [] newTag := models.NewTag() newTag.Name = name - err := tagWriter.Create(ctx, &newTag) + err := tagWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return nil, err } @@ -194,7 +199,7 @@ func (i *Importer) Update(ctx context.Context, id int) error { return nil } -func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Performer { +func performerJSONToPerformer(performerJSON jsonschema.Performer) (models.Performer, error) { newPerformer := models.Performer{ Name: performerJSON.Name, Disambiguation: performerJSON.Disambiguation, @@ -203,7 +208,6 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform EyeColor: performerJSON.EyeColor, 
Measurements: performerJSON.Measurements, FakeTits: performerJSON.FakeTits, - CareerLength: performerJSON.CareerLength, Tattoos: performerJSON.Tattoos, Piercings: performerJSON.Piercings, Aliases: models.NewRelatedStrings(performerJSON.Aliases), @@ -243,7 +247,7 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform } if performerJSON.Circumcised != "" { - v := models.CircumisedEnum(performerJSON.Circumcised) + v := models.CircumcisedEnum(performerJSON.Circumcised) newPerformer.Circumcised = &v } @@ -280,5 +284,24 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform } } - return newPerformer + // prefer explicit career_start/career_end, fall back to parsing legacy career_length + if performerJSON.CareerStart != "" || performerJSON.CareerEnd != "" { + careerStart, err := models.ParseDate(performerJSON.CareerStart) + if err == nil { + newPerformer.CareerStart = &careerStart + } + careerEnd, err := models.ParseDate(performerJSON.CareerEnd) + if err == nil { + newPerformer.CareerEnd = &careerEnd + } + } else if performerJSON.CareerLength != "" { + start, end, err := models.ParseYearRangeString(performerJSON.CareerLength) + if err != nil { + return models.Performer{}, fmt.Errorf("invalid career_length %q: %w", performerJSON.CareerLength, err) + } + newPerformer.CareerStart = start + newPerformer.CareerEnd = end + } + + return newPerformer, nil } diff --git a/pkg/performer/import_test.go b/pkg/performer/import_test.go index 0a3f86291..0d5f80d01 100644 --- a/pkg/performer/import_test.go +++ b/pkg/performer/import_test.go @@ -111,9 +111,9 @@ func TestImporterPreImportWithMissingTag(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = existingTagID + db.Tag.On("Create", testCtx, 
mock.AnythingOfType("*models.CreateTagInput")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = existingTagID }).Return(nil) err := i.PreImport(testCtx) @@ -146,7 +146,7 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once() - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -315,3 +315,86 @@ func TestUpdate(t *testing.T) { db.AssertExpectations(t) } + +func TestImportCareerFields(t *testing.T) { + startYear, _ := models.ParseDate("2005") + endYear, _ := models.ParseDate("2015") + + // explicit career_start/career_end should be used directly + t.Run("explicit fields", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + CareerStart: startYear.String(), + CareerEnd: endYear.String(), + } + + p, err := performerJSONToPerformer(input) + assert.Nil(t, err) + assert.Equal(t, &startYear, p.CareerStart) + assert.Equal(t, &endYear, p.CareerEnd) + }) + + // explicit fields take priority over legacy career_length + t.Run("explicit fields override legacy", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + CareerStart: startYear.String(), + CareerEnd: endYear.String(), + CareerLength: "1990 - 1995", + } + + p, err := performerJSONToPerformer(input) + assert.Nil(t, err) + assert.Equal(t, &startYear, p.CareerStart) + assert.Equal(t, &endYear, p.CareerEnd) + }) + + // legacy career_length should be parsed when explicit fields are absent + t.Run("legacy career_length fallback", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + CareerLength: "2005 - 2015", + } + + p, err := performerJSONToPerformer(input) + assert.Nil(t, err) + assert.Equal(t, &startYear, 
p.CareerStart) + assert.Equal(t, &endYear, p.CareerEnd) + }) + + // legacy career_length with only start year + t.Run("legacy career_length start only", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + CareerLength: "2005 -", + } + + p, err := performerJSONToPerformer(input) + assert.Nil(t, err) + assert.Equal(t, &startYear, p.CareerStart) + assert.Nil(t, p.CareerEnd) + }) + + // unparseable career_length should return an error + t.Run("legacy career_length unparseable", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + CareerLength: "not a year range", + } + + _, err := performerJSONToPerformer(input) + assert.NotNil(t, err) + }) + + // no career fields at all + t.Run("no career fields", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + } + + p, err := performerJSONToPerformer(input) + assert.Nil(t, err) + assert.Nil(t, p.CareerStart) + assert.Nil(t, p.CareerEnd) + }) +} diff --git a/pkg/performer/validate.go b/pkg/performer/validate.go index 68f7a8ef5..3baaa182b 100644 --- a/pkg/performer/validate.go +++ b/pkg/performer/validate.go @@ -225,6 +225,11 @@ func ValidateUpdateAliases(existing models.Performer, name models.OptionalString newName = name.Value } + // If aliases is nil, we're only changing the name - check existing aliases against new name + if aliases == nil { + return ValidateAliases(newName, existing.Aliases) + } + newAliases := aliases.Apply(existing.Aliases.List()) return ValidateAliases(newName, models.NewRelatedStrings(newAliases)) diff --git a/pkg/performer/validate_test.go b/pkg/performer/validate_test.go index 33f4b1cec..afd9c01c5 100644 --- a/pkg/performer/validate_test.go +++ b/pkg/performer/validate_test.go @@ -213,12 +213,12 @@ func TestValidateUpdateAliases(t *testing.T) { want error }{ {"both unset", osUnset, nil, nil}, - {"invalid name set", os2, nil, &DuplicateAliasError{name2}}, + {"name conflicts with alias", os2, nil, &DuplicateAliasError{name2}}, {"valid name set", 
os3, nil, nil}, {"valid aliases empty", os1, []string{}, nil}, - {"invalid aliases set", osUnset, []string{name1U}, &DuplicateAliasError{name1U}}, + {"alias matches name", osUnset, []string{name1U}, &DuplicateAliasError{name1U}}, {"valid aliases set", osUnset, []string{name3, name2}, nil}, - {"invalid both set", os4, []string{name4}, &DuplicateAliasError{name4}}, + {"alias matches new name", os4, []string{name4}, &DuplicateAliasError{name4}}, {"valid both set", os2, []string{name1}, nil}, } diff --git a/pkg/pkg/cache.go b/pkg/pkg/cache.go index 9d36bdd1d..e94b2cb41 100644 --- a/pkg/pkg/cache.go +++ b/pkg/pkg/cache.go @@ -1,6 +1,7 @@ package pkg import ( + "sync" "time" ) @@ -10,22 +11,23 @@ type cacheEntry struct { } type repositoryCache struct { + mu sync.RWMutex // cache maps the URL to the last modified time and the data cache map[string]cacheEntry } -func (c *repositoryCache) ensureCache() { - if c.cache == nil { - c.cache = make(map[string]cacheEntry) - } -} - func (c *repositoryCache) lastModified(url string) *time.Time { if c == nil { return nil } - c.ensureCache() + c.mu.RLock() + defer c.mu.RUnlock() + + if c.cache == nil { + return nil + } + e, found := c.cache[url] if !found { @@ -36,7 +38,13 @@ func (c *repositoryCache) lastModified(url string) *time.Time { } func (c *repositoryCache) getPackageList(url string) []RemotePackage { - c.ensureCache() + c.mu.RLock() + defer c.mu.RUnlock() + + if c.cache == nil { + return nil + } + e, found := c.cache[url] if !found { @@ -51,7 +59,13 @@ func (c *repositoryCache) cacheList(url string, lastModified time.Time, data []R return } - c.ensureCache() + c.mu.Lock() + defer c.mu.Unlock() + + if c.cache == nil { + c.cache = make(map[string]cacheEntry) + } + c.cache[url] = cacheEntry{ lastModified: lastModified, data: data, diff --git a/pkg/pkg/manager.go b/pkg/pkg/manager.go index 18fa4e0d1..4024191ad 100644 --- a/pkg/pkg/manager.go +++ b/pkg/pkg/manager.go @@ -10,6 +10,7 @@ import ( "net/http" "net/url" "path/filepath" 
+ "sync" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -31,13 +32,14 @@ type Manager struct { Client *http.Client - cache *repositoryCache + cacheOnce sync.Once + cache *repositoryCache } func (m *Manager) getCache() *repositoryCache { - if m.cache == nil { + m.cacheOnce.Do(func() { m.cache = &repositoryCache{} - } + }) return m.cache } diff --git a/pkg/scene/create.go b/pkg/scene/create.go index cd9234b5d..248906295 100644 --- a/pkg/scene/create.go +++ b/pkg/scene/create.go @@ -10,14 +10,14 @@ import ( "github.com/stashapp/stash/pkg/plugin/hook" ) -func (s *Service) Create(ctx context.Context, input *models.Scene, fileIDs []models.FileID, coverImage []byte) (*models.Scene, error) { +func (s *Service) Create(ctx context.Context, input models.CreateSceneInput) (*models.Scene, error) { // title must be set if no files are provided - if input.Title == "" && len(fileIDs) == 0 { + if input.Scene.Title == "" && len(input.FileIDs) == 0 { return nil, errors.New("title must be set if scene has no files") } now := time.Now() - newScene := *input + newScene := *input.Scene newScene.CreatedAt = now newScene.UpdatedAt = now @@ -27,16 +27,24 @@ func (s *Service) Create(ctx context.Context, input *models.Scene, fileIDs []mod return nil, fmt.Errorf("creating new scene: %w", err) } - for _, f := range fileIDs { + if len(input.CustomFields) > 0 { + if err := s.Repository.SetCustomFields(ctx, newScene.ID, models.CustomFieldsInput{ + Full: input.CustomFields, + }); err != nil { + return nil, fmt.Errorf("setting custom fields on new scene: %w", err) + } + } + + for _, f := range input.FileIDs { if err := s.AssignFile(ctx, newScene.ID, f); err != nil { return nil, fmt.Errorf("assigning file %d to new scene: %w", f, err) } } - if len(fileIDs) > 0 { + if len(input.FileIDs) > 0 { // assign the primary to the first if _, err := s.Repository.UpdatePartial(ctx, newScene.ID, models.ScenePartial{ - PrimaryFileID: &fileIDs[0], + PrimaryFileID: 
&input.FileIDs[0], }); err != nil { return nil, fmt.Errorf("setting primary file on new scene: %w", err) } @@ -48,8 +56,8 @@ func (s *Service) Create(ctx context.Context, input *models.Scene, fileIDs []mod return nil, err } - if len(coverImage) > 0 { - if err := s.Repository.UpdateCover(ctx, ret.ID, coverImage); err != nil { + if len(input.CoverImage) > 0 { + if err := s.Repository.UpdateCover(ctx, ret.ID, input.CoverImage); err != nil { return nil, fmt.Errorf("setting cover on new scene: %w", err) } } diff --git a/pkg/scene/delete.go b/pkg/scene/delete.go index c34bbdf14..8ca3d6e11 100644 --- a/pkg/scene/delete.go +++ b/pkg/scene/delete.go @@ -109,7 +109,7 @@ func (d *FileDeleter) MarkMarkerFiles(scene *models.Scene, seconds int) error { // Destroy deletes a scene and its associated relationships from the // database. -func (s *Service) Destroy(ctx context.Context, scene *models.Scene, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error { +func (s *Service) Destroy(ctx context.Context, scene *models.Scene, fileDeleter *FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error { mqb := s.MarkerRepository markers, err := mqb.FindBySceneID(ctx, scene.ID) if err != nil { @@ -126,6 +126,10 @@ func (s *Service) Destroy(ctx context.Context, scene *models.Scene, fileDeleter if err := s.deleteFiles(ctx, scene, fileDeleter); err != nil { return err } + } else if destroyFileEntry { + if err := s.destroyFileEntries(ctx, scene); err != nil { + return err + } } if deleteGenerated { @@ -180,6 +184,35 @@ func (s *Service) deleteFiles(ctx context.Context, scene *models.Scene, fileDele return nil } +// destroyFileEntries destroys file entries from the database without deleting +// the files from the filesystem +func (s *Service) destroyFileEntries(ctx context.Context, scene *models.Scene) error { + if err := scene.LoadFiles(ctx, s.Repository); err != nil { + return err + } + + for _, f := range scene.Files.List() { + // only destroy file entries where 
there is no other associated scene + otherScenes, err := s.Repository.FindByFileID(ctx, f.ID) + if err != nil { + return err + } + + if len(otherScenes) > 1 { + // other scenes associated, don't remove + continue + } + + const deleteFile = false + logger.Info("Destroying scene file entry: ", f.Path) + if err := file.Destroy(ctx, s.File, f, nil, deleteFile); err != nil { + return err + } + } + + return nil +} + // DestroyMarker deletes the scene marker from the database and returns a // function that removes the generated files, to be executed after the // transaction is successfully committed. diff --git a/pkg/scene/export.go b/pkg/scene/export.go index a012d1850..069bd587f 100644 --- a/pkg/scene/export.go +++ b/pkg/scene/export.go @@ -17,6 +17,7 @@ import ( type ExportGetter interface { models.ViewDateReader models.ODateReader + models.CustomFieldsReader GetCover(ctx context.Context, sceneID int) ([]byte, error) } @@ -92,6 +93,11 @@ func ToBasicJSON(ctx context.Context, reader ExportGetter, scene *models.Scene) newSceneJSON.OHistory = append(newSceneJSON.OHistory, json.JSONTime{Time: date}) } + newSceneJSON.CustomFields, err = reader.GetCustomFields(ctx, scene.ID) + if err != nil { + return nil, fmt.Errorf("getting scene custom fields: %v", err) + } + return &newSceneJSON, nil } diff --git a/pkg/scene/export_test.go b/pkg/scene/export_test.go index cde421bd8..9547ab5e7 100644 --- a/pkg/scene/export_test.go +++ b/pkg/scene/export_test.go @@ -22,6 +22,7 @@ const ( studioID = 4 missingStudioID = 5 errStudioID = 6 + customFieldsID = 7 noTagsID = 11 errTagsID = 12 @@ -33,6 +34,7 @@ const ( errMarkersID = 17 errFindPrimaryTagID = 18 errFindByMarkerID = 19 + errCustomFieldsID = 20 ) var ( @@ -82,6 +84,13 @@ var ( updateTime = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC) ) +var ( + emptyCustomFields = make(map[string]interface{}) + customFields = map[string]interface{}{ + "customField1": "customValue1", + } +) + func createFullScene(id int) models.Scene { return 
models.Scene{ ID: id, @@ -123,7 +132,7 @@ func createEmptyScene(id int) models.Scene { } } -func createFullJSONScene(image string) *jsonschema.Scene { +func createFullJSONScene(image string, customFields map[string]interface{}) *jsonschema.Scene { return &jsonschema.Scene{ Title: title, Files: []string{path}, @@ -142,6 +151,7 @@ func createFullJSONScene(image string) *jsonschema.Scene { StashIDs: []models.StashID{ stashID, }, + CustomFields: customFields, } } @@ -155,32 +165,49 @@ func createEmptyJSONScene() *jsonschema.Scene { UpdatedAt: json.JSONTime{ Time: updateTime, }, + CustomFields: emptyCustomFields, } } type basicTestScenario struct { - input models.Scene - expected *jsonschema.Scene - err bool + input models.Scene + customFields map[string]interface{} + expected *jsonschema.Scene + err bool } var scenarios = []basicTestScenario{ { createFullScene(sceneID), - createFullJSONScene(imageBase64), + emptyCustomFields, + createFullJSONScene(imageBase64, emptyCustomFields), + false, + }, + { + createFullScene(customFieldsID), + customFields, + createFullJSONScene("", customFields), false, }, { createEmptyScene(noImageID), + emptyCustomFields, createEmptyJSONScene(), false, }, { createFullScene(errImageID), - createFullJSONScene(""), + emptyCustomFields, + createFullJSONScene("", emptyCustomFields), // failure to get image should not cause an error false, }, + { + createFullScene(errCustomFieldsID), + customFields, + createFullJSONScene("", customFields), + true, + }, } func TestToJSON(t *testing.T) { @@ -191,8 +218,12 @@ func TestToJSON(t *testing.T) { db.Scene.On("GetCover", testCtx, sceneID).Return(imageBytes, nil).Once() db.Scene.On("GetCover", testCtx, noImageID).Return(nil, nil).Once() db.Scene.On("GetCover", testCtx, errImageID).Return(nil, imageErr).Once() + db.Scene.On("GetCover", testCtx, mock.Anything).Return(nil, nil) db.Scene.On("GetViewDates", testCtx, mock.Anything).Return(nil, nil) db.Scene.On("GetODates", testCtx, mock.Anything).Return(nil, nil) + 
db.Scene.On("GetCustomFields", testCtx, customFieldsID).Return(customFields, nil).Once() + db.Scene.On("GetCustomFields", testCtx, errCustomFieldsID).Return(nil, errors.New("error getting custom fields")).Once() + db.Scene.On("GetCustomFields", testCtx, mock.Anything).Return(emptyCustomFields, nil) for i, s := range scenarios { scene := s.input @@ -203,6 +234,8 @@ func TestToJSON(t *testing.T) { t.Errorf("[%d] unexpected error: %s", i, err.Error()) case s.err && err == nil: t.Errorf("[%d] expected error not returned", i) + case err != nil: + // error case already handled, no need for assertion default: assert.Equal(t, s.expected, json, "[%d]", i) } diff --git a/pkg/scene/generate/sprite.go b/pkg/scene/generate/sprite.go index c3b10f680..e0dea9659 100644 --- a/pkg/scene/generate/sprite.go +++ b/pkg/scene/generate/sprite.go @@ -18,22 +18,19 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -const ( - spriteScreenshotWidth = 160 - - spriteRows = 9 - spriteCols = 9 - spriteChunks = spriteRows * spriteCols -) - -func (g Generator) SpriteScreenshot(ctx context.Context, input string, seconds float64) (image.Image, error) { +func (g Generator) SpriteScreenshot(ctx context.Context, input string, seconds float64, size int, isPortrait bool) (image.Image, error) { lockCtx := g.LockManager.ReadLock(ctx, input) defer lockCtx.Cancel() ssOptions := transcoder.ScreenshotOptions{ OutputPath: "-", OutputType: transcoder.ScreenshotOutputTypeBMP, - Width: spriteScreenshotWidth, + } + + if !isPortrait { + ssOptions.Width = size + } else { + ssOptions.Height = size } args := transcoder.ScreenshotTime(input, seconds, ssOptions) @@ -41,14 +38,14 @@ func (g Generator) SpriteScreenshot(ctx context.Context, input string, seconds f return g.generateImage(lockCtx, args) } -func (g Generator) SpriteScreenshotSlow(ctx context.Context, input string, frame int) (image.Image, error) { +func (g Generator) SpriteScreenshotSlow(ctx context.Context, input string, frame int, width int) (image.Image, 
error) { lockCtx := g.LockManager.ReadLock(ctx, input) defer lockCtx.Cancel() ssOptions := transcoder.ScreenshotOptions{ OutputPath: "-", OutputType: transcoder.ScreenshotOutputTypeBMP, - Width: spriteScreenshotWidth, + Width: width, } args := transcoder.ScreenshotFrame(input, frame, ssOptions) @@ -74,12 +71,13 @@ func (g Generator) CombineSpriteImages(images []image.Image) image.Image { // Combine all of the thumbnails into a sprite image width := images[0].Bounds().Size().X height := images[0].Bounds().Size().Y - canvasWidth := width * spriteCols - canvasHeight := height * spriteRows + gridSize := GetSpriteGridSize(len(images)) + canvasWidth := width * gridSize + canvasHeight := height * gridSize montage := imaging.New(canvasWidth, canvasHeight, color.NRGBA{}) for index := 0; index < len(images); index++ { - x := width * (index % spriteCols) - y := height * int(math.Floor(float64(index)/float64(spriteRows))) + x := width * (index % gridSize) + y := height * int(math.Floor(float64(index)/float64(gridSize))) img := images[index] montage = imaging.Paste(montage, img, image.Pt(x, y)) } @@ -87,14 +85,19 @@ func (g Generator) CombineSpriteImages(images []image.Image) image.Image { return montage } -func (g Generator) SpriteVTT(ctx context.Context, output string, spritePath string, stepSize float64) error { - lockCtx := g.LockManager.ReadLock(ctx, spritePath) - defer lockCtx.Cancel() - - return g.generateFile(lockCtx, g.ScenePaths, vttPattern, output, g.spriteVTT(spritePath, stepSize)) +// GetSpriteGridSize return the required size of a grid, where the number of images in width +// equals the number of images in height, to hold 'imageCount' images +func GetSpriteGridSize(imageCount int) int { + return int(math.Ceil(math.Sqrt(float64(imageCount)))) } -func (g Generator) spriteVTT(spritePath string, stepSize float64) generateFn { +func (g Generator) SpriteVTT(ctx context.Context, output string, spritePath string, stepSize float64, spriteChunks int) error { + lockCtx := 
g.LockManager.ReadLock(ctx, spritePath) + defer lockCtx.Cancel() + return g.generateFile(lockCtx, g.ScenePaths, vttPattern, output, g.spriteVTT(spritePath, stepSize, spriteChunks)) +} + +func (g Generator) spriteVTT(spritePath string, stepSize float64, spriteChunks int) generateFn { return func(lockCtx *fsutil.LockContext, tmpFn string) error { spriteImage, err := os.Open(spritePath) if err != nil { @@ -106,16 +109,17 @@ func (g Generator) spriteVTT(spritePath string, stepSize float64) generateFn { if err != nil { return err } - width := image.Width / spriteCols - height := image.Height / spriteRows + + gridSize := GetSpriteGridSize(spriteChunks) + width := image.Width / gridSize + height := image.Height / gridSize vttLines := []string{"WEBVTT", ""} for index := 0; index < spriteChunks; index++ { - x := width * (index % spriteCols) - y := height * int(math.Floor(float64(index)/float64(spriteRows))) + x := width * (index % gridSize) + y := height * int(math.Floor(float64(index)/float64(gridSize))) startTime := utils.GetVTTTime(float64(index) * stepSize) endTime := utils.GetVTTTime(float64(index+1) * stepSize) - vttLines = append(vttLines, startTime+" --> "+endTime) vttLines = append(vttLines, fmt.Sprintf("%s#xywh=%d,%d,%d,%d", spriteImageName, x, y, width, height)) vttLines = append(vttLines, "") diff --git a/pkg/scene/import.go b/pkg/scene/import.go index efffd380d..24dbf1cc0 100644 --- a/pkg/scene/import.go +++ b/pkg/scene/import.go @@ -18,6 +18,7 @@ type ImporterReaderWriter interface { models.SceneCreatorUpdater models.ViewHistoryWriter models.OHistoryWriter + models.CustomFieldsWriter FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) } @@ -35,6 +36,7 @@ type Importer struct { ID int scene models.Scene + customFields map[string]interface{} coverImageData []byte viewHistory []time.Time oHistory []time.Time @@ -75,6 +77,8 @@ func (i *Importer) PreImport(ctx context.Context) error { } } + i.customFields = i.Input.CustomFields + 
i.populateViewHistory() i.populateOHistory() @@ -213,7 +217,7 @@ func (i *Importer) populateStudio(ctx context.Context) error { } func (i *Importer) createStudio(ctx context.Context, name string) (int, error) { - newStudio := models.NewStudio() + newStudio := models.NewCreateStudioInput() newStudio.Name = name err := i.StudioWriter.Create(ctx, &newStudio) @@ -449,6 +453,14 @@ func (i *Importer) PostImport(ctx context.Context, id int) error { return err } + if len(i.customFields) > 0 { + if err := i.ReaderWriter.SetCustomFields(ctx, id, models.CustomFieldsInput{ + Full: i.customFields, + }); err != nil { + return fmt.Errorf("error setting scene custom fields: %v", err) + } + } + return nil } @@ -549,7 +561,9 @@ func createTags(ctx context.Context, tagWriter models.TagCreator, names []string newTag := models.NewTag() newTag.Name = name - err := tagWriter.Create(ctx, &newTag) + err := tagWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return nil, err } diff --git a/pkg/scene/import_test.go b/pkg/scene/import_test.go index a6e3edcdf..98924e20d 100644 --- a/pkg/scene/import_test.go +++ b/pkg/scene/import_test.go @@ -241,9 +241,9 @@ func TestImporterPreImportWithMissingStudio(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3) - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) - s.ID = existingStudioID + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Run(func(args mock.Arguments) { + s := args.Get(1).(*models.CreateStudioInput) + s.Studio.ID = existingStudioID }).Return(nil) err := i.PreImport(testCtx) @@ -273,7 +273,7 @@ func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once() - db.Studio.On("Create", testCtx, 
mock.AnythingOfType("*models.Studio")).Return(errors.New("Create error")) + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -508,9 +508,9 @@ func TestImporterPreImportWithMissingTag(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = existingTagID + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = existingTagID }).Return(nil) err := i.PreImport(testCtx) @@ -542,10 +542,110 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once() - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) db.AssertExpectations(t) } + +func TestImporterPostImport(t *testing.T) { + db := mocks.NewDatabase() + + vt := time.Now() + ot := vt.Add(time.Minute) + + var ( + okID = 1 + errViewHistoryID = 2 + errOHistoryID = 3 + errImageID = 4 + errCustomFieldsID = 5 + ) + + var ( + errImage = errors.New("error updating cover image") + errViewHistory = errors.New("error updating view history") + errOHistory = errors.New("error updating o history") + errCustomFields = errors.New("error updating custom fields") + ) + + table := []struct { + name string + importer Importer + err bool + }{ + { + name: "all set successfully", + importer: Importer{ + ID: okID, + coverImageData: []byte(imageBase64), + viewHistory: []time.Time{vt}, + oHistory: []time.Time{ot}, + 
customFields: customFields, + }, + err: false, + }, + { + name: "cover image set with error", + importer: Importer{ + ID: errImageID, + coverImageData: []byte(invalidImage), + }, + err: true, + }, + { + name: "view history set with error", + importer: Importer{ + ID: errViewHistoryID, + viewHistory: []time.Time{vt}, + }, + err: true, + }, + { + name: "o history set with error", + importer: Importer{ + ID: errOHistoryID, + oHistory: []time.Time{ot}, + }, + err: true, + }, + { + name: "custom fields set with error", + importer: Importer{ + ID: errCustomFieldsID, + customFields: customFields, + }, + err: true, + }, + } + + db.Scene.On("UpdateCover", testCtx, okID, []byte(imageBase64)).Return(nil).Once() + db.Scene.On("UpdateCover", testCtx, errImageID, []byte(invalidImage)).Return(errImage).Once() + db.Scene.On("AddViews", testCtx, okID, []time.Time{vt}).Return([]time.Time{vt}, nil).Once() + db.Scene.On("AddViews", testCtx, errViewHistoryID, []time.Time{vt}).Return(nil, errViewHistory).Once() + db.Scene.On("AddO", testCtx, okID, []time.Time{ot}).Return([]time.Time{ot}, nil).Once() + db.Scene.On("AddO", testCtx, errOHistoryID, []time.Time{ot}).Return(nil, errOHistory).Once() + db.Scene.On("SetCustomFields", testCtx, okID, models.CustomFieldsInput{ + Full: customFields, + }).Return(nil).Once() + db.Scene.On("SetCustomFields", testCtx, errCustomFieldsID, models.CustomFieldsInput{ + Full: customFields, + }).Return(errCustomFields).Once() + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + i := tt.importer + i.ReaderWriter = db.Scene + + err := i.PostImport(testCtx, i.ID) + + if tt.err { + assert.NotNil(t, err, "expected error but got nil") + } else { + assert.Nil(t, err, "unexpected error: %v", err) + } + }) + } +} diff --git a/pkg/scene/merge.go b/pkg/scene/merge.go index 77b551ab2..b2650ca92 100644 --- a/pkg/scene/merge.go +++ b/pkg/scene/merge.go @@ -120,7 +120,8 @@ func (s *Service) Merge(ctx context.Context, sourceIDs []int, destinationID int, for 
_, src := range sources { const deleteGenerated = true const deleteFile = false - if err := s.Destroy(ctx, src, fileDeleter, deleteGenerated, deleteFile); err != nil { + const destroyFileEntry = false + if err := s.Destroy(ctx, src, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil { return fmt.Errorf("deleting scene %d: %w", src.ID, err) } } diff --git a/pkg/scene/scan.go b/pkg/scene/scan.go index e1038fbc3..c9cc2c567 100644 --- a/pkg/scene/scan.go +++ b/pkg/scene/scan.go @@ -4,6 +4,8 @@ import ( "context" "errors" "fmt" + "path/filepath" + "strings" "github.com/stashapp/stash/pkg/file/video" "github.com/stashapp/stash/pkg/logger" @@ -32,12 +34,18 @@ type ScanCreatorUpdater interface { AddFileID(ctx context.Context, id int, fileID models.FileID) error } +type ScanGalleryFinderUpdater interface { + FindByPath(ctx context.Context, p string) ([]*models.Gallery, error) + AddSceneIDs(ctx context.Context, galleryID int, sceneIDs []int) error +} + type ScanGenerator interface { Generate(ctx context.Context, s *models.Scene, f *models.VideoFile) error } type ScanHandler struct { - CreatorUpdater ScanCreatorUpdater + CreatorUpdater ScanCreatorUpdater + GalleryFinderUpdater ScanGalleryFinderUpdater ScanGenerator ScanGenerator CaptionUpdater video.CaptionUpdater @@ -127,6 +135,10 @@ func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models. } } + if err := h.associateGallery(ctx, existing, f); err != nil { + return err + } + // do this after the commit so that cover generation doesn't hold up the transaction txn.AddPostCommitHook(ctx, func(ctx context.Context) { for _, s := range existing { @@ -160,18 +172,44 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. 
if err := h.CreatorUpdater.AddFileID(ctx, s.ID, f.ID); err != nil { return fmt.Errorf("adding file to scene: %w", err) } + } - // update updated_at time + if !found || updateExisting { + // update updated_at time when file association or content changes scenePartial := models.NewScenePartial() if _, err := h.CreatorUpdater.UpdatePartial(ctx, s.ID, scenePartial); err != nil { return fmt.Errorf("updating scene: %w", err) } - } - if !found || updateExisting { h.PluginCache.RegisterPostHooks(ctx, s.ID, hook.SceneUpdatePost, nil, nil) } } return nil } + +func (h *ScanHandler) associateGallery(ctx context.Context, existing []*models.Scene, f models.File) error { + sceneIDs := make([]int, len(existing)) + for i, s := range existing { + sceneIDs[i] = s.ID + } + + path := f.Base().Path + zipPath := strings.TrimSuffix(path, filepath.Ext(path)) + ".zip" + + // find galleries with a file that matches + galleries, err := h.GalleryFinderUpdater.FindByPath(ctx, zipPath) + if err != nil { + return err + } + + for _, gallery := range galleries { + // found related Scene + logger.Infof("associate: Scene %s is related to gallery: %d", path, gallery.ID) + if err := h.GalleryFinderUpdater.AddSceneIDs(ctx, gallery.ID, sceneIDs); err != nil { + return err + } + } + + return nil +} diff --git a/pkg/scene/scan_test.go b/pkg/scene/scan_test.go new file mode 100644 index 000000000..71729bb57 --- /dev/null +++ b/pkg/scene/scan_test.go @@ -0,0 +1,114 @@ +package scene + +import ( + "context" + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/plugin" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestAssociateExisting_UpdatePartialOnContentChange(t *testing.T) { + const ( + testSceneID = 1 + testFileID = 100 + ) + + existingFile := &models.VideoFile{ + BaseFile: &models.BaseFile{ID: models.FileID(testFileID), Path: "test.mp4"}, + } + + makeScene := func() *models.Scene { + 
return &models.Scene{ + ID: testSceneID, + Files: models.NewRelatedVideoFiles([]*models.VideoFile{existingFile}), + } + } + + tests := []struct { + name string + updateExisting bool + expectUpdate bool + }{ + { + name: "calls UpdatePartial when file content changed", + updateExisting: true, + expectUpdate: true, + }, + { + name: "skips UpdatePartial when file unchanged and already associated", + updateExisting: false, + expectUpdate: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + db := mocks.NewDatabase() + db.Scene.On("GetFiles", mock.Anything, testSceneID).Return([]*models.VideoFile{existingFile}, nil) + + if tt.expectUpdate { + db.Scene.On("UpdatePartial", mock.Anything, testSceneID, mock.Anything). + Return(&models.Scene{ID: testSceneID}, nil) + } + + h := &ScanHandler{ + CreatorUpdater: db.Scene, + PluginCache: &plugin.Cache{}, + } + + db.WithTxnCtx(func(ctx context.Context) { + err := h.associateExisting(ctx, []*models.Scene{makeScene()}, existingFile, tt.updateExisting) + assert.NoError(t, err) + }) + + if tt.expectUpdate { + db.Scene.AssertCalled(t, "UpdatePartial", mock.Anything, testSceneID, mock.Anything) + } else { + db.Scene.AssertNotCalled(t, "UpdatePartial", mock.Anything, mock.Anything, mock.Anything) + } + }) + } +} + +func TestAssociateExisting_UpdatePartialOnNewFile(t *testing.T) { + const ( + testSceneID = 1 + existFileID = 100 + newFileID = 200 + ) + + existingFile := &models.VideoFile{ + BaseFile: &models.BaseFile{ID: models.FileID(existFileID), Path: "existing.mp4"}, + } + newFile := &models.VideoFile{ + BaseFile: &models.BaseFile{ID: models.FileID(newFileID), Path: "new.mp4"}, + } + + scene := &models.Scene{ + ID: testSceneID, + Files: models.NewRelatedVideoFiles([]*models.VideoFile{existingFile}), + } + + db := mocks.NewDatabase() + db.Scene.On("GetFiles", mock.Anything, testSceneID).Return([]*models.VideoFile{existingFile}, nil) + db.Scene.On("AddFileID", mock.Anything, testSceneID, 
models.FileID(newFileID)).Return(nil) + db.Scene.On("UpdatePartial", mock.Anything, testSceneID, mock.Anything). + Return(&models.Scene{ID: testSceneID}, nil) + + h := &ScanHandler{ + CreatorUpdater: db.Scene, + PluginCache: &plugin.Cache{}, + } + + db.WithTxnCtx(func(ctx context.Context) { + err := h.associateExisting(ctx, []*models.Scene{scene}, newFile, false) + assert.NoError(t, err) + }) + + db.Scene.AssertCalled(t, "AddFileID", mock.Anything, testSceneID, models.FileID(newFileID)) + db.Scene.AssertCalled(t, "UpdatePartial", mock.Anything, testSceneID, mock.Anything) +} diff --git a/pkg/scraper/action.go b/pkg/scraper/action.go index 74bbca415..cd31fbe72 100644 --- a/pkg/scraper/action.go +++ b/pkg/scraper/action.go @@ -24,9 +24,85 @@ func (e scraperAction) IsValid() bool { return false } -type scraperActionImpl interface { +type urlScraperActionImpl interface { scrapeByURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error) +} + +func (c Definition) getURLScraper(def ByURLDefinition, client *http.Client, globalConfig GlobalConfig) urlScraperActionImpl { + switch def.Action { + case scraperActionScript: + return &scriptURLScraper{ + scriptScraper: scriptScraper{ + definition: c, + globalConfig: globalConfig, + }, + definition: def, + } + case scraperActionStash: + return newStashScraper(client, c, globalConfig) + case scraperActionXPath: + return &xpathURLScraper{ + xpathScraper: xpathScraper{ + definition: c, + globalConfig: globalConfig, + client: client, + }, + definition: def, + } + case scraperActionJson: + return &jsonURLScraper{ + jsonScraper: jsonScraper{ + definition: c, + globalConfig: globalConfig, + client: client, + }, + definition: def, + } + } + + panic("unknown scraper action: " + def.Action) +} + +type nameScraperActionImpl interface { scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error) +} + +func (c Definition) getNameScraper(def ByNameDefinition, client *http.Client, 
globalConfig GlobalConfig) nameScraperActionImpl { + switch def.Action { + case scraperActionScript: + return &scriptNameScraper{ + scriptScraper: scriptScraper{ + definition: c, + globalConfig: globalConfig, + }, + definition: def, + } + case scraperActionStash: + return newStashScraper(client, c, globalConfig) + case scraperActionXPath: + return &xpathNameScraper{ + xpathScraper: xpathScraper{ + definition: c, + globalConfig: globalConfig, + client: client, + }, + definition: def, + } + case scraperActionJson: + return &jsonNameScraper{ + jsonScraper: jsonScraper{ + definition: c, + globalConfig: globalConfig, + client: client, + }, + definition: def, + } + } + + panic("unknown scraper action: " + def.Action) +} + +type fragmentScraperActionImpl interface { scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) @@ -34,17 +110,37 @@ type scraperActionImpl interface { scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error) } -func (c config) getScraper(scraper scraperTypeConfig, client *http.Client, globalConfig GlobalConfig) scraperActionImpl { - switch scraper.Action { +func (c Definition) getFragmentScraper(actionDef ByFragmentDefinition, client *http.Client, globalConfig GlobalConfig) fragmentScraperActionImpl { + switch actionDef.Action { case scraperActionScript: - return newScriptScraper(scraper, c, globalConfig) + return &scriptFragmentScraper{ + scriptScraper: scriptScraper{ + definition: c, + globalConfig: globalConfig, + }, + definition: actionDef, + } case scraperActionStash: - return newStashScraper(scraper, client, c, globalConfig) + return newStashScraper(client, c, globalConfig) case scraperActionXPath: - return newXpathScraper(scraper, client, c, globalConfig) + return &xpathFragmentScraper{ + xpathScraper: xpathScraper{ + definition: c, + globalConfig: globalConfig, + client: client, + }, + definition: 
actionDef, + } case scraperActionJson: - return newJsonScraper(scraper, client, c, globalConfig) + return &jsonFragmentScraper{ + jsonScraper: jsonScraper{ + definition: c, + globalConfig: globalConfig, + client: client, + }, + definition: actionDef, + } } - panic("unknown scraper action: " + scraper.Action) + panic("unknown scraper action: " + actionDef.Action) } diff --git a/pkg/scraper/cache.go b/pkg/scraper/cache.go index 5cc51ac54..6aeb95fcf 100644 --- a/pkg/scraper/cache.go +++ b/pkg/scraper/cache.go @@ -182,7 +182,7 @@ func (c *Cache) ReloadScrapers() { if err != nil { logger.Errorf("Error loading scraper %s: %v", fp, err) } else { - scraper := newGroupScraper(*conf, c.globalConfig) + scraper := scraperFromDefinition(*conf, c.globalConfig) scrapers[scraper.spec().ID] = scraper } } diff --git a/pkg/scraper/cookies.go b/pkg/scraper/cookies.go index 0a2877b7b..c76dae037 100644 --- a/pkg/scraper/cookies.go +++ b/pkg/scraper/cookies.go @@ -18,7 +18,7 @@ import ( ) // jar constructs a cookie jar from a configuration -func (c config) jar() (*cookiejar.Jar, error) { +func (c Definition) jar() (*cookiejar.Jar, error) { opts := c.DriverOptions jar, err := cookiejar.New(&cookiejar.Options{ PublicSuffixList: publicsuffix.List, @@ -77,7 +77,7 @@ func randomSequence(n int) string { } // printCookies prints all cookies from the given cookie jar -func printCookies(jar *cookiejar.Jar, scraperConfig config, msg string) { +func printCookies(jar *cookiejar.Jar, scraperConfig Definition, msg string) { driverOptions := scraperConfig.DriverOptions if driverOptions != nil && !driverOptions.UseCDP { var foundURLs []*url.URL diff --git a/pkg/scraper/group.go b/pkg/scraper/defined_scraper.go similarity index 56% rename from pkg/scraper/group.go rename to pkg/scraper/defined_scraper.go index 43fd2a37b..0287101d0 100644 --- a/pkg/scraper/group.go +++ b/pkg/scraper/defined_scraper.go @@ -8,25 +8,26 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type group struct { - config config 
+// definedScraper implements the scraper interface using a Definition object. +type definedScraper struct { + config Definition globalConf GlobalConfig } -func newGroupScraper(c config, globalConfig GlobalConfig) scraper { - return group{ +func scraperFromDefinition(c Definition, globalConfig GlobalConfig) definedScraper { + return definedScraper{ config: c, globalConf: globalConfig, } } -func (g group) spec() Scraper { +func (g definedScraper) spec() Scraper { return g.config.spec() } // fragmentScraper finds an appropriate fragment scraper based on input. -func (g group) fragmentScraper(input Input) *scraperTypeConfig { +func (g definedScraper) fragmentScraper(input Input) *ByFragmentDefinition { switch { case input.Performer != nil: return g.config.PerformerByFragment @@ -43,7 +44,7 @@ func (g group) fragmentScraper(input Input) *scraperTypeConfig { return nil } -func (g group) viaFragment(ctx context.Context, client *http.Client, input Input) (ScrapedContent, error) { +func (g definedScraper) viaFragment(ctx context.Context, client *http.Client, input Input) (ScrapedContent, error) { stc := g.fragmentScraper(input) if stc == nil { // If there's no performer fragment scraper in the group, we try to use @@ -56,38 +57,38 @@ func (g group) viaFragment(ctx context.Context, client *http.Client, input Input return nil, ErrNotSupported } - s := g.config.getScraper(*stc, client, g.globalConf) + s := g.config.getFragmentScraper(*stc, client, g.globalConf) return s.scrapeByFragment(ctx, input) } -func (g group) viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*models.ScrapedScene, error) { +func (g definedScraper) viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*models.ScrapedScene, error) { if g.config.SceneByFragment == nil { return nil, ErrNotSupported } - s := g.config.getScraper(*g.config.SceneByFragment, client, g.globalConf) + s := g.config.getFragmentScraper(*g.config.SceneByFragment, client, g.globalConf) return 
s.scrapeSceneByScene(ctx, scene) } -func (g group) viaGallery(ctx context.Context, client *http.Client, gallery *models.Gallery) (*models.ScrapedGallery, error) { +func (g definedScraper) viaGallery(ctx context.Context, client *http.Client, gallery *models.Gallery) (*models.ScrapedGallery, error) { if g.config.GalleryByFragment == nil { return nil, ErrNotSupported } - s := g.config.getScraper(*g.config.GalleryByFragment, client, g.globalConf) + s := g.config.getFragmentScraper(*g.config.GalleryByFragment, client, g.globalConf) return s.scrapeGalleryByGallery(ctx, gallery) } -func (g group) viaImage(ctx context.Context, client *http.Client, gallery *models.Image) (*models.ScrapedImage, error) { +func (g definedScraper) viaImage(ctx context.Context, client *http.Client, gallery *models.Image) (*models.ScrapedImage, error) { if g.config.ImageByFragment == nil { return nil, ErrNotSupported } - s := g.config.getScraper(*g.config.ImageByFragment, client, g.globalConf) + s := g.config.getFragmentScraper(*g.config.ImageByFragment, client, g.globalConf) return s.scrapeImageByImage(ctx, gallery) } -func loadUrlCandidates(c config, ty ScrapeContentType) []*scrapeByURLConfig { +func loadUrlCandidates(c Definition, ty ScrapeContentType) []*ByURLDefinition { switch ty { case ScrapeContentTypePerformer: return c.PerformerByURL @@ -104,12 +105,13 @@ func loadUrlCandidates(c config, ty ScrapeContentType) []*scrapeByURLConfig { panic("loadUrlCandidates: unreachable") } -func (g group) viaURL(ctx context.Context, client *http.Client, url string, ty ScrapeContentType) (ScrapedContent, error) { +func (g definedScraper) viaURL(ctx context.Context, client *http.Client, url string, ty ScrapeContentType) (ScrapedContent, error) { candidates := loadUrlCandidates(g.config, ty) for _, scraper := range candidates { if scraper.matchesURL(url) { - s := g.config.getScraper(scraper.scraperTypeConfig, client, g.globalConf) - ret, err := s.scrapeByURL(ctx, url, ty) + u := replaceURL(url, *scraper) 
// allow a URL Replace for url-queries + s := g.config.getURLScraper(*scraper, client, g.globalConf) + ret, err := s.scrapeByURL(ctx, u, ty) if err != nil { return nil, err } @@ -123,31 +125,31 @@ func (g group) viaURL(ctx context.Context, client *http.Client, url string, ty S return nil, nil } -func (g group) viaName(ctx context.Context, client *http.Client, name string, ty ScrapeContentType) ([]ScrapedContent, error) { +func (g definedScraper) viaName(ctx context.Context, client *http.Client, name string, ty ScrapeContentType) ([]ScrapedContent, error) { switch ty { case ScrapeContentTypePerformer: if g.config.PerformerByName == nil { break } - s := g.config.getScraper(*g.config.PerformerByName, client, g.globalConf) + s := g.config.getNameScraper(*g.config.PerformerByName, client, g.globalConf) return s.scrapeByName(ctx, name, ty) case ScrapeContentTypeScene: if g.config.SceneByName == nil { break } - s := g.config.getScraper(*g.config.SceneByName, client, g.globalConf) + s := g.config.getNameScraper(*g.config.SceneByName, client, g.globalConf) return s.scrapeByName(ctx, name, ty) } return nil, fmt.Errorf("%w: cannot load %v by name", ErrNotSupported, ty) } -func (g group) supports(ty ScrapeContentType) bool { +func (g definedScraper) supports(ty ScrapeContentType) bool { return g.config.supports(ty) } -func (g group) supportsURL(url string, ty ScrapeContentType) bool { +func (g definedScraper) supportsURL(url string, ty ScrapeContentType) bool { return g.config.matchesURL(url, ty) } diff --git a/pkg/scraper/config.go b/pkg/scraper/definition.go similarity index 80% rename from pkg/scraper/config.go rename to pkg/scraper/definition.go index 5775dc97c..03ba4d75b 100644 --- a/pkg/scraper/config.go +++ b/pkg/scraper/definition.go @@ -11,7 +11,8 @@ import ( "gopkg.in/yaml.v2" ) -type config struct { +// Definition represents a scraper definition (typically) loaded from a YAML configuration file. 
+type Definition struct { ID string path string @@ -19,43 +20,43 @@ type config struct { Name string `yaml:"name"` // Configuration for querying performers by name - PerformerByName *scraperTypeConfig `yaml:"performerByName"` + PerformerByName *ByNameDefinition `yaml:"performerByName"` // Configuration for querying performers by a Performer fragment - PerformerByFragment *scraperTypeConfig `yaml:"performerByFragment"` + PerformerByFragment *ByFragmentDefinition `yaml:"performerByFragment"` // Configuration for querying a performer by a URL - PerformerByURL []*scrapeByURLConfig `yaml:"performerByURL"` + PerformerByURL []*ByURLDefinition `yaml:"performerByURL"` // Configuration for querying scenes by a Scene fragment - SceneByFragment *scraperTypeConfig `yaml:"sceneByFragment"` + SceneByFragment *ByFragmentDefinition `yaml:"sceneByFragment"` // Configuration for querying gallery by a Gallery fragment - GalleryByFragment *scraperTypeConfig `yaml:"galleryByFragment"` + GalleryByFragment *ByFragmentDefinition `yaml:"galleryByFragment"` // Configuration for querying scenes by name - SceneByName *scraperTypeConfig `yaml:"sceneByName"` + SceneByName *ByNameDefinition `yaml:"sceneByName"` // Configuration for querying scenes by query fragment - SceneByQueryFragment *scraperTypeConfig `yaml:"sceneByQueryFragment"` + SceneByQueryFragment *ByFragmentDefinition `yaml:"sceneByQueryFragment"` // Configuration for querying a scene by a URL - SceneByURL []*scrapeByURLConfig `yaml:"sceneByURL"` + SceneByURL []*ByURLDefinition `yaml:"sceneByURL"` // Configuration for querying a gallery by a URL - GalleryByURL []*scrapeByURLConfig `yaml:"galleryByURL"` + GalleryByURL []*ByURLDefinition `yaml:"galleryByURL"` // Configuration for querying an image by a URL - ImageByURL []*scrapeByURLConfig `yaml:"imageByURL"` + ImageByURL []*ByURLDefinition `yaml:"imageByURL"` // Configuration for querying image by an Image fragment - ImageByFragment *scraperTypeConfig `yaml:"imageByFragment"` + 
ImageByFragment *ByFragmentDefinition `yaml:"imageByFragment"` // Configuration for querying a movie by a URL - deprecated, use GroupByURL - MovieByURL []*scrapeByURLConfig `yaml:"movieByURL"` + MovieByURL []*ByURLDefinition `yaml:"movieByURL"` // Configuration for querying a group by a URL - GroupByURL []*scrapeByURLConfig `yaml:"groupByURL"` + GroupByURL []*ByURLDefinition `yaml:"groupByURL"` // Scraper debugging options DebugOptions *scraperDebugOptions `yaml:"debug"` @@ -73,7 +74,7 @@ type config struct { DriverOptions *scraperDriverOptions `yaml:"driver"` } -func (c config) validate() error { +func (c Definition) validate() error { if strings.TrimSpace(c.Name) == "" { return errors.New("name must not be empty") } @@ -126,17 +127,13 @@ type stashServer struct { ApiKey string `yaml:"apiKey"` } -type scraperTypeConfig struct { +type ActionDefinition struct { Action scraperAction `yaml:"action"` Script []string `yaml:"script,flow"` Scraper string `yaml:"scraper"` - - // for xpath name scraper only - QueryURL string `yaml:"queryURL"` - QueryURLReplacements queryURLReplacements `yaml:"queryURLReplace"` } -func (c scraperTypeConfig) validate() error { +func (c ActionDefinition) validate() error { if !c.Action.IsValid() { return fmt.Errorf("%s is not a valid scraper action", c.Action) } @@ -148,20 +145,22 @@ func (c scraperTypeConfig) validate() error { return nil } -type scrapeByURLConfig struct { - scraperTypeConfig `yaml:",inline"` - URL []string `yaml:"url,flow"` +type ByURLDefinition struct { + ActionDefinition `yaml:",inline"` + URL []string `yaml:"url,flow"` + QueryURL string `yaml:"queryURL"` + QueryURLReplacements queryURLReplacements `yaml:"queryURLReplace"` } -func (c scrapeByURLConfig) validate() error { +func (c ByURLDefinition) validate() error { if len(c.URL) == 0 { return errors.New("url is mandatory for scrape by url scrapers") } - return c.scraperTypeConfig.validate() + return c.ActionDefinition.validate() } -func (c scrapeByURLConfig) matchesURL(url 
string) bool { +func (c ByURLDefinition) matchesURL(url string) bool { for _, thisURL := range c.URL { if strings.Contains(url, thisURL) { return true @@ -171,6 +170,18 @@ func (c scrapeByURLConfig) matchesURL(url string) bool { return false } +type ByFragmentDefinition struct { + ActionDefinition `yaml:",inline"` + + QueryURL string `yaml:"queryURL"` + QueryURLReplacements queryURLReplacements `yaml:"queryURLReplace"` +} + +type ByNameDefinition struct { + ActionDefinition `yaml:",inline"` + QueryURL string `yaml:"queryURL"` +} + type scraperDebugOptions struct { PrintHTML bool `yaml:"printHTML"` } @@ -206,8 +217,8 @@ type scraperDriverOptions struct { Headers []*header `yaml:"headers"` } -func loadConfigFromYAML(id string, reader io.Reader) (*config, error) { - ret := &config{} +func loadConfigFromYAML(id string, reader io.Reader) (*Definition, error) { + ret := &Definition{} parser := yaml.NewDecoder(reader) parser.SetStrict(true) @@ -225,7 +236,7 @@ func loadConfigFromYAML(id string, reader io.Reader) (*config, error) { return ret, nil } -func loadConfigFromYAMLFile(path string) (*config, error) { +func loadConfigFromYAMLFile(path string) (*Definition, error) { file, err := os.Open(path) if err != nil { return nil, err @@ -246,7 +257,7 @@ func loadConfigFromYAMLFile(path string) (*config, error) { return ret, nil } -func (c config) spec() Scraper { +func (c Definition) spec() Scraper { ret := Scraper{ ID: c.ID, Name: c.Name, @@ -334,7 +345,7 @@ func (c config) spec() Scraper { return ret } -func (c config) supports(ty ScrapeContentType) bool { +func (c Definition) supports(ty ScrapeContentType) bool { switch ty { case ScrapeContentTypePerformer: return c.PerformerByName != nil || c.PerformerByFragment != nil || len(c.PerformerByURL) > 0 @@ -351,7 +362,7 @@ func (c config) supports(ty ScrapeContentType) bool { panic("Unhandled ScrapeContentType") } -func (c config) matchesURL(url string, ty ScrapeContentType) bool { +func (c Definition) matchesURL(url string, ty 
ScrapeContentType) bool { switch ty { case ScrapeContentTypePerformer: for _, scraper := range c.PerformerByURL { diff --git a/pkg/scraper/freeones.go b/pkg/scraper/freeones.go index 96caf2fec..e78488b24 100644 --- a/pkg/scraper/freeones.go +++ b/pkg/scraper/freeones.go @@ -139,5 +139,5 @@ func getFreeonesScraper(globalConfig GlobalConfig) scraper { logger.Fatalf("Error loading builtin freeones scraper: %s", err.Error()) } - return newGroupScraper(*c, globalConfig) + return scraperFromDefinition(*c, globalConfig) } diff --git a/pkg/scraper/image.go b/pkg/scraper/image.go index 87f114668..2f2e038af 100644 --- a/pkg/scraper/image.go +++ b/pkg/scraper/image.go @@ -68,6 +68,12 @@ func processImageField(ctx context.Context, imageField *string, client *http.Cli return nil } + // don't try to get the image if it doesn't appear to be a URL + // this allows scrapers to return base64 data URIs directly + if !strings.HasPrefix(*imageField, "http") { + return nil + } + img, err := getImage(ctx, *imageField, client, globalConfig) if err != nil { return err diff --git a/pkg/scraper/json.go b/pkg/scraper/json.go index 9f479f1c2..1dcb887da 100644 --- a/pkg/scraper/json.go +++ b/pkg/scraper/json.go @@ -15,43 +15,22 @@ import ( ) type jsonScraper struct { - scraper scraperTypeConfig - config config + definition Definition globalConfig GlobalConfig client *http.Client } -func newJsonScraper(scraper scraperTypeConfig, client *http.Client, config config, globalConfig GlobalConfig) *jsonScraper { - return &jsonScraper{ - scraper: scraper, - config: config, - client: client, - globalConfig: globalConfig, - } -} - -func (s *jsonScraper) getJsonScraper() *mappedScraper { - return s.config.JsonScrapers[s.scraper.Scraper] -} - -func (s *jsonScraper) scrapeURL(ctx context.Context, url string) (string, *mappedScraper, error) { - scraper := s.getJsonScraper() - - if scraper == nil { - return "", nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config") +func (s 
*jsonScraper) getJsonScraper(name string) (*mappedScraper, error) { + ret, ok := s.definition.JsonScrapers[name] + if !ok { + return nil, fmt.Errorf("json scraper with name %s not found in config", name) } - doc, err := s.loadURL(ctx, url) - - if err != nil { - return "", nil, err - } - - return doc, scraper, nil + return &ret, nil } func (s *jsonScraper) loadURL(ctx context.Context, url string) (string, error) { - r, err := loadURL(ctx, url, s.client, s.config, s.globalConfig) + r, err := loadURL(ctx, url, s.client, s.definition, s.globalConfig) if err != nil { return "", err } @@ -66,21 +45,30 @@ func (s *jsonScraper) loadURL(ctx context.Context, url string) (string, error) { return "", errors.New("not valid json") } - if s.config.DebugOptions != nil && s.config.DebugOptions.PrintHTML { + if s.definition.DebugOptions != nil && s.definition.DebugOptions.PrintHTML { logger.Infof("loadURL (%s) response: \n%s", url, docStr) } return docStr, err } -func (s *jsonScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error) { - u := replaceURL(url, s.scraper) // allow a URL Replace for url-queries - doc, scraper, err := s.scrapeURL(ctx, u) +type jsonURLScraper struct { + jsonScraper + definition ByURLDefinition +} + +func (s *jsonURLScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error) { + scraper, err := s.getJsonScraper(s.definition.Scraper) if err != nil { return nil, err } - q := s.getJsonQuery(doc, u) + doc, err := s.loadURL(ctx, url) + if err != nil { + return nil, err + } + + q := s.getJsonQuery(doc, url) // if these just return the return values from scraper.scrape* functions then // it ends up returning ScrapedContent(nil) rather than nil switch ty { @@ -119,11 +107,15 @@ func (s *jsonScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeCont return nil, ErrNotSupported } -func (s *jsonScraper) scrapeByName(ctx context.Context, name string, ty ScrapeContentType) 
([]ScrapedContent, error) { - scraper := s.getJsonScraper() +type jsonNameScraper struct { + jsonScraper + definition ByNameDefinition +} - if scraper == nil { - return nil, fmt.Errorf("%w: name %v", ErrNotFound, s.scraper.Scraper) +func (s *jsonNameScraper) scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error) { + scraper, err := s.getJsonScraper(s.definition.Scraper) + if err != nil { + return nil, err } const placeholder = "{}" @@ -131,7 +123,7 @@ func (s *jsonScraper) scrapeByName(ctx context.Context, name string, ty ScrapeCo // replace the placeholder string with the URL-escaped name escapedName := url.QueryEscape(name) - url := s.scraper.QueryURL + url := s.definition.QueryURL url = strings.ReplaceAll(url, placeholder, escapedName) doc, err := s.loadURL(ctx, url) @@ -172,18 +164,22 @@ func (s *jsonScraper) scrapeByName(ctx context.Context, name string, ty ScrapeCo return nil, ErrNotSupported } -func (s *jsonScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) { +type jsonFragmentScraper struct { + jsonScraper + definition ByFragmentDefinition +} + +func (s *jsonFragmentScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) { // construct the URL queryURL := queryURLParametersFromScene(scene) - if s.scraper.QueryURLReplacements != nil { - queryURL.applyReplacements(s.scraper.QueryURLReplacements) + if s.definition.QueryURLReplacements != nil { + queryURL.applyReplacements(s.definition.QueryURLReplacements) } - url := queryURL.constructURL(s.scraper.QueryURL) + url := queryURL.constructURL(s.definition.QueryURL) - scraper := s.getJsonScraper() - - if scraper == nil { - return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config") + scraper, err := s.getJsonScraper(s.definition.Scraper) + if err != nil { + return nil, err } doc, err := s.loadURL(ctx, url) @@ -196,7 +192,7 @@ func (s *jsonScraper) 
scrapeSceneByScene(ctx context.Context, scene *models.Scen return scraper.scrapeScene(ctx, q) } -func (s *jsonScraper) scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) { +func (s *jsonFragmentScraper) scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) { switch { case input.Gallery != nil: return nil, fmt.Errorf("%w: cannot use a json scraper as a gallery fragment scraper", ErrNotSupported) @@ -210,15 +206,14 @@ func (s *jsonScraper) scrapeByFragment(ctx context.Context, input Input) (Scrape // construct the URL queryURL := queryURLParametersFromScrapedScene(scene) - if s.scraper.QueryURLReplacements != nil { - queryURL.applyReplacements(s.scraper.QueryURLReplacements) + if s.definition.QueryURLReplacements != nil { + queryURL.applyReplacements(s.definition.QueryURLReplacements) } - url := queryURL.constructURL(s.scraper.QueryURL) + url := queryURL.constructURL(s.definition.QueryURL) - scraper := s.getJsonScraper() - - if scraper == nil { - return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") + scraper, err := s.getJsonScraper(s.definition.Scraper) + if err != nil { + return nil, err } doc, err := s.loadURL(ctx, url) @@ -231,18 +226,17 @@ func (s *jsonScraper) scrapeByFragment(ctx context.Context, input Input) (Scrape return scraper.scrapeScene(ctx, q) } -func (s *jsonScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error) { +func (s *jsonFragmentScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error) { // construct the URL queryURL := queryURLParametersFromImage(image) - if s.scraper.QueryURLReplacements != nil { - queryURL.applyReplacements(s.scraper.QueryURLReplacements) + if s.definition.QueryURLReplacements != nil { + queryURL.applyReplacements(s.definition.QueryURLReplacements) } - url := queryURL.constructURL(s.scraper.QueryURL) + url := queryURL.constructURL(s.definition.QueryURL) 
- scraper := s.getJsonScraper() - - if scraper == nil { - return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config") + scraper, err := s.getJsonScraper(s.definition.Scraper) + if err != nil { + return nil, err } doc, err := s.loadURL(ctx, url) @@ -255,18 +249,17 @@ func (s *jsonScraper) scrapeImageByImage(ctx context.Context, image *models.Imag return scraper.scrapeImage(ctx, q) } -func (s *jsonScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) { +func (s *jsonFragmentScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) { // construct the URL queryURL := queryURLParametersFromGallery(gallery) - if s.scraper.QueryURLReplacements != nil { - queryURL.applyReplacements(s.scraper.QueryURLReplacements) + if s.definition.QueryURLReplacements != nil { + queryURL.applyReplacements(s.definition.QueryURLReplacements) } - url := queryURL.constructURL(s.scraper.QueryURL) + url := queryURL.constructURL(s.definition.QueryURL) - scraper := s.getJsonScraper() - - if scraper == nil { - return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config") + scraper, err := s.getJsonScraper(s.definition.Scraper) + if err != nil { + return nil, err } doc, err := s.loadURL(ctx, url) diff --git a/pkg/scraper/json_test.go b/pkg/scraper/json_test.go index 249f17ad6..285c15489 100644 --- a/pkg/scraper/json_test.go +++ b/pkg/scraper/json_test.go @@ -68,7 +68,7 @@ jsonScrapers: } ` - c := &config{} + c := &Definition{} err := yaml.Unmarshal([]byte(yamlStr), &c) if err != nil { diff --git a/pkg/scraper/mapped.go b/pkg/scraper/mapped.go index 3fac22ec3..d92415c61 100644 --- a/pkg/scraper/mapped.go +++ b/pkg/scraper/mapped.go @@ -2,22 +2,9 @@ package scraper import ( "context" - "errors" - "fmt" - "math" - "net/url" - "reflect" - "regexp" - "strconv" - "strings" - "time" - "gopkg.in/yaml.v2" - - 
"github.com/stashapp/stash/pkg/javascript" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/sliceutil" ) type mappedQuery interface { @@ -28,850 +15,7 @@ type mappedQuery interface { getURL() string } -type commonMappedConfig map[string]string - -type mappedConfig map[string]mappedScraperAttrConfig - -func (s mappedConfig) applyCommon(c commonMappedConfig, src string) string { - if c == nil { - return src - } - - ret := src - for commonKey, commonVal := range c { - ret = strings.ReplaceAll(ret, commonKey, commonVal) - } - - return ret -} - -// extractHostname parses a URL string and returns the hostname. -// Returns empty string if the URL cannot be parsed. -func extractHostname(urlStr string) string { - if urlStr == "" { - return "" - } - - u, err := url.Parse(urlStr) - if err != nil { - logger.Warnf("Error parsing URL '%s': %s", urlStr, err.Error()) - return "" - } - - return u.Hostname() -} - -type isMultiFunc func(key string) bool - -func (s mappedConfig) process(ctx context.Context, q mappedQuery, common commonMappedConfig, isMulti isMultiFunc) mappedResults { - var ret mappedResults - - for k, attrConfig := range s { - - if attrConfig.Fixed != "" { - // TODO - not sure if this needs to set _all_ indexes for the key - const i = 0 - // Support {inputURL} and {inputHostname} placeholders in fixed values - value := strings.ReplaceAll(attrConfig.Fixed, "{inputURL}", q.getURL()) - value = strings.ReplaceAll(value, "{inputHostname}", extractHostname(q.getURL())) - ret = ret.setSingleValue(i, k, value) - } else { - selector := attrConfig.Selector - selector = s.applyCommon(common, selector) - // Support {inputURL} and {inputHostname} placeholders in selectors - selector = strings.ReplaceAll(selector, "{inputURL}", q.getURL()) - selector = strings.ReplaceAll(selector, "{inputHostname}", extractHostname(q.getURL())) - - found, err := q.runQuery(selector) - if err != nil { - logger.Warnf("key '%v': %v", k, 
err) - } - - if len(found) > 0 { - result := s.postProcess(ctx, q, attrConfig, found) - - // HACK - if the key is URLs, then we need to set the value as a multi-value - isMulti := isMulti != nil && isMulti(k) - if isMulti { - ret = ret.setMultiValue(0, k, result) - } else { - for i, text := range result { - ret = ret.setSingleValue(i, k, text) - } - } - } - } - } - - return ret -} - -func (s mappedConfig) postProcess(ctx context.Context, q mappedQuery, attrConfig mappedScraperAttrConfig, found []string) []string { - // check if we're concatenating the results into a single result - var ret []string - if attrConfig.hasConcat() { - result := attrConfig.concatenateResults(found) - result = attrConfig.postProcess(ctx, result, q) - if attrConfig.hasSplit() { - results := attrConfig.splitString(result) - // skip cleaning when the query is used for searching - if q.getType() == SearchQuery { - return results - } - results = attrConfig.cleanResults(results) - return results - } - - ret = []string{result} - } else { - for _, text := range found { - text = attrConfig.postProcess(ctx, text, q) - if attrConfig.hasSplit() { - return attrConfig.splitString(text) - } - - ret = append(ret, text) - } - // skip cleaning when the query is used for searching - if q.getType() == SearchQuery { - return ret - } - ret = attrConfig.cleanResults(ret) - - } - - return ret -} - -type mappedSceneScraperConfig struct { - mappedConfig - - Tags mappedConfig `yaml:"Tags"` - Performers mappedPerformerScraperConfig `yaml:"Performers"` - Studio mappedConfig `yaml:"Studio"` - Movies mappedConfig `yaml:"Movies"` - Groups mappedConfig `yaml:"Groups"` -} -type _mappedSceneScraperConfig mappedSceneScraperConfig - -const ( - mappedScraperConfigSceneTags = "Tags" - mappedScraperConfigScenePerformers = "Performers" - mappedScraperConfigSceneStudio = "Studio" - mappedScraperConfigSceneMovies = "Movies" - mappedScraperConfigSceneGroups = "Groups" -) - -func (s *mappedSceneScraperConfig) UnmarshalYAML(unmarshal 
func(interface{}) error) error { - // HACK - unmarshal to map first, then remove known scene sub-fields, then - // remarshal to yaml and pass that down to the base map - parentMap := make(map[string]interface{}) - if err := unmarshal(parentMap); err != nil { - return err - } - - // move the known sub-fields to a separate map - thisMap := make(map[string]interface{}) - - thisMap[mappedScraperConfigSceneTags] = parentMap[mappedScraperConfigSceneTags] - thisMap[mappedScraperConfigScenePerformers] = parentMap[mappedScraperConfigScenePerformers] - thisMap[mappedScraperConfigSceneStudio] = parentMap[mappedScraperConfigSceneStudio] - thisMap[mappedScraperConfigSceneMovies] = parentMap[mappedScraperConfigSceneMovies] - thisMap[mappedScraperConfigSceneGroups] = parentMap[mappedScraperConfigSceneGroups] - - delete(parentMap, mappedScraperConfigSceneTags) - delete(parentMap, mappedScraperConfigScenePerformers) - delete(parentMap, mappedScraperConfigSceneStudio) - delete(parentMap, mappedScraperConfigSceneMovies) - delete(parentMap, mappedScraperConfigSceneGroups) - - // re-unmarshal the sub-fields - yml, err := yaml.Marshal(thisMap) - if err != nil { - return err - } - - // needs to be a different type to prevent infinite recursion - c := _mappedSceneScraperConfig{} - if err := yaml.Unmarshal(yml, &c); err != nil { - return err - } - - *s = mappedSceneScraperConfig(c) - - yml, err = yaml.Marshal(parentMap) - if err != nil { - return err - } - - if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { - return err - } - - return nil -} - -type mappedGalleryScraperConfig struct { - mappedConfig - - Tags mappedConfig `yaml:"Tags"` - Performers mappedConfig `yaml:"Performers"` - Studio mappedConfig `yaml:"Studio"` -} - -type _mappedGalleryScraperConfig mappedGalleryScraperConfig - -func (s *mappedGalleryScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { - // HACK - unmarshal to map first, then remove known scene sub-fields, then - // remarshal to yaml and 
pass that down to the base map - parentMap := make(map[string]interface{}) - if err := unmarshal(parentMap); err != nil { - return err - } - - // move the known sub-fields to a separate map - thisMap := make(map[string]interface{}) - - thisMap[mappedScraperConfigSceneTags] = parentMap[mappedScraperConfigSceneTags] - thisMap[mappedScraperConfigScenePerformers] = parentMap[mappedScraperConfigScenePerformers] - thisMap[mappedScraperConfigSceneStudio] = parentMap[mappedScraperConfigSceneStudio] - - delete(parentMap, mappedScraperConfigSceneTags) - delete(parentMap, mappedScraperConfigScenePerformers) - delete(parentMap, mappedScraperConfigSceneStudio) - - // re-unmarshal the sub-fields - yml, err := yaml.Marshal(thisMap) - if err != nil { - return err - } - - // needs to be a different type to prevent infinite recursion - c := _mappedGalleryScraperConfig{} - if err := yaml.Unmarshal(yml, &c); err != nil { - return err - } - - *s = mappedGalleryScraperConfig(c) - - yml, err = yaml.Marshal(parentMap) - if err != nil { - return err - } - - if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { - return err - } - - return nil -} - -type mappedImageScraperConfig struct { - mappedConfig - - Tags mappedConfig `yaml:"Tags"` - Performers mappedConfig `yaml:"Performers"` - Studio mappedConfig `yaml:"Studio"` -} -type _mappedImageScraperConfig mappedImageScraperConfig - -func (s *mappedImageScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { - // HACK - unmarshal to map first, then remove known scene sub-fields, then - // remarshal to yaml and pass that down to the base map - parentMap := make(map[string]interface{}) - if err := unmarshal(parentMap); err != nil { - return err - } - - // move the known sub-fields to a separate map - thisMap := make(map[string]interface{}) - - thisMap[mappedScraperConfigSceneTags] = parentMap[mappedScraperConfigSceneTags] - thisMap[mappedScraperConfigScenePerformers] = parentMap[mappedScraperConfigScenePerformers] - 
thisMap[mappedScraperConfigSceneStudio] = parentMap[mappedScraperConfigSceneStudio] - - delete(parentMap, mappedScraperConfigSceneTags) - delete(parentMap, mappedScraperConfigScenePerformers) - delete(parentMap, mappedScraperConfigSceneStudio) - - // re-unmarshal the sub-fields - yml, err := yaml.Marshal(thisMap) - if err != nil { - return err - } - - // needs to be a different type to prevent infinite recursion - c := _mappedImageScraperConfig{} - if err := yaml.Unmarshal(yml, &c); err != nil { - return err - } - - *s = mappedImageScraperConfig(c) - - yml, err = yaml.Marshal(parentMap) - if err != nil { - return err - } - - if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { - return err - } - - return nil -} - -type mappedPerformerScraperConfig struct { - mappedConfig - - Tags mappedConfig `yaml:"Tags"` -} -type _mappedPerformerScraperConfig mappedPerformerScraperConfig - -const ( - mappedScraperConfigPerformerTags = "Tags" -) - -func (s *mappedPerformerScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { - // HACK - unmarshal to map first, then remove known scene sub-fields, then - // remarshal to yaml and pass that down to the base map - parentMap := make(map[string]interface{}) - if err := unmarshal(parentMap); err != nil { - return err - } - - // move the known sub-fields to a separate map - thisMap := make(map[string]interface{}) - - thisMap[mappedScraperConfigPerformerTags] = parentMap[mappedScraperConfigPerformerTags] - - delete(parentMap, mappedScraperConfigPerformerTags) - - // re-unmarshal the sub-fields - yml, err := yaml.Marshal(thisMap) - if err != nil { - return err - } - - // needs to be a different type to prevent infinite recursion - c := _mappedPerformerScraperConfig{} - if err := yaml.Unmarshal(yml, &c); err != nil { - return err - } - - *s = mappedPerformerScraperConfig(c) - - yml, err = yaml.Marshal(parentMap) - if err != nil { - return err - } - - if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { - 
return err - } - - return nil -} - -type mappedMovieScraperConfig struct { - mappedConfig - - Studio mappedConfig `yaml:"Studio"` - Tags mappedConfig `yaml:"Tags"` -} -type _mappedMovieScraperConfig mappedMovieScraperConfig - -const ( - mappedScraperConfigMovieStudio = "Studio" - mappedScraperConfigMovieTags = "Tags" -) - -func (s *mappedMovieScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { - // HACK - unmarshal to map first, then remove known movie sub-fields, then - // remarshal to yaml and pass that down to the base map - parentMap := make(map[string]interface{}) - if err := unmarshal(parentMap); err != nil { - return err - } - - // move the known sub-fields to a separate map - thisMap := make(map[string]interface{}) - - thisMap[mappedScraperConfigMovieStudio] = parentMap[mappedScraperConfigMovieStudio] - delete(parentMap, mappedScraperConfigMovieStudio) - - thisMap[mappedScraperConfigMovieTags] = parentMap[mappedScraperConfigMovieTags] - delete(parentMap, mappedScraperConfigMovieTags) - - // re-unmarshal the sub-fields - yml, err := yaml.Marshal(thisMap) - if err != nil { - return err - } - - // needs to be a different type to prevent infinite recursion - c := _mappedMovieScraperConfig{} - if err := yaml.Unmarshal(yml, &c); err != nil { - return err - } - - *s = mappedMovieScraperConfig(c) - - yml, err = yaml.Marshal(parentMap) - if err != nil { - return err - } - - if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { - return err - } - - return nil -} - -type mappedRegexConfig struct { - Regex string `yaml:"regex"` - With string `yaml:"with"` -} - -type mappedRegexConfigs []mappedRegexConfig - -func (c mappedRegexConfig) apply(value string) string { - if c.Regex != "" { - re, err := regexp.Compile(c.Regex) - if err != nil { - logger.Warnf("Error compiling regex '%s': %s", c.Regex, err.Error()) - return value - } - - ret := re.ReplaceAllString(value, c.With) - - // trim leading and trailing whitespace - // this is done to maintain 
backwards compatibility with existing - // scrapers - ret = strings.TrimSpace(ret) - - logger.Debugf(`Replace: '%s' with '%s'`, c.Regex, c.With) - logger.Debugf("Before: %s", value) - logger.Debugf("After: %s", ret) - return ret - } - - return value -} - -func (c mappedRegexConfigs) apply(value string) string { - // apply regex in order - for _, config := range c { - value = config.apply(value) - } - - return value -} - -type postProcessAction interface { - Apply(ctx context.Context, value string, q mappedQuery) string -} - -type postProcessParseDate string - -func (p *postProcessParseDate) Apply(ctx context.Context, value string, q mappedQuery) string { - parseDate := string(*p) - - const internalDateFormat = "2006-01-02" - - valueLower := strings.ToLower(value) - if valueLower == "today" || valueLower == "yesterday" { // handle today, yesterday - dt := time.Now() - if valueLower == "yesterday" { // subtract 1 day from now - dt = dt.AddDate(0, 0, -1) - } - return dt.Format(internalDateFormat) - } - - if parseDate == "" { - return value - } - - if parseDate == "unix" { - // try to parse the date using unix timestamp format - // if it fails, then just fall back to the original value - timeAsInt, err := strconv.ParseInt(value, 10, 64) - if err != nil { - logger.Warnf("Error parsing date string '%s' using unix timestamp format : %s", value, err.Error()) - return value - } - parsedValue := time.Unix(timeAsInt, 0) - - return parsedValue.Format(internalDateFormat) - } - - // try to parse the date using the pattern - // if it fails, then just fall back to the original value - parsedValue, err := time.Parse(parseDate, value) - if err != nil { - logger.Warnf("Error parsing date string '%s' using format '%s': %s", value, parseDate, err.Error()) - return value - } - - // convert it into our date format - return parsedValue.Format(internalDateFormat) -} - -type postProcessSubtractDays bool - -func (p *postProcessSubtractDays) Apply(ctx context.Context, value string, q 
mappedQuery) string { - const internalDateFormat = "2006-01-02" - - i, err := strconv.Atoi(value) - if err != nil { - logger.Warnf("Error parsing day string %s: %s", value, err) - return value - } - - dt := time.Now() - dt = dt.AddDate(0, 0, -i) - return dt.Format(internalDateFormat) -} - -type postProcessReplace mappedRegexConfigs - -func (c *postProcessReplace) Apply(ctx context.Context, value string, q mappedQuery) string { - replace := mappedRegexConfigs(*c) - return replace.apply(value) -} - -type postProcessSubScraper mappedScraperAttrConfig - -func (p *postProcessSubScraper) Apply(ctx context.Context, value string, q mappedQuery) string { - subScrapeConfig := mappedScraperAttrConfig(*p) - - logger.Debugf("Sub-scraping for: %s", value) - ss := q.subScrape(ctx, value) - - if ss != nil { - found, err := ss.runQuery(subScrapeConfig.Selector) - if err != nil { - logger.Warnf("subscrape for '%v': %v", value, err) - } - - if len(found) > 0 { - // check if we're concatenating the results into a single result - var result string - if subScrapeConfig.hasConcat() { - result = subScrapeConfig.concatenateResults(found) - } else { - result = found[0] - } - - result = subScrapeConfig.postProcess(ctx, result, ss) - return result - } - } - - return "" -} - -type postProcessMap map[string]string - -func (p *postProcessMap) Apply(ctx context.Context, value string, q mappedQuery) string { - // return the mapped value if present - m := *p - mapped, ok := m[value] - - if ok { - return mapped - } - - return value -} - -type postProcessFeetToCm bool - -func (p *postProcessFeetToCm) Apply(ctx context.Context, value string, q mappedQuery) string { - const foot_in_cm = 30.48 - const inch_in_cm = 2.54 - - reg := regexp.MustCompile("[0-9]+") - filtered := reg.FindAllString(value, -1) - - var feet float64 - var inches float64 - if len(filtered) > 0 { - feet, _ = strconv.ParseFloat(filtered[0], 64) - } - if len(filtered) > 1 { - inches, _ = strconv.ParseFloat(filtered[1], 64) - } - - var 
centimeters = feet*foot_in_cm + inches*inch_in_cm - - // Return rounded integer string - return strconv.Itoa(int(math.Round(centimeters))) -} - -type postProcessLbToKg bool - -func (p *postProcessLbToKg) Apply(ctx context.Context, value string, q mappedQuery) string { - const lb_in_kg = 0.45359237 - w, err := strconv.ParseFloat(value, 64) - if err == nil { - w *= lb_in_kg - value = strconv.Itoa(int(math.Round(w))) - } - return value -} - -type postProcessJavascript string - -func (p *postProcessJavascript) Apply(ctx context.Context, value string, q mappedQuery) string { - vm := javascript.NewVM() - if err := vm.Set("value", value); err != nil { - logger.Warnf("javascript failed to set value: %v", err) - return value - } - - log := &javascript.Log{ - Logger: logger.Logger, - Prefix: "", - ProgressChan: make(chan float64), - } - - if err := log.AddToVM("log", vm); err != nil { - logger.Logger.Errorf("error adding log API: %w", err) - } - - util := &javascript.Util{} - if err := util.AddToVM("util", vm); err != nil { - logger.Logger.Errorf("error adding util API: %w", err) - } - - script, err := javascript.CompileScript("", "(function() { "+string(*p)+"})()") - if err != nil { - logger.Warnf("javascript failed to compile: %v", err) - return value - } - - output, err := vm.RunProgram(script) - if err != nil { - logger.Warnf("javascript failed to run: %v", err) - return value - } - - // assume output is string - return output.String() -} - -type mappedPostProcessAction struct { - ParseDate string `yaml:"parseDate"` - SubtractDays bool `yaml:"subtractDays"` - Replace mappedRegexConfigs `yaml:"replace"` - SubScraper *mappedScraperAttrConfig `yaml:"subScraper"` - Map map[string]string `yaml:"map"` - FeetToCm bool `yaml:"feetToCm"` - LbToKg bool `yaml:"lbToKg"` - Javascript string `yaml:"javascript"` -} - -func (a mappedPostProcessAction) ToPostProcessAction() (postProcessAction, error) { - var found string - var ret postProcessAction - - ensureOnly := func(field string) 
error { - if found != "" { - return fmt.Errorf("post-process actions must have a single field, found %s and %s", found, field) - } - found = field - return nil - } - - if a.ParseDate != "" { - found = "parseDate" - action := postProcessParseDate(a.ParseDate) - ret = &action - } - if len(a.Replace) > 0 { - if err := ensureOnly("replace"); err != nil { - return nil, err - } - action := postProcessReplace(a.Replace) - ret = &action - } - if a.SubScraper != nil { - if err := ensureOnly("subScraper"); err != nil { - return nil, err - } - action := postProcessSubScraper(*a.SubScraper) - ret = &action - } - if a.Map != nil { - if err := ensureOnly("map"); err != nil { - return nil, err - } - action := postProcessMap(a.Map) - ret = &action - } - if a.FeetToCm { - if err := ensureOnly("feetToCm"); err != nil { - return nil, err - } - action := postProcessFeetToCm(a.FeetToCm) - ret = &action - } - if a.LbToKg { - if err := ensureOnly("lbToKg"); err != nil { - return nil, err - } - action := postProcessLbToKg(a.LbToKg) - ret = &action - } - if a.SubtractDays { - if err := ensureOnly("subtractDays"); err != nil { - return nil, err - } - action := postProcessSubtractDays(a.SubtractDays) - ret = &action - } - if a.Javascript != "" { - if err := ensureOnly("javascript"); err != nil { - return nil, err - } - action := postProcessJavascript(a.Javascript) - ret = &action - } - - if ret == nil { - return nil, errors.New("invalid post-process action") - } - - return ret, nil -} - -type mappedScraperAttrConfig struct { - Selector string `yaml:"selector"` - Fixed string `yaml:"fixed"` - PostProcess []mappedPostProcessAction `yaml:"postProcess"` - Concat string `yaml:"concat"` - Split string `yaml:"split"` - - postProcessActions []postProcessAction - - // Deprecated: use PostProcess instead - ParseDate string `yaml:"parseDate"` - Replace mappedRegexConfigs `yaml:"replace"` - SubScraper *mappedScraperAttrConfig `yaml:"subScraper"` -} - -type _mappedScraperAttrConfig 
mappedScraperAttrConfig - -func (c *mappedScraperAttrConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { - // try unmarshalling into a string first - if err := unmarshal(&c.Selector); err != nil { - // if it's a type error then we try to unmarshall to the full object - var typeErr *yaml.TypeError - if !errors.As(err, &typeErr) { - return err - } - - // unmarshall to full object - // need it as a separate object - t := _mappedScraperAttrConfig{} - if err = unmarshal(&t); err != nil { - return err - } - - *c = mappedScraperAttrConfig(t) - } - - return c.convertPostProcessActions() -} - -func (c *mappedScraperAttrConfig) convertPostProcessActions() error { - // ensure we don't have the old deprecated fields and the new post process field - if len(c.PostProcess) > 0 { - if c.ParseDate != "" || len(c.Replace) > 0 || c.SubScraper != nil { - return errors.New("cannot include postProcess and (parseDate, replace, subScraper) deprecated fields") - } - - // convert xpathPostProcessAction actions to postProcessActions - for _, a := range c.PostProcess { - action, err := a.ToPostProcessAction() - if err != nil { - return err - } - c.postProcessActions = append(c.postProcessActions, action) - } - - c.PostProcess = nil - } else { - // convert old deprecated fields if present - // in same order as they used to be executed - if len(c.Replace) > 0 { - action := postProcessReplace(c.Replace) - c.postProcessActions = append(c.postProcessActions, &action) - c.Replace = nil - } - - if c.SubScraper != nil { - action := postProcessSubScraper(*c.SubScraper) - c.postProcessActions = append(c.postProcessActions, &action) - c.SubScraper = nil - } - - if c.ParseDate != "" { - action := postProcessParseDate(c.ParseDate) - c.postProcessActions = append(c.postProcessActions, &action) - c.ParseDate = "" - } - } - - return nil -} - -func (c mappedScraperAttrConfig) hasConcat() bool { - return c.Concat != "" -} - -func (c mappedScraperAttrConfig) hasSplit() bool { - return c.Split != "" 
-} - -func (c mappedScraperAttrConfig) concatenateResults(nodes []string) string { - separator := c.Concat - return strings.Join(nodes, separator) -} - -func (c mappedScraperAttrConfig) cleanResults(nodes []string) []string { - cleaned := sliceutil.Unique(nodes) // remove duplicate values - cleaned = sliceutil.Delete(cleaned, "") // remove empty values - return cleaned -} - -func (c mappedScraperAttrConfig) splitString(value string) []string { - separator := c.Split - var res []string - - if separator == "" { - return []string{value} - } - - for _, str := range strings.Split(value, separator) { - if str != "" { - res = append(res, str) - } - } - - return res -} - -func (c mappedScraperAttrConfig) postProcess(ctx context.Context, value string, q mappedQuery) string { - for _, action := range c.postProcessActions { - value = action.Apply(ctx, value, q) - } - - return value -} - -type mappedScrapers map[string]*mappedScraper +type mappedScrapers map[string]mappedScraper type mappedScraper struct { Common commonMappedConfig `yaml:"common"` @@ -885,102 +29,12 @@ type mappedScraper struct { Movie *mappedMovieScraperConfig `yaml:"movie"` } -type mappedResult map[string]interface{} -type mappedResults []mappedResult - -func (r mappedResult) apply(dest interface{}) { - destVal := reflect.ValueOf(dest).Elem() - - // all fields are either string pointers or string slices - for key, value := range r { - if err := mapFieldValue(destVal, key, value); err != nil { - logger.Errorf("Error mapping field %s in %T: %v", key, dest, err) - } - } -} - -func mapFieldValue(destVal reflect.Value, key string, value interface{}) error { - field := destVal.FieldByName(key) - - if !field.IsValid() { - return fmt.Errorf("field %s does not exist on %s", key, destVal.Type().Name()) - } - - if !field.CanSet() { - return fmt.Errorf("field %s cannot be set on %s", key, destVal.Type().Name()) - } - - fieldType := field.Type() - - switch v := value.(type) { - case string: - // if the field is a pointer 
to a string, then we need to convert the string to a pointer - // if the field is a string slice, then we need to convert the string to a slice - switch { - case fieldType.Kind() == reflect.String: - field.SetString(v) - case fieldType.Kind() == reflect.Ptr && fieldType.Elem().Kind() == reflect.String: - ptr := reflect.New(fieldType.Elem()) - ptr.Elem().SetString(v) - field.Set(ptr) - case fieldType.Kind() == reflect.Slice && fieldType.Elem().Kind() == reflect.String: - field.Set(reflect.ValueOf([]string{v})) - default: - return fmt.Errorf("cannot convert %T to %s", value, fieldType) - } - case []string: - // expect the field to be a string slice - if fieldType.Kind() == reflect.Slice && fieldType.Elem().Kind() == reflect.String { - field.Set(reflect.ValueOf(v)) - } else { - return fmt.Errorf("cannot convert %T to %s", value, fieldType) - } - default: - // fallback to reflection - reflectValue := reflect.ValueOf(value) - reflectValueType := reflectValue.Type() - - switch { - case reflectValueType.ConvertibleTo(fieldType): - field.Set(reflectValue.Convert(fieldType)) - case fieldType.Kind() == reflect.Pointer && reflectValueType.ConvertibleTo(fieldType.Elem()): - ptr := reflect.New(fieldType.Elem()) - ptr.Elem().Set(reflectValue.Convert(fieldType.Elem())) - field.Set(ptr) - default: - return fmt.Errorf("cannot convert %T to %s", value, fieldType) - } - } - - return nil -} - -func (r mappedResults) setSingleValue(index int, key string, value string) mappedResults { - if index >= len(r) { - r = append(r, make(mappedResult)) - } - - logger.Debugf(`[%d][%s] = %s`, index, key, value) - r[index][key] = value - return r -} - -func (r mappedResults) setMultiValue(index int, key string, value []string) mappedResults { - if index >= len(r) { - r = append(r, make(mappedResult)) - } - - logger.Debugf(`[%d][%s] = %s`, index, key, value) - r[index][key] = value - return r -} - func urlsIsMulti(key string) bool { return key == "URLs" } func (s mappedScraper) scrapePerformer(ctx 
context.Context, q mappedQuery) (*models.ScrapedPerformer, error) { - var ret models.ScrapedPerformer + var ret *models.ScrapedPerformer performerMap := s.Performer if performerMap == nil { @@ -992,31 +46,26 @@ func (s mappedScraper) scrapePerformer(ctx context.Context, q mappedQuery) (*mod results := performerMap.process(ctx, q, s.Common, urlsIsMulti) // now apply the tags + var tagResults mappedResults + if performerTagsMap != nil { logger.Debug(`Processing performer tags:`) - tagResults := performerTagsMap.process(ctx, q, s.Common, nil) - - for _, p := range tagResults { - tag := &models.ScrapedTag{} - p.apply(tag) - ret.Tags = append(ret.Tags, tag) - } + tagResults = performerTagsMap.process(ctx, q, s.Common, nil) } - if len(results) == 0 && len(ret.Tags) == 0 { + if len(results) == 0 { return nil, nil } if len(results) > 0 { - results[0].apply(&ret) + ret = results[0].scrapedPerformer() + ret.Tags = tagResults.scrapedTags() } - return &ret, nil + return ret, nil } func (s mappedScraper) scrapePerformers(ctx context.Context, q mappedQuery) ([]*models.ScrapedPerformer, error) { - var ret []*models.ScrapedPerformer - performerMap := s.Performer if performerMap == nil { return nil, nil @@ -1024,13 +73,7 @@ func (s mappedScraper) scrapePerformers(ctx context.Context, q mappedQuery) ([]* // isMulti is nil because it will behave incorrect when scraping multiple performers results := performerMap.process(ctx, q, s.Common, nil) - for _, r := range results { - var p models.ScrapedPerformer - r.apply(&p) - ret = append(ret, &p) - } - - return ret, nil + return results.scrapedPerformers(), nil } // processSceneRelationships sets the relationships on the models.ScrapedScene. It returns true if any relationships were set. 
@@ -1048,7 +91,7 @@ func (s mappedScraper) processSceneRelationships(ctx context.Context, q mappedQu if sceneTagsMap != nil { logger.Debug(`Processing scene tags:`) - ret.Tags = processRelationships[models.ScrapedTag](ctx, s, sceneTagsMap, q) + ret.Tags = sceneTagsMap.process(ctx, q, s.Common, nil).scrapedTags() } if sceneStudioMap != nil { @@ -1056,21 +99,20 @@ func (s mappedScraper) processSceneRelationships(ctx context.Context, q mappedQu studioResults := sceneStudioMap.process(ctx, q, s.Common, nil) if len(studioResults) > 0 && resultIndex < len(studioResults) { - studio := &models.ScrapedStudio{} // when doing a `search` scrape get the related studio - studioResults[resultIndex].apply(studio) + studio := studioResults[resultIndex].scrapedStudio() ret.Studio = studio } } if sceneMoviesMap != nil { logger.Debug(`Processing scene movies:`) - ret.Movies = processRelationships[models.ScrapedMovie](ctx, s, sceneMoviesMap, q) + ret.Movies = sceneMoviesMap.process(ctx, q, s.Common, nil).scrapedMovies() } if sceneGroupsMap != nil { logger.Debug(`Processing scene groups:`) - ret.Groups = processRelationships[models.ScrapedGroup](ctx, s, sceneGroupsMap, q) + ret.Groups = sceneGroupsMap.process(ctx, q, s.Common, nil).scrapedGroups() } return len(ret.Performers) > 0 || len(ret.Tags) > 0 || ret.Studio != nil || len(ret.Movies) > 0 || len(ret.Groups) > 0 @@ -1094,12 +136,10 @@ func (s mappedScraper) processPerformers(ctx context.Context, performersMap mapp } for _, p := range performerResults { - performer := &models.ScrapedPerformer{} - p.apply(performer) + performer := p.scrapedPerformer() for _, p := range performerTagResults { - tag := &models.ScrapedTag{} - p.apply(tag) + tag := p.scrapedTag() performer.Tags = append(performer.Tags, tag) } @@ -1110,20 +150,6 @@ func (s mappedScraper) processPerformers(ctx context.Context, performersMap mapp return ret } -func processRelationships[T any](ctx context.Context, s mappedScraper, relationshipMap mappedConfig, q mappedQuery) 
[]*T { - var ret []*T - - results := relationshipMap.process(ctx, q, s.Common, nil) - - for _, p := range results { - var value T - p.apply(&value) - ret = append(ret, &value) - } - - return ret -} - func (s mappedScraper) scrapeScenes(ctx context.Context, q mappedQuery) ([]*models.ScrapedScene, error) { var ret []*models.ScrapedScene @@ -1139,10 +165,9 @@ func (s mappedScraper) scrapeScenes(ctx context.Context, q mappedQuery) ([]*mode for i, r := range results { logger.Debug(`Processing scene:`) - var thisScene models.ScrapedScene - r.apply(&thisScene) - s.processSceneRelationships(ctx, q, i, &thisScene) - ret = append(ret, &thisScene) + thisScene := r.scrapedScene() + s.processSceneRelationships(ctx, q, i, thisScene) + ret = append(ret, thisScene) } return ret, nil @@ -1159,17 +184,17 @@ func (s mappedScraper) scrapeScene(ctx context.Context, q mappedQuery) (*models. logger.Debug(`Processing scene:`) results := sceneMap.process(ctx, q, s.Common, urlsIsMulti) - var ret models.ScrapedScene + var ret *models.ScrapedScene if len(results) > 0 { - results[0].apply(&ret) + ret = results[0].scrapedScene() } - hasRelationships := s.processSceneRelationships(ctx, q, 0, &ret) + hasRelationships := s.processSceneRelationships(ctx, q, 0, ret) // #3953 - process only returns results if the non-relationship fields are // populated // only return if we have results or relationships if len(results) > 0 || hasRelationships { - return &ret, nil + return ret, nil } return nil, nil @@ -1192,15 +217,19 @@ func (s mappedScraper) scrapeImage(ctx context.Context, q mappedQuery) (*models. 
logger.Debug(`Processing image:`) results := imageMap.process(ctx, q, s.Common, urlsIsMulti) + if len(results) > 0 { + ret = *results[0].scrapedImage() + } + // now apply the performers and tags if imagePerformersMap != nil { logger.Debug(`Processing image performers:`) - ret.Performers = processRelationships[models.ScrapedPerformer](ctx, s, imagePerformersMap, q) + ret.Performers = imagePerformersMap.process(ctx, q, s.Common, nil).scrapedPerformers() } if imageTagsMap != nil { logger.Debug(`Processing image tags:`) - ret.Tags = processRelationships[models.ScrapedTag](ctx, s, imageTagsMap, q) + ret.Tags = imageTagsMap.process(ctx, q, s.Common, nil).scrapedTags() } if imageStudioMap != nil { @@ -1208,9 +237,7 @@ func (s mappedScraper) scrapeImage(ctx context.Context, q mappedQuery) (*models. studioResults := imageStudioMap.process(ctx, q, s.Common, nil) if len(studioResults) > 0 { - studio := &models.ScrapedStudio{} - studioResults[0].apply(studio) - ret.Studio = studio + ret.Studio = studioResults[0].scrapedStudio() } } @@ -1219,10 +246,6 @@ func (s mappedScraper) scrapeImage(ctx context.Context, q mappedQuery) (*models. 
return nil, nil } - if len(results) > 0 { - results[0].apply(&ret) - } - return &ret, nil } @@ -1243,27 +266,22 @@ func (s mappedScraper) scrapeGallery(ctx context.Context, q mappedQuery) (*model logger.Debug(`Processing gallery:`) results := galleryMap.process(ctx, q, s.Common, urlsIsMulti) + if len(results) > 0 { + ret = *results[0].scrapedGallery() + } + // now apply the performers and tags if galleryPerformersMap != nil { logger.Debug(`Processing gallery performers:`) performerResults := galleryPerformersMap.process(ctx, q, s.Common, urlsIsMulti) - for _, p := range performerResults { - performer := &models.ScrapedPerformer{} - p.apply(performer) - ret.Performers = append(ret.Performers, performer) - } + ret.Performers = performerResults.scrapedPerformers() } if galleryTagsMap != nil { logger.Debug(`Processing gallery tags:`) tagResults := galleryTagsMap.process(ctx, q, s.Common, nil) - - for _, p := range tagResults { - tag := &models.ScrapedTag{} - p.apply(tag) - ret.Tags = append(ret.Tags, tag) - } + ret.Tags = tagResults.scrapedTags() } if galleryStudioMap != nil { @@ -1271,9 +289,7 @@ func (s mappedScraper) scrapeGallery(ctx context.Context, q mappedQuery) (*model studioResults := galleryStudioMap.process(ctx, q, s.Common, nil) if len(studioResults) > 0 { - studio := &models.ScrapedStudio{} - studioResults[0].apply(studio) - ret.Studio = studio + ret.Studio = studioResults[0].scrapedStudio() } } @@ -1282,10 +298,6 @@ func (s mappedScraper) scrapeGallery(ctx context.Context, q mappedQuery) (*model return nil, nil } - if len(results) > 0 { - results[0].apply(&ret) - } - return &ret, nil } @@ -1309,14 +321,16 @@ func (s mappedScraper) scrapeGroup(ctx context.Context, q mappedQuery) (*models. 
results := groupMap.process(ctx, q, s.Common, urlsIsMulti) + if len(results) > 0 { + ret = *results[0].scrapedGroup() + } + if groupStudioMap != nil { logger.Debug(`Processing group studio:`) studioResults := groupStudioMap.process(ctx, q, s.Common, nil) if len(studioResults) > 0 { - studio := &models.ScrapedStudio{} - studioResults[0].apply(studio) - ret.Studio = studio + ret.Studio = studioResults[0].scrapedStudio() } } @@ -1325,20 +339,12 @@ func (s mappedScraper) scrapeGroup(ctx context.Context, q mappedQuery) (*models. logger.Debug(`Processing group tags:`) tagResults := groupTagsMap.process(ctx, q, s.Common, nil) - for _, p := range tagResults { - tag := &models.ScrapedTag{} - p.apply(tag) - ret.Tags = append(ret.Tags, tag) - } + ret.Tags = tagResults.scrapedTags() } if len(results) == 0 && ret.Studio == nil && len(ret.Tags) == 0 { return nil, nil } - if len(results) > 0 { - results[0].apply(&ret) - } - return &ret, nil } diff --git a/pkg/scraper/mapped_config.go b/pkg/scraper/mapped_config.go new file mode 100644 index 000000000..920bf74b4 --- /dev/null +++ b/pkg/scraper/mapped_config.go @@ -0,0 +1,537 @@ +package scraper + +import ( + "context" + "errors" + "net/url" + "strings" + + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/sliceutil" + "gopkg.in/yaml.v2" +) + +type commonMappedConfig map[string]string + +type mappedConfig map[string]mappedScraperAttrConfig + +func (s mappedConfig) applyCommon(c commonMappedConfig, src string) string { + if c == nil { + return src + } + + ret := src + for commonKey, commonVal := range c { + ret = strings.ReplaceAll(ret, commonKey, commonVal) + } + + return ret +} + +// extractHostname parses a URL string and returns the hostname. +// Returns empty string if the URL cannot be parsed. 
+func extractHostname(urlStr string) string { + if urlStr == "" { + return "" + } + + u, err := url.Parse(urlStr) + if err != nil { + logger.Warnf("Error parsing URL '%s': %s", urlStr, err.Error()) + return "" + } + + return u.Hostname() +} + +type isMultiFunc func(key string) bool + +func (s mappedConfig) process(ctx context.Context, q mappedQuery, common commonMappedConfig, isMulti isMultiFunc) mappedResults { + var ret mappedResults + + for k, attrConfig := range s { + + if attrConfig.Fixed != "" { + // TODO - not sure if this needs to set _all_ indexes for the key + const i = 0 + // Support {inputURL} and {inputHostname} placeholders in fixed values + value := strings.ReplaceAll(attrConfig.Fixed, "{inputURL}", q.getURL()) + value = strings.ReplaceAll(value, "{inputHostname}", extractHostname(q.getURL())) + ret = ret.setSingleValue(i, k, value) + } else { + selector := attrConfig.Selector + selector = s.applyCommon(common, selector) + // Support {inputURL} and {inputHostname} placeholders in selectors + selector = strings.ReplaceAll(selector, "{inputURL}", q.getURL()) + selector = strings.ReplaceAll(selector, "{inputHostname}", extractHostname(q.getURL())) + + found, err := q.runQuery(selector) + if err != nil { + logger.Warnf("key '%v': %v", k, err) + } + + if len(found) > 0 { + result := s.postProcess(ctx, q, attrConfig, found) + + // HACK - if the key is URLs, then we need to set the value as a multi-value + isMulti := isMulti != nil && isMulti(k) + if isMulti { + ret = ret.setMultiValue(0, k, result) + } else { + for i, text := range result { + ret = ret.setSingleValue(i, k, text) + } + } + } + } + } + + return ret +} + +func (s mappedConfig) postProcess(ctx context.Context, q mappedQuery, attrConfig mappedScraperAttrConfig, found []string) []string { + // check if we're concatenating the results into a single result + var ret []string + if attrConfig.hasConcat() { + result := attrConfig.concatenateResults(found) + result = attrConfig.postProcess(ctx, 
result, q) + if attrConfig.hasSplit() { + results := attrConfig.splitString(result) + // skip cleaning when the query is used for searching + if q.getType() == SearchQuery { + return results + } + results = attrConfig.cleanResults(results) + return results + } + + ret = []string{result} + } else { + for _, text := range found { + text = attrConfig.postProcess(ctx, text, q) + if attrConfig.hasSplit() { + return attrConfig.splitString(text) + } + + ret = append(ret, text) + } + // skip cleaning when the query is used for searching + if q.getType() == SearchQuery { + return ret + } + ret = attrConfig.cleanResults(ret) + + } + + return ret +} + +type mappedSceneScraperConfig struct { + mappedConfig + + Tags mappedConfig `yaml:"Tags"` + Performers mappedPerformerScraperConfig `yaml:"Performers"` + Studio mappedConfig `yaml:"Studio"` + Movies mappedConfig `yaml:"Movies"` + Groups mappedConfig `yaml:"Groups"` +} +type _mappedSceneScraperConfig mappedSceneScraperConfig + +const ( + mappedScraperConfigSceneTags = "Tags" + mappedScraperConfigScenePerformers = "Performers" + mappedScraperConfigSceneStudio = "Studio" + mappedScraperConfigSceneMovies = "Movies" + mappedScraperConfigSceneGroups = "Groups" +) + +func (s *mappedSceneScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { + // HACK - unmarshal to map first, then remove known scene sub-fields, then + // remarshal to yaml and pass that down to the base map + parentMap := make(map[string]interface{}) + if err := unmarshal(parentMap); err != nil { + return err + } + + // move the known sub-fields to a separate map + thisMap := make(map[string]interface{}) + + thisMap[mappedScraperConfigSceneTags] = parentMap[mappedScraperConfigSceneTags] + thisMap[mappedScraperConfigScenePerformers] = parentMap[mappedScraperConfigScenePerformers] + thisMap[mappedScraperConfigSceneStudio] = parentMap[mappedScraperConfigSceneStudio] + thisMap[mappedScraperConfigSceneMovies] = parentMap[mappedScraperConfigSceneMovies] + 
thisMap[mappedScraperConfigSceneGroups] = parentMap[mappedScraperConfigSceneGroups] + + delete(parentMap, mappedScraperConfigSceneTags) + delete(parentMap, mappedScraperConfigScenePerformers) + delete(parentMap, mappedScraperConfigSceneStudio) + delete(parentMap, mappedScraperConfigSceneMovies) + delete(parentMap, mappedScraperConfigSceneGroups) + + // re-unmarshal the sub-fields + yml, err := yaml.Marshal(thisMap) + if err != nil { + return err + } + + // needs to be a different type to prevent infinite recursion + c := _mappedSceneScraperConfig{} + if err := yaml.Unmarshal(yml, &c); err != nil { + return err + } + + *s = mappedSceneScraperConfig(c) + + yml, err = yaml.Marshal(parentMap) + if err != nil { + return err + } + + if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { + return err + } + + return nil +} + +type mappedGalleryScraperConfig struct { + mappedConfig + + Tags mappedConfig `yaml:"Tags"` + Performers mappedConfig `yaml:"Performers"` + Studio mappedConfig `yaml:"Studio"` +} + +type _mappedGalleryScraperConfig mappedGalleryScraperConfig + +func (s *mappedGalleryScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { + // HACK - unmarshal to map first, then remove known scene sub-fields, then + // remarshal to yaml and pass that down to the base map + parentMap := make(map[string]interface{}) + if err := unmarshal(parentMap); err != nil { + return err + } + + // move the known sub-fields to a separate map + thisMap := make(map[string]interface{}) + + thisMap[mappedScraperConfigSceneTags] = parentMap[mappedScraperConfigSceneTags] + thisMap[mappedScraperConfigScenePerformers] = parentMap[mappedScraperConfigScenePerformers] + thisMap[mappedScraperConfigSceneStudio] = parentMap[mappedScraperConfigSceneStudio] + + delete(parentMap, mappedScraperConfigSceneTags) + delete(parentMap, mappedScraperConfigScenePerformers) + delete(parentMap, mappedScraperConfigSceneStudio) + + // re-unmarshal the sub-fields + yml, err := 
yaml.Marshal(thisMap) + if err != nil { + return err + } + + // needs to be a different type to prevent infinite recursion + c := _mappedGalleryScraperConfig{} + if err := yaml.Unmarshal(yml, &c); err != nil { + return err + } + + *s = mappedGalleryScraperConfig(c) + + yml, err = yaml.Marshal(parentMap) + if err != nil { + return err + } + + if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { + return err + } + + return nil +} + +type mappedImageScraperConfig struct { + mappedConfig + + Tags mappedConfig `yaml:"Tags"` + Performers mappedConfig `yaml:"Performers"` + Studio mappedConfig `yaml:"Studio"` +} +type _mappedImageScraperConfig mappedImageScraperConfig + +func (s *mappedImageScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { + // HACK - unmarshal to map first, then remove known scene sub-fields, then + // remarshal to yaml and pass that down to the base map + parentMap := make(map[string]interface{}) + if err := unmarshal(parentMap); err != nil { + return err + } + + // move the known sub-fields to a separate map + thisMap := make(map[string]interface{}) + + thisMap[mappedScraperConfigSceneTags] = parentMap[mappedScraperConfigSceneTags] + thisMap[mappedScraperConfigScenePerformers] = parentMap[mappedScraperConfigScenePerformers] + thisMap[mappedScraperConfigSceneStudio] = parentMap[mappedScraperConfigSceneStudio] + + delete(parentMap, mappedScraperConfigSceneTags) + delete(parentMap, mappedScraperConfigScenePerformers) + delete(parentMap, mappedScraperConfigSceneStudio) + + // re-unmarshal the sub-fields + yml, err := yaml.Marshal(thisMap) + if err != nil { + return err + } + + // needs to be a different type to prevent infinite recursion + c := _mappedImageScraperConfig{} + if err := yaml.Unmarshal(yml, &c); err != nil { + return err + } + + *s = mappedImageScraperConfig(c) + + yml, err = yaml.Marshal(parentMap) + if err != nil { + return err + } + + if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { + return err + } 
+ + return nil +} + +type mappedPerformerScraperConfig struct { + mappedConfig + + Tags mappedConfig `yaml:"Tags"` +} +type _mappedPerformerScraperConfig mappedPerformerScraperConfig + +const ( + mappedScraperConfigPerformerTags = "Tags" +) + +func (s *mappedPerformerScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { + // HACK - unmarshal to map first, then remove known scene sub-fields, then + // remarshal to yaml and pass that down to the base map + parentMap := make(map[string]interface{}) + if err := unmarshal(parentMap); err != nil { + return err + } + + // move the known sub-fields to a separate map + thisMap := make(map[string]interface{}) + + thisMap[mappedScraperConfigPerformerTags] = parentMap[mappedScraperConfigPerformerTags] + + delete(parentMap, mappedScraperConfigPerformerTags) + + // re-unmarshal the sub-fields + yml, err := yaml.Marshal(thisMap) + if err != nil { + return err + } + + // needs to be a different type to prevent infinite recursion + c := _mappedPerformerScraperConfig{} + if err := yaml.Unmarshal(yml, &c); err != nil { + return err + } + + *s = mappedPerformerScraperConfig(c) + + yml, err = yaml.Marshal(parentMap) + if err != nil { + return err + } + + if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { + return err + } + + return nil +} + +type mappedMovieScraperConfig struct { + mappedConfig + + Studio mappedConfig `yaml:"Studio"` + Tags mappedConfig `yaml:"Tags"` +} +type _mappedMovieScraperConfig mappedMovieScraperConfig + +const ( + mappedScraperConfigMovieStudio = "Studio" + mappedScraperConfigMovieTags = "Tags" +) + +func (s *mappedMovieScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { + // HACK - unmarshal to map first, then remove known movie sub-fields, then + // remarshal to yaml and pass that down to the base map + parentMap := make(map[string]interface{}) + if err := unmarshal(parentMap); err != nil { + return err + } + + // move the known sub-fields to a separate map + 
thisMap := make(map[string]interface{}) + + thisMap[mappedScraperConfigMovieStudio] = parentMap[mappedScraperConfigMovieStudio] + delete(parentMap, mappedScraperConfigMovieStudio) + + thisMap[mappedScraperConfigMovieTags] = parentMap[mappedScraperConfigMovieTags] + delete(parentMap, mappedScraperConfigMovieTags) + + // re-unmarshal the sub-fields + yml, err := yaml.Marshal(thisMap) + if err != nil { + return err + } + + // needs to be a different type to prevent infinite recursion + c := _mappedMovieScraperConfig{} + if err := yaml.Unmarshal(yml, &c); err != nil { + return err + } + + *s = mappedMovieScraperConfig(c) + + yml, err = yaml.Marshal(parentMap) + if err != nil { + return err + } + + if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { + return err + } + + return nil +} + +type mappedScraperAttrConfig struct { + Selector string `yaml:"selector"` + Fixed string `yaml:"fixed"` + PostProcess []mappedPostProcessAction `yaml:"postProcess"` + Concat string `yaml:"concat"` + Split string `yaml:"split"` + + postProcessActions []postProcessAction + + // Deprecated: use PostProcess instead + ParseDate string `yaml:"parseDate"` + Replace mappedRegexConfigs `yaml:"replace"` + SubScraper *mappedScraperAttrConfig `yaml:"subScraper"` +} + +type _mappedScraperAttrConfig mappedScraperAttrConfig + +func (c *mappedScraperAttrConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { + // try unmarshalling into a string first + if err := unmarshal(&c.Selector); err != nil { + // if it's a type error then we try to unmarshall to the full object + var typeErr *yaml.TypeError + if !errors.As(err, &typeErr) { + return err + } + + // unmarshall to full object + // need it as a separate object + t := _mappedScraperAttrConfig{} + if err = unmarshal(&t); err != nil { + return err + } + + *c = mappedScraperAttrConfig(t) + } + + return c.convertPostProcessActions() +} + +func (c *mappedScraperAttrConfig) convertPostProcessActions() error { + // ensure we don't have the 
old deprecated fields and the new post process field + if len(c.PostProcess) > 0 { + if c.ParseDate != "" || len(c.Replace) > 0 || c.SubScraper != nil { + return errors.New("cannot include postProcess and (parseDate, replace, subScraper) deprecated fields") + } + + // convert xpathPostProcessAction actions to postProcessActions + for _, a := range c.PostProcess { + action, err := a.ToPostProcessAction() + if err != nil { + return err + } + c.postProcessActions = append(c.postProcessActions, action) + } + + c.PostProcess = nil + } else { + // convert old deprecated fields if present + // in same order as they used to be executed + if len(c.Replace) > 0 { + action := postProcessReplace(c.Replace) + c.postProcessActions = append(c.postProcessActions, &action) + c.Replace = nil + } + + if c.SubScraper != nil { + action := postProcessSubScraper(*c.SubScraper) + c.postProcessActions = append(c.postProcessActions, &action) + c.SubScraper = nil + } + + if c.ParseDate != "" { + action := postProcessParseDate(c.ParseDate) + c.postProcessActions = append(c.postProcessActions, &action) + c.ParseDate = "" + } + } + + return nil +} + +func (c mappedScraperAttrConfig) hasConcat() bool { + return c.Concat != "" +} + +func (c mappedScraperAttrConfig) hasSplit() bool { + return c.Split != "" +} + +func (c mappedScraperAttrConfig) concatenateResults(nodes []string) string { + separator := c.Concat + return strings.Join(nodes, separator) +} + +func (c mappedScraperAttrConfig) cleanResults(nodes []string) []string { + cleaned := sliceutil.Unique(nodes) // remove duplicate values + cleaned = sliceutil.Delete(cleaned, "") // remove empty values + return cleaned +} + +func (c mappedScraperAttrConfig) splitString(value string) []string { + separator := c.Split + var res []string + + if separator == "" { + return []string{value} + } + + for _, str := range strings.Split(value, separator) { + if str != "" { + res = append(res, str) + } + } + + return res +} + +func (c 
mappedScraperAttrConfig) postProcess(ctx context.Context, value string, q mappedQuery) string { + for _, action := range c.postProcessActions { + value = action.Apply(ctx, value, q) + } + + return value +} diff --git a/pkg/scraper/mapped_postprocessing.go b/pkg/scraper/mapped_postprocessing.go new file mode 100644 index 000000000..22a8b748a --- /dev/null +++ b/pkg/scraper/mapped_postprocessing.go @@ -0,0 +1,333 @@ +package scraper + +import ( + "context" + "errors" + "fmt" + "math" + "regexp" + "strconv" + "strings" + "time" + + "github.com/stashapp/stash/pkg/javascript" + "github.com/stashapp/stash/pkg/logger" +) + +type mappedRegexConfig struct { + Regex string `yaml:"regex"` + With string `yaml:"with"` +} + +type mappedRegexConfigs []mappedRegexConfig + +func (c mappedRegexConfig) apply(value string) string { + if c.Regex != "" { + re, err := regexp.Compile(c.Regex) + if err != nil { + logger.Warnf("Error compiling regex '%s': %s", c.Regex, err.Error()) + return value + } + + ret := re.ReplaceAllString(value, c.With) + + // trim leading and trailing whitespace + // this is done to maintain backwards compatibility with existing + // scrapers + ret = strings.TrimSpace(ret) + + logger.Debugf(`Replace: '%s' with '%s'`, c.Regex, c.With) + logger.Debugf("Before: %s", value) + logger.Debugf("After: %s", ret) + return ret + } + + return value +} + +func (c mappedRegexConfigs) apply(value string) string { + // apply regex in order + for _, config := range c { + value = config.apply(value) + } + + return value +} + +type postProcessAction interface { + Apply(ctx context.Context, value string, q mappedQuery) string +} + +type postProcessParseDate string + +func (p *postProcessParseDate) Apply(ctx context.Context, value string, q mappedQuery) string { + parseDate := string(*p) + + const internalDateFormat = "2006-01-02" + + valueLower := strings.ToLower(value) + if valueLower == "today" || valueLower == "yesterday" { // handle today, yesterday + dt := time.Now() + if 
valueLower == "yesterday" { // subtract 1 day from now + dt = dt.AddDate(0, 0, -1) + } + return dt.Format(internalDateFormat) + } + + if parseDate == "" { + return value + } + + if parseDate == "unix" { + // try to parse the date using unix timestamp format + // if it fails, then just fall back to the original value + timeAsInt, err := strconv.ParseInt(value, 10, 64) + if err != nil { + logger.Warnf("Error parsing date string '%s' using unix timestamp format : %s", value, err.Error()) + return value + } + parsedValue := time.Unix(timeAsInt, 0) + + return parsedValue.Format(internalDateFormat) + } + + // try to parse the date using the pattern + // if it fails, then just fall back to the original value + parsedValue, err := time.Parse(parseDate, value) + if err != nil { + logger.Warnf("Error parsing date string '%s' using format '%s': %s", value, parseDate, err.Error()) + return value + } + + // convert it into our date format + return parsedValue.Format(internalDateFormat) +} + +type postProcessSubtractDays bool + +func (p *postProcessSubtractDays) Apply(ctx context.Context, value string, q mappedQuery) string { + const internalDateFormat = "2006-01-02" + + i, err := strconv.Atoi(value) + if err != nil { + logger.Warnf("Error parsing day string %s: %s", value, err) + return value + } + + dt := time.Now() + dt = dt.AddDate(0, 0, -i) + return dt.Format(internalDateFormat) +} + +type postProcessReplace mappedRegexConfigs + +func (c *postProcessReplace) Apply(ctx context.Context, value string, q mappedQuery) string { + replace := mappedRegexConfigs(*c) + return replace.apply(value) +} + +type postProcessSubScraper mappedScraperAttrConfig + +func (p *postProcessSubScraper) Apply(ctx context.Context, value string, q mappedQuery) string { + subScrapeConfig := mappedScraperAttrConfig(*p) + + logger.Debugf("Sub-scraping for: %s", value) + ss := q.subScrape(ctx, value) + + if ss != nil { + found, err := ss.runQuery(subScrapeConfig.Selector) + if err != nil { + 
logger.Warnf("subscrape for '%v': %v", value, err) + } + + if len(found) > 0 { + // check if we're concatenating the results into a single result + var result string + if subScrapeConfig.hasConcat() { + result = subScrapeConfig.concatenateResults(found) + } else { + result = found[0] + } + + result = subScrapeConfig.postProcess(ctx, result, ss) + return result + } + } + + return "" +} + +type postProcessMap map[string]string + +func (p *postProcessMap) Apply(ctx context.Context, value string, q mappedQuery) string { + // return the mapped value if present + m := *p + mapped, ok := m[value] + + if ok { + return mapped + } + + return value +} + +type postProcessFeetToCm bool + +func (p *postProcessFeetToCm) Apply(ctx context.Context, value string, q mappedQuery) string { + const foot_in_cm = 30.48 + const inch_in_cm = 2.54 + + reg := regexp.MustCompile("[0-9]+") + filtered := reg.FindAllString(value, -1) + + var feet float64 + var inches float64 + if len(filtered) > 0 { + feet, _ = strconv.ParseFloat(filtered[0], 64) + } + if len(filtered) > 1 { + inches, _ = strconv.ParseFloat(filtered[1], 64) + } + + var centimeters = feet*foot_in_cm + inches*inch_in_cm + + // Return rounded integer string + return strconv.Itoa(int(math.Round(centimeters))) +} + +type postProcessLbToKg bool + +func (p *postProcessLbToKg) Apply(ctx context.Context, value string, q mappedQuery) string { + const lb_in_kg = 0.45359237 + w, err := strconv.ParseFloat(value, 64) + if err == nil { + w *= lb_in_kg + value = strconv.Itoa(int(math.Round(w))) + } + return value +} + +type postProcessJavascript string + +func (p *postProcessJavascript) Apply(ctx context.Context, value string, q mappedQuery) string { + vm := javascript.NewVM() + if err := vm.Set("value", value); err != nil { + logger.Warnf("javascript failed to set value: %v", err) + return value + } + + log := &javascript.Log{ + Logger: logger.Logger, + Prefix: "", + ProgressChan: make(chan float64), + } + + if err := log.AddToVM("log", vm); 
err != nil { + logger.Logger.Errorf("error adding log API: %w", err) + } + + util := &javascript.Util{} + if err := util.AddToVM("util", vm); err != nil { + logger.Logger.Errorf("error adding util API: %w", err) + } + + script, err := javascript.CompileScript("", "(function() { "+string(*p)+"})()") + if err != nil { + logger.Warnf("javascript failed to compile: %v", err) + return value + } + + output, err := vm.RunProgram(script) + if err != nil { + logger.Warnf("javascript failed to run: %v", err) + return value + } + + // assume output is string + return output.String() +} + +type mappedPostProcessAction struct { + ParseDate string `yaml:"parseDate"` + SubtractDays bool `yaml:"subtractDays"` + Replace mappedRegexConfigs `yaml:"replace"` + SubScraper *mappedScraperAttrConfig `yaml:"subScraper"` + Map map[string]string `yaml:"map"` + FeetToCm bool `yaml:"feetToCm"` + LbToKg bool `yaml:"lbToKg"` + Javascript string `yaml:"javascript"` +} + +func (a mappedPostProcessAction) ToPostProcessAction() (postProcessAction, error) { + var found string + var ret postProcessAction + + ensureOnly := func(field string) error { + if found != "" { + return fmt.Errorf("post-process actions must have a single field, found %s and %s", found, field) + } + found = field + return nil + } + + if a.ParseDate != "" { + found = "parseDate" + action := postProcessParseDate(a.ParseDate) + ret = &action + } + if len(a.Replace) > 0 { + if err := ensureOnly("replace"); err != nil { + return nil, err + } + action := postProcessReplace(a.Replace) + ret = &action + } + if a.SubScraper != nil { + if err := ensureOnly("subScraper"); err != nil { + return nil, err + } + action := postProcessSubScraper(*a.SubScraper) + ret = &action + } + if a.Map != nil { + if err := ensureOnly("map"); err != nil { + return nil, err + } + action := postProcessMap(a.Map) + ret = &action + } + if a.FeetToCm { + if err := ensureOnly("feetToCm"); err != nil { + return nil, err + } + action := 
postProcessFeetToCm(a.FeetToCm) + ret = &action + } + if a.LbToKg { + if err := ensureOnly("lbToKg"); err != nil { + return nil, err + } + action := postProcessLbToKg(a.LbToKg) + ret = &action + } + if a.SubtractDays { + if err := ensureOnly("subtractDays"); err != nil { + return nil, err + } + action := postProcessSubtractDays(a.SubtractDays) + ret = &action + } + if a.Javascript != "" { + if err := ensureOnly("javascript"); err != nil { + return nil, err + } + action := postProcessJavascript(a.Javascript) + ret = &action + } + + if ret == nil { + return nil, errors.New("invalid post-process action") + } + + return ret, nil +} diff --git a/pkg/scraper/mapped_result.go b/pkg/scraper/mapped_result.go new file mode 100644 index 000000000..64cc97ec7 --- /dev/null +++ b/pkg/scraper/mapped_result.go @@ -0,0 +1,278 @@ +package scraper + +import ( + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" +) + +type mappedResult map[string]interface{} +type mappedResults []mappedResult + +func (r mappedResult) string(key string) (string, bool) { + v, ok := r[key] + if !ok { + return "", false + } + + val, ok := v.(string) + if !ok { + logger.Errorf("String field %s is %T in mappedResult", key, r[key]) + } + + return val, true +} + +func (r mappedResult) mustString(key string) string { + v, ok := r[key] + if !ok { + logger.Errorf("Missing required string field %s in mappedResult", key) + return "" + } + + val, ok := v.(string) + if !ok { + logger.Errorf("String field %s is %T in mappedResult", key, r[key]) + } + + return val +} + +func (r mappedResult) stringPtr(key string) *string { + val, ok := r.string(key) + if !ok { + return nil + } + return &val +} + +func (r mappedResult) stringSlice(key string) []string { + v, ok := r[key] + if !ok { + return nil + } + + // need to try both []string and string + val, ok := v.([]string) + + if ok { + return val + } + + // try single string + singleVal, ok := v.(string) + if !ok { + logger.Errorf("String slice 
field %s is %T in mappedResult", key, r[key]) + return nil + } + + return []string{singleVal} +} + +func (r mappedResult) IntPtr(key string) *int { + v, ok := r[key] + if !ok { + return nil + } + + val, ok := v.(int) + if !ok { + logger.Errorf("Int field %s is %T in mappedResult", key, r[key]) + return nil + } + + return &val +} + +func (r mappedResults) setSingleValue(index int, key string, value string) mappedResults { + if index >= len(r) { + r = append(r, make(mappedResult)) + } + + logger.Debugf(`[%d][%s] = %s`, index, key, value) + r[index][key] = value + return r +} + +func (r mappedResults) setMultiValue(index int, key string, value []string) mappedResults { + if index >= len(r) { + r = append(r, make(mappedResult)) + } + + logger.Debugf(`[%d][%s] = %s`, index, key, value) + r[index][key] = value + return r +} + +func (r mappedResults) scrapedTags() []*models.ScrapedTag { + if len(r) == 0 { + return nil + } + + ret := make([]*models.ScrapedTag, len(r)) + for i, result := range r { + ret[i] = result.scrapedTag() + } + + return ret +} + +func (r mappedResult) scrapedTag() *models.ScrapedTag { + return &models.ScrapedTag{ + Name: r.mustString("Name"), + } +} + +func (r mappedResult) scrapedPerformer() *models.ScrapedPerformer { + ret := &models.ScrapedPerformer{ + Name: r.stringPtr("Name"), + Disambiguation: r.stringPtr("Disambiguation"), + Gender: r.stringPtr("Gender"), + URL: r.stringPtr("URL"), + URLs: r.stringSlice("URLs"), + Twitter: r.stringPtr("Twitter"), + Birthdate: r.stringPtr("Birthdate"), + Ethnicity: r.stringPtr("Ethnicity"), + Country: r.stringPtr("Country"), + EyeColor: r.stringPtr("EyeColor"), + Height: r.stringPtr("Height"), + Measurements: r.stringPtr("Measurements"), + FakeTits: r.stringPtr("FakeTits"), + PenisLength: r.stringPtr("PenisLength"), + Circumcised: r.stringPtr("Circumcised"), + CareerLength: r.stringPtr("CareerLength"), + CareerStart: r.stringPtr("CareerStart"), + CareerEnd: r.stringPtr("CareerEnd"), + Tattoos: 
r.stringPtr("Tattoos"), + Piercings: r.stringPtr("Piercings"), + Aliases: r.stringPtr("Aliases"), + Image: r.stringPtr("Image"), + Images: r.stringSlice("Images"), + Details: r.stringPtr("Details"), + DeathDate: r.stringPtr("DeathDate"), + HairColor: r.stringPtr("HairColor"), + Weight: r.stringPtr("Weight"), + } + return ret +} + +func (r mappedResults) scrapedPerformers() []*models.ScrapedPerformer { + if len(r) == 0 { + return nil + } + + ret := make([]*models.ScrapedPerformer, len(r)) + for i, result := range r { + ret[i] = result.scrapedPerformer() + } + + return ret +} + +func (r mappedResult) scrapedScene() *models.ScrapedScene { + ret := &models.ScrapedScene{ + Title: r.stringPtr("Title"), + Code: r.stringPtr("Code"), + Details: r.stringPtr("Details"), + Director: r.stringPtr("Director"), + URL: r.stringPtr("URL"), + URLs: r.stringSlice("URLs"), + Date: r.stringPtr("Date"), + Image: r.stringPtr("Image"), + Duration: r.IntPtr("Duration"), + } + return ret +} + +func (r mappedResult) scrapedImage() *models.ScrapedImage { + ret := &models.ScrapedImage{ + Title: r.stringPtr("Title"), + Code: r.stringPtr("Code"), + Details: r.stringPtr("Details"), + Photographer: r.stringPtr("Photographer"), + URLs: r.stringSlice("URLs"), + Date: r.stringPtr("Date"), + } + return ret +} + +func (r mappedResult) scrapedGallery() *models.ScrapedGallery { + ret := &models.ScrapedGallery{ + Title: r.stringPtr("Title"), + Code: r.stringPtr("Code"), + Details: r.stringPtr("Details"), + Photographer: r.stringPtr("Photographer"), + URL: r.stringPtr("URL"), + URLs: r.stringSlice("URLs"), + Date: r.stringPtr("Date"), + } + return ret +} + +func (r mappedResult) scrapedStudio() *models.ScrapedStudio { + ret := &models.ScrapedStudio{ + Name: r.mustString("Name"), + URL: r.stringPtr("URL"), + URLs: r.stringSlice("URLs"), + Image: r.stringPtr("Image"), + Details: r.stringPtr("Details"), + Aliases: r.stringPtr("Aliases"), + } + return ret +} + +func (r mappedResult) scrapedMovie() 
*models.ScrapedMovie { + ret := &models.ScrapedMovie{ + Name: r.stringPtr("Name"), + Aliases: r.stringPtr("Aliases"), + URLs: r.stringSlice("URLs"), + Duration: r.stringPtr("Duration"), + Date: r.stringPtr("Date"), + Director: r.stringPtr("Director"), + Synopsis: r.stringPtr("Synopsis"), + FrontImage: r.stringPtr("FrontImage"), + BackImage: r.stringPtr("BackImage"), + } + + return ret +} + +func (r mappedResult) scrapedGroup() *models.ScrapedGroup { + ret := &models.ScrapedGroup{ + Name: r.stringPtr("Name"), + Aliases: r.stringPtr("Aliases"), + URL: r.stringPtr("URL"), + URLs: r.stringSlice("URLs"), + Duration: r.stringPtr("Duration"), + Date: r.stringPtr("Date"), + Director: r.stringPtr("Director"), + Synopsis: r.stringPtr("Synopsis"), + FrontImage: r.stringPtr("FrontImage"), + BackImage: r.stringPtr("BackImage"), + } + + return ret +} + +func (r mappedResults) scrapedMovies() []*models.ScrapedMovie { + if len(r) == 0 { + return nil + } + ret := make([]*models.ScrapedMovie, len(r)) + for i, result := range r { + ret[i] = result.scrapedMovie() + } + + return ret +} + +func (r mappedResults) scrapedGroups() []*models.ScrapedGroup { + if len(r) == 0 { + return nil + } + ret := make([]*models.ScrapedGroup, len(r)) + for i, result := range r { + ret[i] = result.scrapedGroup() + } + + return ret +} diff --git a/pkg/scraper/mapped_result_test.go b/pkg/scraper/mapped_result_test.go new file mode 100644 index 000000000..db6d921bf --- /dev/null +++ b/pkg/scraper/mapped_result_test.go @@ -0,0 +1,908 @@ +package scraper + +import ( + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stretchr/testify/assert" +) + +// Test string method +func TestMappedResultString(t *testing.T) { + tests := []struct { + name string + data mappedResult + key string + expectedValue string + expectedOk bool + }{ + { + name: "valid string", + data: mappedResult{"name": "test"}, + key: "name", + expectedValue: "test", + expectedOk: true, + }, + { + name: "missing key", + data: 
mappedResult{}, + key: "missing", + expectedValue: "", + expectedOk: false, + }, + { + name: "wrong type still returns ok true but empty value", + data: mappedResult{"num": 123}, + key: "num", + expectedValue: "", + expectedOk: true, // logs error but returns ok=true + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + val, ok := test.data.string(test.key) + assert.Equal(t, test.expectedValue, val) + assert.Equal(t, test.expectedOk, ok) + }) + } +} + +// Test mustString method +func TestMappedResultMustString(t *testing.T) { + tests := []struct { + name string + data mappedResult + key string + expectedValue string + }{ + { + name: "valid string", + data: mappedResult{"name": "test"}, + key: "name", + expectedValue: "test", + }, + { + name: "missing key returns empty string", + data: mappedResult{}, + key: "missing", + expectedValue: "", + }, + { + name: "wrong type returns empty string", + data: mappedResult{"num": 123}, + key: "num", + expectedValue: "", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + val := test.data.mustString(test.key) + assert.Equal(t, test.expectedValue, val) + }) + } +} + +// Test stringPtr method +func TestMappedResultStringPtr(t *testing.T) { + tests := []struct { + name string + data mappedResult + key string + expectedValue *string + }{ + { + name: "valid string", + data: mappedResult{"name": "test"}, + key: "name", + expectedValue: strPtr("test"), + }, + { + name: "missing key returns nil", + data: mappedResult{}, + key: "missing", + expectedValue: nil, + }, + { + name: "wrong type returns non-nil pointer to empty string", + data: mappedResult{"num": 123}, + key: "num", + expectedValue: strPtr(""), // string() returns empty string but ok=true + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + val := test.data.stringPtr(test.key) + if test.expectedValue == nil { + assert.Nil(t, val) + } else { + assert.NotNil(t, val) + assert.Equal(t, 
*test.expectedValue, *val) + } + }) + } +} + +// Test stringSlice method +func TestMappedResultStringSlice(t *testing.T) { + tests := []struct { + name string + data mappedResult + key string + expectedValue []string + }{ + { + name: "valid slice", + data: mappedResult{"tags": []string{"a", "b", "c"}}, + key: "tags", + expectedValue: []string{"a", "b", "c"}, + }, + { + name: "missing key returns nil", + data: mappedResult{}, + key: "missing", + expectedValue: nil, + }, + { + name: "single value converted to slice", + data: mappedResult{"tags": "not a slice"}, + key: "tags", + expectedValue: []string{"not a slice"}, + }, + { + name: "wrong type returns nil", + data: mappedResult{"tags": 123}, + key: "tags", + expectedValue: nil, + }, + { + name: "empty slice", + data: mappedResult{"tags": []string{}}, + key: "tags", + expectedValue: []string{}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + val := test.data.stringSlice(test.key) + assert.Equal(t, test.expectedValue, val) + }) + } +} + +// Test IntPtr method +func TestMappedResultIntPtr(t *testing.T) { + tests := []struct { + name string + data mappedResult + key string + expectedValue *int + }{ + { + name: "valid int", + data: mappedResult{"duration": 120}, + key: "duration", + expectedValue: intPtr(120), + }, + { + name: "missing key returns nil", + data: mappedResult{}, + key: "missing", + expectedValue: nil, + }, + { + name: "wrong type returns nil", + data: mappedResult{"duration": "120"}, + key: "duration", + expectedValue: nil, + }, + { + name: "zero value", + data: mappedResult{"duration": 0}, + key: "duration", + expectedValue: intPtr(0), + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + val := test.data.IntPtr(test.key) + assert.Equal(t, test.expectedValue, val) + }) + } +} + +// Test setSingleValue method +func TestMappedResultsSetSingleValue(t *testing.T) { + tests := []struct { + name string + initialResults mappedResults + index int 
+ key string + value string + expectedLen int + shouldPanic bool + }{ + { + name: "append to empty", + initialResults: mappedResults{}, + index: 0, + key: "name", + value: "test", + expectedLen: 1, + shouldPanic: false, + }, + { + name: "set in existing", + initialResults: mappedResults{mappedResult{}}, + index: 0, + key: "name", + value: "test", + expectedLen: 1, + shouldPanic: false, + }, + { + name: "append to existing", + initialResults: mappedResults{mappedResult{}}, + index: 1, + key: "name", + value: "test", + expectedLen: 2, + shouldPanic: false, + }, + { + name: "sparse index causes panic", + initialResults: mappedResults{mappedResult{}}, + index: 5, + key: "name", + value: "test", + expectedLen: 6, + shouldPanic: true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + if test.shouldPanic { + assert.Panics(t, func() { + test.initialResults.setSingleValue(test.index, test.key, test.value) + }) + } else { + results := test.initialResults.setSingleValue(test.index, test.key, test.value) + assert.Equal(t, test.expectedLen, len(results)) + assert.Equal(t, test.value, results[test.index][test.key]) + } + }) + } +} + +// Test setMultiValue method +func TestMappedResultsSetMultiValue(t *testing.T) { + tests := []struct { + name string + initialResults mappedResults + index int + key string + value []string + expectedLen int + }{ + { + name: "append to empty", + initialResults: mappedResults{}, + index: 0, + key: "tags", + value: []string{"a", "b"}, + expectedLen: 1, + }, + { + name: "set in existing", + initialResults: mappedResults{mappedResult{}}, + index: 0, + key: "tags", + value: []string{"a", "b"}, + expectedLen: 1, + }, + { + name: "append to existing", + initialResults: mappedResults{mappedResult{}}, + index: 1, + key: "tags", + value: []string{"x", "y"}, + expectedLen: 2, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + results := test.initialResults.setMultiValue(test.index, test.key, 
test.value) + assert.Equal(t, test.expectedLen, len(results)) + assert.Equal(t, test.value, results[test.index][test.key]) + }) + } +} + +// Test scrapedTag method +func TestMappedResultScrapedTag(t *testing.T) { + tests := []struct { + name string + data mappedResult + expectedName string + }{ + { + name: "valid tag", + data: mappedResult{"Name": "Action"}, + expectedName: "Action", + }, + { + name: "missing name", + data: mappedResult{}, + expectedName: "", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + tag := test.data.scrapedTag() + assert.NotNil(t, tag) + assert.Equal(t, test.expectedName, tag.Name) + }) + } +} + +// Test scrapedTags method +func TestMappedResultsScrapedTags(t *testing.T) { + tests := []struct { + name string + data mappedResults + expectedCount int + expectedNames []string + }{ + { + name: "empty results", + data: mappedResults{}, + expectedCount: 0, + }, + { + name: "single tag", + data: mappedResults{ + mappedResult{"Name": "Action"}, + }, + expectedCount: 1, + expectedNames: []string{"Action"}, + }, + { + name: "multiple tags", + data: mappedResults{ + mappedResult{"Name": "Action"}, + mappedResult{"Name": "Drama"}, + mappedResult{"Name": "Comedy"}, + }, + expectedCount: 3, + expectedNames: []string{"Action", "Drama", "Comedy"}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + tags := test.data.scrapedTags() + if test.expectedCount == 0 { + assert.Nil(t, tags) + } else { + assert.NotNil(t, tags) + assert.Equal(t, test.expectedCount, len(tags)) + for i, expectedName := range test.expectedNames { + assert.Equal(t, expectedName, tags[i].Name) + } + } + }) + } +} + +// Test scrapedPerformer method +func TestMappedResultScrapedPerformer(t *testing.T) { + tests := []struct { + name string + data mappedResult + validate func(t *testing.T, p *models.ScrapedPerformer) + }{ + { + name: "full performer", + data: mappedResult{ + "Name": "Jane Doe", + "Disambiguation": "Actress", + 
"Gender": "Female", + "URL": "https://example.com/jane", + "URLs": []string{"url1", "url2"}, + "Twitter": "@jane", + "Birthdate": "1990-01-01", + "Ethnicity": "Caucasian", + "Country": "USA", + "EyeColor": "Blue", + "Height": "5'6\"", + "Measurements": "36-24-36", + "FakeTits": "No", + "PenisLength": "N/A", + "Circumcised": "N/A", + "CareerLength": "10 years", + "Tattoos": "Yes", + "Piercings": "Yes", + "Aliases": "Jane Smith", + "Image": "image.jpg", + "Images": []string{"img1", "img2"}, + "Details": "Some details", + "DeathDate": "N/A", + "HairColor": "Blonde", + "Weight": "130 lbs", + }, + validate: func(t *testing.T, p *models.ScrapedPerformer) { + assert.NotNil(t, p) + assert.Equal(t, "Jane Doe", *p.Name) + assert.Equal(t, "Actress", *p.Disambiguation) + assert.Equal(t, "Female", *p.Gender) + assert.Equal(t, "https://example.com/jane", *p.URL) + assert.Equal(t, []string{"url1", "url2"}, p.URLs) + assert.Equal(t, "@jane", *p.Twitter) + assert.Equal(t, "Blonde", *p.HairColor) + assert.Equal(t, "130 lbs", *p.Weight) + }, + }, + { + name: "minimal performer", + data: mappedResult{}, + validate: func(t *testing.T, p *models.ScrapedPerformer) { + assert.NotNil(t, p) + assert.Nil(t, p.Name) + assert.Nil(t, p.Gender) + assert.Empty(t, p.URLs) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + performer := test.data.scrapedPerformer() + test.validate(t, performer) + }) + } +} + +// Test scrapedPerformers method +func TestMappedResultsScrapedPerformers(t *testing.T) { + tests := []struct { + name string + data mappedResults + expectedCount int + }{ + { + name: "empty results", + data: mappedResults{}, + expectedCount: 0, + }, + { + name: "single performer", + data: mappedResults{ + mappedResult{"Name": "Jane Doe"}, + }, + expectedCount: 1, + }, + { + name: "multiple performers", + data: mappedResults{ + mappedResult{"Name": "Jane Doe"}, + mappedResult{"Name": "John Doe"}, + mappedResult{"Name": "Alice"}, + }, + expectedCount: 3, + 
}, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + performers := test.data.scrapedPerformers() + if test.expectedCount == 0 { + assert.Nil(t, performers) + } else { + assert.NotNil(t, performers) + assert.Equal(t, test.expectedCount, len(performers)) + } + }) + } +} + +// Test scrapedScene method +func TestMappedResultScrapedScene(t *testing.T) { + tests := []struct { + name string + data mappedResult + validate func(t *testing.T, s *models.ScrapedScene) + }{ + { + name: "full scene", + data: mappedResult{ + "Title": "Scene Title", + "Code": "CODE123", + "Details": "Scene details", + "Director": "John Smith", + "URL": "https://example.com/scene", + "URLs": []string{"url1", "url2"}, + "Date": "2020-01-01", + "Image": "scene.jpg", + "Duration": 3600, + }, + validate: func(t *testing.T, s *models.ScrapedScene) { + assert.NotNil(t, s) + assert.Equal(t, "Scene Title", *s.Title) + assert.Equal(t, "CODE123", *s.Code) + assert.Equal(t, "Scene details", *s.Details) + assert.Equal(t, "John Smith", *s.Director) + assert.Equal(t, "https://example.com/scene", *s.URL) + assert.Equal(t, []string{"url1", "url2"}, s.URLs) + assert.Equal(t, "2020-01-01", *s.Date) + assert.Equal(t, "scene.jpg", *s.Image) + assert.Equal(t, 3600, *s.Duration) + }, + }, + { + name: "minimal scene", + data: mappedResult{}, + validate: func(t *testing.T, s *models.ScrapedScene) { + assert.NotNil(t, s) + assert.Nil(t, s.Title) + assert.Nil(t, s.Duration) + assert.Empty(t, s.URLs) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + scene := test.data.scrapedScene() + test.validate(t, scene) + }) + } +} + +// Test scrapedImage method +func TestMappedResultScrapedImage(t *testing.T) { + tests := []struct { + name string + data mappedResult + validate func(t *testing.T, i *models.ScrapedImage) + }{ + { + name: "full image", + data: mappedResult{ + "Title": "Image Title", + "Code": "IMG123", + "Details": "Image details", + "Photographer": "Jane 
Photographer", + "URLs": []string{"url1", "url2"}, + "Date": "2020-06-15", + }, + validate: func(t *testing.T, i *models.ScrapedImage) { + assert.NotNil(t, i) + assert.Equal(t, "Image Title", *i.Title) + assert.Equal(t, "IMG123", *i.Code) + assert.Equal(t, "Image details", *i.Details) + assert.Equal(t, "Jane Photographer", *i.Photographer) + assert.Equal(t, []string{"url1", "url2"}, i.URLs) + assert.Equal(t, "2020-06-15", *i.Date) + }, + }, + { + name: "minimal image", + data: mappedResult{}, + validate: func(t *testing.T, i *models.ScrapedImage) { + assert.NotNil(t, i) + assert.Nil(t, i.Title) + assert.Empty(t, i.URLs) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + image := test.data.scrapedImage() + test.validate(t, image) + }) + } +} + +// Test scrapedGallery method +func TestMappedResultScrapedGallery(t *testing.T) { + tests := []struct { + name string + data mappedResult + validate func(t *testing.T, g *models.ScrapedGallery) + }{ + { + name: "full gallery", + data: mappedResult{ + "Title": "Gallery Title", + "Code": "GAL123", + "Details": "Gallery details", + "Photographer": "Jane Photographer", + "URL": "https://example.com/gallery", + "URLs": []string{"url1", "url2"}, + "Date": "2020-07-20", + }, + validate: func(t *testing.T, g *models.ScrapedGallery) { + assert.NotNil(t, g) + assert.Equal(t, "Gallery Title", *g.Title) + assert.Equal(t, "GAL123", *g.Code) + assert.Equal(t, "Gallery details", *g.Details) + assert.Equal(t, "Jane Photographer", *g.Photographer) + assert.Equal(t, "https://example.com/gallery", *g.URL) + assert.Equal(t, []string{"url1", "url2"}, g.URLs) + assert.Equal(t, "2020-07-20", *g.Date) + }, + }, + { + name: "minimal gallery", + data: mappedResult{}, + validate: func(t *testing.T, g *models.ScrapedGallery) { + assert.NotNil(t, g) + assert.Nil(t, g.Title) + assert.Empty(t, g.URLs) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + gallery := 
test.data.scrapedGallery() + test.validate(t, gallery) + }) + } +} + +// Test scrapedStudio method +func TestMappedResultScrapedStudio(t *testing.T) { + tests := []struct { + name string + data mappedResult + validate func(t *testing.T, st *models.ScrapedStudio) + }{ + { + name: "full studio", + data: mappedResult{ + "Name": "Studio Name", + "URL": "https://example.com/studio", + "URLs": []string{"url1", "url2"}, + "Image": "studio.jpg", + "Details": "Studio details", + "Aliases": "Studio Alias", + }, + validate: func(t *testing.T, st *models.ScrapedStudio) { + assert.NotNil(t, st) + assert.Equal(t, "Studio Name", st.Name) + assert.Equal(t, "https://example.com/studio", *st.URL) + assert.Equal(t, []string{"url1", "url2"}, st.URLs) + assert.Equal(t, "studio.jpg", *st.Image) + assert.Equal(t, "Studio details", *st.Details) + assert.Equal(t, "Studio Alias", *st.Aliases) + }, + }, + { + name: "minimal studio", + data: mappedResult{}, + validate: func(t *testing.T, st *models.ScrapedStudio) { + assert.NotNil(t, st) + assert.Equal(t, "", st.Name) // mustString returns empty string + assert.Nil(t, st.URL) + assert.Empty(t, st.URLs) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + studio := test.data.scrapedStudio() + test.validate(t, studio) + }) + } +} + +// Test scrapedMovie method +func TestMappedResultScrapedMovie(t *testing.T) { + tests := []struct { + name string + data mappedResult + validate func(t *testing.T, m *models.ScrapedMovie) + }{ + { + name: "full movie", + data: mappedResult{ + "Name": "Movie Title", + "Aliases": "Movie Alias", + "URLs": []string{"url1", "url2"}, + "Duration": "120 minutes", + "Date": "2020-05-10", + "Director": "John Director", + "Synopsis": "Movie synopsis", + "FrontImage": "front.jpg", + "BackImage": "back.jpg", + }, + validate: func(t *testing.T, m *models.ScrapedMovie) { + assert.NotNil(t, m) + assert.Equal(t, "Movie Title", *m.Name) + assert.Equal(t, "Movie Alias", *m.Aliases) + 
assert.Equal(t, []string{"url1", "url2"}, m.URLs) + assert.Equal(t, "120 minutes", *m.Duration) + assert.Equal(t, "2020-05-10", *m.Date) + assert.Equal(t, "John Director", *m.Director) + assert.Equal(t, "Movie synopsis", *m.Synopsis) + assert.Equal(t, "front.jpg", *m.FrontImage) + assert.Equal(t, "back.jpg", *m.BackImage) + }, + }, + { + name: "minimal movie", + data: mappedResult{}, + validate: func(t *testing.T, m *models.ScrapedMovie) { + assert.NotNil(t, m) + assert.Nil(t, m.Name) + assert.Empty(t, m.URLs) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + movie := test.data.scrapedMovie() + test.validate(t, movie) + }) + } +} + +// Test scrapedMovies method +func TestMappedResultsScrapedMovies(t *testing.T) { + tests := []struct { + name string + data mappedResults + expectedCount int + }{ + { + name: "empty results", + data: mappedResults{}, + expectedCount: 0, + }, + { + name: "single movie", + data: mappedResults{ + mappedResult{"Name": "Movie 1"}, + }, + expectedCount: 1, + }, + { + name: "multiple movies", + data: mappedResults{ + mappedResult{"Name": "Movie 1"}, + mappedResult{"Name": "Movie 2"}, + mappedResult{"Name": "Movie 3"}, + }, + expectedCount: 3, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + movies := test.data.scrapedMovies() + if test.expectedCount == 0 { + assert.Nil(t, movies) + } else { + assert.NotNil(t, movies) + assert.Equal(t, test.expectedCount, len(movies)) + } + }) + } +} + +// Test scrapedGroup method +func TestMappedResultScrapedGroup(t *testing.T) { + tests := []struct { + name string + data mappedResult + validate func(t *testing.T, g *models.ScrapedGroup) + }{ + { + name: "full group", + data: mappedResult{ + "Name": "Group Title", + "Aliases": "Group Alias", + "URL": "https://example.com/group", + "URLs": []string{"url1", "url2"}, + "Duration": "240 minutes", + "Date": "2020-08-15", + "Director": "Jane Director", + "Synopsis": "Group synopsis", + 
"FrontImage": "front.jpg", + "BackImage": "back.jpg", + }, + validate: func(t *testing.T, g *models.ScrapedGroup) { + assert.NotNil(t, g) + assert.Equal(t, "Group Title", *g.Name) + assert.Equal(t, "Group Alias", *g.Aliases) + assert.Equal(t, "https://example.com/group", *g.URL) + assert.Equal(t, []string{"url1", "url2"}, g.URLs) + assert.Equal(t, "240 minutes", *g.Duration) + assert.Equal(t, "2020-08-15", *g.Date) + assert.Equal(t, "Jane Director", *g.Director) + assert.Equal(t, "Group synopsis", *g.Synopsis) + assert.Equal(t, "front.jpg", *g.FrontImage) + assert.Equal(t, "back.jpg", *g.BackImage) + }, + }, + { + name: "minimal group", + data: mappedResult{}, + validate: func(t *testing.T, g *models.ScrapedGroup) { + assert.NotNil(t, g) + assert.Nil(t, g.Name) + assert.Empty(t, g.URLs) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + group := test.data.scrapedGroup() + test.validate(t, group) + }) + } +} + +// Test scrapedGroups method +func TestMappedResultsScrapedGroups(t *testing.T) { + tests := []struct { + name string + data mappedResults + expectedCount int + }{ + { + name: "empty results", + data: mappedResults{}, + expectedCount: 0, + }, + { + name: "single group", + data: mappedResults{ + mappedResult{"Name": "Group 1"}, + }, + expectedCount: 1, + }, + { + name: "multiple groups", + data: mappedResults{ + mappedResult{"Name": "Group 1"}, + mappedResult{"Name": "Group 2"}, + mappedResult{"Name": "Group 3"}, + }, + expectedCount: 3, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + groups := test.data.scrapedGroups() + if test.expectedCount == 0 { + assert.Nil(t, groups) + } else { + assert.NotNil(t, groups) + assert.Equal(t, test.expectedCount, len(groups)) + } + }) + } +} + +// Helper functions +func strPtr(s string) *string { + return &s +} + +func intPtr(i int) *int { + return &i +} diff --git a/pkg/scraper/mapped_test.go b/pkg/scraper/mapped_test.go index 5f44e17af..667bb8385 
100644 --- a/pkg/scraper/mapped_test.go +++ b/pkg/scraper/mapped_test.go @@ -25,7 +25,7 @@ xPathScrapers: - anything ` - c := &config{} + c := &Definition{} err := yaml.Unmarshal([]byte(yamlStr), &c) if err == nil { diff --git a/pkg/scraper/performer.go b/pkg/scraper/performer.go index 98e931762..e05240453 100644 --- a/pkg/scraper/performer.go +++ b/pkg/scraper/performer.go @@ -20,6 +20,8 @@ type ScrapedPerformerInput struct { PenisLength *string `json:"penis_length"` Circumcised *string `json:"circumcised"` CareerLength *string `json:"career_length"` + CareerStart *string `json:"career_start"` + CareerEnd *string `json:"career_end"` Tattoos *string `json:"tattoos"` Piercings *string `json:"piercings"` Aliases *string `json:"aliases"` diff --git a/pkg/scraper/post_processing_test.go b/pkg/scraper/post_processing_test.go new file mode 100644 index 000000000..2eb9385e1 --- /dev/null +++ b/pkg/scraper/post_processing_test.go @@ -0,0 +1,144 @@ +package scraper + +import ( + "context" + "testing" + + "github.com/stashapp/stash/pkg/models" +) + +func TestPostScrapePerformerCareerLength(t *testing.T) { + ctx := context.Background() + const related = false + + strPtr := func(s string) *string { + return &s + } + + tests := []struct { + name string + input models.ScrapedPerformer + want models.ScrapedPerformer + }{ + { + "start = 2000", + models.ScrapedPerformer{ + CareerStart: strPtr("2000"), + }, + models.ScrapedPerformer{ + CareerStart: strPtr("2000"), + CareerLength: strPtr("2000 -"), + }, + }, + { + "end = 2000", + models.ScrapedPerformer{ + CareerEnd: strPtr("2000"), + }, + models.ScrapedPerformer{ + CareerEnd: strPtr("2000"), + CareerLength: strPtr("- 2000"), + }, + }, + { + "start = 2000, end = 2020", + models.ScrapedPerformer{ + CareerStart: strPtr("2000"), + CareerEnd: strPtr("2020"), + }, + models.ScrapedPerformer{ + CareerStart: strPtr("2000"), + CareerEnd: strPtr("2020"), + CareerLength: strPtr("2000 - 2020"), + }, + }, + { + "length = 2000 -", + 
models.ScrapedPerformer{ + CareerLength: strPtr("2000 -"), + }, + models.ScrapedPerformer{ + CareerStart: strPtr("2000"), + CareerLength: strPtr("2000 -"), + }, + }, + { + "length = - 2010", + models.ScrapedPerformer{ + CareerLength: strPtr("- 2010"), + }, + models.ScrapedPerformer{ + CareerEnd: strPtr("2010"), + CareerLength: strPtr("- 2010"), + }, + }, + { + "length = 2000 - 2010", + models.ScrapedPerformer{ + CareerLength: strPtr("2000 - 2010"), + }, + models.ScrapedPerformer{ + CareerStart: strPtr("2000"), + CareerEnd: strPtr("2010"), + CareerLength: strPtr("2000 - 2010"), + }, + }, + { + "invalid start", + models.ScrapedPerformer{ + CareerStart: strPtr("two thousand"), + }, + models.ScrapedPerformer{ + CareerStart: strPtr("two thousand"), + }, + }, + { + "invalid end", + models.ScrapedPerformer{ + CareerEnd: strPtr("two thousand"), + }, + models.ScrapedPerformer{ + CareerEnd: strPtr("two thousand"), + }, + }, + { + "invalid career length", + models.ScrapedPerformer{ + CareerLength: strPtr("1234 - 4567 - 9224"), + }, + models.ScrapedPerformer{ + CareerLength: strPtr("1234 - 4567 - 9224"), + }, + }, + } + + compareStrPtr := func(a, b *string) bool { + if a == b { + return true + } + if a == nil || b == nil { + return false + } + return *a == *b + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := &postScraper{} + got, err := c.postScrapePerformer(ctx, tt.input, related) + if err != nil { + t.Fatalf("postScrapePerformer returned error: %v", err) + } + postScraped := got.(models.ScrapedPerformer) + if !compareStrPtr(postScraped.CareerStart, tt.want.CareerStart) { + t.Errorf("CareerStart = %v, want %v", postScraped.CareerStart, tt.want.CareerStart) + } + if !compareStrPtr(postScraped.CareerEnd, tt.want.CareerEnd) { + t.Errorf("CareerEnd = %v, want %v", postScraped.CareerEnd, tt.want.CareerEnd) + } + if !compareStrPtr(postScraped.CareerLength, tt.want.CareerLength) { + t.Errorf("CareerLength = %v, want %v", postScraped.CareerLength, 
tt.want.CareerLength) + } + }) + } +} diff --git a/pkg/scraper/postprocessing.go b/pkg/scraper/postprocessing.go index c2653743a..4b8f7e022 100644 --- a/pkg/scraper/postprocessing.go +++ b/pkg/scraper/postprocessing.go @@ -125,9 +125,64 @@ func (c *postScraper) postScrapePerformer(ctx context.Context, p models.ScrapedP } } + c.postProcessCareerLength(&p) + return p, nil } +func (c *postScraper) postProcessCareerLength(p *models.ScrapedPerformer) { + isEmptyStr := func(s *string) bool { return s == nil || *s == "" } + + // populate career start/end from career length and vice versa + if !isEmptyStr(p.CareerLength) && isEmptyStr(p.CareerStart) && isEmptyStr(p.CareerEnd) { + start, end, err := models.ParseYearRangeString(*p.CareerLength) + if err != nil { + logger.Warnf("Could not parse career length %s: %v", *p.CareerLength, err) + return + } + + if start != nil { + startStr := start.String() + p.CareerStart = &startStr + } + if end != nil { + endStr := end.String() + p.CareerEnd = &endStr + } + + return + } + + // populate career length from career start/end if career length is missing + if isEmptyStr(p.CareerLength) { + var ( + start *models.Date + end *models.Date + ) + + if !isEmptyStr(p.CareerStart) { + date, err := models.ParseDate(*p.CareerStart) + if err != nil { + logger.Warnf("Could not parse career start %s: %v", *p.CareerStart, err) + return + } + start = &date + } + + if !isEmptyStr(p.CareerEnd) { + date, err := models.ParseDate(*p.CareerEnd) + if err != nil { + logger.Warnf("Could not parse career end %s: %v", *p.CareerEnd, err) + return + } + end = &date + } + + v := models.FormatYearRange(start, end) + p.CareerLength = &v + } +} + func (c *postScraper) postScrapeMovie(ctx context.Context, m models.ScrapedMovie, related bool) (_ ScrapedContent, err error) { r := c.repository tqb := r.TagFinder diff --git a/pkg/scraper/query_url.go b/pkg/scraper/query_url.go index 91adb7d67..7fe874947 100644 --- a/pkg/scraper/query_url.go +++ b/pkg/scraper/query_url.go 
@@ -17,6 +17,12 @@ func queryURLParametersFromScene(scene *models.Scene) queryURLParameters { ret["oshash"] = scene.OSHash ret["filename"] = filepath.Base(scene.Path) + // pull phash from primary file + phashFingerprints := scene.Files.Primary().Base().Fingerprints.Filter(models.FingerprintTypePhash) + if len(phashFingerprints) > 0 { + ret["phash"] = phashFingerprints[0].Value() + } + if scene.Title != "" { ret["title"] = scene.Title } @@ -110,7 +116,7 @@ func (p queryURLParameters) constructURL(url string) string { } // replaceURL does a partial URL Replace ( only url parameter is used) -func replaceURL(url string, scraperConfig scraperTypeConfig) string { +func replaceURL(url string, scraperConfig ByURLDefinition) string { u := url queryURL := queryURLParameterFromURL(u) if scraperConfig.QueryURLReplacements != nil { diff --git a/pkg/scraper/script.go b/pkg/scraper/script.go index 866c92365..f8e47b5d8 100644 --- a/pkg/scraper/script.go +++ b/pkg/scraper/script.go @@ -208,22 +208,11 @@ func galleryInputFromGallery(gallery *models.Gallery) galleryInput { var ErrScraperScript = errors.New("scraper script error") type scriptScraper struct { - scraper scraperTypeConfig - config config + definition Definition globalConfig GlobalConfig } -func newScriptScraper(scraper scraperTypeConfig, config config, globalConfig GlobalConfig) *scriptScraper { - return &scriptScraper{ - scraper: scraper, - config: config, - globalConfig: globalConfig, - } -} - -func (s *scriptScraper) runScraperScript(ctx context.Context, inString string, out interface{}) error { - command := s.scraper.Script - +func (s *scriptScraper) runScraperScript(ctx context.Context, command []string, inString string, out interface{}) error { var cmd *exec.Cmd if python.IsPythonCommand(command[0]) { pythonPath := s.globalConfig.GetPythonPath() @@ -233,7 +222,7 @@ func (s *scriptScraper) runScraperScript(ctx context.Context, inString string, o logger.Warnf("%s", err) } else { cmd = p.Command(ctx, command[1:]) - 
envVariable, _ := filepath.Abs(filepath.Dir(filepath.Dir(s.config.path))) + envVariable, _ := filepath.Abs(filepath.Dir(filepath.Dir(s.definition.path))) python.AppendPythonPath(cmd, envVariable) } } @@ -243,7 +232,7 @@ func (s *scriptScraper) runScraperScript(ctx context.Context, inString string, o cmd = stashExec.CommandContext(ctx, command[0], command[1:]...) } - cmd.Dir = filepath.Dir(s.config.path) + cmd.Dir = filepath.Dir(s.definition.path) stdin, err := cmd.StdinPipe() if err != nil { @@ -273,7 +262,7 @@ func (s *scriptScraper) runScraperScript(ctx context.Context, inString string, o return errors.New("error running scraper script") } - go handleScraperStderr(s.config.Name, stderr) + go handleScraperStderr(s.definition.Name, stderr) logger.Debugf("Scraper script <%s> started", strings.Join(cmd.Args, " ")) @@ -312,7 +301,39 @@ func (s *scriptScraper) runScraperScript(ctx context.Context, inString string, o return nil } -func (s *scriptScraper) scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error) { +func (s *scriptScraper) scrape(ctx context.Context, command []string, input string, ty ScrapeContentType) (ScrapedContent, error) { + switch ty { + case ScrapeContentTypePerformer: + var performer *models.ScrapedPerformer + err := s.runScraperScript(ctx, command, input, &performer) + return performer, err + case ScrapeContentTypeGallery: + var gallery *models.ScrapedGallery + err := s.runScraperScript(ctx, command, input, &gallery) + return gallery, err + case ScrapeContentTypeScene: + var scene *models.ScrapedScene + err := s.runScraperScript(ctx, command, input, &scene) + return scene, err + case ScrapeContentTypeMovie, ScrapeContentTypeGroup: + var movie *models.ScrapedMovie + err := s.runScraperScript(ctx, command, input, &movie) + return movie, err + case ScrapeContentTypeImage: + var image *models.ScrapedImage + err := s.runScraperScript(ctx, command, input, &image) + return image, err + } + + return nil, 
ErrNotSupported +} + +type scriptNameScraper struct { + scriptScraper + definition ByNameDefinition +} + +func (s *scriptNameScraper) scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error) { input := `{"name": "` + name + `"}` var ret []ScrapedContent @@ -320,7 +341,7 @@ func (s *scriptScraper) scrapeByName(ctx context.Context, name string, ty Scrape switch ty { case ScrapeContentTypePerformer: var performers []models.ScrapedPerformer - err = s.runScraperScript(ctx, input, &performers) + err = s.runScraperScript(ctx, s.definition.Script, input, &performers) if err == nil { for _, p := range performers { v := p @@ -329,7 +350,7 @@ func (s *scriptScraper) scrapeByName(ctx context.Context, name string, ty Scrape } case ScrapeContentTypeScene: var scenes []models.ScrapedScene - err = s.runScraperScript(ctx, input, &scenes) + err = s.runScraperScript(ctx, s.definition.Script, input, &scenes) if err == nil { for _, s := range scenes { v := s @@ -343,7 +364,21 @@ func (s *scriptScraper) scrapeByName(ctx context.Context, name string, ty Scrape return ret, err } -func (s *scriptScraper) scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) { +type scriptURLScraper struct { + scriptScraper + definition ByURLDefinition +} + +func (s *scriptURLScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error) { + return s.scrape(ctx, s.definition.Script, `{"url": "`+url+`"}`, ty) +} + +type scriptFragmentScraper struct { + scriptScraper + definition ByFragmentDefinition +} + +func (s *scriptFragmentScraper) scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) { var inString []byte var err error var ty ScrapeContentType @@ -363,41 +398,10 @@ func (s *scriptScraper) scrapeByFragment(ctx context.Context, input Input) (Scra return nil, err } - return s.scrape(ctx, string(inString), ty) + return s.scrape(ctx, s.definition.Script, string(inString), ty) } -func (s 
*scriptScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error) { - return s.scrape(ctx, `{"url": "`+url+`"}`, ty) -} - -func (s *scriptScraper) scrape(ctx context.Context, input string, ty ScrapeContentType) (ScrapedContent, error) { - switch ty { - case ScrapeContentTypePerformer: - var performer *models.ScrapedPerformer - err := s.runScraperScript(ctx, input, &performer) - return performer, err - case ScrapeContentTypeGallery: - var gallery *models.ScrapedGallery - err := s.runScraperScript(ctx, input, &gallery) - return gallery, err - case ScrapeContentTypeScene: - var scene *models.ScrapedScene - err := s.runScraperScript(ctx, input, &scene) - return scene, err - case ScrapeContentTypeMovie, ScrapeContentTypeGroup: - var movie *models.ScrapedMovie - err := s.runScraperScript(ctx, input, &movie) - return movie, err - case ScrapeContentTypeImage: - var image *models.ScrapedImage - err := s.runScraperScript(ctx, input, &image) - return image, err - } - - return nil, ErrNotSupported -} - -func (s *scriptScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) { +func (s *scriptFragmentScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) { inString, err := json.Marshal(sceneInputFromScene(scene)) if err != nil { @@ -406,12 +410,12 @@ func (s *scriptScraper) scrapeSceneByScene(ctx context.Context, scene *models.Sc var ret *models.ScrapedScene - err = s.runScraperScript(ctx, string(inString), &ret) + err = s.runScraperScript(ctx, s.definition.Script, string(inString), &ret) return ret, err } -func (s *scriptScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) { +func (s *scriptFragmentScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) { inString, err := json.Marshal(galleryInputFromGallery(gallery)) if err != nil { @@ 
-420,12 +424,12 @@ func (s *scriptScraper) scrapeGalleryByGallery(ctx context.Context, gallery *mod var ret *models.ScrapedGallery - err = s.runScraperScript(ctx, string(inString), &ret) + err = s.runScraperScript(ctx, s.definition.Script, string(inString), &ret) return ret, err } -func (s *scriptScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error) { +func (s *scriptFragmentScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error) { inString, err := json.Marshal(imageToUpdateInput(image)) if err != nil { @@ -434,7 +438,7 @@ func (s *scriptScraper) scrapeImageByImage(ctx context.Context, image *models.Im var ret *models.ScrapedImage - err = s.runScraperScript(ctx, string(inString), &ret) + err = s.runScraperScript(ctx, s.definition.Script, string(inString), &ret) return ret, err } diff --git a/pkg/scraper/stash.go b/pkg/scraper/stash.go index 5c5cab9fc..23c4b9063 100644 --- a/pkg/scraper/stash.go +++ b/pkg/scraper/stash.go @@ -14,15 +14,13 @@ import ( ) type stashScraper struct { - scraper scraperTypeConfig - config config + config Definition globalConfig GlobalConfig client *http.Client } -func newStashScraper(scraper scraperTypeConfig, client *http.Client, config config, globalConfig GlobalConfig) *stashScraper { +func newStashScraper(client *http.Client, config Definition, globalConfig GlobalConfig) *stashScraper { return &stashScraper{ - scraper: scraper, config: config, client: client, globalConfig: globalConfig, diff --git a/pkg/scraper/url.go b/pkg/scraper/url.go index b53d7b27f..d036ae68e 100644 --- a/pkg/scraper/url.go +++ b/pkg/scraper/url.go @@ -25,8 +25,8 @@ import ( const scrapeDefaultSleep = time.Second * 2 -func loadURL(ctx context.Context, loadURL string, client *http.Client, scraperConfig config, globalConfig GlobalConfig) (io.Reader, error) { - driverOptions := scraperConfig.DriverOptions +func loadURL(ctx context.Context, loadURL string, client *http.Client, 
def Definition, globalConfig GlobalConfig) (io.Reader, error) { + driverOptions := def.DriverOptions if driverOptions != nil && driverOptions.UseCDP { // get the page using chrome dp return urlFromCDP(ctx, loadURL, *driverOptions, globalConfig) @@ -37,7 +37,7 @@ func loadURL(ctx context.Context, loadURL string, client *http.Client, scraperCo return nil, err } - jar, err := scraperConfig.jar() + jar, err := def.jar() if err != nil { return nil, fmt.Errorf("error creating cookie jar: %w", err) } @@ -83,7 +83,7 @@ func loadURL(ctx context.Context, loadURL string, client *http.Client, scraperCo } bodyReader := bytes.NewReader(body) - printCookies(jar, scraperConfig, "Jar cookies found for scraper urls") + printCookies(jar, def, "Jar cookies found for scraper urls") return charset.NewReader(bodyReader, resp.Header.Get("Content-Type")) } diff --git a/pkg/scraper/xpath.go b/pkg/scraper/xpath.go index 5f7b76372..bf70869e8 100644 --- a/pkg/scraper/xpath.go +++ b/pkg/scraper/xpath.go @@ -3,7 +3,6 @@ package scraper import ( "bytes" "context" - "errors" "fmt" "net/http" "net/url" @@ -19,49 +18,36 @@ import ( ) type xpathScraper struct { - scraper scraperTypeConfig - config config + definition Definition globalConfig GlobalConfig client *http.Client } -func newXpathScraper(scraper scraperTypeConfig, client *http.Client, config config, globalConfig GlobalConfig) *xpathScraper { - return &xpathScraper{ - scraper: scraper, - config: config, - globalConfig: globalConfig, - client: client, +func (s *xpathScraper) getXpathScraper(name string) (*mappedScraper, error) { + ret, ok := s.definition.XPathScrapers[name] + if !ok { + return nil, fmt.Errorf("xpath scraper with name %s not found in config", name) } + return &ret, nil } -func (s *xpathScraper) getXpathScraper() *mappedScraper { - return s.config.XPathScrapers[s.scraper.Scraper] +type xpathURLScraper struct { + xpathScraper + definition ByURLDefinition } -func (s *xpathScraper) scrapeURL(ctx context.Context, url string) 
(*html.Node, *mappedScraper, error) { - scraper := s.getXpathScraper() - - if scraper == nil { - return nil, nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") - } - - doc, err := s.loadURL(ctx, url) - - if err != nil { - return nil, nil, err - } - - return doc, scraper, nil -} - -func (s *xpathScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error) { - u := replaceURL(url, s.scraper) // allow a URL Replace for performer by URL queries - doc, scraper, err := s.scrapeURL(ctx, u) +func (s *xpathURLScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error) { + scraper, err := s.getXpathScraper(s.definition.Scraper) if err != nil { return nil, err } - q := s.getXPathQuery(doc, u) + doc, err := s.loadURL(ctx, url) + if err != nil { + return nil, err + } + + q := s.getXPathQuery(doc, url) // if these just return the return values from scraper.scrape* functions then // it ends up returning ScrapedContent(nil) rather than nil switch ty { @@ -100,11 +86,15 @@ func (s *xpathScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeCon return nil, ErrNotSupported } -func (s *xpathScraper) scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error) { - scraper := s.getXpathScraper() +type xpathNameScraper struct { + xpathScraper + definition ByNameDefinition +} - if scraper == nil { - return nil, fmt.Errorf("%w: name %v", ErrNotFound, s.scraper.Scraper) +func (s *xpathNameScraper) scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error) { + scraper, err := s.getXpathScraper(s.definition.Scraper) + if err != nil { + return nil, err } const placeholder = "{}" @@ -112,7 +102,7 @@ func (s *xpathScraper) scrapeByName(ctx context.Context, name string, ty ScrapeC // replace the placeholder string with the URL-escaped name escapedName := url.QueryEscape(name) - url := s.scraper.QueryURL 
+ url := s.definition.QueryURL url = strings.ReplaceAll(url, placeholder, escapedName) doc, err := s.loadURL(ctx, url) @@ -151,18 +141,22 @@ func (s *xpathScraper) scrapeByName(ctx context.Context, name string, ty ScrapeC return nil, ErrNotSupported } -func (s *xpathScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) { +type xpathFragmentScraper struct { + xpathScraper + definition ByFragmentDefinition +} + +func (s *xpathFragmentScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) { // construct the URL queryURL := queryURLParametersFromScene(scene) - if s.scraper.QueryURLReplacements != nil { - queryURL.applyReplacements(s.scraper.QueryURLReplacements) + if s.definition.QueryURLReplacements != nil { + queryURL.applyReplacements(s.definition.QueryURLReplacements) } - url := queryURL.constructURL(s.scraper.QueryURL) + url := queryURL.constructURL(s.definition.QueryURL) - scraper := s.getXpathScraper() - - if scraper == nil { - return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") + scraper, err := s.getXpathScraper(s.definition.Scraper) + if err != nil { + return nil, err } doc, err := s.loadURL(ctx, url) @@ -175,7 +169,7 @@ func (s *xpathScraper) scrapeSceneByScene(ctx context.Context, scene *models.Sce return scraper.scrapeScene(ctx, q) } -func (s *xpathScraper) scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) { +func (s *xpathFragmentScraper) scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) { switch { case input.Gallery != nil: return nil, fmt.Errorf("%w: cannot use an xpath scraper as a gallery fragment scraper", ErrNotSupported) @@ -189,15 +183,14 @@ func (s *xpathScraper) scrapeByFragment(ctx context.Context, input Input) (Scrap // construct the URL queryURL := queryURLParametersFromScrapedScene(scene) - if s.scraper.QueryURLReplacements != nil { - 
queryURL.applyReplacements(s.scraper.QueryURLReplacements) + if s.definition.QueryURLReplacements != nil { + queryURL.applyReplacements(s.definition.QueryURLReplacements) } - url := queryURL.constructURL(s.scraper.QueryURL) + url := queryURL.constructURL(s.definition.QueryURL) - scraper := s.getXpathScraper() - - if scraper == nil { - return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") + scraper, err := s.getXpathScraper(s.definition.Scraper) + if err != nil { + return nil, err } doc, err := s.loadURL(ctx, url) @@ -210,18 +203,17 @@ func (s *xpathScraper) scrapeByFragment(ctx context.Context, input Input) (Scrap return scraper.scrapeScene(ctx, q) } -func (s *xpathScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) { +func (s *xpathFragmentScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) { // construct the URL queryURL := queryURLParametersFromGallery(gallery) - if s.scraper.QueryURLReplacements != nil { - queryURL.applyReplacements(s.scraper.QueryURLReplacements) + if s.definition.QueryURLReplacements != nil { + queryURL.applyReplacements(s.definition.QueryURLReplacements) } - url := queryURL.constructURL(s.scraper.QueryURL) + url := queryURL.constructURL(s.definition.QueryURL) - scraper := s.getXpathScraper() - - if scraper == nil { - return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") + scraper, err := s.getXpathScraper(s.definition.Scraper) + if err != nil { + return nil, err } doc, err := s.loadURL(ctx, url) @@ -234,18 +226,17 @@ func (s *xpathScraper) scrapeGalleryByGallery(ctx context.Context, gallery *mode return scraper.scrapeGallery(ctx, q) } -func (s *xpathScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error) { +func (s *xpathFragmentScraper) scrapeImageByImage(ctx context.Context, image *models.Image) 
(*models.ScrapedImage, error) { // construct the URL queryURL := queryURLParametersFromImage(image) - if s.scraper.QueryURLReplacements != nil { - queryURL.applyReplacements(s.scraper.QueryURLReplacements) + if s.definition.QueryURLReplacements != nil { + queryURL.applyReplacements(s.definition.QueryURLReplacements) } - url := queryURL.constructURL(s.scraper.QueryURL) + url := queryURL.constructURL(s.definition.QueryURL) - scraper := s.getXpathScraper() - - if scraper == nil { - return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") + scraper, err := s.getXpathScraper(s.definition.Scraper) + if err != nil { + return nil, err } doc, err := s.loadURL(ctx, url) @@ -259,14 +250,14 @@ func (s *xpathScraper) scrapeImageByImage(ctx context.Context, image *models.Ima } func (s *xpathScraper) loadURL(ctx context.Context, url string) (*html.Node, error) { - r, err := loadURL(ctx, url, s.client, s.config, s.globalConfig) + r, err := loadURL(ctx, url, s.client, s.definition, s.globalConfig) if err != nil { return nil, fmt.Errorf("failed to load URL %q: %w", url, err) } ret, err := html.Parse(r) - if err == nil && s.config.DebugOptions != nil && s.config.DebugOptions.PrintHTML { + if err == nil && s.definition.DebugOptions != nil && s.definition.DebugOptions.PrintHTML { var b bytes.Buffer if err := html.Render(&b, ret); err != nil { logger.Warnf("could not render HTML: %v", err) diff --git a/pkg/scraper/xpath_test.go b/pkg/scraper/xpath_test.go index 391f60728..42ee2227b 100644 --- a/pkg/scraper/xpath_test.go +++ b/pkg/scraper/xpath_test.go @@ -674,10 +674,10 @@ func verifyPerformers(t *testing.T, expectedNames []string, expectedURLs []strin } if expectedName != actualName { - t.Errorf("Expected performer name %s, got %s", expectedName, actualName) + t.Errorf("Expected performer name %q, got %q", expectedName, actualName) } if expectedURL != actualURL { - t.Errorf("Expected performer URL %s, got %s", expectedName, actualName) + 
t.Errorf("Expected performer URL %q, got %q", expectedURL, actualURL) } i++ } @@ -780,7 +780,7 @@ xPathScrapers: Name: //studio ` - c := &config{} + c := &Definition{} err := yaml.Unmarshal([]byte(yamlStr), &c) if err != nil { @@ -892,7 +892,7 @@ xPathScrapers: selector: //span ` - c := &config{} + c := &Definition{} err := yaml.Unmarshal([]byte(yamlStr), &c) if err != nil { @@ -904,12 +904,8 @@ xPathScrapers: client := &http.Client{} ctx := context.Background() - s := newGroupScraper(*c, globalConfig) - us, ok := s.(urlScraper) - if !ok { - t.Error("couldn't convert scraper into url scraper") - } - content, err := us.viaURL(ctx, client, ts.URL, ScrapeContentTypePerformer) + s := scraperFromDefinition(*c, globalConfig) + content, err := s.viaURL(ctx, client, ts.URL, ScrapeContentTypePerformer) if err != nil { t.Errorf("Error scraping performer: %s", err.Error()) diff --git a/pkg/session/local.go b/pkg/session/local.go new file mode 100644 index 000000000..519328496 --- /dev/null +++ b/pkg/session/local.go @@ -0,0 +1,44 @@ +package session + +import ( + "context" + "net" + "net/http" + + "github.com/stashapp/stash/pkg/logger" +) + +// SetLocalRequest checks if the request is from localhost and sets the context value accordingly. +// It returns the modified request with the updated context, or the original request if it did +// not come from localhost or if there was an error parsing the remote address. 
+func SetLocalRequest(r *http.Request) *http.Request { + // determine if request is from localhost + host, _, err := net.SplitHostPort(r.RemoteAddr) + if err != nil { + logger.Errorf("Error parsing remote address: %v", err) + return r + } + + ip := net.ParseIP(host) + if ip == nil { + logger.Errorf("Error parsing IP address: %s", host) + return r + } + + if ip.IsLoopback() { + ctx := context.WithValue(r.Context(), contextLocalRequest, true) + r = r.WithContext(ctx) + } + + return r +} + +// IsLocalRequest returns true if the request is from localhost, as determined by the context value set by SetLocalRequest. +// If the context value is not set, it returns false. +func IsLocalRequest(ctx context.Context) bool { + val := ctx.Value(contextLocalRequest) + if val == nil { + return false + } + return val.(bool) +} diff --git a/pkg/session/session.go b/pkg/session/session.go index 66cb39e09..3e4c2eea1 100644 --- a/pkg/session/session.go +++ b/pkg/session/session.go @@ -15,6 +15,7 @@ type key int const ( contextUser key = iota contextVisitedPlugins + contextLocalRequest ) const ( diff --git a/pkg/sliceutil/stringslice/string_collections.go b/pkg/sliceutil/stringslice/string_collections.go index f5251de5f..eff3409e2 100644 --- a/pkg/sliceutil/stringslice/string_collections.go +++ b/pkg/sliceutil/stringslice/string_collections.go @@ -45,6 +45,23 @@ func UniqueFold(s []string) []string { return ret } +// UniqueExcludeFold returns a deduplicated slice of strings with the excluded string removed. +// The comparison is case-insensitive. 
+func UniqueExcludeFold(values []string, exclude string) []string { + seen := make(map[string]struct{}, len(values)) + seen[strings.ToLower(exclude)] = struct{}{} + ret := make([]string, 0, len(values)) + for _, v := range values { + vLower := strings.ToLower(v) + if _, exists := seen[vLower]; exists { + continue + } + seen[vLower] = struct{}{} + ret = append(ret, v) + } + return ret +} + // TrimSpace trims whitespace from each string in a slice. func TrimSpace(s []string) []string { for i, v := range s { diff --git a/pkg/sqlite/anonymise.go b/pkg/sqlite/anonymise.go index 764f569c0..ace306169 100644 --- a/pkg/sqlite/anonymise.go +++ b/pkg/sqlite/anonymise.go @@ -332,6 +332,10 @@ func (db *Anonymiser) anonymiseScenes(ctx context.Context) error { return err } + if err := db.anonymiseCustomFields(ctx, goqu.T(scenesCustomFieldsTable.GetTable()), "scene_id"); err != nil { + return err + } + return nil } @@ -518,6 +522,10 @@ func (db *Anonymiser) anonymiseGalleries(ctx context.Context) error { return err } + if err := db.anonymiseCustomFields(ctx, goqu.T(galleriesCustomFieldsTable.GetTable()), "gallery_id"); err != nil { + return err + } + return nil } @@ -678,6 +686,10 @@ func (db *Anonymiser) anonymiseStudios(ctx context.Context) error { return err } + if err := db.anonymiseCustomFields(ctx, goqu.T(studiosCustomFieldsTable.GetTable()), "studio_id"); err != nil { + return err + } + return nil } @@ -873,6 +885,10 @@ func (db *Anonymiser) anonymiseTags(ctx context.Context) error { return err } + if err := db.anonymiseCustomFields(ctx, goqu.T(tagsCustomFieldsTable.GetTable()), "tag_id"); err != nil { + return err + } + return nil } @@ -948,6 +964,10 @@ func (db *Anonymiser) anonymiseGroups(ctx context.Context) error { return err } + if err := db.anonymiseCustomFields(ctx, goqu.T(groupsCustomFieldsTable.GetTable()), "group_id"); err != nil { + return err + } + return nil } diff --git a/pkg/sqlite/criterion_handlers.go b/pkg/sqlite/criterion_handlers.go index 
fe6d1fcb5..ae245f1b5 100644 --- a/pkg/sqlite/criterion_handlers.go +++ b/pkg/sqlite/criterion_handlers.go @@ -1012,6 +1012,11 @@ func (h *stashIDCriterionHandler) handle(ctx context.Context, f *filterBuilder) return } + // ideally, this handler should just convert to stashIDsCriterionHandler + // but there are some differences in how the existing handler works compared + // to the new code, specifically because this code uses the stringCriterionHandler. + // To minimise potential regressions, we'll keep the existing logic for now. + stashIDRepo := h.stashIDRepository t := stashIDRepo.tableName if h.stashIDTableAs != "" { @@ -1036,12 +1041,64 @@ func (h *stashIDCriterionHandler) handle(ctx context.Context, f *filterBuilder) }, t+".stash_id")(ctx, f) } +type stashIDsCriterionHandler struct { + c *models.StashIDsCriterionInput + stashIDRepository *stashIDRepository + stashIDTableAs string + parentIDCol string +} + +func (h *stashIDsCriterionHandler) handle(ctx context.Context, f *filterBuilder) { + if h.c == nil { + return + } + + stashIDRepo := h.stashIDRepository + t := stashIDRepo.tableName + if h.stashIDTableAs != "" { + t = h.stashIDTableAs + } + + joinClause := fmt.Sprintf("%s.%s = %s", t, stashIDRepo.idColumn, h.parentIDCol) + if h.c.Endpoint != nil && *h.c.Endpoint != "" { + joinClause += fmt.Sprintf(" AND %s.endpoint = '%s'", t, *h.c.Endpoint) + } + + f.addLeftJoin(stashIDRepo.tableName, h.stashIDTableAs, joinClause) + + switch h.c.Modifier { + case models.CriterionModifierIsNull: + f.addWhere(fmt.Sprintf("%s.stash_id IS NULL", t)) + case models.CriterionModifierNotNull: + f.addWhere(fmt.Sprintf("%s.stash_id IS NOT NULL", t)) + case models.CriterionModifierEquals: + var clauses []sqlClause + for _, id := range h.c.StashIDs { + clauses = append(clauses, makeClause(fmt.Sprintf("%s.stash_id = ?", t), id)) + } + f.whereClauses = append(f.whereClauses, orClauses(clauses...)) + case models.CriterionModifierNotEquals: + var clauses []sqlClause + for _, id := range 
h.c.StashIDs { + clauses = append(clauses, makeClause(fmt.Sprintf("%s.stash_id != ?", t), id)) + } + f.whereClauses = append(f.whereClauses, andClauses(clauses...)) + default: + f.setError(fmt.Errorf("invalid modifier %s for stash IDs criterion", h.c.Modifier)) + } +} + type relatedFilterHandler struct { - relatedIDCol string - relatedRepo repository + // column on the primary table that relates to the related table (eg scene_id) + relatedIDCol string + // repository for the related table (eg sceneRepository) + relatedRepo repository + // handler for the filter on the related table relatedHandler criterionHandler - joinFn func(f *filterBuilder) - directJoin bool + // optional function to perform the necessary join(s) to the related table + joinFn func(f *filterBuilder) + // if true, related filter handler will be run using the existing filterBuilder instead of a subquery. + directJoin bool } func (h *relatedFilterHandler) handle(ctx context.Context, f *filterBuilder) { @@ -1072,5 +1129,42 @@ func (h *relatedFilterHandler) handle(ctx context.Context, f *filterBuilder) { return } - f.addWhere(fmt.Sprintf("%s IN ("+subQuery.toSQL(false)+")", h.relatedIDCol), subQuery.args...) + f.addWhere(fmt.Sprintf("%s IN ("+subQuery.toSQL(false)+")", h.relatedIDCol), subQuery.allArgs()...) 
+} + +type phashDistanceCriterionHandler struct { + // assumes that applicable fingerprints table is joined as fingerprints_phash + joinFn func(f *filterBuilder) + criterion *models.PhashDistanceCriterionInput +} + +func (h *phashDistanceCriterionHandler) handle(ctx context.Context, f *filterBuilder) { + phashDistance := h.criterion + if phashDistance == nil { + return + } + + h.joinFn(f) + + value, _ := utils.StringToPhash(phashDistance.Value) + distance := 0 + if phashDistance.Distance != nil { + distance = *phashDistance.Distance + } + + switch { + case phashDistance.Modifier == models.CriterionModifierEquals && distance > 0: + // needed to avoid a type mismatch + f.addWhere("typeof(fingerprints_phash.fingerprint) = 'integer'") + f.addWhere("phash_distance(fingerprints_phash.fingerprint, ?) < ?", value, distance) + case phashDistance.Modifier == models.CriterionModifierNotEquals && distance > 0: + // needed to avoid a type mismatch + f.addWhere("typeof(fingerprints_phash.fingerprint) = 'integer'") + f.addWhere("phash_distance(fingerprints_phash.fingerprint, ?) 
> ?", value, distance) + default: + intCriterionHandler(&models.IntCriterionInput{ + Value: int(value), + Modifier: phashDistance.Modifier, + }, "fingerprints_phash.fingerprint", nil)(ctx, f) + } } diff --git a/pkg/sqlite/custom_fields.go b/pkg/sqlite/custom_fields.go index 63f85b250..22dbbfeb2 100644 --- a/pkg/sqlite/custom_fields.go +++ b/pkg/sqlite/custom_fields.go @@ -192,6 +192,10 @@ func (s *customFieldsStore) GetCustomFieldsBulk(ctx context.Context, ids []int) const single = false ret := make([]models.CustomFieldMap, len(ids)) + // initialise ret with empty maps for each id + for i := range ret { + ret[i] = make(map[string]interface{}) + } idi := make(map[int]int, len(ids)) for i, id := range ids { @@ -257,8 +261,8 @@ func (h *customFieldsFilterHandler) handleCriterion(f *filterBuilder, joinAs str h.innerJoin(f, joinAs, cc.Field) f.addWhere(fmt.Sprintf("%[1]s.value IN %s", joinAs, getInBinding(len(cv))), cv...) case models.CriterionModifierNotEquals: - h.innerJoin(f, joinAs, cc.Field) - f.addWhere(fmt.Sprintf("%[1]s.value NOT IN %s", joinAs, getInBinding(len(cv))), cv...) + h.leftJoin(f, joinAs, cc.Field) + f.addWhere(fmt.Sprintf("(%[1]s.value NOT IN %s OR %[1]s.value IS NULL)", joinAs, getInBinding(len(cv))), cv...) case models.CriterionModifierIncludes: clauses := make([]sqlClause, len(cv)) for i, v := range cv { @@ -268,7 +272,7 @@ func (h *customFieldsFilterHandler) handleCriterion(f *filterBuilder, joinAs str f.whereClauses = append(f.whereClauses, clauses...) case models.CriterionModifierExcludes: for _, v := range cv { - f.addWhere(fmt.Sprintf("%[1]s.value NOT LIKE ?", joinAs), fmt.Sprintf("%%%v%%", v)) + f.addWhere(fmt.Sprintf("(%[1]s.value NOT LIKE ? 
OR %[1]s.value IS NULL)", joinAs), fmt.Sprintf("%%%v%%", v)) } h.leftJoin(f, joinAs, cc.Field) case models.CriterionModifierMatchesRegex: @@ -311,8 +315,8 @@ func (h *customFieldsFilterHandler) handleCriterion(f *filterBuilder, joinAs str h.innerJoin(f, joinAs, cc.Field) f.addWhere(fmt.Sprintf("%s.value BETWEEN ? AND ?", joinAs), cv[0], cv[1]) case models.CriterionModifierNotBetween: - h.innerJoin(f, joinAs, cc.Field) - f.addWhere(fmt.Sprintf("%s.value NOT BETWEEN ? AND ?", joinAs), cv[0], cv[1]) + h.leftJoin(f, joinAs, cc.Field) + f.addWhere(fmt.Sprintf("(%s.value NOT BETWEEN ? AND ? OR %[1]s.value IS NULL)", joinAs), cv[0], cv[1]) case models.CriterionModifierLessThan: if len(cv) != 1 { f.setError(fmt.Errorf("expected 1 value for custom field criterion modifier LESS_THAN, got %d", len(cv))) diff --git a/pkg/sqlite/custom_fields_test.go b/pkg/sqlite/custom_fields_test.go index 8ee154aec..5d5545210 100644 --- a/pkg/sqlite/custom_fields_test.go +++ b/pkg/sqlite/custom_fields_test.go @@ -11,11 +11,23 @@ import ( "github.com/stretchr/testify/assert" ) -func TestSetCustomFields(t *testing.T) { - performerIdx := performerIdx1WithScene +type customFieldsReaderWriter interface { + models.CustomFieldsReader + models.CustomFieldsWriter +} + +func testSetCustomFields(t *testing.T, namePrefix string, store customFieldsReaderWriter, id int, origCustomFields map[string]interface{}) { + getCustomFields := func() map[string]interface{} { + m := make(map[string]interface{}) + for k, v := range origCustomFields { + m[k] = v + } + return m + } mergeCustomFields := func(i map[string]interface{}) map[string]interface{} { - m := getPerformerCustomFields(performerIdx) + m := getCustomFields() + for k, v := range i { m[k] = v } @@ -70,7 +82,7 @@ func TestSetCustomFields(t *testing.T) { Remove: []string{"real"}, }, func() map[string]interface{} { - m := getPerformerCustomFields(performerIdx) + m := getCustomFields() delete(m, "real") return m }(), @@ -180,12 +192,8 @@ func 
TestSetCustomFields(t *testing.T) { }, } - // use performer custom fields store - store := db.Performer - id := performerIDs[performerIdx] - for _, tt := range tests { - runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + runWithRollbackTxn(t, namePrefix+" "+tt.name, func(t *testing.T, ctx context.Context) { assert := assert.New(t) err := store.SetCustomFields(ctx, id, tt.input) @@ -208,3 +216,45 @@ func TestSetCustomFields(t *testing.T) { }) } } + +func TestPerformerSetCustomFields(t *testing.T) { + performerIdx := performerIdx1WithScene + + testSetCustomFields(t, "Performer", db.Performer, performerIDs[performerIdx], getPerformerCustomFields(performerIdx)) +} + +func TestTagSetCustomFields(t *testing.T) { + tagIdx := tagIdx1WithScene + + testSetCustomFields(t, "Tag", db.Tag, tagIDs[tagIdx], getTagCustomFields(tagIdx)) +} + +func TestStudioSetCustomFields(t *testing.T) { + studioIdx := studioIdxWithScene + + testSetCustomFields(t, "Studio", db.Studio, studioIDs[studioIdx], getStudioCustomFields(studioIdx)) +} + +func TestSceneSetCustomFields(t *testing.T) { + sceneIdx := sceneIdxWithPerformer + + testSetCustomFields(t, "Scene", db.Scene, sceneIDs[sceneIdx], getSceneCustomFields(sceneIdx)) +} + +func TestGallerySetCustomFields(t *testing.T) { + galleryIdx := galleryIdxWithChapters + + testSetCustomFields(t, "Gallery", db.Gallery, galleryIDs[galleryIdx], getGalleryCustomFields(galleryIdx)) +} + +func TestImageSetCustomFields(t *testing.T) { + imageIdx := imageIdx2WithGallery + + testSetCustomFields(t, "Image", db.Image, imageIDs[imageIdx], getImageCustomFields(imageIdx)) +} + +func TestGroupSetCustomFields(t *testing.T) { + groupIdx := groupIdxWithScene + + testSetCustomFields(t, "Group", db.Group, groupIDs[groupIdx], getGroupCustomFields(groupIdx)) +} diff --git a/pkg/sqlite/database.go b/pkg/sqlite/database.go index 0ea3d7170..7c383dc4c 100644 --- a/pkg/sqlite/database.go +++ b/pkg/sqlite/database.go @@ -34,7 +34,7 @@ const ( cacheSizeEnv = 
"STASH_SQLITE_CACHE_SIZE" ) -var appSchemaVersion uint = 75 +var appSchemaVersion uint = 85 //go:embed migrations/*.sql var migrationsBox embed.FS diff --git a/pkg/sqlite/file.go b/pkg/sqlite/file.go index 1be5648b4..b8e807e37 100644 --- a/pkg/sqlite/file.go +++ b/pkg/sqlite/file.go @@ -695,7 +695,7 @@ func (qb *FileStore) allInPaths(q *goqu.SelectDataset, p []string) *goqu.SelectD // FindAllByPaths returns the all files that are within any of the given paths. // Returns all if limit is < 0. // Returns all files if p is empty. -func (qb *FileStore) FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]models.File, error) { +func (qb *FileStore) FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit, offset int) ([]models.File, error) { table := qb.table() folderTable := folderTableMgr.table @@ -706,6 +706,10 @@ func (qb *FileStore) FindAllInPaths(ctx context.Context, p []string, limit, offs q = qb.allInPaths(q, p) + if !includeZipContents { + q = q.Where(table.Col("zip_file_id").IsNull()) + } + if limit > -1 { q = q.Limit(uint(limit)) } @@ -975,7 +979,7 @@ func (qb *FileStore) queryGroupedFields(ctx context.Context, options models.File Megapixels float64 Size int64 }{} - if err := qb.repository.queryStruct(ctx, aggregateQuery.toSQL(includeSortPagination), query.args, &out); err != nil { + if err := qb.repository.queryStruct(ctx, aggregateQuery.toSQL(includeSortPagination), query.allArgs(), &out); err != nil { return nil, err } diff --git a/pkg/sqlite/file_filter.go b/pkg/sqlite/file_filter.go index 12c7ba3d5..29946a8ce 100644 --- a/pkg/sqlite/file_filter.go +++ b/pkg/sqlite/file_filter.go @@ -82,7 +82,7 @@ func (qb *fileFilterHandler) criterionHandler() criterionHandler { qb.hashesCriterionHandler(fileFilter.Hashes), - qb.phashDuplicatedCriterionHandler(fileFilter.Duplicated), + qb.duplicatedCriterionHandler(fileFilter.Duplicated), ×tampCriterionHandler{fileFilter.CreatedAt, "files.created_at", nil}, 
×tampCriterionHandler{fileFilter.UpdatedAt, "files.updated_at", nil}, @@ -205,17 +205,27 @@ func (qb *fileFilterHandler) galleryCountCriterionHandler(c *models.IntCriterion return h.handler(c) } -func (qb *fileFilterHandler) phashDuplicatedCriterionHandler(duplicatedFilter *models.PHashDuplicationCriterionInput) criterionHandlerFunc { +func (qb *fileFilterHandler) duplicatedCriterionHandler(duplicatedFilter *models.FileDuplicationCriterionInput) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { // TODO: Wishlist item: Implement Distance matching - if duplicatedFilter != nil { - var v string - if *duplicatedFilter.Duplicated { - v = ">" - } else { - v = "=" - } + // For files, only phash duplication applies + if duplicatedFilter == nil { + return + } + var phashValue *bool + + // Handle legacy 'duplicated' field for backwards compatibility + //nolint:staticcheck + if duplicatedFilter.Duplicated != nil && duplicatedFilter.Phash == nil { + //nolint:staticcheck + phashValue = duplicatedFilter.Duplicated + } else if duplicatedFilter.Phash != nil { + phashValue = duplicatedFilter.Phash + } + + if phashValue != nil { + v := getCountOperator(*phashValue) f.addInnerJoin("(SELECT file_id FROM files_fingerprints INNER JOIN (SELECT fingerprint FROM files_fingerprints WHERE type = 'phash' GROUP BY fingerprint HAVING COUNT (fingerprint) "+v+" 1) dupes on files_fingerprints.fingerprint = dupes.fingerprint)", "scph", "files.id = scph.file_id") } } @@ -228,22 +238,32 @@ func (qb *fileFilterHandler) hashesCriterionHandler(hashes []*models.Fingerprint t := fmt.Sprintf("file_fingerprints_%d", i) f.addLeftJoin(fingerprintTable, t, fmt.Sprintf("files.id = %s.file_id AND %s.type = ?", t, t), hash.Type) - value, _ := utils.StringToPhash(hash.Value) distance := 0 if hash.Distance != nil { distance = *hash.Distance } - if distance > 0 { - // needed to avoid a type mismatch - f.addWhere(fmt.Sprintf("typeof(%s.fingerprint) = 'integer'", t)) - 
f.addWhere(fmt.Sprintf("phash_distance(%s.fingerprint, ?) < ?", t), value, distance) + // Only phash supports distance matching and is stored as integer + if hash.Type == models.FingerprintTypePhash { + value, err := utils.StringToPhash(hash.Value) + if err != nil { + f.setError(fmt.Errorf("invalid phash value: %w", err)) + return + } + if distance > 0 { + // needed to avoid a type mismatch + f.addWhere(fmt.Sprintf("typeof(%s.fingerprint) = 'integer'", t)) + f.addWhere(fmt.Sprintf("phash_distance(%s.fingerprint, ?) < ?", t), value, distance) + } else { + intCriterionHandler(&models.IntCriterionInput{ + Value: int(value), + Modifier: models.CriterionModifierEquals, + }, t+".fingerprint", nil)(ctx, f) + } } else { - // use the default handler - intCriterionHandler(&models.IntCriterionInput{ - Value: int(value), - Modifier: models.CriterionModifierEquals, - }, t+".fingerprint", nil)(ctx, f) + // All other fingerprint types (md5, oshash, sha1, etc.) are stored as strings + // Use exact match for string-based fingerprints + f.addWhere(fmt.Sprintf("%s.fingerprint = ?", t), hash.Value) } } } diff --git a/pkg/sqlite/file_filter_test.go b/pkg/sqlite/file_filter_test.go index 50eed0129..648e502f7 100644 --- a/pkg/sqlite/file_filter_test.go +++ b/pkg/sqlite/file_filter_test.go @@ -9,6 +9,7 @@ import ( "testing" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" "github.com/stretchr/testify/assert" ) @@ -81,7 +82,45 @@ func TestFileQuery(t *testing.T) { includeIDs: []models.FileID{fileIDs[fileIdxInZip]}, excludeIdxs: []int{fileIdxStartImageFiles}, }, - // TODO - add more tests for other file filters + { + name: "hashes md5", + filter: &models.FileFilterType{ + Hashes: []*models.FingerprintFilterInput{ + { + Type: models.FingerprintTypeMD5, + Value: getPrefixedStringValue("file", fileIdxStartVideoFiles, "md5"), + }, + }, + }, + includeIdxs: []int{fileIdxStartVideoFiles}, + excludeIdxs: []int{fileIdxStartImageFiles}, + }, + { + name: "hashes oshash", 
+ filter: &models.FileFilterType{ + Hashes: []*models.FingerprintFilterInput{ + { + Type: models.FingerprintTypeOshash, + Value: getPrefixedStringValue("file", fileIdxStartVideoFiles, "oshash"), + }, + }, + }, + includeIdxs: []int{fileIdxStartVideoFiles}, + excludeIdxs: []int{fileIdxStartImageFiles}, + }, + { + name: "hashes phash", + filter: &models.FileFilterType{ + Hashes: []*models.FingerprintFilterInput{ + { + Type: models.FingerprintTypePhash, + Value: utils.PhashToString(getFilePhash(fileIdxStartImageFiles)), + }, + }, + }, + includeIdxs: []int{fileIdxStartImageFiles}, + excludeIdxs: []int{fileIdxStartVideoFiles}, + }, } for _, tt := range tests { diff --git a/pkg/sqlite/file_test.go b/pkg/sqlite/file_test.go index 8422390c0..55c41f4f7 100644 --- a/pkg/sqlite/file_test.go +++ b/pkg/sqlite/file_test.go @@ -572,7 +572,7 @@ func TestFileStore_FindByFingerprint(t *testing.T) { { "by MD5", models.Fingerprint{ - Type: "MD5", + Type: models.FingerprintTypeMD5, Fingerprint: getPrefixedStringValue("file", fileIdxZip, "md5"), }, []models.File{makeFileWithID(fileIdxZip)}, @@ -581,7 +581,7 @@ func TestFileStore_FindByFingerprint(t *testing.T) { { "by OSHASH", models.Fingerprint{ - Type: "OSHASH", + Type: models.FingerprintTypeOshash, Fingerprint: getPrefixedStringValue("file", fileIdxZip, "oshash"), }, []models.File{makeFileWithID(fileIdxZip)}, @@ -590,7 +590,7 @@ func TestFileStore_FindByFingerprint(t *testing.T) { { "non-existing", models.Fingerprint{ - Type: "OSHASH", + Type: models.FingerprintTypeOshash, Fingerprint: "foo", }, nil, diff --git a/pkg/sqlite/folder.go b/pkg/sqlite/folder.go index f250f7861..6cd1e0ade 100644 --- a/pkg/sqlite/folder.go +++ b/pkg/sqlite/folder.go @@ -20,6 +20,7 @@ const folderIDColumn = "folder_id" type folderRow struct { ID models.FolderID `db:"id" goqu:"skipinsert"` + Basename string `db:"basename"` Path string `db:"path"` ZipFileID null.Int `db:"zip_file_id"` ParentFolderID null.Int `db:"parent_folder_id"` @@ -30,6 +31,8 @@ type 
folderRow struct { func (r *folderRow) fromFolder(o models.Folder) { r.ID = o.ID + // derive basename from path + r.Basename = filepath.Base(o.Path) r.Path = o.Path r.ZipFileID = nullIntFromFileIDPtr(o.ZipFileID) r.ParentFolderID = nullIntFromFolderIDPtr(o.ParentFolderID) @@ -322,6 +325,126 @@ func (qb *FolderStore) FindByParentFolderID(ctx context.Context, parentFolderID return ret, nil } +func (qb *FolderStore) GetManyParentFolderIDs(ctx context.Context, folderIDs []models.FolderID) ([][]models.FolderID, error) { + table := qb.table() + + // SQL recursive query to get all parent folder IDs for each folder ID + /* + WITH RECURSIVE parent_folders AS ( + SELECT id, parent_folder_id + FROM folders + WHERE id IN (folderIDs) + + UNION ALL + + SELECT f.id, f.parent_folder_id + FROM folders f + INNER JOIN parent_folders pf ON f.id = pf.parent_folder_id + ) + SELECT id, parent_folder_id FROM parent_folders; + */ + const parentFolders = "parent_folders" + const parentFolderID = "parent_folder_id" + const parentID = "parent_id" + const foldersAlias = "f" + + const parentFoldersAlias = "pf" + foldersAliasedI := table.As(foldersAlias) + parentFoldersI := goqu.T(parentFolders).As(parentFoldersAlias) + + q := dialect.From(parentFolders).Prepared(true). + WithRecursive(parentFolders, + dialect.From(table).Select(table.Col(idColumn), table.Col(parentFolderID).As(parentID)). + Where(table.Col(idColumn).In(folderIDs)). 
+ Union( + dialect.From(foldersAliasedI).InnerJoin( + parentFoldersI, + goqu.On(foldersAliasedI.Col(idColumn).Eq(parentFoldersI.Col(parentID))), + ).Select(foldersAliasedI.Col(idColumn), foldersAliasedI.Col(parentFolderID).As(parentID)), + ), + ).Select(idColumn, parentID) + + type resultRow struct { + FolderID models.FolderID `db:"id"` + ParentFolderID null.Int `db:"parent_id"` + } + + folderMap := make(map[models.FolderID]models.FolderID) + + if err := queryFunc(ctx, q, false, func(r *sqlx.Rows) error { + var row resultRow + if err := r.StructScan(&row); err != nil { + return err + } + + if row.ParentFolderID.Valid { + folderMap[row.FolderID] = models.FolderID(row.ParentFolderID.Int64) + } else { + folderMap[row.FolderID] = 0 + } + + return nil + }); err != nil { + return nil, err + } + + ret := make([][]models.FolderID, len(folderIDs)) + + for i, folderID := range folderIDs { + var parents []models.FolderID + currentID := folderID + + for { + parentID, exists := folderMap[currentID] + if !exists || parentID == 0 { + break + } + parents = append(parents, parentID) + currentID = parentID + } + + ret[i] = parents + } + + return ret, nil +} + +func (qb *FolderStore) GetManySubFolderIDs(ctx context.Context, parentFolderIDs []models.FolderID) ([][]models.FolderID, error) { + table := qb.table() + q := dialect.From(table).Select( + table.Col(idColumn), + table.Col("parent_folder_id"), + ).Where(qb.table().Col("parent_folder_id").In(parentFolderIDs)) + + sql, args, err := q.ToSQL() + if err != nil { + return nil, fmt.Errorf("building query: %w", err) + } + + var results []struct { + FolderID int `db:"id"` + ParentFolderID models.FolderID `db:"parent_folder_id"` + } + + if err := querySelect(ctx, sql, args, &results); err != nil { + return nil, fmt.Errorf("getting folders by parent folder ids %v: %w", parentFolderIDs, err) + } + + retMap := make(map[models.FolderID][]models.FolderID) + + for _, v := range results { + retMap[v.ParentFolderID] = 
append(retMap[v.ParentFolderID], models.FolderID(v.FolderID)) + } + + ret := make([][]models.FolderID, len(parentFolderIDs)) + + for i, parentID := range parentFolderIDs { + ret[i] = retMap[parentID] + } + + return ret, nil +} + func (qb *FolderStore) allInPaths(q *goqu.SelectDataset, p []string) *goqu.SelectDataset { table := qb.table() @@ -340,10 +463,14 @@ func (qb *FolderStore) allInPaths(q *goqu.SelectDataset, p []string) *goqu.Selec // FindAllInPaths returns the all folders that are or are within any of the given paths. // Returns all if limit is < 0. // Returns all folders if p is empty. -func (qb *FolderStore) FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*models.Folder, error) { +func (qb *FolderStore) FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit, offset int) ([]*models.Folder, error) { q := qb.selectDataset().Prepared(true) q = qb.allInPaths(q, p) + if !includeZipContents { + q = q.Where(qb.table().Col("zip_file_id").IsNull()) + } + if limit > -1 { q = q.Limit(uint(limit)) } @@ -513,7 +640,7 @@ func (qb *FolderStore) queryGroupedFields(ctx context.Context, options models.Fo Megapixels float64 Size int64 }{} - if err := qb.repository.queryStruct(ctx, aggregateQuery.toSQL(includeSortPagination), query.args, &out); err != nil { + if err := qb.repository.queryStruct(ctx, aggregateQuery.toSQL(includeSortPagination), query.allArgs(), &out); err != nil { return nil, err } @@ -527,6 +654,7 @@ var folderSortOptions = sortOptions{ "created_at", "id", "path", + "basename", "random", "updated_at", } diff --git a/pkg/sqlite/folder_filter.go b/pkg/sqlite/folder_filter.go index 6b2bd96e9..e0145bcca 100644 --- a/pkg/sqlite/folder_filter.go +++ b/pkg/sqlite/folder_filter.go @@ -65,6 +65,7 @@ func (qb *folderFilterHandler) criterionHandler() criterionHandler { folderFilter := qb.folderFilter return compoundHandler{ stringCriterionHandler(folderFilter.Path, qb.table.Col("path")), + 
stringCriterionHandler(folderFilter.Basename, qb.table.Col("basename")), ×tampCriterionHandler{folderFilter.ModTime, qb.table.Col("mod_time"), nil}, qb.parentFolderCriterionHandler(folderFilter.ParentFolder), diff --git a/pkg/sqlite/folder_filter_test.go b/pkg/sqlite/folder_filter_test.go index c1c7d7a37..c08208f30 100644 --- a/pkg/sqlite/folder_filter_test.go +++ b/pkg/sqlite/folder_filter_test.go @@ -33,6 +33,17 @@ func TestFolderQuery(t *testing.T) { includeIdxs: []int{folderIdxWithSubFolder, folderIdxWithParentFolder}, excludeIdxs: []int{folderIdxInZip}, }, + { + name: "basename", + filter: &models.FolderFilterType{ + Basename: &models.StringCriterionInput{ + Value: getFolderBasename(folderIdxWithParentFolder, nil), + Modifier: models.CriterionModifierIncludes, + }, + }, + includeIdxs: []int{folderIdxWithParentFolder}, + excludeIdxs: []int{folderIdxInZip}, + }, { name: "parent folder", filter: &models.FolderFilterType{ diff --git a/pkg/sqlite/folder_test.go b/pkg/sqlite/folder_test.go index 15b2b96b8..072a1167f 100644 --- a/pkg/sqlite/folder_test.go +++ b/pkg/sqlite/folder_test.go @@ -186,8 +186,6 @@ func Test_FolderStore_Update(t *testing.T) { } assert.Equal(copy, *s) - - return }) } } @@ -239,3 +237,75 @@ func Test_FolderStore_FindByPath(t *testing.T) { }) } } + +func Test_FolderStore_GetManyParentFolderIDs(t *testing.T) { + var empty []models.FolderID + emptyResult := [][]models.FolderID{empty} + tests := []struct { + name string + parentFolderIDs []models.FolderID + want [][]models.FolderID + wantErr bool + }{ + { + "valid with parent folders", + []models.FolderID{folderIDs[folderIdxWithParentFolder]}, + [][]models.FolderID{ + { + folderIDs[folderIdxWithSubFolder], + folderIDs[folderIdxRoot], + }, + }, + false, + }, + { + "valid multiple folders", + []models.FolderID{ + folderIDs[folderIdxWithParentFolder], + folderIDs[folderIdxWithSceneFiles], + }, + [][]models.FolderID{ + { + folderIDs[folderIdxWithSubFolder], + folderIDs[folderIdxRoot], + }, + { + 
folderIDs[folderIdxForObjectFiles], + folderIDs[folderIdxRoot], + }, + }, + false, + }, + { + "valid without parent folders", + []models.FolderID{folderIDs[folderIdxRoot]}, + emptyResult, + false, + }, + { + "invalid folder id", + []models.FolderID{invalidFolderID}, + emptyResult, + // does not error, just returns empty result + false, + }, + } + + qb := db.Folder + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + got, err := qb.GetManyParentFolderIDs(ctx, tt.parentFolderIDs) + if (err != nil) != tt.wantErr { + assert.Errorf(err, "FolderStore.GetManyParentFolderIDs() error = %v, wantErr %v", err, tt.wantErr) + return + } + if tt.wantErr { + return + } + + assert.Equal(got, tt.want) + }) + } +} diff --git a/pkg/sqlite/gallery.go b/pkg/sqlite/gallery.go index 41729057b..ad7a94b04 100644 --- a/pkg/sqlite/gallery.go +++ b/pkg/sqlite/gallery.go @@ -183,6 +183,8 @@ var ( ) type GalleryStore struct { + customFieldsStore + tableMgr *table fileStore *FileStore @@ -191,6 +193,10 @@ type GalleryStore struct { func NewGalleryStore(fileStore *FileStore, folderStore *FolderStore) *GalleryStore { return &GalleryStore{ + customFieldsStore: customFieldsStore{ + table: galleriesCustomFieldsTable, + fk: galleriesCustomFieldsTable.Col(galleryIDColumn), + }, tableMgr: galleryTableMgr, fileStore: fileStore, folderStore: folderStore, @@ -231,18 +237,18 @@ func (qb *GalleryStore) selectDataset() *goqu.SelectDataset { ) } -func (qb *GalleryStore) Create(ctx context.Context, newObject *models.Gallery, fileIDs []models.FileID) error { +func (qb *GalleryStore) Create(ctx context.Context, newObject *models.CreateGalleryInput) error { var r galleryRow - r.fromGallery(*newObject) + r.fromGallery(*newObject.Gallery) id, err := qb.tableMgr.insertID(ctx, r) if err != nil { return err } - if len(fileIDs) > 0 { + if len(newObject.FileIDs) > 0 { const firstPrimary = true - if err := 
galleriesFilesTableMgr.insertJoins(ctx, id, firstPrimary, fileIDs); err != nil { + if err := galleriesFilesTableMgr.insertJoins(ctx, id, firstPrimary, newObject.FileIDs); err != nil { return err } } @@ -269,19 +275,24 @@ func (qb *GalleryStore) Create(ctx context.Context, newObject *models.Gallery, f } } + const partial = false + if err := qb.setCustomFields(ctx, id, newObject.CustomFields, partial); err != nil { + return err + } + updated, err := qb.find(ctx, id) if err != nil { return fmt.Errorf("finding after create: %w", err) } - *newObject = *updated + *newObject.Gallery = *updated return nil } -func (qb *GalleryStore) Update(ctx context.Context, updatedObject *models.Gallery) error { +func (qb *GalleryStore) Update(ctx context.Context, updatedObject *models.UpdateGalleryInput) error { var r galleryRow - r.fromGallery(*updatedObject) + r.fromGallery(*updatedObject.Gallery) if err := qb.tableMgr.updateByID(ctx, updatedObject.ID, r); err != nil { return err @@ -319,6 +330,10 @@ func (qb *GalleryStore) Update(ctx context.Context, updatedObject *models.Galler } } + if err := qb.SetCustomFields(ctx, updatedObject.ID, updatedObject.CustomFields); err != nil { + return err + } + return nil } @@ -364,6 +379,10 @@ func (qb *GalleryStore) UpdatePartial(ctx context.Context, id int, partial model } } + if err := qb.SetCustomFields(ctx, id, partial.CustomFields); err != nil { + return nil, err + } + return qb.find(ctx, id) } @@ -907,3 +926,7 @@ func (qb *GalleryStore) ResetCover(ctx context.Context, galleryID int) error { func (qb *GalleryStore) GetSceneIDs(ctx context.Context, id int) ([]int, error) { return galleryRepository.scenes.getIDs(ctx, id) } + +func (qb *GalleryStore) AddSceneIDs(ctx context.Context, galleryID int, sceneIDs []int) error { + return galleriesScenesTableMgr.insertJoins(ctx, galleryID, sceneIDs) +} diff --git a/pkg/sqlite/gallery_filter.go b/pkg/sqlite/gallery_filter.go index f05ff7b81..0435f3f57 100644 --- a/pkg/sqlite/gallery_filter.go +++ 
b/pkg/sqlite/gallery_filter.go @@ -84,6 +84,7 @@ func (qb *galleryFilterHandler) criterionHandler() criterionHandler { }), qb.pathCriterionHandler(filter.Path), + qb.parentFolderCriterionHandler(filter.ParentFolder), qb.fileCountCriterionHandler(filter.FileCount), intCriterionHandler(filter.Rating100, "galleries.rating", nil), qb.urlsCriterionHandler(filter.URL), @@ -105,6 +106,13 @@ func (qb *galleryFilterHandler) criterionHandler() criterionHandler { ×tampCriterionHandler{filter.CreatedAt, "galleries.created_at", nil}, ×tampCriterionHandler{filter.UpdatedAt, "galleries.updated_at", nil}, + &customFieldsFilterHandler{ + table: galleriesCustomFieldsTable.GetTable(), + fkCol: galleryIDColumn, + c: filter.CustomFields, + idCol: "galleries.id", + }, + &relatedFilterHandler{ relatedIDCol: "scenes_galleries.scene_id", relatedRepo: sceneRepository.repository, @@ -271,6 +279,65 @@ func (qb *galleryFilterHandler) pathCriterionHandler(c *models.StringCriterionIn } } +func (qb *galleryFilterHandler) parentFolderCriterionHandler(folder *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if folder == nil { + return + } + + galleryRepository.addFilesTable(f) + f.addLeftJoin(folderTable, "gallery_folder", "galleries.folder_id = gallery_folder.id") + + criterion := *folder + switch criterion.Modifier { + case models.CriterionModifierEquals: + criterion.Modifier = models.CriterionModifierIncludes + case models.CriterionModifierNotEquals: + criterion.Modifier = models.CriterionModifierExcludes + } + + // only allow includes or excludes filters + if criterion.Modifier != models.CriterionModifierIncludes && criterion.Modifier != models.CriterionModifierExcludes { + f.setError(fmt.Errorf("invalid modifier for parent folder criterion: %s", criterion.Modifier)) + } + + if len(criterion.Value) == 0 && len(criterion.Excludes) == 0 { + return + } + + // combine excludes if excludes modifier is selected + if 
criterion.Modifier == models.CriterionModifierExcludes { + criterion.Modifier = models.CriterionModifierIncludes + criterion.Excludes = append(criterion.Excludes, criterion.Value...) + criterion.Value = nil + } + + if len(criterion.Value) > 0 { + valuesClause, err := getHierarchicalValues(ctx, criterion.Value, "folders", "", "parent_folder_id", "parent_folder_id", criterion.Depth) + if err != nil { + f.setError(err) + return + } + + // combine clauses with OR to handle zip file or folder + c1 := makeClause(fmt.Sprintf("files.parent_folder_id IN (SELECT column2 FROM (%s))", valuesClause)) + c2 := makeClause(fmt.Sprintf("gallery_folder.parent_folder_id IN (SELECT column2 FROM (%s))", valuesClause)) + f.whereClauses = append(f.whereClauses, orClauses(c1, c2)) + } + + if len(criterion.Excludes) > 0 { + valuesClause, err := getHierarchicalValues(ctx, criterion.Excludes, "folders", "", "parent_folder_id", "parent_folder_id", criterion.Depth) + if err != nil { + f.setError(err) + return + } + + f.addWhere(fmt.Sprintf("files.parent_folder_id NOT IN (SELECT column2 FROM (%s)) OR folders.parent_folder_id IS NULL", valuesClause)) + f.addWhere(fmt.Sprintf("gallery_folder.parent_folder_id NOT IN (SELECT column2 FROM (%s)) OR gallery_folder.parent_folder_id IS NULL", valuesClause)) + } + } +} + func (qb *galleryFilterHandler) fileCountCriterionHandler(fileCount *models.IntCriterionInput) criterionHandlerFunc { h := countCriterionHandlerBuilder{ primaryTable: galleryTable, @@ -301,7 +368,16 @@ func (qb *galleryFilterHandler) missingCriterionHandler(isMissing *string) crite case "tags": galleryRepository.tags.join(f, "tags_join", "galleries.id") f.addWhere("tags_join.gallery_id IS NULL") + case "cover": + f.addLeftJoin("galleries_images", "cover_join", "cover_join.gallery_id = galleries.id AND cover_join.cover = 1") + f.addWhere("cover_join.image_id IS NULL") default: + if err := validateIsMissing(*isMissing, []string{ + "title", "code", "rating", "details", "photographer", + }); 
err != nil { + f.setError(err) + return + } f.addWhere("(galleries." + *isMissing + " IS NULL OR TRIM(galleries." + *isMissing + ") = '')") } } diff --git a/pkg/sqlite/gallery_test.go b/pkg/sqlite/gallery_test.go index 06d7daf17..9bd0da47f 100644 --- a/pkg/sqlite/gallery_test.go +++ b/pkg/sqlite/gallery_test.go @@ -160,7 +160,10 @@ func Test_galleryQueryBuilder_Create(t *testing.T) { fileIDs = []models.FileID{s.Files.List()[0].Base().ID} } - if err := qb.Create(ctx, &s, fileIDs); (err != nil) != tt.wantErr { + if err := qb.Create(ctx, &models.CreateGalleryInput{ + Gallery: &s, + FileIDs: fileIDs, + }); (err != nil) != tt.wantErr { t.Errorf("galleryQueryBuilder.Create() error = %v, wantErr = %v", err, tt.wantErr) } @@ -360,7 +363,9 @@ func Test_galleryQueryBuilder_Update(t *testing.T) { copy := *tt.updatedObject - if err := qb.Update(ctx, tt.updatedObject); (err != nil) != tt.wantErr { + if err := qb.Update(ctx, &models.UpdateGalleryInput{ + Gallery: tt.updatedObject, + }); (err != nil) != tt.wantErr { t.Errorf("galleryQueryBuilder.Update() error = %v, wantErr %v", err, tt.wantErr) } @@ -826,6 +831,79 @@ func Test_galleryQueryBuilder_UpdatePartialRelationships(t *testing.T) { } } +func Test_GalleryStore_UpdatePartialCustomFields(t *testing.T) { + tests := []struct { + name string + id int + partial models.GalleryPartial + expected map[string]interface{} // nil to use the partial + }{ + { + "set custom fields", + galleryIDs[galleryIdx1WithImage], + models.GalleryPartial{ + CustomFields: models.CustomFieldsInput{ + Full: testCustomFields, + }, + }, + nil, + }, + { + "clear custom fields", + galleryIDs[galleryIdx1WithImage], + models.GalleryPartial{ + CustomFields: models.CustomFieldsInput{ + Full: map[string]interface{}{}, + }, + }, + nil, + }, + { + "partial custom fields", + galleryIDs[galleryIdxWithTwoTags], + models.GalleryPartial{ + CustomFields: models.CustomFieldsInput{ + Partial: map[string]interface{}{ + "string": "bbb", + "new_field": "new", + }, + }, + }, + 
map[string]interface{}{ + "int": int64(2), + "real": 1.2, + "string": "bbb", + "new_field": "new", + }, + }, + } + for _, tt := range tests { + qb := db.Gallery + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + _, err := qb.UpdatePartial(ctx, tt.id, tt.partial) + if err != nil { + t.Errorf("GalleryStore.UpdatePartial() error = %v", err) + return + } + + // ensure custom fields are correct + cf, err := qb.GetCustomFields(ctx, tt.id) + if err != nil { + t.Errorf("GalleryStore.GetCustomFields() error = %v", err) + return + } + if tt.expected == nil { + assert.Equal(tt.partial.CustomFields.Full, cf) + } else { + assert.Equal(tt.expected, cf) + } + }) + } +} + func Test_galleryQueryBuilder_Destroy(t *testing.T) { tests := []struct { name string @@ -3001,6 +3079,245 @@ func TestGallerySetAndResetCover(t *testing.T) { }) } +func TestGalleryQueryCustomFields(t *testing.T) { + tests := []struct { + name string + filter *models.GalleryFilterType + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "equals", + &models.GalleryFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierEquals, + Value: []any{getGalleryStringValue(galleryIdxWithImage, "custom")}, + }, + }, + }, + []int{galleryIdxWithImage}, + nil, + false, + }, + { + "not equals", + &models.GalleryFilterType{ + Title: &models.StringCriterionInput{ + Value: getGalleryStringValue(galleryIdxWithImage, titleField), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotEquals, + Value: []any{getGalleryStringValue(galleryIdxWithImage, "custom")}, + }, + }, + }, + nil, + []int{galleryIdxWithImage}, + false, + }, + { + "includes", + &models.GalleryFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierIncludes, + 
Value: []any{getGalleryStringValue(galleryIdxWithImage, "custom")[9:]}, + }, + }, + }, + []int{galleryIdxWithImage}, + nil, + false, + }, + { + "excludes", + &models.GalleryFilterType{ + Title: &models.StringCriterionInput{ + Value: getGalleryStringValue(galleryIdxWithImage, titleField), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierExcludes, + Value: []any{getGalleryStringValue(galleryIdxWithImage, "custom")[9:]}, + }, + }, + }, + nil, + []int{galleryIdxWithImage}, + false, + }, + { + "regex", + &models.GalleryFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{".*17_custom"}, + }, + }, + }, + []int{galleryIdxWithPerformerTag}, + nil, + false, + }, + { + "invalid regex", + &models.GalleryFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "not matches regex", + &models.GalleryFilterType{ + Title: &models.StringCriterionInput{ + Value: getGalleryStringValue(galleryIdxWithPerformerTag, titleField), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{".*17_custom"}, + }, + }, + }, + nil, + []int{galleryIdxWithPerformerTag}, + false, + }, + { + "invalid not matches regex", + &models.GalleryFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "null", + &models.GalleryFilterType{ + Title: &models.StringCriterionInput{ + Value: getGalleryStringValue(galleryIdxWithImage, titleField), + Modifier: 
models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "not existing", + Modifier: models.CriterionModifierIsNull, + }, + }, + }, + []int{galleryIdxWithImage}, + nil, + false, + }, + { + "not null", + &models.GalleryFilterType{ + Title: &models.StringCriterionInput{ + Value: getGalleryStringValue(galleryIdxWithImage, titleField), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotNull, + }, + }, + }, + []int{galleryIdxWithImage}, + nil, + false, + }, + { + "between", + &models.GalleryFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + []int{galleryIdxWithImage}, + nil, + false, + }, + { + "not between", + &models.GalleryFilterType{ + Title: &models.StringCriterionInput{ + Value: getGalleryStringValue(galleryIdxWithImage, titleField), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierNotBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + nil, + []int{galleryIdxWithImage}, + false, + }, + } + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + galleries, _, err := db.Gallery.Query(ctx, tt.filter, nil) + if (err != nil) != tt.wantErr { + t.Errorf("GalleryStore.Query() error = %v, wantErr %v", err, tt.wantErr) + } + + if err != nil { + return + } + + ids := galleriesToIDs(galleries) + include := indexesToIDs(galleryIDs, tt.includeIdxs) + exclude := indexesToIDs(galleryIDs, tt.excludeIdxs) + + for _, i := range include { + assert.Contains(ids, i) + } + for _, e := range exclude { + assert.NotContains(ids, e) + } + }) + } +} + // TODO Count // TODO All // TODO Query diff --git a/pkg/sqlite/group.go 
b/pkg/sqlite/group.go index b216335b8..13a6905a5 100644 --- a/pkg/sqlite/group.go +++ b/pkg/sqlite/group.go @@ -131,6 +131,7 @@ var ( type GroupStore struct { blobJoinQueryBuilder + customFieldsStore tagRelationshipStore groupRelationshipStore @@ -143,6 +144,10 @@ func NewGroupStore(blobStore *BlobStore) *GroupStore { blobStore: blobStore, joinTable: groupTable, }, + customFieldsStore: customFieldsStore{ + table: groupsCustomFieldsTable, + fk: groupsCustomFieldsTable.Col(groupIDColumn), + }, tagRelationshipStore: tagRelationshipStore{ idRelationshipStore: idRelationshipStore{ joinTable: groupsTagsTableMgr, @@ -235,6 +240,10 @@ func (qb *GroupStore) UpdatePartial(ctx context.Context, id int, partial models. return nil, err } + if err := qb.SetCustomFields(ctx, id, partial.CustomFields); err != nil { + return nil, err + } + return qb.find(ctx, id) } diff --git a/pkg/sqlite/group_filter.go b/pkg/sqlite/group_filter.go index f29023785..14f3841f4 100644 --- a/pkg/sqlite/group_filter.go +++ b/pkg/sqlite/group_filter.go @@ -75,6 +75,7 @@ func (qb *groupFilterHandler) criterionHandler() criterionHandler { qb.tagsCriterionHandler(groupFilter.Tags), qb.tagCountCriterionHandler(groupFilter.TagCount), qb.groupOCounterCriterionHandler(groupFilter.OCounter), + qb.sceneCountCriterionHandler(groupFilter.SceneCount), &dateCriterionHandler{groupFilter.Date, "groups.date", nil}, groupHierarchyHandler.ParentsCriterionHandler(groupFilter.ContainingGroups), groupHierarchyHandler.ChildrenCriterionHandler(groupFilter.SubGroups), @@ -83,6 +84,13 @@ func (qb *groupFilterHandler) criterionHandler() criterionHandler { ×tampCriterionHandler{groupFilter.CreatedAt, "groups.created_at", nil}, ×tampCriterionHandler{groupFilter.UpdatedAt, "groups.updated_at", nil}, + &customFieldsFilterHandler{ + table: groupsCustomFieldsTable.GetTable(), + fkCol: groupIDColumn, + c: groupFilter.CustomFields, + idCol: "groups.id", + }, + &relatedFilterHandler{ relatedIDCol: "groups_scenes.scene_id", relatedRepo: 
sceneRepository.repository, @@ -111,7 +119,25 @@ func (qb *groupFilterHandler) missingCriterionHandler(isMissing *string) criteri case "scenes": f.addLeftJoin("groups_scenes", "", "groups_scenes.group_id = groups.id") f.addWhere("groups_scenes.scene_id IS NULL") + case "url": + groupsURLsTableMgr.join(f, "", "groups.id") + f.addWhere("group_urls.url IS NULL") + case "studio": + f.addWhere("groups.studio_id IS NULL") + case "performers": + f.addLeftJoin("groups_scenes", "gs_perf", "groups.id = gs_perf.group_id") + f.addLeftJoin("performers_scenes", "ps_perf", "gs_perf.scene_id = ps_perf.scene_id") + f.addWhere("ps_perf.performer_id IS NULL") + case "tags": + groupRepository.tags.join(f, "tags_join", "groups.id") + f.addWhere("tags_join.group_id IS NULL") default: + if err := validateIsMissing(*isMissing, []string{ + "aliases", "description", "director", "date", "rating", + }); err != nil { + f.setError(err) + return + } f.addWhere("(groups." + *isMissing + " IS NULL OR TRIM(groups." + *isMissing + ") = '')") } } @@ -204,6 +230,16 @@ func (qb *groupFilterHandler) tagCountCriterionHandler(count *models.IntCriterio return h.handler(count) } +func (qb *groupFilterHandler) sceneCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: groupTable, + joinTable: groupsScenesTable, + primaryFK: groupIDColumn, + } + + return h.handler(count) +} + // used for sorting and filtering on group o-count var selectGroupOCountSQL = utils.StrFormat( "SELECT SUM(o_counter) "+ diff --git a/pkg/sqlite/group_test.go b/pkg/sqlite/group_test.go index d4a177e86..22b551e02 100644 --- a/pkg/sqlite/group_test.go +++ b/pkg/sqlite/group_test.go @@ -566,6 +566,79 @@ func Test_groupQueryBuilder_UpdatePartial(t *testing.T) { } } +func Test_GroupStore_UpdatePartialCustomFields(t *testing.T) { + tests := []struct { + name string + id int + partial models.GroupPartial + expected map[string]interface{} // nil to use the partial + }{ 
+ { + "set custom fields", + groupIDs[groupIdxWithChild], + models.GroupPartial{ + CustomFields: models.CustomFieldsInput{ + Full: testCustomFields, + }, + }, + nil, + }, + { + "clear custom fields", + groupIDs[groupIdxWithChild], + models.GroupPartial{ + CustomFields: models.CustomFieldsInput{ + Full: map[string]interface{}{}, + }, + }, + nil, + }, + { + "partial custom fields", + groupIDs[groupIdxWithTwoTags], + models.GroupPartial{ + CustomFields: models.CustomFieldsInput{ + Partial: map[string]interface{}{ + "string": "bbb", + "new_field": "new", + }, + }, + }, + map[string]interface{}{ + "int": int64(3), + "real": 0.3, + "string": "bbb", + "new_field": "new", + }, + }, + } + for _, tt := range tests { + qb := db.Group + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + _, err := qb.UpdatePartial(ctx, tt.id, tt.partial) + if err != nil { + t.Errorf("GroupStore.UpdatePartial() error = %v", err) + return + } + + // ensure custom fields are correct + cf, err := qb.GetCustomFields(ctx, tt.id) + if err != nil { + t.Errorf("GroupStore.GetCustomFields() error = %v", err) + return + } + if tt.expected == nil { + assert.Equal(tt.partial.CustomFields.Full, cf) + } else { + assert.Equal(tt.expected, cf) + } + }) + } +} + func TestGroupFindByName(t *testing.T) { withTxn(func(ctx context.Context) error { mqb := db.Group @@ -669,6 +742,32 @@ func TestGroupQuery(t *testing.T) { nil, false, }, + { + "scene count equals 1", + nil, + &models.GroupFilterType{ + SceneCount: &models.IntCriterionInput{ + Value: 1, + Modifier: models.CriterionModifierEquals, + }, + }, + []int{groupIdxWithScene}, + []int{groupIdxWithParentAndChild}, + false, + }, + { + "scene count less than 1", + nil, + &models.GroupFilterType{ + SceneCount: &models.IntCriterionInput{ + Value: 1, + Modifier: models.CriterionModifierLessThan, + }, + }, + []int{groupIdxWithParentAndChild}, + []int{groupIdxWithScene}, + false, + }, } for _, tt := range tests { @@ 
-1891,6 +1990,245 @@ func TestGroupFindSubGroupIDs(t *testing.T) { } } +func TestGroupQueryCustomFields(t *testing.T) { + tests := []struct { + name string + filter *models.GroupFilterType + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "equals", + &models.GroupFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierEquals, + Value: []any{getGroupStringValue(groupIdxWithChild, "custom")}, + }, + }, + }, + []int{groupIdxWithChild}, + nil, + false, + }, + { + "not equals", + &models.GroupFilterType{ + Name: &models.StringCriterionInput{ + Value: getGroupStringValue(groupIdxWithChild, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotEquals, + Value: []any{getGroupStringValue(groupIdxWithChild, "custom")}, + }, + }, + }, + nil, + []int{groupIdxWithChild}, + false, + }, + { + "includes", + &models.GroupFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierIncludes, + Value: []any{getGroupStringValue(groupIdxWithChild, "custom")[9:]}, + }, + }, + }, + []int{groupIdxWithChild}, + nil, + false, + }, + { + "excludes", + &models.GroupFilterType{ + Name: &models.StringCriterionInput{ + Value: getGroupStringValue(groupIdxWithChild, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierExcludes, + Value: []any{getGroupStringValue(groupIdxWithChild, "custom")[9:]}, + }, + }, + }, + nil, + []int{groupIdxWithChild}, + false, + }, + { + "regex", + &models.GroupFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{".*11_custom"}, + }, + }, + }, + []int{groupIdxWithChildWithScene}, + nil, + false, + }, 
+ { + "invalid regex", + &models.GroupFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "not matches regex", + &models.GroupFilterType{ + Name: &models.StringCriterionInput{ + Value: getGroupStringValue(groupIdxWithChildWithScene, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{".*11_custom"}, + }, + }, + }, + nil, + []int{groupIdxWithChildWithScene}, + false, + }, + { + "invalid not matches regex", + &models.GroupFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "null", + &models.GroupFilterType{ + Name: &models.StringCriterionInput{ + Value: getGroupStringValue(groupIdxWithGrandParent, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "not existing", + Modifier: models.CriterionModifierIsNull, + }, + }, + }, + []int{groupIdxWithGrandParent}, + nil, + false, + }, + { + "not null", + &models.GroupFilterType{ + Name: &models.StringCriterionInput{ + Value: getGroupStringValue(groupIdxWithGrandParent, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotNull, + }, + }, + }, + []int{groupIdxWithGrandParent}, + nil, + false, + }, + { + "between", + &models.GroupFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + []int{groupIdxWithTag}, + nil, + false, + }, + { + "not between", + 
&models.GroupFilterType{ + Name: &models.StringCriterionInput{ + Value: getGroupStringValue(groupIdxWithTag, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierNotBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + nil, + []int{groupIdxWithTag}, + false, + }, + } + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + groups, _, err := db.Group.Query(ctx, tt.filter, nil) + if (err != nil) != tt.wantErr { + t.Errorf("GroupStore.Query() error = %v, wantErr %v", err, tt.wantErr) + } + + if err != nil { + return + } + + ids := groupsToIDs(groups) + include := indexesToIDs(groupIDs, tt.includeIdxs) + exclude := indexesToIDs(groupIDs, tt.excludeIdxs) + + for _, i := range include { + assert.Contains(ids, i) + } + for _, e := range exclude { + assert.NotContains(ids, e) + } + }) + } +} + // TODO Update // TODO Destroy - ensure image is destroyed // TODO Find diff --git a/pkg/sqlite/image.go b/pkg/sqlite/image.go index ccccc90aa..e0ac576d8 100644 --- a/pkg/sqlite/image.go +++ b/pkg/sqlite/image.go @@ -185,6 +185,8 @@ var ( ) type ImageStore struct { + customFieldsStore + tableMgr *table oCounterManager @@ -193,6 +195,10 @@ type ImageStore struct { func NewImageStore(r *storeRepository) *ImageStore { return &ImageStore{ + customFieldsStore: customFieldsStore{ + table: imagesCustomFieldsTable, + fk: imagesCustomFieldsTable.Col(imageIDColumn), + }, tableMgr: imageTableMgr, oCounterManager: oCounterManager{imageTableMgr}, repo: r, @@ -236,18 +242,18 @@ func (qb *ImageStore) selectDataset() *goqu.SelectDataset { ) } -func (qb *ImageStore) Create(ctx context.Context, newObject *models.Image, fileIDs []models.FileID) error { +func (qb *ImageStore) Create(ctx context.Context, newObject *models.CreateImageInput) error { var r imageRow - r.fromImage(*newObject) + 
r.fromImage(*newObject.Image) id, err := qb.tableMgr.insertID(ctx, r) if err != nil { return err } - if len(fileIDs) > 0 { + if len(newObject.FileIDs) > 0 { const firstPrimary = true - if err := imagesFilesTableMgr.insertJoins(ctx, id, firstPrimary, fileIDs); err != nil { + if err := imagesFilesTableMgr.insertJoins(ctx, id, firstPrimary, newObject.FileIDs); err != nil { return err } } @@ -276,12 +282,18 @@ func (qb *ImageStore) Create(ctx context.Context, newObject *models.Image, fileI } } + if err := qb.SetCustomFields(ctx, id, models.CustomFieldsInput{ + Full: newObject.CustomFields, + }); err != nil { + return err + } + updated, err := qb.find(ctx, id) if err != nil { return fmt.Errorf("finding after create: %w", err) } - *newObject = *updated + *newObject.Image = *updated return nil } @@ -329,6 +341,10 @@ func (qb *ImageStore) UpdatePartial(ctx context.Context, id int, partial models. } } + if err := qb.SetCustomFields(ctx, id, partial.CustomFields); err != nil { + return nil, err + } + return qb.find(ctx, id) } @@ -821,7 +837,7 @@ func (qb *ImageStore) makeQuery(ctx context.Context, imageFilter *models.ImageFi ) filepathColumn := "folders.path || '" + string(filepath.Separator) + "' || files.basename" - searchColumns := []string{"images.title", filepathColumn, "files_fingerprints.fingerprint"} + searchColumns := []string{"images.title", "images.details", filepathColumn, "files_fingerprints.fingerprint"} query.parseQueryString(searchColumns, *q) } @@ -910,7 +926,7 @@ func (qb *ImageStore) queryGroupedFields(ctx context.Context, options models.Ima Megapixels null.Float Size null.Float }{} - if err := imageRepository.queryStruct(ctx, aggregateQuery.toSQL(includeSortPagination), query.args, &out); err != nil { + if err := imageRepository.queryStruct(ctx, aggregateQuery.toSQL(includeSortPagination), query.allArgs(), &out); err != nil { return nil, err } @@ -942,6 +958,7 @@ var imageSortOptions = sortOptions{ "performer_count", "random", "rating", + "resolution", 
"tag_count", "title", "updated_at", @@ -1001,6 +1018,14 @@ func (qb *ImageStore) setImageSortAndPagination(q *queryBuilder, findFilter *mod case "mod_time", "filesize": addFilesJoin() sortClause = getSort(sort, direction, "files") + case "resolution": + addFilesJoin() + q.addJoins(join{ + sort: true, + table: imageFileTable, + onClause: "images_files.file_id = image_files.file_id", + }) + sortClause = " ORDER BY MIN(image_files.width, image_files.height) " + direction case "title": addFilesJoin() addFolderJoin() diff --git a/pkg/sqlite/image_filter.go b/pkg/sqlite/image_filter.go index 1d119bfde..4d1d2c4b3 100644 --- a/pkg/sqlite/image_filter.go +++ b/pkg/sqlite/image_filter.go @@ -62,6 +62,15 @@ func (qb *imageFilterHandler) criterionHandler() criterionHandler { stringCriterionHandler(imageFilter.Checksum, "fingerprints_md5.fingerprint")(ctx, f) }), + + &phashDistanceCriterionHandler{ + joinFn: func(f *filterBuilder) { + imageRepository.addImagesFilesTable(f) + f.addLeftJoin(fingerprintTable, "fingerprints_phash", "images_files.file_id = fingerprints_phash.file_id AND fingerprints_phash.type = 'phash'") + }, + criterion: imageFilter.PhashDistance, + }, + stringCriterionHandler(imageFilter.Title, "images.title"), stringCriterionHandler(imageFilter.Code, "images.code"), stringCriterionHandler(imageFilter.Details, "images.details"), @@ -91,6 +100,13 @@ func (qb *imageFilterHandler) criterionHandler() criterionHandler { ×tampCriterionHandler{imageFilter.CreatedAt, "images.created_at", nil}, ×tampCriterionHandler{imageFilter.UpdatedAt, "images.updated_at", nil}, + &customFieldsFilterHandler{ + table: imagesCustomFieldsTable.GetTable(), + fkCol: imageIDColumn, + c: imageFilter.CustomFields, + idCol: "images.id", + }, + &relatedFilterHandler{ relatedIDCol: "galleries_images.gallery_id", relatedRepo: galleryRepository.repository, @@ -155,6 +171,9 @@ func (qb *imageFilterHandler) missingCriterionHandler(isMissing *string) criteri return func(ctx context.Context, f 
*filterBuilder) { if isMissing != nil && *isMissing != "" { switch *isMissing { + case "url": + imagesURLsTableMgr.join(f, "", "images.id") + f.addWhere("image_urls.url IS NULL") case "studio": f.addWhere("images.studio_id IS NULL") case "performers": @@ -167,6 +186,12 @@ func (qb *imageFilterHandler) missingCriterionHandler(isMissing *string) criteri imageRepository.tags.join(f, "tags_join", "images.id") f.addWhere("tags_join.image_id IS NULL") default: + if err := validateIsMissing(*isMissing, []string{ + "title", "details", "photographer", "date", "code", "rating", + }); err != nil { + f.setError(err) + return + } f.addWhere("(images." + *isMissing + " IS NULL OR TRIM(images." + *isMissing + ") = '')") } } diff --git a/pkg/sqlite/image_test.go b/pkg/sqlite/image_test.go index aa4ed3b99..85337c911 100644 --- a/pkg/sqlite/image_test.go +++ b/pkg/sqlite/image_test.go @@ -73,81 +73,94 @@ func Test_imageQueryBuilder_Create(t *testing.T) { tests := []struct { name string - newObject models.Image + newObject models.CreateImageInput wantErr bool }{ { "full", - models.Image{ - Title: title, - Code: code, - Rating: &rating, - Date: &date, - Details: details, - Photographer: photographer, - URLs: models.NewRelatedStrings([]string{url}), - Organized: true, - OCounter: ocounter, - StudioID: &studioIDs[studioIdxWithImage], - CreatedAt: createdAt, - UpdatedAt: updatedAt, - GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}), - TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithImage]}), - PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}), + models.CreateImageInput{ + Image: &models.Image{ + Title: title, + Code: code, + Rating: &rating, + Date: &date, + Details: details, + Photographer: photographer, + URLs: models.NewRelatedStrings([]string{url}), + Organized: true, + OCounter: ocounter, + StudioID: &studioIDs[studioIdxWithImage], + CreatedAt: createdAt, + 
UpdatedAt: updatedAt, + GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}), + TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithImage]}), + PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}), + }, + CustomFields: testCustomFields, }, false, }, { "with file", - models.Image{ - Title: title, - Code: code, - Rating: &rating, - Date: &date, - Details: details, - Photographer: photographer, - URLs: models.NewRelatedStrings([]string{url}), - Organized: true, - OCounter: ocounter, - StudioID: &studioIDs[studioIdxWithImage], - Files: models.NewRelatedFiles([]models.File{ - imageFile.(*models.ImageFile), - }), - PrimaryFileID: &imageFile.Base().ID, - Path: imageFile.Base().Path, - CreatedAt: createdAt, - UpdatedAt: updatedAt, - GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}), - TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithImage]}), - PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}), + models.CreateImageInput{ + Image: &models.Image{ + Title: title, + Code: code, + Rating: &rating, + Date: &date, + Details: details, + Photographer: photographer, + URLs: models.NewRelatedStrings([]string{url}), + Organized: true, + OCounter: ocounter, + StudioID: &studioIDs[studioIdxWithImage], + Files: models.NewRelatedFiles([]models.File{ + imageFile.(*models.ImageFile), + }), + PrimaryFileID: &imageFile.Base().ID, + Path: imageFile.Base().Path, + CreatedAt: createdAt, + UpdatedAt: updatedAt, + GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}), + TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithImage]}), + PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}), + }, }, false, }, { "invalid studio id", - models.Image{ - 
StudioID: &invalidID, + models.CreateImageInput{ + Image: &models.Image{ + StudioID: &invalidID, + }, }, true, }, { "invalid gallery id", - models.Image{ - GalleryIDs: models.NewRelatedIDs([]int{invalidID}), + models.CreateImageInput{ + Image: &models.Image{ + GalleryIDs: models.NewRelatedIDs([]int{invalidID}), + }, }, true, }, { "invalid tag id", - models.Image{ - TagIDs: models.NewRelatedIDs([]int{invalidID}), + models.CreateImageInput{ + Image: &models.Image{ + TagIDs: models.NewRelatedIDs([]int{invalidID}), + }, }, true, }, { "invalid performer id", - models.Image{ - PerformerIDs: models.NewRelatedIDs([]int{invalidID}), + models.CreateImageInput{ + Image: &models.Image{ + PerformerIDs: models.NewRelatedIDs([]int{invalidID}), + }, }, true, }, @@ -165,8 +178,11 @@ func Test_imageQueryBuilder_Create(t *testing.T) { fileIDs = append(fileIDs, f.Base().ID) } } - s := tt.newObject - if err := qb.Create(ctx, &s, fileIDs); (err != nil) != tt.wantErr { + s := *tt.newObject.Image + if err := qb.Create(ctx, &models.CreateImageInput{ + Image: &s, + FileIDs: fileIDs, + }); (err != nil) != tt.wantErr { t.Errorf("imageQueryBuilder.Create() error = %v, wantErr = %v", err, tt.wantErr) } @@ -177,7 +193,7 @@ func Test_imageQueryBuilder_Create(t *testing.T) { assert.NotZero(s.ID) - copy := tt.newObject + copy := *tt.newObject.Image copy.ID = s.ID // load relationships @@ -201,8 +217,6 @@ func Test_imageQueryBuilder_Create(t *testing.T) { } assert.Equal(copy, *found) - - return }) } } @@ -387,8 +401,6 @@ func Test_imageQueryBuilder_Update(t *testing.T) { } assert.Equal(copy, *s) - - return }) } } @@ -832,6 +844,79 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) { } } +func Test_ImageStore_UpdatePartialCustomFields(t *testing.T) { + tests := []struct { + name string + id int + partial models.ImagePartial + expected map[string]interface{} // nil to use the partial + }{ + { + "set custom fields", + imageIDs[imageIdx1WithGallery], + models.ImagePartial{ + 
CustomFields: models.CustomFieldsInput{ + Full: testCustomFields, + }, + }, + nil, + }, + { + "clear custom fields", + imageIDs[imageIdx1WithGallery], + models.ImagePartial{ + CustomFields: models.CustomFieldsInput{ + Full: map[string]interface{}{}, + }, + }, + nil, + }, + { + "partial custom fields", + imageIDs[imageIdxWithStudio], + models.ImagePartial{ + CustomFields: models.CustomFieldsInput{ + Partial: map[string]interface{}{ + "string": "bbb", + "new_field": "new", + }, + }, + }, + map[string]interface{}{ + "int": int64(2), + "real": 1.2, + "string": "bbb", + "new_field": "new", + }, + }, + } + for _, tt := range tests { + qb := db.Image + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + _, err := qb.UpdatePartial(ctx, tt.id, tt.partial) + if err != nil { + t.Errorf("ImageStore.UpdatePartial() error = %v", err) + return + } + + // ensure custom fields are correct + cf, err := qb.GetCustomFields(ctx, tt.id) + if err != nil { + t.Errorf("ImageStore.GetCustomFields() error = %v", err) + return + } + if tt.expected == nil { + assert.Equal(tt.partial.CustomFields.Full, cf) + } else { + assert.Equal(tt.expected, cf) + } + }) + } +} + func Test_imageQueryBuilder_IncrementOCounter(t *testing.T) { tests := []struct { name string @@ -1511,6 +1596,20 @@ func TestImageQueryQ(t *testing.T) { }) } +func TestImageQueryQ_Details(t *testing.T) { + withTxn(func(ctx context.Context) error { + const imageIdx = 3 + + q := getImageStringValue(imageIdx, detailsField) + + sqb := db.Image + + imageQueryQ(ctx, t, sqb, q, imageIdx) + + return nil + }) +} + func queryImagesWithCount(ctx context.Context, sqb models.ImageReader, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) ([]*models.Image, int, error) { result, err := sqb.Query(ctx, models.ImageQueryOptions{ QueryOptions: models.QueryOptions{ @@ -3018,6 +3117,252 @@ func TestImageQueryPagination(t *testing.T) { }) } +func TestImageQueryCustomFields(t 
*testing.T) { + tests := []struct { + name string + filter *models.ImageFilterType + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "equals", + &models.ImageFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierEquals, + Value: []any{getImageStringValue(imageIdx1WithGallery, "custom")}, + }, + }, + }, + []int{imageIdx1WithGallery}, + nil, + false, + }, + { + "not equals", + &models.ImageFilterType{ + Title: &models.StringCriterionInput{ + Value: getImageStringValue(imageIdx1WithGallery, titleField), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotEquals, + Value: []any{getImageStringValue(imageIdx1WithGallery, "custom")}, + }, + }, + }, + nil, + []int{imageIdx1WithGallery}, + false, + }, + { + "includes", + &models.ImageFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierIncludes, + Value: []any{getImageStringValue(imageIdx1WithGallery, "custom")[9:]}, + }, + }, + }, + []int{imageIdx1WithGallery}, + nil, + false, + }, + { + "excludes", + &models.ImageFilterType{ + Title: &models.StringCriterionInput{ + Value: getImageStringValue(imageIdx1WithGallery, titleField), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierExcludes, + Value: []any{getImageStringValue(imageIdx1WithGallery, "custom")[9:]}, + }, + }, + }, + nil, + []int{imageIdx1WithGallery}, + false, + }, + { + "regex", + &models.ImageFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{".*17_custom"}, + }, + }, + }, + []int{imageIdxWithPerformerTag}, + nil, + false, + }, + { + "invalid regex", + &models.ImageFilterType{ + CustomFields: 
[]models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "not matches regex", + &models.ImageFilterType{ + Title: &models.StringCriterionInput{ + Value: getImageStringValue(imageIdxWithPerformerTag, titleField), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{".*17_custom"}, + }, + }, + }, + nil, + []int{imageIdxWithPerformerTag}, + false, + }, + { + "invalid not matches regex", + &models.ImageFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "null", + &models.ImageFilterType{ + Title: &models.StringCriterionInput{ + Value: getImageStringValue(imageIdx1WithGallery, titleField), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "not existing", + Modifier: models.CriterionModifierIsNull, + }, + }, + }, + []int{imageIdx1WithGallery}, + nil, + false, + }, + { + "not null", + &models.ImageFilterType{ + Title: &models.StringCriterionInput{ + Value: getImageStringValue(imageIdx1WithGallery, titleField), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotNull, + }, + }, + }, + []int{imageIdx1WithGallery}, + nil, + false, + }, + { + "between", + &models.ImageFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + []int{imageIdx2WithGallery}, + nil, + false, + }, + { + "not between", + &models.ImageFilterType{ + Title: &models.StringCriterionInput{ + Value: 
getImageStringValue(imageIdx2WithGallery, titleField), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierNotBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + nil, + []int{imageIdx2WithGallery}, + false, + }, + } + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + result, err := db.Image.Query(ctx, models.ImageQueryOptions{ + ImageFilter: tt.filter, + }) + if (err != nil) != tt.wantErr { + t.Errorf("ImageStore.Query() error = %v, wantErr %v", err, tt.wantErr) + } + + if err != nil { + return + } + + images, err := result.Resolve(ctx) + if err != nil { + t.Errorf("ImageStore.Query().Resolve() error = %v", err) + } + + ids := imagesToIDs(images) + include := indexesToIDs(imageIDs, tt.includeIdxs) + exclude := indexesToIDs(imageIDs, tt.excludeIdxs) + + for _, i := range include { + assert.Contains(ids, i) + } + for _, e := range exclude { + assert.NotContains(ids, e) + } + }) + } +} + // TODO Count // TODO SizeCount // TODO All diff --git a/pkg/sqlite/migrations/76_studio_custom_fields.up.sql b/pkg/sqlite/migrations/76_studio_custom_fields.up.sql new file mode 100644 index 000000000..81a72d4d4 --- /dev/null +++ b/pkg/sqlite/migrations/76_studio_custom_fields.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE `studio_custom_fields` ( + `studio_id` integer NOT NULL, + `field` varchar(64) NOT NULL, + `value` BLOB NOT NULL, + PRIMARY KEY (`studio_id`, `field`), + foreign key(`studio_id`) references `studios`(`id`) on delete CASCADE +); + +CREATE INDEX `index_studio_custom_fields_field_value` ON `studio_custom_fields` (`field`, `value`); diff --git a/pkg/sqlite/migrations/77_tag_custom_fields.up.sql b/pkg/sqlite/migrations/77_tag_custom_fields.up.sql new file mode 100644 index 000000000..b34a5f794 --- /dev/null +++ b/pkg/sqlite/migrations/77_tag_custom_fields.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE 
`tag_custom_fields` ( + `tag_id` integer NOT NULL, + `field` varchar(64) NOT NULL, + `value` BLOB NOT NULL, + PRIMARY KEY (`tag_id`, `field`), + foreign key(`tag_id`) references `tags`(`id`) on delete CASCADE +); + +CREATE INDEX `index_tag_custom_fields_field_value` ON `tag_custom_fields` (`field`, `value`); \ No newline at end of file diff --git a/pkg/sqlite/migrations/78_performer_career_dates.up.sql b/pkg/sqlite/migrations/78_performer_career_dates.up.sql new file mode 100644 index 000000000..006d9fae7 --- /dev/null +++ b/pkg/sqlite/migrations/78_performer_career_dates.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE "performers" ADD COLUMN "career_start" integer; +ALTER TABLE "performers" ADD COLUMN "career_end" integer; diff --git a/pkg/sqlite/migrations/78_postmigrate.go b/pkg/sqlite/migrations/78_postmigrate.go new file mode 100644 index 000000000..34dbe6eb3 --- /dev/null +++ b/pkg/sqlite/migrations/78_postmigrate.go @@ -0,0 +1,156 @@ +package migrations + +import ( + "context" + "database/sql" + "fmt" + + "github.com/jmoiron/sqlx" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/sqlite" +) + +type schema78Migrator struct { + migrator +} + +func post78(ctx context.Context, db *sqlx.DB) error { + logger.Info("Running post-migration for schema version 78") + + m := schema78Migrator{ + migrator: migrator{ + db: db, + }, + } + + if err := m.migrateCareerLength(ctx); err != nil { + return fmt.Errorf("migrating career_length: %w", err) + } + + if err := m.dropCareerLength(); err != nil { + return fmt.Errorf("dropping career_length column: %w", err) + } + + return nil +} + +func (m *schema78Migrator) migrateCareerLength(ctx context.Context) error { + logger.Info("Migrating career_length to career_start/career_end") + + const limit = 1000 + + lastID := 0 + parsed := 0 + unparseable := 0 + + for { + gotSome := false + + if err := m.withTxn(ctx, func(tx *sqlx.Tx) error { + query := `SELECT id, career_length FROM 
performers + WHERE career_length IS NOT NULL AND career_length != ''` + + if lastID != 0 { + query += fmt.Sprintf(" AND id > %d", lastID) + } + + query += fmt.Sprintf(" ORDER BY id LIMIT %d", limit) + + rows, err := tx.Query(query) + if err != nil { + return err + } + defer rows.Close() + + for rows.Next() { + var ( + id int + careerLength string + ) + + if err := rows.Scan(&id, &careerLength); err != nil { + return err + } + + lastID = id + gotSome = true + + start, end, err := models.ParseYearRangeString(careerLength) + if err != nil { + logger.Warnf("Could not parse career_length %q for performer %d: %v — preserving as custom field", careerLength, id, err) + + if err := m.preserveAsCustomField(tx, id, careerLength); err != nil { + return fmt.Errorf("preserving career_length for performer %d: %w", id, err) + } + unparseable++ + continue + } + + if err := m.updateCareerFields(tx, id, start, end); err != nil { + return fmt.Errorf("updating career fields for performer %d: %w", id, err) + } + parsed++ + } + + return rows.Err() + }); err != nil { + return err + } + + if !gotSome { + break + } + } + + logger.Infof("Career length migration complete: %d parsed, %d unparseable (preserved as custom fields)", parsed, unparseable) + return nil +} + +func (m *schema78Migrator) updateCareerFields(tx *sqlx.Tx, id int, start *models.Date, end *models.Date) error { + var ( + startYear, endYear *int + ) + + if start != nil { + year := start.Year() + startYear = &year + } + if end != nil { + year := end.Year() + endYear = &year + } + + _, err := tx.Exec( + "UPDATE performers SET career_start = ?, career_end = ? WHERE id = ?", + startYear, endYear, id, + ) + return err +} + +func (m *schema78Migrator) preserveAsCustomField(tx *sqlx.Tx, id int, value string) error { + // check if a career_length custom field already exists + var existing sql.NullString + err := tx.Get(&existing, "SELECT value FROM performer_custom_fields WHERE performer_id = ? 
AND field = 'career_length'", id) + if err == nil { + logger.Debugf("career_length custom field already exists for performer %d, skipping", id) + return nil + } + + _, err = tx.Exec( + "INSERT INTO performer_custom_fields (performer_id, field, value) VALUES (?, 'career_length', ?)", + id, value, + ) + return err +} + +func (m *schema78Migrator) dropCareerLength() error { + logger.Info("Dropping career_length column from performers table") + return m.execAll([]string{ + "ALTER TABLE performers DROP COLUMN career_length", + }) +} + +func init() { + sqlite.RegisterPostMigration(78, post78) +} diff --git a/pkg/sqlite/migrations/79_scene_custom_fields.up.sql b/pkg/sqlite/migrations/79_scene_custom_fields.up.sql new file mode 100644 index 000000000..a56b34e3a --- /dev/null +++ b/pkg/sqlite/migrations/79_scene_custom_fields.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE `scene_custom_fields` ( + `scene_id` integer NOT NULL, + `field` varchar(64) NOT NULL, + `value` BLOB NOT NULL, + PRIMARY KEY (`scene_id`, `field`), + foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE +); + +CREATE INDEX `index_scene_custom_fields_field_value` ON `scene_custom_fields` (`field`, `value`); \ No newline at end of file diff --git a/pkg/sqlite/migrations/80_studio_organized.up.sql b/pkg/sqlite/migrations/80_studio_organized.up.sql new file mode 100644 index 000000000..3aa9c4656 --- /dev/null +++ b/pkg/sqlite/migrations/80_studio_organized.up.sql @@ -0,0 +1 @@ +ALTER TABLE `studios` ADD COLUMN `organized` boolean not null default '0'; \ No newline at end of file diff --git a/pkg/sqlite/migrations/81_gallery_custom_fields.up.sql b/pkg/sqlite/migrations/81_gallery_custom_fields.up.sql new file mode 100644 index 000000000..89a6e4c05 --- /dev/null +++ b/pkg/sqlite/migrations/81_gallery_custom_fields.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE `gallery_custom_fields` ( + `gallery_id` integer NOT NULL, + `field` varchar(64) NOT NULL, + `value` BLOB NOT NULL, + PRIMARY KEY (`gallery_id`, `field`), + 
foreign key(`gallery_id`) references `galleries`(`id`) on delete CASCADE +); + +CREATE INDEX `index_gallery_custom_fields_field_value` ON `gallery_custom_fields` (`field`, `value`); diff --git a/pkg/sqlite/migrations/82_group_custom_fields.up.sql b/pkg/sqlite/migrations/82_group_custom_fields.up.sql new file mode 100644 index 000000000..c1f287fec --- /dev/null +++ b/pkg/sqlite/migrations/82_group_custom_fields.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE `group_custom_fields` ( + `group_id` integer NOT NULL, + `field` varchar(64) NOT NULL, + `value` BLOB NOT NULL, + PRIMARY KEY (`group_id`, `field`), + foreign key(`group_id`) references `groups`(`id`) on delete CASCADE +); + +CREATE INDEX `index_group_custom_fields_field_value` ON `group_custom_fields` (`field`, `value`); diff --git a/pkg/sqlite/migrations/83_image_custom_fields.up.sql b/pkg/sqlite/migrations/83_image_custom_fields.up.sql new file mode 100644 index 000000000..0aa3aa4d7 --- /dev/null +++ b/pkg/sqlite/migrations/83_image_custom_fields.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE `image_custom_fields` ( + `image_id` integer NOT NULL, + `field` varchar(64) NOT NULL, + `value` BLOB NOT NULL, + PRIMARY KEY (`image_id`, `field`), + foreign key(`image_id`) references `images`(`id`) on delete CASCADE +); + +CREATE INDEX `index_image_custom_fields_field_value` ON `image_custom_fields` (`field`, `value`); diff --git a/pkg/sqlite/migrations/84_folder_basename.up.sql b/pkg/sqlite/migrations/84_folder_basename.up.sql new file mode 100644 index 000000000..5cfd5c2d9 --- /dev/null +++ b/pkg/sqlite/migrations/84_folder_basename.up.sql @@ -0,0 +1,50 @@ +-- we cannot add basename column directly because we require it to be NOT NULL +-- recreate folders table with basename column +PRAGMA foreign_keys=OFF; + +CREATE TABLE `folders_new` ( + `id` integer not null primary key autoincrement, + `basename` varchar(255) NOT NULL, + `path` varchar(255) NOT NULL, + `parent_folder_id` integer, + `zip_file_id` integer REFERENCES `files`(`id`), + 
`mod_time` datetime not null, + `created_at` datetime not null, + `updated_at` datetime not null, + foreign key(`parent_folder_id`) references `folders`(`id`) on delete SET NULL +); + +-- copy data from old table to new table, setting basename to path temporarily +INSERT INTO `folders_new` ( + `id`, + `basename`, + `path`, + `parent_folder_id`, + `zip_file_id`, + `mod_time`, + `created_at`, + `updated_at` +) SELECT + `id`, + `path`, + `path`, + `parent_folder_id`, + `zip_file_id`, + `mod_time`, + `created_at`, + `updated_at` +FROM `folders`; + +DROP INDEX IF EXISTS `index_folders_on_parent_folder_id`; +DROP INDEX IF EXISTS `index_folders_on_path_unique`; +DROP INDEX IF EXISTS `index_folders_on_zip_file_id`; +DROP TABLE `folders`; + +ALTER TABLE `folders_new` RENAME TO `folders`; + +CREATE UNIQUE INDEX `index_folders_on_path_unique` on `folders` (`path`); +CREATE UNIQUE INDEX `index_folders_on_parent_folder_id_basename_unique` on `folders` (`parent_folder_id`, `basename`); +CREATE INDEX `index_folders_on_zip_file_id` on `folders` (`zip_file_id`) WHERE `zip_file_id` IS NOT NULL; +CREATE INDEX `index_folders_on_basename` on `folders` (`basename`); + +PRAGMA foreign_keys=ON; \ No newline at end of file diff --git a/pkg/sqlite/migrations/84_postmigrate.go b/pkg/sqlite/migrations/84_postmigrate.go new file mode 100644 index 000000000..3be0dd22e --- /dev/null +++ b/pkg/sqlite/migrations/84_postmigrate.go @@ -0,0 +1,385 @@ +package migrations + +import ( + "context" + "database/sql" + "errors" + "fmt" + "path/filepath" + "slices" + "time" + + "github.com/jmoiron/sqlx" + "github.com/stashapp/stash/internal/manager/config" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/sqlite" + "gopkg.in/guregu/null.v4" +) + +func post84(ctx context.Context, db *sqlx.DB) error { + logger.Info("Running post-migration for schema version 84") + + m := schema84Migrator{ + migrator: migrator{ + db: db, + }, + folderCache: make(map[string]folderInfo), + } + + rootPaths 
:= config.GetInstance().GetStashPaths().Paths() + + if err := m.createMissingFolderHierarchies(ctx, rootPaths); err != nil { + return fmt.Errorf("creating missing folder hierarchies: %w", err) + } + + if err := m.fixIncorrectParents(ctx, rootPaths); err != nil { + return fmt.Errorf("fixing incorrect parent folders: %w", err) + } + + if err := m.migrateFolders(ctx); err != nil { + return fmt.Errorf("migrating folders: %w", err) + } + + return nil +} + +type schema84Migrator struct { + migrator + folderCache map[string]folderInfo +} + +func (m *schema84Migrator) createMissingFolderHierarchies(ctx context.Context, rootPaths []string) error { + // before we set the basenames, we need to address any folders that are missing their + // parent folders. + const ( + limit = 1000 + logEvery = 10000 + ) + + lastID := 0 + count := 0 + logged := false + + for { + gotSome := false + + if err := m.withTxn(ctx, func(tx *sqlx.Tx) error { + query := "SELECT `folders`.`id`, `folders`.`path` FROM `folders` WHERE `folders`.`parent_folder_id` IS NULL " + + if lastID != 0 { + query += fmt.Sprintf("AND `folders`.`id` > %d ", lastID) + } + + query += fmt.Sprintf("ORDER BY `folders`.`id` LIMIT %d", limit) + + rows, err := tx.Query(query) + if err != nil { + return err + } + defer rows.Close() + + for rows.Next() { + // log once if we find any folders with missing parent folders + if !logged { + logger.Info("Migrating folders with missing parents...") + logged = true + } + + var id int + var p string + + err := rows.Scan(&id, &p) + if err != nil { + return err + } + + lastID = id + gotSome = true + count++ + + // don't try to create parent folders for root paths + if slices.Contains(rootPaths, p) { + continue + } + + parentDir := filepath.Dir(p) + if parentDir == p { + // this can happen if the path is something like "C:\", where the parent directory is the same as the current directory + continue + } + + parentID, err := m.getOrCreateFolderHierarchy(tx, parentDir, rootPaths) + if err != nil 
{ + return fmt.Errorf("error creating parent folder for folder %d %q: %w", id, p, err) + } + + if parentID == nil { + continue + } + + // now set the parent folder ID for the current folder + logger.Debugf("Migrating folder %d %q: setting parent folder ID to %d", id, p, *parentID) + + _, err = tx.Exec("UPDATE `folders` SET `parent_folder_id` = ? WHERE `id` = ?", *parentID, id) + if err != nil { + return fmt.Errorf("error setting parent folder for folder %d %q: %w", id, p, err) + } + } + + return rows.Err() + }); err != nil { + return err + } + + if !gotSome { + break + } + + if count%logEvery == 0 { + logger.Infof("Migrated %d folders", count) + } + } + + return nil +} + +func (m *schema84Migrator) findFolderByPath(tx *sqlx.Tx, path string) (*int, error) { + query := "SELECT `folders`.`id` FROM `folders` WHERE `folders`.`path` = ?" + + var id int + if err := tx.Get(&id, query, path); err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, nil + } + + return nil, err + } + + return &id, nil +} + +// this is a copy of the GetOrCreateFolderHierarchy function from pkg/file/folder.go, +// but modified to use low-level SQL queries instead of the models.FolderFinderCreator interface, to avoid +func (m *schema84Migrator) getOrCreateFolderHierarchy(tx *sqlx.Tx, path string, rootPaths []string) (*int, error) { + // get or create folder hierarchy + folderID, err := m.findFolderByPath(tx, path) + if err != nil { + return nil, err + } + + if folderID == nil { + var parentID *int + + if !slices.Contains(rootPaths, path) { + parentPath := filepath.Dir(path) + + // it's possible that the parent path is the same as the current path, if there are folders outside + // of the root paths. In that case, we should just return nil for the parent ID. + if parentPath == path { + return nil, nil + } + + parentID, err = m.getOrCreateFolderHierarchy(tx, parentPath, rootPaths) + if err != nil { + return nil, err + } + } + + logger.Debugf("%s doesn't exist. 
Creating new folder entry...", path) + + // we need to set basename to path, which will be addressed in the next step + const insertSQL = "INSERT INTO `folders` (`path`,`basename`,`parent_folder_id`,`mod_time`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?)" + + var parentFolderID null.Int + if parentID != nil { + parentFolderID = null.IntFrom(int64(*parentID)) + } + + now := time.Now() + result, err := tx.Exec(insertSQL, path, path, parentFolderID, time.Time{}, now, now) + if err != nil { + return nil, fmt.Errorf("creating folder %s: %w", path, err) + } + + id, err := result.LastInsertId() + if err != nil { + return nil, fmt.Errorf("creating folder %s: %w", path, err) + } + + idInt := int(id) + folderID = &idInt + } + + return folderID, nil +} + +func (m *schema84Migrator) fixIncorrectParents(ctx context.Context, rootPaths []string) error { + const ( + limit = 1000 + logEvery = 10000 + ) + + lastID := 0 + count := 0 + fixed := 0 + logged := false + + for { + gotSome := false + + if err := m.withTxn(ctx, func(tx *sqlx.Tx) error { + query := "SELECT f.id, f.path, f.parent_folder_id, pf.path AS parent_path " + + "FROM folders f " + + "JOIN folders pf ON f.parent_folder_id = pf.id " + + if lastID != 0 { + query += fmt.Sprintf("WHERE f.id > %d ", lastID) + } + + query += fmt.Sprintf("ORDER BY f.id LIMIT %d", limit) + + rows, err := tx.Query(query) + if err != nil { + return err + } + defer rows.Close() + + for rows.Next() { + var id int + var p string + var parentFolderID int + var parentPath string + + err := rows.Scan(&id, &p, &parentFolderID, &parentPath) + if err != nil { + return err + } + + lastID = id + gotSome = true + count++ + + expectedParent := filepath.Dir(p) + if expectedParent == parentPath { + continue + } + + if !logged { + logger.Info("Fixing folders with incorrect parent folder assignments...") + logged = true + } + + correctParentID, err := m.getOrCreateFolderHierarchy(tx, expectedParent, rootPaths) + if err != nil { + return fmt.Errorf("error 
getting/creating correct parent for folder %d %q: %w", id, p, err) + } + + if correctParentID == nil { + continue + } + + logger.Debugf("Fixing folder %d %q: changing parent_folder_id from %d to %d", id, p, parentFolderID, *correctParentID) + + _, err = tx.Exec("UPDATE `folders` SET `parent_folder_id` = ? WHERE `id` = ?", *correctParentID, id) + if err != nil { + return fmt.Errorf("error fixing parent folder for folder %d %q: %w", id, p, err) + } + + fixed++ + } + + return rows.Err() + }); err != nil { + return err + } + + if !gotSome { + break + } + + if count%logEvery == 0 { + logger.Infof("Checked %d folders", count) + } + } + + if fixed > 0 { + logger.Infof("Fixed %d folders with incorrect parent assignments", fixed) + } + + return nil +} + +func (m *schema84Migrator) migrateFolders(ctx context.Context) error { + const ( + limit = 1000 + logEvery = 10000 + ) + + lastID := 0 + count := 0 + logged := false + + for { + gotSome := false + + if err := m.withTxn(ctx, func(tx *sqlx.Tx) error { + query := "SELECT `folders`.`id`, `folders`.`path` FROM `folders` " + + if lastID != 0 { + query += fmt.Sprintf("WHERE `folders`.`id` > %d ", lastID) + } + + query += fmt.Sprintf("ORDER BY `folders`.`id` LIMIT %d", limit) + + rows, err := tx.Query(query) + if err != nil { + return err + } + defer rows.Close() + + for rows.Next() { + if !logged { + logger.Infof("Migrating folders to set basenames...") + logged = true + } + + var id int + var p string + + err := rows.Scan(&id, &p) + if err != nil { + return err + } + + lastID = id + gotSome = true + count++ + + basename := filepath.Base(p) + logger.Debugf("Migrating folder %d %q: setting basename to %q", id, p, basename) + _, err = tx.Exec("UPDATE `folders` SET `basename` = ? 
WHERE `id` = ?", basename, id) + if err != nil { + return fmt.Errorf("error migrating folder %d %q: %w", id, p, err) + } + } + + return rows.Err() + }); err != nil { + return err + } + + if !gotSome { + break + } + + if count%logEvery == 0 { + logger.Infof("Migrated %d folders", count) + } + } + + return nil +} + +func init() { + sqlite.RegisterPostMigration(84, post84) +} diff --git a/pkg/sqlite/migrations/85_performer_career_dates.up.sql b/pkg/sqlite/migrations/85_performer_career_dates.up.sql new file mode 100644 index 000000000..1ce1cc97e --- /dev/null +++ b/pkg/sqlite/migrations/85_performer_career_dates.up.sql @@ -0,0 +1,112 @@ +-- have to change the type of the career start/end columns so need to recreate the table +PRAGMA foreign_keys=OFF; + +CREATE TABLE IF NOT EXISTS "performers_new" ( + `id` integer not null primary key autoincrement, + `name` varchar(255) not null, + `disambiguation` varchar(255), + `gender` varchar(20), + `birthdate` date, + `birthdate_precision` TINYINT, + `ethnicity` varchar(255), + `country` varchar(255), + `eye_color` varchar(255), + `height` int, + `measurements` varchar(255), + `fake_tits` varchar(255), + `tattoos` varchar(255), + `piercings` varchar(255), + `favorite` boolean not null default '0', + `created_at` datetime not null, + `updated_at` datetime not null, + `details` text, + `death_date` date, + `death_date_precision` TINYINT, + `hair_color` varchar(255), + `weight` integer, + `rating` tinyint, + `ignore_auto_tag` boolean not null default '0', + `penis_length` float, + `circumcised` varchar[10], + `career_start` date, + `career_start_precision` TINYINT, + `career_end` date, + `career_end_precision` TINYINT, + `image_blob` varchar(255) REFERENCES `blobs`(`checksum`) +); + +INSERT INTO `performers_new` ( + `id`, + `name`, + `disambiguation`, + `gender`, + `birthdate`, + `ethnicity`, + `country`, + `eye_color`, + `height`, + `measurements`, + `fake_tits`, + `tattoos`, + `piercings`, + `favorite`, + `created_at`, + 
`updated_at`, + `details`, + `death_date`, + `hair_color`, + `weight`, + `rating`, + `ignore_auto_tag`, + `image_blob`, + `penis_length`, + `circumcised`, + `birthdate_precision`, + `death_date_precision`, + `career_start`, + `career_end` +) SELECT + `id`, + `name`, + `disambiguation`, + `gender`, + `birthdate`, + `ethnicity`, + `country`, + `eye_color`, + `height`, + `measurements`, + `fake_tits`, + `tattoos`, + `piercings`, + `favorite`, + `created_at`, + `updated_at`, + `details`, + `death_date`, + `hair_color`, + `weight`, + `rating`, + `ignore_auto_tag`, + `image_blob`, + `penis_length`, + `circumcised`, + `birthdate_precision`, + `death_date_precision`, + CAST(`career_start` AS TEXT), + CAST(`career_end` AS TEXT) +FROM `performers`; + +DROP INDEX IF EXISTS `performers_name_disambiguation_unique`; +DROP INDEX IF EXISTS `performers_name_unique`; +DROP TABLE `performers`; + +ALTER TABLE `performers_new` RENAME TO `performers`; + +UPDATE "performers" SET `career_start` = CONCAT(`career_start`, '-01-01'), "career_start_precision" = 2 WHERE "career_start" IS NOT NULL; +UPDATE "performers" SET `career_end` = CONCAT(`career_end`, '-01-01'), "career_end_precision" = 2 WHERE "career_end" IS NOT NULL; + +CREATE UNIQUE INDEX `performers_name_disambiguation_unique` on `performers` (`name`, `disambiguation`) WHERE `disambiguation` IS NOT NULL; +CREATE UNIQUE INDEX `performers_name_unique` on `performers` (`name`) WHERE `disambiguation` IS NULL; + +PRAGMA foreign_keys=ON; \ No newline at end of file diff --git a/pkg/sqlite/performer.go b/pkg/sqlite/performer.go index bf6b780b2..aacd9172f 100644 --- a/pkg/sqlite/performer.go +++ b/pkg/sqlite/performer.go @@ -30,26 +30,29 @@ const ( ) type performerRow struct { - ID int `db:"id" goqu:"skipinsert"` - Name null.String `db:"name"` // TODO: make schema non-nullable - Disambigation zero.String `db:"disambiguation"` - Gender zero.String `db:"gender"` - Birthdate NullDate `db:"birthdate"` - BirthdatePrecision null.Int 
`db:"birthdate_precision"` - Ethnicity zero.String `db:"ethnicity"` - Country zero.String `db:"country"` - EyeColor zero.String `db:"eye_color"` - Height null.Int `db:"height"` - Measurements zero.String `db:"measurements"` - FakeTits zero.String `db:"fake_tits"` - PenisLength null.Float `db:"penis_length"` - Circumcised zero.String `db:"circumcised"` - CareerLength zero.String `db:"career_length"` - Tattoos zero.String `db:"tattoos"` - Piercings zero.String `db:"piercings"` - Favorite bool `db:"favorite"` - CreatedAt Timestamp `db:"created_at"` - UpdatedAt Timestamp `db:"updated_at"` + ID int `db:"id" goqu:"skipinsert"` + Name null.String `db:"name"` // TODO: make schema non-nullable + Disambigation zero.String `db:"disambiguation"` + Gender zero.String `db:"gender"` + Birthdate NullDate `db:"birthdate"` + BirthdatePrecision null.Int `db:"birthdate_precision"` + Ethnicity zero.String `db:"ethnicity"` + Country zero.String `db:"country"` + EyeColor zero.String `db:"eye_color"` + Height null.Int `db:"height"` + Measurements zero.String `db:"measurements"` + FakeTits zero.String `db:"fake_tits"` + PenisLength null.Float `db:"penis_length"` + Circumcised zero.String `db:"circumcised"` + CareerStart NullDate `db:"career_start"` + CareerStartPrecision null.Int `db:"career_start_precision"` + CareerEnd NullDate `db:"career_end"` + CareerEndPrecision null.Int `db:"career_end_precision"` + Tattoos zero.String `db:"tattoos"` + Piercings zero.String `db:"piercings"` + Favorite bool `db:"favorite"` + CreatedAt Timestamp `db:"created_at"` + UpdatedAt Timestamp `db:"updated_at"` // expressed as 1-100 Rating null.Int `db:"rating"` Details zero.String `db:"details"` @@ -82,7 +85,10 @@ func (r *performerRow) fromPerformer(o models.Performer) { if o.Circumcised != nil && o.Circumcised.IsValid() { r.Circumcised = zero.StringFrom(o.Circumcised.String()) } - r.CareerLength = zero.StringFrom(o.CareerLength) + r.CareerStart = NullDateFromDatePtr(o.CareerStart) + r.CareerStartPrecision = 
datePrecisionFromDatePtr(o.CareerStart) + r.CareerEnd = NullDateFromDatePtr(o.CareerEnd) + r.CareerEndPrecision = datePrecisionFromDatePtr(o.CareerEnd) r.Tattoos = zero.StringFrom(o.Tattoos) r.Piercings = zero.StringFrom(o.Piercings) r.Favorite = o.Favorite @@ -110,7 +116,8 @@ func (r *performerRow) resolve() *models.Performer { Measurements: r.Measurements.String, FakeTits: r.FakeTits.String, PenisLength: nullFloatPtr(r.PenisLength), - CareerLength: r.CareerLength.String, + CareerStart: r.CareerStart.DatePtr(r.CareerStartPrecision), + CareerEnd: r.CareerEnd.DatePtr(r.CareerEndPrecision), Tattoos: r.Tattoos.String, Piercings: r.Piercings.String, Favorite: r.Favorite, @@ -131,7 +138,7 @@ func (r *performerRow) resolve() *models.Performer { } if r.Circumcised.ValueOrZero() != "" { - v := models.CircumisedEnum(r.Circumcised.String) + v := models.CircumcisedEnum(r.Circumcised.String) ret.Circumcised = &v } @@ -155,7 +162,8 @@ func (r *performerRowRecord) fromPartial(o models.PerformerPartial) { r.setNullString("fake_tits", o.FakeTits) r.setNullFloat64("penis_length", o.PenisLength) r.setNullString("circumcised", o.Circumcised) - r.setNullString("career_length", o.CareerLength) + r.setNullDate("career_start", "career_start_precision", o.CareerStart) + r.setNullDate("career_end", "career_end_precision", o.CareerEnd) r.setNullString("tattoos", o.Tattoos) r.setNullString("piercings", o.Piercings) r.setBool("favorite", o.Favorite) @@ -706,6 +714,28 @@ func (qb *PerformerStore) sortByLastOAt(direction string) string { return " ORDER BY (" + selectPerformerLastOAtSQL + ") " + direction } +// used for sorting on performer latest scene +var selectPerformerLatestSceneSQL = utils.StrFormat( + "SELECT MAX(date) FROM ("+ + "SELECT {date} FROM {performers_scenes} s "+ + "LEFT JOIN {scenes} ON {scenes}.id = s.{scene_id} "+ + "WHERE s.{performer_id} = {performers}.id"+ + ")", + map[string]interface{}{ + "performer_id": performerIDColumn, + "performers": performerTable, + 
"performers_scenes": performersScenesTable, + "scenes": sceneTable, + "scene_id": sceneIDColumn, + "date": sceneDateColumn, + }, +) + +func (qb *PerformerStore) sortByLatestScene(direction string) string { + // need to get the latest date from scenes + return " ORDER BY (" + selectPerformerLatestSceneSQL + ") " + direction +} + // used for sorting on performer last view_date var selectPerformerLastPlayedAtSQL = utils.StrFormat( "SELECT MAX(view_date) FROM ("+ @@ -752,9 +782,32 @@ func (qb *PerformerStore) sortByScenesDuration(direction string) string { return " ORDER BY (" + selectPerformerScenesDurationSQL + ") " + direction } +// used for sorting by total scene file size +var selectPerformerScenesSizeSQL = utils.StrFormat( + "SELECT COALESCE(SUM({files}.size), 0) FROM {performers_scenes} s "+ + "LEFT JOIN {scenes} ON {scenes}.id = s.{scene_id} "+ + "LEFT JOIN {scenes_files} ON {scenes_files}.{scene_id} = {scenes}.id "+ + "LEFT JOIN {files} ON {files}.id = {scenes_files}.file_id "+ + "WHERE s.{performer_id} = {performers}.id", + map[string]interface{}{ + "performer_id": performerIDColumn, + "performers": performerTable, + "performers_scenes": performersScenesTable, + "scenes": sceneTable, + "scene_id": sceneIDColumn, + "scenes_files": scenesFilesTable, + "files": fileTable, + }, +) + +func (qb *PerformerStore) sortByScenesSize(direction string) string { + return " ORDER BY (" + selectPerformerScenesSizeSQL + ") " + direction +} + var performerSortOptions = sortOptions{ "birthdate", - "career_length", + "career_start", + "career_end", "created_at", "galleries_count", "height", @@ -762,6 +815,7 @@ var performerSortOptions = sortOptions{ "images_count", "last_o_at", "last_played_at", + "latest_scene", "measurements", "name", "o_counter", @@ -771,6 +825,7 @@ var performerSortOptions = sortOptions{ "rating", "scenes_count", "scenes_duration", + "scenes_size", "tag_count", "updated_at", "weight", @@ -800,6 +855,8 @@ func (qb *PerformerStore) getPerformerSort(findFilter 
*models.FindFilterType) (s sortQuery += getCountSort(performerTable, performersScenesTable, performerIDColumn, direction) case "scenes_duration": sortQuery += qb.sortByScenesDuration(direction) + case "scenes_size": + sortQuery += qb.sortByScenesSize(direction) case "images_count": sortQuery += getCountSort(performerTable, performersImagesTable, performerIDColumn, direction) case "galleries_count": @@ -812,6 +869,8 @@ func (qb *PerformerStore) getPerformerSort(findFilter *models.FindFilterType) (s sortQuery += qb.sortByLastPlayedAt(direction) case "last_o_at": sortQuery += qb.sortByLastOAt(direction) + case "latest_scene": + sortQuery += qb.sortByLatestScene(direction) default: sortQuery += getSort(sort, direction, "performers") } @@ -893,3 +952,58 @@ func (qb *PerformerStore) FindByStashIDStatus(ctx context.Context, hasStashID bo return ret, nil } + +func (qb *PerformerStore) Merge(ctx context.Context, source []int, destination int) error { + if len(source) == 0 { + return nil + } + + inBinding := getInBinding(len(source)) + + args := []interface{}{destination} + srcArgs := make([]interface{}, len(source)) + for i, id := range source { + if id == destination { + return errors.New("cannot merge where source == destination") + } + srcArgs[i] = id + } + + args = append(args, srcArgs...) + + performerTables := map[string]string{ + performersScenesTable: sceneIDColumn, + performersGalleriesTable: galleryIDColumn, + performersImagesTable: imageIDColumn, + performersTagsTable: tagIDColumn, + } + + args = append(args, destination) + + // for each table, update source performer ids to destination performer id, ignoring duplicates + for table, idColumn := range performerTables { + _, err := dbWrapper.Exec(ctx, `UPDATE OR IGNORE `+table+` +SET performer_id = ? 
+WHERE performer_id IN `+inBinding+` +AND NOT EXISTS(SELECT 1 FROM `+table+` o WHERE o.`+idColumn+` = `+table+`.`+idColumn+` AND o.performer_id = ?)`, + args..., + ) + if err != nil { + return err + } + + // delete source performer ids from the table where they couldn't be set + if _, err := dbWrapper.Exec(ctx, `DELETE FROM `+table+` WHERE performer_id IN `+inBinding, srcArgs...); err != nil { + return err + } + } + + for _, id := range source { + err := qb.Destroy(ctx, id) + if err != nil { + return err + } + } + + return nil +} diff --git a/pkg/sqlite/performer_filter.go b/pkg/sqlite/performer_filter.go index 11d3138bc..4336e998c 100644 --- a/pkg/sqlite/performer_filter.go +++ b/pkg/sqlite/performer_filter.go @@ -47,6 +47,51 @@ func (qb *performerFilterHandler) validate() error { } } + // if legacy career length filter used, ensure only supported modifiers are used and value is valid + if filter.CareerLength != nil { + careerLength := filter.CareerLength + switch careerLength.Modifier { + case models.CriterionModifierEquals: + start, end, err := models.ParseYearRangeString(careerLength.Value) + if err != nil { + return fmt.Errorf("invalid career length value: %s", careerLength.Value) + } + // ensure career start/end is not set + if start != nil && filter.CareerStart != nil { + return fmt.Errorf("cannot use legacy CareerLength filter with CareerStart filter") + } + if end != nil && filter.CareerEnd != nil { + return fmt.Errorf("cannot use legacy CareerLength filter with CareerEnd filter") + } + case models.CriterionModifierIsNull, models.CriterionModifierNotNull: + // valid modifiers, no value parsing needed + default: + return fmt.Errorf("invalid career length modifier: %s", careerLength.Modifier) + } + } + + // validate date formats + if filter.Birthdate != nil && filter.Birthdate.Value != "" { + if _, err := models.ParseDate(filter.Birthdate.Value); err != nil { + return fmt.Errorf("invalid birthdate value: %s", filter.Birthdate.Value) + } + } + if 
filter.DeathDate != nil && filter.DeathDate.Value != "" { + if _, err := models.ParseDate(filter.DeathDate.Value); err != nil { + return fmt.Errorf("invalid death date value: %s", filter.DeathDate.Value) + } + } + if filter.CareerStart != nil && filter.CareerStart.Value != "" { + if _, err := models.ParseDate(filter.CareerStart.Value); err != nil { + return fmt.Errorf("invalid career start value: %s", filter.CareerStart.Value) + } + } + if filter.CareerEnd != nil && filter.CareerEnd.Value != "" { + if _, err := models.ParseDate(filter.CareerEnd.Value); err != nil { + return fmt.Errorf("invalid career end value: %s", filter.CareerEnd.Value) + } + } + return nil } @@ -71,10 +116,13 @@ func (qb *performerFilterHandler) handle(ctx context.Context, f *filterBuilder) } func (qb *performerFilterHandler) criterionHandler() criterionHandler { - filter := qb.performerFilter + // make a copy of the filter to modify with legacy conversions without affecting original filter used for subfilters + filter := *qb.performerFilter const tableName = performerTable heightCmCrit := filter.HeightCm + convertLegacyCareerLengthFilter(&filter) + return compoundHandler{ stringCriterionHandler(filter.Name, tableName+".name"), stringCriterionHandler(filter.Disambiguation, tableName+".disambiguation"), @@ -129,7 +177,9 @@ func (qb *performerFilterHandler) criterionHandler() criterionHandler { } }), - stringCriterionHandler(filter.CareerLength, tableName+".career_length"), + // CareerLength filter is deprecated and non-functional (column removed in schema 78) + &dateCriterionHandler{filter.CareerStart, tableName + ".career_start", nil}, + &dateCriterionHandler{filter.CareerEnd, tableName + ".career_end", nil}, stringCriterionHandler(filter.Tattoos, tableName+".tattoos"), stringCriterionHandler(filter.Piercings, tableName+".piercings"), intCriterionHandler(filter.Rating100, tableName+".rating", nil), @@ -148,6 +198,12 @@ func (qb *performerFilterHandler) criterionHandler() criterionHandler { 
stashIDTableAs: "performer_stash_ids", parentIDCol: "performers.id", }, + &stashIDsCriterionHandler{ + c: filter.StashIDsEndpoint, + stashIDRepository: &performerRepository.stashIDs, + stashIDTableAs: "performer_stash_ids", + parentIDCol: "performers.id", + }, qb.aliasCriterionHandler(filter.Aliases), @@ -161,6 +217,7 @@ func (qb *performerFilterHandler) criterionHandler() criterionHandler { qb.tagCountCriterionHandler(filter.TagCount), qb.sceneCountCriterionHandler(filter.SceneCount), + qb.markerCountCriterionHandler(filter.MarkerCount), qb.imageCountCriterionHandler(filter.ImageCount), qb.galleryCountCriterionHandler(filter.GalleryCount), qb.playCounterCriterionHandler(filter.PlayCount), @@ -170,6 +227,16 @@ func (qb *performerFilterHandler) criterionHandler() criterionHandler { ×tampCriterionHandler{filter.CreatedAt, tableName + ".created_at", nil}, ×tampCriterionHandler{filter.UpdatedAt, tableName + ".updated_at", nil}, + &relatedFilterHandler{ + relatedIDCol: "scene_markers.id", + relatedRepo: sceneMarkerRepository.repository, + relatedHandler: &sceneMarkerFilterHandler{filter.MarkersFilter}, + joinFn: func(f *filterBuilder) { + performerRepository.scenes.innerJoin(f, "", "performers.id") + f.addInnerJoin(sceneMarkerTable, "", "scene_markers.scene_id = performers_scenes.scene_id") + }, + }, + &relatedFilterHandler{ relatedIDCol: "performers_scenes.scene_id", relatedRepo: sceneRepository.repository, @@ -215,6 +282,51 @@ func (qb *performerFilterHandler) criterionHandler() criterionHandler { } } +func convertLegacyCareerLengthFilter(filter *models.PerformerFilterType) { + // convert legacy career length filter to career start/end filters + if filter.CareerLength != nil { + careerLength := filter.CareerLength + switch careerLength.Modifier { + case models.CriterionModifierEquals: + start, end, _ := models.ParseYearRangeString(careerLength.Value) + if start != nil { + start = &models.Date{ + Time: start.AddDate(0, 0, -1), // make exclusive + Precision: 
models.DatePrecisionDay, + } + filter.CareerStart = &models.DateCriterionInput{ + Value: start.String(), + Modifier: models.CriterionModifierGreaterThan, + } + } + if end != nil { + end = &models.Date{ + Time: end.AddDate(1, 0, 0), // make exclusive + Precision: models.DatePrecisionDay, + } + filter.CareerEnd = &models.DateCriterionInput{ + Value: end.String(), // plus one to make it exclusive + Modifier: models.CriterionModifierLessThan, + } + } + case models.CriterionModifierIsNull: + filter.CareerStart = &models.DateCriterionInput{ + Modifier: models.CriterionModifierIsNull, + } + filter.CareerEnd = &models.DateCriterionInput{ + Modifier: models.CriterionModifierIsNull, + } + case models.CriterionModifierNotNull: + filter.CareerStart = &models.DateCriterionInput{ + Modifier: models.CriterionModifierNotNull, + } + filter.CareerEnd = &models.DateCriterionInput{ + Modifier: models.CriterionModifierNotNull, + } + } + } +} + // TODO - we need to provide a whitelist of possible values func (qb *performerFilterHandler) performerIsMissingCriterionHandler(isMissing *string) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { @@ -234,7 +346,19 @@ func (qb *performerFilterHandler) performerIsMissingCriterionHandler(isMissing * case "aliases": performersAliasesTableMgr.join(f, "", "performers.id") f.addWhere("performer_aliases.alias IS NULL") + case "tags": + f.addLeftJoin(performersTagsTable, "tags_join", "tags_join.performer_id = performers.id") + f.addWhere("tags_join.performer_id IS NULL") default: + if err := validateIsMissing(*isMissing, []string{ + "disambiguation", "gender", "birthdate", "death_date", + "ethnicity", "country", "hair_color", "eye_color", "height", "weight", + "measurements", "fake_tits", "penis_length", "circumcised", + "career_start", "career_end", "tattoos", "piercings", "details", "rating", + }); err != nil { + f.setError(err) + return + } f.addWhere("(performers." + *isMissing + " IS NULL OR TRIM(performers." 
+ *isMissing + ") = '')") } } @@ -316,6 +440,22 @@ func (qb *performerFilterHandler) sceneCountCriterionHandler(count *models.IntCr return h.handler(count) } +func (qb *performerFilterHandler) markerCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if count != nil { + performerRepository.scenes.innerJoin(f, "", "performers.id") + + const query = `(SELECT COUNT(*) FROM scene_markers + INNER JOIN scenes ON scene_markers.scene_id = scenes.id + INNER JOIN performers_scenes ON performers_scenes.scene_id = scenes.id + WHERE performers_scenes.performer_id = performers.id)` + + clause, args := getIntCriterionWhereClause(query, *count) + f.addWhere(clause, args...) + } + } +} + func (qb *performerFilterHandler) imageCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc { h := countCriterionHandlerBuilder{ primaryTable: performerTable, diff --git a/pkg/sqlite/performer_test.go b/pkg/sqlite/performer_test.go index 190d80e31..ebe1b9eab 100644 --- a/pkg/sqlite/performer_test.go +++ b/pkg/sqlite/performer_test.go @@ -65,8 +65,9 @@ func Test_PerformerStore_Create(t *testing.T) { measurements = "measurements" fakeTits = "fakeTits" penisLength = 1.23 - circumcised = models.CircumisedEnumCut - careerLength = "careerLength" + circumcised = models.CircumcisedEnumCut + careerStart = models.DateFromYear(2005) + careerEnd = models.DateFromYear(2015) tattoos = "tattoos" piercings = "piercings" aliases = []string{"alias1", "alias2"} @@ -107,7 +108,8 @@ func Test_PerformerStore_Create(t *testing.T) { FakeTits: fakeTits, PenisLength: &penisLength, Circumcised: &circumcised, - CareerLength: careerLength, + CareerStart: &careerStart, + CareerEnd: &careerEnd, Tattoos: tattoos, Piercings: piercings, Favorite: favorite, @@ -204,8 +206,6 @@ func Test_PerformerStore_Create(t *testing.T) { } assert.Equal(tt.newObject.CustomFields, cf) - - return }) } } @@ -228,8 +228,9 @@ func 
Test_PerformerStore_Update(t *testing.T) { measurements = "measurements" fakeTits = "fakeTits" penisLength = 1.23 - circumcised = models.CircumisedEnumCut - careerLength = "careerLength" + circumcised = models.CircumcisedEnumCut + careerStart = models.DateFromYear(2005) + careerEnd = models.DateFromYear(2015) tattoos = "tattoos" piercings = "piercings" aliases = []string{"alias1", "alias2"} @@ -271,7 +272,8 @@ func Test_PerformerStore_Update(t *testing.T) { FakeTits: fakeTits, PenisLength: &penisLength, Circumcised: &circumcised, - CareerLength: careerLength, + CareerStart: &careerStart, + CareerEnd: &careerEnd, Tattoos: tattoos, Piercings: piercings, Favorite: favorite, @@ -422,7 +424,8 @@ func clearPerformerPartial() models.PerformerPartial { FakeTits: nullString, PenisLength: nullFloat, Circumcised: nullString, - CareerLength: nullString, + CareerStart: nullDate, + CareerEnd: nullDate, Tattoos: nullString, Piercings: nullString, Aliases: &models.UpdateStrings{Mode: models.RelationshipUpdateModeSet}, @@ -454,8 +457,9 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) { measurements = "measurements" fakeTits = "fakeTits" penisLength = 1.23 - circumcised = models.CircumisedEnumCut - careerLength = "careerLength" + circumcised = models.CircumcisedEnumCut + careerStart = models.DateFromYear(2005) + careerEnd = models.DateFromYear(2015) tattoos = "tattoos" piercings = "piercings" aliases = []string{"alias1", "alias2"} @@ -501,7 +505,8 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) { FakeTits: models.NewOptionalString(fakeTits), PenisLength: models.NewOptionalFloat64(penisLength), Circumcised: models.NewOptionalString(circumcised.String()), - CareerLength: models.NewOptionalString(careerLength), + CareerStart: models.NewOptionalDate(careerStart), + CareerEnd: models.NewOptionalDate(careerEnd), Tattoos: models.NewOptionalString(tattoos), Piercings: models.NewOptionalString(piercings), Aliases: &models.UpdateStrings{ @@ -552,7 +557,8 @@ func 
Test_PerformerStore_UpdatePartial(t *testing.T) { FakeTits: fakeTits, PenisLength: &penisLength, Circumcised: &circumcised, - CareerLength: careerLength, + CareerStart: &careerStart, + CareerEnd: &careerEnd, Tattoos: tattoos, Piercings: piercings, Aliases: models.NewRelatedStrings(aliases), @@ -1069,6 +1075,8 @@ func TestPerformerQuery(t *testing.T) { var ( endpoint = performerStashID(performerIdxWithGallery).Endpoint stashID = performerStashID(performerIdxWithGallery).StashID + stashID2 = performerStashID(performerIdx1WithGallery).StashID + stashIDs = []*string{&stashID, &stashID2} ) tests := []struct { @@ -1133,12 +1141,66 @@ func TestPerformerQuery(t *testing.T) { nil, false, }, + { + "stash ids with endpoint", + nil, + &models.PerformerFilterType{ + StashIDsEndpoint: &models.StashIDsCriterionInput{ + Endpoint: &endpoint, + StashIDs: stashIDs, + Modifier: models.CriterionModifierEquals, + }, + }, + []int{performerIdxWithGallery, performerIdx1WithGallery}, + nil, + false, + }, + { + "exclude stash ids with endpoint", + nil, + &models.PerformerFilterType{ + StashIDsEndpoint: &models.StashIDsCriterionInput{ + Endpoint: &endpoint, + StashIDs: stashIDs, + Modifier: models.CriterionModifierNotEquals, + }, + }, + nil, + []int{performerIdxWithGallery, performerIdx1WithGallery}, + false, + }, + { + "null stash ids with endpoint", + nil, + &models.PerformerFilterType{ + StashIDsEndpoint: &models.StashIDsCriterionInput{ + Endpoint: &endpoint, + Modifier: models.CriterionModifierIsNull, + }, + }, + nil, + []int{performerIdxWithGallery, performerIdx1WithGallery}, + false, + }, + { + "not null stash ids with endpoint", + nil, + &models.PerformerFilterType{ + StashIDsEndpoint: &models.StashIDsCriterionInput{ + Endpoint: &endpoint, + Modifier: models.CriterionModifierNotNull, + }, + }, + []int{performerIdxWithGallery, performerIdx1WithGallery}, + nil, + false, + }, { "circumcised (cut)", nil, &models.PerformerFilterType{ Circumcised: &models.CircumcisionCriterionInput{ - Value: 
[]models.CircumisedEnum{models.CircumisedEnumCut}, + Value: []models.CircumcisedEnum{models.CircumcisedEnumCut}, Modifier: models.CriterionModifierIncludes, }, }, @@ -1151,7 +1213,7 @@ func TestPerformerQuery(t *testing.T) { nil, &models.PerformerFilterType{ Circumcised: &models.CircumcisionCriterionInput{ - Value: []models.CircumisedEnum{models.CircumisedEnumCut}, + Value: []models.CircumcisedEnum{models.CircumcisedEnumCut}, Modifier: models.CriterionModifierExcludes, }, }, @@ -1710,30 +1772,117 @@ func verifyPerformerAge(t *testing.T, ageCriterion models.IntCriterionInput) { }) } -func TestPerformerQueryCareerLength(t *testing.T) { - const value = "2005" - careerLengthCriterion := models.StringCriterionInput{ +func TestPerformerQueryLegacyCareerLength(t *testing.T) { + const value = "2002 - 2012" + + tests := []struct { + name string + c models.StringCriterionInput + careerStartCrit *models.DateCriterionInput + careerEndCrit *models.DateCriterionInput + err bool + }{ + { + name: "valid format", + c: models.StringCriterionInput{ + Value: value, + Modifier: models.CriterionModifierEquals, + }, + careerStartCrit: &models.DateCriterionInput{ + Value: "2001-12-31", + Modifier: models.CriterionModifierGreaterThan, + }, + careerEndCrit: &models.DateCriterionInput{ + Value: "2013-01-01", + Modifier: models.CriterionModifierLessThan, + }, + err: false, + }, + { + name: "invalid format", + c: models.StringCriterionInput{ + Value: "invalid format", + Modifier: models.CriterionModifierEquals, + }, + err: true, + }, + { + name: "is null", + c: models.StringCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + careerStartCrit: &models.DateCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + careerEndCrit: &models.DateCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + err: false, + }, + { + name: "not null", + c: models.StringCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + careerStartCrit: 
&models.DateCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + careerEndCrit: &models.DateCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + err: false, + }, + { + name: "invalid modifier", + c: models.StringCriterionInput{ + Value: value, + Modifier: models.CriterionModifierMatchesRegex, + }, + err: true, + }, + } + + qb := db.Performer + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + performers, _, err := qb.Query(ctx, &models.PerformerFilterType{ + CareerLength: &tt.c, + }, nil) + + if err != nil && !tt.err { + t.Errorf("Error querying performer: %s", err.Error()) + } else if err == nil && tt.err { + t.Errorf("Expected error but got none") + } + + if err != nil || tt.err { + return + } + + if len(performers) == 0 { + t.Errorf("Expected to find performers but found none") + } + + for _, performer := range performers { + verifyDatePtr(t, performer.CareerStart, *tt.careerStartCrit) + verifyDatePtr(t, performer.CareerEnd, *tt.careerEndCrit) + } + }) + } +} + +func TestPerformerQueryCareerStart(t *testing.T) { + const value = "2002" + criterion := models.DateCriterionInput{ Value: value, Modifier: models.CriterionModifierEquals, } - verifyPerformerCareerLength(t, careerLengthCriterion) - - careerLengthCriterion.Modifier = models.CriterionModifierNotEquals - verifyPerformerCareerLength(t, careerLengthCriterion) - - careerLengthCriterion.Modifier = models.CriterionModifierMatchesRegex - verifyPerformerCareerLength(t, careerLengthCriterion) - - careerLengthCriterion.Modifier = models.CriterionModifierNotMatchesRegex - verifyPerformerCareerLength(t, careerLengthCriterion) -} - -func verifyPerformerCareerLength(t *testing.T, criterion models.StringCriterionInput) { withTxn(func(ctx context.Context) error { qb := db.Performer performerFilter := models.PerformerFilterType{ - CareerLength: &criterion, + CareerStart: &criterion, } performers, _, err := qb.Query(ctx, &performerFilter, 
nil) @@ -1742,8 +1891,33 @@ func verifyPerformerCareerLength(t *testing.T, criterion models.StringCriterionI } for _, performer := range performers { - cl := performer.CareerLength - verifyString(t, cl, criterion) + verifyDatePtr(t, performer.CareerStart, criterion) + } + + return nil + }) +} + +func TestPerformerQueryCareerEnd(t *testing.T) { + const value = "2012" + criterion := models.DateCriterionInput{ + Value: value, + Modifier: models.CriterionModifierEquals, + } + + withTxn(func(ctx context.Context) error { + qb := db.Performer + performerFilter := models.PerformerFilterType{ + CareerEnd: &criterion, + } + + performers, _, err := qb.Query(ctx, &performerFilter, nil) + if err != nil { + t.Errorf("Error querying performer: %s", err.Error()) + } + + for _, performer := range performers { + verifyDatePtr(t, performer.CareerEnd, criterion) } return nil @@ -2524,6 +2698,146 @@ func TestPerformerStore_FindByStashIDStatus(t *testing.T) { } } +func TestPerformerMerge(t *testing.T) { + tests := []struct { + name string + srcIdxs []int + destIdx int + wantErr bool + }{ + { + name: "merge into self", + srcIdxs: []int{performerIdx1WithDupName}, + destIdx: performerIdx1WithDupName, + wantErr: true, + }, + { + name: "merge multiple", + srcIdxs: []int{ + performerIdx2WithScene, + performerIdxWithTwoScenes, + performerIdx1WithImage, + performerIdxWithTwoImages, + performerIdxWithGallery, + performerIdxWithTwoGalleries, + performerIdxWithTag, + performerIdxWithTwoTags, + }, + destIdx: tagIdxWithPerformer, + wantErr: false, + }, + } + + qb := db.Performer + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + // load src tag ids to compare after merge + performerTagIds := make(map[int][]int) + for _, srcIdx := range tt.srcIdxs { + srcPerformer, err := qb.Find(ctx, performerIDs[srcIdx]) + if err != nil { + t.Errorf("Error finding performer: %s", err.Error()) + } + if err := 
srcPerformer.LoadTagIDs(ctx, qb); err != nil { + t.Errorf("Error loading performer tag IDs: %s", err.Error()) + } + srcTagIDs := srcPerformer.TagIDs.List() + performerTagIds[srcIdx] = srcTagIDs + } + + err := qb.Merge(ctx, indexesToIDs(tagIDs, tt.srcIdxs), tagIDs[tt.destIdx]) + + if (err != nil) != tt.wantErr { + t.Errorf("PerformerStore.Merge() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if err != nil { + return + } + + // ensure source performers are destroyed + for _, srcIdx := range tt.srcIdxs { + p, err := qb.Find(ctx, performerIDs[srcIdx]) + + // not found returns nil performer and nil error + if err != nil { + t.Errorf("Error finding performer: %s", err.Error()) + continue + } + assert.Nil(p) + } + + // ensure items point to new performer + for _, srcIdx := range tt.srcIdxs { + sceneIdxs := scenePerformers.reverseLookup(srcIdx) + for _, sceneIdx := range sceneIdxs { + s, err := db.Scene.Find(ctx, sceneIDs[sceneIdx]) + if err != nil { + t.Errorf("Error finding scene: %s", err.Error()) + } + if err := s.LoadPerformerIDs(ctx, db.Scene); err != nil { + t.Errorf("Error loading scene performer IDs: %s", err.Error()) + } + scenePerformerIDs := s.PerformerIDs.List() + + assert.Contains(scenePerformerIDs, performerIDs[tt.destIdx]) + assert.NotContains(scenePerformerIDs, performerIDs[srcIdx]) + } + + imageIdxs := imagePerformers.reverseLookup(srcIdx) + for _, imageIdx := range imageIdxs { + i, err := db.Image.Find(ctx, imageIDs[imageIdx]) + if err != nil { + t.Errorf("Error finding image: %s", err.Error()) + } + if err := i.LoadPerformerIDs(ctx, db.Image); err != nil { + t.Errorf("Error loading image performer IDs: %s", err.Error()) + } + imagePerformerIDs := i.PerformerIDs.List() + + assert.Contains(imagePerformerIDs, performerIDs[tt.destIdx]) + assert.NotContains(imagePerformerIDs, performerIDs[srcIdx]) + } + + galleryIdxs := galleryPerformers.reverseLookup(srcIdx) + for _, galleryIdx := range galleryIdxs { + g, err := db.Gallery.Find(ctx, 
galleryIDs[galleryIdx]) + if err != nil { + t.Errorf("Error finding gallery: %s", err.Error()) + } + if err := g.LoadPerformerIDs(ctx, db.Gallery); err != nil { + t.Errorf("Error loading gallery performer IDs: %s", err.Error()) + } + galleryPerformerIDs := g.PerformerIDs.List() + + assert.Contains(galleryPerformerIDs, performerIDs[tt.destIdx]) + assert.NotContains(galleryPerformerIDs, performerIDs[srcIdx]) + } + } + + // ensure tags were merged + destPerformer, err := qb.Find(ctx, performerIDs[tt.destIdx]) + if err != nil { + t.Errorf("Error finding performer: %s", err.Error()) + } + if err := destPerformer.LoadTagIDs(ctx, qb); err != nil { + t.Errorf("Error loading performer tag IDs: %s", err.Error()) + } + destTagIDs := destPerformer.TagIDs.List() + + for _, srcIdx := range tt.srcIdxs { + for _, tagID := range performerTagIds[srcIdx] { + assert.Contains(destTagIDs, tagID) + } + } + }) + } +} + // TODO Update // TODO Destroy // TODO Find diff --git a/pkg/sqlite/query.go b/pkg/sqlite/query.go index 99c1f4e5f..80c7fcd40 100644 --- a/pkg/sqlite/query.go +++ b/pkg/sqlite/query.go @@ -17,13 +17,26 @@ type queryBuilder struct { joins joins whereClauses []string havingClauses []string - args []interface{} withClauses []string recursiveWith bool + withArgs []interface{} + joinArgs []interface{} + whereArgs []interface{} + havingArgs []interface{} + sortAndPagination string } +func (qb queryBuilder) allArgs() []interface{} { + var args []interface{} + args = append(args, qb.withArgs...) + args = append(args, qb.joinArgs...) + args = append(args, qb.whereArgs...) + args = append(args, qb.havingArgs...) 
+ return args +} + func (qb queryBuilder) body(includeSortPagination bool) string { return fmt.Sprintf("SELECT %s FROM %s%s", strings.Join(qb.columns, ", "), qb.from, qb.joins.toSQL(includeSortPagination)) } @@ -55,13 +68,13 @@ func (qb queryBuilder) toSQL(includeSortPagination bool) string { func (qb queryBuilder) findIDs(ctx context.Context) ([]int, error) { const includeSortPagination = true sql := qb.toSQL(includeSortPagination) - return qb.repository.runIdsQuery(ctx, sql, qb.args) + return qb.repository.runIdsQuery(ctx, sql, qb.allArgs()) } func (qb queryBuilder) executeFind(ctx context.Context) ([]int, int, error) { const includeSortPagination = true body := qb.body(includeSortPagination) - return qb.repository.executeFindQuery(ctx, body, qb.args, qb.sortAndPagination, qb.whereClauses, qb.havingClauses, qb.withClauses, qb.recursiveWith) + return qb.repository.executeFindQuery(ctx, body, qb.allArgs(), qb.sortAndPagination, qb.whereClauses, qb.havingClauses, qb.withClauses, qb.recursiveWith) } func (qb queryBuilder) executeCount(ctx context.Context) (int, error) { @@ -79,7 +92,7 @@ func (qb queryBuilder) executeCount(ctx context.Context) (int, error) { body = qb.repository.buildQueryBody(body, qb.whereClauses, qb.havingClauses) countQuery := withClause + qb.repository.buildCountQuery(body) - return qb.repository.runCountQuery(ctx, countQuery, qb.args) + return qb.repository.runCountQuery(ctx, countQuery, qb.allArgs()) } func (qb *queryBuilder) addWhere(clauses ...string) { @@ -109,7 +122,11 @@ func (qb *queryBuilder) addWith(recursive bool, clauses ...string) { } func (qb *queryBuilder) addArg(args ...interface{}) { - qb.args = append(qb.args, args...) + qb.whereArgs = append(qb.whereArgs, args...) +} + +func (qb *queryBuilder) addHavingArg(args ...interface{}) { + qb.havingArgs = append(qb.havingArgs, args...) 
} func (qb *queryBuilder) hasJoin(alias string) bool { @@ -148,7 +165,7 @@ func (qb *queryBuilder) joinSort(table, as, onClause string) { func (qb *queryBuilder) addJoins(joins ...join) { for _, j := range joins { if qb.joins.addUnique(j) { - qb.args = append(qb.args, j.args...) + qb.joinArgs = append(qb.joinArgs, j.args...) } } } @@ -163,20 +180,16 @@ func (qb *queryBuilder) addFilter(f *filterBuilder) error { if len(clause) > 0 { qb.addWith(f.recursiveWith, clause) } - if len(args) > 0 { - // WITH clause always comes first and thus precedes alk args - qb.args = append(args, qb.args...) + qb.withArgs = append(qb.withArgs, args...) } - // add joins here to insert args qb.addJoins(f.getAllJoins()...) clause, args = f.generateWhereClauses() if len(clause) > 0 { qb.addWhere(clause) } - if len(args) > 0 { qb.addArg(args...) } @@ -185,9 +198,8 @@ func (qb *queryBuilder) addFilter(f *filterBuilder) error { if len(clause) > 0 { qb.addHaving(clause) } - if len(args) > 0 { - qb.addArg(args...) + qb.addHavingArg(args...) 
} return nil diff --git a/pkg/sqlite/scene.go b/pkg/sqlite/scene.go index 0c2d11345..c2093431d 100644 --- a/pkg/sqlite/scene.go +++ b/pkg/sqlite/scene.go @@ -26,6 +26,7 @@ const ( sceneTable = "scenes" scenesFilesTable = "scenes_files" sceneIDColumn = "scene_id" + sceneDateColumn = "date" performersScenesTable = "performers_scenes" scenesTagsTable = "scenes_tags" scenesGalleriesTable = "scenes_galleries" @@ -233,6 +234,7 @@ var ( type SceneStore struct { blobJoinQueryBuilder + customFieldsStore tableMgr *table oDateManager @@ -247,6 +249,10 @@ func NewSceneStore(r *storeRepository, blobStore *BlobStore) *SceneStore { blobStore: blobStore, joinTable: sceneTable, }, + customFieldsStore: customFieldsStore{ + table: scenesCustomFieldsTable, + fk: scenesCustomFieldsTable.Col(sceneIDColumn), + }, tableMgr: sceneTableMgr, viewDateManager: viewDateManager{scenesViewTableMgr}, @@ -1091,7 +1097,7 @@ func (qb *SceneStore) queryGroupedFields(ctx context.Context, options models.Sce Duration null.Float Size null.Float }{} - if err := sceneRepository.queryStruct(ctx, aggregateQuery.toSQL(includeSortPagination), query.args, &out); err != nil { + if err := sceneRepository.queryStruct(ctx, aggregateQuery.toSQL(includeSortPagination), query.allArgs(), &out); err != nil { return nil, err } @@ -1138,6 +1144,7 @@ var sceneSortOptions = sortOptions{ "perceptual_similarity", "random", "rating", + "resolution", "studio", "tag_count", "title", @@ -1236,6 +1243,9 @@ func (qb *SceneStore) setSceneSort(query *queryBuilder, findFilter *models.FindF sort = "frame_rate" addVideoFileTable() query.sortAndPagination += getSort(sort, direction, videoFileTable) + case "resolution": + addVideoFileTable() + query.sortAndPagination += fmt.Sprintf(" ORDER BY MIN(%s.width, %s.height) %s", videoFileTable, videoFileTable, getSortDirection(direction)) case "filesize": addFileTable() query.sortAndPagination += getSort(sort, direction, fileTable) diff --git a/pkg/sqlite/scene_filter.go 
b/pkg/sqlite/scene_filter.go index fad300248..712c3d83d 100644 --- a/pkg/sqlite/scene_filter.go +++ b/pkg/sqlite/scene_filter.go @@ -5,7 +5,6 @@ import ( "fmt" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/utils" ) type sceneFilterHandler struct { @@ -83,14 +82,27 @@ func (qb *sceneFilterHandler) criterionHandler() criterionHandler { criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { if sceneFilter.Phash != nil { // backwards compatibility - qb.phashDistanceCriterionHandler(&models.PhashDistanceCriterionInput{ - Value: sceneFilter.Phash.Value, - Modifier: sceneFilter.Phash.Modifier, - })(ctx, f) + h := phashDistanceCriterionHandler{ + joinFn: func(f *filterBuilder) { + qb.addSceneFilesTable(f) + f.addLeftJoin(fingerprintTable, "fingerprints_phash", "scenes_files.file_id = fingerprints_phash.file_id AND fingerprints_phash.type = 'phash'") + }, + criterion: &models.PhashDistanceCriterionInput{ + Value: sceneFilter.Phash.Value, + Modifier: sceneFilter.Phash.Modifier, + }, + } + h.handle(ctx, f) } }), - qb.phashDistanceCriterionHandler(sceneFilter.PhashDistance), + &phashDistanceCriterionHandler{ + joinFn: func(f *filterBuilder) { + qb.addSceneFilesTable(f) + f.addLeftJoin(fingerprintTable, "fingerprints_phash", "scenes_files.file_id = fingerprints_phash.file_id AND fingerprints_phash.type = 'phash'") + }, + criterion: sceneFilter.PhashDistance, + }, intCriterionHandler(sceneFilter.Rating100, "scenes.rating", nil), qb.oCountCriterionHandler(sceneFilter.OCounter), @@ -114,13 +126,20 @@ func (qb *sceneFilterHandler) criterionHandler() criterionHandler { stringCriterionHandler(sceneFilter.StashID, "scene_stash_ids.stash_id")(ctx, f) } }), - &stashIDCriterionHandler{ c: sceneFilter.StashIDEndpoint, stashIDRepository: &sceneRepository.stashIDs, stashIDTableAs: "scene_stash_ids", parentIDCol: "scenes.id", }, + &stashIDsCriterionHandler{ + c: sceneFilter.StashIDsEndpoint, + stashIDRepository: &sceneRepository.stashIDs, + 
stashIDTableAs: "scene_stash_ids", + parentIDCol: "scenes.id", + }, + + qb.stashIDCountCriterionHandler(sceneFilter.StashIDCount), boolCriterionHandler(sceneFilter.Interactive, "video_files.interactive", qb.addVideoFilesTable), intCriterionHandler(sceneFilter.InteractiveSpeed, "video_files.interactive_speed", qb.addVideoFilesTable), @@ -155,11 +174,18 @@ func (qb *sceneFilterHandler) criterionHandler() criterionHandler { qb.performerTagsCriterionHandler(sceneFilter.PerformerTags), qb.performerFavoriteCriterionHandler(sceneFilter.PerformerFavorite), qb.performerAgeCriterionHandler(sceneFilter.PerformerAge), - qb.phashDuplicatedCriterionHandler(sceneFilter.Duplicated, qb.addSceneFilesTable), + qb.duplicatedCriterionHandler(sceneFilter.Duplicated), &dateCriterionHandler{sceneFilter.Date, "scenes.date", nil}, ×tampCriterionHandler{sceneFilter.CreatedAt, "scenes.created_at", nil}, ×tampCriterionHandler{sceneFilter.UpdatedAt, "scenes.updated_at", nil}, + &customFieldsFilterHandler{ + table: scenesCustomFieldsTable.GetTable(), + fkCol: sceneIDColumn, + c: sceneFilter.CustomFields, + idCol: "scenes.id", + }, + &relatedFilterHandler{ relatedIDCol: "scenes_galleries.gallery_id", relatedRepo: galleryRepository.repository, @@ -277,26 +303,71 @@ func (qb *sceneFilterHandler) fileCountCriterionHandler(fileCount *models.IntCri return h.handler(fileCount) } -func (qb *sceneFilterHandler) phashDuplicatedCriterionHandler(duplicatedFilter *models.PHashDuplicationCriterionInput, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { +func (qb *sceneFilterHandler) duplicatedCriterionHandler(duplicatedFilter *models.DuplicationCriterionInput) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { - // TODO: Wishlist item: Implement Distance matching - if duplicatedFilter != nil { - if addJoinFn != nil { - addJoinFn(f) - } + if duplicatedFilter == nil { + return + } - var v string - if *duplicatedFilter.Duplicated { - v = ">" - } else { - v = "=" - } + // Handle 
legacy 'duplicated' field - treat as phash if phash not explicitly set + //nolint:staticcheck + if duplicatedFilter.Duplicated != nil && duplicatedFilter.Phash == nil { + //nolint:staticcheck + duplicatedFilter.Phash = duplicatedFilter.Duplicated + } - f.addInnerJoin("(SELECT file_id FROM files_fingerprints INNER JOIN (SELECT fingerprint FROM files_fingerprints WHERE type = 'phash' GROUP BY fingerprint HAVING COUNT (fingerprint) "+v+" 1) dupes on files_fingerprints.fingerprint = dupes.fingerprint)", "scph", "scenes_files.file_id = scph.file_id") + // Handle explicit fields + if duplicatedFilter.Phash != nil { + qb.addSceneFilesTable(f) + qb.applyPhashDuplication(f, *duplicatedFilter.Phash) + } + + if duplicatedFilter.StashID != nil { + qb.applyStashIDDuplication(f, *duplicatedFilter.StashID) + } + + if duplicatedFilter.Title != nil { + qb.applyTitleDuplication(f, *duplicatedFilter.Title) + } + + if duplicatedFilter.URL != nil { + qb.applyURLDuplication(f, *duplicatedFilter.URL) } } } +// getCountOperator returns ">" for duplicated items (count > 1) or "=" for unique items (count = 1) +func getCountOperator(duplicated bool) string { + if duplicated { + return ">" + } + return "=" +} + +func (qb *sceneFilterHandler) applyPhashDuplication(f *filterBuilder, duplicated bool) { + // TODO: Wishlist item: Implement Distance matching + v := getCountOperator(duplicated) + f.addInnerJoin("(SELECT file_id FROM files_fingerprints INNER JOIN (SELECT fingerprint FROM files_fingerprints WHERE type = 'phash' GROUP BY fingerprint HAVING COUNT (fingerprint) "+v+" 1) dupes on files_fingerprints.fingerprint = dupes.fingerprint)", "scph", "scenes_files.file_id = scph.file_id") +} + +func (qb *sceneFilterHandler) applyStashIDDuplication(f *filterBuilder, duplicated bool) { + v := getCountOperator(duplicated) + // Find stash_ids that appear on more than one scene + f.addInnerJoin("(SELECT scene_id FROM scene_stash_ids INNER JOIN (SELECT stash_id FROM scene_stash_ids GROUP BY stash_id 
HAVING COUNT(DISTINCT scene_id) "+v+" 1) dupes ON scene_stash_ids.stash_id = dupes.stash_id)", "scsi", "scenes.id = scsi.scene_id") +} + +func (qb *sceneFilterHandler) applyTitleDuplication(f *filterBuilder, duplicated bool) { + v := getCountOperator(duplicated) + // Find titles that appear on more than one scene (excluding empty titles) + f.addInnerJoin("(SELECT id FROM scenes WHERE title != '' AND title IS NOT NULL AND title IN (SELECT title FROM scenes WHERE title != '' AND title IS NOT NULL GROUP BY title HAVING COUNT(*) "+v+" 1))", "sctitle", "scenes.id = sctitle.id") +} + +func (qb *sceneFilterHandler) applyURLDuplication(f *filterBuilder, duplicated bool) { + v := getCountOperator(duplicated) + // Find URLs that appear on more than one scene + f.addInnerJoin("(SELECT scene_id FROM scene_urls INNER JOIN (SELECT url FROM scene_urls GROUP BY url HAVING COUNT(DISTINCT scene_id) "+v+" 1) dupes ON scene_urls.url = dupes.url)", "scurl", "scenes.id = scurl.scene_id") +} + func (qb *sceneFilterHandler) codecCriterionHandler(codec *models.StringCriterionInput, codecColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { if codec != nil { @@ -355,6 +426,12 @@ func (qb *sceneFilterHandler) isMissingCriterionHandler(isMissing *string) crite case "cover": f.addWhere("scenes.cover_blob IS NULL") default: + if err := validateIsMissing(*isMissing, []string{ + "title", "code", "details", "director", "rating", + }); err != nil { + f.setError(err) + return + } f.addWhere("(scenes." + *isMissing + " IS NULL OR TRIM(scenes." 
+ *isMissing + ") = '')") } } @@ -436,6 +513,16 @@ func (qb *sceneFilterHandler) tagCountCriterionHandler(tagCount *models.IntCrite return h.handler(tagCount) } +func (qb *sceneFilterHandler) stashIDCountCriterionHandler(stashIDCount *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: sceneTable, + joinTable: "scene_stash_ids", + primaryFK: sceneIDColumn, + } + + return h.handler(stashIDCount) +} + func (qb *sceneFilterHandler) performersCriterionHandler(performers *models.MultiCriterionInput) criterionHandlerFunc { h := joinedMultiCriterionHandlerBuilder{ primaryTable: sceneTable, @@ -542,42 +629,3 @@ func (qb *sceneFilterHandler) performerTagsCriterionHandler(tags *models.Hierarc joinPrimaryKey: sceneIDColumn, } } - -func (qb *sceneFilterHandler) phashDistanceCriterionHandler(phashDistance *models.PhashDistanceCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if phashDistance != nil { - qb.addSceneFilesTable(f) - f.addLeftJoin(fingerprintTable, "fingerprints_phash", "scenes_files.file_id = fingerprints_phash.file_id AND fingerprints_phash.type = 'phash'") - - value, _ := utils.StringToPhash(phashDistance.Value) - distance := 0 - if phashDistance.Distance != nil { - distance = *phashDistance.Distance - } - - if distance == 0 { - // use the default handler - intCriterionHandler(&models.IntCriterionInput{ - Value: int(value), - Modifier: phashDistance.Modifier, - }, "fingerprints_phash.fingerprint", nil)(ctx, f) - } - - switch { - case phashDistance.Modifier == models.CriterionModifierEquals && distance > 0: - // needed to avoid a type mismatch - f.addWhere("typeof(fingerprints_phash.fingerprint) = 'integer'") - f.addWhere("phash_distance(fingerprints_phash.fingerprint, ?) 
< ?", value, distance) - case phashDistance.Modifier == models.CriterionModifierNotEquals && distance > 0: - // needed to avoid a type mismatch - f.addWhere("typeof(fingerprints_phash.fingerprint) = 'integer'") - f.addWhere("phash_distance(fingerprints_phash.fingerprint, ?) > ?", value, distance) - default: - intCriterionHandler(&models.IntCriterionInput{ - Value: int(value), - Modifier: phashDistance.Modifier, - }, "fingerprints_phash.fingerprint", nil)(ctx, f) - } - } - } -} diff --git a/pkg/sqlite/scene_test.go b/pkg/sqlite/scene_test.go index 1efc4d705..67bf227a2 100644 --- a/pkg/sqlite/scene_test.go +++ b/pkg/sqlite/scene_test.go @@ -2098,6 +2098,8 @@ func TestSceneQuery(t *testing.T) { var ( endpoint = sceneStashID(sceneIdxWithGallery).Endpoint stashID = sceneStashID(sceneIdxWithGallery).StashID + stashID2 = sceneStashID(sceneIdxWithPerformer).StashID + stashIDs = []*string{&stashID, &stashID2} depth = -1 ) @@ -2203,6 +2205,60 @@ func TestSceneQuery(t *testing.T) { nil, false, }, + { + "stash ids with endpoint", + nil, + &models.SceneFilterType{ + StashIDsEndpoint: &models.StashIDsCriterionInput{ + Endpoint: &endpoint, + StashIDs: stashIDs, + Modifier: models.CriterionModifierEquals, + }, + }, + []int{sceneIdxWithGallery, sceneIdxWithPerformer}, + nil, + false, + }, + { + "exclude stash ids with endpoint", + nil, + &models.SceneFilterType{ + StashIDsEndpoint: &models.StashIDsCriterionInput{ + Endpoint: &endpoint, + StashIDs: stashIDs, + Modifier: models.CriterionModifierNotEquals, + }, + }, + nil, + []int{sceneIdxWithGallery, sceneIdxWithPerformer}, + false, + }, + { + "null stash ids with endpoint", + nil, + &models.SceneFilterType{ + StashIDsEndpoint: &models.StashIDsCriterionInput{ + Endpoint: &endpoint, + Modifier: models.CriterionModifierIsNull, + }, + }, + nil, + []int{sceneIdxWithGallery, sceneIdxWithPerformer}, + false, + }, + { + "not null stash ids with endpoint", + nil, + &models.SceneFilterType{ + StashIDsEndpoint: &models.StashIDsCriterionInput{ 
+ Endpoint: &endpoint, + Modifier: models.CriterionModifierNotNull, + }, + }, + []int{sceneIdxWithGallery, sceneIdxWithPerformer}, + nil, + false, + }, { "with studio id 0 including child studios", nil, @@ -2217,6 +2273,32 @@ func TestSceneQuery(t *testing.T) { nil, false, }, + { + "single stash id", + nil, + &models.SceneFilterType{ + StashIDCount: &models.IntCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: 1, + }, + }, + []int{sceneIdxWithGallery, sceneIdxWithPerformer}, + []int{sceneIdxWithGroup}, + false, + }, + { + "less than one stash id", + nil, + &models.SceneFilterType{ + StashIDCount: &models.IntCriterionInput{ + Modifier: models.CriterionModifierLessThan, + Value: 1, + }, + }, + []int{sceneIdxWithGroup}, + []int{sceneIdxWithGallery, sceneIdxWithPerformer}, + false, + }, } for _, tt := range tests { @@ -2739,6 +2821,33 @@ func verifyIntPtr(t *testing.T, value *int, criterion models.IntCriterionInput) } } +func verifyDatePtr(t *testing.T, value *models.Date, criterion models.DateCriterionInput) { + t.Helper() + assert := assert.New(t) + if criterion.Modifier == models.CriterionModifierIsNull { + assert.Nil(value, "expect is null values to be null") + } + if criterion.Modifier == models.CriterionModifierNotNull { + assert.NotNil(value, "expect not null values to be not null") + } + if criterion.Modifier == models.CriterionModifierEquals { + date, _ := models.ParseDate(criterion.Value) + assert.Equal(date, *value) + } + if criterion.Modifier == models.CriterionModifierNotEquals { + date, _ := models.ParseDate(criterion.Value) + assert.NotEqual(date, *value) + } + if criterion.Modifier == models.CriterionModifierGreaterThan { + date, _ := models.ParseDate(criterion.Value) + assert.True(value.After(date)) + } + if criterion.Modifier == models.CriterionModifierLessThan { + date, _ := models.ParseDate(criterion.Value) + assert.True(date.After(*value)) + } +} + func TestSceneQueryOCounter(t *testing.T) { const oCounter = 1 oCounterCriterion := 
models.IntCriterionInput{ @@ -4039,7 +4148,7 @@ func TestSceneQueryPhashDuplicated(t *testing.T) { withTxn(func(ctx context.Context) error { sqb := db.Scene duplicated := true - phashCriterion := models.PHashDuplicationCriterionInput{ + phashCriterion := models.DuplicationCriterionInput{ Duplicated: &duplicated, } @@ -4744,6 +4853,253 @@ func TestSceneStore_SaveActivity(t *testing.T) { } } +func TestSceneQueryCustomFields(t *testing.T) { + tests := []struct { + name string + filter *models.SceneFilterType + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "equals", + &models.SceneFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierEquals, + Value: []any{getSceneStringValue(sceneIdxWithGallery, "custom")}, + }, + }, + }, + []int{sceneIdxWithGallery}, + nil, + false, + }, + { + "not equals", + &models.SceneFilterType{ + Title: &models.StringCriterionInput{ + Value: getSceneTitle(sceneIdxWithGallery), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotEquals, + Value: []any{getSceneStringValue(sceneIdxWithGallery, "custom")}, + }, + }, + }, + nil, + []int{sceneIdxWithGallery}, + false, + }, + { + "includes", + &models.SceneFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierIncludes, + Value: []any{getSceneStringValue(sceneIdxWithGallery, "custom")[9:]}, + }, + }, + }, + []int{sceneIdxWithGallery}, + nil, + false, + }, + { + "excludes", + &models.SceneFilterType{ + Title: &models.StringCriterionInput{ + Value: getSceneTitle(sceneIdxWithGallery), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierExcludes, + Value: []any{getSceneStringValue(sceneIdxWithGallery, "custom")[9:]}, + }, + }, + }, + nil, 
+ []int{sceneIdxWithGallery}, + false, + }, + { + "regex", + &models.SceneFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{".*17_custom"}, + }, + }, + }, + []int{sceneIdxWithTwoPerformerTag}, + nil, + false, + }, + { + "invalid regex", + &models.SceneFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "not matches regex", + &models.SceneFilterType{ + Title: &models.StringCriterionInput{ + Value: getSceneTitle(sceneIdxWithTwoPerformerTag), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{".*17_custom"}, + }, + }, + }, + nil, + []int{sceneIdxWithTwoPerformerTag}, + false, + }, + { + "invalid not matches regex", + &models.SceneFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "null", + &models.SceneFilterType{ + Title: &models.StringCriterionInput{ + Value: getSceneTitle(sceneIdxWithGallery), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "not existing", + Modifier: models.CriterionModifierIsNull, + }, + }, + }, + []int{sceneIdxWithGallery}, + nil, + false, + }, + { + "not null", + &models.SceneFilterType{ + Title: &models.StringCriterionInput{ + Value: getSceneTitle(sceneIdxWithGallery), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotNull, + }, + }, + }, + []int{sceneIdxWithGallery}, + nil, + false, + }, + { + "between", + 
&models.SceneFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + []int{sceneIdxWithPerformer}, + nil, + false, + }, + { + "not between", + &models.SceneFilterType{ + Title: &models.StringCriterionInput{ + Value: getSceneTitle(sceneIdxWithPerformer), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierNotBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + nil, + []int{sceneIdxWithPerformer}, + false, + }, + } + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + result, err := db.Scene.Query(ctx, models.SceneQueryOptions{ + SceneFilter: tt.filter, + }) + if (err != nil) != tt.wantErr { + t.Errorf("SceneStore.Query() error = %v, wantErr %v", err, tt.wantErr) + } + + if err != nil { + return + } + + scenes, err := result.Resolve(ctx) + if err != nil { + t.Errorf("SceneStore.Query().Resolve() error = %v", err) + return + } + + ids := scenesToIDs(scenes) + include := indexesToIDs(sceneIDs, tt.includeIdxs) + exclude := indexesToIDs(sceneIDs, tt.excludeIdxs) + + for _, i := range include { + assert.Contains(ids, i) + } + for _, e := range exclude { + assert.NotContains(ids, e) + } + }) + } +} + // TODO Count // TODO SizeCount diff --git a/pkg/sqlite/setup_test.go b/pkg/sqlite/setup_test.go index 63c66fd06..4ab310ee7 100644 --- a/pkg/sqlite/setup_test.go +++ b/pkg/sqlite/setup_test.go @@ -31,7 +31,8 @@ const ( ) const ( - folderIdxWithSubFolder = iota + folderIdxRoot = iota + folderIdxWithSubFolder folderIdxWithParentFolder folderIdxWithFiles folderIdxInZip @@ -305,6 +306,7 @@ const ( pathField = "Path" checksumField = "Checksum" titleField = "Title" + detailsField = "Details" urlField = "URL" zipPath = "zipPath.zip" firstSavedFilterName = "firstSavedFilterName" @@ 
-359,6 +361,8 @@ func (m linkMap) reverseLookup(idx int) []int { var ( folderParentFolders = map[int]int{ + folderIdxWithSubFolder: folderIdxRoot, + folderIdxForObjectFiles: folderIdxRoot, folderIdxWithParentFolder: folderIdxWithSubFolder, folderIdxWithSceneFiles: folderIdxForObjectFiles, folderIdxWithImageFiles: folderIdxForObjectFiles, @@ -785,6 +789,10 @@ func getFolderPath(index int, parentFolderIdx *int) string { return path } +func getFolderBasename(index int, parentFolderIdx *int) string { + return filepath.Base(getFolderPath(index, parentFolderIdx)) +} + func getFolderModTime(index int) time.Time { return time.Date(2000, 1, (index%10)+1, 0, 0, 0, 0, time.UTC) } @@ -858,16 +866,24 @@ func getFileModTime(index int) time.Time { return getFolderModTime(index) } +func getFilePhash(index int) int64 { + return int64(index * 567) +} + func getFileFingerprints(index int) []models.Fingerprint { return []models.Fingerprint{ { - Type: "MD5", + Type: models.FingerprintTypeMD5, Fingerprint: getPrefixedStringValue("file", index, "md5"), }, { - Type: "OSHASH", + Type: models.FingerprintTypeOshash, Fingerprint: getPrefixedStringValue("file", index, "oshash"), }, + { + Type: models.FingerprintTypePhash, + Fingerprint: getFilePhash(index), + }, } } @@ -1076,10 +1092,17 @@ func getObjectDate(index int) *models.Date { return &ret } +func sceneStashIDs(i int) []models.StashID { + if i%5 == 0 { + return nil + } + return []models.StashID{sceneStashID(i)} +} + func sceneStashID(i int) models.StashID { return models.StashID{ StashID: getSceneStringValue(i, "stashid"), - Endpoint: getSceneStringValue(i, "endpoint"), + Endpoint: getSceneStringValue(0, "endpoint"), UpdatedAt: epochTime, } } @@ -1174,14 +1197,24 @@ func makeScene(i int) *models.Scene { PerformerIDs: models.NewRelatedIDs(pids), TagIDs: models.NewRelatedIDs(tids), Groups: models.NewRelatedGroups(groups), - StashIDs: models.NewRelatedStashIDs([]models.StashID{ - sceneStashID(i), - }), + StashIDs: 
models.NewRelatedStashIDs(sceneStashIDs(i)), PlayDuration: getScenePlayDuration(i), ResumeTime: getSceneResumeTime(i), } } +func getSceneCustomFields(index int) map[string]interface{} { + if index%5 == 0 { + return nil + } + + return map[string]interface{}{ + "string": getSceneStringValue(index, "custom"), + "int": int64(index % 5), + "real": float64(index) / 10, + } +} + func createScenes(ctx context.Context, n int) error { sqb := db.Scene fqb := db.File @@ -1199,6 +1232,10 @@ func createScenes(ctx context.Context, n int) error { return fmt.Errorf("Error creating scene %v+: %s", scene, err.Error()) } + if err := sqb.SetCustomFields(ctx, scene.ID, models.CustomFieldsInput{Full: getSceneCustomFields(i)}); err != nil { + return fmt.Errorf("Error setting custom fields for scene %d: %s", scene.ID, err.Error()) + } + sceneIDs = append(sceneIDs, scene.ID) } @@ -1226,6 +1263,18 @@ func getImageBasename(index int) string { return getImageStringValue(index, pathField) } +func getImageCustomFields(index int) map[string]interface{} { + if index%5 == 0 { + return nil + } + + return map[string]interface{}{ + "string": getImageStringValue(index, "custom"), + "int": int64(index % 5), + "real": float64(index) / 10, + } +} + func makeImageFile(i int) *models.ImageFile { return &models.ImageFile{ BaseFile: &models.BaseFile{ @@ -1257,9 +1306,10 @@ func makeImage(i int) *models.Image { tids := indexesToIDs(tagIDs, imageTags[i]) return &models.Image{ - Title: title, - Rating: getIntPtr(getRating(i)), - Date: getObjectDate(i), + Title: title, + Details: getImageStringValue(i, detailsField), + Rating: getIntPtr(getRating(i)), + Date: getObjectDate(i), URLs: models.NewRelatedStrings([]string{ getImageEmptyString(i, urlField), }), @@ -1288,7 +1338,11 @@ func createImages(ctx context.Context, n int) error { image := makeImage(i) - err := qb.Create(ctx, image, []models.FileID{f.ID}) + err := qb.Create(ctx, &models.CreateImageInput{ + Image: image, + FileIDs: []models.FileID{f.ID}, + 
CustomFields: getImageCustomFields(i), + }) if err != nil { return fmt.Errorf("Error creating image %v+: %s", image, err.Error()) @@ -1368,6 +1422,18 @@ func makeGallery(i int, includeScenes bool) *models.Gallery { return ret } +func getGalleryCustomFields(index int) map[string]interface{} { + if index%5 == 0 { + return nil + } + + return map[string]interface{}{ + "string": getGalleryStringValue(index, "custom"), + "int": int64(index % 5), + "real": float64(index) / 10, + } +} + func createGalleries(ctx context.Context, n int) error { gqb := db.Gallery fqb := db.File @@ -1389,7 +1455,11 @@ func createGalleries(ctx context.Context, n int) error { const includeScenes = false gallery := makeGallery(i, includeScenes) - err := gqb.Create(ctx, gallery, fileIDs) + err := gqb.Create(ctx, &models.CreateGalleryInput{ + Gallery: gallery, + FileIDs: fileIDs, + CustomFields: getGalleryCustomFields(i), + }) if err != nil { return fmt.Errorf("Error creating gallery %v+: %s", gallery, err.Error()) @@ -1420,6 +1490,18 @@ func getGroupEmptyString(index int, field string) string { return v.String } +func getGroupCustomFields(index int) map[string]interface{} { + if index%5 == 0 { + return nil + } + + return map[string]interface{}{ + "string": getGroupStringValue(index, "custom"), + "int": int64(index % 5), + "real": float64(index) / 10, + } +} + // createGroups creates n groups with plain Name and o groups with camel cased NaMe included func createGroups(ctx context.Context, mqb models.GroupReaderWriter, n int, o int) error { const namePlain = "Name" @@ -1452,6 +1534,13 @@ func createGroups(ctx context.Context, mqb models.GroupReaderWriter, n int, o in return fmt.Errorf("Error creating group [%d] %v+: %s", i, group, err.Error()) } + customFields := getGroupCustomFields(i) + if customFields != nil { + if err := mqb.SetCustomFields(ctx, group.ID, models.CustomFieldsInput{Full: customFields}); err != nil { + return fmt.Errorf("Error setting custom fields for group %d: %s", group.ID, 
err.Error()) + } + } + groupIDs = append(groupIDs, group.ID) groupNames = append(groupNames, group.Name) } @@ -1508,13 +1597,26 @@ func getPerformerDeathDate(index int) *models.Date { return &ret } -func getPerformerCareerLength(index int) *string { +func getPerformerCareerStart(index int) *models.Date { if index%5 == 0 { return nil } - ret := fmt.Sprintf("20%2d", index) - return &ret + date := models.DateFromYear(2000 + index) + return &date +} + +func getPerformerCareerEnd(index int) *models.Date { + if index%5 == 0 { + return nil + } + + // only set career_end for even indices + if index%2 == 0 { + date := models.DateFromYear(2010 + index) + return &date + } + return nil } func getPerformerPenisLength(index int) *float64 { @@ -1526,15 +1628,15 @@ func getPerformerPenisLength(index int) *float64 { return &ret } -func getPerformerCircumcised(index int) *models.CircumisedEnum { - var ret models.CircumisedEnum +func getPerformerCircumcised(index int) *models.CircumcisedEnum { + var ret models.CircumcisedEnum switch { case index%3 == 0: return nil case index%3 == 1: - ret = models.CircumisedEnumCut + ret = models.CircumcisedEnumCut default: - ret = models.CircumisedEnumUncut + ret = models.CircumcisedEnumUncut } return &ret @@ -1547,7 +1649,7 @@ func getIgnoreAutoTag(index int) bool { func performerStashID(i int) models.StashID { return models.StashID{ StashID: getPerformerStringValue(i, "stashid"), - Endpoint: getPerformerStringValue(i, "endpoint"), + Endpoint: getPerformerStringValue(0, "endpoint"), } } @@ -1610,10 +1712,8 @@ func createPerformers(ctx context.Context, n int, o int) error { TagIDs: models.NewRelatedIDs(tids), } - careerLength := getPerformerCareerLength(i) - if careerLength != nil { - performer.CareerLength = *careerLength - } + performer.CareerStart = getPerformerCareerStart(i) + performer.CareerEnd = getPerformerCareerEnd(i) if (index+1)%5 != 0 { performer.StashIDs = models.NewRelatedStashIDs([]models.StashID{ @@ -1700,7 +1800,19 @@ func 
getTagChildCount(id int) int { func tagStashID(i int) models.StashID { return models.StashID{ StashID: getTagStringValue(i, "stashid"), - Endpoint: getTagStringValue(i, "endpoint"), + Endpoint: getTagStringValue(0, "endpoint"), + } +} + +func getTagCustomFields(index int) map[string]interface{} { + if index%5 == 0 { + return nil + } + + return map[string]interface{}{ + "string": getTagStringValue(index, "custom"), + "int": int64(index % 5), + "real": float64(index) / 10, } } @@ -1731,7 +1843,10 @@ func createTags(ctx context.Context, tqb models.TagReaderWriter, n int, o int) e }) } - err := tqb.Create(ctx, &tag) + err := tqb.Create(ctx, &models.CreateTagInput{ + Tag: &tag, + CustomFields: getTagCustomFields(i), + }) if err != nil { return fmt.Errorf("Error creating tag %v+: %s", tag, err.Error()) @@ -1760,7 +1875,19 @@ func getStudioNullStringValue(index int, field string) string { return ret.String } -func createStudio(ctx context.Context, sqb *sqlite.StudioStore, name string, parentID *int) (*models.Studio, error) { +func getStudioCustomFields(index int) map[string]interface{} { + if index%5 == 0 { + return nil + } + + return map[string]interface{}{ + "string": getStudioStringValue(index, "custom"), + "int": int64(index % 5), + "real": float64(index) / 10, + } +} + +func createStudio(ctx context.Context, sqb *sqlite.StudioStore, name string, parentID *int, customFields map[string]interface{}) (*models.Studio, error) { studio := models.Studio{ Name: name, } @@ -1769,7 +1896,7 @@ func createStudio(ctx context.Context, sqb *sqlite.StudioStore, name string, par studio.ParentID = parentID } - err := createStudioFromModel(ctx, sqb, &studio) + err := createStudioFromModel(ctx, sqb, &studio, customFields) if err != nil { return nil, err } @@ -1777,8 +1904,11 @@ func createStudio(ctx context.Context, sqb *sqlite.StudioStore, name string, par return &studio, nil } -func createStudioFromModel(ctx context.Context, sqb *sqlite.StudioStore, studio *models.Studio) error { - err 
:= sqb.Create(ctx, studio) +func createStudioFromModel(ctx context.Context, sqb *sqlite.StudioStore, studio *models.Studio, customFields map[string]interface{}) error { + err := sqb.Create(ctx, &models.CreateStudioInput{ + Studio: studio, + CustomFields: customFields, + }) if err != nil { return fmt.Errorf("Error creating studio %v+: %s", studio, err.Error()) @@ -1840,7 +1970,7 @@ func createStudios(ctx context.Context, n int, o int) error { alias := getStudioStringValue(i, "Alias") studio.Aliases = models.NewRelatedStrings([]string{alias}) } - err := createStudioFromModel(ctx, sqb, &studio) + err := createStudioFromModel(ctx, sqb, &studio, getStudioCustomFields(i)) if err != nil { return err diff --git a/pkg/sqlite/sql.go b/pkg/sqlite/sql.go index 2d5922555..87376c2c1 100644 --- a/pkg/sqlite/sql.go +++ b/pkg/sqlite/sql.go @@ -71,6 +71,16 @@ func (o sortOptions) validateSort(sort string) error { return fmt.Errorf("invalid sort: %s", sort) } +func validateIsMissing(isMissing string, allowed []string) error { + for _, v := range allowed { + if v == isMissing { + return nil + } + } + + return fmt.Errorf("invalid is_missing field: %s", isMissing) +} + func getSortDirection(direction string) string { if direction != "ASC" && direction != "DESC" { return "ASC" @@ -137,6 +147,8 @@ func getCountSort(primaryTable, joinTable, primaryFK, direction string) string { return fmt.Sprintf(" ORDER BY (SELECT COUNT(*) FROM %s AS sort WHERE sort.%s = %s.id) %s", joinTable, primaryFK, primaryTable, getSortDirection(direction)) } +// getStringSearchClause returns a sqlClause for searching strings in the provided columns. +// It is used for includes and excludes string criteria. 
func getStringSearchClause(columns []string, q string, not bool) sqlClause { var likeClauses []string var args []interface{} @@ -257,8 +269,11 @@ func getDateWhereClause(column string, modifier models.CriterionModifier, value upper = &u } - args := []interface{}{value} - betweenArgs := []interface{}{value, *upper} + valueDate, _ := models.ParseDate(value) + date := Date{Date: valueDate.Time} + + args := []interface{}{date} + betweenArgs := []interface{}{date, *upper} switch modifier { case models.CriterionModifierIsNull: diff --git a/pkg/sqlite/studio.go b/pkg/sqlite/studio.go index 1a05be6f3..87f905935 100644 --- a/pkg/sqlite/studio.go +++ b/pkg/sqlite/studio.go @@ -15,6 +15,7 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/studio" + "github.com/stashapp/stash/pkg/utils" ) const ( @@ -43,6 +44,7 @@ type studioRow struct { Favorite bool `db:"favorite"` Details zero.String `db:"details"` IgnoreAutoTag bool `db:"ignore_auto_tag"` + Organized bool `db:"organized"` // not used in resolutions or updates ImageBlob zero.String `db:"image_blob"` @@ -58,6 +60,7 @@ func (r *studioRow) fromStudio(o models.Studio) { r.Favorite = o.Favorite r.Details = zero.StringFrom(o.Details) r.IgnoreAutoTag = o.IgnoreAutoTag + r.Organized = o.Organized } func (r *studioRow) resolve() *models.Studio { @@ -71,6 +74,7 @@ func (r *studioRow) resolve() *models.Studio { Favorite: r.Favorite, Details: r.Details.String, IgnoreAutoTag: r.IgnoreAutoTag, + Organized: r.Organized, } return ret @@ -89,6 +93,7 @@ func (r *studioRowRecord) fromPartial(o models.StudioPartial) { r.setBool("favorite", o.Favorite) r.setNullString("details", o.Details) r.setBool("ignore_auto_tag", o.IgnoreAutoTag) + r.setBool("organized", o.Organized) } type studioRepositoryType struct { @@ -100,6 +105,7 @@ type studioRepositoryType struct { scenes repository images repository galleries repository + groups repository } var ( @@ -126,6 +132,10 @@ var ( tableName: galleryTable, idColumn: 
studioIDColumn, }, + groups: repository{ + tableName: groupTable, + idColumn: studioIDColumn, + }, tags: joinRepository{ repository: repository{ tableName: studiosTagsTable, @@ -140,6 +150,7 @@ var ( type StudioStore struct { blobJoinQueryBuilder + customFieldsStore tagRelationshipStore tableMgr *table @@ -151,6 +162,10 @@ func NewStudioStore(blobStore *BlobStore) *StudioStore { blobStore: blobStore, joinTable: studioTable, }, + customFieldsStore: customFieldsStore{ + table: studiosCustomFieldsTable, + fk: studiosCustomFieldsTable.Col(studioIDColumn), + }, tagRelationshipStore: tagRelationshipStore{ idRelationshipStore: idRelationshipStore{ joinTable: studiosTagsTableMgr, @@ -169,11 +184,11 @@ func (qb *StudioStore) selectDataset() *goqu.SelectDataset { return dialect.From(qb.table()).Select(qb.table().All()) } -func (qb *StudioStore) Create(ctx context.Context, newObject *models.Studio) error { +func (qb *StudioStore) Create(ctx context.Context, newObject *models.CreateStudioInput) error { var err error var r studioRow - r.fromStudio(*newObject) + r.fromStudio(*newObject.Studio) id, err := qb.tableMgr.insertID(ctx, r) if err != nil { @@ -207,12 +222,17 @@ func (qb *StudioStore) Create(ctx context.Context, newObject *models.Studio) err } } + const partial = false + if err := qb.setCustomFields(ctx, id, newObject.CustomFields, partial); err != nil { + return err + } + updated, err := qb.find(ctx, id) if err != nil { return fmt.Errorf("finding after create: %w", err) } - *newObject = *updated + *newObject.Studio = *updated return nil } @@ -253,13 +273,17 @@ func (qb *StudioStore) UpdatePartial(ctx context.Context, input models.StudioPar } } - return qb.Find(ctx, input.ID) + if err := qb.SetCustomFields(ctx, input.ID, input.CustomFields); err != nil { + return nil, err + } + + return qb.find(ctx, input.ID) } // This is only used by the Import/Export functionality -func (qb *StudioStore) Update(ctx context.Context, updatedObject *models.Studio) error { +func (qb 
*StudioStore) Update(ctx context.Context, updatedObject *models.UpdateStudioInput) error { var r studioRow - r.fromStudio(*updatedObject) + r.fromStudio(*updatedObject.Studio) if err := qb.tableMgr.updateByID(ctx, updatedObject.ID, r); err != nil { return err @@ -287,6 +311,10 @@ func (qb *StudioStore) Update(ctx context.Context, updatedObject *models.Studio) } } + if err := qb.SetCustomFields(ctx, updatedObject.ID, updatedObject.CustomFields); err != nil { + return err + } + return nil } @@ -601,15 +629,46 @@ func (qb *StudioStore) sortByScenesDuration(direction string) string { ) %s`, sceneTable, scenesFilesTable, scenesFilesTable, sceneIDColumn, sceneTable, scenesFilesTable, sceneTable, studioIDColumn, studioTable, getSortDirection(direction)) } +func (qb *StudioStore) sortByScenesSize(direction string) string { + return fmt.Sprintf(` ORDER BY ( + SELECT COALESCE(SUM(%s.size), 0) + FROM %s + LEFT JOIN %s ON %s.%s = %s.id + LEFT JOIN %s ON %s.id = %s.file_id + WHERE %s.%s = %s.id + ) %s`, fileTable, sceneTable, scenesFilesTable, scenesFilesTable, sceneIDColumn, sceneTable, fileTable, fileTable, scenesFilesTable, sceneTable, studioIDColumn, studioTable, getSortDirection(direction)) } + +// used for sorting on studio latest scene +var selectStudioLatestSceneSQL = utils.StrFormat( + "SELECT MAX(date) FROM ("+ + "SELECT {date} FROM {scenes} s "+ + "WHERE s.{studio_id} = {studios}.id"+ + ")", + map[string]interface{}{ + "scenes": sceneTable, + "studios": studioTable, + "studio_id": studioIDColumn, + "date": sceneDateColumn, + }, +) + +func (qb *StudioStore) sortByLatestScene(direction string) string { + // need to get the latest date from scenes + return " ORDER BY (" + selectStudioLatestSceneSQL + ") " + direction +} + var studioSortOptions = sortOptions{ "child_count", "created_at", "galleries_count", "id", "images_count", + "latest_scene", "name", "scenes_count", "scenes_duration", + "scenes_size", "random", "rating", "tag_count", @@ -640,12 +699,16 @@ func (qb 
*StudioStore) getStudioSort(findFilter *models.FindFilterType) (string, sortQuery += getCountSort(studioTable, sceneTable, studioIDColumn, direction) case "scenes_duration": sortQuery += qb.sortByScenesDuration(direction) + case "scenes_size": + sortQuery += qb.sortByScenesSize(direction) case "images_count": sortQuery += getCountSort(studioTable, imageTable, studioIDColumn, direction) case "galleries_count": sortQuery += getCountSort(studioTable, galleryTable, studioIDColumn, direction) case "child_count": sortQuery += getCountSort(studioTable, studioTable, studioParentIDColumn, direction) + case "latest_scene": + sortQuery += qb.sortByLatestScene(direction) default: sortQuery += getSort(sort, direction, "studios") } diff --git a/pkg/sqlite/studio_filter.go b/pkg/sqlite/studio_filter.go index 6ff7fcced..6d5a8fe7c 100644 --- a/pkg/sqlite/studio_filter.go +++ b/pkg/sqlite/studio_filter.go @@ -59,6 +59,7 @@ func (qb *studioFilterHandler) criterionHandler() criterionHandler { intCriterionHandler(studioFilter.Rating100, studioTable+".rating", nil), boolCriterionHandler(studioFilter.Favorite, studioTable+".favorite", nil), boolCriterionHandler(studioFilter.IgnoreAutoTag, studioTable+".ignore_auto_tag", nil), + boolCriterionHandler(studioFilter.Organized, studioTable+".organized", nil), criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { if studioFilter.StashID != nil { @@ -72,12 +73,19 @@ func (qb *studioFilterHandler) criterionHandler() criterionHandler { stashIDTableAs: "studio_stash_ids", parentIDCol: "studios.id", }, + &stashIDsCriterionHandler{ + c: studioFilter.StashIDsEndpoint, + stashIDRepository: &studioRepository.stashIDs, + stashIDTableAs: "studio_stash_ids", + parentIDCol: "studios.id", + }, qb.isMissingCriterionHandler(studioFilter.IsMissing), qb.tagCountCriterionHandler(studioFilter.TagCount), qb.sceneCountCriterionHandler(studioFilter.SceneCount), qb.imageCountCriterionHandler(studioFilter.ImageCount), 
qb.galleryCountCriterionHandler(studioFilter.GalleryCount), + qb.groupCountCriterionHandler(studioFilter.GroupCount), qb.parentCriterionHandler(studioFilter.Parents), qb.aliasCriterionHandler(studioFilter.Aliases), qb.tagsCriterionHandler(studioFilter.Tags), @@ -111,6 +119,22 @@ func (qb *studioFilterHandler) criterionHandler() criterionHandler { studioRepository.galleries.innerJoin(f, "", "studios.id") }, }, + + &relatedFilterHandler{ + relatedIDCol: "groups.id", + relatedRepo: groupRepository.repository, + relatedHandler: &groupFilterHandler{studioFilter.GroupsFilter}, + joinFn: func(f *filterBuilder) { + studioRepository.groups.innerJoin(f, "", "studios.id") + }, + }, + + &customFieldsFilterHandler{ + table: studiosCustomFieldsTable.GetTable(), + fkCol: studioIDColumn, + c: studioFilter.CustomFields, + idCol: "studios.id", + }, } } @@ -126,7 +150,19 @@ func (qb *studioFilterHandler) isMissingCriterionHandler(isMissing *string) crit case "stash_id": studioRepository.stashIDs.join(f, "studio_stash_ids", "studios.id") f.addWhere("studio_stash_ids.studio_id IS NULL") + case "aliases": + studiosAliasesTableMgr.join(f, "", "studios.id") + f.addWhere("studio_aliases.alias IS NULL") + case "tags": + f.addLeftJoin(studiosTagsTable, "tags_join", "tags_join.studio_id = studios.id") + f.addWhere("tags_join.studio_id IS NULL") default: + if err := validateIsMissing(*isMissing, []string{ + "details", "rating", + }); err != nil { + f.setError(err) + return + } f.addWhere("(studios." + *isMissing + " IS NULL OR TRIM(studios." 
+ *isMissing + ") = '')") } } @@ -166,6 +202,17 @@ func (qb *studioFilterHandler) galleryCountCriterionHandler(galleryCount *models } } +func (qb *studioFilterHandler) groupCountCriterionHandler(groupCount *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if groupCount != nil { + f.addLeftJoin("groups", "", "groups.studio_id = studios.id") + clause, args := getIntCriterionWhereClause("count(distinct groups.id)", *groupCount) + + f.addHaving(clause, args...) + } + } +} + func (qb *studioFilterHandler) tagCountCriterionHandler(tagCount *models.IntCriterionInput) criterionHandlerFunc { h := countCriterionHandlerBuilder{ primaryTable: studioTable, diff --git a/pkg/sqlite/studio_test.go b/pkg/sqlite/studio_test.go index 003877c77..eebc677c3 100644 --- a/pkg/sqlite/studio_test.go +++ b/pkg/sqlite/studio_test.go @@ -11,6 +11,7 @@ import ( "strconv" "strings" "testing" + "time" "github.com/stashapp/stash/pkg/models" "github.com/stretchr/testify/assert" @@ -47,6 +48,566 @@ func TestStudioFindByName(t *testing.T) { }) } +func loadStudioRelationships(ctx context.Context, expected models.Studio, actual *models.Studio) error { + if expected.Aliases.Loaded() { + if err := actual.LoadAliases(ctx, db.Studio); err != nil { + return err + } + } + if expected.URLs.Loaded() { + if err := actual.LoadURLs(ctx, db.Studio); err != nil { + return err + } + } + if expected.TagIDs.Loaded() { + if err := actual.LoadTagIDs(ctx, db.Studio); err != nil { + return err + } + } + if expected.StashIDs.Loaded() { + if err := actual.LoadStashIDs(ctx, db.Studio); err != nil { + return err + } + } + + return nil +} + +func Test_StudioStore_Create(t *testing.T) { + var ( + name = "name" + details = "details" + url = "url" + rating = 3 + aliases = []string{"alias1", "alias2"} + ignoreAutoTag = true + organized = true + favorite = true + endpoint1 = "endpoint1" + endpoint2 = "endpoint2" + stashID1 = "stashid1" + stashID2 = "stashid2" + createdAt = 
time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + ) + + tests := []struct { + name string + newObject models.CreateStudioInput + wantErr bool + }{ + { + "full", + models.CreateStudioInput{ + Studio: &models.Studio{ + Name: name, + URLs: models.NewRelatedStrings([]string{url}), + Favorite: favorite, + Rating: &rating, + Details: details, + IgnoreAutoTag: ignoreAutoTag, + Organized: organized, + TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithStudio], tagIDs[tagIdx1WithDupName]}), + Aliases: models.NewRelatedStrings(aliases), + StashIDs: models.NewRelatedStashIDs([]models.StashID{ + { + StashID: stashID1, + Endpoint: endpoint1, + UpdatedAt: epochTime, + }, + { + StashID: stashID2, + Endpoint: endpoint2, + UpdatedAt: epochTime, + }, + }), + CreatedAt: createdAt, + UpdatedAt: updatedAt, + }, + CustomFields: testCustomFields, + }, + false, + }, + { + "invalid tag id", + models.CreateStudioInput{ + Studio: &models.Studio{ + Name: name, + TagIDs: models.NewRelatedIDs([]int{invalidID}), + }, + }, + true, + }, + } + + qb := db.Studio + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + p := tt.newObject + if err := qb.Create(ctx, &p); (err != nil) != tt.wantErr { + t.Errorf("StudioStore.Create() error = %v, wantErr = %v", err, tt.wantErr) + } + + if tt.wantErr { + assert.Zero(p.ID) + return + } + + assert.NotZero(p.ID) + + copy := *tt.newObject.Studio + copy.ID = p.ID + + // load relationships + if err := loadStudioRelationships(ctx, copy, p.Studio); err != nil { + t.Errorf("loadStudioRelationships() error = %v", err) + return + } + + assert.Equal(copy, *p.Studio) + + // ensure can find the Studio + found, err := qb.Find(ctx, p.ID) + if err != nil { + t.Errorf("StudioStore.Find() error = %v", err) + } + + if !assert.NotNil(found) { + return + } + + // load relationships + if err := loadStudioRelationships(ctx, copy, found); err != nil 
{ + t.Errorf("loadStudioRelationships() error = %v", err) + return + } + assert.Equal(copy, *found) + + // ensure custom fields are set + cf, err := qb.GetCustomFields(ctx, p.ID) + if err != nil { + t.Errorf("StudioStore.GetCustomFields() error = %v", err) + return + } + + assert.Equal(tt.newObject.CustomFields, cf) + + return + }) + } +} + +func Test_StudioStore_Update(t *testing.T) { + var ( + name = "name" + details = "details" + url = "url" + rating = 3 + aliases = []string{"aliasX", "aliasY"} + ignoreAutoTag = true + organized = true + favorite = true + endpoint1 = "endpoint1" + endpoint2 = "endpoint2" + stashID1 = "stashid1" + stashID2 = "stashid2" + createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + ) + + tests := []struct { + name string + updatedObject models.UpdateStudioInput + wantErr bool + }{ + { + "full", + models.UpdateStudioInput{ + Studio: &models.Studio{ + ID: studioIDs[studioIdxWithGallery], + Name: name, + URLs: models.NewRelatedStrings([]string{url}), + Favorite: favorite, + Rating: &rating, + Details: details, + IgnoreAutoTag: ignoreAutoTag, + Organized: organized, + Aliases: models.NewRelatedStrings(aliases), + TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithStudio]}), + StashIDs: models.NewRelatedStashIDs([]models.StashID{ + { + StashID: stashID1, + Endpoint: endpoint1, + UpdatedAt: epochTime, + }, + { + StashID: stashID2, + Endpoint: endpoint2, + UpdatedAt: epochTime, + }, + }), + CreatedAt: createdAt, + UpdatedAt: updatedAt, + }, + }, + false, + }, + { + "clear nullables", + models.UpdateStudioInput{ + Studio: &models.Studio{ + ID: studioIDs[studioIdxWithGallery], + Name: name, // name is mandatory + URLs: models.NewRelatedStrings([]string{}), + Aliases: models.NewRelatedStrings([]string{}), + TagIDs: models.NewRelatedIDs([]int{}), + StashIDs: models.NewRelatedStashIDs([]models.StashID{}), + }, + }, + false, + }, + { + "clear tag ids", + 
models.UpdateStudioInput{ + Studio: &models.Studio{ + ID: studioIDs[sceneIdxWithTag], + Name: name, // name is mandatory + TagIDs: models.NewRelatedIDs([]int{}), + }, + }, + false, + }, + { + "set custom fields", + models.UpdateStudioInput{ + Studio: &models.Studio{ + ID: studioIDs[studioIdxWithGallery], + Name: name, // name is mandatory + }, + CustomFields: models.CustomFieldsInput{ + Full: testCustomFields, + }, + }, + false, + }, + { + "clear custom fields", + models.UpdateStudioInput{ + Studio: &models.Studio{ + ID: studioIDs[studioIdxWithGallery], + Name: name, // name is mandatory + }, + CustomFields: models.CustomFieldsInput{ + Full: map[string]interface{}{}, + }, + }, + false, + }, + { + "invalid tag id", + models.UpdateStudioInput{ + Studio: &models.Studio{ + ID: studioIDs[sceneIdxWithGallery], + Name: name, // name is mandatory + TagIDs: models.NewRelatedIDs([]int{invalidID}), + }, + }, + true, + }, + } + + qb := db.Studio + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + copy := *tt.updatedObject.Studio + + if err := qb.Update(ctx, &tt.updatedObject); (err != nil) != tt.wantErr { + t.Errorf("StudioStore.Update() error = %v, wantErr %v", err, tt.wantErr) + } + + if tt.wantErr { + return + } + + s, err := qb.Find(ctx, tt.updatedObject.ID) + if err != nil { + t.Errorf("StudioStore.Find() error = %v", err) + } + + // load relationships + if err := loadStudioRelationships(ctx, copy, s); err != nil { + t.Errorf("loadStudioRelationships() error = %v", err) + return + } + + assert.Equal(copy, *s) + + // ensure custom fields are correct + if tt.updatedObject.CustomFields.Full != nil { + cf, err := qb.GetCustomFields(ctx, tt.updatedObject.ID) + if err != nil { + t.Errorf("StudioStore.GetCustomFields() error = %v", err) + return + } + + assert.Equal(tt.updatedObject.CustomFields.Full, cf) + } + }) + } +} + +func clearStudioPartial() models.StudioPartial { + nullString := 
models.OptionalString{Set: true, Null: true} + nullInt := models.OptionalInt{Set: true, Null: true} + + // leave mandatory fields + return models.StudioPartial{ + URLs: &models.UpdateStrings{Mode: models.RelationshipUpdateModeSet}, + Aliases: &models.UpdateStrings{Mode: models.RelationshipUpdateModeSet}, + Rating: nullInt, + Details: nullString, + TagIDs: &models.UpdateIDs{Mode: models.RelationshipUpdateModeSet}, + StashIDs: &models.UpdateStashIDs{Mode: models.RelationshipUpdateModeSet}, + } +} + +func Test_StudioStore_UpdatePartial(t *testing.T) { + var ( + name = "name" + details = "details" + url = "url" + aliases = []string{"aliasX", "aliasY"} + rating = 3 + ignoreAutoTag = true + organized = true + favorite = true + endpoint1 = "endpoint1" + endpoint2 = "endpoint2" + stashID1 = "stashid1" + stashID2 = "stashid2" + createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + ) + + tests := []struct { + name string + id int + partial models.StudioPartial + want models.Studio + wantErr bool + }{ + { + "full", + studioIDs[studioIdxWithDupName], + models.StudioPartial{ + Name: models.NewOptionalString(name), + URLs: &models.UpdateStrings{ + Values: []string{url}, + Mode: models.RelationshipUpdateModeSet, + }, + Aliases: &models.UpdateStrings{ + Values: aliases, + Mode: models.RelationshipUpdateModeSet, + }, + Favorite: models.NewOptionalBool(favorite), + Rating: models.NewOptionalInt(rating), + Details: models.NewOptionalString(details), + IgnoreAutoTag: models.NewOptionalBool(ignoreAutoTag), + Organized: models.NewOptionalBool(organized), + TagIDs: &models.UpdateIDs{ + IDs: []int{tagIDs[tagIdx1WithStudio], tagIDs[tagIdx1WithDupName]}, + Mode: models.RelationshipUpdateModeSet, + }, + StashIDs: &models.UpdateStashIDs{ + StashIDs: []models.StashID{ + { + StashID: stashID1, + Endpoint: endpoint1, + UpdatedAt: epochTime, + }, + { + StashID: stashID2, + Endpoint: endpoint2, + UpdatedAt: epochTime, + }, + }, + Mode: 
models.RelationshipUpdateModeSet, + }, + CreatedAt: models.NewOptionalTime(createdAt), + UpdatedAt: models.NewOptionalTime(updatedAt), + }, + models.Studio{ + ID: studioIDs[studioIdxWithDupName], + Name: name, + URLs: models.NewRelatedStrings([]string{url}), + Aliases: models.NewRelatedStrings(aliases), + Favorite: favorite, + Rating: &rating, + Details: details, + IgnoreAutoTag: ignoreAutoTag, + Organized: organized, + TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithStudio]}), + StashIDs: models.NewRelatedStashIDs([]models.StashID{ + { + StashID: stashID1, + Endpoint: endpoint1, + UpdatedAt: epochTime, + }, + { + StashID: stashID2, + Endpoint: endpoint2, + UpdatedAt: epochTime, + }, + }), + CreatedAt: createdAt, + UpdatedAt: updatedAt, + }, + false, + }, + { + "clear all", + studioIDs[studioIdxWithTwoTags], + clearStudioPartial(), + models.Studio{ + ID: studioIDs[studioIdxWithTwoTags], + Name: getStudioStringValue(studioIdxWithTwoTags, "Name"), + Favorite: getStudioBoolValue(studioIdxWithTwoTags), + Aliases: models.NewRelatedStrings([]string{}), + TagIDs: models.NewRelatedIDs([]int{}), + StashIDs: models.NewRelatedStashIDs([]models.StashID{}), + IgnoreAutoTag: getIgnoreAutoTag(studioIdxWithTwoTags), + }, + false, + }, + { + "invalid id", + invalidID, + models.StudioPartial{Name: models.NewOptionalString(name)}, + models.Studio{}, + true, + }, + } + for _, tt := range tests { + qb := db.Studio + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + tt.partial.ID = tt.id + + got, err := qb.UpdatePartial(ctx, tt.partial) + if (err != nil) != tt.wantErr { + t.Errorf("StudioStore.UpdatePartial() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if tt.wantErr { + return + } + + if err := loadStudioRelationships(ctx, tt.want, got); err != nil { + t.Errorf("loadStudioRelationships() error = %v", err) + return + } + + assert.Equal(tt.want, *got) + + s, err := qb.Find(ctx, tt.id) + 
if err != nil { + t.Errorf("StudioStore.Find() error = %v", err) + } + + // load relationships + if err := loadStudioRelationships(ctx, tt.want, s); err != nil { + t.Errorf("loadStudioRelationships() error = %v", err) + return + } + + assert.Equal(tt.want, *s) + }) + } +} + +func Test_StudioStore_UpdatePartialCustomFields(t *testing.T) { + tests := []struct { + name string + id int + partial models.StudioPartial + expected map[string]interface{} // nil to use the partial + }{ + { + "set custom fields", + studioIDs[studioIdxWithGallery], + models.StudioPartial{ + CustomFields: models.CustomFieldsInput{ + Full: testCustomFields, + }, + }, + nil, + }, + { + "clear custom fields", + studioIDs[studioIdxWithGallery], + models.StudioPartial{ + CustomFields: models.CustomFieldsInput{ + Full: map[string]interface{}{}, + }, + }, + nil, + }, + { + "partial custom fields", + studioIDs[studioIdxWithGallery], + models.StudioPartial{ + CustomFields: models.CustomFieldsInput{ + Partial: map[string]interface{}{ + "string": "bbb", + "new_field": "new", + }, + }, + }, + map[string]interface{}{ + "int": int64(2), + "real": 0.7, + "string": "bbb", + "new_field": "new", + }, + }, + } + for _, tt := range tests { + qb := db.Studio + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + tt.partial.ID = tt.id + + _, err := qb.UpdatePartial(ctx, tt.partial) + if err != nil { + t.Errorf("StudioStore.UpdatePartial() error = %v", err) + return + } + + // ensure custom fields are correct + cf, err := qb.GetCustomFields(ctx, tt.id) + if err != nil { + t.Errorf("StudioStore.GetCustomFields() error = %v", err) + return + } + if tt.expected == nil { + assert.Equal(tt.partial.CustomFields.Full, cf) + } else { + assert.Equal(tt.expected, cf) + } + }) + } +} + func TestStudioQueryNameOr(t *testing.T) { const studio1Idx = 1 const studio2Idx = 2 @@ -82,14 +643,6 @@ func TestStudioQueryNameOr(t *testing.T) { }) } -func loadStudioRelationships(ctx 
context.Context, t *testing.T, s *models.Studio) error { - if err := s.LoadURLs(ctx, db.Studio); err != nil { - return err - } - - return nil -} - func TestStudioQueryNameAndUrl(t *testing.T) { const studioIdx = 1 studioName := getStudioStringValue(studioIdx, "Name") @@ -311,13 +864,13 @@ func TestStudioDestroyParent(t *testing.T) { // create parent and child studios if err := withTxn(func(ctx context.Context) error { - createdParent, err := createStudio(ctx, db.Studio, parentName, nil) + createdParent, err := createStudio(ctx, db.Studio, parentName, nil, nil) if err != nil { return fmt.Errorf("Error creating parent studio: %s", err.Error()) } parentID := createdParent.ID - createdChild, err := createStudio(ctx, db.Studio, childName, &parentID) + createdChild, err := createStudio(ctx, db.Studio, childName, &parentID, nil) if err != nil { return fmt.Errorf("Error creating child studio: %s", err.Error()) } @@ -373,13 +926,13 @@ func TestStudioUpdateClearParent(t *testing.T) { // create parent and child studios if err := withTxn(func(ctx context.Context) error { - createdParent, err := createStudio(ctx, db.Studio, parentName, nil) + createdParent, err := createStudio(ctx, db.Studio, parentName, nil, nil) if err != nil { return fmt.Errorf("Error creating parent studio: %s", err.Error()) } parentID := createdParent.ID - createdChild, err := createStudio(ctx, db.Studio, childName, &parentID) + createdChild, err := createStudio(ctx, db.Studio, childName, &parentID, nil) if err != nil { return fmt.Errorf("Error creating child studio: %s", err.Error()) } @@ -414,7 +967,7 @@ func TestStudioUpdateStudioImage(t *testing.T) { // create studio to test against const name = "TestStudioUpdateStudioImage" - created, err := createStudio(ctx, db.Studio, name, nil) + created, err := createStudio(ctx, db.Studio, name, nil, nil) if err != nil { return fmt.Errorf("Error creating studio: %s", err.Error()) } @@ -578,7 +1131,7 @@ func TestStudioStashIDs(t *testing.T) { // create studio to 
test against const name = "TestStudioStashIDs" - created, err := createStudio(ctx, db.Studio, name, nil) + created, err := createStudio(ctx, db.Studio, name, nil, nil) if err != nil { return fmt.Errorf("Error creating studio: %s", err.Error()) } @@ -990,7 +1543,7 @@ func TestStudioAlias(t *testing.T) { // create studio to test against const name = "TestStudioAlias" - created, err := createStudio(ctx, db.Studio, name, nil) + created, err := createStudio(ctx, db.Studio, name, nil, nil) if err != nil { return fmt.Errorf("Error creating studio: %s", err.Error()) } @@ -1148,6 +1701,251 @@ func TestStudioQueryFast(t *testing.T) { }) } +func studiesToIDs(i []*models.Studio) []int { + ret := make([]int, len(i)) + for i, v := range i { + ret[i] = v.ID + } + + return ret +} + +func TestStudioQueryCustomFields(t *testing.T) { + tests := []struct { + name string + filter *models.StudioFilterType + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "equals", + &models.StudioFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierEquals, + Value: []any{getStudioStringValue(studioIdxWithTwoScenes, "custom")}, + }, + }, + }, + []int{studioIdxWithTwoScenes}, + nil, + false, + }, + { + "not equals", + &models.StudioFilterType{ + Name: &models.StringCriterionInput{ + Value: getStudioStringValue(studioIdxWithTwoScenes, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotEquals, + Value: []any{getStudioStringValue(studioIdxWithTwoScenes, "custom")}, + }, + }, + }, + nil, + []int{studioIdxWithTwoScenes}, + false, + }, + { + "includes", + &models.StudioFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierIncludes, + Value: []any{getStudioStringValue(studioIdxWithTwoScenes, "custom")[9:]}, + }, + }, + }, + 
[]int{studioIdxWithTwoScenes}, + nil, + false, + }, + { + "excludes", + &models.StudioFilterType{ + Name: &models.StringCriterionInput{ + Value: getStudioStringValue(studioIdxWithTwoScenes, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierExcludes, + Value: []any{getStudioStringValue(studioIdxWithTwoScenes, "custom")[9:]}, + }, + }, + }, + nil, + []int{studioIdxWithTwoScenes}, + false, + }, + { + "regex", + &models.StudioFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{".*1_custom"}, + }, + }, + }, + []int{studioIdxWithTwoScenes}, + nil, + false, + }, + { + "invalid regex", + &models.StudioFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "not matches regex", + &models.StudioFilterType{ + Name: &models.StringCriterionInput{ + Value: getStudioStringValue(studioIdxWithTwoScenes, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{".*1_custom"}, + }, + }, + }, + nil, + []int{studioIdxWithTwoScenes}, + false, + }, + { + "invalid not matches regex", + &models.StudioFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "null", + &models.StudioFilterType{ + Name: &models.StringCriterionInput{ + Value: getStudioStringValue(studioIdxWithTwoScenes, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "not existing", + Modifier: 
models.CriterionModifierIsNull, + }, + }, + }, + []int{studioIdxWithTwoScenes}, + nil, + false, + }, + { + "not null", + &models.StudioFilterType{ + Name: &models.StringCriterionInput{ + Value: getStudioStringValue(studioIdxWithTwoScenes, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotNull, + }, + }, + }, + []int{studioIdxWithTwoScenes}, + nil, + false, + }, + { + "between", + &models.StudioFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + []int{studioIdxWithGroup}, + nil, + false, + }, + { + "not between", + &models.StudioFilterType{ + Name: &models.StringCriterionInput{ + Value: getStudioStringValue(studioIdxWithGroup, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierNotBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + nil, + []int{studioIdxWithGroup}, + false, + }, + } + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + studios, _, err := db.Studio.Query(ctx, tt.filter, nil) + if (err != nil) != tt.wantErr { + t.Errorf("StudioStore.Query() error = %v, wantErr %v", err, tt.wantErr) + return + } + + ids := studiesToIDs(studios) + include := indexesToIDs(studioIDs, tt.includeIdxs) + exclude := indexesToIDs(studioIDs, tt.excludeIdxs) + + for _, i := range include { + assert.Contains(ids, i) + } + for _, e := range exclude { + assert.NotContains(ids, e) + } + }) + } +} + // TODO Create // TODO Update // TODO Destroy diff --git a/pkg/sqlite/table.go b/pkg/sqlite/table.go index 790e84e94..3f8dfb70f 100644 --- a/pkg/sqlite/table.go +++ b/pkg/sqlite/table.go @@ -1209,6 +1209,14 @@ func querySimple(ctx context.Context, query 
*goqu.SelectDataset, out interface{} return nil } +func querySelect(ctx context.Context, query string, args []interface{}, dest interface{}) error { + if err := dbWrapper.Select(ctx, dest, query, args...); err != nil && !errors.Is(err, sql.ErrNoRows) { + return fmt.Errorf("running query: %s [%v]: %w", query, args, err) + } + + return nil +} + // func cols(table exp.IdentifierExpression, cols []string) []interface{} { // var ret []interface{} // for _, c := range cols { diff --git a/pkg/sqlite/tables.go b/pkg/sqlite/tables.go index 7cddf25cc..4c09113f0 100644 --- a/pkg/sqlite/tables.go +++ b/pkg/sqlite/tables.go @@ -14,12 +14,14 @@ var ( performersImagesJoinTable = goqu.T(performersImagesTable) imagesFilesJoinTable = goqu.T(imagesFilesTable) imagesURLsJoinTable = goqu.T(imagesURLsTable) + imagesCustomFieldsTable = goqu.T("image_custom_fields") galleriesFilesJoinTable = goqu.T(galleriesFilesTable) galleriesTagsJoinTable = goqu.T(galleriesTagsTable) performersGalleriesJoinTable = goqu.T(performersGalleriesTable) galleriesScenesJoinTable = goqu.T(galleriesScenesTable) galleriesURLsJoinTable = goqu.T(galleriesURLsTable) + galleriesCustomFieldsTable = goqu.T("gallery_custom_fields") scenesFilesJoinTable = goqu.T(scenesFilesTable) scenesTagsJoinTable = goqu.T(scenesTagsTable) @@ -27,6 +29,7 @@ var ( scenesStashIDsJoinTable = goqu.T("scene_stash_ids") scenesGroupsJoinTable = goqu.T(groupsScenesTable) scenesURLsJoinTable = goqu.T(scenesURLsTable) + scenesCustomFieldsTable = goqu.T("scene_custom_fields") sceneMarkersTagsJoinTable = goqu.T(sceneMarkersTagsTable) @@ -40,14 +43,17 @@ var ( studiosURLsJoinTable = goqu.T(studioURLsTable) studiosTagsJoinTable = goqu.T(studiosTagsTable) studiosStashIDsJoinTable = goqu.T("studio_stash_ids") + studiosCustomFieldsTable = goqu.T("studio_custom_fields") groupsURLsJoinTable = goqu.T(groupURLsTable) groupsTagsJoinTable = goqu.T(groupsTagsTable) groupRelationsJoinTable = goqu.T(groupRelationsTable) + groupsCustomFieldsTable = 
goqu.T("group_custom_fields") tagsAliasesJoinTable = goqu.T(tagAliasesTable) tagRelationsJoinTable = goqu.T(tagRelationsTable) tagsStashIDsJoinTable = goqu.T("tag_stash_ids") + tagsCustomFieldsTable = goqu.T("tag_custom_fields") ) var ( diff --git a/pkg/sqlite/tag.go b/pkg/sqlite/tag.go index 977ac0433..f6a542c91 100644 --- a/pkg/sqlite/tag.go +++ b/pkg/sqlite/tag.go @@ -104,9 +104,12 @@ type tagRepositoryType struct { aliases stringRepository stashIDs stashIDRepository - scenes joinRepository - images joinRepository - galleries joinRepository + scenes joinRepository + images joinRepository + galleries joinRepository + groups joinRepository + performers joinRepository + studios joinRepository } var ( @@ -152,11 +155,36 @@ var ( fkColumn: galleryIDColumn, foreignTable: galleryTable, }, + groups: joinRepository{ + repository: repository{ + tableName: groupsTagsTable, + idColumn: tagIDColumn, + }, + fkColumn: groupIDColumn, + foreignTable: groupTable, + }, + performers: joinRepository{ + repository: repository{ + tableName: performersTagsTable, + idColumn: tagIDColumn, + }, + fkColumn: performerIDColumn, + foreignTable: performerTable, + }, + studios: joinRepository{ + repository: repository{ + tableName: studiosTagsTable, + idColumn: tagIDColumn, + }, + fkColumn: studioIDColumn, + foreignTable: studioTable, + }, } ) type TagStore struct { blobJoinQueryBuilder + customFieldsStore tableMgr *table } @@ -167,6 +195,10 @@ func NewTagStore(blobStore *BlobStore) *TagStore { blobStore: blobStore, joinTable: tagTable, }, + customFieldsStore: customFieldsStore{ + table: tagsCustomFieldsTable, + fk: tagsCustomFieldsTable.Col(tagIDColumn), + }, tableMgr: tagTableMgr, } } @@ -179,9 +211,9 @@ func (qb *TagStore) selectDataset() *goqu.SelectDataset { return dialect.From(qb.table()).Select(qb.table().All()) } -func (qb *TagStore) Create(ctx context.Context, newObject *models.Tag) error { +func (qb *TagStore) Create(ctx context.Context, newObject *models.CreateTagInput) error { var r 
tagRow - r.fromTag(*newObject) + r.fromTag(*newObject.Tag) id, err := qb.tableMgr.insertID(ctx, r) if err != nil { @@ -212,12 +244,17 @@ func (qb *TagStore) Create(ctx context.Context, newObject *models.Tag) error { } } + const partial = false + if err := qb.setCustomFields(ctx, id, newObject.CustomFields, partial); err != nil { + return err + } + updated, err := qb.find(ctx, id) if err != nil { return fmt.Errorf("finding after create: %w", err) } - *newObject = *updated + *newObject.Tag = *updated return nil } @@ -261,12 +298,16 @@ func (qb *TagStore) UpdatePartial(ctx context.Context, id int, partial models.Ta } } + if err := qb.SetCustomFields(ctx, id, partial.CustomFields); err != nil { + return nil, err + } + return qb.find(ctx, id) } -func (qb *TagStore) Update(ctx context.Context, updatedObject *models.Tag) error { +func (qb *TagStore) Update(ctx context.Context, updatedObject *models.UpdateTagInput) error { var r tagRow - r.fromTag(*updatedObject) + r.fromTag(*updatedObject.Tag) if err := qb.tableMgr.updateByID(ctx, updatedObject.ID, r); err != nil { return err @@ -296,6 +337,10 @@ func (qb *TagStore) Update(ctx context.Context, updatedObject *models.Tag) error } } + if err := qb.SetCustomFields(ctx, updatedObject.ID, updatedObject.CustomFields); err != nil { + return err + } + return nil } @@ -552,6 +597,36 @@ func (qb *TagStore) FindByStashID(ctx context.Context, stashID models.StashID) ( return ret, nil } +func (qb *TagStore) FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*models.Tag, error) { + table := qb.table() + sq := dialect.From(table).LeftJoin( + tagsStashIDsJoinTable, + goqu.On(table.Col(idColumn).Eq(tagsStashIDsJoinTable.Col(tagIDColumn))), + ).Select(table.Col(idColumn)) + + if hasStashID { + sq = sq.Where( + tagsStashIDsJoinTable.Col("stash_id").IsNotNull(), + tagsStashIDsJoinTable.Col("endpoint").Eq(stashboxEndpoint), + ) + } else { + sq = sq.Where( + tagsStashIDsJoinTable.Col("stash_id").IsNull(), + ) 
+ } + + idsQuery := qb.selectDataset().Where( + table.Col(idColumn).In(sq), + ) + + ret, err := qb.getMany(ctx, idsQuery) + if err != nil { + return nil, fmt.Errorf("getting tags for stash-box endpoint %s: %w", stashboxEndpoint, err) + } + + return ret, nil +} + func (qb *TagStore) GetParentIDs(ctx context.Context, relatedID int) ([]int, error) { return tagsParentTagsTableMgr.get(ctx, relatedID) } @@ -695,6 +770,7 @@ var tagSortOptions = sortOptions{ "scene_markers_count", "scenes_count", "scenes_duration", + "scenes_size", "updated_at", } @@ -709,6 +785,17 @@ func (qb *TagStore) sortByScenesDuration(direction string) string { ) %s`, scenesTagsTable, sceneTable, sceneTable, scenesTagsTable, sceneIDColumn, scenesFilesTable, scenesFilesTable, sceneIDColumn, sceneTable, scenesFilesTable, scenesTagsTable, tagIDColumn, tagTable, getSortDirection(direction)) } +func (qb *TagStore) sortByScenesSize(direction string) string { + return fmt.Sprintf(` ORDER BY ( + SELECT COALESCE(SUM(%s.size), 0) + FROM %s + LEFT JOIN %s ON %s.id = %s.%s + LEFT JOIN %s ON %s.%s = %s.id + LEFT JOIN %s ON %s.id = %s.file_id + WHERE %s.%s = %s.id + ) %s`, fileTable, scenesTagsTable, sceneTable, sceneTable, scenesTagsTable, sceneIDColumn, scenesFilesTable, scenesFilesTable, sceneIDColumn, sceneTable, fileTable, fileTable, scenesFilesTable, scenesTagsTable, tagIDColumn, tagTable, getSortDirection(direction)) +} + func (qb *TagStore) getDefaultTagSort() string { return getSort("name", "ASC", "tags") } @@ -737,6 +824,8 @@ func (qb *TagStore) getTagSort(query *queryBuilder, findFilter *models.FindFilte sortQuery += getCountSort(tagTable, scenesTagsTable, tagIDColumn, direction) case "scenes_duration": sortQuery += qb.sortByScenesDuration(direction) + case "scenes_size": + sortQuery += qb.sortByScenesSize(direction) case "scene_markers_count": sortQuery += fmt.Sprintf(" ORDER BY (SELECT COUNT(*) FROM scene_markers_tags WHERE tags.id = scene_markers_tags.tag_id)+(SELECT COUNT(*) FROM scene_markers 
WHERE tags.id = scene_markers.primary_tag_id) %s", getSortDirection(direction)) case "images_count": @@ -859,6 +948,8 @@ func (qb *TagStore) Merge(ctx context.Context, source []int, destination int) er } args = append(args, destination) + + // for each table, update source tag ids to destination tag id, ignoring duplicates for table, idColumn := range tagTables { _, err := dbWrapper.Exec(ctx, `UPDATE OR IGNORE `+table+` SET tag_id = ? diff --git a/pkg/sqlite/tag_filter.go b/pkg/sqlite/tag_filter.go index 27ccf3c09..5fd41e80a 100644 --- a/pkg/sqlite/tag_filter.go +++ b/pkg/sqlite/tag_filter.go @@ -91,10 +91,23 @@ func (qb *tagFilterHandler) criterionHandler() criterionHandler { stashIDTableAs: "tag_stash_ids", parentIDCol: "tags.id", }, + &stashIDsCriterionHandler{ + c: tagFilter.StashIDsEndpoint, + stashIDRepository: &tagRepository.stashIDs, + stashIDTableAs: "tag_stash_ids", + parentIDCol: "tags.id", + }, ×tampCriterionHandler{tagFilter.CreatedAt, "tags.created_at", nil}, ×tampCriterionHandler{tagFilter.UpdatedAt, "tags.updated_at", nil}, + &customFieldsFilterHandler{ + table: tagsCustomFieldsTable.GetTable(), + fkCol: tagIDColumn, + c: tagFilter.CustomFields, + idCol: "tags.id", + }, + &relatedFilterHandler{ relatedIDCol: "scenes_tags.scene_id", relatedRepo: sceneRepository.repository, @@ -121,6 +134,47 @@ func (qb *tagFilterHandler) criterionHandler() criterionHandler { tagRepository.galleries.innerJoin(f, "", "tags.id") }, }, + + &relatedFilterHandler{ + relatedIDCol: "groups_tags.group_id", + relatedRepo: groupRepository.repository, + relatedHandler: &groupFilterHandler{tagFilter.GroupsFilter}, + joinFn: func(f *filterBuilder) { + tagRepository.groups.innerJoin(f, "", "tags.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "performers_tags.performer_id", + relatedRepo: performerRepository.repository, + relatedHandler: &performerFilterHandler{tagFilter.PerformersFilter}, + joinFn: func(f *filterBuilder) { + tagRepository.performers.innerJoin(f, "", 
"tags.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "studios_tags.studio_id", + relatedRepo: studioRepository.repository, + relatedHandler: &studioFilterHandler{tagFilter.StudiosFilter}, + joinFn: func(f *filterBuilder) { + tagRepository.studios.innerJoin(f, "", "tags.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "markers_tags.marker_id", + relatedRepo: sceneMarkerRepository.repository, + relatedHandler: &sceneMarkerFilterHandler{tagFilter.MarkersFilter}, + joinFn: func(f *filterBuilder) { + f.addWith(`markers_tags AS ( + SELECT mt.scene_marker_id AS marker_id, mt.tag_id AS tag_id FROM scene_markers_tags mt + UNION + SELECT m.id, m.primary_tag_id FROM scene_markers m + )`) + f.addInnerJoin("markers_tags", "", "markers_tags.tag_id = tags.id") + }, + }, } } @@ -144,7 +198,19 @@ func (qb *tagFilterHandler) isMissingCriterionHandler(isMissing *string) criteri switch *isMissing { case "image": f.addWhere("tags.image_blob IS NULL") + case "aliases": + tagRepository.aliases.join(f, "", "tags.id") + f.addWhere("tag_aliases.alias IS NULL") + case "stash_id": + tagRepository.stashIDs.join(f, "tag_stash_ids", "tags.id") + f.addWhere("tag_stash_ids.tag_id IS NULL") default: + if err := validateIsMissing(*isMissing, []string{ + "description", + }); err != nil { + f.setError(err) + return + } f.addWhere("(tags." + *isMissing + " IS NULL OR TRIM(tags." 
+ *isMissing + ") = '')") } } diff --git a/pkg/sqlite/tag_test.go b/pkg/sqlite/tag_test.go index 18fe486bc..179969fd6 100644 --- a/pkg/sqlite/tag_test.go +++ b/pkg/sqlite/tag_test.go @@ -356,6 +356,8 @@ func TestTagQuery(t *testing.T) { var ( endpoint = tagStashID(tagIdxWithPerformer).Endpoint stashID = tagStashID(tagIdxWithPerformer).StashID + stashID2 = tagStashID(tagIdx1WithPerformer).StashID + stashIDs = []*string{&stashID, &stashID2} ) tests := []struct { @@ -420,6 +422,60 @@ func TestTagQuery(t *testing.T) { nil, false, }, + { + "stash ids with endpoint", + nil, + &models.TagFilterType{ + StashIDsEndpoint: &models.StashIDsCriterionInput{ + Endpoint: &endpoint, + StashIDs: stashIDs, + Modifier: models.CriterionModifierEquals, + }, + }, + []int{tagIdxWithPerformer, tagIdx1WithPerformer}, + nil, + false, + }, + { + "exclude stash ids with endpoint", + nil, + &models.TagFilterType{ + StashIDsEndpoint: &models.StashIDsCriterionInput{ + Endpoint: &endpoint, + StashIDs: stashIDs, + Modifier: models.CriterionModifierNotEquals, + }, + }, + nil, + []int{tagIdxWithPerformer, tagIdx1WithPerformer}, + false, + }, + { + "null stash ids with endpoint", + nil, + &models.TagFilterType{ + StashIDsEndpoint: &models.StashIDsCriterionInput{ + Endpoint: &endpoint, + Modifier: models.CriterionModifierIsNull, + }, + }, + nil, + []int{tagIdxWithPerformer, tagIdx1WithPerformer}, + false, + }, + { + "not null stash ids with endpoint", + nil, + &models.TagFilterType{ + StashIDsEndpoint: &models.StashIDsCriterionInput{ + Endpoint: &endpoint, + Modifier: models.CriterionModifierNotNull, + }, + }, + []int{tagIdxWithPerformer, tagIdx1WithPerformer}, + nil, + false, + }, } for _, tt := range tests { @@ -956,8 +1012,10 @@ func TestTagUpdateTagImage(t *testing.T) { // create tag to test against const name = "TestTagUpdateTagImage" - tag := models.Tag{ - Name: name, + tag := models.CreateTagInput{ + Tag: &models.Tag{ + Name: name, + }, } err := qb.Create(ctx, &tag) if err != nil { @@ -976,15 
+1034,17 @@ func TestTagUpdateAlias(t *testing.T) { // create tag to test against const name = "TestTagUpdateAlias" - tag := models.Tag{ - Name: name, + tag := models.CreateTagInput{ + Tag: &models.Tag{ + Name: name, + }, } err := qb.Create(ctx, &tag) if err != nil { return fmt.Errorf("Error creating tag: %s", err.Error()) } - aliases := []string{"alias1", "alias2"} + aliases := []string{"updatedAlias1", "updatedAlias2"} err = qb.UpdateAliases(ctx, tag.ID, aliases) if err != nil { return fmt.Errorf("Error updating tag aliases: %s", err.Error()) @@ -1009,8 +1069,10 @@ func TestTagStashIDs(t *testing.T) { // create tag to test against const name = "TestTagStashIDs" - tag := models.Tag{ - Name: name, + tag := models.CreateTagInput{ + Tag: &models.Tag{ + Name: name, + }, } err := qb.Create(ctx, &tag) if err != nil { @@ -1033,9 +1095,11 @@ func TestTagFindByStashID(t *testing.T) { const name = "TestTagFindByStashID" const stashID = "stashid" const endpoint = "endpoint" - tag := models.Tag{ - Name: name, - StashIDs: models.NewRelatedStashIDs([]models.StashID{{StashID: stashID, Endpoint: endpoint}}), + tag := models.CreateTagInput{ + Tag: &models.Tag{ + Name: name, + StashIDs: models.NewRelatedStashIDs([]models.StashID{{StashID: stashID, Endpoint: endpoint}}), + }, } err := qb.Create(ctx, &tag) if err != nil { @@ -1207,8 +1271,685 @@ func TestTagMerge(t *testing.T) { } } -// TODO Create -// TODO Update +func loadTagRelationships(ctx context.Context, expected models.Tag, actual *models.Tag) error { + if expected.Aliases.Loaded() { + if err := actual.LoadAliases(ctx, db.Tag); err != nil { + return err + } + } + if expected.ParentIDs.Loaded() { + if err := actual.LoadParentIDs(ctx, db.Tag); err != nil { + return err + } + } + if expected.ChildIDs.Loaded() { + if err := actual.LoadChildIDs(ctx, db.Tag); err != nil { + return err + } + } + if expected.StashIDs.Loaded() { + if err := actual.LoadStashIDs(ctx, db.Tag); err != nil { + return err + } + } + + return nil +} + +func 
Test_TagStore_Create(t *testing.T) { + var ( + name = "name" + sortName = "sortName" + description = "description" + favorite = true + ignoreAutoTag = true + aliases = []string{"alias1", "alias2"} + endpoint1 = "endpoint1" + endpoint2 = "endpoint2" + stashID1 = "stashid1" + stashID2 = "stashid2" + createdAt = epochTime + updatedAt = epochTime + ) + + tests := []struct { + name string + newObject models.CreateTagInput + wantErr bool + }{ + { + "full", + models.CreateTagInput{ + Tag: &models.Tag{ + Name: name, + SortName: sortName, + Description: description, + Favorite: favorite, + IgnoreAutoTag: ignoreAutoTag, + Aliases: models.NewRelatedStrings(aliases), + ParentIDs: models.NewRelatedIDs([]int{tagIDs[tagIdxWithScene]}), + ChildIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene]}), + StashIDs: models.NewRelatedStashIDs([]models.StashID{ + { + StashID: stashID1, + Endpoint: endpoint1, + UpdatedAt: epochTime, + }, + { + StashID: stashID2, + Endpoint: endpoint2, + UpdatedAt: epochTime, + }, + }), + CreatedAt: createdAt, + UpdatedAt: updatedAt, + }, + CustomFields: testCustomFields, + }, + false, + }, + { + "invalid parent id", + models.CreateTagInput{ + Tag: &models.Tag{ + Name: name, + ParentIDs: models.NewRelatedIDs([]int{invalidID}), + }, + }, + true, + }, + { + "invalid child id", + models.CreateTagInput{ + Tag: &models.Tag{ + Name: name, + ChildIDs: models.NewRelatedIDs([]int{invalidID}), + }, + }, + true, + }, + } + + qb := db.Tag + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + p := tt.newObject + if err := qb.Create(ctx, &p); (err != nil) != tt.wantErr { + t.Errorf("TagStore.Create() error = %v, wantErr = %v", err, tt.wantErr) + } + + if tt.wantErr { + assert.Zero(p.ID) + return + } + + assert.NotZero(p.ID) + + copy := *tt.newObject.Tag + copy.ID = p.ID + + // load relationships + if err := loadTagRelationships(ctx, copy, p.Tag); err != nil { + 
t.Errorf("loadTagRelationships() error = %v", err) + return + } + + assert.Equal(copy, *p.Tag) + + // ensure can find the tag + found, err := qb.Find(ctx, p.ID) + if err != nil { + t.Errorf("TagStore.Find() error = %v", err) + } + + if !assert.NotNil(found) { + return + } + + // load relationships + if err := loadTagRelationships(ctx, copy, found); err != nil { + t.Errorf("loadTagRelationships() error = %v", err) + return + } + assert.Equal(copy, *found) + + // ensure custom fields are set + cf, err := qb.GetCustomFields(ctx, p.ID) + if err != nil { + t.Errorf("TagStore.GetCustomFields() error = %v", err) + return + } + + assert.Equal(tt.newObject.CustomFields, cf) + + return + }) + } +} + +func Test_TagStore_Update(t *testing.T) { + var ( + name = "name" + sortName = "sortName" + description = "description" + favorite = true + ignoreAutoTag = true + aliases = []string{"alias1", "alias2"} + endpoint1 = "endpoint1" + endpoint2 = "endpoint2" + stashID1 = "stashid1" + stashID2 = "stashid2" + createdAt = epochTime + updatedAt = epochTime + ) + + tests := []struct { + name string + updatedObject models.UpdateTagInput + wantErr bool + }{ + { + "full", + models.UpdateTagInput{ + Tag: &models.Tag{ + ID: tagIDs[tagIdxWithGallery], + Name: name, + SortName: sortName, + Description: description, + Favorite: favorite, + IgnoreAutoTag: ignoreAutoTag, + Aliases: models.NewRelatedStrings(aliases), + ParentIDs: models.NewRelatedIDs([]int{tagIDs[tagIdxWithScene]}), + ChildIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene]}), + StashIDs: models.NewRelatedStashIDs([]models.StashID{ + { + StashID: stashID1, + Endpoint: endpoint1, + UpdatedAt: epochTime, + }, + { + StashID: stashID2, + Endpoint: endpoint2, + UpdatedAt: epochTime, + }, + }), + CreatedAt: createdAt, + UpdatedAt: updatedAt, + }, + CustomFields: models.CustomFieldsInput{ + Full: map[string]interface{}{ + "string": "updated", + "int": int64(999), + "real": 9.99, + }, + }, + }, + false, + }, + { + "set custom fields", 
+ models.UpdateTagInput{ + Tag: &models.Tag{ + ID: tagIDs[tagIdxWithGallery], + Name: tagNames[tagIdxWithGallery], + }, + CustomFields: models.CustomFieldsInput{ + Full: testCustomFields, + }, + }, + false, + }, + { + "clear custom fields", + models.UpdateTagInput{ + Tag: &models.Tag{ + ID: tagIDs[tagIdxWithGallery], + Name: tagNames[tagIdxWithGallery], + }, + CustomFields: models.CustomFieldsInput{ + Full: map[string]interface{}{}, + }, + }, + false, + }, + { + "invalid parent id", + models.UpdateTagInput{ + Tag: &models.Tag{ + ID: tagIDs[tagIdxWithGallery], + Name: tagNames[tagIdxWithGallery], + ParentIDs: models.NewRelatedIDs([]int{invalidID}), + }, + }, + true, + }, + { + "invalid child id", + models.UpdateTagInput{ + Tag: &models.Tag{ + ID: tagIDs[tagIdxWithGallery], + Name: tagNames[tagIdxWithGallery], + ChildIDs: models.NewRelatedIDs([]int{invalidID}), + }, + }, + true, + }, + } + + qb := db.Tag + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + p := tt.updatedObject + if err := qb.Update(ctx, &p); (err != nil) != tt.wantErr { + t.Errorf("TagStore.Update() error = %v, wantErr = %v", err, tt.wantErr) + } + + if tt.wantErr { + return + } + + s, err := qb.Find(ctx, tt.updatedObject.ID) + if err != nil { + t.Errorf("TagStore.Find() error = %v", err) + return + } + + // load relationships + if err := loadTagRelationships(ctx, *tt.updatedObject.Tag, s); err != nil { + t.Errorf("loadTagRelationships() error = %v", err) + return + } + + assert.Equal(*tt.updatedObject.Tag, *s) + + // ensure custom fields are correct + if tt.updatedObject.CustomFields.Full != nil { + cf, err := qb.GetCustomFields(ctx, tt.updatedObject.ID) + if err != nil { + t.Errorf("TagStore.GetCustomFields() error = %v", err) + return + } + + assert.Equal(tt.updatedObject.CustomFields.Full, cf) + } + }) + } +} + +func Test_TagStore_UpdatePartialCustomFields(t *testing.T) { + tests := []struct { + name string + id 
int + partial models.TagPartial + expected map[string]interface{} // nil to use the partial + }{ + { + "set custom fields", + tagIDs[tagIdxWithGallery], + models.TagPartial{ + CustomFields: models.CustomFieldsInput{ + Full: testCustomFields, + }, + }, + nil, + }, + { + "clear custom fields", + tagIDs[tagIdxWithGallery], + models.TagPartial{ + CustomFields: models.CustomFieldsInput{ + Full: map[string]interface{}{}, + }, + }, + nil, + }, + { + "partial custom fields", + tagIDs[tagIdxWithGallery], + models.TagPartial{ + CustomFields: models.CustomFieldsInput{ + Partial: map[string]interface{}{ + "string": "bbb", + "new_field": "new", + }, + }, + }, + map[string]interface{}{ + "int": int64(2), + "real": float64(1.7), + "string": "bbb", + "new_field": "new", + }, + }, + } + for _, tt := range tests { + qb := db.Tag + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + _, err := qb.UpdatePartial(ctx, tt.id, tt.partial) + if err != nil { + t.Errorf("TagStore.UpdatePartial() error = %v", err) + return + } + + // ensure custom fields are correct + cf, err := qb.GetCustomFields(ctx, tt.id) + if err != nil { + t.Errorf("TagStore.GetCustomFields() error = %v", err) + return + } + if tt.expected == nil { + assert.Equal(tt.partial.CustomFields.Full, cf) + } else { + assert.Equal(tt.expected, cf) + } + }) + } +} + +func TestTagQueryCustomFields(t *testing.T) { + tests := []struct { + name string + filter *models.TagFilterType + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "equals", + &models.TagFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierEquals, + Value: []any{getTagStringValue(tagIdxWithGallery, "custom")}, + }, + }, + }, + []int{tagIdxWithGallery}, + nil, + false, + }, + { + "not equals", + &models.TagFilterType{ + Name: &models.StringCriterionInput{ + Value: getTagStringValue(tagIdxWithGallery, "Name"), + Modifier: 
models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotEquals, + Value: []any{getTagStringValue(tagIdxWithGallery, "custom")}, + }, + }, + }, + nil, + []int{tagIdxWithGallery}, + false, + }, + { + "includes", + &models.TagFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierIncludes, + Value: []any{getTagStringValue(tagIdxWithGallery, "custom")[9:]}, + }, + }, + }, + []int{tagIdxWithGallery}, + nil, + false, + }, + { + "excludes", + &models.TagFilterType{ + Name: &models.StringCriterionInput{ + Value: getTagStringValue(tagIdxWithGallery, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierExcludes, + Value: []any{getTagStringValue(tagIdxWithGallery, "custom")[9:]}, + }, + }, + }, + nil, + []int{tagIdxWithGallery}, + false, + }, + { + "regex", + &models.TagFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{".*17_custom"}, + }, + }, + }, + []int{tagIdxWithGallery}, + nil, + false, + }, + { + "invalid regex", + &models.TagFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "not matches regex", + &models.TagFilterType{ + Name: &models.StringCriterionInput{ + Value: getTagStringValue(tagIdxWithGallery, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{".*17_custom"}, + }, + }, + }, + nil, + []int{tagIdxWithGallery}, + false, + }, + { + "invalid not matches regex", + &models.TagFilterType{ + 
CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "null", + &models.TagFilterType{ + Name: &models.StringCriterionInput{ + Value: getTagStringValue(tagIdxWithGallery, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "not existing", + Modifier: models.CriterionModifierIsNull, + }, + }, + }, + []int{tagIdxWithGallery}, + nil, + false, + }, + { + "not null", + &models.TagFilterType{ + Name: &models.StringCriterionInput{ + Value: getTagStringValue(tagIdxWithGallery, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotNull, + }, + }, + }, + []int{tagIdxWithGallery}, + nil, + false, + }, + { + "between", + &models.TagFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + []int{tagIdx2WithScene}, + nil, + false, + }, + { + "not between", + &models.TagFilterType{ + Name: &models.StringCriterionInput{ + Value: getTagStringValue(tagIdx2WithScene, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierNotBetween, + Value: []any{0.15, 0.25}, + }, + }, + }, + nil, + []int{tagIdx2WithScene}, + false, + }, + } + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + tags, _, err := db.Tag.Query(ctx, tt.filter, nil) + if (err != nil) != tt.wantErr { + t.Errorf("TagStore.Query() error = %v, wantErr %v", err, tt.wantErr) + return + } + + ids := tagsToIDs(tags) + include := indexesToIDs(tagIDs, tt.includeIdxs) + exclude := indexesToIDs(tagIDs, 
tt.excludeIdxs) + + for _, i := range include { + assert.Contains(ids, i) + } + for _, e := range exclude { + assert.NotContains(ids, e) + } + }) + } + + // Test combining text search (findFilter.Q) with custom field filters. + // This verifies that positional args are bound in the correct order + // when JOINs (from custom fields) and WHERE (from text search) both + // have parameterized placeholders. + runWithRollbackTxn(t, "equals with text search", func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + tagName := getTagStringValue(tagIdxWithGallery, "Name") + q := tagName + findFilter := &models.FindFilterType{Q: &q} + + tagFilter := &models.TagFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierEquals, + Value: []any{getTagStringValue(tagIdxWithGallery, "custom")}, + }, + }, + } + + tags, _, err := db.Tag.Query(ctx, tagFilter, findFilter) + if err != nil { + t.Errorf("TagStore.Query() error = %v", err) + return + } + + ids := tagsToIDs(tags) + assert.Contains(ids, tagIDs[tagIdxWithGallery]) + assert.Len(tags, 1) + }) + + runWithRollbackTxn(t, "is_null with text search", func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + tagName := getTagStringValue(tagIdxWithGallery, "Name") + q := tagName + findFilter := &models.FindFilterType{Q: &q} + + tagFilter := &models.TagFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "not existing", + Modifier: models.CriterionModifierIsNull, + }, + }, + } + + tags, _, err := db.Tag.Query(ctx, tagFilter, findFilter) + if err != nil { + t.Errorf("TagStore.Query() error = %v", err) + return + } + + ids := tagsToIDs(tags) + assert.Contains(ids, tagIDs[tagIdxWithGallery]) + assert.Len(tags, 1) + }) +} + // TODO Destroy // TODO Find // TODO FindBySceneID diff --git a/pkg/stashbox/graphql/generated_client.go b/pkg/stashbox/graphql/generated_client.go index 640a1c893..bc9a6ce89 100644 --- 
a/pkg/stashbox/graphql/generated_client.go +++ b/pkg/stashbox/graphql/generated_client.go @@ -9,8 +9,6 @@ import ( ) type StashBoxGraphQLClient interface { - FindSceneByFingerprint(ctx context.Context, fingerprint FingerprintQueryInput, interceptors ...clientv2.RequestInterceptor) (*FindSceneByFingerprint, error) - FindScenesByFullFingerprints(ctx context.Context, fingerprints []*FingerprintQueryInput, interceptors ...clientv2.RequestInterceptor) (*FindScenesByFullFingerprints, error) FindScenesBySceneFingerprints(ctx context.Context, fingerprints [][]*FingerprintQueryInput, interceptors ...clientv2.RequestInterceptor) (*FindScenesBySceneFingerprints, error) SearchScene(ctx context.Context, term string, interceptors ...clientv2.RequestInterceptor) (*SearchScene, error) SearchPerformer(ctx context.Context, term string, interceptors ...clientv2.RequestInterceptor) (*SearchPerformer, error) @@ -130,8 +128,11 @@ func (t *StudioFragment) GetImages() []*ImageFragment { } type TagFragment struct { - Name string "json:\"name\" graphql:\"name\"" - ID string "json:\"id\" graphql:\"id\"" + Name string "json:\"name\" graphql:\"name\"" + ID string "json:\"id\" graphql:\"id\"" + Description *string "json:\"description,omitempty\" graphql:\"description\"" + Aliases []string "json:\"aliases\" graphql:\"aliases\"" + Category *TagFragment_Category "json:\"category,omitempty\" graphql:\"category\"" } func (t *TagFragment) GetName() string { @@ -146,6 +147,24 @@ func (t *TagFragment) GetID() string { } return t.ID } +func (t *TagFragment) GetDescription() *string { + if t == nil { + t = &TagFragment{} + } + return t.Description +} +func (t *TagFragment) GetAliases() []string { + if t == nil { + t = &TagFragment{} + } + return t.Aliases +} +func (t *TagFragment) GetCategory() *TagFragment_Category { + if t == nil { + t = &TagFragment{} + } + return t.Category +} type MeasurementsFragment struct { BandSize *int "json:\"band_size,omitempty\" graphql:\"band_size\"" @@ -518,6 +537,31 @@ 
func (t *StudioFragment_Parent) GetName() string { return t.Name } +type TagFragment_Category struct { + Description *string "json:\"description,omitempty\" graphql:\"description\"" + ID string "json:\"id\" graphql:\"id\"" + Name string "json:\"name\" graphql:\"name\"" +} + +func (t *TagFragment_Category) GetDescription() *string { + if t == nil { + t = &TagFragment_Category{} + } + return t.Description +} +func (t *TagFragment_Category) GetID() string { + if t == nil { + t = &TagFragment_Category{} + } + return t.ID +} +func (t *TagFragment_Category) GetName() string { + if t == nil { + t = &TagFragment_Category{} + } + return t.Name +} + type SceneFragment_Studio_StudioFragment_Parent struct { ID string "json:\"id\" graphql:\"id\"" Name string "json:\"name\" graphql:\"name\"" @@ -536,38 +580,27 @@ func (t *SceneFragment_Studio_StudioFragment_Parent) GetName() string { return t.Name } -type FindSceneByFingerprint_FindSceneByFingerprint_SceneFragment_Studio_StudioFragment_Parent struct { - ID string "json:\"id\" graphql:\"id\"" - Name string "json:\"name\" graphql:\"name\"" +type SceneFragment_Tags_TagFragment_Category struct { + Description *string "json:\"description,omitempty\" graphql:\"description\"" + ID string "json:\"id\" graphql:\"id\"" + Name string "json:\"name\" graphql:\"name\"" } -func (t *FindSceneByFingerprint_FindSceneByFingerprint_SceneFragment_Studio_StudioFragment_Parent) GetID() string { +func (t *SceneFragment_Tags_TagFragment_Category) GetDescription() *string { if t == nil { - t = &FindSceneByFingerprint_FindSceneByFingerprint_SceneFragment_Studio_StudioFragment_Parent{} + t = &SceneFragment_Tags_TagFragment_Category{} + } + return t.Description +} +func (t *SceneFragment_Tags_TagFragment_Category) GetID() string { + if t == nil { + t = &SceneFragment_Tags_TagFragment_Category{} } return t.ID } -func (t *FindSceneByFingerprint_FindSceneByFingerprint_SceneFragment_Studio_StudioFragment_Parent) GetName() string { +func (t 
*SceneFragment_Tags_TagFragment_Category) GetName() string { if t == nil { - t = &FindSceneByFingerprint_FindSceneByFingerprint_SceneFragment_Studio_StudioFragment_Parent{} - } - return t.Name -} - -type FindScenesByFullFingerprints_FindScenesByFullFingerprints_SceneFragment_Studio_StudioFragment_Parent struct { - ID string "json:\"id\" graphql:\"id\"" - Name string "json:\"name\" graphql:\"name\"" -} - -func (t *FindScenesByFullFingerprints_FindScenesByFullFingerprints_SceneFragment_Studio_StudioFragment_Parent) GetID() string { - if t == nil { - t = &FindScenesByFullFingerprints_FindScenesByFullFingerprints_SceneFragment_Studio_StudioFragment_Parent{} - } - return t.ID -} -func (t *FindScenesByFullFingerprints_FindScenesByFullFingerprints_SceneFragment_Studio_StudioFragment_Parent) GetName() string { - if t == nil { - t = &FindScenesByFullFingerprints_FindScenesByFullFingerprints_SceneFragment_Studio_StudioFragment_Parent{} + t = &SceneFragment_Tags_TagFragment_Category{} } return t.Name } @@ -590,6 +623,31 @@ func (t *FindScenesBySceneFingerprints_FindScenesBySceneFingerprints_SceneFragme return t.Name } +type FindScenesBySceneFingerprints_FindScenesBySceneFingerprints_SceneFragment_Tags_TagFragment_Category struct { + Description *string "json:\"description,omitempty\" graphql:\"description\"" + ID string "json:\"id\" graphql:\"id\"" + Name string "json:\"name\" graphql:\"name\"" +} + +func (t *FindScenesBySceneFingerprints_FindScenesBySceneFingerprints_SceneFragment_Tags_TagFragment_Category) GetDescription() *string { + if t == nil { + t = &FindScenesBySceneFingerprints_FindScenesBySceneFingerprints_SceneFragment_Tags_TagFragment_Category{} + } + return t.Description +} +func (t *FindScenesBySceneFingerprints_FindScenesBySceneFingerprints_SceneFragment_Tags_TagFragment_Category) GetID() string { + if t == nil { + t = &FindScenesBySceneFingerprints_FindScenesBySceneFingerprints_SceneFragment_Tags_TagFragment_Category{} + } + return t.ID +} +func (t 
*FindScenesBySceneFingerprints_FindScenesBySceneFingerprints_SceneFragment_Tags_TagFragment_Category) GetName() string { + if t == nil { + t = &FindScenesBySceneFingerprints_FindScenesBySceneFingerprints_SceneFragment_Tags_TagFragment_Category{} + } + return t.Name +} + type SearchScene_SearchScene_SceneFragment_Studio_StudioFragment_Parent struct { ID string "json:\"id\" graphql:\"id\"" Name string "json:\"name\" graphql:\"name\"" @@ -608,6 +666,31 @@ func (t *SearchScene_SearchScene_SceneFragment_Studio_StudioFragment_Parent) Get return t.Name } +type SearchScene_SearchScene_SceneFragment_Tags_TagFragment_Category struct { + Description *string "json:\"description,omitempty\" graphql:\"description\"" + ID string "json:\"id\" graphql:\"id\"" + Name string "json:\"name\" graphql:\"name\"" +} + +func (t *SearchScene_SearchScene_SceneFragment_Tags_TagFragment_Category) GetDescription() *string { + if t == nil { + t = &SearchScene_SearchScene_SceneFragment_Tags_TagFragment_Category{} + } + return t.Description +} +func (t *SearchScene_SearchScene_SceneFragment_Tags_TagFragment_Category) GetID() string { + if t == nil { + t = &SearchScene_SearchScene_SceneFragment_Tags_TagFragment_Category{} + } + return t.ID +} +func (t *SearchScene_SearchScene_SceneFragment_Tags_TagFragment_Category) GetName() string { + if t == nil { + t = &SearchScene_SearchScene_SceneFragment_Tags_TagFragment_Category{} + } + return t.Name +} + type FindSceneByID_FindScene_SceneFragment_Studio_StudioFragment_Parent struct { ID string "json:\"id\" graphql:\"id\"" Name string "json:\"name\" graphql:\"name\"" @@ -626,6 +709,31 @@ func (t *FindSceneByID_FindScene_SceneFragment_Studio_StudioFragment_Parent) Get return t.Name } +type FindSceneByID_FindScene_SceneFragment_Tags_TagFragment_Category struct { + Description *string "json:\"description,omitempty\" graphql:\"description\"" + ID string "json:\"id\" graphql:\"id\"" + Name string "json:\"name\" graphql:\"name\"" +} + +func (t 
*FindSceneByID_FindScene_SceneFragment_Tags_TagFragment_Category) GetDescription() *string { + if t == nil { + t = &FindSceneByID_FindScene_SceneFragment_Tags_TagFragment_Category{} + } + return t.Description +} +func (t *FindSceneByID_FindScene_SceneFragment_Tags_TagFragment_Category) GetID() string { + if t == nil { + t = &FindSceneByID_FindScene_SceneFragment_Tags_TagFragment_Category{} + } + return t.ID +} +func (t *FindSceneByID_FindScene_SceneFragment_Tags_TagFragment_Category) GetName() string { + if t == nil { + t = &FindSceneByID_FindScene_SceneFragment_Tags_TagFragment_Category{} + } + return t.Name +} + type FindStudio_FindStudio_StudioFragment_Parent struct { ID string "json:\"id\" graphql:\"id\"" Name string "json:\"name\" graphql:\"name\"" @@ -644,6 +752,56 @@ func (t *FindStudio_FindStudio_StudioFragment_Parent) GetName() string { return t.Name } +type FindTag_FindTag_TagFragment_Category struct { + Description *string "json:\"description,omitempty\" graphql:\"description\"" + ID string "json:\"id\" graphql:\"id\"" + Name string "json:\"name\" graphql:\"name\"" +} + +func (t *FindTag_FindTag_TagFragment_Category) GetDescription() *string { + if t == nil { + t = &FindTag_FindTag_TagFragment_Category{} + } + return t.Description +} +func (t *FindTag_FindTag_TagFragment_Category) GetID() string { + if t == nil { + t = &FindTag_FindTag_TagFragment_Category{} + } + return t.ID +} +func (t *FindTag_FindTag_TagFragment_Category) GetName() string { + if t == nil { + t = &FindTag_FindTag_TagFragment_Category{} + } + return t.Name +} + +type QueryTags_QueryTags_Tags_TagFragment_Category struct { + Description *string "json:\"description,omitempty\" graphql:\"description\"" + ID string "json:\"id\" graphql:\"id\"" + Name string "json:\"name\" graphql:\"name\"" +} + +func (t *QueryTags_QueryTags_Tags_TagFragment_Category) GetDescription() *string { + if t == nil { + t = &QueryTags_QueryTags_Tags_TagFragment_Category{} + } + return t.Description +} +func (t 
*QueryTags_QueryTags_Tags_TagFragment_Category) GetID() string { + if t == nil { + t = &QueryTags_QueryTags_Tags_TagFragment_Category{} + } + return t.ID +} +func (t *QueryTags_QueryTags_Tags_TagFragment_Category) GetName() string { + if t == nil { + t = &QueryTags_QueryTags_Tags_TagFragment_Category{} + } + return t.Name +} + type QueryTags_QueryTags struct { Count int "json:\"count\" graphql:\"count\"" Tags []*TagFragment "json:\"tags\" graphql:\"tags\"" @@ -695,28 +853,6 @@ func (t *SubmitPerformerDraft_SubmitPerformerDraft) GetID() *string { return t.ID } -type FindSceneByFingerprint struct { - FindSceneByFingerprint []*SceneFragment "json:\"findSceneByFingerprint\" graphql:\"findSceneByFingerprint\"" -} - -func (t *FindSceneByFingerprint) GetFindSceneByFingerprint() []*SceneFragment { - if t == nil { - t = &FindSceneByFingerprint{} - } - return t.FindSceneByFingerprint -} - -type FindScenesByFullFingerprints struct { - FindScenesByFullFingerprints []*SceneFragment "json:\"findScenesByFullFingerprints\" graphql:\"findScenesByFullFingerprints\"" -} - -func (t *FindScenesByFullFingerprints) GetFindScenesByFullFingerprints() []*SceneFragment { - if t == nil { - t = &FindScenesByFullFingerprints{} - } - return t.FindScenesByFullFingerprints -} - type FindScenesBySceneFingerprints struct { FindScenesBySceneFingerprints [][]*SceneFragment "json:\"findScenesBySceneFingerprints\" graphql:\"findScenesBySceneFingerprints\"" } @@ -849,278 +985,6 @@ func (t *SubmitPerformerDraft) GetSubmitPerformerDraft() *SubmitPerformerDraft_S return &t.SubmitPerformerDraft } -const FindSceneByFingerprintDocument = `query FindSceneByFingerprint ($fingerprint: FingerprintQueryInput!) { - findSceneByFingerprint(fingerprint: $fingerprint) { - ... SceneFragment - } -} -fragment SceneFragment on Scene { - id - title - code - details - director - duration - date - urls { - ... URLFragment - } - images { - ... ImageFragment - } - studio { - ... StudioFragment - } - tags { - ... 
TagFragment - } - performers { - ... PerformerAppearanceFragment - } - fingerprints { - ... FingerprintFragment - } -} -fragment URLFragment on URL { - url - type -} -fragment ImageFragment on Image { - id - url - width - height -} -fragment StudioFragment on Studio { - name - id - aliases - urls { - ... URLFragment - } - parent { - name - id - } - images { - ... ImageFragment - } -} -fragment TagFragment on Tag { - name - id -} -fragment PerformerAppearanceFragment on PerformerAppearance { - as - performer { - ... PerformerFragment - } -} -fragment PerformerFragment on Performer { - id - name - disambiguation - aliases - gender - merged_ids - deleted - merged_into_id - urls { - ... URLFragment - } - images { - ... ImageFragment - } - birth_date - death_date - ethnicity - country - eye_color - hair_color - height - measurements { - ... MeasurementsFragment - } - breast_type - career_start_year - career_end_year - tattoos { - ... BodyModificationFragment - } - piercings { - ... BodyModificationFragment - } -} -fragment MeasurementsFragment on Measurements { - band_size - cup_size - waist - hip -} -fragment BodyModificationFragment on BodyModification { - location - description -} -fragment FingerprintFragment on Fingerprint { - algorithm - hash - duration -} -` - -func (c *Client) FindSceneByFingerprint(ctx context.Context, fingerprint FingerprintQueryInput, interceptors ...clientv2.RequestInterceptor) (*FindSceneByFingerprint, error) { - vars := map[string]any{ - "fingerprint": fingerprint, - } - - var res FindSceneByFingerprint - if err := c.Client.Post(ctx, "FindSceneByFingerprint", FindSceneByFingerprintDocument, &res, vars, interceptors...); err != nil { - if c.Client.ParseDataWhenErrors { - return &res, err - } - - return nil, err - } - - return &res, nil -} - -const FindScenesByFullFingerprintsDocument = `query FindScenesByFullFingerprints ($fingerprints: [FingerprintQueryInput!]!) { - findScenesByFullFingerprints(fingerprints: $fingerprints) { - ... 
SceneFragment - } -} -fragment SceneFragment on Scene { - id - title - code - details - director - duration - date - urls { - ... URLFragment - } - images { - ... ImageFragment - } - studio { - ... StudioFragment - } - tags { - ... TagFragment - } - performers { - ... PerformerAppearanceFragment - } - fingerprints { - ... FingerprintFragment - } -} -fragment URLFragment on URL { - url - type -} -fragment ImageFragment on Image { - id - url - width - height -} -fragment StudioFragment on Studio { - name - id - aliases - urls { - ... URLFragment - } - parent { - name - id - } - images { - ... ImageFragment - } -} -fragment TagFragment on Tag { - name - id -} -fragment PerformerAppearanceFragment on PerformerAppearance { - as - performer { - ... PerformerFragment - } -} -fragment PerformerFragment on Performer { - id - name - disambiguation - aliases - gender - merged_ids - deleted - merged_into_id - urls { - ... URLFragment - } - images { - ... ImageFragment - } - birth_date - death_date - ethnicity - country - eye_color - hair_color - height - measurements { - ... MeasurementsFragment - } - breast_type - career_start_year - career_end_year - tattoos { - ... BodyModificationFragment - } - piercings { - ... 
BodyModificationFragment - } -} -fragment MeasurementsFragment on Measurements { - band_size - cup_size - waist - hip -} -fragment BodyModificationFragment on BodyModification { - location - description -} -fragment FingerprintFragment on Fingerprint { - algorithm - hash - duration -} -` - -func (c *Client) FindScenesByFullFingerprints(ctx context.Context, fingerprints []*FingerprintQueryInput, interceptors ...clientv2.RequestInterceptor) (*FindScenesByFullFingerprints, error) { - vars := map[string]any{ - "fingerprints": fingerprints, - } - - var res FindScenesByFullFingerprints - if err := c.Client.Post(ctx, "FindScenesByFullFingerprints", FindScenesByFullFingerprintsDocument, &res, vars, interceptors...); err != nil { - if c.Client.ParseDataWhenErrors { - return &res, err - } - - return nil, err - } - - return &res, nil -} - const FindScenesBySceneFingerprintsDocument = `query FindScenesBySceneFingerprints ($fingerprints: [[FingerprintQueryInput!]!]!) { findScenesBySceneFingerprints(fingerprints: $fingerprints) { ... 
SceneFragment @@ -1181,6 +1045,13 @@ fragment StudioFragment on Studio { fragment TagFragment on Tag { name id + description + aliases + category { + id + name + description + } } fragment PerformerAppearanceFragment on PerformerAppearance { as @@ -1317,6 +1188,13 @@ fragment StudioFragment on Studio { fragment TagFragment on Tag { name id + description + aliases + category { + id + name + description + } } fragment PerformerAppearanceFragment on PerformerAppearance { as @@ -1611,6 +1489,13 @@ fragment StudioFragment on Studio { fragment TagFragment on Tag { name id + description + aliases + category { + id + name + description + } } fragment PerformerAppearanceFragment on PerformerAppearance { as @@ -1745,6 +1630,13 @@ const FindTagDocument = `query FindTag ($id: ID, $name: String) { fragment TagFragment on Tag { name id + description + aliases + category { + id + name + description + } } ` @@ -1777,6 +1669,13 @@ const QueryTagsDocument = `query QueryTags ($input: TagQueryInput!) { fragment TagFragment on Tag { name id + description + aliases + category { + id + name + description + } } ` @@ -1890,8 +1789,6 @@ func (c *Client) SubmitPerformerDraft(ctx context.Context, input PerformerDraftI } var DocumentOperationNames = map[string]string{ - FindSceneByFingerprintDocument: "FindSceneByFingerprint", - FindScenesByFullFingerprintsDocument: "FindScenesByFullFingerprints", FindScenesBySceneFingerprintsDocument: "FindScenesBySceneFingerprints", SearchSceneDocument: "SearchScene", SearchPerformerDocument: "SearchPerformer", diff --git a/pkg/stashbox/performer.go b/pkg/stashbox/performer.go index 38824eba1..5b25b4a59 100644 --- a/pkg/stashbox/performer.go +++ b/pkg/stashbox/performer.go @@ -231,12 +231,22 @@ func performerFragmentToScrapedPerformer(p graphql.PerformerFragment) *models.Sc sp.Height = &hs } + if p.CareerStartYear != nil { + cs := strconv.Itoa(*p.CareerStartYear) + sp.CareerStart = &cs + } + + if p.CareerEndYear != nil { + ce := 
strconv.Itoa(*p.CareerEndYear) + sp.CareerEnd = &ce + } + if p.BirthDate != nil { - sp.Birthdate = padFuzzyDate(p.BirthDate) + sp.Birthdate = p.BirthDate } if p.DeathDate != nil { - sp.DeathDate = padFuzzyDate(p.DeathDate) + sp.DeathDate = p.DeathDate } if p.Gender != nil { @@ -280,23 +290,6 @@ func performerFragmentToScrapedPerformer(p graphql.PerformerFragment) *models.Sc return sp } -func padFuzzyDate(date *string) *string { - if date == nil { - return nil - } - - var paddedDate string - switch len(*date) { - case 10: - paddedDate = *date - case 7: - paddedDate = fmt.Sprintf("%s-01", *date) - case 4: - paddedDate = fmt.Sprintf("%s-01-01", *date) - } - return &paddedDate -} - // FindPerformerByID queries stash-box for a performer by ID. func (c Client) FindPerformerByID(ctx context.Context, id string) (*models.ScrapedPerformer, error) { performer, err := c.client.FindPerformerByID(ctx, id) @@ -388,16 +381,13 @@ func (c Client) SubmitPerformerDraft(ctx context.Context, performer *models.Perf aliases := strings.Join(performer.Aliases.List(), ",") draft.Aliases = &aliases } - if performer.CareerLength != "" { - var career = strings.Split(performer.CareerLength, "-") - if i, err := strconv.Atoi(strings.TrimSpace(career[0])); err == nil { - draft.CareerStartYear = &i - } - if len(career) == 2 { - if y, err := strconv.Atoi(strings.TrimSpace(career[1])); err == nil { - draft.CareerEndYear = &y - } - } + if performer.CareerStart != nil { + year := performer.CareerStart.Year() + draft.CareerStartYear = &year + } + if performer.CareerEnd != nil { + year := performer.CareerEnd.Year() + draft.CareerEndYear = &year } if len(performer.URLs.List()) > 0 { diff --git a/pkg/stashbox/tag.go b/pkg/stashbox/tag.go index df2ecbcc0..45bcf96c4 100644 --- a/pkg/stashbox/tag.go +++ b/pkg/stashbox/tag.go @@ -31,10 +31,8 @@ func (c Client) findTagByID(ctx context.Context, id string) ([]*models.ScrapedTa return nil, nil } - return []*models.ScrapedTag{{ - Name: tag.FindTag.Name, - 
RemoteSiteID: &tag.FindTag.ID, - }}, nil + ret := tagFragmentToScrapedTag(*tag.FindTag) + return []*models.ScrapedTag{ret}, nil } func (c Client) queryTagsByName(ctx context.Context, name string) ([]*models.ScrapedTag, error) { @@ -57,11 +55,29 @@ func (c Client) queryTagsByName(ctx context.Context, name string) ([]*models.Scr var ret []*models.ScrapedTag for _, t := range result.QueryTags.Tags { - ret = append(ret, &models.ScrapedTag{ - Name: t.Name, - RemoteSiteID: &t.ID, - }) + ret = append(ret, tagFragmentToScrapedTag(*t)) } return ret, nil } + +func tagFragmentToScrapedTag(t graphql.TagFragment) *models.ScrapedTag { + ret := &models.ScrapedTag{ + Name: t.Name, + Description: t.Description, + RemoteSiteID: &t.ID, + } + + if len(t.Aliases) > 0 { + ret.AliasList = t.Aliases + } + + if t.Category != nil { + ret.Parent = &models.ScrapedTag{ + Name: t.Category.Name, + Description: t.Category.Description, + } + } + + return ret +} diff --git a/pkg/studio/export.go b/pkg/studio/export.go index 1440c3cdd..206791da6 100644 --- a/pkg/studio/export.go +++ b/pkg/studio/export.go @@ -17,6 +17,7 @@ type FinderImageStashIDGetter interface { models.URLLoader models.StashIDLoader GetImage(ctx context.Context, studioID int) ([]byte, error) + models.CustomFieldsReader } // ToJSON converts a Studio object into its JSON equivalent. 
@@ -26,6 +27,7 @@ func ToJSON(ctx context.Context, reader FinderImageStashIDGetter, studio *models Details: studio.Details, Favorite: studio.Favorite, IgnoreAutoTag: studio.IgnoreAutoTag, + Organized: studio.Organized, CreatedAt: json.JSONTime{Time: studio.CreatedAt}, UpdatedAt: json.JSONTime{Time: studio.UpdatedAt}, } @@ -60,6 +62,12 @@ func ToJSON(ctx context.Context, reader FinderImageStashIDGetter, studio *models } newStudioJSON.StashIDs = studio.StashIDs.List() + var err error + newStudioJSON.CustomFields, err = reader.GetCustomFields(ctx, studio.ID) + if err != nil { + return nil, fmt.Errorf("getting studio custom fields: %v", err) + } + image, err := reader.GetImage(ctx, studio.ID) if err != nil { logger.Errorf("Error getting studio image: %v", err) diff --git a/pkg/studio/export_test.go b/pkg/studio/export_test.go index c333c0ad5..dce75ba9a 100644 --- a/pkg/studio/export_test.go +++ b/pkg/studio/export_test.go @@ -18,18 +18,25 @@ const ( errImageID = 3 missingParentStudioID = 4 errStudioID = 5 + customFieldsID = 6 parentStudioID = 10 missingStudioID = 11 errParentStudioID = 12 + errCustomFieldsID = 13 ) var ( - studioName = "testStudio" - url = "url" - details = "details" - parentStudioName = "parentStudio" - autoTagIgnored = true + studioName = "testStudio" + url = "url" + details = "details" + parentStudioName = "parentStudio" + autoTagIgnored = true + studioOrganized = true + emptyCustomFields = make(map[string]interface{}) + customFields = map[string]interface{}{ + "customField1": "customValue1", + } ) var studioID = 1 @@ -67,6 +74,7 @@ func createFullStudio(id int, parentID int) models.Studio { UpdatedAt: updateTime, Rating: &rating, IgnoreAutoTag: autoTagIgnored, + Organized: studioOrganized, Aliases: models.NewRelatedStrings(aliases), TagIDs: models.NewRelatedIDs([]int{}), StashIDs: models.NewRelatedStashIDs(stashIDs), @@ -91,7 +99,7 @@ func createEmptyStudio(id int) models.Studio { } } -func createFullJSONStudio(parentStudio, image string, aliases 
[]string) *jsonschema.Studio { +func createFullJSONStudio(parentStudio, image string, aliases []string, customFields map[string]interface{}) *jsonschema.Studio { return &jsonschema.Studio{ Name: studioName, URLs: []string{url}, @@ -109,6 +117,8 @@ func createFullJSONStudio(parentStudio, image string, aliases []string) *jsonsch Aliases: aliases, StashIDs: stashIDs, IgnoreAutoTag: autoTagIgnored, + Organized: studioOrganized, + CustomFields: customFields, } } @@ -120,16 +130,18 @@ func createEmptyJSONStudio() *jsonschema.Studio { UpdatedAt: json.JSONTime{ Time: updateTime, }, - Aliases: []string{}, - URLs: []string{}, - StashIDs: []models.StashID{}, + Aliases: []string{}, + URLs: []string{}, + StashIDs: []models.StashID{}, + CustomFields: emptyCustomFields, } } type testScenario struct { - input models.Studio - expected *jsonschema.Studio - err bool + input models.Studio + customFields map[string]interface{} + expected *jsonschema.Studio + err bool } var scenarios []testScenario @@ -138,30 +150,48 @@ func initTestTable() { scenarios = []testScenario{ { createFullStudio(studioID, parentStudioID), - createFullJSONStudio(parentStudioName, image, []string{"alias"}), + emptyCustomFields, + createFullJSONStudio(parentStudioName, image, []string{"alias"}, emptyCustomFields), + false, + }, + { + createFullStudio(customFieldsID, parentStudioID), + customFields, + createFullJSONStudio(parentStudioName, image, []string{"alias"}, customFields), false, }, { createEmptyStudio(noImageID), + emptyCustomFields, createEmptyJSONStudio(), false, }, { createFullStudio(errImageID, parentStudioID), - createFullJSONStudio(parentStudioName, "", []string{"alias"}), + emptyCustomFields, + createFullJSONStudio(parentStudioName, "", []string{"alias"}, emptyCustomFields), // failure to get image is not an error false, }, { createFullStudio(missingParentStudioID, missingStudioID), - createFullJSONStudio("", image, []string{"alias"}), + emptyCustomFields, + createFullJSONStudio("", image, 
[]string{"alias"}, emptyCustomFields), false, }, { createFullStudio(errStudioID, errParentStudioID), + emptyCustomFields, nil, true, }, + { + createFullStudio(errCustomFieldsID, parentStudioID), + customFields, + nil, + // failure to get custom fields should cause an error + true, + }, } } @@ -177,6 +207,7 @@ func TestToJSON(t *testing.T) { db.Studio.On("GetImage", testCtx, errImageID).Return(nil, imageErr).Once() db.Studio.On("GetImage", testCtx, missingParentStudioID).Return(imageBytes, nil).Maybe() db.Studio.On("GetImage", testCtx, errStudioID).Return(imageBytes, nil).Maybe() + db.Studio.On("GetImage", testCtx, customFieldsID).Return(imageBytes, nil).Once() parentStudioErr := errors.New("error getting parent studio") @@ -184,6 +215,15 @@ func TestToJSON(t *testing.T) { db.Studio.On("Find", testCtx, missingStudioID).Return(nil, nil) db.Studio.On("Find", testCtx, errParentStudioID).Return(nil, parentStudioErr) + customFieldsErr := errors.New("error getting custom fields") + + db.Studio.On("GetCustomFields", testCtx, studioID).Return(emptyCustomFields, nil).Once() + db.Studio.On("GetCustomFields", testCtx, customFieldsID).Return(customFields, nil).Once() + db.Studio.On("GetCustomFields", testCtx, missingParentStudioID).Return(emptyCustomFields, nil).Once() + db.Studio.On("GetCustomFields", testCtx, noImageID).Return(emptyCustomFields, nil).Once() + db.Studio.On("GetCustomFields", testCtx, errImageID).Return(emptyCustomFields, nil).Once() + db.Studio.On("GetCustomFields", testCtx, errCustomFieldsID).Return(nil, customFieldsErr).Once() + for i, s := range scenarios { studio := s.input json, err := ToJSON(testCtx, db.Studio, &studio) diff --git a/pkg/studio/import.go b/pkg/studio/import.go index 405852e53..264e2566a 100644 --- a/pkg/studio/import.go +++ b/pkg/studio/import.go @@ -26,13 +26,15 @@ type Importer struct { Input jsonschema.Studio MissingRefBehaviour models.ImportMissingRefEnum - ID int - studio models.Studio - imageData []byte + ID int + studio 
models.Studio + customFields models.CustomFieldMap + imageData []byte } func (i *Importer) PreImport(ctx context.Context) error { i.studio = studioJSONtoStudio(i.Input) + i.customFields = i.Input.CustomFields if err := i.populateParentStudio(ctx); err != nil { return err @@ -110,7 +112,9 @@ func createTags(ctx context.Context, tagWriter models.TagFinderCreator, names [] newTag := models.NewTag() newTag.Name = name - err := tagWriter.Create(ctx, &newTag) + err := tagWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return nil, err } @@ -153,7 +157,7 @@ func (i *Importer) populateParentStudio(ctx context.Context) error { } func (i *Importer) createParentStudio(ctx context.Context, name string) (int, error) { - newStudio := models.NewStudio() + newStudio := models.NewCreateStudioInput() newStudio.Name = name err := i.ReaderWriter.Create(ctx, &newStudio) @@ -194,7 +198,10 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { } func (i *Importer) Create(ctx context.Context) (*int, error) { - err := i.ReaderWriter.Create(ctx, &i.studio) + err := i.ReaderWriter.Create(ctx, &models.CreateStudioInput{ + Studio: &i.studio, + CustomFields: i.customFields, + }) if err != nil { return nil, fmt.Errorf("error creating studio: %v", err) } @@ -206,7 +213,12 @@ func (i *Importer) Create(ctx context.Context) (*int, error) { func (i *Importer) Update(ctx context.Context, id int) error { studio := i.studio studio.ID = id - err := i.ReaderWriter.Update(ctx, &studio) + err := i.ReaderWriter.Update(ctx, &models.UpdateStudioInput{ + Studio: &studio, + CustomFields: models.CustomFieldsInput{ + Full: i.customFields, + }, + }) if err != nil { return fmt.Errorf("error updating existing studio: %v", err) } @@ -221,6 +233,7 @@ func studioJSONtoStudio(studioJSON jsonschema.Studio) models.Studio { Details: studioJSON.Details, Favorite: studioJSON.Favorite, IgnoreAutoTag: studioJSON.IgnoreAutoTag, + Organized: studioJSON.Organized, CreatedAt: 
studioJSON.CreatedAt.GetTime(), UpdatedAt: studioJSON.UpdatedAt.GetTime(), diff --git a/pkg/studio/import_test.go b/pkg/studio/import_test.go index 882b8ca56..c2bbd40f5 100644 --- a/pkg/studio/import_test.go +++ b/pkg/studio/import_test.go @@ -49,6 +49,7 @@ func TestImporterPreImport(t *testing.T) { Name: studioName, Image: invalidImage, IgnoreAutoTag: autoTagIgnored, + Organized: studioOrganized, }, } @@ -62,7 +63,7 @@ func TestImporterPreImport(t *testing.T) { assert.Nil(t, err) - i.Input = *createFullJSONStudio(studioName, image, []string{"alias"}) + i.Input = *createFullJSONStudio(studioName, image, []string{"alias"}, customFields) i.Input.ParentStudio = "" err = i.PreImport(testCtx) @@ -71,6 +72,7 @@ func TestImporterPreImport(t *testing.T) { expectedStudio := createFullStudio(0, 0) expectedStudio.ParentID = nil assert.Equal(t, expectedStudio, i.studio) + assert.Equal(t, models.CustomFieldMap(customFields), i.customFields) } func TestImporterPreImportWithTag(t *testing.T) { @@ -121,9 +123,9 @@ func TestImporterPreImportWithMissingTag(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = existingTagID + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = existingTagID }).Return(nil) err := i.PreImport(testCtx) @@ -156,7 +158,7 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once() - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -206,9 
+208,9 @@ func TestImporterPreImportWithMissingParent(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingParentStudioName, false).Return(nil, nil).Times(3) - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) - s.ID = existingStudioID + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Run(func(args mock.Arguments) { + s := args.Get(1).(*models.CreateStudioInput) + s.Studio.ID = existingStudioID }).Return(nil) err := i.PreImport(testCtx) @@ -240,7 +242,7 @@ func TestImporterPreImportWithMissingParentCreateErr(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingParentStudioName, false).Return(nil, nil).Once() - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Return(errors.New("Create error")) + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -327,11 +329,11 @@ func TestCreate(t *testing.T) { } errCreate := errors.New("Create error") - db.Studio.On("Create", testCtx, &studio).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) + db.Studio.On("Create", testCtx, &models.CreateStudioInput{Studio: &studio}).Run(func(args mock.Arguments) { + s := args.Get(1).(*models.CreateStudioInput) s.ID = studioID }).Return(nil).Once() - db.Studio.On("Create", testCtx, &studioErr).Return(errCreate).Once() + db.Studio.On("Create", testCtx, &models.CreateStudioInput{Studio: &studioErr}).Return(errCreate).Once() id, err := i.Create(testCtx) assert.Equal(t, studioID, *id) @@ -366,7 +368,7 @@ func TestUpdate(t *testing.T) { // id needs to be set for the mock input studio.ID = studioID - db.Studio.On("Update", testCtx, &studio).Return(nil).Once() + db.Studio.On("Update", testCtx, &models.UpdateStudioInput{Studio: &studio}).Return(nil).Once() err := i.Update(testCtx, studioID) assert.Nil(t, err) 
@@ -375,7 +377,7 @@ func TestUpdate(t *testing.T) { // need to set id separately studioErr.ID = errImageID - db.Studio.On("Update", testCtx, &studioErr).Return(errUpdate).Once() + db.Studio.On("Update", testCtx, &models.UpdateStudioInput{Studio: &studioErr}).Return(errUpdate).Once() err = i.Update(testCtx, errImageID) assert.NotNil(t, err) diff --git a/pkg/studio/validate.go b/pkg/studio/validate.go index 4e2f51c84..526400066 100644 --- a/pkg/studio/validate.go +++ b/pkg/studio/validate.go @@ -75,7 +75,7 @@ func ValidateAliases(ctx context.Context, id int, aliases []string, qb models.St return nil } -func ValidateCreate(ctx context.Context, studio models.Studio, qb models.StudioQueryer) error { +func ValidateCreate(ctx context.Context, studio models.CreateStudioInput, qb models.StudioQueryer) error { if err := validateName(ctx, 0, studio.Name, qb); err != nil { return err } @@ -135,6 +135,7 @@ func ValidateModify(ctx context.Context, s models.StudioPartial, qb ValidateModi } effectiveAliases := s.Aliases.Apply(existing.Aliases.List()) + if err := ValidateAliases(ctx, s.ID, effectiveAliases, qb); err != nil { return err } diff --git a/pkg/studio/validate_test.go b/pkg/studio/validate_test.go index 6562dc5ca..b196ba3c3 100644 --- a/pkg/studio/validate_test.go +++ b/pkg/studio/validate_test.go @@ -102,3 +102,72 @@ func TestValidateUpdateName(t *testing.T) { }) } } + +func TestValidateUpdateAliases(t *testing.T) { + db := mocks.NewDatabase() + + const ( + name1 = "name 1" + name2 = "name 2" + alias1 = "alias 1" + newAlias = "new alias" + ) + + existing1 := models.Studio{ + ID: 1, + Name: name1, + } + existing2 := models.Studio{ + ID: 2, + Name: name2, + } + + pp := 1 + findFilter := &models.FindFilterType{ + PerPage: &pp, + } + + aliasFilter := func(n string) *models.StudioFilterType { + return &models.StudioFilterType{ + Aliases: &models.StringCriterionInput{ + Value: n, + Modifier: models.CriterionModifierEquals, + }, + } + } + + // name1 matches existing1 name - ok 
+ db.Studio.On("Query", testCtx, nameFilter(alias1), findFilter).Return(nil, 0, nil) + db.Studio.On("Query", testCtx, aliasFilter(alias1), findFilter).Return(nil, 0, nil) + + // name2 matches existing2 name - error + db.Studio.On("Query", testCtx, nameFilter(name2), findFilter).Return([]*models.Studio{&existing2}, 1, nil) + + // alias matches existing alias - error + db.Studio.On("Query", testCtx, nameFilter(newAlias), findFilter).Return(nil, 0, nil) + db.Studio.On("Query", testCtx, aliasFilter(newAlias), findFilter).Return([]*models.Studio{&existing2}, 1, nil) + + // valid alias + db.Studio.On("Query", testCtx, nameFilter("valid"), findFilter).Return(nil, 0, nil) + db.Studio.On("Query", testCtx, aliasFilter("valid"), findFilter).Return(nil, 0, nil) + + tests := []struct { + tName string + studio models.Studio + aliases []string + want error + }{ + {"valid alias", existing1, []string{alias1}, nil}, + {"alias duplicates other name", existing1, []string{name2}, &NameExistsError{name2}}, + {"alias duplicates other alias", existing1, []string{newAlias}, &NameUsedByAliasError{newAlias, existing2.Name}}, + {"valid new alias", existing1, []string{"valid"}, nil}, + {"empty alias", existing1, []string{""}, ErrEmptyAlias}, + } + + for _, tt := range tests { + t.Run(tt.tName, func(t *testing.T) { + got := ValidateAliases(testCtx, tt.studio.ID, tt.aliases, db.Studio) + assert.Equal(t, tt.want, got) + }) + } +} diff --git a/pkg/tag/export.go b/pkg/tag/export.go index b07418667..fc7115209 100644 --- a/pkg/tag/export.go +++ b/pkg/tag/export.go @@ -16,6 +16,7 @@ type FinderAliasImageGetter interface { GetAliases(ctx context.Context, studioID int) ([]string, error) GetImage(ctx context.Context, tagID int) ([]byte, error) FindByChildTagID(ctx context.Context, childID int) ([]*models.Tag, error) + GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) models.StashIDLoader } @@ -63,6 +64,11 @@ func ToJSON(ctx context.Context, reader FinderAliasImageGetter, tag 
*models.Tag) newTagJSON.Parents = GetNames(parents) + newTagJSON.CustomFields, err = reader.GetCustomFields(ctx, tag.ID) + if err != nil { + return nil, fmt.Errorf("getting tag custom fields: %v", err) + } + return &newTagJSON, nil } diff --git a/pkg/tag/export_test.go b/pkg/tag/export_test.go index 84e082f30..cba2d4ebf 100644 --- a/pkg/tag/export_test.go +++ b/pkg/tag/export_test.go @@ -14,12 +14,14 @@ import ( ) const ( - tagID = 1 - noImageID = 2 - errImageID = 3 - errAliasID = 4 - withParentsID = 5 - errParentsID = 6 + tagID = iota + 1 + customFieldsID + noImageID + errImageID + errAliasID + withParentsID + errParentsID + errCustomFieldsID ) const ( @@ -32,6 +34,11 @@ var ( autoTagIgnored = true createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC) updateTime = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC) + + emptyCustomFields = make(map[string]interface{}) + customFields = map[string]interface{}{ + "customField1": "customValue1", + } ) func createTag(id int) models.Tag { @@ -47,8 +54,8 @@ func createTag(id int) models.Tag { } } -func createJSONTag(aliases []string, image string, parents []string) *jsonschema.Tag { - return &jsonschema.Tag{ +func createJSONTag(aliases []string, image string, parents []string, withCustomFields bool) *jsonschema.Tag { + ret := &jsonschema.Tag{ Name: tagName, SortName: sortName, Favorite: true, @@ -61,15 +68,23 @@ func createJSONTag(aliases []string, image string, parents []string) *jsonschema UpdatedAt: json.JSONTime{ Time: updateTime, }, - Image: image, - Parents: parents, + Image: image, + Parents: parents, + CustomFields: emptyCustomFields, } + + if withCustomFields { + ret.CustomFields = customFields + } + + return ret } type testScenario struct { - tag models.Tag - expected *jsonschema.Tag - err bool + tag models.Tag + customFields map[string]interface{} + expected *jsonschema.Tag + err bool } var scenarios []testScenario @@ -78,32 +93,50 @@ func initTestTable() { scenarios = []testScenario{ { createTag(tagID), - 
createJSONTag([]string{"alias"}, image, nil), + emptyCustomFields, + createJSONTag([]string{"alias"}, image, nil, false), + false, + }, + { + createTag(customFieldsID), + customFields, + createJSONTag([]string{"alias"}, image, nil, true), false, }, { createTag(noImageID), - createJSONTag(nil, "", nil), + emptyCustomFields, + createJSONTag(nil, "", nil, false), false, }, { createTag(errImageID), - createJSONTag(nil, "", nil), + emptyCustomFields, + createJSONTag(nil, "", nil, false), // getting the image should not cause an error false, }, { createTag(errAliasID), + emptyCustomFields, nil, true, }, { createTag(withParentsID), - createJSONTag(nil, image, []string{"parent"}), + emptyCustomFields, + createJSONTag(nil, image, []string{"parent"}, false), false, }, { createTag(errParentsID), + emptyCustomFields, + nil, + true, + }, + { + createTag(errCustomFieldsID), + customFields, nil, true, }, @@ -118,32 +151,48 @@ func TestToJSON(t *testing.T) { imageErr := errors.New("error getting image") aliasErr := errors.New("error getting aliases") parentsErr := errors.New("error getting parents") + customFieldsErr := errors.New("error getting custom fields") db.Tag.On("GetAliases", testCtx, tagID).Return([]string{"alias"}, nil).Once() + db.Tag.On("GetAliases", testCtx, customFieldsID).Return([]string{"alias"}, nil).Once() db.Tag.On("GetAliases", testCtx, noImageID).Return(nil, nil).Once() db.Tag.On("GetAliases", testCtx, errImageID).Return(nil, nil).Once() db.Tag.On("GetAliases", testCtx, errAliasID).Return(nil, aliasErr).Once() db.Tag.On("GetAliases", testCtx, withParentsID).Return(nil, nil).Once() db.Tag.On("GetAliases", testCtx, errParentsID).Return(nil, nil).Once() + db.Tag.On("GetAliases", testCtx, errCustomFieldsID).Return(nil, nil).Once() db.Tag.On("GetStashIDs", testCtx, tagID).Return(nil, nil).Once() + db.Tag.On("GetStashIDs", testCtx, customFieldsID).Return(nil, nil).Once() db.Tag.On("GetStashIDs", testCtx, noImageID).Return(nil, nil).Once() db.Tag.On("GetStashIDs", 
testCtx, errImageID).Return(nil, nil).Once() // errAliasID test fails before GetStashIDs is called, so no mock needed db.Tag.On("GetStashIDs", testCtx, withParentsID).Return(nil, nil).Once() db.Tag.On("GetStashIDs", testCtx, errParentsID).Return(nil, nil).Once() + db.Tag.On("GetStashIDs", testCtx, errCustomFieldsID).Return(nil, nil).Once() db.Tag.On("GetImage", testCtx, tagID).Return(imageBytes, nil).Once() + db.Tag.On("GetImage", testCtx, customFieldsID).Return(imageBytes, nil).Once() db.Tag.On("GetImage", testCtx, noImageID).Return(nil, nil).Once() db.Tag.On("GetImage", testCtx, errImageID).Return(nil, imageErr).Once() db.Tag.On("GetImage", testCtx, withParentsID).Return(imageBytes, nil).Once() db.Tag.On("GetImage", testCtx, errParentsID).Return(nil, nil).Once() + db.Tag.On("GetImage", testCtx, errCustomFieldsID).Return(nil, nil).Once() db.Tag.On("FindByChildTagID", testCtx, tagID).Return(nil, nil).Once() + db.Tag.On("FindByChildTagID", testCtx, customFieldsID).Return(nil, nil).Once() db.Tag.On("FindByChildTagID", testCtx, noImageID).Return(nil, nil).Once() db.Tag.On("FindByChildTagID", testCtx, withParentsID).Return([]*models.Tag{{Name: "parent"}}, nil).Once() db.Tag.On("FindByChildTagID", testCtx, errParentsID).Return(nil, parentsErr).Once() db.Tag.On("FindByChildTagID", testCtx, errImageID).Return(nil, nil).Once() + db.Tag.On("FindByChildTagID", testCtx, errCustomFieldsID).Return(nil, nil).Once() + + db.Tag.On("GetCustomFields", testCtx, tagID).Return(emptyCustomFields, nil).Once() + db.Tag.On("GetCustomFields", testCtx, customFieldsID).Return(customFields, nil).Once() + db.Tag.On("GetCustomFields", testCtx, noImageID).Return(emptyCustomFields, nil).Once() + db.Tag.On("GetCustomFields", testCtx, errImageID).Return(emptyCustomFields, nil).Once() + db.Tag.On("GetCustomFields", testCtx, withParentsID).Return(emptyCustomFields, nil).Once() + db.Tag.On("GetCustomFields", testCtx, errCustomFieldsID).Return(nil, customFieldsErr).Once() for i, s := range scenarios { 
tag := s.tag diff --git a/pkg/tag/import.go b/pkg/tag/import.go index 53b741886..501dc6795 100644 --- a/pkg/tag/import.go +++ b/pkg/tag/import.go @@ -31,8 +31,9 @@ type Importer struct { Input jsonschema.Tag MissingRefBehaviour models.ImportMissingRefEnum - tag models.Tag - imageData []byte + tag models.Tag + imageData []byte + customFields map[string]interface{} } func (i *Importer) PreImport(ctx context.Context) error { @@ -55,6 +56,8 @@ func (i *Importer) PreImport(ctx context.Context) error { } } + i.customFields = i.Input.CustomFields + return nil } @@ -78,6 +81,14 @@ func (i *Importer) PostImport(ctx context.Context, id int) error { return fmt.Errorf("error setting parents: %v", err) } + if len(i.customFields) > 0 { + if err := i.ReaderWriter.SetCustomFields(ctx, id, models.CustomFieldsInput{ + Full: i.customFields, + }); err != nil { + return fmt.Errorf("error setting tag custom fields: %v", err) + } + } + return nil } @@ -101,7 +112,10 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { } func (i *Importer) Create(ctx context.Context) (*int, error) { - err := i.ReaderWriter.Create(ctx, &i.tag) + err := i.ReaderWriter.Create(ctx, &models.CreateTagInput{ + Tag: &i.tag, + CustomFields: i.customFields, + }) if err != nil { return nil, fmt.Errorf("error creating tag: %v", err) } @@ -113,7 +127,12 @@ func (i *Importer) Create(ctx context.Context) (*int, error) { func (i *Importer) Update(ctx context.Context, id int) error { tag := i.tag tag.ID = id - err := i.ReaderWriter.Update(ctx, &tag) + err := i.ReaderWriter.Update(ctx, &models.UpdateTagInput{ + Tag: &tag, + CustomFields: models.CustomFieldsInput{ + Full: i.customFields, + }, + }) if err != nil { return fmt.Errorf("error updating existing tag: %v", err) } @@ -157,7 +176,9 @@ func (i *Importer) createParent(ctx context.Context, name string) (int, error) { newTag := models.NewTag() newTag.Name = name - err := i.ReaderWriter.Create(ctx, &newTag) + err := i.ReaderWriter.Create(ctx, 
&models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return 0, err } diff --git a/pkg/tag/import_test.go b/pkg/tag/import_test.go index b706c4937..f6eaec88a 100644 --- a/pkg/tag/import_test.go +++ b/pkg/tag/import_test.go @@ -154,14 +154,14 @@ func TestImporterPostImportParentMissing(t *testing.T) { db.Tag.On("UpdateParentTags", testCtx, ignoreID, emptyParents).Return(nil).Once() db.Tag.On("UpdateParentTags", testCtx, ignoreFoundID, []int{103}).Return(nil).Once() - db.Tag.On("Create", testCtx, mock.MatchedBy(func(t *models.Tag) bool { - return t.Name == "Create" + db.Tag.On("Create", testCtx, mock.MatchedBy(func(input *models.CreateTagInput) bool { + return input.Tag.Name == "Create" })).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = 100 + input := args.Get(1).(*models.CreateTagInput) + input.Tag.ID = 100 }).Return(nil).Once() - db.Tag.On("Create", testCtx, mock.MatchedBy(func(t *models.Tag) bool { - return t.Name == "CreateError" + db.Tag.On("Create", testCtx, mock.MatchedBy(func(input *models.CreateTagInput) bool { + return input.Tag.Name == "CreateError" })).Return(errors.New("failed creating parent")).Once() i.MissingRefBehaviour = models.ImportMissingRefEnumCreate @@ -261,11 +261,15 @@ func TestCreate(t *testing.T) { } errCreate := errors.New("Create error") - db.Tag.On("Create", testCtx, &tag).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = tagID + db.Tag.On("Create", testCtx, mock.MatchedBy(func(input *models.CreateTagInput) bool { + return input.Tag.Name == tag.Name + })).Run(func(args mock.Arguments) { + input := args.Get(1).(*models.CreateTagInput) + input.Tag.ID = tagID }).Return(nil).Once() - db.Tag.On("Create", testCtx, &tagErr).Return(errCreate).Once() + db.Tag.On("Create", testCtx, mock.MatchedBy(func(input *models.CreateTagInput) bool { + return input.Tag.Name == tagErr.Name + })).Return(errCreate).Once() id, err := i.Create(testCtx) assert.Equal(t, tagID, *id) @@ -299,7 +303,10 @@ func 
TestUpdate(t *testing.T) { // id needs to be set for the mock input tag.ID = tagID - db.Tag.On("Update", testCtx, &tag).Return(nil).Once() + tagInput := models.UpdateTagInput{ + Tag: &tag, + } + db.Tag.On("Update", testCtx, &tagInput).Return(nil).Once() err := i.Update(testCtx, tagID) assert.Nil(t, err) @@ -308,7 +315,10 @@ func TestUpdate(t *testing.T) { // need to set id separately tagErr.ID = errImageID - db.Tag.On("Update", testCtx, &tagErr).Return(errUpdate).Once() + errInput := models.UpdateTagInput{ + Tag: &tagErr, + } + db.Tag.On("Update", testCtx, &errInput).Return(errUpdate).Once() err = i.Update(testCtx, errImageID) assert.NotNil(t, err) diff --git a/pkg/tag/update.go b/pkg/tag/update.go index 99e9b9165..4a3a2901a 100644 --- a/pkg/tag/update.go +++ b/pkg/tag/update.go @@ -220,49 +220,3 @@ func ValidateHierarchyExisting(ctx context.Context, tag *models.Tag, parentIDs, return nil } - -func MergeHierarchy(ctx context.Context, destination int, sources []int, qb RelationshipFinder) ([]int, []int, error) { - var mergedParents, mergedChildren []int - allIds := append([]int{destination}, sources...) 
- - addTo := func(mergedItems []int, tagIDs []int) []int { - Tags: - for _, tagID := range tagIDs { - // Ignore tags which are already set - for _, existingItem := range mergedItems { - if tagID == existingItem { - continue Tags - } - } - - // Ignore tags which are being merged, as these are rolled up anyway (if A is merged into B any direct link between them can be ignored) - for _, id := range allIds { - if tagID == id { - continue Tags - } - } - - mergedItems = append(mergedItems, tagID) - } - - return mergedItems - } - - for _, id := range allIds { - parents, err := qb.GetParentIDs(ctx, id) - if err != nil { - return nil, nil, err - } - - mergedParents = addTo(mergedParents, parents) - - children, err := qb.GetChildIDs(ctx, id) - if err != nil { - return nil, nil, err - } - - mergedChildren = addTo(mergedChildren, children) - } - - return mergedParents, mergedChildren, nil -} diff --git a/pkg/tag/validate.go b/pkg/tag/validate.go index 966cec945..abc260b5e 100644 --- a/pkg/tag/validate.go +++ b/pkg/tag/validate.go @@ -69,7 +69,9 @@ func ValidateUpdate(ctx context.Context, id int, partial models.TagPartial, qb m return err } - if err := EnsureAliasesUnique(ctx, id, partial.Aliases.Apply(existing.Aliases.List()), qb); err != nil { + newAliases := partial.Aliases.Apply(existing.Aliases.List()) + + if err := EnsureAliasesUnique(ctx, id, newAliases, qb); err != nil { return err } } diff --git a/pkg/tag/validate_test.go b/pkg/tag/validate_test.go new file mode 100644 index 000000000..539086a6d --- /dev/null +++ b/pkg/tag/validate_test.go @@ -0,0 +1,86 @@ +package tag + +import ( + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stretchr/testify/assert" +) + +func nameFilter(n string) *models.TagFilterType { + return &models.TagFilterType{ + Name: &models.StringCriterionInput{ + Value: n, + Modifier: models.CriterionModifierEquals, + }, + } +} + +func aliasFilter(n string) *models.TagFilterType { + 
return &models.TagFilterType{ + Aliases: &models.StringCriterionInput{ + Value: n, + Modifier: models.CriterionModifierEquals, + }, + } +} + +func TestEnsureAliasesUnique(t *testing.T) { + db := mocks.NewDatabase() + + const ( + name1 = "name 1" + name2 = "name 2" + alias1 = "alias 1" + newAlias = "new alias" + ) + + existing2 := models.Tag{ + ID: 2, + Name: name2, + } + + pp := 1 + findFilter := &models.FindFilterType{ + PerPage: &pp, + } + + // name1 matches existing1 name - ok + // EnsureAliasesUnique calls EnsureTagNameUnique. + // EnsureTagNameUnique calls ByName then ByAlias. + + // Case 1: valid alias + // ByName "alias 1" -> nil + // ByAlias "alias 1" -> nil + db.Tag.On("Query", testCtx, nameFilter(alias1), findFilter).Return(nil, 0, nil) + db.Tag.On("Query", testCtx, aliasFilter(alias1), findFilter).Return(nil, 0, nil) + + // Case 2: alias duplicates existing2 name + // ByName "name 2" -> existing2 + db.Tag.On("Query", testCtx, nameFilter(name2), findFilter).Return([]*models.Tag{&existing2}, 1, nil) + + // Case 3: alias duplicates existing2 alias + // ByName "new alias" -> nil + // ByAlias "new alias" -> existing2 + db.Tag.On("Query", testCtx, nameFilter(newAlias), findFilter).Return(nil, 0, nil) + db.Tag.On("Query", testCtx, aliasFilter(newAlias), findFilter).Return([]*models.Tag{&existing2}, 1, nil) + + tests := []struct { + tName string + id int + aliases []string + want error + }{ + {"valid alias", 1, []string{alias1}, nil}, + {"alias duplicates other name", 1, []string{name2}, &NameExistsError{name2}}, + {"alias duplicates other alias", 1, []string{newAlias}, &NameUsedByAliasError{newAlias, existing2.Name}}, + } + + for _, tt := range tests { + t.Run(tt.tName, func(t *testing.T) { + got := EnsureAliasesUnique(testCtx, tt.id, tt.aliases, db.Tag) + assert.Equal(t, tt.want, got) + }) + } +} diff --git a/pkg/utils/mutex.go b/pkg/utils/mutex.go index 212200214..47439e32b 100644 --- a/pkg/utils/mutex.go +++ b/pkg/utils/mutex.go @@ -1,5 +1,7 @@ package utils 
+import "sync" + // MutexManager manages access to mutexes using a mutex type and key. type MutexManager struct { mapChan chan map[string]<-chan struct{} @@ -62,3 +64,26 @@ func (csm *MutexManager) Claim(mutexType string, key string, done <-chan struct{ csm.mapChan <- m }() } + +type MutexField[T any] struct { + mutex sync.RWMutex + value T +} + +func (mf *MutexField[T]) Get() T { + mf.mutex.RLock() + defer mf.mutex.RUnlock() + return mf.value +} + +func (mf *MutexField[T]) Set(value T) { + mf.mutex.Lock() + defer mf.mutex.Unlock() + mf.value = value +} + +func (mf *MutexField[T]) SetFunc(f func(T) T) { + mf.mutex.Lock() + defer mf.mutex.Unlock() + mf.value = f(mf.value) +} diff --git a/ui/v2.5/graphql/data/config.graphql b/ui/v2.5/graphql/data/config.graphql index b65ba21cc..ba8215fe3 100644 --- a/ui/v2.5/graphql/data/config.graphql +++ b/ui/v2.5/graphql/data/config.graphql @@ -39,6 +39,11 @@ fragment ConfigGeneralData on ConfigGeneralResult { logLevel logAccess logFileMaxSize + useCustomSpriteInterval + spriteInterval + minimumSprites + maximumSprites + spriteScreenshotSize createGalleriesFromFolders galleryCoverRegex videoExtensions @@ -92,6 +97,7 @@ fragment ConfigInterfaceData on ConfigInterfaceResult { javascriptEnabled customLocales customLocalesEnabled + disableCustomizations language imageLightbox { slideshowDelay @@ -142,7 +148,7 @@ fragment IdentifyMetadataOptionsData on IdentifyMetadataOptions { } setCoverImage setOrganized - includeMalePerformers + performerGenders skipMultipleMatches skipMultipleMatchTag skipSingleNamePerformers diff --git a/ui/v2.5/graphql/data/file.graphql b/ui/v2.5/graphql/data/file.graphql index 52a4c50f8..7386adb81 100644 --- a/ui/v2.5/graphql/data/file.graphql +++ b/ui/v2.5/graphql/data/file.graphql @@ -1,5 +1,6 @@ fragment FolderData on Folder { id + basename path } @@ -86,3 +87,17 @@ fragment VisualFileData on VisualFile { } } } + +fragment SelectFolderData on Folder { + id + path + basename +} + +fragment RecursiveFolderData 
on Folder { + ...SelectFolderData + + parent_folders { + ...SelectFolderData + } +} diff --git a/ui/v2.5/graphql/data/gallery.graphql b/ui/v2.5/graphql/data/gallery.graphql index c41f3e2b2..349a52ad7 100644 --- a/ui/v2.5/graphql/data/gallery.graphql +++ b/ui/v2.5/graphql/data/gallery.graphql @@ -22,7 +22,7 @@ fragment GalleryData on Gallery { folder { ...FolderData } - + image_count chapters { ...GalleryChapterData } @@ -39,6 +39,8 @@ fragment GalleryData on Gallery { scenes { ...SlimSceneData } + + custom_fields } fragment SelectGalleryData on Gallery { diff --git a/ui/v2.5/graphql/data/group.graphql b/ui/v2.5/graphql/data/group.graphql index 5251bed89..a9968bbae 100644 --- a/ui/v2.5/graphql/data/group.graphql +++ b/ui/v2.5/graphql/data/group.graphql @@ -1,3 +1,4 @@ +# Full fragment for detail views - includes recursive counts fragment GroupData on Group { id name @@ -38,4 +39,47 @@ fragment GroupData on Group { id title } + + custom_fields +} + +# Lightweight fragment for list views - excludes expensive recursive counts +# The _all fields (depth: -1) cause 10+ second queries on large databases +fragment ListGroupData on Group { + id + name + aliases + duration + date + rating100 + director + + studio { + ...SlimStudioData + } + + tags { + ...SlimTagData + } + + containing_groups { + group { + ...SlimGroupData + } + description + } + + synopsis + urls + front_image_path + back_image_path + scene_count + performer_count + sub_group_count + o_counter + + scenes { + id + title + } } diff --git a/ui/v2.5/graphql/data/image.graphql b/ui/v2.5/graphql/data/image.graphql index 52163b007..63ce5b458 100644 --- a/ui/v2.5/graphql/data/image.graphql +++ b/ui/v2.5/graphql/data/image.graphql @@ -37,4 +37,6 @@ fragment ImageData on Image { visual_files { ...VisualFileData } + + custom_fields } diff --git a/ui/v2.5/graphql/data/performer-slim.graphql b/ui/v2.5/graphql/data/performer-slim.graphql index 56a30842d..9bb628fba 100644 --- a/ui/v2.5/graphql/data/performer-slim.graphql 
+++ b/ui/v2.5/graphql/data/performer-slim.graphql @@ -16,7 +16,8 @@ fragment SlimPerformerData on Performer { fake_tits penis_length circumcised - career_length + career_start + career_end tattoos piercings alias_list diff --git a/ui/v2.5/graphql/data/performer.graphql b/ui/v2.5/graphql/data/performer.graphql index 035c8abc7..2a75fbb95 100644 --- a/ui/v2.5/graphql/data/performer.graphql +++ b/ui/v2.5/graphql/data/performer.graphql @@ -13,7 +13,8 @@ fragment PerformerData on Performer { fake_tits penis_length circumcised - career_length + career_start + career_end tattoos piercings alias_list diff --git a/ui/v2.5/graphql/data/scene.graphql b/ui/v2.5/graphql/data/scene.graphql index e4a6e5cc6..b7378c1da 100644 --- a/ui/v2.5/graphql/data/scene.graphql +++ b/ui/v2.5/graphql/data/scene.graphql @@ -79,6 +79,8 @@ fragment SceneData on Scene { mime_type label } + + custom_fields } fragment SelectSceneData on Scene { diff --git a/ui/v2.5/graphql/data/scrapers.graphql b/ui/v2.5/graphql/data/scrapers.graphql index 4a0f588a4..0dae3c2d5 100644 --- a/ui/v2.5/graphql/data/scrapers.graphql +++ b/ui/v2.5/graphql/data/scrapers.graphql @@ -38,7 +38,8 @@ fragment ScrapedPerformerData on ScrapedPerformer { fake_tits penis_length circumcised - career_length + career_start + career_end tattoos piercings aliases @@ -68,7 +69,8 @@ fragment ScrapedScenePerformerData on ScrapedPerformer { fake_tits penis_length circumcised - career_length + career_start + career_end tattoos piercings aliases @@ -158,6 +160,13 @@ fragment ScrapedSceneStudioData on ScrapedStudio { fragment ScrapedSceneTagData on ScrapedTag { stored_id name + description + alias_list + parent { + stored_id + name + description + } remote_site_id } diff --git a/ui/v2.5/graphql/data/studio-slim.graphql b/ui/v2.5/graphql/data/studio-slim.graphql index c48f7d93e..4ca3c8b4d 100644 --- a/ui/v2.5/graphql/data/studio-slim.graphql +++ b/ui/v2.5/graphql/data/studio-slim.graphql @@ -17,5 +17,8 @@ fragment SlimStudioData on Studio { id 
name } + favorite + ignore_auto_tag + organized o_counter } diff --git a/ui/v2.5/graphql/data/studio.graphql b/ui/v2.5/graphql/data/studio.graphql index aabec7a9b..0e23a885e 100644 --- a/ui/v2.5/graphql/data/studio.graphql +++ b/ui/v2.5/graphql/data/studio.graphql @@ -16,6 +16,7 @@ fragment StudioData on Studio { image_path } ignore_auto_tag + organized image_path scene_count scene_count_all: scene_count(depth: -1) @@ -40,6 +41,7 @@ fragment StudioData on Studio { ...SlimTagData } o_counter + custom_fields } fragment SelectStudioData on Studio { diff --git a/ui/v2.5/graphql/data/tag.graphql b/ui/v2.5/graphql/data/tag.graphql index 4b0c0aef9..19438e2a4 100644 --- a/ui/v2.5/graphql/data/tag.graphql +++ b/ui/v2.5/graphql/data/tag.graphql @@ -34,6 +34,8 @@ fragment TagData on Tag { children { ...SlimTagData } + + custom_fields } fragment SelectTagData on Tag { @@ -67,6 +69,11 @@ fragment TagListData on Tag { aliases ignore_auto_tag favorite + stash_ids { + endpoint + stash_id + updated_at + } image_path # Direct counts only - no recursive depth queries scene_count diff --git a/ui/v2.5/graphql/mutations/file.graphql b/ui/v2.5/graphql/mutations/file.graphql index 254a55126..fe920d308 100644 --- a/ui/v2.5/graphql/mutations/file.graphql +++ b/ui/v2.5/graphql/mutations/file.graphql @@ -1,3 +1,11 @@ mutation DeleteFiles($ids: [ID!]!) { deleteFiles(ids: $ids) } + +mutation RevealFileInFileManager($id: ID!) { + revealFileInFileManager(id: $id) +} + +mutation RevealFolderInFileManager($id: ID!) { + revealFolderInFileManager(id: $id) +} diff --git a/ui/v2.5/graphql/mutations/performer.graphql b/ui/v2.5/graphql/mutations/performer.graphql index a4fa341ed..2082281fc 100644 --- a/ui/v2.5/graphql/mutations/performer.graphql +++ b/ui/v2.5/graphql/mutations/performer.graphql @@ -23,3 +23,9 @@ mutation PerformerDestroy($id: ID!) { mutation PerformersDestroy($ids: [ID!]!) { performersDestroy(ids: $ids) } + +mutation PerformerMerge($input: PerformerMergeInput!) 
{ + performerMerge(input: $input) { + id + } +} diff --git a/ui/v2.5/graphql/mutations/stash-box.graphql b/ui/v2.5/graphql/mutations/stash-box.graphql index 596dc4302..de5f5136c 100644 --- a/ui/v2.5/graphql/mutations/stash-box.graphql +++ b/ui/v2.5/graphql/mutations/stash-box.graphql @@ -12,6 +12,10 @@ mutation StashBoxBatchStudioTag($input: StashBoxBatchTagInput!) { stashBoxBatchStudioTag(input: $input) } +mutation StashBoxBatchTagTag($input: StashBoxBatchTagInput!) { + stashBoxBatchTagTag(input: $input) +} + mutation SubmitStashBoxSceneDraft($input: StashBoxDraftSubmissionInput!) { submitStashBoxSceneDraft(input: $input) } diff --git a/ui/v2.5/graphql/mutations/tag.graphql b/ui/v2.5/graphql/mutations/tag.graphql index f2138e057..33c50833a 100644 --- a/ui/v2.5/graphql/mutations/tag.graphql +++ b/ui/v2.5/graphql/mutations/tag.graphql @@ -24,8 +24,14 @@ mutation BulkTagUpdate($input: BulkTagUpdateInput!) { } } -mutation TagsMerge($source: [ID!]!, $destination: ID!) { - tagsMerge(input: { source: $source, destination: $destination }) { +mutation TagsMerge( + $source: [ID!]! + $destination: ID! + $values: TagUpdateInput +) { + tagsMerge( + input: { source: $source, destination: $destination, values: $values } + ) { ...TagData } } diff --git a/ui/v2.5/graphql/queries/folder.graphql b/ui/v2.5/graphql/queries/folder.graphql new file mode 100644 index 000000000..b1119cd61 --- /dev/null +++ b/ui/v2.5/graphql/queries/folder.graphql @@ -0,0 +1,48 @@ +query FindRootFoldersForSelect($zip_file_filter: MultiCriterionInput) { + findFolders( + filter: { per_page: -1, sort: "path", direction: ASC } + folder_filter: { + parent_folder: { modifier: IS_NULL } + zip_file: $zip_file_filter + } + ) { + count + folders { + ...SelectFolderData + } + } +} + +query FindFoldersForQuery( + $filter: FindFilterType + $folder_filter: FolderFilterType + $ids: [ID!] 
+) { + findFolders(filter: $filter, folder_filter: $folder_filter, ids: $ids) { + count + folders { + ...RecursiveFolderData + } + } +} + +query FindFolderHierarchyForIDs($ids: [ID!]!) { + findFolders(ids: $ids) { + count + folders { + ...SelectFolderData + + parent_folders { + ...SelectFolderData + # the parent folders will be expanded, so we need the child folders + sub_folders { + ...SelectFolderData + # get zip file so we can filter out zip folders if needed + zip_file { + id + } + } + } + } + } +} diff --git a/ui/v2.5/graphql/queries/movie.graphql b/ui/v2.5/graphql/queries/movie.graphql index ad47e908d..2b2af7510 100644 --- a/ui/v2.5/graphql/queries/movie.graphql +++ b/ui/v2.5/graphql/queries/movie.graphql @@ -2,7 +2,7 @@ query FindGroups($filter: FindFilterType, $group_filter: GroupFilterType) { findGroups(filter: $filter, group_filter: $group_filter) { count groups { - ...GroupData + ...ListGroupData } } } diff --git a/ui/v2.5/graphql/queries/scene.graphql b/ui/v2.5/graphql/queries/scene.graphql index d6a3afd47..0e1a9fa11 100644 --- a/ui/v2.5/graphql/queries/scene.graphql +++ b/ui/v2.5/graphql/queries/scene.graphql @@ -40,6 +40,14 @@ query FindScene($id: ID!, $checksum: String) { } } +query FindFullScenes($ids: [Int!]) { + findScenes(scene_ids: $ids) { + scenes { + ...SceneData + } + } +} + query FindSceneMarkerTags($id: ID!) { sceneMarkerTags(scene_id: $id) { tag { diff --git a/ui/v2.5/graphql/queries/tag.graphql b/ui/v2.5/graphql/queries/tag.graphql index e0b20ee02..c91315f99 100644 --- a/ui/v2.5/graphql/queries/tag.graphql +++ b/ui/v2.5/graphql/queries/tag.graphql @@ -1,5 +1,9 @@ -query FindTags($filter: FindFilterType, $tag_filter: TagFilterType) { - findTags(filter: $filter, tag_filter: $tag_filter) { +query FindTags( + $filter: FindFilterType + $tag_filter: TagFilterType + $ids: [ID!] 
+) { + findTags(filter: $filter, tag_filter: $tag_filter, ids: $ids) { count tags { ...TagData diff --git a/ui/v2.5/package.json b/ui/v2.5/package.json index 5913540db..001e7fb60 100644 --- a/ui/v2.5/package.json +++ b/ui/v2.5/package.json @@ -3,6 +3,7 @@ "private": true, "homepage": "./", "type": "module", + "packageManager": "pnpm@10.30.3+sha512.c961d1e0a2d8e354ecaa5166b822516668b7f44cb5bd95122d590dd81922f606f5473b6d23ec4a5be05e7fcd18e8488d47d978bbe981872f1145d06e9a740017", "scripts": { "start": "vite", "build": "vite build", @@ -27,11 +28,11 @@ "@formatjs/intl-locale": "^3.0.11", "@formatjs/intl-numberformat": "^8.3.3", "@formatjs/intl-pluralrules": "^5.1.8", - "@fortawesome/fontawesome-svg-core": "^6.3.0", - "@fortawesome/free-brands-svg-icons": "^6.3.0", - "@fortawesome/free-regular-svg-icons": "^6.3.0", - "@fortawesome/free-solid-svg-icons": "^6.3.0", - "@fortawesome/react-fontawesome": "^0.2.0", + "@fortawesome/fontawesome-svg-core": "^7.1.0", + "@fortawesome/free-brands-svg-icons": "^7.1.0", + "@fortawesome/free-regular-svg-icons": "^7.1.0", + "@fortawesome/free-solid-svg-icons": "^7.1.0", + "@fortawesome/react-fontawesome": "^0.2.6", "@react-hook/resize-observer": "^1.2.6", "@silvermine/videojs-airplay": "^1.2.0", "@silvermine/videojs-chromecast": "^1.4.1", @@ -50,7 +51,7 @@ "graphql-ws": "^5.14.3", "i18n-iso-countries": "^7.5.0", "localforage": "^1.10.0", - "lodash-es": "^4.17.21", + "lodash-es": "^4.17.23", "moment": "^2.30.1", "mousetrap": "^1.6.5", "mousetrap-pause": "^1.0.0", diff --git a/ui/v2.5/pnpm-lock.yaml b/ui/v2.5/pnpm-lock.yaml index 16fef0a19..46dcec4d8 100644 --- a/ui/v2.5/pnpm-lock.yaml +++ b/ui/v2.5/pnpm-lock.yaml @@ -27,20 +27,20 @@ importers: specifier: ^5.1.8 version: 5.4.6 '@fortawesome/fontawesome-svg-core': - specifier: ^6.3.0 - version: 6.7.2 + specifier: ^7.1.0 + version: 7.1.0 '@fortawesome/free-brands-svg-icons': - specifier: ^6.3.0 - version: 6.7.2 + specifier: ^7.1.0 + version: 7.1.0 '@fortawesome/free-regular-svg-icons': - 
specifier: ^6.3.0 - version: 6.7.2 + specifier: ^7.1.0 + version: 7.1.0 '@fortawesome/free-solid-svg-icons': - specifier: ^6.3.0 - version: 6.7.2 + specifier: ^7.1.0 + version: 7.1.0 '@fortawesome/react-fontawesome': - specifier: ^0.2.0 - version: 0.2.6(@fortawesome/fontawesome-svg-core@6.7.2)(react@17.0.2) + specifier: ^0.2.6 + version: 0.2.6(@fortawesome/fontawesome-svg-core@7.1.0)(react@17.0.2) '@react-hook/resize-observer': specifier: ^1.2.6 version: 1.2.6(react@17.0.2) @@ -96,8 +96,8 @@ importers: specifier: ^1.10.0 version: 1.10.0 lodash-es: - specifier: ^4.17.21 - version: 4.17.21 + specifier: ^4.17.23 + version: 4.17.23 moment: specifier: ^2.30.1 version: 2.30.1 @@ -1215,6 +1215,7 @@ packages: '@formatjs/intl-enumerator@1.4.6': resolution: {integrity: sha512-O2YMcE3SuBy4jL8r6YNq/8hvFrQ92QGLawdmzFbOi8D1r3VOfEMr8ifnOMp3zt8XemfTLrma+aF6yRCVeEbVLw==} + deprecated: Package no longer supported. Contact Support at https://www.npmjs.com/support for more info. '@formatjs/intl-getcanonicallocales@2.3.0': resolution: {integrity: sha512-BOXbLwqQ7nKua/l7tKqDLRN84WupDXFDhGJQMFvsMVA2dKuOdRaWTxWpL3cJ7qPkoNw11Jf+Xpj4OSPBBvW0eQ==} @@ -1262,28 +1263,29 @@ packages: ts-jest: optional: true - '@fortawesome/fontawesome-common-types@6.7.2': - resolution: {integrity: sha512-Zs+YeHUC5fkt7Mg1l6XTniei3k4bwG/yo3iFUtZWd/pMx9g3fdvkSK9E0FOC+++phXOka78uJcYb8JaFkW52Xg==} + '@fortawesome/fontawesome-common-types@7.1.0': + resolution: {integrity: sha512-l/BQM7fYntsCI//du+6sEnHOP6a74UixFyOYUyz2DLMXKx+6DEhfR3F2NYGE45XH1JJuIamacb4IZs9S0ZOWLA==} engines: {node: '>=6'} - '@fortawesome/fontawesome-svg-core@6.7.2': - resolution: {integrity: sha512-yxtOBWDrdi5DD5o1pmVdq3WMCvnobT0LU6R8RyyVXPvFRd2o79/0NCuQoCjNTeZz9EzA9xS3JxNWfv54RIHFEA==} + '@fortawesome/fontawesome-svg-core@7.1.0': + resolution: {integrity: sha512-fNxRUk1KhjSbnbuBxlWSnBLKLBNun52ZBTcs22H/xEEzM6Ap81ZFTQ4bZBxVQGQgVY0xugKGoRcCbaKjLQ3XZA==} engines: {node: '>=6'} - '@fortawesome/free-brands-svg-icons@6.7.2': - resolution: {integrity: 
sha512-zu0evbcRTgjKfrr77/2XX+bU+kuGfjm0LbajJHVIgBWNIDzrhpRxiCPNT8DW5AdmSsq7Mcf9D1bH0aSeSUSM+Q==} + '@fortawesome/free-brands-svg-icons@7.1.0': + resolution: {integrity: sha512-9byUd9bgNfthsZAjBl6GxOu1VPHgBuRUP9juI7ZoM98h8xNPTCTagfwUFyYscdZq4Hr7gD1azMfM9s5tIWKZZA==} engines: {node: '>=6'} - '@fortawesome/free-regular-svg-icons@6.7.2': - resolution: {integrity: sha512-7Z/ur0gvCMW8G93dXIQOkQqHo2M5HLhYrRVC0//fakJXxcF1VmMPsxnG6Ee8qEylA8b8Q3peQXWMNZ62lYF28g==} + '@fortawesome/free-regular-svg-icons@7.1.0': + resolution: {integrity: sha512-0e2fdEyB4AR+e6kU4yxwA/MonnYcw/CsMEP9lH82ORFi9svA6/RhDyhxIv5mlJaldmaHLLYVTb+3iEr+PDSZuQ==} engines: {node: '>=6'} - '@fortawesome/free-solid-svg-icons@6.7.2': - resolution: {integrity: sha512-GsBrnOzU8uj0LECDfD5zomZJIjrPhIlWU82AHwa2s40FKH+kcxQaBvBo3Z4TxyZHIyX8XTDxsyA33/Vx9eFuQA==} + '@fortawesome/free-solid-svg-icons@7.1.0': + resolution: {integrity: sha512-Udu3K7SzAo9N013qt7qmm22/wo2hADdheXtBfxFTecp+ogsc0caQNRKEb7pkvvagUGOpG9wJC1ViH6WXs8oXIA==} engines: {node: '>=6'} '@fortawesome/react-fontawesome@0.2.6': resolution: {integrity: sha512-mtBFIi1UsYQo7rYonYFkjgYKGoL8T+fEH6NGUpvuqtY3ytMsAoDaPo5rk25KuMtKDipY4bGYM/CkmCHA1N3FUg==} + deprecated: v0.2.x is no longer supported. Unless you are still using FontAwesome 5, please update to v3.1.1 or greater. 
peerDependencies: '@fortawesome/fontawesome-svg-core': ~1 || ~6 || ~7 react: ^16.3 || ^17.0.0 || ^18.0.0 || ^19.0.0 @@ -1658,36 +1660,42 @@ packages: engines: {node: '>= 10.0.0'} cpu: [arm] os: [linux] + libc: [glibc] '@parcel/watcher-linux-arm-musl@2.5.1': resolution: {integrity: sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q==} engines: {node: '>= 10.0.0'} cpu: [arm] os: [linux] + libc: [musl] '@parcel/watcher-linux-arm64-glibc@2.5.1': resolution: {integrity: sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w==} engines: {node: '>= 10.0.0'} cpu: [arm64] os: [linux] + libc: [glibc] '@parcel/watcher-linux-arm64-musl@2.5.1': resolution: {integrity: sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg==} engines: {node: '>= 10.0.0'} cpu: [arm64] os: [linux] + libc: [musl] '@parcel/watcher-linux-x64-glibc@2.5.1': resolution: {integrity: sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A==} engines: {node: '>= 10.0.0'} cpu: [x64] os: [linux] + libc: [glibc] '@parcel/watcher-linux-x64-musl@2.5.1': resolution: {integrity: sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg==} engines: {node: '>= 10.0.0'} cpu: [x64] os: [linux] + libc: [musl] '@parcel/watcher-win32-arm64@2.5.1': resolution: {integrity: sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw==} @@ -1779,56 +1787,67 @@ packages: resolution: {integrity: sha512-JzWRR41o2U3/KMNKRuZNsDUAcAVUYhsPuMlx5RUldw0E4lvSIXFUwejtYz1HJXohUmqs/M6BBJAUBzKXZVddbg==} cpu: [arm] os: [linux] + libc: [glibc] '@rollup/rollup-linux-arm-musleabihf@4.53.1': resolution: {integrity: sha512-L8kRIrnfMrEoHLHtHn+4uYA52fiLDEDyezgxZtGUTiII/yb04Krq+vk3P2Try+Vya9LeCE9ZHU8CXD6J9EhzHQ==} cpu: [arm] os: [linux] + libc: [musl] '@rollup/rollup-linux-arm64-gnu@4.53.1': resolution: {integrity: 
sha512-ysAc0MFRV+WtQ8li8hi3EoFi7us6d1UzaS/+Dp7FYZfg3NdDljGMoVyiIp6Ucz7uhlYDBZ/zt6XI0YEZbUO11Q==} cpu: [arm64] os: [linux] + libc: [glibc] '@rollup/rollup-linux-arm64-musl@4.53.1': resolution: {integrity: sha512-UV6l9MJpDbDZZ/fJvqNcvO1PcivGEf1AvKuTcHoLjVZVFeAMygnamCTDikCVMRnA+qJe+B3pSbgX2+lBMqgBhA==} cpu: [arm64] os: [linux] + libc: [musl] '@rollup/rollup-linux-loong64-gnu@4.53.1': resolution: {integrity: sha512-UDUtelEprkA85g95Q+nj3Xf0M4hHa4DiJ+3P3h4BuGliY4NReYYqwlc0Y8ICLjN4+uIgCEvaygYlpf0hUj90Yg==} cpu: [loong64] os: [linux] + libc: [glibc] '@rollup/rollup-linux-ppc64-gnu@4.53.1': resolution: {integrity: sha512-vrRn+BYhEtNOte/zbc2wAUQReJXxEx2URfTol6OEfY2zFEUK92pkFBSXRylDM7aHi+YqEPJt9/ABYzmcrS4SgQ==} cpu: [ppc64] os: [linux] + libc: [glibc] '@rollup/rollup-linux-riscv64-gnu@4.53.1': resolution: {integrity: sha512-gto/1CxHyi4A7YqZZNznQYrVlPSaodOBPKM+6xcDSCMVZN/Fzb4K+AIkNz/1yAYz9h3Ng+e2fY9H6bgawVq17w==} cpu: [riscv64] os: [linux] + libc: [glibc] '@rollup/rollup-linux-riscv64-musl@4.53.1': resolution: {integrity: sha512-KZ6Vx7jAw3aLNjFR8eYVcQVdFa/cvBzDNRFM3z7XhNNunWjA03eUrEwJYPk0G8V7Gs08IThFKcAPS4WY/ybIrQ==} cpu: [riscv64] os: [linux] + libc: [musl] '@rollup/rollup-linux-s390x-gnu@4.53.1': resolution: {integrity: sha512-HvEixy2s/rWNgpwyKpXJcHmE7om1M89hxBTBi9Fs6zVuLU4gOrEMQNbNsN/tBVIMbLyysz/iwNiGtMOpLAOlvA==} cpu: [s390x] os: [linux] + libc: [glibc] '@rollup/rollup-linux-x64-gnu@4.53.1': resolution: {integrity: sha512-E/n8x2MSjAQgjj9IixO4UeEUeqXLtiA7pyoXCFYLuXpBA/t2hnbIdxHfA7kK9BFsYAoNU4st1rHYdldl8dTqGA==} cpu: [x64] os: [linux] + libc: [glibc] '@rollup/rollup-linux-x64-musl@4.53.1': resolution: {integrity: sha512-IhJ087PbLOQXCN6Ui/3FUkI9pWNZe/Z7rEIVOzMsOs1/HSAECCvSZ7PkIbkNqL/AZn6WbZvnoVZw/qwqYMo4/w==} cpu: [x64] os: [linux] + libc: [musl] '@rollup/rollup-openharmony-arm64@4.53.1': resolution: {integrity: sha512-0++oPNgLJHBblreu0SFM7b3mAsBJBTY0Ksrmu9N6ZVrPiTkRgda52mWR7TKhHAsUb9noCjFvAw9l6ZO1yzaVbA==} @@ -2193,11 +2212,11 @@ packages: peerDependencies: ajv: ^6.9.1 - 
ajv@6.12.6: - resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} + ajv@6.14.0: + resolution: {integrity: sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==} - ajv@8.17.1: - resolution: {integrity: sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==} + ajv@8.18.0: + resolution: {integrity: sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==} ansi-escapes@4.3.2: resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} @@ -2348,6 +2367,10 @@ packages: balanced-match@2.0.0: resolution: {integrity: sha512-1ugUSr8BHXRnK23KfuYS+gVMC3LB8QGH9W1iGtDPsNWoQbgtXSExkBu2aDR4epiGWZOjZsj6lDl/N/AqqTC3UA==} + balanced-match@4.0.4: + resolution: {integrity: sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==} + engines: {node: 18 || 20 || >=22} + base64-blob@1.4.1: resolution: {integrity: sha512-n5Ov4cPTbLBTX1PiFbaB5AmK7LMigO9HWh5Lzx+Kcx/yx1MppeeLYtAH8aLv1m++WNoHQnr+xbGSqcZinopwlw==} @@ -2372,6 +2395,7 @@ packages: bootstrap@4.6.2: resolution: {integrity: sha512-51Bbp/Uxr9aTuy6ca/8FbFloBUJZLHwnhTcnjIeRn2suQWsWzcuJhGjKDB5eppVte/8oCdOL3VuwxvZDUggwGQ==} + deprecated: This version of Bootstrap is no longer supported. Please upgrade to the latest version. 
peerDependencies: jquery: 1.9.1 - 3 popper.js: ^1.16.1 @@ -2379,8 +2403,9 @@ packages: brace-expansion@1.1.12: resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} - brace-expansion@2.0.2: - resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} + brace-expansion@5.0.3: + resolution: {integrity: sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==} + engines: {node: 18 || 20 || >=22} braces@3.0.3: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} @@ -3698,8 +3723,8 @@ packages: resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} engines: {node: '>=10'} - lodash-es@4.17.21: - resolution: {integrity: sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==} + lodash-es@4.17.23: + resolution: {integrity: sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==} lodash.debounce@4.0.8: resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==} @@ -3892,11 +3917,11 @@ packages: resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==} engines: {node: '>=4'} - minimatch@3.1.2: - resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} + minimatch@3.1.5: + resolution: {integrity: sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==} - minimatch@9.0.5: - resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==} + minimatch@9.0.8: + resolution: {integrity: 
sha512-reYkDYtj/b19TeqbNZCV4q9t+Yxylf/rYBsLb42SXJatTv4/ylq5lEiAmhA/IToxO7NI2UzNMghHoHuaqDkAjw==} engines: {node: '>=16 || 14 >=14.17'} minimist-options@4.1.0: @@ -6333,14 +6358,14 @@ snapshots: '@eslint/eslintrc@2.1.4': dependencies: - ajv: 6.12.6 + ajv: 6.14.0 debug: 4.4.3 espree: 9.6.1 globals: 13.24.0 ignore: 5.3.2 import-fresh: 3.3.1 js-yaml: 4.1.0 - minimatch: 3.1.2 + minimatch: 3.1.5 strip-json-comments: 3.1.1 transitivePeerDependencies: - supports-color @@ -6485,27 +6510,27 @@ snapshots: tslib: 2.8.1 typescript: 4.8.4 - '@fortawesome/fontawesome-common-types@6.7.2': {} + '@fortawesome/fontawesome-common-types@7.1.0': {} - '@fortawesome/fontawesome-svg-core@6.7.2': + '@fortawesome/fontawesome-svg-core@7.1.0': dependencies: - '@fortawesome/fontawesome-common-types': 6.7.2 + '@fortawesome/fontawesome-common-types': 7.1.0 - '@fortawesome/free-brands-svg-icons@6.7.2': + '@fortawesome/free-brands-svg-icons@7.1.0': dependencies: - '@fortawesome/fontawesome-common-types': 6.7.2 + '@fortawesome/fontawesome-common-types': 7.1.0 - '@fortawesome/free-regular-svg-icons@6.7.2': + '@fortawesome/free-regular-svg-icons@7.1.0': dependencies: - '@fortawesome/fontawesome-common-types': 6.7.2 + '@fortawesome/fontawesome-common-types': 7.1.0 - '@fortawesome/free-solid-svg-icons@6.7.2': + '@fortawesome/free-solid-svg-icons@7.1.0': dependencies: - '@fortawesome/fontawesome-common-types': 6.7.2 + '@fortawesome/fontawesome-common-types': 7.1.0 - '@fortawesome/react-fontawesome@0.2.6(@fortawesome/fontawesome-svg-core@6.7.2)(react@17.0.2)': + '@fortawesome/react-fontawesome@0.2.6(@fortawesome/fontawesome-svg-core@7.1.0)(react@17.0.2)': dependencies: - '@fortawesome/fontawesome-svg-core': 6.7.2 + '@fortawesome/fontawesome-svg-core': 7.1.0 prop-types: 15.8.1 react: 17.0.2 @@ -7034,7 +7059,7 @@ snapshots: dependencies: '@humanwhocodes/object-schema': 2.0.3 debug: 4.4.3 - minimatch: 3.1.2 + minimatch: 3.1.5 transitivePeerDependencies: - supports-color @@ -7660,18 +7685,18 @@ snapshots: 
clean-stack: 2.2.0 indent-string: 4.0.0 - ajv-keywords@3.5.2(ajv@6.12.6): + ajv-keywords@3.5.2(ajv@6.14.0): dependencies: - ajv: 6.12.6 + ajv: 6.14.0 - ajv@6.12.6: + ajv@6.14.0: dependencies: fast-deep-equal: 3.1.3 fast-json-stable-stringify: 2.1.0 json-schema-traverse: 0.4.1 uri-js: 4.4.1 - ajv@8.17.1: + ajv@8.18.0: dependencies: fast-deep-equal: 3.1.3 fast-uri: 3.1.0 @@ -7884,6 +7909,8 @@ snapshots: balanced-match@2.0.0: {} + balanced-match@4.0.4: {} + base64-blob@1.4.1: dependencies: b64-to-blob: 1.2.19 @@ -7921,9 +7948,9 @@ snapshots: balanced-match: 1.0.2 concat-map: 0.0.1 - brace-expansion@2.0.2: + brace-expansion@5.0.3: dependencies: - balanced-match: 1.0.2 + balanced-match: 4.0.4 braces@3.0.3: dependencies: @@ -8517,7 +8544,7 @@ snapshots: hasown: 2.0.2 is-core-module: 2.16.1 is-glob: 4.0.3 - minimatch: 3.1.2 + minimatch: 3.1.5 object.fromentries: 2.0.8 object.groupby: 1.0.3 object.values: 1.2.1 @@ -8545,7 +8572,7 @@ snapshots: hasown: 2.0.2 jsx-ast-utils: 3.3.5 language-tags: 1.0.9 - minimatch: 3.1.2 + minimatch: 3.1.5 object.fromentries: 2.0.8 safe-regex-test: 1.1.0 string.prototype.includes: 2.0.1 @@ -8566,7 +8593,7 @@ snapshots: estraverse: 5.3.0 hasown: 2.0.2 jsx-ast-utils: 3.3.5 - minimatch: 3.1.2 + minimatch: 3.1.5 object.entries: 1.1.9 object.fromentries: 2.0.8 object.values: 1.2.1 @@ -8598,7 +8625,7 @@ snapshots: '@humanwhocodes/module-importer': 1.0.1 '@nodelib/fs.walk': 1.2.8 '@ungap/structured-clone': 1.3.0 - ajv: 6.12.6 + ajv: 6.14.0 chalk: 4.1.2 cross-spawn: 7.0.6 debug: 4.4.3 @@ -8623,7 +8650,7 @@ snapshots: json-stable-stringify-without-jsonify: 1.0.1 levn: 0.4.1 lodash.merge: 4.6.2 - minimatch: 3.1.2 + minimatch: 3.1.5 natural-compare: 1.4.0 optionator: 0.9.4 strip-ansi: 6.0.1 @@ -8787,7 +8814,7 @@ snapshots: deepmerge: 2.2.1 hoist-non-react-statics: 3.3.2 lodash: 4.17.21 - lodash-es: 4.17.21 + lodash-es: 4.17.23 react: 17.0.2 react-fast-compare: 2.0.4 tiny-warning: 1.0.3 @@ -8871,7 +8898,7 @@ snapshots: fs.realpath: 1.0.0 inflight: 1.0.6 
inherits: 2.0.4 - minimatch: 3.1.2 + minimatch: 3.1.5 once: 1.4.0 path-is-absolute: 1.0.1 @@ -8929,7 +8956,7 @@ snapshots: cosmiconfig: 8.3.6(typescript@4.8.4) graphql: 16.11.0 jiti: 2.6.1 - minimatch: 9.0.5 + minimatch: 9.0.8 string-env-interpolation: 1.0.1 tslib: 2.8.1 transitivePeerDependencies: @@ -9442,7 +9469,7 @@ snapshots: dependencies: p-locate: 5.0.0 - lodash-es@4.17.21: {} + lodash-es@4.17.23: {} lodash.debounce@4.0.8: {} @@ -9697,13 +9724,13 @@ snapshots: min-indent@1.0.1: {} - minimatch@3.1.2: + minimatch@3.1.5: dependencies: brace-expansion: 1.1.12 - minimatch@9.0.5: + minimatch@9.0.8: dependencies: - brace-expansion: 2.0.2 + brace-expansion: 5.0.3 minimist-options@4.1.0: dependencies: @@ -10494,8 +10521,8 @@ snapshots: schema-utils@2.7.1: dependencies: '@types/json-schema': 7.0.15 - ajv: 6.12.6 - ajv-keywords: 3.5.2(ajv@6.12.6) + ajv: 6.14.0 + ajv-keywords: 3.5.2(ajv@6.14.0) scuid@1.1.0: {} @@ -10824,7 +10851,7 @@ snapshots: table@6.9.0: dependencies: - ajv: 8.17.1 + ajv: 8.18.0 lodash.truncate: 4.4.2 slice-ansi: 4.0.0 string-width: 4.2.3 diff --git a/ui/v2.5/src/App.tsx b/ui/v2.5/src/App.tsx index a8b92ecc3..d08274b18 100644 --- a/ui/v2.5/src/App.tsx +++ b/ui/v2.5/src/App.tsx @@ -49,6 +49,7 @@ import { PluginRoutes, PluginsLoader } from "./plugins"; // import plugin_api to run code import "./pluginApi"; import { ConnectionMonitor } from "./ConnectionMonitor"; +import { TroubleshootingModeOverlay } from "./components/TroubleshootingMode/TroubleshootingModeOverlay"; import { PatchFunction } from "./patch"; import moment from "moment/min/moment-with-locales"; @@ -307,7 +308,8 @@ export const App: React.FC = () => { ); } - const titleProps = makeTitleProps(); + const title = config.data?.configuration.ui.title || "Stash"; + const titleProps = makeTitleProps(title); if (!messages) { return null; @@ -351,11 +353,17 @@ export const App: React.FC = () => { formats={intlFormats} > - + {maybeRenderReleaseNotes()} + }> diff --git 
a/ui/v2.5/src/components/Changelog/Changelog.tsx b/ui/v2.5/src/components/Changelog/Changelog.tsx index 97175e1c2..df7517f7d 100644 --- a/ui/v2.5/src/components/Changelog/Changelog.tsx +++ b/ui/v2.5/src/components/Changelog/Changelog.tsx @@ -35,6 +35,7 @@ import V0270 from "src/docs/en/Changelog/v0270.md"; import V0280 from "src/docs/en/Changelog/v0280.md"; import V0290 from "src/docs/en/Changelog/v0290.md"; import V0300 from "src/docs/en/Changelog/v0300.md"; +import V0310 from "src/docs/en/Changelog/v0310.md"; import V0290ReleaseNotes from "src/docs/en/ReleaseNotes/v0290.md"; @@ -75,9 +76,9 @@ const Changelog: React.FC = () => { // after new release: // add entry to releases, using the current* fields // then update the current fields. - const currentVersion = stashVersion || "v0.30.0"; + const currentVersion = stashVersion || "v0.31.0"; const currentDate = buildDate; - const currentPage = V0300; + const currentPage = V0310; const releases: IStashRelease[] = [ { @@ -86,6 +87,12 @@ const Changelog: React.FC = () => { page: currentPage, defaultOpen: true, }, + { + version: "v0.30.1", + date: "2025-12-18", + page: V0300, + releaseNotes: V0290ReleaseNotes, + }, { version: "v0.29.3", date: "2025-11-06", @@ -256,7 +263,9 @@ const Changelog: React.FC = () => { return (
-

Changelog:

+

+ +

{releases.map((r) => ( void; - type: "scene"; // TODO - add image generate + type: "scene" | "image" | "gallery"; } -export const GenerateDialog: React.FC = ({ +export const GenerateDialog: React.FC = ({ selectedIds, onClose, type, }) => { + const sceneIDs = type === "scene" ? selectedIds : undefined; + const imageIDs = type === "image" ? selectedIds : undefined; + const galleryIDs = type === "gallery" ? selectedIds : undefined; + const { configuration } = useConfigurationContext(); function getDefaultOptions(): GQL.GenerateMetadataInput { @@ -89,6 +93,13 @@ export const GenerateDialog: React.FC = ({ }, [configuration, configRead]); const selectionStatus = useMemo(() => { + const countableIds: Record = { + scene: "countables.scenes", + image: "countables.images", + gallery: "countables.galleries", + }; + const countableId = countableIds[type]; + if (selectedIds) { return ( @@ -98,7 +109,7 @@ export const GenerateDialog: React.FC = ({ num: selectedIds.length, scene: intl.formatMessage( { - id: "countables.scenes", + id: countableId, }, { count: selectedIds.length, @@ -118,7 +129,7 @@ export const GenerateDialog: React.FC = ({ num: intl.formatMessage({ id: "all" }), scene: intl.formatMessage( { - id: "countables.scenes", + id: countableId, }, { count: 0, @@ -135,13 +146,15 @@ export const GenerateDialog: React.FC = ({
{message}
); - }, [selectedIds, intl]); + }, [selectedIds, intl, type]); async function onGenerate() { try { await mutateMetadataGenerate({ ...options, - sceneIDs: selectedIds, + sceneIDs, + imageIDs, + galleryIDs, }); Toast.success( intl.formatMessage( diff --git a/ui/v2.5/src/components/Dialogs/IdentifyDialog/IdentifyDialog.tsx b/ui/v2.5/src/components/Dialogs/IdentifyDialog/IdentifyDialog.tsx index 3073a7952..8262de4ec 100644 --- a/ui/v2.5/src/components/Dialogs/IdentifyDialog/IdentifyDialog.tsx +++ b/ui/v2.5/src/components/Dialogs/IdentifyDialog/IdentifyDialog.tsx @@ -62,7 +62,7 @@ export const IdentifyDialog: React.FC = ({ createMissing: true, }, ], - includeMalePerformers: true, + performerGenders: undefined, setCoverImage: true, setOrganized: false, skipMultipleMatches: true, diff --git a/ui/v2.5/src/components/Dialogs/IdentifyDialog/Options.tsx b/ui/v2.5/src/components/Dialogs/IdentifyDialog/Options.tsx index 1362df02a..4987db5f9 100644 --- a/ui/v2.5/src/components/Dialogs/IdentifyDialog/Options.tsx +++ b/ui/v2.5/src/components/Dialogs/IdentifyDialog/Options.tsx @@ -6,6 +6,7 @@ import { IScraperSource } from "./constants"; import { FieldOptionsList } from "./FieldOptions"; import { ThreeStateBoolean } from "./ThreeStateBoolean"; import { TagSelect } from "src/components/Shared/Select"; +import { genderList } from "src/utils/gender"; interface IOptionsEditor { options: GQL.IdentifyMetadataOptionsInput; @@ -124,24 +125,52 @@ export const OptionsEditor: React.FC = ({ )} - - setOptions({ - includeMalePerformers: v, - }) - } - label={intl.formatMessage({ - id: "config.tasks.identify.include_male_performers", - })} - defaultValue={defaultOptions?.includeMalePerformers ?? undefined} - {...checkboxProps} - /> + + + + + {source && ( + ) => { + if (e.currentTarget.checked) { + setOptions({ performerGenders: undefined }); + } else { + setOptions({ + performerGenders: + defaultOptions?.performerGenders ?? 
genderList.slice(), + }); + } + }} + /> + )} + {(options.performerGenders != null || !source) && + genderList.map((gender) => { + const performerGenders = + options.performerGenders ?? genderList.slice(); + return ( + } + checked={performerGenders.includes(gender)} + onChange={(e: React.ChangeEvent) => { + const isChecked = e.currentTarget.checked; + setOptions({ + performerGenders: isChecked + ? [...performerGenders, gender] + : performerGenders.filter((g) => g !== gender), + }); + }} + /> + ); + })} + + + + = PatchComponent( + "FilteredRecommendationRow", + (props) => { + const cardCount = props.count; + + const unsupportedCriteria = props.filter.criteria.filter( + (criterion) => criterion instanceof UnsupportedCriterion + ); + + const header = unsupportedCriteria.length ? ( +
+ {props.heading} + + c.criterionOption.type) + .join(", "), + }} + /> + + } + /> +
+ ) : ( + props.heading + ); + + if (!props.loading && !cardCount) { + return null; + } + + return ( + + + + } + > + + {props.children} + + + ); + } +); diff --git a/ui/v2.5/src/components/FrontPage/RecommendationRow.tsx b/ui/v2.5/src/components/FrontPage/RecommendationRow.tsx index 0b48434c0..97e43f294 100644 --- a/ui/v2.5/src/components/FrontPage/RecommendationRow.tsx +++ b/ui/v2.5/src/components/FrontPage/RecommendationRow.tsx @@ -1,24 +1,24 @@ import React, { PropsWithChildren } from "react"; +import { PatchComponent } from "src/patch"; interface IProps { className?: string; - header: string; + header: React.ReactNode; link: JSX.Element; } -export const RecommendationRow: React.FC> = ({ - className, - header, - link, - children, -}) => ( -
-
-
-

{header}

+export const RecommendationRow: React.FC> = + PatchComponent( + "RecommendationRow", + ({ className, header, link, children }) => ( +
+
+
+

{header}

+
+ {link} +
+ {children}
- {link} -
- {children} -
-); + ) + ); diff --git a/ui/v2.5/src/components/FrontPage/styles.scss b/ui/v2.5/src/components/FrontPage/styles.scss index 88d7f0c0a..2de0c6a44 100644 --- a/ui/v2.5/src/components/FrontPage/styles.scss +++ b/ui/v2.5/src/components/FrontPage/styles.scss @@ -492,3 +492,10 @@ color: white; opacity: 0.75; } + +// HACK: compatibility with existing behaviour after removed width from zoom-1 class +// this should really be changed to use the specific card types instead of a generic zoom-1 class, +// but this is a quick fix to prevent breaking existing styles +.recommendation-row .card.zoom-1 { + width: 320px; +} diff --git a/ui/v2.5/src/components/Galleries/EditGalleriesDialog.tsx b/ui/v2.5/src/components/Galleries/EditGalleriesDialog.tsx index 9ff7e00f2..cec44abf1 100644 --- a/ui/v2.5/src/components/Galleries/EditGalleriesDialog.tsx +++ b/ui/v2.5/src/components/Galleries/EditGalleriesDialog.tsx @@ -1,100 +1,129 @@ -import React, { useEffect, useState } from "react"; -import { Form, Col, Row } from "react-bootstrap"; -import { FormattedMessage, useIntl } from "react-intl"; -import isEqual from "lodash-es/isEqual"; +import React, { useEffect, useMemo, useState } from "react"; +import { Form } from "react-bootstrap"; +import { useIntl } from "react-intl"; import { useBulkGalleryUpdate } from "src/core/StashService"; import * as GQL from "src/core/generated-graphql"; import { StudioSelect } from "../Shared/Select"; import { ModalComponent } from "../Shared/Modal"; -import { useToast } from "src/hooks/Toast"; -import * as FormUtils from "src/utils/form"; import { MultiSet } from "../Shared/MultiSet"; +import { useToast } from "src/hooks/Toast"; import { RatingSystem } from "../Shared/Rating/RatingSystem"; import { - getAggregateInputIDs, getAggregateInputValue, getAggregatePerformerIds, - getAggregateRating, - getAggregateStudioId, + getAggregateStateObject, getAggregateTagIds, + getAggregateStudioId, + getAggregateSceneIds, } from "src/utils/bulkUpdate"; import { faPencilAlt 
} from "@fortawesome/free-solid-svg-icons"; +import { IndeterminateCheckbox } from "../Shared/IndeterminateCheckbox"; +import { BulkUpdateFormGroup, BulkUpdateTextInput } from "../Shared/BulkUpdate"; +import { BulkUpdateDateInput } from "../Shared/DateInput"; +import { getDateError } from "src/utils/yup"; interface IListOperationProps { selected: GQL.SlimGalleryDataFragment[]; onClose: (applied: boolean) => void; } +const galleryFields = [ + "code", + "rating100", + "details", + "organized", + "photographer", + "date", +]; + export const EditGalleriesDialog: React.FC = ( props: IListOperationProps ) => { const intl = useIntl(); const Toast = useToast(); - const [rating100, setRating] = useState(); - const [studioId, setStudioId] = useState(); - const [performerMode, setPerformerMode] = - React.useState(GQL.BulkUpdateIdMode.Add); - const [performerIds, setPerformerIds] = useState(); - const [existingPerformerIds, setExistingPerformerIds] = useState(); - const [tagMode, setTagMode] = React.useState( - GQL.BulkUpdateIdMode.Add - ); - const [tagIds, setTagIds] = useState(); - const [existingTagIds, setExistingTagIds] = useState(); - const [organized, setOrganized] = useState(); + + const [updateInput, setUpdateInput] = useState({ + ids: props.selected.map((gallery) => { + return gallery.id; + }), + }); + + const [performerIds, setPerformerIds] = useState({ + mode: GQL.BulkUpdateIdMode.Add, + }); + const [tagIds, setTagIds] = useState({ + mode: GQL.BulkUpdateIdMode.Add, + }); + const [sceneIds, setSceneIds] = useState({ + mode: GQL.BulkUpdateIdMode.Add, + }); + + const unsetDisabled = props.selected.length < 2; + + const [dateError, setDateError] = useState(); const [updateGalleries] = useBulkGalleryUpdate(); // Network state const [isUpdating, setIsUpdating] = useState(false); - const checkboxRef = React.createRef(); + const aggregateState = useMemo(() => { + const updateState: Partial = {}; + const state = props.selected; + updateState.studio_id = 
getAggregateStudioId(props.selected); + const updateTagIds = getAggregateTagIds(props.selected); + const updatePerformerIds = getAggregatePerformerIds(props.selected); + const updateSceneIds = getAggregateSceneIds(props.selected); + let first = true; + + state.forEach((gallery: GQL.SlimGalleryDataFragment) => { + getAggregateStateObject(updateState, gallery, galleryFields, first); + first = false; + }); + + return { + state: updateState, + tagIds: updateTagIds, + performerIds: updatePerformerIds, + sceneIds: updateSceneIds, + }; + }, [props.selected]); + + // update initial state from aggregate + useEffect(() => { + setUpdateInput((current) => ({ ...current, ...aggregateState.state })); + }, [aggregateState]); + + useEffect(() => { + setDateError(getDateError(updateInput.date ?? "", intl)); + }, [updateInput.date, intl]); + + function setUpdateField(input: Partial) { + setUpdateInput((current) => ({ ...current, ...input })); + } function getGalleryInput(): GQL.BulkGalleryUpdateInput { - // need to determine what we are actually setting on each gallery - const aggregateRating = getAggregateRating(props.selected); - const aggregateStudioId = getAggregateStudioId(props.selected); - const aggregatePerformerIds = getAggregatePerformerIds(props.selected); - const aggregateTagIds = getAggregateTagIds(props.selected); - const galleryInput: GQL.BulkGalleryUpdateInput = { - ids: props.selected.map((gallery) => { - return gallery.id; - }), + ...updateInput, + tag_ids: tagIds, + performer_ids: performerIds, + scene_ids: sceneIds, }; - galleryInput.rating100 = getAggregateInputValue(rating100, aggregateRating); - galleryInput.studio_id = getAggregateInputValue( - studioId, - aggregateStudioId + // we don't have unset functionality for the rating star control + // so need to determine if we are setting a rating or not + galleryInput.rating100 = getAggregateInputValue( + updateInput.rating100, + aggregateState.state.rating100 ); - galleryInput.performer_ids = 
getAggregateInputIDs( - performerMode, - performerIds, - aggregatePerformerIds - ); - galleryInput.tag_ids = getAggregateInputIDs( - tagMode, - tagIds, - aggregateTagIds - ); - - if (organized !== undefined) { - galleryInput.organized = organized; - } - return galleryInput; } async function onSave() { setIsUpdating(true); try { - await updateGalleries({ - variables: { - input: getGalleryInput(), - }, - }); + await updateGalleries({ variables: { input: getGalleryInput() } }); Toast.success( intl.formatMessage( { id: "toast.updated_entity" }, @@ -110,129 +139,13 @@ export const EditGalleriesDialog: React.FC = ( setIsUpdating(false); } - useEffect(() => { - const state = props.selected; - let updateRating: number | undefined; - let updateStudioID: string | undefined; - let updatePerformerIds: string[] = []; - let updateTagIds: string[] = []; - let updateOrganized: boolean | undefined; - let first = true; - - state.forEach((gallery: GQL.SlimGalleryDataFragment) => { - const galleryRating = gallery.rating100; - const GalleriestudioID = gallery?.studio?.id; - const galleryPerformerIDs = (gallery.performers ?? []) - .map((p) => p.id) - .sort(); - const galleryTagIDs = (gallery.tags ?? []).map((p) => p.id).sort(); - - if (first) { - updateRating = galleryRating ?? 
undefined; - updateStudioID = GalleriestudioID; - updatePerformerIds = galleryPerformerIDs; - updateTagIds = galleryTagIDs; - updateOrganized = gallery.organized; - first = false; - } else { - if (galleryRating !== updateRating) { - updateRating = undefined; - } - if (GalleriestudioID !== updateStudioID) { - updateStudioID = undefined; - } - if (!isEqual(galleryPerformerIDs, updatePerformerIds)) { - updatePerformerIds = []; - } - if (!isEqual(galleryTagIDs, updateTagIds)) { - updateTagIds = []; - } - if (gallery.organized !== updateOrganized) { - updateOrganized = undefined; - } - } - }); - - setRating(updateRating); - setStudioId(updateStudioID); - setExistingPerformerIds(updatePerformerIds); - setExistingTagIds(updateTagIds); - - setOrganized(updateOrganized); - }, [props.selected]); - - useEffect(() => { - if (checkboxRef.current) { - checkboxRef.current.indeterminate = organized === undefined; - } - }, [organized, checkboxRef]); - - function renderMultiSelect( - type: "performers" | "tags", - ids: string[] | undefined - ) { - let mode = GQL.BulkUpdateIdMode.Add; - let existingIds: string[] | undefined = []; - switch (type) { - case "performers": - mode = performerMode; - existingIds = existingPerformerIds; - break; - case "tags": - mode = tagMode; - existingIds = existingTagIds; - break; - } - - return ( - { - switch (type) { - case "performers": - setPerformerIds(itemIDs); - break; - case "tags": - setTagIds(itemIDs); - break; - } - }} - onSetMode={(newMode) => { - switch (type) { - case "performers": - setPerformerMode(newMode); - break; - case "tags": - setTagMode(newMode); - break; - } - }} - existingIds={existingIds ?? []} - ids={ids ?? 
[]} - mode={mode} - menuPortalTarget={document.body} - /> - ); - } - - function cycleOrganized() { - if (organized) { - setOrganized(undefined); - } else if (organized === undefined) { - setOrganized(false); - } else { - setOrganized(true); - } - } - function render() { return ( = ( onClick: onSave, text: intl.formatMessage({ id: "actions.apply" }), }} + disabled={isUpdating || !!dateError} cancel={{ onClick: () => props.onClose(false), text: intl.formatMessage({ id: "actions.cancel" }), @@ -251,55 +165,119 @@ export const EditGalleriesDialog: React.FC = ( isRunning={isUpdating} >
- - {FormUtils.renderLabel({ - title: intl.formatMessage({ id: "rating" }), - })} - - setRating(value ?? undefined)} - disabled={isUpdating} - /> - - - - {FormUtils.renderLabel({ - title: intl.formatMessage({ id: "studio" }), - })} - - - setStudioId(items.length > 0 ? items[0]?.id : undefined) - } - ids={studioId ? [studioId] : []} - isDisabled={isUpdating} - menuPortalTarget={document.body} - /> - - + + + setUpdateField({ rating100: value ?? undefined }) + } + disabled={isUpdating} + /> + - - - - - {renderMultiSelect("performers", performerIds)} - + + setUpdateField({ code: newValue })} + unsetDisabled={unsetDisabled} + /> + + + setUpdateField({ date: newValue })} + unsetDisabled={unsetDisabled} + error={dateError} + /> + - - - - - {renderMultiSelect("tags", tagIds)} - + + + setUpdateField({ photographer: newValue }) + } + unsetDisabled={unsetDisabled} + /> + + + + setUpdateField({ + studio_id: items.length > 0 ? items[0]?.id : undefined, + }) + } + ids={updateInput.studio_id ? [updateInput.studio_id] : []} + isDisabled={isUpdating} + menuPortalTarget={document.body} + /> + + + + { + setPerformerIds((c) => ({ ...c, ids: itemIDs })); + }} + onSetMode={(newMode) => { + setPerformerIds((c) => ({ ...c, mode: newMode })); + }} + ids={performerIds.ids ?? []} + existingIds={aggregateState.performerIds} + mode={performerIds.mode} + menuPortalTarget={document.body} + /> + + + + { + setSceneIds((c) => ({ ...c, ids: itemIDs })); + }} + onSetMode={(newMode) => { + setSceneIds((c) => ({ ...c, mode: newMode })); + }} + ids={sceneIds.ids ?? []} + existingIds={aggregateState.sceneIds} + mode={sceneIds.mode} + menuPortalTarget={document.body} + /> + + + + { + setTagIds((c) => ({ ...c, ids: itemIDs })); + }} + onSetMode={(newMode) => { + setTagIds((c) => ({ ...c, mode: newMode })); + }} + ids={tagIds.ids ?? 
[]} + existingIds={aggregateState.tagIds} + mode={tagIds.mode} + menuPortalTarget={document.body} + /> + + + + setUpdateField({ details: newValue })} + unsetDisabled={unsetDisabled} + as="textarea" + /> + - cycleOrganized()} + setChecked={(checked) => setUpdateField({ organized: checked })} + checked={updateInput.organized ?? undefined} />
diff --git a/ui/v2.5/src/components/Galleries/Galleries.tsx b/ui/v2.5/src/components/Galleries/Galleries.tsx index c845a153c..388ce6720 100644 --- a/ui/v2.5/src/components/Galleries/Galleries.tsx +++ b/ui/v2.5/src/components/Galleries/Galleries.tsx @@ -4,7 +4,7 @@ import { Helmet } from "react-helmet"; import { useTitleProps } from "src/hooks/title"; import Gallery from "./GalleryDetails/Gallery"; import GalleryCreate from "./GalleryDetails/GalleryCreate"; -import { GalleryList } from "./GalleryList"; +import { FilteredGalleryList } from "./GalleryList"; import { View } from "../List/views"; import { LoadingIndicator } from "../Shared/LoadingIndicator"; import { ErrorMessage } from "../Shared/ErrorMessage"; @@ -40,7 +40,7 @@ const GalleryImage: React.FC> = ({ }; const Galleries: React.FC = () => { - return ; + return ; }; const GalleryRoutes: React.FC = () => { diff --git a/ui/v2.5/src/components/Galleries/GalleryCard.tsx b/ui/v2.5/src/components/Galleries/GalleryCard.tsx index e4e227f3e..01e0b6045 100644 --- a/ui/v2.5/src/components/Galleries/GalleryCard.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryCard.tsx @@ -1,5 +1,5 @@ import { Button, ButtonGroup, OverlayTrigger, Tooltip } from "react-bootstrap"; -import React, { useState } from "react"; +import React, { useMemo, useState } from "react"; import * as GQL from "src/core/generated-graphql"; import { GridCard } from "../Shared/GridCard/GridCard"; import { HoverPopover } from "../Shared/HoverPopover"; @@ -21,11 +21,13 @@ import { PatchComponent } from "src/patch"; interface IGalleryPreviewProps { gallery: GQL.SlimGalleryDataFragment; onScrubberClick?: (index: number) => void; + disabled?: boolean; } export const GalleryPreview: React.FC = ({ gallery, onScrubberClick, + disabled, }) => { const [imgSrc, setImgSrc] = useState( gallery.paths.cover ?? 
undefined @@ -48,6 +50,7 @@ export const GalleryPreview: React.FC = ({ imageCount={gallery.image_count} onClick={onScrubberClick} onPathChanged={setImgSrc} + disabled={disabled} /> )}
@@ -195,7 +198,16 @@ const GalleryCardDetails = PatchComponent( const GalleryCardOverlays = PatchComponent( "GalleryCard.Overlays", (props: IGalleryCardProps) => { - return ; + const ret = useMemo(() => { + return ( + + ); + }, [props.gallery.studio, props.selecting]); + + return ret; } ); @@ -211,6 +223,7 @@ const GalleryCardImage = PatchComponent( onScrubberClick={(i) => { history.push(`/galleries/${props.gallery.id}/images/${i}`); }} + disabled={props.selecting} /> diff --git a/ui/v2.5/src/components/Galleries/GalleryCardGrid.tsx b/ui/v2.5/src/components/Galleries/GalleryCardGrid.tsx new file mode 100644 index 000000000..a249f27f7 --- /dev/null +++ b/ui/v2.5/src/components/Galleries/GalleryCardGrid.tsx @@ -0,0 +1,43 @@ +import React from "react"; +import * as GQL from "src/core/generated-graphql"; +import { GalleryCard } from "./GalleryCard"; +import { + useCardWidth, + useContainerDimensions, +} from "../Shared/GridCard/GridCard"; +import { PatchComponent } from "src/patch"; + +interface IGalleryCardGrid { + galleries: GQL.SlimGalleryDataFragment[]; + selectedIds: Set; + zoomIndex: number; + onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; +} + +const zoomWidths = [280, 340, 480, 640]; + +export const GalleryCardGrid: React.FC = PatchComponent( + "GalleryCardGrid", + ({ galleries, selectedIds, zoomIndex, onSelectChange }) => { + const [componentRef, { width: containerWidth }] = useContainerDimensions(); + const cardWidth = useCardWidth(containerWidth, zoomIndex, zoomWidths); + + return ( +
+ {galleries.map((gallery) => ( + 0} + selected={selectedIds.has(gallery.id)} + onSelectedChanged={(selected: boolean, shiftKey: boolean) => + onSelectChange(gallery.id, selected, shiftKey) + } + /> + ))} +
+ ); + } +); diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx index 195766e03..1fce02b32 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx @@ -1,11 +1,6 @@ import { Button, Tab, Nav, Dropdown } from "react-bootstrap"; import React, { useEffect, useMemo, useState } from "react"; -import { - useHistory, - Link, - RouteComponentProps, - Redirect, -} from "react-router-dom"; +import { useHistory, RouteComponentProps, Redirect } from "react-router-dom"; import { FormattedMessage, useIntl } from "react-intl"; import { Helmet } from "react-helmet"; import * as GQL from "src/core/generated-graphql"; @@ -15,6 +10,11 @@ import { useFindGallery, useGalleryUpdate, } from "src/core/StashService"; +import { lazyComponent } from "src/utils/lazyComponent"; + +const GenerateDialog = lazyComponent( + () => import("../../Dialogs/GenerateDialog") +); import { ErrorMessage } from "src/components/Shared/ErrorMessage"; import { LoadingIndicator } from "src/components/Shared/LoadingIndicator"; import { Icon } from "src/components/Shared/Icon"; @@ -45,6 +45,7 @@ import { useConfigurationContext } from "src/hooks/Config"; import { TruncatedText } from "src/components/Shared/TruncatedText"; import { goBackOrReplace } from "src/utils/history"; import { FormattedDate } from "src/components/Shared/Date"; +import { StudioLogo } from "src/components/Shared/StudioLogo"; interface IProps { gallery: GQL.GalleryDataFragment; @@ -61,6 +62,7 @@ export const GalleryPage: React.FC = ({ gallery, add }) => { const Toast = useToast(); const intl = useIntl(); const { configuration } = useConfigurationContext(); + const { showStudioText } = configuration?.ui ?? 
{}; const showLightbox = useGalleryLightbox(gallery.id, gallery.chapters); const [collapsed, setCollapsed] = useState(false); @@ -165,6 +167,7 @@ export const GalleryPage: React.FC = ({ gallery, add }) => { } const [isDeleteAlertOpen, setIsDeleteAlertOpen] = useState(false); + const [isGenerateDialogOpen, setIsGenerateDialogOpen] = useState(false); function onDeleteDialogClosed(deleted: boolean) { setIsDeleteAlertOpen(false); @@ -184,6 +187,18 @@ export const GalleryPage: React.FC = ({ gallery, add }) => { } } + function maybeRenderGenerateDialog() { + if (isGenerateDialogOpen) { + return ( + setIsGenerateDialogOpen(false)} + type="gallery" + /> + ); + } + } + function renderOperations() { return ( @@ -210,6 +225,12 @@ export const GalleryPage: React.FC = ({ gallery, add }) => { > + setIsGenerateDialogOpen(true)} + > + {`${intl.formatMessage({ id: "actions.generate" })}…`} + setIsDeleteAlertOpen(true)} @@ -387,20 +408,11 @@ export const GalleryPage: React.FC = ({ gallery, add }) => { {title} {maybeRenderDeleteDialog()} + {maybeRenderGenerateDialog()}
- {gallery.studio && ( -

- - {`${gallery.studio.name} - -

- )} +

diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryAddPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryAddPanel.tsx index 275c4263b..e0c115f34 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryAddPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryAddPanel.tsx @@ -1,8 +1,8 @@ -import React from "react"; +import React, { useCallback } from "react"; import * as GQL from "src/core/generated-graphql"; import { GalleriesCriterion } from "src/models/list-filter/criteria/galleries"; import { ListFilterModel } from "src/models/list-filter/filter"; -import { ImageList } from "src/components/Images/ImageList"; +import { FilteredImageList } from "src/components/Images/ImageList"; import { showWhenSelected } from "src/components/List/ItemList"; import { mutateAddGalleryImages } from "src/core/StashService"; import { useToast } from "src/hooks/Toast"; @@ -24,40 +24,43 @@ export const GalleryAddPanel: React.FC = PatchComponent( const Toast = useToast(); const intl = useIntl(); - function filterHook(filter: ListFilterModel) { - const galleryValue = { - id: gallery.id, - label: galleryTitle(gallery), - }; - // if galleries is already present, then we modify it, otherwise add - let galleryCriterion = filter.criteria.find((c) => { - return c.criterionOption.type === "galleries"; - }) as GalleriesCriterion | undefined; + const filterHook = useCallback( + (filter: ListFilterModel) => { + const galleryValue = { + id: gallery.id, + label: galleryTitle(gallery), + }; + // if galleries is already present, then we modify it, otherwise add + let galleryCriterion = filter.criteria.find((c) => { + return c.criterionOption.type === "galleries"; + }) as GalleriesCriterion | undefined; - if ( - galleryCriterion && - galleryCriterion.modifier === GQL.CriterionModifier.Excludes - ) { - // add the gallery if not present if ( - !galleryCriterion.value.find((p) => { - return p.id === gallery.id; - }) + galleryCriterion && + 
galleryCriterion.modifier === GQL.CriterionModifier.Excludes ) { - galleryCriterion.value.push(galleryValue); + // add the gallery if not present + if ( + !galleryCriterion.value.find((p) => { + return p.id === gallery.id; + }) + ) { + galleryCriterion.value.push(galleryValue); + } + + galleryCriterion.modifier = GQL.CriterionModifier.Excludes; + } else { + // overwrite + galleryCriterion = new GalleriesCriterion(); + galleryCriterion.modifier = GQL.CriterionModifier.Excludes; + galleryCriterion.value = [galleryValue]; + filter.criteria.push(galleryCriterion); } - galleryCriterion.modifier = GQL.CriterionModifier.Excludes; - } else { - // overwrite - galleryCriterion = new GalleriesCriterion(); - galleryCriterion.modifier = GQL.CriterionModifier.Excludes; - galleryCriterion.value = [galleryValue]; - filter.criteria.push(galleryCriterion); - } - - return filter; - } + return filter; + }, + [gallery] + ); async function addImages( result: GQL.FindImagesQueryResult, @@ -100,7 +103,7 @@ export const GalleryAddPanel: React.FC = PatchComponent( ]; return ( - { const [createGallery] = useGalleryCreate(); - async function onSave(input: GQL.GalleryCreateInput) { + async function onSave(input: GQL.GalleryCreateInput, andNew?: boolean) { const result = await createGallery({ variables: { input }, }); if (result.data?.galleryCreate) { - history.push(`/galleries/${result.data.galleryCreate.id}`); + if (!andNew) { + history.push(`/galleries/${result.data.galleryCreate.id}`); + } Toast.success( intl.formatMessage( { id: "toast.created_entity" }, diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx index 597a57b15..ead882ec0 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx @@ -6,6 +6,7 @@ import { TagLink } from "src/components/Shared/TagLink"; import { PerformerCard } 
from "src/components/Performers/PerformerCard"; import { sortPerformers } from "src/core/performers"; import { PhotographerLink } from "src/components/Shared/Link"; +import { CustomFields } from "src/components/Shared/CustomFields"; interface IGalleryDetailProps { gallery: GQL.GalleryDataFragment; @@ -108,6 +109,7 @@ export const GalleryDetailPanel: React.FC = ({ {renderDetails()} {renderTags()} {renderPerformers()} +

diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx index fe7959c55..14b5d6aad 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx @@ -1,7 +1,7 @@ import React, { useEffect, useMemo, useState } from "react"; import { FormattedMessage, useIntl } from "react-intl"; import { Prompt } from "react-router-dom"; -import { Button, Form, Col, Row } from "react-bootstrap"; +import { Button, Dropdown, Form, Col, Row, SplitButton } from "react-bootstrap"; import Mousetrap from "mousetrap"; import * as GQL from "src/core/generated-graphql"; import * as yup from "yup"; @@ -31,11 +31,16 @@ import { Studio, StudioSelect } from "src/components/Studios/StudioSelect"; import { Scene, SceneSelect } from "src/components/Scenes/SceneSelect"; import { useTagsEdit } from "src/hooks/tagsEdit"; import { ScraperMenu } from "src/components/Shared/ScraperMenu"; +import { + CustomFieldsInput, + formatCustomFieldInput, +} from "src/components/Shared/CustomFields"; +import { cloneDeep } from "@apollo/client/utilities"; interface IProps { gallery: Partial; isVisible: boolean; - onSubmit: (input: GQL.GalleryCreateInput) => Promise; + onSubmit: (input: GQL.GalleryCreateInput, andNew?: boolean) => Promise; onDelete: () => void; } @@ -76,6 +81,7 @@ export const GalleryEditPanel: React.FC = ({ tag_ids: yup.array(yup.string().required()).defined(), scene_ids: yup.array(yup.string().required()).defined(), details: yup.string().ensure(), + custom_fields: yup.object().required().defined(), }); const initialValues = { @@ -89,15 +95,26 @@ export const GalleryEditPanel: React.FC = ({ tag_ids: (gallery?.tags ?? []).map((t) => t.id), scene_ids: (gallery?.scenes ?? []).map((s) => s.id), details: gallery?.details ?? "", + custom_fields: cloneDeep(gallery?.custom_fields ?? 
{}), }; type InputValues = yup.InferType; + const [customFieldsError, setCustomFieldsError] = useState(); + + function submit(values: InputValues) { + const input = { + ...schema.cast(values), + custom_fields: formatCustomFieldInput(isNew, values.custom_fields), + }; + onSave(input); + } + const formik = useFormik({ initialValues, enableReinitialize: true, validate: yupFormikValidate(schema), - onSubmit: (values) => onSave(schema.cast(values)), + onSubmit: submit, }); const { tags, updateTagsStateFromScraper, tagsControl } = useTagsEdit( @@ -177,10 +194,10 @@ export const GalleryEditPanel: React.FC = ({ return
; }, [gallery?.paths?.cover, intl]); - async function onSave(input: InputValues) { + async function onSave(input: InputValues, andNew?: boolean) { setIsLoading(true); try { - await onSubmit(input); + await onSubmit(input, andNew); formik.resetForm(); } catch (e) { Toast.error(e); @@ -188,6 +205,14 @@ export const GalleryEditPanel: React.FC = ({ setIsLoading(false); } + async function onSaveAndNewClick() { + const input = { + ...schema.cast(formik.values), + custom_fields: formatCustomFieldInput(isNew, formik.values.custom_fields), + }; + onSave(input, true); + } + async function onScrapeClicked(s: GQL.ScraperSourceInput) { if (!gallery || !gallery.id) return; @@ -445,16 +470,35 @@ export const GalleryEditPanel: React.FC = ({
- + {isNew ? ( + formik.submitForm()} + > + onSaveAndNewClick()}> + + + + ) : ( + + )} +
+ + ); +}; interface IGalleryList { filterHook?: (filter: ListFilterModel) => ListFilterModel; @@ -33,180 +207,323 @@ interface IGalleryList { extraOperations?: IItemListOperation[]; } -export const GalleryList: React.FC = PatchComponent( - "GalleryList", - ({ filterHook, view, alterQuery, extraOperations = [] }) => { - const intl = useIntl(); - const history = useHistory(); - const [isExportDialogOpen, setIsExportDialogOpen] = useState(false); - const [isExportAll, setIsExportAll] = useState(false); +function useViewRandom(filter: ListFilterModel, count: number) { + const history = useHistory(); - const filterMode = GQL.FilterMode.Galleries; + const viewRandom = useCallback(async () => { + // query for a random scene + if (count === 0) { + return; + } + + const index = Math.floor(Math.random() * count); + const filterCopy = cloneDeep(filter); + filterCopy.itemsPerPage = 1; + filterCopy.currentPage = index + 1; + const singleResult = await queryFindGalleries(filterCopy); + if (singleResult.data.findGalleries.galleries.length === 1) { + const { id } = singleResult.data.findGalleries.galleries[0]; + // navigate to the image player page + history.push(`/galleries/${id}`); + } + }, [history, filter, count]); + + return viewRandom; +} + +function useAddKeybinds(filter: ListFilterModel, count: number) { + const viewRandom = useViewRandom(filter, count); + + useEffect(() => { + Mousetrap.bind("p r", () => { + viewRandom(); + }); + + return () => { + Mousetrap.unbind("p r"); + }; + }, [viewRandom]); +} + +export const FilteredGalleryList = PatchComponent( + "FilteredGalleryList", + (props: IGalleryList) => { + const intl = useIntl(); + + const searchFocus = useFocus(); + + const { filterHook, view, alterQuery, extraOperations = [] } = props; + + // States + const { + showSidebar, + setShowSidebar, + sectionOpen, + setSectionOpen, + loading: sidebarStateLoading, + } = useSidebarState(view); + + const { filterState, queryResult, modalState, listSelect, showEditFilter } = + 
useFilteredItemList({ + filterStateProps: { + filterMode: GQL.FilterMode.Galleries, + view, + useURL: alterQuery, + }, + queryResultProps: { + useResult: useFindGalleries, + getCount: (r) => r.data?.findGalleries.count ?? 0, + getItems: (r) => r.data?.findGalleries.galleries ?? [], + filterHook, + }, + }); + + const { filter, setFilter } = filterState; + + const { effectiveFilter, result, cachedResult, items, totalCount } = + queryResult; + + const { + selectedIds, + selectedItems, + onSelectChange, + onSelectAll, + onSelectNone, + onInvertSelection, + hasSelection, + } = listSelect; + + const { modal, showModal, closeModal } = modalState; + + // Utility hooks + const { setPage, removeCriterion, clearAllCriteria } = useFilterOperations({ + filter, + setFilter, + }); + + useAddKeybinds(effectiveFilter, totalCount); + useFilteredSidebarKeybinds({ + showSidebar, + setShowSidebar, + }); + + useEffect(() => { + Mousetrap.bind("e", () => { + if (hasSelection) { + onEdit?.(); + } + }); + + Mousetrap.bind("d d", () => { + if (hasSelection) { + onDelete?.(); + } + }); + + return () => { + Mousetrap.unbind("e"); + Mousetrap.unbind("d d"); + }; + }); + + const onCloseEditDelete = useCloseEditDelete({ + closeModal, + onSelectNone, + result, + }); + + const viewRandom = useViewRandom(effectiveFilter, totalCount); + + function onExport(all: boolean) { + showModal( + closeModal()} + /> + ); + } + + function onEdit() { + showModal( + + ); + } + + function onDelete() { + showModal( + + ); + } + + function onGenerate() { + showModal( + closeModal()} + /> + ); + } + + const convertedExtraOperations: IListFilterOperation[] = + extraOperations.map((o) => ({ + ...o, + isDisplayed: o.isDisplayed + ? 
() => o.isDisplayed!(result, filter, selectedIds) + : undefined, + onClick: () => { + o.onClick(result, filter, selectedIds); + }, + })); const otherOperations = [ - ...extraOperations, + ...convertedExtraOperations, + { + text: intl.formatMessage({ id: "actions.select_all" }), + onClick: () => onSelectAll(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.select_none" }), + onClick: () => onSelectNone(), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.invert_selection" }), + onClick: () => onInvertSelection(), + isDisplayed: () => totalCount > 0, + }, { text: intl.formatMessage({ id: "actions.view_random" }), onClick: viewRandom, }, + { + text: `${intl.formatMessage({ id: "actions.generate" })}…`, + onClick: onGenerate, + isDisplayed: () => hasSelection, + }, { text: intl.formatMessage({ id: "actions.export" }), - onClick: onExport, - isDisplayed: showWhenSelected, + onClick: () => onExport(false), + isDisplayed: () => hasSelection, }, { text: intl.formatMessage({ id: "actions.export_all" }), - onClick: onExportAll, + onClick: () => onExport(true), }, ]; - function addKeybinds( - result: GQL.FindGalleriesQueryResult, - filter: ListFilterModel - ) { - Mousetrap.bind("p r", () => { - viewRandom(result, filter); - }); + // render + if (sidebarStateLoading) return null; - return () => { - Mousetrap.unbind("p r"); - }; - } - - async function viewRandom( - result: GQL.FindGalleriesQueryResult, - filter: ListFilterModel - ) { - // query for a random image - if (result.data?.findGalleries) { - const { count } = result.data.findGalleries; - - const index = Math.floor(Math.random() * count); - const filterCopy = cloneDeep(filter); - filterCopy.itemsPerPage = 1; - filterCopy.currentPage = index + 1; - const singleResult = await queryFindGalleries(filterCopy); - if (singleResult.data.findGalleries.galleries.length === 1) { - const { id } = singleResult.data.findGalleries.galleries[0]; - // navigate 
to the image player page - history.push(`/galleries/${id}`); - } - } - } - - async function onExport() { - setIsExportAll(false); - setIsExportDialogOpen(true); - } - - async function onExportAll() { - setIsExportAll(true); - setIsExportDialogOpen(true); - } - - function renderContent( - result: GQL.FindGalleriesQueryResult, - filter: ListFilterModel, - selectedIds: Set, - onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void - ) { - function maybeRenderGalleryExportDialog() { - if (isExportDialogOpen) { - return ( - setIsExportDialogOpen(false)} - /> - ); - } - } - - function renderGalleries() { - if (!result.data?.findGalleries) return; - - if (filter.displayMode === DisplayMode.Grid) { - return ( - - ); - } - if (filter.displayMode === DisplayMode.List) { - return ( - - ); - } - if (filter.displayMode === DisplayMode.Wall) { - return ( -
-
- {result.data.findGalleries.galleries.map((gallery) => ( - - ))} -
-
- ); - } - } - - return ( - <> - {maybeRenderGalleryExportDialog()} - {renderGalleries()} - - ); - } - - function renderEditDialog( - selectedImages: GQL.SlimGalleryDataFragment[], - onClose: (applied: boolean) => void - ) { - return ( - - ); - } - - function renderDeleteDialog( - selectedImages: GQL.SlimGalleryDataFragment[], - onClose: (confirmed: boolean) => void - ) { - return ( - - ); - } + const operations = ( + + ); return ( - - - + {modal} + + + + setShowSidebar(false)}> + setShowSidebar(false)} + count={cachedResult.loading ? undefined : totalCount} + focus={searchFocus} + /> + + setShowSidebar(!showSidebar)} + > + + + showEditFilter(c.criterionOption.type)} + onRemoveCriterion={removeCriterion} + onRemoveAll={clearAllCriteria} + /> + +
+ setFilter(filter.changePage(page))} + /> + +
+ + + + + + {totalCount > filter.itemsPerPage && ( +
+
+ +
+
+ )} +
+
+
+
); } ); diff --git a/ui/v2.5/src/components/Galleries/GalleryPreviewScrubber.tsx b/ui/v2.5/src/components/Galleries/GalleryPreviewScrubber.tsx index ef47782bf..5c0a07356 100644 --- a/ui/v2.5/src/components/Galleries/GalleryPreviewScrubber.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryPreviewScrubber.tsx @@ -10,6 +10,7 @@ export const GalleryPreviewScrubber: React.FC<{ imageCount: number; onClick?: (imageIndex: number) => void; onPathChanged: React.Dispatch>; + disabled?: boolean; }> = ({ className, previewPath, @@ -17,6 +18,7 @@ export const GalleryPreviewScrubber: React.FC<{ imageCount, onClick, onPathChanged, + disabled, }) => { const [activeIndex, setActiveIndex] = useState(); const debounceSetActiveIndex = useThrottle(setActiveIndex, 50); @@ -48,6 +50,7 @@ export const GalleryPreviewScrubber: React.FC<{ activeIndex={activeIndex} setActiveIndex={(i) => debounceSetActiveIndex(i)} onClick={onScrubberClick} + disabled={disabled} />
); diff --git a/ui/v2.5/src/components/Galleries/GalleryRecommendationRow.tsx b/ui/v2.5/src/components/Galleries/GalleryRecommendationRow.tsx index ee94d6da2..3df07b643 100644 --- a/ui/v2.5/src/components/Galleries/GalleryRecommendationRow.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryRecommendationRow.tsx @@ -1,12 +1,9 @@ import React from "react"; -import { Link } from "react-router-dom"; import { useFindGalleries } from "src/core/StashService"; -import Slider from "@ant-design/react-slick"; import { GalleryCard } from "./GalleryCard"; import { ListFilterModel } from "src/models/list-filter/filter"; -import { getSlickSliderSettings } from "src/core/recommendations"; -import { RecommendationRow } from "../FrontPage/RecommendationRow"; -import { FormattedMessage } from "react-intl"; +import { PatchComponent } from "src/patch"; +import { FilteredRecommendationRow } from "../FrontPage/FilteredRecommendationRow"; interface IProps { isTouch: boolean; @@ -14,29 +11,21 @@ interface IProps { header: string; } -export const GalleryRecommendationRow: React.FC = (props) => { - const result = useFindGalleries(props.filter); - const cardCount = result.data?.findGalleries.count; +export const GalleryRecommendationRow: React.FC = PatchComponent( + "GalleryRecommendationRow", + (props) => { + const result = useFindGalleries(props.filter); + const count = result.data?.findGalleries.count ?? 0; - if (!result.loading && !cardCount) { - return null; - } - - return ( - - - - } - > - {result.loading ? 
[...Array(props.filter.itemsPerPage)].map((i) => ( @@ -48,7 +37,7 @@ export const GalleryRecommendationRow: React.FC = (props) => { : result.data?.findGalleries.galleries.map((g) => ( ))} - - - ); -}; + + ); + } +); diff --git a/ui/v2.5/src/components/Galleries/GalleryWallCard.tsx b/ui/v2.5/src/components/Galleries/GalleryWallCard.tsx index c57bf45ad..c79000783 100644 --- a/ui/v2.5/src/components/Galleries/GalleryWallCard.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryWallCard.tsx @@ -1,4 +1,5 @@ import React, { useState } from "react"; +import { Form } from "react-bootstrap"; import { useIntl } from "react-intl"; import { Link } from "react-router-dom"; import * as GQL from "src/core/generated-graphql"; @@ -8,6 +9,7 @@ import { useGalleryLightbox } from "src/hooks/Lightbox/hooks"; import { galleryTitle } from "src/core/galleries"; import { RatingSystem } from "../Shared/Rating/RatingSystem"; import { GalleryPreviewScrubber } from "./GalleryPreviewScrubber"; +import { useDragMoveSelect } from "../Shared/GridCard/dragMoveSelect"; import cx from "classnames"; const CLASSNAME = "GalleryWallCard"; @@ -18,6 +20,9 @@ const CLASSNAME_IMG_CONTAIN = `${CLASSNAME}-img-contain`; interface IProps { gallery: GQL.SlimGalleryDataFragment; + selected?: boolean; + onSelectedChanged?: (selected: boolean, shiftKey: boolean) => void; + selecting?: boolean; } type Orientation = "landscape" | "portrait"; @@ -26,7 +31,12 @@ function getOrientation(width: number, height: number): Orientation { return width > height ? 
"landscape" : "portrait"; } -const GalleryWallCard: React.FC = ({ gallery }) => { +const GalleryWallCard: React.FC = ({ + gallery, + selected, + onSelectedChanged, + selecting, +}) => { const intl = useIntl(); const [coverOrientation, setCoverOrientation] = React.useState("landscape"); @@ -34,6 +44,12 @@ const GalleryWallCard: React.FC = ({ gallery }) => { React.useState("landscape"); const showLightbox = useGalleryLightbox(gallery.id, gallery.chapters); + const { dragProps } = useDragMoveSelect({ + selecting: selecting || false, + selected: selected || false, + onSelectedChanged: onSelectedChanged, + }); + const cover = gallery?.paths.cover; function onCoverLoad(e: React.SyntheticEvent) { @@ -58,6 +74,14 @@ const GalleryWallCard: React.FC = ({ gallery }) => { ? [...performerNames.slice(0, -2), performerNames.slice(-2).join(" & ")] : performerNames; + function handleCardClick(event: React.MouseEvent) { + if (selecting && onSelectedChanged) { + onSelectedChanged(!selected, event.shiftKey); + return; + } + showLightboxStart(); + } + async function showLightboxStart() { if (gallery.image_count === 0) { return; @@ -69,15 +93,32 @@ const GalleryWallCard: React.FC = ({ gallery }) => { const imgClassname = imageOrientation !== coverOrientation ? CLASSNAME_IMG_CONTAIN : ""; + let shiftKey = false; + return ( <>
showLightboxStart()} role="button" tabIndex={0} + {...dragProps} > + {onSelectedChanged && ( + onSelectedChanged(!selected, shiftKey)} + onClick={( + event: React.MouseEvent + ) => { + shiftKey = event.shiftKey; + event.stopPropagation(); + }} + /> + )} = ({ gallery }) => {