diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index ea06d6d43..000000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,40 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: "[Bug Report] Short Form Subject (50 Chars or less)" -labels: bug report -assignees: '' - ---- - -**Describe the bug** -A clear and concise description of what the bug is. - -**To Reproduce** -Steps to reproduce the behavior: -1. Go to '...' -2. Click on '....' -3. Scroll down to '....' -4. See error - -**Expected behavior** -A clear and concise description of what you expected to happen. - -**Screenshots** -If applicable, add screenshots to help explain your problem please ensure that your screenshots are SFW or at least appropriately censored. - -**Stash Version: (from Settings -> About):** - -**Desktop (please complete the following information):** - - OS: [e.g. iOS] - - Browser [e.g. chrome, safari] - - Version [e.g. 22] - -**Smartphone (please complete the following information):** - - Device: [e.g. iPhone6] - - OS: [e.g. iOS8.1] - - Browser [e.g. stock browser, safari] - - Version [e.g. 22] - -**Additional context** -Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 000000000..0dc6d10a8 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,64 @@ +name: Bug Report +description: Create a report to help us fix the bug +labels: ["bug report"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this bug report! + - type: textarea + id: description + attributes: + label: Describe the bug + description: Provide a clear and concise description of what the bug is. 
+ validations: + required: true + - type: textarea + id: reproduction + attributes: + label: Steps to reproduce + description: Detail the steps that would replicate this issue. + placeholder: | + 1. Go to '...' + 2. Click on '....' + 3. Scroll down to '....' + 4. See error + validations: + required: true + - type: textarea + id: expected + attributes: + label: Expected behaviour + description: Provide clear and concise description of what you expected to happen. + validations: + required: true + - type: textarea + id: context + attributes: + label: Screenshots or additional context + description: Provide any additional context and SFW screenshots here to help us solve this issue. + validations: + required: false + - type: input + id: stashversion + attributes: + label: Stash version + description: This can be found in Settings > About. + placeholder: (e.g. v0.28.1) + validations: + required: true + - type: input + id: devicedetails + attributes: + label: Device details + description: | + If this is an issue that occurs when using the Stash interface, please provide details of the device/browser used which presents the reported issue. + placeholder: (e.g. Firefox 97 (64-bit) on Windows 11) + validations: + required: false + - type: textarea + id: logs + attributes: + label: Relevant log output + description: Please copy and paste any relevant log output from Settings > Logs. This will be automatically formatted into code, so no need for backticks. + render: shell \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 000000000..028fdf8ac --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,11 @@ +blank_issues_enabled: false +contact_links: + - name: Community forum + url: https://discourse.stashapp.cc + about: Start a discussion on the community forum. + - name: Community Discord + url: https://discord.gg/Y8MNsvQBvZ + about: Chat with the community on Discord. 
+ - name: Documentation + url: https://docs.stashapp.cc + about: Check out documentation for help and information. \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/discussion---request-for-commentary--rfc-.md b/.github/ISSUE_TEMPLATE/discussion---request-for-commentary--rfc-.md deleted file mode 100644 index b79564f83..000000000 --- a/.github/ISSUE_TEMPLATE/discussion---request-for-commentary--rfc-.md +++ /dev/null @@ -1,24 +0,0 @@ ---- -name: Discussion / Request for Commentary [RFC] -about: This is for issues that will be discussed and won't necessarily result directly - in commits or pull requests. -title: "[RFC] Short Form Title" -labels: help wanted -assignees: '' - ---- - - - -## Long Form - - -## Examples - - -## Reference Reading - diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index db5df9d8b..000000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this project -title: "[Feature] Short Form Title (50 chars or less.)" -labels: feature request -assignees: '' - ---- - -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - -**Describe the solution you'd like** -A clear and concise description of what you want to happen. - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - -**Additional context** -Add any other context or screenshots about the feature request here. 
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 000000000..f139433c5 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,44 @@ +name: Feature Request +description: Request a new feature or idea to be added to Stash +labels: ["feature request"] +body: + - type: textarea + id: description + attributes: + label: Describe the feature you'd like + description: Provide a clear description of the feature you'd like implemented + validations: + required: true + - type: textarea + id: benefits + attributes: + label: Describe the benefits this would bring to existing users + description: | + Explain the measurable benefits this feature would achieve for existing users. + The benefits should be described in terms of outcomes for users, not specific implementations. + validations: + required: true + - type: textarea + id: already_possible + attributes: + label: Is there an existing way to achieve this goal? + description: | + Yes/No. If Yes, describe how your proposed feature differs from or improves upon the current method + validations: + required: true + - type: checkboxes + id: confirm-search + attributes: + label: Have you searched for an existing open/closed issue? + description: | + To help us keep these issues under control, please ensure you have first [searched our issue list](https://github.com/stashapp/stash/issues?q=is%3Aissue) for any existing issues that cover the core request or benefit of your proposal. + options: + - label: I have searched for existing issues and none cover the core request of my proposal + required: true + - type: textarea + id: context + attributes: + label: Additional context + description: Add any other context or screenshots about the feature request here. 
+ validations: + required: false \ No newline at end of file diff --git a/.github/workflows/build-compiler.yml b/.github/workflows/build-compiler.yml new file mode 100644 index 000000000..e7881720b --- /dev/null +++ b/.github/workflows/build-compiler.yml @@ -0,0 +1,28 @@ +name: Compiler Build + +on: + workflow_dispatch: + +env: + COMPILER_IMAGE: ghcr.io/stashapp/compiler:13 + +jobs: + build-compiler: + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v6 + - uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + - uses: docker/setup-buildx-action@v3 + - uses: docker/build-push-action@v6 + with: + push: true + context: "{{defaultContext}}:docker/compiler" + tags: | + ${{ env.COMPILER_IMAGE }} + ghcr.io/stashapp/compiler:latest + cache-from: type=gha,scope=all,mode=max + cache-to: type=gha,scope=all,mode=max \ No newline at end of file diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 92c98effc..c068b46f0 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -2,7 +2,10 @@ name: Build on: push: - branches: [ develop, master ] + branches: + - develop + - master + - 'releases/**' pull_request: release: types: [ published ] @@ -12,50 +15,163 @@ concurrency: cancel-in-progress: true env: - COMPILER_IMAGE: stashapp/compiler:11 + COMPILER_IMAGE: ghcr.io/stashapp/compiler:13 jobs: - build: - runs-on: ubuntu-22.04 + # Job 1: Generate code and build UI + # Runs natively (no Docker) — go generate/gqlgen and node don't need cross-compilers. + # Produces artifacts (generated Go files + UI build) consumed by test and build jobs. 
generate: + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + fetch-tags: true + - name: Setup Go + uses: actions/setup-go@v6 - - name: Checkout - run: git fetch --prune --unshallow --tags + # pnpm version is read from the packageManager field in package.json + # versions 4.3 and 4.4 of this action are very broken, hence the commit pin below + - name: Install pnpm + uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 + with: + package_json_file: ui/v2.5/package.json + + - name: Setup Node.js + uses: actions/setup-node@v6 + with: + node-version: '20' + cache: 'pnpm' + cache-dependency-path: ui/v2.5/pnpm-lock.yaml + + - name: Install UI dependencies + run: cd ui/v2.5 && pnpm install --frozen-lockfile + + - name: Generate + run: make generate + + - name: Cache UI build + uses: actions/cache@v5 + id: cache-ui + with: + path: ui/v2.5/build + key: ${{ runner.os }}-ui-build-${{ hashFiles('ui/v2.5/pnpm-lock.yaml', 'ui/v2.5/public/**', 'ui/v2.5/src/**', 'graphql/**/*.graphql') }} + + - name: Validate UI + # skip UI validation for pull requests if UI is unchanged + if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }} + run: make validate-ui + + - name: Build UI + # skip UI build for pull requests if UI is unchanged (UI was cached) + if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }} + run: make ui + + # Bundle generated Go files + UI build for downstream jobs (test + build) + - name: Upload generated artifacts + uses: actions/upload-artifact@v7 + with: + name: generated + retention-days: 1 + path: | + internal/api/generated_exec.go + internal/api/generated_models.go + ui/v2.5/build/ + ui/login/locales/ + + # Job 2: Integration tests + # Runs natively (no Docker) — only needs Go + GCC (for CGO/SQLite), both on ubuntu-24.04. + # Runs in parallel with the build matrix jobs. 
+ test: + needs: generate + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v6 - name: Setup Go - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: go-version-file: 'go.mod' - - name: Pull compiler image - run: docker pull $COMPILER_IMAGE - - - name: Cache node modules - uses: actions/cache@v3 - env: - cache-name: cache-node_modules + # Places generated Go files + UI build into the working tree so the build compiles + - name: Download generated artifacts + uses: actions/download-artifact@v8 with: - path: ui/v2.5/node_modules - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/yarn.lock') }} + name: generated - - name: Cache UI build - uses: actions/cache@v3 - id: cache-ui - env: - cache-name: cache-ui + - name: Test Backend + run: make it + + # Job 3: Cross-compile for all platforms + # Each platform gets its own runner and Docker container (ghcr.io/stashapp/compiler:13). + # Each build-cc-* make target is self-contained (sets its own GOOS/GOARCH/CC), + # so running them in separate containers is functionally identical to one container. + # Runs in parallel with the test job. 
+ build: + needs: generate + runs-on: ubuntu-24.04 + strategy: + fail-fast: false + matrix: + include: + - platform: windows + make-target: build-cc-windows + artifact-paths: | + dist/stash-win.exe + tag: win + - platform: macos + make-target: build-cc-macos + artifact-paths: | + dist/stash-macos + dist/Stash.app.zip + tag: osx + - platform: linux + make-target: build-cc-linux + artifact-paths: | + dist/stash-linux + tag: linux + - platform: linux-arm64v8 + make-target: build-cc-linux-arm64v8 + artifact-paths: | + dist/stash-linux-arm64v8 + tag: arm + - platform: linux-arm32v7 + make-target: build-cc-linux-arm32v7 + artifact-paths: | + dist/stash-linux-arm32v7 + tag: arm + - platform: linux-arm32v6 + make-target: build-cc-linux-arm32v6 + artifact-paths: | + dist/stash-linux-arm32v6 + tag: arm + - platform: freebsd + make-target: build-cc-freebsd + artifact-paths: | + dist/stash-freebsd + tag: freebsd + + steps: + - uses: actions/checkout@v6 with: - path: ui/v2.5/build - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/yarn.lock', 'ui/v2.5/public/**', 'ui/v2.5/src/**', 'graphql/**/*.graphql') }} + fetch-depth: 0 + fetch-tags: true - - name: Cache go build - uses: actions/cache@v3 - env: - # increment the number suffix to bump the cache - cache-name: cache-go-cache-1 + - name: Download generated artifacts + uses: actions/download-artifact@v8 + with: + name: generated + + - name: Cache Go build + uses: actions/cache@v5 with: path: .go-cache - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('go.mod', '**/go.sum') }} + key: ${{ runner.os }}-go-cache-${{ matrix.platform }}-${{ hashFiles('go.mod', '**/go.sum') }} + + # kept separate to test timings + - name: pull compiler image + run: docker pull $COMPILER_IMAGE - name: Start build container env: @@ -64,45 +180,50 @@ mkdir -p .go-cache docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount 
type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated --env OFFICIAL_BUILD=${{ env.official-build }} -w /stash $COMPILER_IMAGE tail -f /dev/null - - name: Pre-install - run: docker exec -t build /bin/bash -c "make pre-ui" - - - name: Generate - run: docker exec -t build /bin/bash -c "make generate" - - - name: Validate UI - # skip UI validation for pull requests if UI is unchanged - if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }} - run: docker exec -t build /bin/bash -c "make validate-ui" - - # Static validation happens in the linter workflow in parallel to this workflow - # Run Dynamic validation here, to make sure we pass all the projects integration tests - - name: Test Backend - run: docker exec -t build /bin/bash -c "make it" - - - name: Build UI - # skip UI build for pull requests if UI is unchanged (UI was cached) - # this means that the build version/time may be incorrect if the UI is - # not changed in a pull request - if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }} - run: docker exec -t build /bin/bash -c "make ui" - - - name: Compile for all supported platforms - run: | - docker exec -t build /bin/bash -c "make build-cc-windows" - docker exec -t build /bin/bash -c "make build-cc-macos" - docker exec -t build /bin/bash -c "make build-cc-linux" - docker exec -t build /bin/bash -c "make build-cc-linux-arm64v8" - docker exec -t build /bin/bash -c "make build-cc-linux-arm32v7" - docker exec -t build /bin/bash -c "make build-cc-linux-arm32v6" - docker exec -t build /bin/bash -c "make build-cc-freebsd" - - - name: Zip UI - run: docker exec -t build /bin/bash -c "make zip-ui" + - name: Build (${{ matrix.platform }}) + run: docker exec -t build /bin/bash -c "make ${{ matrix.make-target }}" - name: Cleanup build container run: docker rm -f -v build + - name: Upload build artifact + uses: actions/upload-artifact@v7 + with: + name: build-${{ 
matrix.platform }} + retention-days: 1 + path: ${{ matrix.artifact-paths }} + + # Job 4: Release + # Waits for both test and build to pass, then collects all platform artifacts + # into dist/ for checksums, GitHub releases, and multi-arch Docker push. + release: + needs: [test, build] + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + fetch-tags: true + + # Downloads all artifacts (generated + 7 platform builds) into artifacts/ subdirectories + - name: Download all build artifacts + uses: actions/download-artifact@v8 + with: + path: artifacts + + # Reassemble platform binaries from matrix job artifacts into a single dist/ directory + # make sure that artifacts have executable bit set + # upload-artifact (v4 and later) strips the common path prefix (dist/), so files are at the artifact root + - name: Collect binaries + run: | + mkdir -p dist + cp artifacts/build-*/* dist/ + chmod +x dist/* + + - name: Zip UI + run: | + cd artifacts/generated/ui/v2.5/build && zip -r ../../../../../dist/stash-ui.zip . 
+ - name: Generate checksums run: | git describe --tags --exclude latest_develop | tee CHECKSUMS_SHA1 @@ -113,7 +234,7 @@ jobs: - name: Upload Windows binary # only upload binaries for pull requests if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}} - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 with: name: stash-win.exe path: dist/stash-win.exe @@ -121,15 +242,23 @@ jobs: - name: Upload macOS binary # only upload binaries for pull requests if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}} - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 with: name: stash-macos path: dist/stash-macos + - name: Upload macOS bundle + # only upload binaries for pull requests + if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}} + uses: actions/upload-artifact@v7 + with: + name: Stash.app.zip + path: dist/Stash.app.zip + - name: Upload Linux binary # only upload binaries for pull requests if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}} - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 with: name: stash-linux path: dist/stash-linux @@ -137,14 +266,14 @@ jobs: - name: Upload UI # only upload for pull requests if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}} - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v7 with: name: stash-ui.zip path: dist/stash-ui.zip - name: Update latest_develop tag if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }} - run : git tag -f latest_develop; git push -f --tags + run: git tag -f latest_develop; git push -f --tags - name: Development Release if: ${{ 
github.event_name == 'push' && github.ref == 'refs/heads/develop' }} @@ -194,7 +323,7 @@ jobs: DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} run: | - docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64 + docker run --rm --privileged tonistiigi/binfmt docker info docker buildx create --name builder --use docker buildx inspect --bootstrap @@ -210,7 +339,7 @@ jobs: DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} run: | - docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64 + docker run --rm --privileged tonistiigi/binfmt docker info docker buildx create --name builder --use docker buildx inspect --bootstrap diff --git a/.github/workflows/golangci-lint.yml b/.github/workflows/golangci-lint.yml index 1b7838b62..19a6d62bd 100644 --- a/.github/workflows/golangci-lint.yml +++ b/.github/workflows/golangci-lint.yml @@ -6,67 +6,23 @@ on: branches: - master - develop + - 'releases/**' pull_request: -env: - COMPILER_IMAGE: stashapp/compiler:11 - jobs: golangci: name: lint runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - - name: Checkout - run: git fetch --prune --unshallow --tags - - - name: Setup Go - uses: actions/setup-go@v5 - with: - go-version-file: 'go.mod' - - - name: Pull compiler image - run: docker pull $COMPILER_IMAGE - - - name: Start build container - run: | - mkdir -p .go-cache - docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated -w /stash $COMPILER_IMAGE tail -f /dev/null + # no tags or depth needed for lint + - uses: actions/checkout@v6 + - uses: actions/setup-go@v6 + # generate-backend runs natively (just go generate + touch-ui) — no Docker needed - name: Generate Backend - run: docker exec -t build /bin/bash -c "make generate-backend" + run: make 
generate-backend + ## WARN + ## using v1, update in a later PR - name: Run golangci-lint - uses: golangci/golangci-lint-action@v6 - with: - # Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version - version: latest - - # Optional: working directory, useful for monorepos - # working-directory: somedir - - # Optional: golangci-lint command line arguments. - # - # Note: By default, the `.golangci.yml` file should be at the root of the repository. - # The location of the configuration file can be changed by using `--config=` - args: --timeout=5m - - # Optional: show only new issues if it's a pull request. The default value is `false`. - # only-new-issues: true - - # Optional: if set to true, then all caching functionality will be completely disabled, - # takes precedence over all other caching options. - # skip-cache: true - - # Optional: if set to true, then the action won't cache or restore ~/go/pkg. - # skip-pkg-cache: true - - # Optional: if set to true, then the action won't cache or restore ~/.cache/go-build. - # skip-build-cache: true - - # Optional: The mode to install golangci-lint. It can be 'binary' or 'goinstall'. 
- # install-mode: "goinstall" - - - name: Cleanup build container - run: docker rm -f -v build + uses: golangci/golangci-lint-action@v6 \ No newline at end of file diff --git a/.idea/go.iml b/.idea/go.iml index eddfcc6c3..86461b085 100644 --- a/.idea/go.iml +++ b/.idea/go.iml @@ -1,5 +1,6 @@ + @@ -10,4 +11,4 @@ - + \ No newline at end of file diff --git a/Makefile b/Makefile index b6d0a9e28..d9caf0ee5 100644 --- a/Makefile +++ b/Makefile @@ -50,7 +50,7 @@ export CGO_ENABLED := 1 # define COMPILER_IMAGE for cross-compilation docker container ifndef COMPILER_IMAGE - COMPILER_IMAGE := stashapp/compiler:latest + COMPILER_IMAGE := ghcr.io/stashapp/compiler:latest endif .PHONY: release @@ -129,7 +129,7 @@ phasher: build-flags # builds dynamically-linked debug binaries .PHONY: build -build: stash phasher +build: stash # builds dynamically-linked PIE release binaries .PHONY: build-release @@ -187,8 +187,6 @@ build-cc-macos: # Combine into universal binaries lipo -create -output dist/stash-macos dist/stash-macos-intel dist/stash-macos-arm rm dist/stash-macos-intel dist/stash-macos-arm - lipo -create -output dist/phasher-macos dist/phasher-macos-intel dist/phasher-macos-arm - rm dist/phasher-macos-intel dist/phasher-macos-arm # Place into bundle and zip up rm -rf dist/Stash.app @@ -198,6 +196,16 @@ build-cc-macos: cd dist && rm -f Stash.app.zip && zip -r Stash.app.zip Stash.app rm -rf dist/Stash.app +.PHONY: build-cc-macos-phasher +build-cc-macos-phasher: + make build-cc-macos-arm + make build-cc-macos-intel + + # Combine into universal binaries + lipo -create -output dist/phasher-macos dist/phasher-macos-intel dist/phasher-macos-arm + rm dist/phasher-macos-intel dist/phasher-macos-arm + # do not bundle phasher + .PHONY: build-cc-freebsd build-cc-freebsd: export GOOS := freebsd build-cc-freebsd: export GOARCH := amd64 @@ -275,7 +283,7 @@ generate: generate-backend generate-ui .PHONY: generate-ui generate-ui: - cd ui/v2.5 && yarn run gqlgen + cd ui/v2.5 && npm run gqlgen 
.PHONY: generate-backend generate-backend: touch-ui @@ -338,9 +346,19 @@ server-clean: # installs UI dependencies. Run when first cloning repository, or if UI # dependencies have changed +# If CI is set, configures pnpm to use a local store to avoid +# putting .pnpm-store in /stash +# NOTE: to run in the docker build container, using the existing +# node_modules folder, rename the .modules.yaml to .modules.yaml.bak +# and a new one will be generated. This will need to be reversed after +# building. .PHONY: pre-ui pre-ui: - cd ui/v2.5 && yarn install --frozen-lockfile +ifdef CI + cd ui/v2.5 && pnpm config set store-dir ~/.pnpm-store && pnpm install --frozen-lockfile +else + cd ui/v2.5 && pnpm install --frozen-lockfile +endif .PHONY: ui-env ui-env: build-info @@ -359,7 +377,7 @@ ui: ui-only generate-login-locale .PHONY: ui-only ui-only: ui-env - cd ui/v2.5 && yarn build + cd ui/v2.5 && npm run build .PHONY: zip-ui zip-ui: @@ -368,20 +386,24 @@ zip-ui: .PHONY: ui-start ui-start: ui-env - cd ui/v2.5 && yarn start --host + cd ui/v2.5 && npm run start -- --host .PHONY: fmt-ui fmt-ui: - cd ui/v2.5 && yarn format + cd ui/v2.5 && npm run format # runs all of the frontend PR-acceptance steps .PHONY: validate-ui validate-ui: - cd ui/v2.5 && yarn run validate + cd ui/v2.5 && npm run validate # these targets run the same steps as fmt-ui and validate-ui, but only on files that have changed fmt-ui-quick: - cd ui/v2.5 && yarn run prettier --write $$(git diff --name-only --relative --diff-filter d . ../../graphql) + cd ui/v2.5 && \ + files=$$(git diff --name-only --relative --diff-filter d . 
../../graphql); \ + if [ -n "$$files" ]; then \ + npm run prettier -- --write $$files; \ + fi # does not run tsc checks, as they are slow validate-ui-quick: @@ -389,9 +411,9 @@ validate-ui-quick: tsfiles=$$(git diff --name-only --relative --diff-filter d src | grep -e "\.tsx\?\$$"); \ scssfiles=$$(git diff --name-only --relative --diff-filter d src | grep "\.scss"); \ prettyfiles=$$(git diff --name-only --relative --diff-filter d . ../../graphql); \ - if [ -n "$$tsfiles" ]; then yarn run eslint $$tsfiles; fi && \ - if [ -n "$$scssfiles" ]; then yarn run stylelint $$scssfiles; fi && \ - if [ -n "$$prettyfiles" ]; then yarn run prettier --check $$prettyfiles; fi + if [ -n "$$tsfiles" ]; then npm run eslint -- $$tsfiles; fi && \ + if [ -n "$$scssfiles" ]; then npm run stylelint -- $$scssfiles; fi && \ + if [ -n "$$prettyfiles" ]; then npm run prettier -- --check $$prettyfiles; fi # runs all of the backend PR-acceptance steps .PHONY: validate-backend diff --git a/README.md b/README.md index c54d94528..2d90a76ea 100644 --- a/README.md +++ b/README.md @@ -9,90 +9,101 @@ [![GitHub release (latest by date)](https://img.shields.io/github/v/release/stashapp/stash?logo=github)](https://github.com/stashapp/stash/releases/latest) [![GitHub issues by-label](https://img.shields.io/github/issues-raw/stashapp/stash/bounty)](https://github.com/stashapp/stash/labels/bounty) -### **Stash is a self-hosted webapp written in Go which organizes and serves your porn.** -![demo image](docs/readme_assets/demo_image.png) +### **Stash is a self-hosted webapp written in Go which organizes and serves your diverse content collection, catering to both your SFW and NSFW needs.** -* Stash gathers information about videos in your collection from the internet, and is extensible through the use of community-built plugins for a large number of content producers and sites. -* Stash supports a wide variety of both video and image formats. -* You can tag videos and find them later. 
-* Stash provides statistics about performers, tags, studios and more. +![Screenshot of Stash web application interface](docs/readme_assets/demo_image.png) + +- Stash gathers information about videos in your collection from the internet, and is extensible through the use of community-built plugins for a large number of content producers and sites. +- Stash supports a wide variety of both video and image formats. +- You can tag videos and find them later. +- Stash provides statistics about performers, tags, studios and more. You can [watch a SFW demo video](https://vimeo.com/545323354) to see it in action. -For further information you can consult the [documentation](https://docs.stashapp.cc) or [read the in-app manual](ui/v2.5/src/docs/en). +For further information you can consult the [documentation](https://docs.stashapp.cc) or access the in-app manual from within the application (also available at [docs.stashapp.cc/in-app-manual](https://docs.stashapp.cc/in-app-manual)). # Installing Stash -#### Windows Users: +> [!tip] +Step-by-step instructions are available at [docs.stashapp.cc/installation](https://docs.stashapp.cc/installation/). -As of version 0.27.0, Stash doesn't support anymore _Windows 7, 8, Server 2008 and Server 2012._ -Windows 10 or Server 2016 are at least required. - -#### Mac Users: - -As of version 0.29.0, Stash requires at least _macOS 11 Big Sur._ -Stash can still be ran through docker on older versions of macOS +> [!important] +>**Windows Users** +> +>As of version 0.27.0, Stash no longer supports _Windows 7, 8, Server 2008 and Server 2012._ +>At least Windows 10 or Server 2016 is required. +> +>**macOS Users** +> +> As of version 0.29.0, Stash requires _macOS 11 Big Sur_ or later. +> Stash can still be run through docker on older versions of macOS. Windows | macOS | Linux | Docker :---:|:---:|:---:|:---: [Latest Release](https://github.com/stashapp/stash/releases/latest/download/stash-win.exe)
[Development Preview](https://github.com/stashapp/stash/releases/download/latest_develop/stash-win.exe) | [Latest Release](https://github.com/stashapp/stash/releases/latest/download/Stash.app.zip)
[Development Preview](https://github.com/stashapp/stash/releases/download/latest_develop/Stash.app.zip) | [Latest Release (amd64)](https://github.com/stashapp/stash/releases/latest/download/stash-linux)
[Development Preview (amd64)](https://github.com/stashapp/stash/releases/download/latest_develop/stash-linux)
[More Architectures...](https://github.com/stashapp/stash/releases/latest) | [Instructions](docker/production/README.md)
[Sample docker-compose.yml](docker/production/docker-compose.yml) -Download links for other platforms and architectures are available on the [Releases page](https://github.com/stashapp/stash/releases). +Download links for other platforms and architectures are available on the [Releases](https://github.com/stashapp/stash/releases) page. ## First Run #### Windows/macOS Users: Security Prompt -On Windows or macOS, running the app might present a security prompt since the binary isn't yet signed. +On Windows or macOS, running the app might present a security prompt since the application binary isn't yet signed. -On Windows, bypass this by clicking "more info" and then the "run anyway" button. On macOS, Control+Click the app, click "Open", and then "Open" again. +- On Windows, bypass this by clicking "more info" and then the "run anyway" button. +- On macOS, Control+Click the app, click "Open", and then "Open" again. -#### FFmpeg -Stash requires FFmpeg. If you don't have it installed, Stash will download a copy for you. It is recommended that Linux users install `ffmpeg` from their distro's package manager. +#### ffmpeg + +Stash requires FFmpeg. If you don't have it installed, Stash will prompt you to download a copy during setup. It is recommended that Linux users install `ffmpeg` from their distro's package manager. # Usage ## Quickstart Guide -Stash is a web-based application. Once the application is running, the interface is available (by default) from http://localhost:9999. + +Stash is a web-based application. Once the application is running, the interface is available (by default) from `http://localhost:9999`. On first run, Stash will prompt you for some configuration options and media directories to index, called "Scanning" in Stash. After scanning, your media will be available for browsing, curating, editing, and tagging. 
Stash can pull metadata (performers, tags, descriptions, studios, and more) directly from many sites through the use of [scrapers](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/docs/en/Manual/Scraping.md), which integrate directly into Stash. Identifying an entire collection will typically require a mix of multiple sources: -- The project maintains [StashDB](https://stashdb.org/), a crowd-sourced repository of scene, studio, and performer information. Connecting it to Stash will allow you to automatically identify much of a typical media collection. It runs on our stash-box software and is primarily focused on mainstream digital scenes and studios. Instructions, invite codes, and more can be found in this guide to [Accessing StashDB](https://guidelines.stashdb.org/docs/faq_getting-started/stashdb/accessing-stashdb/). +- The stashapp team maintains [StashDB](https://stashdb.org/), a crowd-sourced repository of scene, studio, and performer information. Connecting it to Stash will allow you to automatically identify much of a typical media collection. It runs on our stash-box software and is primarily focused on mainstream digital scenes and studios. Instructions, invite codes, and more can be found in this guide to [Accessing StashDB](https://guidelines.stashdb.org/docs/faq_getting-started/stashdb/accessing-stashdb/). - Several community-managed stash-box databases can also be connected to Stash in a similar manner. Each one serves a slightly different niche and follows their own methodology. A rundown of each stash-box, their differences, and the information you need to sign up can be found in this guide to [Accessing Stash-Boxes](https://guidelines.stashdb.org/docs/faq_getting-started/stashdb/accessing-stash-boxes/). -- Many community-maintained scrapers can also be downloaded, installed, and updated from within Stash, allowing you to pull data from a wide range of other websites and databases. 
They can be found by navigating to Settings -> Metadata Providers -> Available Scrapers -> Community (stable). These can be trickier to use than a stash-box because every scraper works a little differently. For more information, please visit the [CommunityScrapers repository](https://github.com/stashapp/CommunityScrapers). +- Many community-maintained scrapers can also be downloaded, installed, and updated from within Stash, allowing you to pull data from a wide range of other websites and databases. They can be found by navigating to `Settings → Metadata Providers → Available Scrapers → Community (stable)`. These can be trickier to use than a stash-box because every scraper works a little differently. For more information, please visit the [CommunityScrapers repository](https://github.com/stashapp/CommunityScrapers). - All of the above methods of scraping data into Stash are also covered in more detail in our [Guide to Scraping](https://docs.stashapp.cc/beginner-guides/guide-to-scraping/). [StashDB](http://stashdb.org) is the canonical instance of our open source metadata API, [stash-box](https://github.com/stashapp/stash-box). # Translation + [![Translate](https://translate.codeberg.org/widget/stash/stash/svg-badge.svg)](https://translate.codeberg.org/engage/stash/) -Stash is available in 32 languages (so far!) and it could be in your language too. We use Weblate to coordinate community translations. If you want to help us translate Stash into your language, you can make an account at [Codeberg's Weblate](https://translate.codeberg.org/projects/stash/stash/) to get started contributing new languages or improving existing ones. Thanks! +Stash is available in 32 languages (so far!) and it could be in your language too. We use Weblate to coordinate community translations. If you want to help us translate Stash, you can make an account at [Codeberg's Weblate](https://translate.codeberg.org/projects/stash/stash/) to contribute to new or existing languages. Thanks! 
+ +The badge below shows the current translation status of Stash across all supported languages: [![Translation status](https://translate.codeberg.org/widget/stash/stash/multi-auto.svg)](https://translate.codeberg.org/engage/stash/) -## Join Our Community +# Support & Resources -We are excited to announce that we have a new home for support, feature requests, and discussions related to Stash and its associated projects. Join our community on the [Discourse forum](https://discourse.stashapp.cc) to connect with other users, share your ideas, and get help from fellow enthusiasts. +Need help or want to get involved? Start with the documentation, then reach out to the community if you need further assistance. -# Support (FAQ) +### Documentation +- [Official documentation](https://docs.stashapp.cc) - official guides and troubleshooting. +- [In-app manual](https://docs.stashapp.cc/in-app-manual) - press Shift + ? in the app or view the manual online. +- [FAQ](https://discourse.stashapp.cc/c/support/faq/28) - common questions and answers. +- [Community wiki](https://discourse.stashapp.cc/tags/c/community-wiki/22/stash) - guides, how-to’s and tips. + +### Community & discussion +- [Community forum](https://discourse.stashapp.cc) - community support, feature requests and discussions. +- [Discord](https://discord.gg/2TsNFKt) - real-time chat and community support. +- [GitHub discussions](https://github.com/stashapp/stash/discussions) - community support and feature discussions. +- [Lemmy community](https://discuss.online/c/stashapp) - board-style community space. -Check out our documentation on [Stash-Docs](https://docs.stashapp.cc) for information about the software, questions, guides, add-ons and more. 
- -For more help you can: -* Check the in-app documentation, in the top right corner of the app (it's also mirrored on [Stash-Docs](https://docs.stashapp.cc/in-app-manual)) -* Join our [community forum](https://discourse.stashapp.cc) -* Join the [Discord server](https://discord.gg/2TsNFKt) -* Start a [discussion on GitHub](https://github.com/stashapp/stash/discussions) - -# Customization - -## Themes and CSS Customization - -There is a [directory of community-created themes](https://docs.stashapp.cc/themes/list) on Stash-Docs. - -You can also change the Stash interface to fit your desired style with various snippets from [Custom CSS snippets](https://docs.stashapp.cc/themes/custom-css-snippets). +### Community scrapers & plugins +- [Metadata sources](https://docs.stashapp.cc/metadata-sources/) +- [Plugins](https://docs.stashapp.cc/plugins/) +- [Themes](https://docs.stashapp.cc/themes/) +- [Other projects](https://docs.stashapp.cc/other-projects/) # For Developers diff --git a/cmd/phasher/main.go b/cmd/phasher/main.go index 864195631..be2053784 100644 --- a/cmd/phasher/main.go +++ b/cmd/phasher/main.go @@ -5,20 +5,39 @@ import ( "fmt" "os" "os/exec" + "path/filepath" flag "github.com/spf13/pflag" "github.com/stashapp/stash/pkg/ffmpeg" + "github.com/stashapp/stash/pkg/hash/imagephash" "github.com/stashapp/stash/pkg/hash/videophash" "github.com/stashapp/stash/pkg/models" ) func customUsage() { fmt.Fprintf(os.Stderr, "Usage:\n") - fmt.Fprintf(os.Stderr, "%s [OPTIONS] VIDEOFILE...\n\nOptions:\n", os.Args[0]) + fmt.Fprintf(os.Stderr, "%s [OPTIONS] FILE...\n\nOptions:\n", os.Args[0]) flag.PrintDefaults() } func printPhash(ff *ffmpeg.FFMpeg, ffp *ffmpeg.FFProbe, inputfile string, quiet *bool) error { + // Determine if this is a video or image file based on extension + ext := filepath.Ext(inputfile) + ext = ext[1:] // remove the leading dot + + // Common image extensions + imageExts := map[string]bool{ + "jpg": true, "jpeg": true, "png": true, "gif": true, "webp": true, 
"bmp": true, "avif": true, + } + + if imageExts[ext] { + return printImagePhash(ff, inputfile, quiet) + } + + return printVideoPhash(ff, ffp, inputfile, quiet) +} + +func printVideoPhash(ff *ffmpeg.FFMpeg, ffp *ffmpeg.FFProbe, inputfile string, quiet *bool) error { ffvideoFile, err := ffp.NewVideoFile(inputfile) if err != nil { return err @@ -46,6 +65,24 @@ func printPhash(ff *ffmpeg.FFMpeg, ffp *ffmpeg.FFProbe, inputfile string, quiet return nil } +func printImagePhash(ff *ffmpeg.FFMpeg, inputfile string, quiet *bool) error { + imgFile := &models.ImageFile{ + BaseFile: &models.BaseFile{Path: inputfile}, + } + + phash, err := imagephash.Generate(ff, imgFile) + if err != nil { + return err + } + + if *quiet { + fmt.Printf("%x\n", *phash) + } else { + fmt.Printf("%x %v\n", *phash, imgFile.Path) + } + return nil +} + func getPaths() (string, string) { ffmpegPath, _ := exec.LookPath("ffmpeg") ffprobePath, _ := exec.LookPath("ffprobe") @@ -67,7 +104,7 @@ func main() { args := flag.Args() if len(args) < 1 { - fmt.Fprintf(os.Stderr, "Missing VIDEOFILE argument.\n") + fmt.Fprintf(os.Stderr, "Missing FILE argument.\n") flag.Usage() os.Exit(2) } @@ -87,4 +124,5 @@ func main() { fmt.Fprintln(os.Stderr, err) } } + } diff --git a/cmd/stash/main.go b/cmd/stash/main.go index 86edd6276..57fedd0e2 100644 --- a/cmd/stash/main.go +++ b/cmd/stash/main.go @@ -76,6 +76,10 @@ func main() { defer pprof.StopCPUProfile() } + // initialise desktop.IsDesktop here so that it doesn't get affected by + // ffmpeg hardware checks later on + desktop.InitIsDesktop() + mgr, err := manager.Initialize(cfg, l) if err != nil { exitError(fmt.Errorf("manager initialization error: %w", err)) @@ -110,7 +114,7 @@ func main() { // Logs only error level message to stderr. 
func initLogTemp() *log.Logger { l := log.NewLogger() - l.Init("", true, "Error") + l.Init("", true, "Error", 0) logger.Logger = l return l @@ -118,7 +122,7 @@ func initLogTemp() *log.Logger { func initLog(cfg *config.Config) *log.Logger { l := log.NewLogger() - l.Init(cfg.GetLogFile(), cfg.GetLogOut(), cfg.GetLogLevel()) + l.Init(cfg.GetLogFile(), cfg.GetLogOut(), cfg.GetLogLevel(), cfg.GetLogFileMaxSize()) logger.Logger = l return l diff --git a/docker/build/x86_64/Dockerfile b/docker/build/x86_64/Dockerfile index 4d153e8bc..163bd64b2 100644 --- a/docker/build/x86_64/Dockerfile +++ b/docker/build/x86_64/Dockerfile @@ -1,14 +1,16 @@ # This dockerfile should be built with `make docker-build` from the stash root. # Build Frontend -FROM node:20-alpine AS frontend +FROM node:24-alpine AS frontend RUN apk add --no-cache make git ## cache node_modules separately -COPY ./ui/v2.5/package.json ./ui/v2.5/yarn.lock /stash/ui/v2.5/ +COPY ./ui/v2.5/package.json ./ui/v2.5/pnpm-lock.yaml /stash/ui/v2.5/ WORKDIR /stash COPY Makefile /stash/ COPY ./graphql /stash/graphql/ COPY ./ui /stash/ui/ +# pnpm install with npm +RUN npm install -g pnpm RUN make pre-ui RUN make generate-ui ARG GITHASH diff --git a/docker/build/x86_64/Dockerfile-CUDA b/docker/build/x86_64/Dockerfile-CUDA index 4cab3f6c1..8a0b02e10 100644 --- a/docker/build/x86_64/Dockerfile-CUDA +++ b/docker/build/x86_64/Dockerfile-CUDA @@ -5,11 +5,13 @@ ARG CUDA_VERSION=12.8.0 FROM node:20-alpine AS frontend RUN apk add --no-cache make git ## cache node_modules separately -COPY ./ui/v2.5/package.json ./ui/v2.5/yarn.lock /stash/ui/v2.5/ +COPY ./ui/v2.5/package.json ./ui/v2.5/pnpm-lock.yaml /stash/ui/v2.5/ WORKDIR /stash COPY Makefile /stash/ COPY ./graphql /stash/graphql/ COPY ./ui /stash/ui/ +# pnpm install with npm +RUN npm install -g pnpm RUN make pre-ui RUN make generate-ui ARG GITHASH diff --git a/docker/ci/x86_64/Dockerfile b/docker/ci/x86_64/Dockerfile index f0f1e242b..2161cb6af 100644 --- a/docker/ci/x86_64/Dockerfile 
+++ b/docker/ci/x86_64/Dockerfile @@ -12,9 +12,8 @@ RUN if [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then BIN=stash-linux-arm32v6; \ FROM --platform=$TARGETPLATFORM alpine:latest AS app COPY --from=binary /stash /usr/bin/ -RUN apk add --no-cache ca-certificates python3 py3-requests py3-requests-toolbelt py3-lxml py3-pip ffmpeg ruby tzdata vips vips-tools \ - && pip install --user --break-system-packages mechanicalsoup cloudscraper stashapp-tools \ - && gem install faraday +RUN apk add --no-cache ca-certificates python3 py3-requests py3-requests-toolbelt py3-lxml py3-pip ffmpeg tzdata vips vips-tools vips-heif \ + && pip install --break-system-packages mechanicalsoup cloudscraper stashapp-tools ENV STASH_CONFIG_FILE=/root/.stash/config.yml # Basic build-time metadata as defined at https://github.com/opencontainers/image-spec/blob/main/annotations.md#pre-defined-annotation-keys diff --git a/docker/compiler/.gitignore b/docker/compiler/.gitignore deleted file mode 100644 index 7012bfd63..000000000 --- a/docker/compiler/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.sdk.tar.* \ No newline at end of file diff --git a/docker/compiler/Dockerfile b/docker/compiler/Dockerfile index 40b92c180..c9dfb9c7c 100644 --- a/docker/compiler/Dockerfile +++ b/docker/compiler/Dockerfile @@ -1,83 +1,86 @@ -FROM golang:1.24.3 +### OSXCROSS +FROM debian:bookworm AS osxcross +# add osxcross +WORKDIR /tmp/osxcross +ARG OSXCROSS_REVISION=5e1b71fcceb23952f3229995edca1b6231525b5b +ADD --checksum=sha256:d3f771bbc20612fea577b18a71be3af2eb5ad2dd44624196cf55de866d008647 https://codeload.github.com/tpoechtrager/osxcross/tar.gz/${OSXCROSS_REVISION} /tmp/osxcross.tar.gz -LABEL maintainer="https://discord.gg/2TsNFKt" +ARG OSX_SDK_VERSION=11.3 +ARG OSX_SDK_DOWNLOAD_FILE=MacOSX${OSX_SDK_VERSION}.sdk.tar.xz +ARG OSX_SDK_DOWNLOAD_URL=https://github.com/phracker/MacOSX-SDKs/releases/download/${OSX_SDK_VERSION}/${OSX_SDK_DOWNLOAD_FILE} +ADD 
--checksum=sha256:cd4f08a75577145b8f05245a2975f7c81401d75e9535dcffbb879ee1deefcbf4 ${OSX_SDK_DOWNLOAD_URL} /tmp/osxcross/tarballs/${OSX_SDK_DOWNLOAD_FILE} -RUN apt-get update && apt-get install -y apt-transport-https ca-certificates gnupg +ENV UNATTENDED=yes \ + SDK_VERSION=${OSX_SDK_VERSION} \ + OSX_VERSION_MIN=10.10 +RUN apt update && \ + apt install -y --no-install-recommends \ + bash ca-certificates clang cmake git patch libssl-dev bzip2 cpio libbz2-dev libxml2-dev make python3 xz-utils zlib1g-dev +# lzma-dev libxml2-dev xz +RUN tar --strip=1 -C /tmp/osxcross -xf /tmp/osxcross.tar.gz +RUN ./build.sh -RUN mkdir -p /etc/apt/keyrings +### FREEBSD cross-compilation stage +# use alpine for cacheable image since apt is notorious for not caching +FROM alpine:3 AS freebsd +# match golang latest +# https://go.dev/wiki/FreeBSD +ARG FREEBSD_VERSION=12.4 +ADD --checksum=sha256:581c7edacfd2fca2bdf5791f667402d22fccd8a5e184635e0cac075564d57aa8 \ + http://ftp-archive.freebsd.org/mirror/FreeBSD-Archive/old-releases/amd64/${FREEBSD_VERSION}-RELEASE/base.txz \ + /tmp/base.txz -ADD https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key nodesource.gpg.key -RUN cat nodesource.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && rm nodesource.gpg.key -RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list - -ADD https://dl.yarnpkg.com/debian/pubkey.gpg yarn.gpg -RUN cat yarn.gpg | gpg --dearmor -o /etc/apt/keyrings/yarn.gpg && rm yarn.gpg -RUN echo "deb [signed-by=/etc/apt/keyrings/yarn.gpg] https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list - -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - git make tar bash nodejs yarn zip \ - clang llvm-dev cmake patch libxml2-dev uuid-dev libssl-dev xz-utils \ - bzip2 gzip sed cpio libbz2-dev zlib1g-dev \ - gcc-mingw-w64 \ - gcc-arm-linux-gnueabi libc-dev-armel-cross 
linux-libc-dev-armel-cross \ - gcc-aarch64-linux-gnu libc-dev-arm64-cross && \ - rm -rf /var/lib/apt/lists/*; - -# FreeBSD cross-compilation setup -# https://github.com/smartmontools/docker-build/blob/6b8c92560d17d325310ba02d9f5a4b250cb0764a/Dockerfile#L66 -ENV FREEBSD_VERSION 13.4 -ENV FREEBSD_DOWNLOAD_URL http://ftp.plusline.de/FreeBSD/releases/amd64/${FREEBSD_VERSION}-RELEASE/base.txz -ENV FREEBSD_SHA 8e13b0a93daba349b8d28ad246d7beb327659b2ef4fe44d89f447392daec5a7c - -RUN cd /tmp && \ - curl -o base.txz $FREEBSD_DOWNLOAD_URL && \ - echo "$FREEBSD_SHA base.txz" | sha256sum -c - && \ - mkdir -p /opt/cross-freebsd && \ - cd /opt/cross-freebsd && \ - tar -xf /tmp/base.txz ./lib/ ./usr/lib/ ./usr/include/ && \ - rm -f /tmp/base.txz && \ - cd /opt/cross-freebsd/usr/lib && \ - find . -xtype l | xargs ls -l | grep ' /lib/' | awk '{print "ln -sf /opt/cross-freebsd"$11 " " $9}' | /bin/sh && \ +WORKDIR /opt/cross-freebsd +RUN apk add --no-cache tar xz +RUN tar -xf /tmp/base.txz --strip-components=1 ./usr/lib ./usr/include ./lib +RUN cd /opt/cross-freebsd/usr/lib && \ + find . 
-type l -exec sh -c ' \ + for link; do \ + target=$(readlink "$link"); \ + case "$target" in \ + /lib/*) ln -sf "/opt/cross-freebsd$target" "$link";; \ + esac; \ + done \ + ' sh {} + && \ ln -s libc++.a libstdc++.a && \ ln -s libc++.so libstdc++.so -# macOS cross-compilation setup -ENV OSX_SDK_VERSION 11.3 -ENV OSX_SDK_DOWNLOAD_FILE MacOSX${OSX_SDK_VERSION}.sdk.tar.xz -ENV OSX_SDK_DOWNLOAD_URL https://github.com/phracker/MacOSX-SDKs/releases/download/${OSX_SDK_VERSION}/${OSX_SDK_DOWNLOAD_FILE} -ENV OSX_SDK_SHA cd4f08a75577145b8f05245a2975f7c81401d75e9535dcffbb879ee1deefcbf4 -ENV OSXCROSS_REVISION 5e1b71fcceb23952f3229995edca1b6231525b5b -ENV OSXCROSS_DOWNLOAD_URL https://codeload.github.com/tpoechtrager/osxcross/tar.gz/${OSXCROSS_REVISION} -ENV OSXCROSS_SHA d3f771bbc20612fea577b18a71be3af2eb5ad2dd44624196cf55de866d008647 +### BUILDER +FROM golang:1.24.3 AS builder +ENV PATH=/opt/osx-ndk-x86/bin:$PATH -RUN cd /tmp && \ - curl -o osxcross.tar.gz $OSXCROSS_DOWNLOAD_URL && \ - echo "$OSXCROSS_SHA osxcross.tar.gz" | sha256sum -c - && \ - mkdir osxcross && \ - tar --strip=1 -C osxcross -xf osxcross.tar.gz && \ - rm -f osxcross.tar.gz && \ - curl -Lo $OSX_SDK_DOWNLOAD_FILE $OSX_SDK_DOWNLOAD_URL && \ - echo "$OSX_SDK_SHA $OSX_SDK_DOWNLOAD_FILE" | sha256sum -c - && \ - mv $OSX_SDK_DOWNLOAD_FILE osxcross/tarballs/ && \ - UNATTENDED=yes SDK_VERSION=$OSX_SDK_VERSION OSX_VERSION_MIN=10.10 osxcross/build.sh && \ - cp osxcross/target/lib/* /usr/lib/ && \ - mv osxcross/target /opt/osx-ndk-x86 && \ - rm -rf /tmp/osxcross +# copy in nodejs instead of using nodesource :thumbsup: +COPY --from=docker.io/library/node:24-bookworm /usr/local /usr/local +# copy in osxcross +COPY --from=osxcross /tmp/osxcross/target/lib /usr/lib +COPY --from=osxcross /tmp/osxcross/target /opt/osx-ndk-x86 +# copy in cross-freebsd +COPY --from=freebsd /opt/cross-freebsd /opt/cross-freebsd -ENV PATH /opt/osx-ndk-x86/bin:$PATH +# pnpm install with npm +RUN npm install -g pnpm -RUN mkdir -p /root/.ssh && \ - 
chmod 0700 /root/.ssh && \ - ssh-keyscan github.com > /root/.ssh/known_hosts +# git for getting hash +# make and bash for building -# ignore "dubious ownership" errors +# clang for macos +# zip for stashapp.zip +# gcc-extensions for cross-arch build +# we still target arm soft float? +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + git make bash \ + clang zip \ + gcc-mingw-w64 \ + gcc-arm-linux-gnueabi \ + libc-dev-armel-cross linux-libc-dev-armel-cross \ + gcc-aarch64-linux-gnu libc-dev-arm64-cross && \ + rm -rf /var/lib/apt/lists/*; RUN git config --global safe.directory '*' - # To test locally: # make generate # make ui # cd docker/compiler -# make build -# docker run --rm -v /PATH_TO_STASH:/stash -w /stash -i -t stashapp/compiler:latest make build-cc-all -# # binaries will show up in /dist +# docker build . -t ghcr.io/stashapp/compiler:latest +# docker run --rm -v /PATH_TO_STASH:/stash -w /stash -i -t ghcr.io/stashapp/compiler:latest make build-cc-all +# # binaries will show up in /dist \ No newline at end of file diff --git a/docker/compiler/Makefile b/docker/compiler/Makefile index 275466640..66f19f5d6 100644 --- a/docker/compiler/Makefile +++ b/docker/compiler/Makefile @@ -1,16 +1,22 @@ +host=ghcr.io user=stashapp repo=compiler -version=11 +version=13 + +VERSION_IMAGE = ${host}/${user}/${repo}:${version} +LATEST_IMAGE = ${host}/${user}/${repo}:latest latest: - docker build -t ${user}/${repo}:latest . + docker build -t ${LATEST_IMAGE} . build: - docker build -t ${user}/${repo}:${version} -t ${user}/${repo}:latest . + docker build -t ${VERSION_IMAGE} -t ${LATEST_IMAGE} . build-no-cache: - docker build --no-cache -t ${user}/${repo}:${version} -t ${user}/${repo}:latest . + docker build --no-cache -t ${VERSION_IMAGE} -t ${LATEST_IMAGE} . 
-install: build - docker push ${user}/${repo}:${version} - docker push ${user}/${repo}:latest +# requires docker login ghcr.io +# echo $CR_PAT | docker login ghcr.io -u USERNAME --password-stdin +push: + docker push ${VERSION_IMAGE} + docker push ${LATEST_IMAGE} \ No newline at end of file diff --git a/docker/compiler/README.md b/docker/compiler/README.md index 6bb7d8d99..c7b4840f9 100644 --- a/docker/compiler/README.md +++ b/docker/compiler/README.md @@ -1,3 +1,3 @@ Modified from https://github.com/bep/dockerfiles/tree/master/ci-goreleaser -When the Dockerfile is changed, the version number should be incremented in the Makefile and the new version tag should be pushed to Docker Hub. The GitHub workflow files also need to be updated to pull the correct image tag. +When the Dockerfile is changed, the version number should be incremented in [.github/workflows/build-compiler.yml](../../.github/workflows/build-compiler.yml) and the workflow [manually ran](). `env: COMPILER_IMAGE` in [.github/workflows/build.yml](../../.github/workflows/build.yml) also needs to be updated to pull the correct image tag. \ No newline at end of file diff --git a/docs/DEVELOPMENT.md b/docs/DEVELOPMENT.md index 4a1cf30df..a26ce6817 100644 --- a/docs/DEVELOPMENT.md +++ b/docs/DEVELOPMENT.md @@ -5,7 +5,8 @@ * [Go](https://golang.org/dl/) * [GolangCI](https://golangci-lint.run/) - A meta-linter which runs several linters in parallel * To install, follow the [local installation instructions](https://golangci-lint.run/welcome/install/#local-installation) -* [Yarn](https://yarnpkg.com/en/docs/install) - Yarn package manager +* [nodejs](https://nodejs.org/en/download) - nodejs runtime + * corepack/[pnpm](https://pnpm.io/installation) - nodejs package manager (included with nodejs) ## Environment @@ -22,32 +23,22 @@ NOTE: The `make` command in Windows will be `mingw32-make` with MinGW. For examp ### macOS 1. If you don't have it already, install the [Homebrew package manager](https://brew.sh). -2. 
Install dependencies: `brew install go git yarn gcc make node ffmpeg` +2. Install dependencies: `brew install go git gcc make node ffmpeg` ### Linux #### Arch Linux -1. Install dependencies: `sudo pacman -S go git yarn gcc make nodejs ffmpeg --needed` +1. Install dependencies: `sudo pacman -S go git gcc make nodejs ffmpeg --needed` #### Ubuntu -1. Install dependencies: `sudo apt-get install golang git yarnpkg gcc nodejs ffmpeg -y` +1. Install dependencies: `sudo apt-get install golang git gcc nodejs ffmpeg -y` ### OpenBSD -1. Install dependencies `doas pkg_add gmake go git yarn node cmake` -2. Compile a custom ffmpeg from ports. The default ffmpeg in OpenBSD's packages is not compiled with WebP support, which is required by Stash. - - If you've already installed ffmpeg, uninstall it: `doas pkg_delete ffmpeg` - - If you haven't already, [fetch the ports tree and verify](https://www.openbsd.org/faq/ports/ports.html#PortsFetch). - - Find the ffmpeg port in `/usr/ports/graphics/ffmpeg`, and patch the Makefile to include libwebp - - Add `webp` to `WANTLIB` - - Add `graphics/libwebp` to the list in `LIB_DEPENDS` - - Add `-lwebp -lwebpdecoder -lwebpdemux -lwebpmux` to `LIBavcodec_EXTRALIBS` - - Add `--enable-libweb` to the list in `CONFIGURE_ARGS` - - If you've already built ffmpeg from ports before, you may need to also increment `REVISION` - - Run `doas make install` - - Follow the instructions below to build a release, but replace the final step `make build-release` with `gmake flags-release stash`, to [avoid the PIE buildmode](https://github.com/golang/go/issues/59866). +1. Install dependencies `doas pkg_add gmake go git node cmake ffmpeg` +2. Follow the instructions below to build a release, but replace the final step `make build-release` with `gmake flags-release stash`, to [avoid the PIE buildmode](https://github.com/golang/go/issues/59866). NOTE: The `make` command in OpenBSD will be `gmake`. For example, `make pre-ui` will be `gmake pre-ui`. 
@@ -127,8 +118,8 @@ This project uses a modification of the [CI-GoReleaser](https://github.com/bep/d To cross-compile the app yourself: 1. Run `make pre-ui`, `make generate` and `make ui` outside the container, to generate files and build the UI. -2. Pull the latest compiler image from Docker Hub: `docker pull stashapp/compiler` -3. Run `docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -it stashapp/compiler /bin/bash` to open a shell inside the container. +2. Pull the latest compiler image from GHCR: `docker pull ghcr.io/stashapp/compiler` +3. Run `docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -it ghcr.io/stashapp/compiler /bin/bash` to open a shell inside the container. 4. From inside the container, run `make build-cc-all` to build for all platforms, or run `make build-cc-{platform}` to build for a specific platform (have a look at the `Makefile` for the list of targets). 5. You will find the compiled binaries in `dist/`. diff --git a/go.mod b/go.mod index 268276841..348036710 100644 --- a/go.mod +++ b/go.mod @@ -7,15 +7,15 @@ require ( github.com/WithoutPants/sortorder v0.0.0-20230616003020-921c9ef69552 github.com/Yamashou/gqlgenc v0.32.1 github.com/anacrolix/dms v1.2.2 - github.com/antchfx/htmlquery v1.3.0 + github.com/antchfx/htmlquery v1.3.5 github.com/asticode/go-astisub v0.25.1 - github.com/chromedp/cdproto v0.0.0-20231007061347-18b01cd81617 - github.com/chromedp/chromedp v0.9.2 + github.com/chromedp/cdproto v0.0.0-20250803210736-d308e07a266d + github.com/chromedp/chromedp v0.14.2 github.com/corona10/goimagehash v1.1.0 github.com/disintegration/imaging v1.6.2 github.com/dop251/goja v0.0.0-20231027120936-b396bb4c349d github.com/doug-martin/goqu/v9 v9.18.0 - github.com/go-chi/chi/v5 v5.0.12 + github.com/go-chi/chi/v5 v5.2.2 github.com/go-chi/cors v1.2.1 github.com/go-chi/httplog v0.3.1 github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 @@ -32,7 +32,11 @@ require ( github.com/json-iterator/go 
v1.1.12 github.com/kermieisinthehouse/gosx-notifier v0.1.2 github.com/kermieisinthehouse/systray v1.2.4 - github.com/knadh/koanf v1.5.0 + github.com/knadh/koanf/parsers/yaml v1.1.0 + github.com/knadh/koanf/providers/env v1.1.0 + github.com/knadh/koanf/providers/file v1.2.0 + github.com/knadh/koanf/providers/posflag v1.0.1 + github.com/knadh/koanf/v2 v2.2.1 github.com/lucasb-eyer/go-colorful v1.2.0 github.com/mattn/go-sqlite3 v1.14.22 github.com/mitchellh/mapstructure v1.5.0 @@ -40,9 +44,10 @@ require ( github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 github.com/remeh/sizedwaitgroup v1.0.0 github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd + github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 github.com/sirupsen/logrus v1.9.3 github.com/spf13/cast v1.6.0 - github.com/spf13/pflag v1.0.5 + github.com/spf13/pflag v1.0.6 github.com/stretchr/testify v1.10.0 github.com/tidwall/gjson v1.16.0 github.com/vearutop/statigz v1.4.0 @@ -51,33 +56,35 @@ require ( github.com/vektra/mockery/v2 v2.10.0 github.com/xWTF/chardet v0.0.0-20230208095535-c780f2ac244e github.com/zencoder/go-dash/v3 v3.0.2 - golang.org/x/crypto v0.38.0 + golang.org/x/crypto v0.45.0 golang.org/x/image v0.18.0 - golang.org/x/net v0.40.0 - golang.org/x/sys v0.33.0 - golang.org/x/term v0.32.0 - golang.org/x/text v0.25.0 + golang.org/x/net v0.47.0 + golang.org/x/sys v0.38.0 + golang.org/x/term v0.37.0 + golang.org/x/text v0.31.0 golang.org/x/time v0.10.0 gopkg.in/guregu/null.v4 v4.0.0 + gopkg.in/natefinch/lumberjack.v2 v2.2.1 gopkg.in/yaml.v2 v2.4.0 ) require ( github.com/agnivade/levenshtein v1.2.1 // indirect - github.com/antchfx/xpath v1.2.3 // indirect + github.com/antchfx/xpath v1.3.5 // indirect github.com/asticode/go-astikit v0.20.0 // indirect github.com/asticode/go-astits v1.8.0 // indirect - github.com/chromedp/sysutil v1.0.0 // indirect + github.com/chromedp/sysutil v1.1.0 // indirect github.com/coder/websocket v1.8.12 // indirect github.com/cpuguy83/go-md2man/v2 
v2.0.7 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dlclark/regexp2 v1.7.0 // indirect - github.com/fsnotify/fsnotify v1.6.0 // indirect + github.com/fsnotify/fsnotify v1.9.0 // indirect + github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2 // indirect github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect - github.com/go-viper/mapstructure/v2 v2.2.1 // indirect + github.com/go-viper/mapstructure/v2 v2.4.0 // indirect github.com/gobwas/httphead v0.1.0 // indirect github.com/gobwas/pool v0.2.1 // indirect - github.com/gobwas/ws v1.3.0 // indirect + github.com/gobwas/ws v1.4.0 // indirect github.com/goccy/go-yaml v1.18.0 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/google/pprof v0.0.0-20230207041349-798e818bf904 // indirect @@ -85,9 +92,8 @@ require ( github.com/hashicorp/go-multierror v1.1.1 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/josharian/intern v1.0.0 // indirect + github.com/knadh/koanf/maps v0.1.2 // indirect github.com/magiconair/properties v1.8.7 // indirect - github.com/mailru/easyjson v0.7.7 // indirect github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect @@ -114,9 +120,10 @@ require ( github.com/urfave/cli/v2 v2.27.6 // indirect github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect go.uber.org/atomic v1.11.0 // indirect - golang.org/x/mod v0.24.0 // indirect - golang.org/x/sync v0.14.0 // indirect - golang.org/x/tools v0.33.0 // indirect + go.yaml.in/yaml/v3 v3.0.3 // indirect + golang.org/x/mod v0.29.0 // indirect + golang.org/x/sync v0.18.0 // indirect + golang.org/x/tools v0.38.0 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index bbb38befb..4e19720f5 100644 --- a/go.sum +++ 
b/go.sum @@ -72,7 +72,6 @@ github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuy github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/anacrolix/dms v1.2.2 h1:0mk2/DXNqa5KDDbaLgFPf3oMV6VCGdFNh3d/gt4oafM= github.com/anacrolix/dms v1.2.2/go.mod h1:msPKAoppoNRfrYplJqx63FZ+VipDZ4Xsj3KzIQxyU7k= github.com/anacrolix/envpprof v0.0.0-20180404065416-323002cec2fa/go.mod h1:KgHhUaQMc8cC0+cEflSgCFNFbKwi5h54gqtVn8yhP7c= @@ -86,10 +85,10 @@ github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/ github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM= github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA= -github.com/antchfx/htmlquery v1.3.0 h1:5I5yNFOVI+egyia5F2s/5Do2nFWxJz41Tr3DyfKD25E= -github.com/antchfx/htmlquery v1.3.0/go.mod h1:zKPDVTMhfOmcwxheXUsx4rKJy8KEY/PU6eXr/2SebQ8= -github.com/antchfx/xpath v1.2.3 h1:CCZWOzv5bAqjVv0offZ2LVgVYFbeldKQVuLNbViZdes= -github.com/antchfx/xpath v1.2.3/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= +github.com/antchfx/htmlquery v1.3.5 h1:aYthDDClnG2a2xePf6tys/UyyM/kRcsFRm+ifhFKoU0= +github.com/antchfx/htmlquery v1.3.5/go.mod h1:5oyIPIa3ovYGtLqMPNjBF2Uf25NPCKsMjCnQ8lvjaoA= +github.com/antchfx/xpath v1.3.5 h1:PqbXLC3TkfeZyakF5eeh3NTWEbYl4VHNVeufANzDbKQ= +github.com/antchfx/xpath v1.3.5/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= github.com/antihax/optional v1.0.0/go.mod 
h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= @@ -104,16 +103,6 @@ github.com/asticode/go-astisub v0.25.1 h1:RZMGfZPp7CXOkI6g+zCU7DRLuciGPGup921uKZ github.com/asticode/go-astisub v0.25.1/go.mod h1:WTkuSzFB+Bp7wezuSf2Oxulj5A8zu2zLRVFf6bIFQK8= github.com/asticode/go-astits v1.8.0 h1:rf6aiiGn/QhlFjNON1n5plqF3Fs025XLUwiQ0NB6oZg= github.com/asticode/go-astits v1.8.0/go.mod h1:DkOWmBNQpnr9mv24KfZjq4JawCFX1FCqjLVGvO0DygQ= -github.com/aws/aws-sdk-go-v2 v1.9.2/go.mod h1:cK/D0BBs0b/oWPIcX/Z/obahJK1TT7IPVjy53i/mX/4= -github.com/aws/aws-sdk-go-v2/config v1.8.3/go.mod h1:4AEiLtAb8kLs7vgw2ZV3p2VZ1+hBavOc84hqxVNpCyw= -github.com/aws/aws-sdk-go-v2/credentials v1.4.3/go.mod h1:FNNC6nQZQUuyhq5aE5c7ata8o9e4ECGmS4lAXC7o1mQ= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.6.0/go.mod h1:gqlclDEZp4aqJOancXK6TN24aKhT0W0Ae9MHk3wzTMM= -github.com/aws/aws-sdk-go-v2/internal/ini v1.2.4/go.mod h1:ZcBrrI3zBKlhGFNYWvju0I3TR93I7YIgAfy82Fh4lcQ= -github.com/aws/aws-sdk-go-v2/service/appconfig v1.4.2/go.mod h1:FZ3HkCe+b10uFZZkFdvf98LHW21k49W8o8J366lqVKY= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.3.2/go.mod h1:72HRZDLMtmVQiLG2tLfQcaWLCssELvGl+Zf2WVxMmR8= -github.com/aws/aws-sdk-go-v2/service/sso v1.4.2/go.mod h1:NBvT9R1MEF+Ud6ApJKM0G+IkPchKS7p7c2YPKwHmBOk= -github.com/aws/aws-sdk-go-v2/service/sts v1.7.2/go.mod h1:8EzeIqfWt2wWT4rJVu3f21TfrhJ8AEMzVybRNSb/b4g= -github.com/aws/smithy-go v1.8.0/go.mod h1:SObp3lf9smib00L/v3U2eAKG8FyQ7iLrJnQiAmR5n+E= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= @@ -127,13 +116,12 @@ 
github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/chromedp/cdproto v0.0.0-20230802225258-3cf4e6d46a89/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs= -github.com/chromedp/cdproto v0.0.0-20231007061347-18b01cd81617 h1:/5dwcyi5WOawM1Iz6MjrYqB90TRIdZv3O0fVHEJb86w= -github.com/chromedp/cdproto v0.0.0-20231007061347-18b01cd81617/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs= -github.com/chromedp/chromedp v0.9.2 h1:dKtNz4kApb06KuSXoTQIyUC2TrA0fhGDwNZf3bcgfKw= -github.com/chromedp/chromedp v0.9.2/go.mod h1:LkSXJKONWTCHAfQasKFUZI+mxqS4tZqhmtGzzhLsnLs= -github.com/chromedp/sysutil v1.0.0 h1:+ZxhTpfpZlmchB58ih/LBHX52ky7w2VhQVKQMucy3Ic= -github.com/chromedp/sysutil v1.0.0/go.mod h1:kgWmDdq8fTzXYcKIBqIYvRRTnYb9aNS9moAV0xufSww= +github.com/chromedp/cdproto v0.0.0-20250803210736-d308e07a266d h1:ZtA1sedVbEW7EW80Iz2GR3Ye6PwbJAJXjv7D74xG6HU= +github.com/chromedp/cdproto v0.0.0-20250803210736-d308e07a266d/go.mod h1:NItd7aLkcfOA/dcMXvl8p1u+lQqioRMq/SqDp71Pb/k= +github.com/chromedp/chromedp v0.14.2 h1:r3b/WtwM50RsBZHMUm9fsNhhzRStTHrKdr2zmwbZSzM= +github.com/chromedp/chromedp v0.14.2/go.mod h1:rHzAv60xDE7VNy/MYtTUrYreSc0ujt2O1/C3bzctYBo= +github.com/chromedp/sysutil v1.1.0 h1:PUFNv5EcprjqXZD9nJb9b/c9ibAbxiYo4exNWZyipwM= +github.com/chromedp/sysutil v1.1.0/go.mod h1:WiThHUdltqCNKGc4gaU50XgYjwjYIhKWoHGPTUfWTJ8= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/logex v1.2.0/go.mod h1:9+9sk7u7pGNWYMkh0hdiL++6OeibzJccyQU4p4MedaY= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= @@ -185,7 +173,6 @@ github.com/dop251/goja_nodejs 
v0.0.0-20211022123610-8dd9abb0616d/go.mod h1:DngW8 github.com/doug-martin/goqu/v9 v9.18.0 h1:/6bcuEtAe6nsSMVK/M+fOiXUNfyFF3yYtE07DBPFMYY= github.com/doug-martin/goqu/v9 v9.18.0/go.mod h1:nf0Wc2/hV3gYK9LiyqIrzBEVGlI8qW3GuDCEobC4wBQ= github.com/dustin/go-humanize v0.0.0-20180421182945-02af3965c54e/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= @@ -200,19 +187,17 @@ github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= -github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= -github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= -github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= -github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= +github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= github.com/ghodss/yaml v1.0.0/go.mod 
h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/glycerine/go-unsnap-stream v0.0.0-20180323001048-9f0cb55181dd/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE= github.com/glycerine/goconvey v0.0.0-20180728074245-46e3a41ad493/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24= github.com/go-chi/chi/v5 v5.0.7/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= -github.com/go-chi/chi/v5 v5.0.12 h1:9euLV5sTrTNTRUU9POmDUvfxyj6LAABLUcEWO+JJb4s= -github.com/go-chi/chi/v5 v5.0.12/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= +github.com/go-chi/chi/v5 v5.2.2 h1:CMwsvRVTbXVytCk1Wd72Zy1LAsAh9GxMmSNWLHCG618= +github.com/go-chi/chi/v5 v5.2.2/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops= github.com/go-chi/cors v1.2.1 h1:xEC8UT3Rlp2QuWNEr4Fs/c2EAGVKBwy/1vHx3bppil4= github.com/go-chi/cors v1.2.1/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58= github.com/go-chi/httplog v0.3.1 h1:uC3IUWCZagtbCinb3ypFh36SEcgd6StWw2Bu0XSXRtg= @@ -220,31 +205,28 @@ github.com/go-chi/httplog v0.3.1/go.mod h1:UoiQQ/MTZH5V6JbNB2FzF0DynTh5okpXxlhsy github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2 h1:iizUGZ9pEquQS5jTGkh4AqeeHCMbfbjeb0zMt0aEFzs= +github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2/go.mod h1:TiCD2a1pcmjd7YnhGH0f/zKNcCD06B029pHhzV23c2M= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= -github.com/go-ldap/ldap v3.0.2+incompatible/go.mod 
h1:qfd9rJvER9Q0/D/Sqn1DfHRoBp40uXYvFoEVrNEPqRc= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= -github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU= github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg= github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-test/deep v1.0.2-0.20181118220953-042da051cf31/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 h1:qZNfIGkIANxGv/OqtnntR4DfOY2+BgwR60cAcu/i3SE= github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4/go.mod h1:kW3HQ4UdaAyrUCSSDR4xUzBKW6O2iA4uHhk7AtyYp10= -github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss= -github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= +github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU= github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM= github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og= github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= -github.com/gobwas/ws v1.2.1/go.mod 
h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY= -github.com/gobwas/ws v1.3.0 h1:sbeU3Y4Qzlb+MOzIe6mQGf7QR4Hkv6ZD0qhGkBFL2O0= -github.com/gobwas/ws v1.3.0/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY= +github.com/gobwas/ws v1.4.0 h1:CTaoG1tojrh4ucGPcoJFiAQUAsEWekEWvLy7GsVNqGs= +github.com/gobwas/ws v1.4.0/go.mod h1:G3gNqMNtPppf5XUz7O4shetPpcZ1VJ7zt18dlUeakrc= github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw= github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= @@ -288,7 +270,6 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180124185431-e89373fe6b4a/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= @@ -305,7 +286,7 @@ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.7.0 
h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= @@ -346,11 +327,9 @@ github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7Fsg github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/hashicorp/consul/api v1.11.0/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= github.com/hashicorp/consul/api v1.12.0/go.mod h1:6pVBMo0ebnYdt2S3H87XhekM/HHrUoTD2XXb/VrZVy0= -github.com/hashicorp/consul/api v1.13.0/go.mod h1:ZlVrynguJKcYr54zGaDbaL3fOvKC9m72FhPvA8T35KQ= github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= @@ -358,8 +337,6 @@ github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brv github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= -github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI= -github.com/hashicorp/go-hclog v0.8.0/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= github.com/hashicorp/go-hclog 
v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= @@ -369,17 +346,12 @@ github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHh github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/go-plugin v1.0.1/go.mod h1:++UyYGoz3o5w9ZzAdZxtQKrWWP+iqPBn3cQptSMzBuY= github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= -github.com/hashicorp/go-retryablehttp v0.5.4/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= -github.com/hashicorp/go-rootcerts v1.0.1/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= -github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-version v1.1.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= @@ -394,14 +366,8 @@ 
github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOn github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= -github.com/hashicorp/vault/api v1.0.4/go.mod h1:gDcqh3WGcR1cpF5AJz/B1UFheUEneMoIospckxBxk6Q= -github.com/hashicorp/vault/sdk v0.1.13/go.mod h1:B+hVj7TpuQY1Y/GPbCpffmgd+tSEwvhkWnjtSYCaS2M= -github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= -github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= github.com/hasura/go-graphql-client v0.13.1 h1:kKbjhxhpwz58usVl+Xvgah/TDha5K2akNTRQdsEHN6U= github.com/hasura/go-graphql-client v0.13.1/go.mod h1:k7FF7h53C+hSNFRG3++DdVZWIuHdCaTbI7siTJ//zGQ= -github.com/hjson/hjson-go/v4 v4.0.0 h1:wlm6IYYqHjOdXH1gHev4VoXCaW20HdQAGCxdOEEg2cs= -github.com/hjson/hjson-go/v4 v4.0.0/go.mod h1:KaYt3bTw3zhBjYqnXkYywcYctk0A2nxeEFTse3rH13E= github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo= github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= @@ -412,18 +378,10 @@ github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2 github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8= github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg= -github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= -github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod 
h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY= -github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= -github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= -github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= -github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= -github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= @@ -431,17 +389,25 @@ github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1 github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= -github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/kermieisinthehouse/gosx-notifier v0.1.2 h1:KV0KBeKK2B24kIHY7iK0jgS64Q05f4oB+hUZmsPodxQ= github.com/kermieisinthehouse/gosx-notifier v0.1.2/go.mod h1:xyWT07azFtUOcHl96qMVvKhvKzsMcS7rKTHQyv8WTho= github.com/kermieisinthehouse/systray v1.2.4 h1:pdH5vnl+KKjRrVCRU4g/2W1/0HVzuuJ6WXHlPPHYY6s= 
github.com/kermieisinthehouse/systray v1.2.4/go.mod h1:axh6C/jNuSyC0QGtidZJURc9h+h41HNoMySoLVrhVR4= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/knadh/koanf v1.5.0 h1:q2TSd/3Pyc/5yP9ldIrSdIz26MCcyNQzW0pEAugLPNs= -github.com/knadh/koanf v1.5.0/go.mod h1:Hgyjp4y8v44hpZtPzs7JZfRAW5AhN7KfZcwv1RYggDs= +github.com/knadh/koanf/maps v0.1.2 h1:RBfmAW5CnZT+PJ1CVc1QSJKf4Xu9kxfQgYVQSu8hpbo= +github.com/knadh/koanf/maps v0.1.2/go.mod h1:npD/QZY3V6ghQDdcQzl1W4ICNVTkohC8E73eI2xW4yI= +github.com/knadh/koanf/parsers/yaml v1.1.0 h1:3ltfm9ljprAHt4jxgeYLlFPmUaunuCgu1yILuTXRdM4= +github.com/knadh/koanf/parsers/yaml v1.1.0/go.mod h1:HHmcHXUrp9cOPcuC+2wrr44GTUB0EC+PyfN3HZD9tFg= +github.com/knadh/koanf/providers/env v1.1.0 h1:U2VXPY0f+CsNDkvdsG8GcsnK4ah85WwWyJgef9oQMSc= +github.com/knadh/koanf/providers/env v1.1.0/go.mod h1:QhHHHZ87h9JxJAn2czdEl6pdkNnDh/JS1Vtsyt65hTY= +github.com/knadh/koanf/providers/file v1.2.0 h1:hrUJ6Y9YOA49aNu/RSYzOTFlqzXSCpmYIDXI7OJU6+U= +github.com/knadh/koanf/providers/file v1.2.0/go.mod h1:bp1PM5f83Q+TOUu10J/0ApLBd9uIzg+n9UgthfY+nRA= +github.com/knadh/koanf/providers/posflag v1.0.1 h1:EnMxHSrPkYCFnKgBUl5KBgrjed8gVFrcXDzaW4l/C6Y= +github.com/knadh/koanf/providers/posflag v1.0.1/go.mod h1:3Wn3+YG3f4ljzRyCUgIwH7G0sZ1pMjCOsNBovrbKmAk= +github.com/knadh/koanf/v2 v2.2.1 h1:jaleChtw85y3UdBnI0wCqcg1sj1gPoz6D3caGNHtrNE= +github.com/knadh/koanf/v2 v2.2.1/go.mod h1:PSFru3ufQgTsI7IF+95rf9s8XA1+aHxKuO/W+dPoHEY= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= 
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= @@ -465,8 +431,6 @@ github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc8 github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= -github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= -github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= @@ -492,22 +456,17 @@ github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5 github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= -github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= -github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77/go.mod 
h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -519,26 +478,20 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007 h1:Ohgj9L0EYOgXxkDp+bczlMBiulwmqYzQpvQNUdtt3oc= github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007/go.mod h1:wKCOWMb6iNlvKiOToY2cNuaovSXvIiv1zDi9QDR7aGQ= github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 
h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ= github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8= -github.com/npillmayer/nestext v0.1.3/go.mod h1:h2lrijH8jpicr25dFY+oAJLyzlya6jhnuG+zWp9L0Uk= github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d h1:VhgPp6v9qf9Agr/56bj7Y/xa04UccTW04VP0Qed4vnQ= github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d/go.mod h1:YUTz3bUH2ZwIWBy3CJBeOBEugqcmXREj14T+iG/4k4U= -github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde h1:x0TT0RDC7UhAVbbWWBzr41ElhJx5tXPWkIHA2HWPRuw= github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= -github.com/pelletier/go-toml v1.9.4 h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM= github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4= github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU= -github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -555,24 +508,17 @@ github.com/posener/complete 
v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSg github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= -github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= -github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= -github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= -github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= -github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= -github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/remeh/sizedwaitgroup v1.0.0 h1:VNGGFwNo/R5+MJBf6yrsr110p0m4/OX4S3DCy7Kyl5E= github.com/remeh/sizedwaitgroup 
v1.0.0/go.mod h1:3j2R4OIe/SeS6YDhICBy22RWjJC5eNCJ1V+9+NVNYlo= -github.com/rhnvrm/simples3 v0.6.1/go.mod h1:Y+3vYm2V7Y4VijFoJHHTrja6OgPrJ2cBti8dPGkC3sA= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= @@ -590,9 +536,9 @@ github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQD github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd h1:CmH9+J6ZSsIjUK3dcGsnCnO41eRBOnY12zwkn5qVwgc= github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8= +github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 h1:OkMGxebDjyw0ULyrTYWeN0UNCCkmCWfjPnIA2W6oviI= +github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06/go.mod h1:+ePHsJ1keEjQtpvf9HHw0f4ZeJ0TLRsxhunSI2hYJSs= github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig= github.com/sagikazarmark/crypt v0.4.0/go.mod h1:ALv2SRj7GxYV4HO9elxH9nS6M9gW+xDNxqmyJ6RfDFM= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= @@ -600,7 +546,6 @@ github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8= github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I= github.com/sirupsen/logrus v1.2.0/go.mod 
h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= -github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= @@ -621,8 +566,9 @@ github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I= github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= +github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM= github.com/spf13/viper v1.10.1/go.mod h1:IGlFPqhNAPKRxohIzWpI5QEy4kuI7tcl5WvR+8qy1rU= github.com/spf13/viper v1.16.0 h1:rGGH0XDZhdUOryiDWjmIvUSWpbNqisK8Wk0Vyefw8hc= @@ -683,11 +629,8 @@ github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5t github.com/zencoder/go-dash/v3 v3.0.2 h1:oP1+dOh+Gp57PkvdCyMfbHtrHaxfl3w4kR3KBBbuqQE= github.com/zencoder/go-dash/v3 v3.0.2/go.mod h1:30R5bKy1aUYY45yesjtZ9l8trNc2TwNqbS17WVQmCzk= go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= -go.etcd.io/etcd/api/v3 v3.5.4/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A= go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod 
h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= -go.etcd.io/etcd/client/pkg/v3 v3.5.4/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs= -go.etcd.io/etcd/client/v3 v3.5.4/go.mod h1:ZaRkVgBZC+L+dLCjTcF1hRXpgZXQPOvnA/Ak/gq3kiY= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -701,6 +644,8 @@ go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +go.yaml.in/yaml/v3 v3.0.3 h1:bXOww4E/J3f66rav3pX3m8w6jDE4knZjGOw8b5Y6iNE= +go.yaml.in/yaml/v3 v3.0.3/go.mod h1:tBHosrYAkRZjRAOREWbDnBXUf08JOwYq++0QNwQiWzI= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= @@ -718,8 +663,12 @@ golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.0.0-20211215165025-cf75a172585e/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8= -golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw= +golang.org/x/crypto 
v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= +golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= +golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -761,8 +710,12 @@ golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= -golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA= +golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod 
h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -811,9 +764,14 @@ golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qx golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= -golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY= -golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -843,18 +801,21 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= 
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ= -golang.org/x/sync v0.14.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190129075346-302c3dd5f1cc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190415145633-3fd5a3612ccd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -866,12 +827,10 @@ golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -886,8 +845,6 @@ golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -895,7 +852,6 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -908,7 +864,6 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -931,21 +886,30 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= -golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod 
h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= -golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= -golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= +golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= +golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= +golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20181227161524-e6919f6577db/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= @@ -953,9 +917,14 @@ golang.org/x/text v0.3.5/go.mod 
h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4= -golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -1020,8 +989,11 @@ golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.33.0 h1:4qz2S3zmRxbGIhDIAgjxvFutSvH5EfnsYrRBj0UI0bc= -golang.org/x/tools v0.33.0/go.mod 
h1:CIJMaWEY88juyUfo7UbgPqbC8rU2OqfAV1h2Qp0oMYI= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= +golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= +golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1068,7 +1040,6 @@ google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCID google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190404172233-64821d5d2107/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= @@ -1132,11 +1103,9 @@ google.golang.org/genproto v0.0.0-20211129164237-f09f9a12af12/go.mod h1:5CzLGKJ6 google.golang.org/genproto v0.0.0-20211203200212-54befc351ae9/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto 
v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.22.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= @@ -1177,7 +1146,6 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= -gopkg.in/asn1-ber.v1 v1.0.0-20181015200546-f715ec2f112d/go.mod h1:cuepJuh7vyXfUyUwEgHQXw849cJrilpS5NeIjOWESAw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -1190,14 +1158,14 @@ gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.66.3/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod 
h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/square/go-jose.v2 v2.3.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= +gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc= +gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= @@ -1214,4 +1182,3 @@ honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9 rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= -sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= diff --git a/gqlgen.yml b/gqlgen.yml index d3b8fc67f..4a3d73d51 100644 --- a/gqlgen.yml +++ b/gqlgen.yml @@ -35,6 +35,8 @@ models: model: github.com/stashapp/stash/internal/api.BoolMap PluginConfigMap: model: github.com/stashapp/stash/internal/api.PluginConfigMap + File: + model: github.com/stashapp/stash/internal/api.File VideoFile: fields: # override float fields - #1572 @@ -138,4 +140,8 @@ models: fields: plugins: resolver: true + 
Performer: + fields: + career_length: + resolver: true diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql index 51718aee3..7f07e4579 100644 --- a/graphql/schema/schema.graphql +++ b/graphql/schema/schema.graphql @@ -6,6 +6,26 @@ type Query { findDefaultFilter(mode: FilterMode!): SavedFilter @deprecated(reason: "default filter now stored in UI config") + "Find a file by its id or path" + findFile(id: ID, path: String): BaseFile! + + "Queries for Files" + findFiles( + file_filter: FileFilterType + filter: FindFilterType + ids: [ID!] + ): FindFilesResultType! + + "Find a file by its id or path" + findFolder(id: ID, path: String): Folder! + + "Queries for Files" + findFolders( + folder_filter: FolderFilterType + filter: FindFilterType + ids: [ID!] + ): FindFoldersResultType! + "Find a scene by ID or Checksum" findScene(id: ID, checksum: String): Scene findSceneByHash(input: SceneHashInput!): Scene @@ -145,6 +165,12 @@ type Query { input: ScrapeSingleStudioInput! ): [ScrapedStudio!]! + "Scrape for a single tag" + scrapeSingleTag( + source: ScraperSourceInput! + input: ScrapeSingleTagInput! + ): [ScrapedTag!]! + "Scrape for a single performer" scrapeSinglePerformer( source: ScraperSourceInput! @@ -308,6 +334,7 @@ type Mutation { sceneMarkerCreate(input: SceneMarkerCreateInput!): SceneMarker sceneMarkerUpdate(input: SceneMarkerUpdateInput!): SceneMarker + bulkSceneMarkerUpdate(input: BulkSceneMarkerUpdateInput!): [SceneMarker!] sceneMarkerDestroy(id: ID!): Boolean! sceneMarkersDestroy(ids: [ID!]!): Boolean! @@ -346,11 +373,13 @@ type Mutation { performerDestroy(input: PerformerDestroyInput!): Boolean! performersDestroy(ids: [ID!]!): Boolean! bulkPerformerUpdate(input: BulkPerformerUpdateInput!): [Performer!] + performerMerge(input: PerformerMergeInput!): Performer! studioCreate(input: StudioCreateInput!): Studio studioUpdate(input: StudioUpdateInput!): Studio studioDestroy(input: StudioDestroyInput!): Boolean! studiosDestroy(ids: [ID!]!): Boolean! 
+ bulkStudioUpdate(input: BulkStudioUpdateInput!): [Studio!] movieCreate(input: MovieCreateInput!): Movie @deprecated(reason: "Use groupCreate instead") @@ -393,8 +422,14 @@ type Mutation { """ moveFiles(input: MoveFilesInput!): Boolean! deleteFiles(ids: [ID!]!): Boolean! + "Deletes file entries from the database without deleting the files from the filesystem" + destroyFiles(ids: [ID!]!): Boolean! fileSetFingerprints(input: FileSetFingerprintsInput!): Boolean! + "Reveal the file in the system file manager" + revealFileInFileManager(id: ID!): Boolean! + "Reveal the folder in the system file manager" + revealFolderInFileManager(id: ID!): Boolean! # Saved filters saveFilter(input: SaveFilterInput!): SavedFilter! @@ -548,6 +583,8 @@ type Mutation { stashBoxBatchPerformerTag(input: StashBoxBatchTagInput!): String! "Run batch studio tag task. Returns the job ID." stashBoxBatchStudioTag(input: StashBoxBatchTagInput!): String! + "Run batch tag tag task. Returns the job ID." + stashBoxBatchTagTag(input: StashBoxBatchTagInput!): String! "Enables DLNA for an optional duration. Has no effect if DLNA is enabled by default" enableDLNA(input: EnableDLNAInput!): Boolean! diff --git a/graphql/schema/types/config.graphql b/graphql/schema/types/config.graphql index 4d6d2080b..5ab7fdfea 100644 --- a/graphql/schema/types/config.graphql +++ b/graphql/schema/types/config.graphql @@ -2,6 +2,8 @@ input SetupInput { "Empty to indicate $HOME/.stash/config.yml default" configLocation: String! stashes: [StashConfigInput!]! + "True if SFW content mode is enabled" + sfwContentMode: Boolean "Empty to indicate default" databaseFile: String! 
"Empty to indicate default" @@ -67,6 +69,8 @@ input ConfigGeneralInput { databasePath: String "Path to backup directory" backupDirectoryPath: String + "Path to trash directory - if set, deleted files will be moved here instead of being permanently deleted" + deleteTrashPath: String "Path to generated files" generatedPath: String "Path to import/export files" @@ -153,6 +157,8 @@ input ConfigGeneralInput { logLevel: String "Whether to log http access" logAccess: Boolean + "Maximum log size" + logFileMaxSize: Int "True if galleries should be created from folders with images" createGalleriesFromFolders: Boolean "Regex used to identify images as gallery covers" @@ -178,6 +184,18 @@ input ConfigGeneralInput { scraperPackageSources: [PackageSourceInput!] "Source of plugin packages" pluginPackageSources: [PackageSourceInput!] + + "Size of the longest dimension for each sprite in pixels" + spriteScreenshotSize: Int + + "True if sprite generation should use the sprite interval and min/max sprites settings instead of the default" + useCustomSpriteInterval: Boolean + "Time between two different scrubber sprites in seconds - only used if useCustomSpriteInterval is true" + spriteInterval: Float + "Minimum number of sprites to be generated - only used if useCustomSpriteInterval is true" + minimumSprites: Int + "Maximum number of sprites to be generated - only used if useCustomSpriteInterval is true" + maximumSprites: Int } type ConfigGeneralResult { @@ -187,6 +205,8 @@ type ConfigGeneralResult { databasePath: String! "Path to backup directory" backupDirectoryPath: String! + "Path to trash directory - if set, deleted files will be moved here instead of being permanently deleted" + deleteTrashPath: String! "Path to generated files" generatedPath: String! "Path to import/export files" @@ -277,6 +297,18 @@ type ConfigGeneralResult { logLevel: String! "Whether to log http access" logAccess: Boolean! + "Maximum log size" + logFileMaxSize: Int! 
+ "True if sprite generation should use the sprite interval and min/max sprites settings instead of the default" + useCustomSpriteInterval: Boolean! + "Time between two different scrubber sprites in seconds - only used if useCustomSpriteInterval is true" + spriteInterval: Float! + "Minimum number of sprites to be generated - only used if useCustomSpriteInterval is true" + minimumSprites: Int! + "Maximum number of sprites to be generated - only used if useCustomSpriteInterval is true" + maximumSprites: Int! + "Size of the longest dimension for each sprite in pixels" + spriteScreenshotSize: Int! "Array of video file extensions" videoExtensions: [String!]! "Array of image file extensions" @@ -309,6 +341,7 @@ input ConfigDisableDropdownCreateInput { tag: Boolean studio: Boolean movie: Boolean + gallery: Boolean } enum ImageLightboxDisplayMode { @@ -329,6 +362,7 @@ input ConfigImageLightboxInput { resetZoomOnNav: Boolean scrollMode: ImageLightboxScrollMode scrollAttemptsBeforeChange: Int + disableAnimation: Boolean } type ConfigImageLightboxResult { @@ -338,9 +372,13 @@ type ConfigImageLightboxResult { resetZoomOnNav: Boolean scrollMode: ImageLightboxScrollMode scrollAttemptsBeforeChange: Int! + disableAnimation: Boolean } input ConfigInterfaceInput { + "True if SFW content mode is enabled" + sfwContentMode: Boolean + "Ordered list of items that should be shown in the menu" menuItems: [String!] @@ -379,6 +417,9 @@ input ConfigInterfaceInput { customLocales: String customLocalesEnabled: Boolean + "When true, disables all customizations (plugins, CSS, JavaScript, locales) for troubleshooting" + disableCustomizations: Boolean + "Interface language" language: String @@ -404,9 +445,13 @@ type ConfigDisableDropdownCreate { tag: Boolean! studio: Boolean! movie: Boolean! + gallery: Boolean! } type ConfigInterfaceResult { + "True if SFW content mode is enabled" + sfwContentMode: Boolean! + "Ordered list of items that should be shown in the menu" menuItems: [String!] 
@@ -449,6 +494,9 @@ type ConfigInterfaceResult { customLocales: String customLocalesEnabled: Boolean + "When true, disables all customizations (plugins, CSS, JavaScript, locales) for troubleshooting" + disableCustomizations: Boolean + "Interface language" language: String diff --git a/graphql/schema/types/file.graphql b/graphql/schema/types/file.graphql index 8dea777bd..fcc2a58c8 100644 --- a/graphql/schema/types/file.graphql +++ b/graphql/schema/types/file.graphql @@ -6,9 +6,18 @@ type Fingerprint { type Folder { id: ID! path: String! + basename: String! - parent_folder_id: ID - zip_file_id: ID + parent_folder_id: ID @deprecated(reason: "Use parent_folder instead") + zip_file_id: ID @deprecated(reason: "Use zip_file instead") + + parent_folder: Folder + "Returns all parent folders in order from immediate parent to top-level" + parent_folders: [Folder!]! + zip_file: BasicFile + + "Returns direct sub-folders" + sub_folders: [Folder!]! mod_time: Time! @@ -21,8 +30,32 @@ interface BaseFile { path: String! basename: String! - parent_folder_id: ID! - zip_file_id: ID + parent_folder_id: ID! @deprecated(reason: "Use parent_folder instead") + zip_file_id: ID @deprecated(reason: "Use zip_file instead") + + parent_folder: Folder! + zip_file: BasicFile + + mod_time: Time! + size: Int64! + + fingerprint(type: String!): String + fingerprints: [Fingerprint!]! + + created_at: Time! + updated_at: Time! +} + +type BasicFile implements BaseFile { + id: ID! + path: String! + basename: String! + + parent_folder_id: ID! @deprecated(reason: "Use parent_folder instead") + zip_file_id: ID @deprecated(reason: "Use zip_file instead") + + parent_folder: Folder! + zip_file: BasicFile mod_time: Time! size: Int64! @@ -39,8 +72,11 @@ type VideoFile implements BaseFile { path: String! basename: String! - parent_folder_id: ID! - zip_file_id: ID + parent_folder_id: ID! 
@deprecated(reason: "Use parent_folder instead") + zip_file_id: ID @deprecated(reason: "Use zip_file instead") + + parent_folder: Folder! + zip_file: BasicFile mod_time: Time! size: Int64! @@ -66,8 +102,11 @@ type ImageFile implements BaseFile { path: String! basename: String! - parent_folder_id: ID! - zip_file_id: ID + parent_folder_id: ID! @deprecated(reason: "Use parent_folder instead") + zip_file_id: ID @deprecated(reason: "Use zip_file instead") + + parent_folder: Folder! + zip_file: BasicFile mod_time: Time! size: Int64! @@ -75,6 +114,7 @@ type ImageFile implements BaseFile { fingerprint(type: String!): String fingerprints: [Fingerprint!]! + format: String! width: Int! height: Int! @@ -89,8 +129,11 @@ type GalleryFile implements BaseFile { path: String! basename: String! - parent_folder_id: ID! - zip_file_id: ID + parent_folder_id: ID! @deprecated(reason: "Use parent_folder instead") + zip_file_id: ID @deprecated(reason: "Use zip_file instead") + + parent_folder: Folder! + zip_file: BasicFile mod_time: Time! size: Int64! @@ -116,7 +159,7 @@ input MoveFilesInput { input SetFingerprintsInput { type: String! - "an null value will remove the fingerprint" + "a null value will remove the fingerprint" value: String } @@ -125,3 +168,22 @@ input FileSetFingerprintsInput { "only supplied fingerprint types will be modified" fingerprints: [SetFingerprintsInput!]! } + +type FindFilesResultType { + count: Int! + + "Total megapixels of any image files" + megapixels: Float! + "Total duration in seconds of any video files" + duration: Float! + + "Total file size in bytes" + size: Int! + + files: [BaseFile!]! +} + +type FindFoldersResultType { + count: Int! + folders: [Folder!]! +} diff --git a/graphql/schema/types/filters.graphql b/graphql/schema/types/filters.graphql index 14bb8680b..c7d880266 100644 --- a/graphql/schema/types/filters.graphql +++ b/graphql/schema/types/filters.graphql @@ -75,22 +75,48 @@ input OrientationCriterionInput { value: [OrientationEnum!]! 
} -input PHashDuplicationCriterionInput { - duplicated: Boolean - "Currently unimplemented" +input DuplicationCriterionInput { + duplicated: Boolean @deprecated(reason: "Use phash field instead") + "Currently unimplemented. Intended for phash distance matching." distance: Int + "Filter by phash duplication" + phash: Boolean + "Filter by URL duplication" + url: Boolean + "Filter by Stash ID duplication" + stash_id: Boolean + "Filter by title duplication" + title: Boolean +} + +input FileDuplicationCriterionInput { + duplicated: Boolean @deprecated(reason: "Use phash field instead") + "Currently unimplemented. Intended for phash distance matching." + distance: Int + "Filter by phash duplication" + phash: Boolean } input StashIDCriterionInput { """ If present, this value is treated as a predicate. - That is, it will filter based on stash_ids with the matching endpoint + That is, it will filter based on stash_id with the matching endpoint """ endpoint: String stash_id: String modifier: CriterionModifier! } +input StashIDsCriterionInput { + """ + If present, this value is treated as a predicate. + That is, it will filter based on stash_ids with the matching endpoint + """ + endpoint: String + stash_ids: [String] + modifier: CriterionModifier! +} + input CustomFieldCriterionInput { field: String! value: [Any!] @@ -126,10 +152,15 @@ input PerformerFilterType { fake_tits: StringCriterionInput "Filter by penis length value" penis_length: FloatCriterionInput - "Filter by ciricumcision" + "Filter by circumcision" circumcised: CircumcisionCriterionInput - "Filter by career length" + "Deprecated: use career_start and career_end. This filter is non-functional." 
career_length: StringCriterionInput + @deprecated(reason: "Use career_start and career_end") + "Filter by career start" + career_start: DateCriterionInput + "Filter by career end" + career_end: DateCriterionInput "Filter by tattoos" tattoos: StringCriterionInput "Filter by piercings" @@ -146,6 +177,8 @@ input PerformerFilterType { tag_count: IntCriterionInput "Filter by scene count" scene_count: IntCriterionInput + "Filter by marker count (via scene)" + marker_count: IntCriterionInput "Filter by image count" image_count: IntCriterionInput "Filter by gallery count" @@ -156,6 +189,9 @@ input PerformerFilterType { o_counter: IntCriterionInput "Filter by StashID" stash_id_endpoint: StashIDCriterionInput + @deprecated(reason: "use stash_ids_endpoint instead") + "Filter by StashIDs" + stash_ids_endpoint: StashIDsCriterionInput # rating expressed as 1-100 rating100: IntCriterionInput "Filter by url" @@ -186,6 +222,8 @@ input PerformerFilterType { galleries_filter: GalleryFilterType "Filter by related tags that meet this criteria" tags_filter: TagFilterType + "Filter by related scene markers (via scene) that meet this criteria" + markers_filter: SceneMarkerFilterType "Filter by creation time" created_at: TimestampCriterionInput "Filter by last update time" @@ -211,9 +249,9 @@ input SceneMarkerFilterType { updated_at: TimestampCriterionInput "Filter by scene date" scene_date: DateCriterionInput - "Filter by cscene reation time" + "Filter by scene creation time" scene_created_at: TimestampCriterionInput - "Filter by lscene ast update time" + "Filter by scene last update time" scene_updated_at: TimestampCriterionInput "Filter by related scenes that meet this criteria" scene_filter: SceneFilterType @@ -248,8 +286,8 @@ input SceneFilterType { organized: Boolean "Filter by o-counter" o_counter: IntCriterionInput - "Filter Scenes that have an exact phash match available" - duplicated: PHashDuplicationCriterionInput + "Filter Scenes by duplication criteria" + duplicated: 
DuplicationCriterionInput "Filter by resolution" resolution: ResolutionCriterionInput "Filter by orientation" @@ -292,6 +330,11 @@ input SceneFilterType { performer_count: IntCriterionInput "Filter by StashID" stash_id_endpoint: StashIDCriterionInput + @deprecated(reason: "use stash_ids_endpoint instead") + "Filter by StashIDs" + stash_ids_endpoint: StashIDsCriterionInput + "Filter by StashID count" + stash_id_count: IntCriterionInput "Filter by url" url: StringCriterionInput "Filter by interactive" @@ -330,6 +373,10 @@ input SceneFilterType { groups_filter: GroupFilterType "Filter by related markers that meet this criteria" markers_filter: SceneMarkerFilterType + "Filter by related files that meet this criteria" + files_filter: FileFilterType + + custom_fields: [CustomFieldCriterionInput!] } input MovieFilterType { @@ -401,6 +448,8 @@ input GroupFilterType { created_at: TimestampCriterionInput "Filter by last update time" updated_at: TimestampCriterionInput + "Filter by o-counter" + o_counter: IntCriterionInput "Filter by containing groups" containing_groups: HierarchicalMultiCriterionInput @@ -410,11 +459,16 @@ input GroupFilterType { containing_group_count: IntCriterionInput "Filter by number of sub-groups the group has" sub_group_count: IntCriterionInput + "Filter by number of scenes the group has" + scene_count: IntCriterionInput "Filter by related scenes that meet this criteria" scenes_filter: SceneFilterType "Filter by related studios that meet this criteria" studios_filter: StudioFilterType + + "Filter by custom fields" + custom_fields: [CustomFieldCriterionInput!] 
} input StudioFilterType { @@ -428,6 +482,9 @@ input StudioFilterType { parents: MultiCriterionInput "Filter by StashID" stash_id_endpoint: StashIDCriterionInput + @deprecated(reason: "use stash_ids_endpoint instead") + "Filter by StashIDs" + stash_ids_endpoint: StashIDsCriterionInput "Filter to only include studios with these tags" tags: HierarchicalMultiCriterionInput "Filter to only include studios missing this property" @@ -442,6 +499,8 @@ input StudioFilterType { image_count: IntCriterionInput "Filter by gallery count" gallery_count: IntCriterionInput + "Filter by group count" + group_count: IntCriterionInput "Filter by tag count" tag_count: IntCriterionInput "Filter by url" @@ -452,16 +511,22 @@ input StudioFilterType { child_count: IntCriterionInput "Filter by autotag ignore value" ignore_auto_tag: Boolean + "Filter by organized" + organized: Boolean "Filter by related scenes that meet this criteria" scenes_filter: SceneFilterType "Filter by related images that meet this criteria" images_filter: ImageFilterType "Filter by related galleries that meet this criteria" galleries_filter: GalleryFilterType + "Filter by related groups that meet this criteria" + groups_filter: GroupFilterType "Filter by creation time" created_at: TimestampCriterionInput "Filter by last update time" updated_at: TimestampCriterionInput + + custom_fields: [CustomFieldCriterionInput!] } input GalleryFilterType { @@ -534,6 +599,14 @@ input GalleryFilterType { studios_filter: StudioFilterType "Filter by related tags that meet this criteria" tags_filter: TagFilterType + "Filter by related files that meet this criteria" + files_filter: FileFilterType + "Filter by related folders that meet this criteria" + folders_filter: FolderFilterType + "Filter by parent folder of the zip or folder the gallery is in" + parent_folder: HierarchicalMultiCriterionInput + + custom_fields: [CustomFieldCriterionInput!] 
} input TagFilterType { @@ -592,24 +665,41 @@ input TagFilterType { "Filter by number of parent tags the tag has" parent_count: IntCriterionInput - "Filter by number f child tags the tag has" + "Filter by number of child tags the tag has" child_count: IntCriterionInput "Filter by autotag ignore value" ignore_auto_tag: Boolean + "Filter by StashID" + stash_id_endpoint: StashIDCriterionInput + @deprecated(reason: "use stash_ids_endpoint instead") + + "Filter by StashIDs" + stash_ids_endpoint: StashIDsCriterionInput + "Filter by related scenes that meet this criteria" scenes_filter: SceneFilterType "Filter by related images that meet this criteria" images_filter: ImageFilterType "Filter by related galleries that meet this criteria" galleries_filter: GalleryFilterType + "Filter by related groups that meet this criteria" + groups_filter: GroupFilterType + "Filter by related performers that meet this criteria" + performers_filter: PerformerFilterType + "Filter by related studios that meet this criteria" + studios_filter: StudioFilterType + "Filter by related scene markers that meet this criteria" + markers_filter: SceneMarkerFilterType "Filter by creation time" created_at: TimestampCriterionInput "Filter by last update time" updated_at: TimestampCriterionInput + + custom_fields: [CustomFieldCriterionInput!] } input ImageFilterType { @@ -624,6 +714,8 @@ input ImageFilterType { id: IntCriterionInput "Filter by file checksum" checksum: StringCriterionInput + "Filter by file phash distance" + phash_distance: PhashDistanceCriterionInput "Filter by path" path: StringCriterionInput "Filter by file count" @@ -679,6 +771,109 @@ input ImageFilterType { studios_filter: StudioFilterType "Filter by related tags that meet this criteria" tags_filter: TagFilterType + "Filter by related files that meet this criteria" + files_filter: FileFilterType + "Filter by custom fields" + custom_fields: [CustomFieldCriterionInput!] 
+} + +input FileFilterType { + AND: FileFilterType + OR: FileFilterType + NOT: FileFilterType + + path: StringCriterionInput + basename: StringCriterionInput + dir: StringCriterionInput + + parent_folder: HierarchicalMultiCriterionInput + zip_file: MultiCriterionInput + + "Filter by modification time" + mod_time: TimestampCriterionInput + + "Filter files by duplication criteria (only phash applies to files)" + duplicated: FileDuplicationCriterionInput + + "find files based on hash" + hashes: [FingerprintFilterInput!] + + video_file_filter: VideoFileFilterInput + image_file_filter: ImageFileFilterInput + + scene_count: IntCriterionInput + image_count: IntCriterionInput + gallery_count: IntCriterionInput + + "Filter by related scenes that meet this criteria" + scenes_filter: SceneFilterType + "Filter by related images that meet this criteria" + images_filter: ImageFilterType + "Filter by related galleries that meet this criteria" + galleries_filter: GalleryFilterType + + "Filter by creation time" + created_at: TimestampCriterionInput + "Filter by last update time" + updated_at: TimestampCriterionInput +} + +input FolderFilterType { + AND: FolderFilterType + OR: FolderFilterType + NOT: FolderFilterType + + path: StringCriterionInput + basename: StringCriterionInput + + parent_folder: HierarchicalMultiCriterionInput + zip_file: MultiCriterionInput + + "Filter by modification time" + mod_time: TimestampCriterionInput + + gallery_count: IntCriterionInput + + "Filter by files that meet this criteria" + files_filter: FileFilterType + "Filter by related galleries that meet this criteria" + galleries_filter: GalleryFilterType + + "Filter by creation time" + created_at: TimestampCriterionInput + "Filter by last update time" + updated_at: TimestampCriterionInput +} + +input VideoFileFilterInput { + resolution: ResolutionCriterionInput + orientation: OrientationCriterionInput + framerate: IntCriterionInput + bitrate: IntCriterionInput + format: StringCriterionInput + 
video_codec: StringCriterionInput + audio_codec: StringCriterionInput + + "in seconds" + duration: IntCriterionInput + + captions: StringCriterionInput + + interactive: Boolean + interactive_speed: IntCriterionInput +} + +input ImageFileFilterInput { + format: StringCriterionInput + resolution: ResolutionCriterionInput + orientation: OrientationCriterionInput +} + +input FingerprintFilterInput { + type: String! + value: String! + "Hamming distance - defaults to 0" + distance: Int } enum CriterionModifier { @@ -738,7 +933,7 @@ input GenderCriterionInput { } input CircumcisionCriterionInput { - value: [CircumisedEnum!] + value: [CircumcisedEnum!] modifier: CriterionModifier! } diff --git a/graphql/schema/types/gallery.graphql b/graphql/schema/types/gallery.graphql index 999a743f7..e28c3802b 100644 --- a/graphql/schema/types/gallery.graphql +++ b/graphql/schema/types/gallery.graphql @@ -32,6 +32,7 @@ type Gallery { cover: Image paths: GalleryPathsType! # Resolver + custom_fields: Map! image(index: Int!): Image! } @@ -50,6 +51,8 @@ input GalleryCreateInput { studio_id: ID tag_ids: [ID!] performer_ids: [ID!] + + custom_fields: Map } input GalleryUpdateInput { @@ -71,6 +74,8 @@ input GalleryUpdateInput { performer_ids: [ID!] 
primary_file_id: ID + + custom_fields: CustomFieldsInput } input BulkGalleryUpdateInput { @@ -89,6 +94,8 @@ input BulkGalleryUpdateInput { studio_id: ID tag_ids: BulkUpdateIds performer_ids: BulkUpdateIds + + custom_fields: CustomFieldsInput } input GalleryDestroyInput { @@ -100,6 +107,8 @@ input GalleryDestroyInput { """ delete_file: Boolean delete_generated: Boolean + "If true, delete the file entry from the database if the file is not assigned to any other objects" + destroy_file_entry: Boolean } type FindGalleriesResultType { diff --git a/graphql/schema/types/group.graphql b/graphql/schema/types/group.graphql index 35fc17a68..8610f39dc 100644 --- a/graphql/schema/types/group.graphql +++ b/graphql/schema/types/group.graphql @@ -30,6 +30,8 @@ type Group { performer_count(depth: Int): Int! # Resolver sub_group_count(depth: Int): Int! # Resolver scenes: [Scene!]! + o_counter: Int # Resolver + custom_fields: Map! } input GroupDescriptionInput { @@ -58,6 +60,8 @@ input GroupCreateInput { front_image: String "This should be a URL or a base64 encoded data URL" back_image: String + + custom_fields: Map } input GroupUpdateInput { @@ -81,6 +85,8 @@ input GroupUpdateInput { front_image: String "This should be a URL or a base64 encoded data URL" back_image: String + + custom_fields: CustomFieldsInput } input BulkUpdateGroupDescriptionsInput { @@ -93,6 +99,8 @@ input BulkGroupUpdateInput { ids: [ID!] 
# rating expressed as 1-100 rating100: Int + date: String + synopsis: String studio_id: ID director: String urls: BulkUpdateStrings @@ -100,6 +108,8 @@ input BulkGroupUpdateInput { containing_groups: BulkUpdateGroupDescriptionsInput sub_groups: BulkUpdateGroupDescriptionsInput + + custom_fields: CustomFieldsInput } input GroupDestroyInput { diff --git a/graphql/schema/types/image.graphql b/graphql/schema/types/image.graphql index fb95556f5..ccc414542 100644 --- a/graphql/schema/types/image.graphql +++ b/graphql/schema/types/image.graphql @@ -21,6 +21,7 @@ type Image { studio: Studio tags: [Tag!]! performers: [Performer!]! + custom_fields: Map! } type ImageFileType { @@ -56,6 +57,7 @@ input ImageUpdateInput { gallery_ids: [ID!] primary_file_id: ID + custom_fields: CustomFieldsInput } input BulkImageUpdateInput { @@ -76,18 +78,23 @@ input BulkImageUpdateInput { performer_ids: BulkUpdateIds tag_ids: BulkUpdateIds gallery_ids: BulkUpdateIds + custom_fields: CustomFieldsInput } input ImageDestroyInput { id: ID! delete_file: Boolean delete_generated: Boolean + "If true, delete the file entry from the database if the file is not assigned to any other objects" + destroy_file_entry: Boolean } input ImagesDestroyInput { ids: [ID!]! 
delete_file: Boolean delete_generated: Boolean + "If true, delete the file entry from the database if the file is not assigned to any other objects" + destroy_file_entry: Boolean } type FindImagesResultType { diff --git a/graphql/schema/types/metadata.graphql b/graphql/schema/types/metadata.graphql index 923c25b4c..6ad620dbe 100644 --- a/graphql/schema/types/metadata.graphql +++ b/graphql/schema/types/metadata.graphql @@ -10,8 +10,11 @@ input GenerateMetadataInput { transcodes: Boolean "Generate transcodes even if not required" forceTranscodes: Boolean + "Generate video phashes during scan" phashes: Boolean interactiveHeatmapsSpeeds: Boolean + "Generate image phashes during scan" + imagePhashes: Boolean imageThumbnails: Boolean clipPreviews: Boolean @@ -19,6 +22,12 @@ input GenerateMetadataInput { sceneIDs: [ID!] "marker ids to generate for" markerIDs: [ID!] + "image ids to generate for" + imageIDs: [ID!] + "gallery ids to generate for" + galleryIDs: [ID!] + "paths to run generate on, in addition to the other ID lists" + paths: [String!] "overwrite existing media" overwrite: Boolean @@ -85,8 +94,10 @@ input ScanMetadataInput { scanGenerateImagePreviews: Boolean "Generate sprites during scan" scanGenerateSprites: Boolean - "Generate phashes during scan" + "Generate video phashes during scan" scanGeneratePhashes: Boolean + "Generate image phashes during scan" + scanGenerateImagePhashes: Boolean "Generate image thumbnails during scan" scanGenerateThumbnails: Boolean "Generate image clip previews during scan" @@ -107,8 +118,10 @@ type ScanMetadataOptions { scanGenerateImagePreviews: Boolean! "Generate sprites during scan" scanGenerateSprites: Boolean! - "Generate phashes during scan" + "Generate video phashes during scan" scanGeneratePhashes: Boolean! + "Generate image phashes during scan" + scanGenerateImagePhashes: Boolean "Generate image thumbnails during scan" scanGenerateThumbnails: Boolean! 
"Generate image clip previews during scan" @@ -118,6 +131,14 @@ type ScanMetadataOptions { input CleanMetadataInput { paths: [String!] + """ + Don't check zip file contents when determining whether to clean a file. + This can significantly speed up the clean process, but will potentially miss removed files within zip files. + Where users do not modify zip files contents directly, this should be safe to use. + Defaults to false. + """ + ignoreZipFileContents: Boolean + "Do a dry run. Don't delete any files" dryRun: Boolean! } @@ -204,7 +225,9 @@ input IdentifyMetadataOptionsInput { setCoverImage: Boolean setOrganized: Boolean "defaults to true if not provided" - includeMalePerformers: Boolean + includeMalePerformers: Boolean @deprecated(reason: "Use performerGenders") + "Filter to only include performers with these genders. If not provided, all genders are included." + performerGenders: [GenderEnum!] "defaults to true if not provided" skipMultipleMatches: Boolean "tag to tag skipped multiple matches with" @@ -249,7 +272,9 @@ type IdentifyMetadataOptions { setCoverImage: Boolean setOrganized: Boolean "defaults to true if not provided" - includeMalePerformers: Boolean + includeMalePerformers: Boolean @deprecated(reason: "Use performerGenders") + "Filter to only include performers with these genders. If not provided, all genders are included." + performerGenders: [GenderEnum!] "defaults to true if not provided" skipMultipleMatches: Boolean "tag to tag skipped multiple matches with" @@ -310,6 +335,8 @@ input ImportObjectsInput { input BackupDatabaseInput { download: Boolean + "If true, blob files will be included in the backup. This can significantly increase the size of the backup and the time it takes to create it, but allows for a complete backup of the system that can be restored without needing access to the original media files." 
+ includeBlobs: Boolean } input AnonymiseDatabaseInput { @@ -344,4 +371,6 @@ input CustomFieldsInput { full: Map "If populated, only the keys in this map will be updated" partial: Map + "Remove any keys in this list" + remove: [String!] } diff --git a/graphql/schema/types/performer.graphql b/graphql/schema/types/performer.graphql index fbb67ce8f..bf17298da 100644 --- a/graphql/schema/types/performer.graphql +++ b/graphql/schema/types/performer.graphql @@ -7,7 +7,7 @@ enum GenderEnum { NON_BINARY } -enum CircumisedEnum { +enum CircumcisedEnum { CUT UNCUT } @@ -29,8 +29,10 @@ type Performer { measurements: String fake_tits: String penis_length: Float - circumcised: CircumisedEnum - career_length: String + circumcised: CircumcisedEnum + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: String + career_end: String tattoos: String piercings: String alias_list: [String!]! @@ -76,10 +78,13 @@ input PerformerCreateInput { measurements: String fake_tits: String penis_length: Float - circumcised: CircumisedEnum - career_length: String + circumcised: CircumcisedEnum + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: String + career_end: String tattoos: String piercings: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" alias_list: [String!] twitter: String @deprecated(reason: "Use urls") instagram: String @deprecated(reason: "Use urls") @@ -114,10 +119,13 @@ input PerformerUpdateInput { measurements: String fake_tits: String penis_length: Float - circumcised: CircumisedEnum - career_length: String + circumcised: CircumcisedEnum + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: String + career_end: String tattoos: String piercings: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" alias_list: [String!] 
twitter: String @deprecated(reason: "Use urls") instagram: String @deprecated(reason: "Use urls") @@ -157,10 +165,13 @@ input BulkPerformerUpdateInput { measurements: String fake_tits: String penis_length: Float - circumcised: CircumisedEnum - career_length: String + circumcised: CircumcisedEnum + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: String + career_end: String tattoos: String piercings: String + "Duplicate aliases and those equal to name will result in an error (case-insensitive)" alias_list: BulkUpdateStrings twitter: String @deprecated(reason: "Use urls") instagram: String @deprecated(reason: "Use urls") @@ -185,3 +196,10 @@ type FindPerformersResultType { count: Int! performers: [Performer!]! } + +input PerformerMergeInput { + source: [ID!]! + destination: ID! + # values defined here will override values in the destination + values: PerformerUpdateInput +} diff --git a/graphql/schema/types/scene-marker.graphql b/graphql/schema/types/scene-marker.graphql index 6d1441213..9312c5aa3 100644 --- a/graphql/schema/types/scene-marker.graphql +++ b/graphql/schema/types/scene-marker.graphql @@ -42,6 +42,13 @@ input SceneMarkerUpdateInput { tag_ids: [ID!] } +input BulkSceneMarkerUpdateInput { + ids: [ID!] + title: String + primary_tag_id: ID + tag_ids: BulkUpdateIds +} + type FindSceneMarkersResultType { count: Int! scene_markers: [SceneMarker!]! diff --git a/graphql/schema/types/scene.graphql b/graphql/schema/types/scene.graphql index eca01d15e..4d99e0a21 100644 --- a/graphql/schema/types/scene.graphql +++ b/graphql/schema/types/scene.graphql @@ -79,6 +79,8 @@ type Scene { performers: [Performer!]! stash_ids: [StashID!]! + custom_fields: Map! + "Return valid stream paths" sceneStreams: [SceneStreamEndpoint!]! } @@ -120,6 +122,8 @@ input SceneCreateInput { Files must not already be primary for another scene. """ file_ids: [ID!] 
+ + custom_fields: Map } input SceneUpdateInput { @@ -158,6 +162,8 @@ input SceneUpdateInput { ) primary_file_id: ID + + custom_fields: CustomFieldsInput } enum BulkUpdateIdMode { @@ -190,18 +196,24 @@ input BulkSceneUpdateInput { tag_ids: BulkUpdateIds group_ids: BulkUpdateIds movie_ids: BulkUpdateIds @deprecated(reason: "Use group_ids") + + custom_fields: CustomFieldsInput } input SceneDestroyInput { id: ID! delete_file: Boolean delete_generated: Boolean + "If true, delete the file entry from the database if the file is not assigned to any other objects" + destroy_file_entry: Boolean } input ScenesDestroyInput { ids: [ID!]! delete_file: Boolean delete_generated: Boolean + "If true, delete the file entry from the database if the file is not assigned to any other objects" + destroy_file_entry: Boolean } type FindScenesResultType { diff --git a/graphql/schema/types/scraped-performer.graphql b/graphql/schema/types/scraped-performer.graphql index 487c89516..799b5cd6e 100644 --- a/graphql/schema/types/scraped-performer.graphql +++ b/graphql/schema/types/scraped-performer.graphql @@ -18,7 +18,9 @@ type ScrapedPerformer { fake_tits: String penis_length: String circumcised: String - career_length: String + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: String + career_end: String tattoos: String piercings: String # aliases must be comma-delimited to be parsed correctly @@ -54,7 +56,9 @@ input ScrapedPerformerInput { fake_tits: String penis_length: String circumcised: String - career_length: String + career_length: String @deprecated(reason: "Use career_start and career_end") + career_start: String + career_end: String tattoos: String piercings: String aliases: String diff --git a/graphql/schema/types/scraper.graphql b/graphql/schema/types/scraper.graphql index 8d430be5f..fafd928f7 100644 --- a/graphql/schema/types/scraper.graphql +++ b/graphql/schema/types/scraper.graphql @@ -55,9 +55,14 @@ type ScrapedStudio { "Set if studio 
matched" stored_id: ID name: String! - url: String + url: String @deprecated(reason: "use urls") + urls: [String!] parent: ScrapedStudio image: String + details: String + "Aliases must be comma-delimited to be parsed correctly" + aliases: String + tags: [ScrapedTag!] remote_site_id: String } @@ -66,6 +71,11 @@ type ScrapedTag { "Set if tag matched" stored_id: ID name: String! + description: String + alias_list: [String!] + parent: ScrapedTag + "Remote site ID, if applicable" + remote_site_id: String } type ScrapedScene { @@ -191,6 +201,13 @@ input ScrapeSingleStudioInput { query: String } +input ScrapeSingleTagInput { + """ + Query can be either a name or a Stash ID + """ + query: String +} + input ScrapeSinglePerformerInput { "Instructs to query by string" query: String @@ -274,7 +291,10 @@ type StashBoxFingerprint { duration: Int! } -"If neither ids nor names are set, tag all items" +""" +Accepts either ids, or a combination of names and stash_ids. +If none are set, then all existing items will be tagged. +""" input StashBoxBatchTagInput { "Stash endpoint to use for the tagging" endpoint: Int @deprecated(reason: "use stash_box_endpoint") @@ -286,12 +306,17 @@ input StashBoxBatchTagInput { refresh: Boolean! "If batch adding studios, should their parent studios also be created?" createParent: Boolean! - "If set, only tag these ids" + """ + IDs in stash of the items to update. + If set, names and stash_ids fields will be ignored. + """ ids: [ID!] - "If set, only tag these names" + "Names of the items in the stash-box instance to search for and create" names: [String!] - "If set, only tag these performer ids" + "Stash IDs of the items in the stash-box instance to search for and create" + stash_ids: [String!] + "IDs in stash of the performers to update" performer_ids: [ID!] @deprecated(reason: "use ids") - "If set, only tag these performer names" + "Names of the performers in the stash-box instance to search for and create" performer_names: [String!] 
@deprecated(reason: "use names") } diff --git a/graphql/schema/types/studio.graphql b/graphql/schema/types/studio.graphql index 7823bf0c4..51a87bf4f 100644 --- a/graphql/schema/types/studio.graphql +++ b/graphql/schema/types/studio.graphql @@ -1,12 +1,14 @@ type Studio { id: ID! name: String! - url: String + url: String @deprecated(reason: "Use urls") + urls: [String!]! parent_studio: Studio child_studios: [Studio!]! aliases: [String!]! tags: [Tag!]! ignore_auto_tag: Boolean! + organized: Boolean! image_path: String # Resolver scene_count(depth: Int): Int! # Resolver @@ -24,11 +26,15 @@ type Studio { updated_at: Time! groups: [Group!]! movies: [Movie!]! @deprecated(reason: "use groups instead") + o_counter: Int + + custom_fields: Map! } input StudioCreateInput { name: String! - url: String + url: String @deprecated(reason: "Use urls") + urls: [String!] parent_id: ID "This should be a URL or a base64 encoded data URL" image: String @@ -37,15 +43,20 @@ input StudioCreateInput { rating100: Int favorite: Boolean details: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" aliases: [String!] tag_ids: [ID!] ignore_auto_tag: Boolean + organized: Boolean + + custom_fields: Map } input StudioUpdateInput { id: ID! name: String - url: String + url: String @deprecated(reason: "Use urls") + urls: [String!] parent_id: ID "This should be a URL or a base64 encoded data URL" image: String @@ -54,9 +65,27 @@ input StudioUpdateInput { rating100: Int favorite: Boolean details: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" aliases: [String!] tag_ids: [ID!] ignore_auto_tag: Boolean + organized: Boolean + + custom_fields: CustomFieldsInput +} + +input BulkStudioUpdateInput { + ids: [ID!]! 
+ url: String @deprecated(reason: "Use urls") + urls: BulkUpdateStrings + parent_id: ID + # rating expressed as 1-100 + rating100: Int + favorite: Boolean + details: String + tag_ids: BulkUpdateIds + ignore_auto_tag: Boolean + organized: Boolean } input StudioDestroyInput { diff --git a/graphql/schema/types/tag.graphql b/graphql/schema/types/tag.graphql index 504f23e3d..0acbc927f 100644 --- a/graphql/schema/types/tag.graphql +++ b/graphql/schema/types/tag.graphql @@ -9,6 +9,7 @@ type Tag { created_at: Time! updated_at: Time! favorite: Boolean! + stash_ids: [StashID!]! image_path: String # Resolver scene_count(depth: Int): Int! # Resolver scene_marker_count(depth: Int): Int! # Resolver @@ -23,6 +24,7 @@ type Tag { parent_count: Int! # Resolver child_count: Int! # Resolver + custom_fields: Map! } input TagCreateInput { @@ -30,14 +32,18 @@ input TagCreateInput { "Value that does not appear in the UI but overrides name for sorting" sort_name: String description: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" aliases: [String!] ignore_auto_tag: Boolean favorite: Boolean "This should be a URL or a base64 encoded data URL" image: String + stash_ids: [StashIDInput!] parent_ids: [ID!] child_ids: [ID!] + + custom_fields: Map } input TagUpdateInput { @@ -46,14 +52,18 @@ input TagUpdateInput { "Value that does not appear in the UI but overrides name for sorting" sort_name: String description: String + "Duplicate aliases and those equal to name will be ignored (case-insensitive)" aliases: [String!] ignore_auto_tag: Boolean favorite: Boolean "This should be a URL or a base64 encoded data URL" image: String + stash_ids: [StashIDInput!] parent_ids: [ID!] child_ids: [ID!] + + custom_fields: CustomFieldsInput } input TagDestroyInput { @@ -68,11 +78,14 @@ type FindTagsResultType { input TagsMergeInput { source: [ID!]! destination: ID! 
+ # values defined here will override values in the destination + values: TagUpdateInput } input BulkTagUpdateInput { ids: [ID!] description: String + "Duplicate aliases and those equal to name will result in an error (case-insensitive)" aliases: BulkUpdateStrings ignore_auto_tag: Boolean favorite: Boolean diff --git a/graphql/stash-box/query.graphql b/graphql/stash-box/query.graphql index f7528e728..ebaf05648 100644 --- a/graphql/stash-box/query.graphql +++ b/graphql/stash-box/query.graphql @@ -13,6 +13,7 @@ fragment ImageFragment on Image { fragment StudioFragment on Studio { name id + aliases urls { ...URLFragment } @@ -28,6 +29,13 @@ fragment StudioFragment on Studio { fragment TagFragment on Tag { name id + description + aliases + category { + id + name + description + } } fragment MeasurementsFragment on Measurements { @@ -119,18 +127,6 @@ fragment SceneFragment on Scene { } } -query FindSceneByFingerprint($fingerprint: FingerprintQueryInput!) { - findSceneByFingerprint(fingerprint: $fingerprint) { - ...SceneFragment - } -} - -query FindScenesByFullFingerprints($fingerprints: [FingerprintQueryInput!]!) { - findScenesByFullFingerprints(fingerprints: $fingerprints) { - ...SceneFragment - } -} - query FindScenesBySceneFingerprints( $fingerprints: [[FingerprintQueryInput!]!]! ) { @@ -169,6 +165,21 @@ query FindStudio($id: ID, $name: String) { } } +query FindTag($id: ID, $name: String) { + findTag(id: $id, name: $name) { + ...TagFragment + } +} + +query QueryTags($input: TagQueryInput!) { + queryTags(input: $input) { + count + tags { + ...TagFragment + } + } +} + mutation SubmitFingerprint($input: FingerprintSubmission!) 
{ submitFingerprint(input: $input) } diff --git a/internal/api/authentication.go b/internal/api/authentication.go index 6ad7117a1..be399d222 100644 --- a/internal/api/authentication.go +++ b/internal/api/authentication.go @@ -40,6 +40,8 @@ func authenticateHandler() func(http.Handler) http.Handler { return } + r = session.SetLocalRequest(r) + userID, err := manager.GetInstance().SessionStore.Authenticate(w, r) if err != nil { if !errors.Is(err, session.ErrUnauthorized) { diff --git a/internal/api/changeset_translator.go b/internal/api/changeset_translator.go index 5c81c12cb..45285bdde 100644 --- a/internal/api/changeset_translator.go +++ b/internal/api/changeset_translator.go @@ -98,7 +98,7 @@ func (t changesetTranslator) string(value *string) string { return "" } - return *value + return strings.TrimSpace(*value) } func (t changesetTranslator) optionalString(value *string, field string) models.OptionalString { @@ -106,7 +106,12 @@ func (t changesetTranslator) optionalString(value *string, field string) models. 
return models.OptionalString{} } - return models.NewOptionalStringPtr(value) + if value == nil { + return models.NewOptionalStringPtr(nil) + } + + trimmed := strings.TrimSpace(*value) + return models.NewOptionalString(trimmed) } func (t changesetTranslator) optionalDate(value *string, field string) (models.OptionalDate, error) { @@ -318,8 +323,14 @@ func (t changesetTranslator) updateStrings(value []string, field string) *models return nil } + // Trim whitespace from each string + trimmedValues := make([]string, len(value)) + for i, v := range value { + trimmedValues[i] = strings.TrimSpace(v) + } + return &models.UpdateStrings{ - Values: value, + Values: trimmedValues, Mode: models.RelationshipUpdateModeSet, } } @@ -329,8 +340,14 @@ func (t changesetTranslator) updateStringsBulk(value *BulkUpdateStrings, field s return nil } + // Trim whitespace from each string + trimmedValues := make([]string, len(value.Values)) + for i, v := range value.Values { + trimmedValues[i] = strings.TrimSpace(v) + } + return &models.UpdateStrings{ - Values: value.Values, + Values: trimmedValues, Mode: value.Mode, } } @@ -448,7 +465,7 @@ func groupsDescriptionsFromGroupInput(input []*GroupDescriptionInput) ([]models. 
GroupID: gID, } if v.Description != nil { - ret[i].Description = *v.Description + ret[i].Description = strings.TrimSpace(*v.Description) } } diff --git a/internal/api/check_version.go b/internal/api/check_version.go index 6279997d7..f4c2950f1 100644 --- a/internal/api/check_version.go +++ b/internal/api/check_version.go @@ -7,8 +7,10 @@ import ( "fmt" "io" "net/http" + "os" "regexp" "runtime" + "strings" "time" "golang.org/x/sys/cpu" @@ -36,6 +38,24 @@ var stashReleases = func() map[string]string { } } +// isMacOSBundle checks if the application is running from within a macOS .app bundle +func isMacOSBundle() bool { + exec, err := os.Executable() + return err == nil && strings.Contains(exec, "Stash.app/") +} + +// getWantedRelease determines which release variant to download based on platform and bundle type +func getWantedRelease(platform string) string { + release := stashReleases()[platform] + + // On macOS, check if running from .app bundle + if runtime.GOOS == "darwin" && isMacOSBundle() { + return "Stash.app.zip" + } + + return release +} + type githubReleasesResponse struct { Url string Assets_url string @@ -168,7 +188,7 @@ func GetLatestRelease(ctx context.Context) (*LatestRelease, error) { } platform := fmt.Sprintf("%s/%s", runtime.GOOS, arch) - wantedRelease := stashReleases()[platform] + wantedRelease := getWantedRelease(platform) url := apiReleases if build.IsDevelop() { diff --git a/internal/api/custom_fields.go b/internal/api/custom_fields.go new file mode 100644 index 000000000..5eaa6f67a --- /dev/null +++ b/internal/api/custom_fields.go @@ -0,0 +1,12 @@ +package api + +import "github.com/stashapp/stash/pkg/models" + +func handleUpdateCustomFields(input models.CustomFieldsInput) models.CustomFieldsInput { + ret := input + // convert json.Numbers to int/float + ret.Full = convertMapJSONNumbers(ret.Full) + ret.Partial = convertMapJSONNumbers(ret.Partial) + + return ret +} diff --git a/internal/api/fields.go b/internal/api/fields.go new file mode 100644 
index 000000000..5f47ed06f --- /dev/null +++ b/internal/api/fields.go @@ -0,0 +1,23 @@ +package api + +import ( + "context" + + "github.com/99designs/gqlgen/graphql" +) + +type queryFields []string + +func collectQueryFields(ctx context.Context) queryFields { + fields := graphql.CollectAllFields(ctx) + return queryFields(fields) +} + +func (f queryFields) Has(field string) bool { + for _, v := range f { + if v == field { + return true + } + } + return false +} diff --git a/internal/api/images.go b/internal/api/images.go index 89a8e87b0..e0f11416a 100644 --- a/internal/api/images.go +++ b/internal/api/images.go @@ -26,6 +26,7 @@ var imageBoxExts = []string{ ".gif", ".svg", ".webp", + ".avif", } func newImageBox(box fs.FS) (*imageBox, error) { @@ -101,7 +102,7 @@ func initCustomPerformerImages(customPath string) { } } -func getDefaultPerformerImage(name string, gender *models.GenderEnum) []byte { +func getDefaultPerformerImage(name string, gender *models.GenderEnum, sfwMode bool) []byte { // try the custom box first if we have one if performerBoxCustom != nil { ret, err := performerBoxCustom.GetRandomImageByName(name) @@ -111,6 +112,10 @@ func getDefaultPerformerImage(name string, gender *models.GenderEnum) []byte { logger.Warnf("error loading custom default performer image: %v", err) } + if sfwMode { + return static.ReadAll(static.DefaultSFWPerformerImage) + } + var g models.GenderEnum if gender != nil { g = *gender diff --git a/internal/api/input.go b/internal/api/input.go new file mode 100644 index 000000000..1a720e965 --- /dev/null +++ b/internal/api/input.go @@ -0,0 +1,35 @@ +package api + +import ( + "fmt" + + "github.com/stashapp/stash/pkg/sliceutil/stringslice" +) + +// TODO - apply handleIDs to other resolvers that accept ID lists + +// handleIDList validates and converts a list of string IDs to integers +func handleIDList(idList []string, field string) ([]int, error) { + if err := validateIDList(idList); err != nil { + return nil, fmt.Errorf("validating %s: 
%w", field, err) + } + + ids, err := stringslice.StringSliceToIntSlice(idList) + if err != nil { + return nil, fmt.Errorf("converting %s: %w", field, err) + } + + return ids, nil +} + +// validateIDList returns an error if there are any duplicate ids in the list +func validateIDList(ids []string) error { + seen := make(map[string]struct{}) + for _, id := range ids { + if _, exists := seen[id]; exists { + return fmt.Errorf("duplicate id found: %s", id) + } + seen[id] = struct{}{} + } + return nil +} diff --git a/internal/api/loaders/dataloaders.go b/internal/api/loaders/dataloaders.go index 493c353d7..c1faf61ed 100644 --- a/internal/api/loaders/dataloaders.go +++ b/internal/api/loaders/dataloaders.go @@ -10,6 +10,8 @@ //go:generate go run github.com/vektah/dataloaden TagLoader int *github.com/stashapp/stash/pkg/models.Tag //go:generate go run github.com/vektah/dataloaden GroupLoader int *github.com/stashapp/stash/pkg/models.Group //go:generate go run github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/models.FileID github.com/stashapp/stash/pkg/models.File +//go:generate go run github.com/vektah/dataloaden FolderLoader github.com/stashapp/stash/pkg/models.FolderID *github.com/stashapp/stash/pkg/models.Folder +//go:generate go run github.com/vektah/dataloaden FolderRelatedFolderIDsLoader github.com/stashapp/stash/pkg/models.FolderID []github.com/stashapp/stash/pkg/models.FolderID //go:generate go run github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID //go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID //go:generate go run github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID @@ -41,27 +43,40 @@ const ( ) type Loaders struct { - SceneByID *SceneLoader - SceneFiles *SceneFileIDsLoader - ScenePlayCount *ScenePlayCountLoader - SceneOCount *SceneOCountLoader - ScenePlayHistory 
*ScenePlayHistoryLoader - SceneOHistory *SceneOHistoryLoader - SceneLastPlayed *SceneLastPlayedLoader + SceneByID *SceneLoader + SceneFiles *SceneFileIDsLoader + ScenePlayCount *ScenePlayCountLoader + SceneOCount *SceneOCountLoader + ScenePlayHistory *ScenePlayHistoryLoader + SceneOHistory *SceneOHistoryLoader + SceneLastPlayed *SceneLastPlayedLoader + SceneCustomFields *CustomFieldsLoader ImageFiles *ImageFileIDsLoader GalleryFiles *GalleryFileIDsLoader - GalleryByID *GalleryLoader - ImageByID *ImageLoader + GalleryByID *GalleryLoader + GalleryCustomFields *CustomFieldsLoader + ImageByID *ImageLoader + ImageCustomFields *CustomFieldsLoader PerformerByID *PerformerLoader PerformerCustomFields *CustomFieldsLoader - StudioByID *StudioLoader - TagByID *TagLoader - GroupByID *GroupLoader - FileByID *FileLoader + StudioByID *StudioLoader + StudioCustomFields *CustomFieldsLoader + + TagByID *TagLoader + TagCustomFields *CustomFieldsLoader + + GroupByID *GroupLoader + GroupCustomFields *CustomFieldsLoader + + FileByID *FileLoader + + FolderByID *FolderLoader + FolderParentFolderIDs *FolderRelatedFolderIDsLoader + FolderSubFolderIDs *FolderRelatedFolderIDsLoader } type Middleware struct { @@ -82,11 +97,21 @@ func (m Middleware) Middleware(next http.Handler) http.Handler { maxBatch: maxBatch, fetch: m.fetchGalleries(ctx), }, + GalleryCustomFields: &CustomFieldsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchGalleryCustomFields(ctx), + }, ImageByID: &ImageLoader{ wait: wait, maxBatch: maxBatch, fetch: m.fetchImages(ctx), }, + ImageCustomFields: &CustomFieldsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchImageCustomFields(ctx), + }, PerformerByID: &PerformerLoader{ wait: wait, maxBatch: maxBatch, @@ -97,6 +122,16 @@ func (m Middleware) Middleware(next http.Handler) http.Handler { maxBatch: maxBatch, fetch: m.fetchPerformerCustomFields(ctx), }, + StudioCustomFields: &CustomFieldsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: 
m.fetchStudioCustomFields(ctx), + }, + SceneCustomFields: &CustomFieldsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchSceneCustomFields(ctx), + }, StudioByID: &StudioLoader{ wait: wait, maxBatch: maxBatch, @@ -107,16 +142,41 @@ func (m Middleware) Middleware(next http.Handler) http.Handler { maxBatch: maxBatch, fetch: m.fetchTags(ctx), }, + TagCustomFields: &CustomFieldsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchTagCustomFields(ctx), + }, GroupByID: &GroupLoader{ wait: wait, maxBatch: maxBatch, fetch: m.fetchGroups(ctx), }, + GroupCustomFields: &CustomFieldsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchGroupCustomFields(ctx), + }, FileByID: &FileLoader{ wait: wait, maxBatch: maxBatch, fetch: m.fetchFiles(ctx), }, + FolderByID: &FolderLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchFolders(ctx), + }, + FolderParentFolderIDs: &FolderRelatedFolderIDsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchFoldersParentFolderIDs(ctx), + }, + FolderSubFolderIDs: &FolderRelatedFolderIDsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchFoldersSubFolderIDs(ctx), + }, SceneFiles: &SceneFileIDsLoader{ wait: wait, maxBatch: maxBatch, @@ -187,6 +247,18 @@ func (m Middleware) fetchScenes(ctx context.Context) func(keys []int) ([]*models } } +func (m Middleware) fetchSceneCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) { + return func(keys []int) (ret []models.CustomFieldMap, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Scene.GetCustomFieldsBulk(ctx, keys) + return err + }) + + return ret, toErrorSlice(err) + } +} + func (m Middleware) fetchImages(ctx context.Context) func(keys []int) ([]*models.Image, []error) { return func(keys []int) (ret []*models.Image, errs []error) { err := m.Repository.WithDB(ctx, func(ctx context.Context) error { @@ -199,6 +271,18 @@ func (m Middleware) fetchImages(ctx 
context.Context) func(keys []int) ([]*models } } +func (m Middleware) fetchImageCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) { + return func(keys []int) (ret []models.CustomFieldMap, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Image.GetCustomFieldsBulk(ctx, keys) + return err + }) + + return ret, toErrorSlice(err) + } +} + func (m Middleware) fetchGalleries(ctx context.Context) func(keys []int) ([]*models.Gallery, []error) { return func(keys []int) (ret []*models.Gallery, errs []error) { err := m.Repository.WithDB(ctx, func(ctx context.Context) error { @@ -246,6 +330,18 @@ func (m Middleware) fetchStudios(ctx context.Context) func(keys []int) ([]*model } } +func (m Middleware) fetchStudioCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) { + return func(keys []int) (ret []models.CustomFieldMap, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Studio.GetCustomFieldsBulk(ctx, keys) + return err + }) + + return ret, toErrorSlice(err) + } +} + func (m Middleware) fetchTags(ctx context.Context) func(keys []int) ([]*models.Tag, []error) { return func(keys []int) (ret []*models.Tag, errs []error) { err := m.Repository.WithDB(ctx, func(ctx context.Context) error { @@ -257,6 +353,42 @@ func (m Middleware) fetchTags(ctx context.Context) func(keys []int) ([]*models.T } } +func (m Middleware) fetchTagCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) { + return func(keys []int) (ret []models.CustomFieldMap, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Tag.GetCustomFieldsBulk(ctx, keys) + return err + }) + + return ret, toErrorSlice(err) + } +} + +func (m Middleware) fetchGroupCustomFields(ctx context.Context) func(keys []int) 
([]models.CustomFieldMap, []error) { + return func(keys []int) (ret []models.CustomFieldMap, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Group.GetCustomFieldsBulk(ctx, keys) + return err + }) + + return ret, toErrorSlice(err) + } +} + +func (m Middleware) fetchGalleryCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) { + return func(keys []int) (ret []models.CustomFieldMap, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Gallery.GetCustomFieldsBulk(ctx, keys) + return err + }) + + return ret, toErrorSlice(err) + } +} + func (m Middleware) fetchGroups(ctx context.Context) func(keys []int) ([]*models.Group, []error) { return func(keys []int) (ret []*models.Group, errs []error) { err := m.Repository.WithDB(ctx, func(ctx context.Context) error { @@ -279,6 +411,39 @@ func (m Middleware) fetchFiles(ctx context.Context) func(keys []models.FileID) ( } } +func (m Middleware) fetchFolders(ctx context.Context) func(keys []models.FolderID) ([]*models.Folder, []error) { + return func(keys []models.FolderID) (ret []*models.Folder, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Folder.FindMany(ctx, keys) + return err + }) + return ret, toErrorSlice(err) + } +} + +func (m Middleware) fetchFoldersParentFolderIDs(ctx context.Context) func(keys []models.FolderID) ([][]models.FolderID, []error) { + return func(keys []models.FolderID) (ret [][]models.FolderID, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Folder.GetManyParentFolderIDs(ctx, keys) + return err + }) + return ret, toErrorSlice(err) + } +} + +func (m Middleware) fetchFoldersSubFolderIDs(ctx context.Context) func(keys []models.FolderID) ([][]models.FolderID, []error) { + return 
func(keys []models.FolderID) (ret [][]models.FolderID, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Folder.GetManySubFolderIDs(ctx, keys) + return err + }) + return ret, toErrorSlice(err) + } +} + func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([][]models.FileID, []error) { return func(keys []int) (ret [][]models.FileID, errs []error) { err := m.Repository.WithDB(ctx, func(ctx context.Context) error { diff --git a/internal/api/loaders/folderloader_gen.go b/internal/api/loaders/folderloader_gen.go new file mode 100644 index 000000000..ca2518b82 --- /dev/null +++ b/internal/api/loaders/folderloader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. + +package loaders + +import ( + "sync" + "time" + + "github.com/stashapp/stash/pkg/models" +) + +// FolderLoaderConfig captures the config to create a new FolderLoader +type FolderLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []models.FolderID) ([]*models.Folder, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewFolderLoader creates a new FolderLoader given a fetch, wait, and maxBatch +func NewFolderLoader(config FolderLoaderConfig) *FolderLoader { + return &FolderLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// FolderLoader batches and caches requests +type FolderLoader struct { + // this method provides the data for the loader + fetch func(keys []models.FolderID) ([]*models.Folder, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache 
map[models.FolderID]*models.Folder + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *folderLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type folderLoaderBatch struct { + keys []models.FolderID + data []*models.Folder + error []error + closing bool + done chan struct{} +} + +// Load a Folder by key, batching and caching will be applied automatically +func (l *FolderLoader) Load(key models.FolderID) (*models.Folder, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a Folder. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *FolderLoader) LoadThunk(key models.FolderID) func() (*models.Folder, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*models.Folder, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &folderLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*models.Folder, error) { + <-batch.done + + var data *models.Folder + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *FolderLoader) LoadAll(keys []models.FolderID) ([]*models.Folder, []error) { + results := make([]func() (*models.Folder, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + folders := make([]*models.Folder, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + folders[i], errors[i] = thunk() + } + return folders, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Folders. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *FolderLoader) LoadAllThunk(keys []models.FolderID) func() ([]*models.Folder, []error) { + results := make([]func() (*models.Folder, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*models.Folder, []error) { + folders := make([]*models.Folder, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + folders[i], errors[i] = thunk() + } + return folders, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *FolderLoader) Prime(key models.FolderID, value *models.Folder) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *FolderLoader) Clear(key models.FolderID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *FolderLoader) unsafeSet(key models.FolderID, value *models.Folder) { + if l.cache == nil { + l.cache = map[models.FolderID]*models.Folder{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *folderLoaderBatch) keyIndex(l *FolderLoader, key models.FolderID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *folderLoaderBatch) startTimer(l *FolderLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *folderLoaderBatch) end(l *FolderLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/api/loaders/folderrelatedfolderidsloader_gen.go b/internal/api/loaders/folderrelatedfolderidsloader_gen.go new file mode 100644 index 000000000..d0edb92f4 --- /dev/null +++ b/internal/api/loaders/folderrelatedfolderidsloader_gen.go @@ -0,0 +1,225 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package loaders + +import ( + "sync" + "time" + + "github.com/stashapp/stash/pkg/models" +) + +// FolderParentFolderIDsLoaderConfig captures the config to create a new FolderParentFolderIDsLoader +type FolderParentFolderIDsLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []models.FolderID) ([][]models.FolderID, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewFolderParentFolderIDsLoader creates a new FolderParentFolderIDsLoader given a fetch, wait, and maxBatch +func NewFolderParentFolderIDsLoader(config FolderParentFolderIDsLoaderConfig) *FolderRelatedFolderIDsLoader { + return &FolderRelatedFolderIDsLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// FolderRelatedFolderIDsLoader batches and caches requests +type FolderRelatedFolderIDsLoader struct { + // this method provides the data for the loader + fetch func(keys []models.FolderID) ([][]models.FolderID, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[models.FolderID][]models.FolderID + + // the current batch. 
keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *folderParentFolderIDsLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type folderParentFolderIDsLoaderBatch struct { + keys []models.FolderID + data [][]models.FolderID + error []error + closing bool + done chan struct{} +} + +// Load a FolderID by key, batching and caching will be applied automatically +func (l *FolderRelatedFolderIDsLoader) Load(key models.FolderID) ([]models.FolderID, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a FolderID. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *FolderRelatedFolderIDsLoader) LoadThunk(key models.FolderID) func() ([]models.FolderID, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() ([]models.FolderID, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &folderParentFolderIDsLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() ([]models.FolderID, error) { + <-batch.done + + var data []models.FolderID + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *FolderRelatedFolderIDsLoader) LoadAll(keys []models.FolderID) ([][]models.FolderID, []error) { + results := make([]func() ([]models.FolderID, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + folderIDs := make([][]models.FolderID, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + folderIDs[i], errors[i] = thunk() + } + return folderIDs, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a FolderIDs. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *FolderRelatedFolderIDsLoader) LoadAllThunk(keys []models.FolderID) func() ([][]models.FolderID, []error) { + results := make([]func() ([]models.FolderID, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([][]models.FolderID, []error) { + folderIDs := make([][]models.FolderID, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + folderIDs[i], errors[i] = thunk() + } + return folderIDs, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *FolderRelatedFolderIDsLoader) Prime(key models.FolderID, value []models.FolderID) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := make([]models.FolderID, len(value)) + copy(cpy, value) + l.unsafeSet(key, cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *FolderRelatedFolderIDsLoader) Clear(key models.FolderID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *FolderRelatedFolderIDsLoader) unsafeSet(key models.FolderID, value []models.FolderID) { + if l.cache == nil { + l.cache = map[models.FolderID][]models.FolderID{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *folderParentFolderIDsLoaderBatch) keyIndex(l *FolderRelatedFolderIDsLoader, key models.FolderID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *folderParentFolderIDsLoaderBatch) startTimer(l *FolderRelatedFolderIDsLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *folderParentFolderIDsLoaderBatch) end(l *FolderRelatedFolderIDsLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/api/models.go b/internal/api/models.go index d8f4dc63c..1c7346697 100644 --- a/internal/api/models.go +++ b/internal/api/models.go @@ -4,6 +4,7 @@ import ( "fmt" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/sliceutil" ) type BaseFile interface { @@ -27,6 +28,29 @@ func convertVisualFile(f models.File) (VisualFile, error) { } } +func convertBaseFile(f models.File) BaseFile { + if f == nil { + return nil + } + + switch f := f.(type) { + case 
BaseFile: + return f + case *models.VideoFile: + return &VideoFile{VideoFile: f} + case *models.ImageFile: + return &ImageFile{ImageFile: f} + case *models.BaseFile: + return &BasicFile{BaseFile: f} + default: + panic("unknown file type") + } +} + +func convertBaseFiles(files []models.File) []BaseFile { + return sliceutil.Map(files, convertBaseFile) +} + type GalleryFile struct { *models.BaseFile } @@ -62,3 +86,15 @@ func (ImageFile) IsVisualFile() {} func (f *ImageFile) Fingerprints() []models.Fingerprint { return f.ImageFile.Fingerprints } + +type BasicFile struct { + *models.BaseFile +} + +func (BasicFile) IsBaseFile() {} + +func (BasicFile) IsVisualFile() {} + +func (f *BasicFile) Fingerprints() []models.Fingerprint { + return f.BaseFile.Fingerprints +} diff --git a/internal/api/resolver.go b/internal/api/resolver.go index f3097969d..b1cec1c9d 100644 --- a/internal/api/resolver.go +++ b/internal/api/resolver.go @@ -7,6 +7,7 @@ import ( "sort" "strconv" + "github.com/99designs/gqlgen/graphql" "github.com/stashapp/stash/internal/build" "github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/pkg/logger" @@ -95,6 +96,12 @@ func (r *Resolver) VideoFile() VideoFileResolver { func (r *Resolver) ImageFile() ImageFileResolver { return &imageFileResolver{r} } +func (r *Resolver) BasicFile() BasicFileResolver { + return &basicFileResolver{r} +} +func (r *Resolver) Folder() FolderResolver { + return &folderResolver{r} +} func (r *Resolver) SavedFilter() SavedFilterResolver { return &savedFilterResolver{r} } @@ -125,6 +132,8 @@ type tagResolver struct{ *Resolver } type galleryFileResolver struct{ *Resolver } type videoFileResolver struct{ *Resolver } type imageFileResolver struct{ *Resolver } +type basicFileResolver struct{ *Resolver } +type folderResolver struct{ *Resolver } type savedFilterResolver struct{ *Resolver } type pluginResolver struct{ *Resolver } type configResultResolver struct{ *Resolver } @@ -137,6 +146,13 @@ func (r *Resolver) 
withReadTxn(ctx context.Context, fn func(ctx context.Context) return r.repository.WithReadTxn(ctx, fn) } +// idOnly returns true if the query is only asking for the id field. +// This can be used to optimize certain queries where we don't need to load the full object if we're only getting the id. +func (r *Resolver) idOnly(ctx context.Context) bool { + fields := graphql.CollectAllFields(ctx) + return len(fields) == 1 && fields[0] == "id" +} + func (r *queryResolver) MarkerWall(ctx context.Context, q *string) (ret []*models.SceneMarker, err error) { if err := r.withReadTxn(ctx, func(ctx context.Context) error { ret, err = r.repository.SceneMarker.Wall(ctx, q) diff --git a/internal/api/resolver_model_file.go b/internal/api/resolver_model_file.go index 35013cfbd..4b9995311 100644 --- a/internal/api/resolver_model_file.go +++ b/internal/api/resolver_model_file.go @@ -1,30 +1,80 @@ package api -import "context" +import ( + "context" -func (r *galleryFileResolver) Fingerprint(ctx context.Context, obj *GalleryFile, type_ string) (*string, error) { - fp := obj.BaseFile.Fingerprints.For(type_) - if fp != nil { - v := fp.Value() - return &v, nil + "github.com/stashapp/stash/internal/api/loaders" + "github.com/stashapp/stash/pkg/models" +) + +func fingerprintResolver(fp models.Fingerprints, type_ string) (*string, error) { + fingerprint := fp.For(type_) + if fingerprint != nil { + value := fingerprint.Value() + return &value, nil } return nil, nil } +func (r *galleryFileResolver) Fingerprint(ctx context.Context, obj *GalleryFile, type_ string) (*string, error) { + return fingerprintResolver(obj.BaseFile.Fingerprints, type_) +} + func (r *imageFileResolver) Fingerprint(ctx context.Context, obj *ImageFile, type_ string) (*string, error) { - fp := obj.ImageFile.Fingerprints.For(type_) - if fp != nil { - v := fp.Value() - return &v, nil - } - return nil, nil + return fingerprintResolver(obj.ImageFile.Fingerprints, type_) } func (r *videoFileResolver) Fingerprint(ctx 
context.Context, obj *VideoFile, type_ string) (*string, error) { - fp := obj.VideoFile.Fingerprints.For(type_) - if fp != nil { - v := fp.Value() - return &v, nil - } - return nil, nil + return fingerprintResolver(obj.VideoFile.Fingerprints, type_) +} + +func (r *basicFileResolver) Fingerprint(ctx context.Context, obj *BasicFile, type_ string) (*string, error) { + return fingerprintResolver(obj.BaseFile.Fingerprints, type_) +} + +func (r *galleryFileResolver) ParentFolder(ctx context.Context, obj *GalleryFile) (*models.Folder, error) { + return loaders.From(ctx).FolderByID.Load(obj.ParentFolderID) +} + +func (r *imageFileResolver) ParentFolder(ctx context.Context, obj *ImageFile) (*models.Folder, error) { + return loaders.From(ctx).FolderByID.Load(obj.ParentFolderID) +} + +func (r *videoFileResolver) ParentFolder(ctx context.Context, obj *VideoFile) (*models.Folder, error) { + return loaders.From(ctx).FolderByID.Load(obj.ParentFolderID) +} + +func (r *basicFileResolver) ParentFolder(ctx context.Context, obj *BasicFile) (*models.Folder, error) { + return loaders.From(ctx).FolderByID.Load(obj.ParentFolderID) +} + +func zipFileResolver(ctx context.Context, zipFileID *models.FileID) (*BasicFile, error) { + if zipFileID == nil { + return nil, nil + } + + f, err := loaders.From(ctx).FileByID.Load(*zipFileID) + if err != nil { + return nil, err + } + + return &BasicFile{ + BaseFile: f.Base(), + }, nil +} + +func (r *galleryFileResolver) ZipFile(ctx context.Context, obj *GalleryFile) (*BasicFile, error) { + return zipFileResolver(ctx, obj.ZipFileID) +} + +func (r *imageFileResolver) ZipFile(ctx context.Context, obj *ImageFile) (*BasicFile, error) { + return zipFileResolver(ctx, obj.ZipFileID) +} + +func (r *videoFileResolver) ZipFile(ctx context.Context, obj *VideoFile) (*BasicFile, error) { + return zipFileResolver(ctx, obj.ZipFileID) +} + +func (r *basicFileResolver) ZipFile(ctx context.Context, obj *BasicFile) (*BasicFile, error) { + return zipFileResolver(ctx, 
obj.ZipFileID) } diff --git a/internal/api/resolver_model_folder.go b/internal/api/resolver_model_folder.go new file mode 100644 index 000000000..725ca34f8 --- /dev/null +++ b/internal/api/resolver_model_folder.go @@ -0,0 +1,78 @@ +package api + +import ( + "context" + "path/filepath" + + "github.com/stashapp/stash/internal/api/loaders" + "github.com/stashapp/stash/pkg/models" +) + +func (r *folderResolver) Basename(ctx context.Context, obj *models.Folder) (string, error) { + return filepath.Base(obj.Path), nil +} + +func (r *folderResolver) ParentFolder(ctx context.Context, obj *models.Folder) (*models.Folder, error) { + if obj.ParentFolderID == nil { + return nil, nil + } + + if r.idOnly(ctx) { + return &models.Folder{ID: *obj.ParentFolderID}, nil + } + + return loaders.From(ctx).FolderByID.Load(*obj.ParentFolderID) +} + +func foldersFromIDs(ids []models.FolderID) []*models.Folder { + ret := make([]*models.Folder, len(ids)) + for i, id := range ids { + ret[i] = &models.Folder{ID: id} + } + return ret +} + +func (r *folderResolver) ParentFolders(ctx context.Context, obj *models.Folder) ([]*models.Folder, error) { + ids, err := loaders.From(ctx).FolderParentFolderIDs.Load(obj.ID) + if err != nil { + return nil, err + } + + if r.idOnly(ctx) { + return foldersFromIDs(ids), nil + } + + var errs []error + ret, errs := loaders.From(ctx).FolderByID.LoadAll(ids) + return ret, firstError(errs) +} + +func (r *folderResolver) SubFolders(ctx context.Context, obj *models.Folder) ([]*models.Folder, error) { + ids, err := loaders.From(ctx).FolderSubFolderIDs.Load(obj.ID) + if err != nil { + return nil, err + } + + if r.idOnly(ctx) { + return foldersFromIDs(ids), nil + } + + var errs []error + ret, errs := loaders.From(ctx).FolderByID.LoadAll(ids) + return ret, firstError(errs) +} + +func (r *folderResolver) ZipFile(ctx context.Context, obj *models.Folder) (*BasicFile, error) { + // shortcut for id only queries + if r.idOnly(ctx) { + if obj.ZipFileID == nil { + return nil, nil + 
} + + return &BasicFile{ + BaseFile: &models.BaseFile{ID: *obj.ZipFileID}, + }, nil + } + + return zipFileResolver(ctx, obj.ZipFileID) +} diff --git a/internal/api/resolver_model_gallery.go b/internal/api/resolver_model_gallery.go index 9dc68b4c4..773a831d8 100644 --- a/internal/api/resolver_model_gallery.go +++ b/internal/api/resolver_model_gallery.go @@ -216,3 +216,16 @@ func (r *galleryResolver) Image(ctx context.Context, obj *models.Gallery, index return } + +func (r *galleryResolver) CustomFields(ctx context.Context, obj *models.Gallery) (map[string]interface{}, error) { + m, err := loaders.From(ctx).GalleryCustomFields.Load(obj.ID) + if err != nil { + return nil, err + } + + if m == nil { + return make(map[string]interface{}), nil + } + + return m, nil +} diff --git a/internal/api/resolver_model_image.go b/internal/api/resolver_model_image.go index 0886bea40..4a95ae1f4 100644 --- a/internal/api/resolver_model_image.go +++ b/internal/api/resolver_model_image.go @@ -161,3 +161,12 @@ func (r *imageResolver) Urls(ctx context.Context, obj *models.Image) ([]string, return obj.URLs.List(), nil } + +func (r *imageResolver) CustomFields(ctx context.Context, obj *models.Image) (map[string]interface{}, error) { + customFields, err := loaders.From(ctx).ImageCustomFields.Load(obj.ID) + if err != nil { + return nil, err + } + + return customFields, nil +} diff --git a/internal/api/resolver_model_movie.go b/internal/api/resolver_model_movie.go index e3fba57c0..287d5d51a 100644 --- a/internal/api/resolver_model_movie.go +++ b/internal/api/resolver_model_movie.go @@ -204,3 +204,27 @@ func (r *groupResolver) Scenes(ctx context.Context, obj *models.Group) (ret []*m return ret, nil } + +func (r *groupResolver) OCounter(ctx context.Context, obj *models.Group) (ret *int, err error) { + var count int + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + count, err = r.repository.Scene.OCountByGroupID(ctx, obj.ID) + return err + }); err != nil { + return nil, err + } + 
return &count, nil +} + +func (r *groupResolver) CustomFields(ctx context.Context, obj *models.Group) (map[string]interface{}, error) { + m, err := loaders.From(ctx).GroupCustomFields.Load(obj.ID) + if err != nil { + return nil, err + } + + if m == nil { + return make(map[string]interface{}), nil + } + + return m, nil +} diff --git a/internal/api/resolver_model_performer.go b/internal/api/resolver_model_performer.go index 94da62932..261a98ff3 100644 --- a/internal/api/resolver_model_performer.go +++ b/internal/api/resolver_model_performer.go @@ -109,6 +109,31 @@ func (r *performerResolver) HeightCm(ctx context.Context, obj *models.Performer) return obj.Height, nil } +func (r *performerResolver) CareerStart(ctx context.Context, obj *models.Performer) (*string, error) { + if obj.CareerStart != nil { + ret := obj.CareerStart.String() + return &ret, nil + } + return nil, nil +} + +func (r *performerResolver) CareerEnd(ctx context.Context, obj *models.Performer) (*string, error) { + if obj.CareerEnd != nil { + ret := obj.CareerEnd.String() + return &ret, nil + } + return nil, nil +} + +func (r *performerResolver) CareerLength(ctx context.Context, obj *models.Performer) (*string, error) { + if obj.CareerStart == nil && obj.CareerEnd == nil { + return nil, nil + } + + ret := models.FormatYearRange(obj.CareerStart, obj.CareerEnd) + return &ret, nil +} + func (r *performerResolver) Birthdate(ctx context.Context, obj *models.Performer) (*string, error) { if obj.Birthdate != nil { ret := obj.Birthdate.String() diff --git a/internal/api/resolver_model_scene.go b/internal/api/resolver_model_scene.go index 2600c9538..81113d858 100644 --- a/internal/api/resolver_model_scene.go +++ b/internal/api/resolver_model_scene.go @@ -410,3 +410,16 @@ func (r *sceneResolver) OHistory(ctx context.Context, obj *models.Scene) ([]*tim return ptrRet, nil } + +func (r *sceneResolver) CustomFields(ctx context.Context, obj *models.Scene) (map[string]interface{}, error) { + m, err := 
loaders.From(ctx).SceneCustomFields.Load(obj.ID) + if err != nil { + return nil, err + } + + if m == nil { + return make(map[string]interface{}), nil + } + + return m, nil +} diff --git a/internal/api/resolver_model_studio.go b/internal/api/resolver_model_studio.go index 2111039c8..b54455920 100644 --- a/internal/api/resolver_model_studio.go +++ b/internal/api/resolver_model_studio.go @@ -40,6 +40,35 @@ func (r *studioResolver) Aliases(ctx context.Context, obj *models.Studio) ([]str return obj.Aliases.List(), nil } +func (r *studioResolver) URL(ctx context.Context, obj *models.Studio) (*string, error) { + if !obj.URLs.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadURLs(ctx, r.repository.Studio) + }); err != nil { + return nil, err + } + } + + urls := obj.URLs.List() + if len(urls) == 0 { + return nil, nil + } + + return &urls[0], nil +} + +func (r *studioResolver) Urls(ctx context.Context, obj *models.Studio) ([]string, error) { + if !obj.URLs.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadURLs(ctx, r.repository.Studio) + }); err != nil { + return nil, err + } + } + + return obj.URLs.List(), nil +} + func (r *studioResolver) Tags(ctx context.Context, obj *models.Studio) (ret []*models.Tag, err error) { if !obj.TagIDs.Loaded() { if err := r.withReadTxn(ctx, func(ctx context.Context) error { @@ -114,6 +143,24 @@ func (r *studioResolver) MovieCount(ctx context.Context, obj *models.Studio, dep return r.GroupCount(ctx, obj, depth) } +func (r *studioResolver) OCounter(ctx context.Context, obj *models.Studio) (ret *int, err error) { + var res_scene int + var res_image int + var res int + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + res_scene, err = r.repository.Scene.OCountByStudioID(ctx, obj.ID) + if err != nil { + return err + } + res_image, err = r.repository.Image.OCountByStudioID(ctx, obj.ID) + return err + }); err != nil { + return nil, err + } + res = 
res_scene + res_image + return &res, nil +} + func (r *studioResolver) ParentStudio(ctx context.Context, obj *models.Studio) (ret *models.Studio, err error) { if obj.ParentID == nil { return nil, nil @@ -160,6 +207,19 @@ func (r *studioResolver) Groups(ctx context.Context, obj *models.Studio) (ret [] return ret, nil } +func (r *studioResolver) CustomFields(ctx context.Context, obj *models.Studio) (map[string]interface{}, error) { + m, err := loaders.From(ctx).StudioCustomFields.Load(obj.ID) + if err != nil { + return nil, err + } + + if m == nil { + return make(map[string]interface{}), nil + } + + return m, nil +} + // deprecated func (r *studioResolver) Movies(ctx context.Context, obj *models.Studio) (ret []*models.Group, err error) { return r.Groups(ctx, obj) diff --git a/internal/api/resolver_model_tag.go b/internal/api/resolver_model_tag.go index 14237d2fe..7518036b0 100644 --- a/internal/api/resolver_model_tag.go +++ b/internal/api/resolver_model_tag.go @@ -54,6 +54,16 @@ func (r *tagResolver) Aliases(ctx context.Context, obj *models.Tag) (ret []strin return obj.Aliases.List(), nil } +func (r *tagResolver) StashIds(ctx context.Context, obj *models.Tag) ([]*models.StashID, error) { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadStashIDs(ctx, r.repository.Tag) + }); err != nil { + return nil, err + } + + return stashIDsSliceToPtrSlice(obj.StashIDs.List()), nil +} + func (r *tagResolver) SceneCount(ctx context.Context, obj *models.Tag, depth *int) (ret int, err error) { if err := r.withReadTxn(ctx, func(ctx context.Context) error { ret, err = scene.CountByTagID(ctx, r.repository.Scene, obj.ID, depth) @@ -171,3 +181,16 @@ func (r *tagResolver) ChildCount(ctx context.Context, obj *models.Tag) (ret int, return ret, nil } + +func (r *tagResolver) CustomFields(ctx context.Context, obj *models.Tag) (map[string]interface{}, error) { + m, err := loaders.From(ctx).TagCustomFields.Load(obj.ID) + if err != nil { + return nil, err + } + + 
if m == nil { + return make(map[string]interface{}), nil + } + + return m, nil +} diff --git a/internal/api/resolver_mutation_configure.go b/internal/api/resolver_mutation_configure.go index d9c71b09f..3df1c9114 100644 --- a/internal/api/resolver_mutation_configure.go +++ b/internal/api/resolver_mutation_configure.go @@ -5,6 +5,7 @@ import ( "encoding/json" "errors" "fmt" + "io/fs" "path/filepath" "regexp" "strconv" @@ -85,6 +86,8 @@ func (r *mutationResolver) setConfigFloat(key string, value *float64) { func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGeneralInput) (*ConfigGeneralResult, error) { c := config.GetInstance() + // #4709 - allow stash paths even if they do not exist, so that users may configure stash + // for disconnected drives or network storage. existingPaths := c.GetStashPaths() if input.Stashes != nil { for _, s := range input.Stashes { @@ -97,8 +100,12 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen } } if isNew { + s.Path = filepath.Clean(s.Path) + + // if it exists, it must be directory exists, err := fsutil.DirExists(s.Path) - if !exists { + // allow it to not exist but if it does exist it must be a directory + if !exists && !errors.Is(err, fs.ErrNotExist) { return makeConfigGeneralResult(), err } } @@ -150,6 +157,15 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen c.SetString(config.BackupDirectoryPath, *input.BackupDirectoryPath) } + existingDeleteTrashPath := c.GetDeleteTrashPath() + if input.DeleteTrashPath != nil && existingDeleteTrashPath != *input.DeleteTrashPath { + if err := validateDir(config.DeleteTrashPath, *input.DeleteTrashPath, true); err != nil { + return makeConfigGeneralResult(), err + } + + c.SetString(config.DeleteTrashPath, *input.DeleteTrashPath) + } + existingGeneratedPath := c.GetGeneratedPath() if input.GeneratedPath != nil && existingGeneratedPath != *input.GeneratedPath { if err := validateDir(config.Generated, 
*input.GeneratedPath, false); err != nil { @@ -278,6 +294,11 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen if input.PreviewPreset != nil { c.SetString(config.PreviewPreset, input.PreviewPreset.String()) } + r.setConfigBool(config.UseCustomSpriteInterval, input.UseCustomSpriteInterval) + r.setConfigFloat(config.SpriteInterval, input.SpriteInterval) + r.setConfigInt(config.MinimumSprites, input.MinimumSprites) + r.setConfigInt(config.MaximumSprites, input.MaximumSprites) + r.setConfigInt(config.SpriteScreenshotSize, input.SpriteScreenshotSize) r.setConfigBool(config.TranscodeHardwareAcceleration, input.TranscodeHardwareAcceleration) if input.MaxTranscodeSize != nil { @@ -334,6 +355,10 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen logger.SetLogLevel(*input.LogLevel) } + if input.LogFileMaxSize != nil && *input.LogFileMaxSize != c.GetLogFileMaxSize() { + c.SetInt(config.LogFileMaxSize, *input.LogFileMaxSize) + } + if input.Excludes != nil { for _, r := range input.Excludes { _, err := regexp.Compile(r) @@ -445,6 +470,8 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen func (r *mutationResolver) ConfigureInterface(ctx context.Context, input ConfigInterfaceInput) (*ConfigInterfaceResult, error) { c := config.GetInstance() + r.setConfigBool(config.SFWContentMode, input.SfwContentMode) + if input.MenuItems != nil { c.SetInterface(config.MenuItems, input.MenuItems) } @@ -478,6 +505,8 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input ConfigI r.setConfigString(config.ImageLightboxScrollModeKey, (*string)(options.ScrollMode)) r.setConfigInt(config.ImageLightboxScrollAttemptsBeforeChange, options.ScrollAttemptsBeforeChange) + + r.setConfigBool(config.ImageLightboxDisableAnimation, options.DisableAnimation) } if input.CSS != nil { @@ -498,12 +527,15 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input ConfigI 
r.setConfigBool(config.CustomLocalesEnabled, input.CustomLocalesEnabled) + r.setConfigBool(config.DisableCustomizations, input.DisableCustomizations) + if input.DisableDropdownCreate != nil { ddc := input.DisableDropdownCreate r.setConfigBool(config.DisableDropdownCreatePerformer, ddc.Performer) r.setConfigBool(config.DisableDropdownCreateStudio, ddc.Studio) r.setConfigBool(config.DisableDropdownCreateTag, ddc.Tag) r.setConfigBool(config.DisableDropdownCreateMovie, ddc.Movie) + r.setConfigBool(config.DisableDropdownCreateGallery, ddc.Gallery) } r.setConfigString(config.HandyKey, input.HandyKey) diff --git a/internal/api/resolver_mutation_file.go b/internal/api/resolver_mutation_file.go index c303446e1..b9e36aa76 100644 --- a/internal/api/resolver_mutation_file.go +++ b/internal/api/resolver_mutation_file.go @@ -5,10 +5,14 @@ import ( "fmt" "strconv" + "github.com/stashapp/stash/internal/desktop" "github.com/stashapp/stash/internal/manager" + "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/session" "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) @@ -16,7 +20,7 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput) if err := r.withTxn(ctx, func(ctx context.Context) error { fileStore := r.repository.File folderStore := r.repository.Folder - mover := file.NewMover(fileStore, folderStore) + mover := file.NewMover(fileStore, folderStore, manager.GetInstance().Config.GetStashPaths().Paths()) mover.RegisterHooks(ctx) var ( @@ -54,13 +58,14 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput) folderPath := *input.DestinationFolder // ensure folder path is within the library - if err := r.validateFolderPath(folderPath); err != nil { + stashPaths := manager.GetInstance().Config.GetStashPaths() + if err := 
r.validateFolderPath(stashPaths, folderPath); err != nil { return err } // get or create folder hierarchy var err error - folder, err = file.GetOrCreateFolderHierarchy(ctx, folderStore, folderPath) + folder, err = file.GetOrCreateFolderHierarchy(ctx, folderStore, folderPath, stashPaths.Paths()) if err != nil { return fmt.Errorf("getting or creating folder hierarchy: %w", err) } @@ -109,8 +114,7 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput) return true, nil } -func (r *mutationResolver) validateFolderPath(folderPath string) error { - paths := manager.GetInstance().Config.GetStashPaths() +func (r *mutationResolver) validateFolderPath(paths config.StashConfigs, folderPath string) error { if l := paths.GetStashFromDirPath(folderPath); l == nil { return fmt.Errorf("folder path %s must be within a stash library path", folderPath) } @@ -149,7 +153,9 @@ func (r *mutationResolver) DeleteFiles(ctx context.Context, ids []string) (ret b return false, fmt.Errorf("converting ids: %w", err) } - fileDeleter := file.NewDeleter() + trashPath := manager.GetInstance().Config.GetDeleteTrashPath() + + fileDeleter := file.NewDeleterWithTrash(trashPath) destroyer := &file.ZipDestroyer{ FileDestroyer: r.repository.File, FolderDestroyer: r.repository.Folder, @@ -208,6 +214,58 @@ func (r *mutationResolver) DeleteFiles(ctx context.Context, ids []string) (ret b return true, nil } +func (r *mutationResolver) DestroyFiles(ctx context.Context, ids []string) (ret bool, err error) { + fileIDs, err := stringslice.StringSliceToIntSlice(ids) + if err != nil { + return false, fmt.Errorf("converting ids: %w", err) + } + + destroyer := &file.ZipDestroyer{ + FileDestroyer: r.repository.File, + FolderDestroyer: r.repository.Folder, + } + + if err := r.withTxn(ctx, func(ctx context.Context) error { + qb := r.repository.File + + for _, fileIDInt := range fileIDs { + fileID := models.FileID(fileIDInt) + f, err := qb.Find(ctx, fileID) + if err != nil { + return err + } + + 
if len(f) == 0 { + return fmt.Errorf("file with id %d not found", fileID) + } + + path := f[0].Base().Path + + // ensure not a primary file + isPrimary, err := qb.IsPrimary(ctx, fileID) + if err != nil { + return fmt.Errorf("checking if file %s is primary: %w", path, err) + } + + if isPrimary { + return fmt.Errorf("cannot destroy primary file entry %s", path) + } + + // destroy DB entries only (no filesystem deletion) + const deleteFile = false + if err := destroyer.DestroyZip(ctx, f[0], nil, deleteFile); err != nil { + return fmt.Errorf("destroying file entry %s: %w", path, err) + } + } + + return nil + }); err != nil { + return false, err + } + + return true, nil +} + func (r *mutationResolver) FileSetFingerprints(ctx context.Context, input FileSetFingerprintsInput) (bool, error) { fileIDInt, err := strconv.Atoi(input.ID) if err != nil { @@ -272,3 +330,71 @@ func (r *mutationResolver) FileSetFingerprints(ctx context.Context, input FileSe return true, nil } + +func (r *mutationResolver) RevealFileInFileManager(ctx context.Context, id string) (bool, error) { + // disallow if request did not come from localhost + if !session.IsLocalRequest(ctx) { + logger.Warnf("Attempt to reveal file in file manager from non-local request") + return false, fmt.Errorf("access denied") + } + + fileIDInt, err := strconv.Atoi(id) + if err != nil { + return false, fmt.Errorf("converting id: %w", err) + } + + var filePath string + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + files, err := r.repository.File.Find(ctx, models.FileID(fileIDInt)) + if err != nil { + return fmt.Errorf("finding file: %w", err) + } + if len(files) == 0 { + return fmt.Errorf("file with id %d not found", fileIDInt) + } + filePath = files[0].Base().Path + return nil + }); err != nil { + return false, err + } + + if err := desktop.RevealInFileManager(filePath); err != nil { + return false, err + } + + return true, nil +} + +func (r *mutationResolver) RevealFolderInFileManager(ctx context.Context, 
id string) (bool, error) { + // disallow if request did not come from localhost + if !session.IsLocalRequest(ctx) { + logger.Warnf("Attempt to reveal folder in file manager from non-local request") + return false, fmt.Errorf("access denied") + } + + folderIDInt, err := strconv.Atoi(id) + if err != nil { + return false, fmt.Errorf("converting id: %w", err) + } + + var folderPath string + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + folder, err := r.repository.Folder.Find(ctx, models.FolderID(folderIDInt)) + if err != nil { + return fmt.Errorf("finding folder: %w", err) + } + if folder == nil { + return fmt.Errorf("folder with id %d not found", folderIDInt) + } + folderPath = folder.Path + return nil + }); err != nil { + return false, err + } + + if err := desktop.RevealInFileManager(folderPath); err != nil { + return false, err + } + + return true, nil +} diff --git a/internal/api/resolver_mutation_gallery.go b/internal/api/resolver_mutation_gallery.go index 5d5cd4b37..2cd80b1ff 100644 --- a/internal/api/resolver_mutation_gallery.go +++ b/internal/api/resolver_mutation_gallery.go @@ -6,6 +6,7 @@ import ( "fmt" "os" "strconv" + "strings" "github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/pkg/file" @@ -41,13 +42,17 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat } // Populate a new gallery from the input - newGallery := models.NewGallery() + newGallery := models.CreateGalleryInput{ + Gallery: &models.Gallery{}, + } + *newGallery.Gallery = models.NewGallery() - newGallery.Title = input.Title + newGallery.Title = strings.TrimSpace(input.Title) newGallery.Code = translator.string(input.Code) newGallery.Details = translator.string(input.Details) newGallery.Photographer = translator.string(input.Photographer) newGallery.Rating = input.Rating100 + newGallery.Organized = translator.bool(input.Organized) var err error @@ -74,15 +79,17 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, 
input GalleryCreat } if input.Urls != nil { - newGallery.URLs = models.NewRelatedStrings(input.Urls) + newGallery.URLs = models.NewRelatedStrings(stringslice.TrimSpace(input.Urls)) } else if input.URL != nil { - newGallery.URLs = models.NewRelatedStrings([]string{*input.URL}) + newGallery.URLs = models.NewRelatedStrings([]string{strings.TrimSpace(*input.URL)}) } + newGallery.CustomFields = convertMapJSONNumbers(input.CustomFields) + // Start the transaction and save the gallery if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Gallery - if err := qb.Create(ctx, &newGallery, nil); err != nil { + if err := qb.Create(ctx, &newGallery); err != nil { return err } @@ -239,6 +246,10 @@ func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.Galle return nil, fmt.Errorf("converting scene ids: %w", err) } + if input.CustomFields != nil { + updatedGallery.CustomFields = handleUpdateCustomFields(*input.CustomFields) + } + // gallery scene is set from the scene only gallery, err := qb.UpdatePartial(ctx, galleryID, updatedGallery) @@ -291,6 +302,10 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGall return nil, fmt.Errorf("converting scene ids: %w", err) } + if input.CustomFields != nil { + updatedGallery.CustomFields = handleUpdateCustomFields(*input.CustomFields) + } + ret := []*models.Gallery{} // Start the transaction and save the galleries @@ -333,15 +348,18 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall return false, fmt.Errorf("converting ids: %w", err) } + trashPath := manager.GetInstance().Config.GetDeleteTrashPath() + var galleries []*models.Gallery var imgsDestroyed []*models.Image fileDeleter := &image.FileDeleter{ - Deleter: file.NewDeleter(), + Deleter: file.NewDeleterWithTrash(trashPath), Paths: manager.GetInstance().Paths, } deleteGenerated := utils.IsTrue(input.DeleteGenerated) deleteFile := utils.IsTrue(input.DeleteFile) + destroyFileEntry := 
utils.IsTrue(input.DestroyFileEntry) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Gallery @@ -362,7 +380,7 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall galleries = append(galleries, gallery) - imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile) + imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry) if err != nil { return err } diff --git a/internal/api/resolver_mutation_group.go b/internal/api/resolver_mutation_group.go index d75994d14..6c986c4da 100644 --- a/internal/api/resolver_mutation_group.go +++ b/internal/api/resolver_mutation_group.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "strconv" + "strings" "github.com/stashapp/stash/internal/static" "github.com/stashapp/stash/pkg/group" @@ -13,15 +14,19 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -func groupFromGroupCreateInput(ctx context.Context, input GroupCreateInput) (*models.Group, error) { +func groupFromGroupCreateInput(ctx context.Context, input GroupCreateInput) (*models.CreateGroupInput, error) { translator := changesetTranslator{ inputMap: getUpdateInputMap(ctx), } // Populate a new group from the input - newGroup := models.NewGroup() + newGroupInput := &models.CreateGroupInput{ + Group: &models.Group{}, + } + *newGroupInput.Group = models.NewGroup() + newGroup := newGroupInput.Group - newGroup.Name = input.Name + newGroup.Name = strings.TrimSpace(input.Name) newGroup.Aliases = translator.string(input.Aliases) newGroup.Duration = input.Duration newGroup.Rating = input.Rating100 @@ -55,31 +60,22 @@ func groupFromGroupCreateInput(ctx context.Context, input GroupCreateInput) (*mo } if input.Urls != nil { - newGroup.URLs = models.NewRelatedStrings(input.Urls) + newGroup.URLs = models.NewRelatedStrings(stringslice.TrimSpace(input.Urls)) } - return &newGroup, nil -} - -func (r *mutationResolver) GroupCreate(ctx 
context.Context, input GroupCreateInput) (*models.Group, error) { - newGroup, err := groupFromGroupCreateInput(ctx, input) - if err != nil { - return nil, err - } + newGroupInput.CustomFields = convertMapJSONNumbers(input.CustomFields) // Process the base 64 encoded image string - var frontimageData []byte if input.FrontImage != nil { - frontimageData, err = utils.ProcessImageInput(ctx, *input.FrontImage) + newGroupInput.FrontImageData, err = utils.ProcessImageInput(ctx, *input.FrontImage) if err != nil { return nil, fmt.Errorf("processing front image: %w", err) } } // Process the base 64 encoded image string - var backimageData []byte if input.BackImage != nil { - backimageData, err = utils.ProcessImageInput(ctx, *input.BackImage) + newGroupInput.BackImageData, err = utils.ProcessImageInput(ctx, *input.BackImage) if err != nil { return nil, fmt.Errorf("processing back image: %w", err) } @@ -87,13 +83,22 @@ func (r *mutationResolver) GroupCreate(ctx context.Context, input GroupCreateInp // HACK: if back image is being set, set the front image to the default. // This is because we can't have a null front image with a non-null back image. 
- if len(frontimageData) == 0 && len(backimageData) != 0 { - frontimageData = static.ReadAll(static.DefaultGroupImage) + if len(newGroupInput.FrontImageData) == 0 && len(newGroupInput.BackImageData) != 0 { + newGroupInput.FrontImageData = static.ReadAll(static.DefaultGroupImage) + } + + return newGroupInput, nil +} + +func (r *mutationResolver) GroupCreate(ctx context.Context, input GroupCreateInput) (*models.Group, error) { + createGroupInput, err := groupFromGroupCreateInput(ctx, input) + if err != nil { + return nil, err } // Start the transaction and save the group if err := r.withTxn(ctx, func(ctx context.Context) error { - if err = r.groupService.Create(ctx, newGroup, frontimageData, backimageData); err != nil { + if err = r.groupService.Create(ctx, createGroupInput); err != nil { return err } @@ -103,9 +108,9 @@ func (r *mutationResolver) GroupCreate(ctx context.Context, input GroupCreateInp } // for backwards compatibility - run both movie and group hooks - r.hookExecutor.ExecutePostHooks(ctx, newGroup.ID, hook.GroupCreatePost, input, nil) - r.hookExecutor.ExecutePostHooks(ctx, newGroup.ID, hook.MovieCreatePost, input, nil) - return r.getGroup(ctx, newGroup.ID) + r.hookExecutor.ExecutePostHooks(ctx, createGroupInput.Group.ID, hook.GroupCreatePost, input, nil) + r.hookExecutor.ExecutePostHooks(ctx, createGroupInput.Group.ID, hook.MovieCreatePost, input, nil) + return r.getGroup(ctx, createGroupInput.Group.ID) } func groupPartialFromGroupUpdateInput(translator changesetTranslator, input GroupUpdateInput) (ret models.GroupPartial, err error) { @@ -149,6 +154,12 @@ func groupPartialFromGroupUpdateInput(translator changesetTranslator, input Grou } updatedGroup.URLs = translator.updateStrings(input.Urls, "urls") + if input.CustomFields != nil { + updatedGroup.CustomFields = *input.CustomFields + // convert json.Numbers to int/float + updatedGroup.CustomFields.Full = convertMapJSONNumbers(updatedGroup.CustomFields.Full) + updatedGroup.CustomFields.Partial = 
convertMapJSONNumbers(updatedGroup.CustomFields.Partial) + } return updatedGroup, nil } @@ -216,6 +227,12 @@ func (r *mutationResolver) GroupUpdate(ctx context.Context, input GroupUpdateInp func groupPartialFromBulkGroupUpdateInput(translator changesetTranslator, input BulkGroupUpdateInput) (ret models.GroupPartial, err error) { updatedGroup := models.NewGroupPartial() + updatedGroup.Date, err = translator.optionalDate(input.Date, "date") + if err != nil { + err = fmt.Errorf("converting date: %w", err) + return + } + updatedGroup.Synopsis = translator.optionalString(input.Synopsis, "synopsis") updatedGroup.Rating = translator.optionalInt(input.Rating100, "rating100") updatedGroup.Director = translator.optionalString(input.Director, "director") @@ -245,6 +262,13 @@ func groupPartialFromBulkGroupUpdateInput(translator changesetTranslator, input updatedGroup.URLs = translator.optionalURLsBulk(input.Urls, nil) + if input.CustomFields != nil { + updatedGroup.CustomFields = *input.CustomFields + // convert json.Numbers to int/float + updatedGroup.CustomFields.Full = convertMapJSONNumbers(updatedGroup.CustomFields.Full) + updatedGroup.CustomFields.Partial = convertMapJSONNumbers(updatedGroup.CustomFields.Partial) + } + return updatedGroup, nil } diff --git a/internal/api/resolver_mutation_image.go b/internal/api/resolver_mutation_image.go index 721598634..cc03c5286 100644 --- a/internal/api/resolver_mutation_image.go +++ b/internal/api/resolver_mutation_image.go @@ -177,6 +177,13 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input models.ImageUp return nil, fmt.Errorf("converting tag ids: %w", err) } + if input.CustomFields != nil { + updatedImage.CustomFields = *input.CustomFields + // convert json.Numbers to int/float + updatedImage.CustomFields.Full = convertMapJSONNumbers(updatedImage.CustomFields.Full) + updatedImage.CustomFields.Partial = convertMapJSONNumbers(updatedImage.CustomFields.Partial) + } + qb := r.repository.Image image, err := 
qb.UpdatePartial(ctx, imageID, updatedImage) if err != nil { @@ -237,6 +244,13 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU return nil, fmt.Errorf("converting tag ids: %w", err) } + if input.CustomFields != nil { + updatedImage.CustomFields = *input.CustomFields + // convert json.Numbers to int/float + updatedImage.CustomFields.Full = convertMapJSONNumbers(updatedImage.CustomFields.Full) + updatedImage.CustomFields.Partial = convertMapJSONNumbers(updatedImage.CustomFields.Partial) + } + // Start the transaction and save the images if err := r.withTxn(ctx, func(ctx context.Context) error { var updatedGalleryIDs []int @@ -308,9 +322,11 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD return false, fmt.Errorf("converting id: %w", err) } + trashPath := manager.GetInstance().Config.GetDeleteTrashPath() + var i *models.Image fileDeleter := &image.FileDeleter{ - Deleter: file.NewDeleter(), + Deleter: file.NewDeleterWithTrash(trashPath), Paths: manager.GetInstance().Paths, } if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -323,7 +339,7 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD return fmt.Errorf("image with id %d not found", imageID) } - return r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)) + return r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile), utils.IsTrue(input.DestroyFileEntry)) }); err != nil { fileDeleter.Rollback() return false, err @@ -348,9 +364,11 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image return false, fmt.Errorf("converting ids: %w", err) } + trashPath := manager.GetInstance().Config.GetDeleteTrashPath() + var images []*models.Image fileDeleter := &image.FileDeleter{ - Deleter: file.NewDeleter(), + Deleter: file.NewDeleterWithTrash(trashPath), Paths: 
manager.GetInstance().Paths, } if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -368,7 +386,7 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image images = append(images, i) - if err := r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)); err != nil { + if err := r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile), utils.IsTrue(input.DestroyFileEntry)); err != nil { return err } } diff --git a/internal/api/resolver_mutation_metadata.go b/internal/api/resolver_mutation_metadata.go index 8120e2d31..ea6496800 100644 --- a/internal/api/resolver_mutation_metadata.go +++ b/internal/api/resolver_mutation_metadata.go @@ -122,9 +122,10 @@ func (r *mutationResolver) MigrateHashNaming(ctx context.Context) (string, error func (r *mutationResolver) BackupDatabase(ctx context.Context, input BackupDatabaseInput) (*string, error) { // if download is true, then backup to temporary file and return a link download := input.Download != nil && *input.Download + includeBlobs := input.IncludeBlobs != nil && *input.IncludeBlobs mgr := manager.GetInstance() - backupPath, backupName, err := mgr.BackupDatabase(download) + backupPath, backupName, err := mgr.BackupDatabase(download, includeBlobs) if err != nil { logger.Errorf("Error backing up database: %v", err) return nil, err diff --git a/internal/api/resolver_mutation_movie.go b/internal/api/resolver_mutation_movie.go index 2e1011083..cb19e540f 100644 --- a/internal/api/resolver_mutation_movie.go +++ b/internal/api/resolver_mutation_movie.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "strconv" + "strings" "github.com/stashapp/stash/internal/static" "github.com/stashapp/stash/pkg/models" @@ -32,7 +33,7 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInp // Populate a new group from the input newGroup := models.NewGroup() - newGroup.Name = input.Name 
+ newGroup.Name = strings.TrimSpace(input.Name) newGroup.Aliases = translator.string(input.Aliases) newGroup.Duration = input.Duration newGroup.Rating = input.Rating100 @@ -56,9 +57,9 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInp } if input.Urls != nil { - newGroup.URLs = models.NewRelatedStrings(input.Urls) + newGroup.URLs = models.NewRelatedStrings(stringslice.TrimSpace(input.Urls)) } else if input.URL != nil { - newGroup.URLs = models.NewRelatedStrings([]string{*input.URL}) + newGroup.URLs = models.NewRelatedStrings([]string{strings.TrimSpace(*input.URL)}) } // Process the base 64 encoded image string diff --git a/internal/api/resolver_mutation_performer.go b/internal/api/resolver_mutation_performer.go index 47b02147d..6f88c54ca 100644 --- a/internal/api/resolver_mutation_performer.go +++ b/internal/api/resolver_mutation_performer.go @@ -2,12 +2,16 @@ package api import ( "context" + "errors" "fmt" + "slices" "strconv" + "strings" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/plugin/hook" + "github.com/stashapp/stash/pkg/sliceutil" "github.com/stashapp/stash/pkg/sliceutil/stringslice" "github.com/stashapp/stash/pkg/utils" ) @@ -37,9 +41,9 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per // Populate a new performer from the input newPerformer := models.NewPerformer() - newPerformer.Name = input.Name + newPerformer.Name = strings.TrimSpace(input.Name) newPerformer.Disambiguation = translator.string(input.Disambiguation) - newPerformer.Aliases = models.NewRelatedStrings(input.AliasList) + newPerformer.Aliases = models.NewRelatedStrings(stringslice.UniqueExcludeFold(stringslice.TrimSpace(input.AliasList), newPerformer.Name)) newPerformer.Gender = input.Gender newPerformer.Ethnicity = translator.string(input.Ethnicity) newPerformer.Country = translator.string(input.Country) @@ -48,7 +52,6 @@ func (r *mutationResolver) 
PerformerCreate(ctx context.Context, input models.Per newPerformer.FakeTits = translator.string(input.FakeTits) newPerformer.PenisLength = input.PenisLength newPerformer.Circumcised = input.Circumcised - newPerformer.CareerLength = translator.string(input.CareerLength) newPerformer.Tattoos = translator.string(input.Tattoos) newPerformer.Piercings = translator.string(input.Piercings) newPerformer.Favorite = translator.bool(input.Favorite) @@ -62,17 +65,17 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per newPerformer.URLs = models.NewRelatedStrings([]string{}) if input.URL != nil { - newPerformer.URLs.Add(*input.URL) + newPerformer.URLs.Add(strings.TrimSpace(*input.URL)) } if input.Twitter != nil { - newPerformer.URLs.Add(utils.URLFromHandle(*input.Twitter, twitterURL)) + newPerformer.URLs.Add(utils.URLFromHandle(strings.TrimSpace(*input.Twitter), twitterURL)) } if input.Instagram != nil { - newPerformer.URLs.Add(utils.URLFromHandle(*input.Instagram, instagramURL)) + newPerformer.URLs.Add(utils.URLFromHandle(strings.TrimSpace(*input.Instagram), instagramURL)) } if input.Urls != nil { - newPerformer.URLs.Add(input.Urls...) + newPerformer.URLs.Add(stringslice.TrimSpace(input.Urls)...) 
} var err error @@ -86,6 +89,25 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per return nil, fmt.Errorf("converting death date: %w", err) } + newPerformer.CareerStart, err = translator.datePtr(input.CareerStart) + if err != nil { + return nil, fmt.Errorf("converting career start: %w", err) + } + newPerformer.CareerEnd, err = translator.datePtr(input.CareerEnd) + if err != nil { + return nil, fmt.Errorf("converting career end: %w", err) + } + + // if career_start/career_end not provided, parse deprecated career_length + if newPerformer.CareerStart == nil && newPerformer.CareerEnd == nil && input.CareerLength != nil { + start, end, err := models.ParseYearRangeString(*input.CareerLength) + if err != nil { + return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err) + } + newPerformer.CareerStart = start + newPerformer.CareerEnd = end + } + newPerformer.TagIDs, err = translator.relatedIds(input.TagIds) if err != nil { return nil, fmt.Errorf("converting tag ids: %w", err) @@ -135,7 +157,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per return r.getPerformer(ctx, newPerformer.ID) } -func (r *mutationResolver) validateNoLegacyURLs(translator changesetTranslator) error { +func validateNoLegacyURLs(translator changesetTranslator) error { // ensure url/twitter/instagram are not included in the input if translator.hasField("url") { return fmt.Errorf("url field must not be included if urls is included") @@ -150,7 +172,7 @@ func (r *mutationResolver) validateNoLegacyURLs(translator changesetTranslator) return nil } -func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int, legacyURL, legacyTwitter, legacyInstagram models.OptionalString, updatedPerformer *models.PerformerPartial) error { +func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int, legacyURLs legacyPerformerURLs, updatedPerformer *models.PerformerPartial) error { qb := 
r.repository.Performer // we need to be careful with URL/Twitter/Instagram @@ -169,23 +191,23 @@ func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int existingURLs := p.URLs.List() // performer partial URLs should be empty - if legacyURL.Set { + if legacyURLs.URL.Set { replaced := false for i, url := range existingURLs { if !performer.IsTwitterURL(url) && !performer.IsInstagramURL(url) { - existingURLs[i] = legacyURL.Value + existingURLs[i] = legacyURLs.URL.Value replaced = true break } } if !replaced { - existingURLs = append(existingURLs, legacyURL.Value) + existingURLs = append(existingURLs, legacyURLs.URL.Value) } } - if legacyTwitter.Set { - value := utils.URLFromHandle(legacyTwitter.Value, twitterURL) + if legacyURLs.Twitter.Set { + value := utils.URLFromHandle(legacyURLs.Twitter.Value, twitterURL) found := false // find and replace the first twitter URL for i, url := range existingURLs { @@ -200,9 +222,9 @@ func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int existingURLs = append(existingURLs, value) } } - if legacyInstagram.Set { + if legacyURLs.Instagram.Set { found := false - value := utils.URLFromHandle(legacyInstagram.Value, instagramURL) + value := utils.URLFromHandle(legacyURLs.Instagram.Value, instagramURL) // find and replace the first instagram URL for i, url := range existingURLs { if performer.IsInstagramURL(url) { @@ -225,16 +247,25 @@ func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int return nil } -func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.PerformerUpdateInput) (*models.Performer, error) { - performerID, err := strconv.Atoi(input.ID) - if err != nil { - return nil, fmt.Errorf("converting id: %w", err) - } +type legacyPerformerURLs struct { + URL models.OptionalString + Twitter models.OptionalString + Instagram models.OptionalString +} - translator := changesetTranslator{ - inputMap: getUpdateInputMap(ctx), - } +func (u 
*legacyPerformerURLs) AnySet() bool { + return u.URL.Set || u.Twitter.Set || u.Instagram.Set +} +func legacyPerformerURLsFromInput(input models.PerformerUpdateInput, translator changesetTranslator) legacyPerformerURLs { + return legacyPerformerURLs{ + URL: translator.optionalString(input.URL, "url"), + Twitter: translator.optionalString(input.Twitter, "twitter"), + Instagram: translator.optionalString(input.Instagram, "instagram"), + } +} + +func performerPartialFromInput(input models.PerformerUpdateInput, translator changesetTranslator) (*models.PerformerPartial, error) { // Populate performer from the input updatedPerformer := models.NewPerformerPartial() @@ -248,7 +279,29 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per updatedPerformer.FakeTits = translator.optionalString(input.FakeTits, "fake_tits") updatedPerformer.PenisLength = translator.optionalFloat64(input.PenisLength, "penis_length") updatedPerformer.Circumcised = translator.optionalString((*string)(input.Circumcised), "circumcised") - updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length") + // prefer career_start/career_end over deprecated career_length + if translator.hasField("career_start") || translator.hasField("career_end") { + var err error + updatedPerformer.CareerStart, err = translator.optionalDate(input.CareerStart, "career_start") + if err != nil { + return nil, fmt.Errorf("converting career start: %w", err) + } + updatedPerformer.CareerEnd, err = translator.optionalDate(input.CareerEnd, "career_end") + if err != nil { + return nil, fmt.Errorf("converting career end: %w", err) + } + } else if translator.hasField("career_length") && input.CareerLength != nil { + start, end, err := models.ParseYearRangeString(*input.CareerLength) + if err != nil { + return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err) + } + if start != nil { + updatedPerformer.CareerStart = 
models.NewOptionalDate(*start) + } + if end != nil { + updatedPerformer.CareerEnd = models.NewOptionalDate(*end) + } + } updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos") updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings") updatedPerformer.Favorite = translator.optionalBool(input.Favorite, "favorite") @@ -259,19 +312,17 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per updatedPerformer.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") updatedPerformer.StashIDs = translator.updateStashIDs(input.StashIds, "stash_ids") + var err error + if translator.hasField("urls") { // ensure url/twitter/instagram are not included in the input - if err := r.validateNoLegacyURLs(translator); err != nil { + if err := validateNoLegacyURLs(translator); err != nil { return nil, err } updatedPerformer.URLs = translator.updateStrings(input.Urls, "urls") } - legacyURL := translator.optionalString(input.URL, "url") - legacyTwitter := translator.optionalString(input.Twitter, "twitter") - legacyInstagram := translator.optionalString(input.Instagram, "instagram") - updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate") if err != nil { return nil, fmt.Errorf("converting birthdate: %w", err) @@ -296,10 +347,27 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per return nil, fmt.Errorf("converting tag ids: %w", err) } - updatedPerformer.CustomFields = input.CustomFields - // convert json.Numbers to int/float - updatedPerformer.CustomFields.Full = convertMapJSONNumbers(updatedPerformer.CustomFields.Full) - updatedPerformer.CustomFields.Partial = convertMapJSONNumbers(updatedPerformer.CustomFields.Partial) + updatedPerformer.CustomFields = handleUpdateCustomFields(input.CustomFields) + + return &updatedPerformer, nil +} + +func (r *mutationResolver) PerformerUpdate(ctx context.Context, input 
models.PerformerUpdateInput) (*models.Performer, error) { + performerID, err := strconv.Atoi(input.ID) + if err != nil { + return nil, fmt.Errorf("converting id: %w", err) + } + + translator := changesetTranslator{ + inputMap: getUpdateInputMap(ctx), + } + + updatedPerformer, err := performerPartialFromInput(input, translator) + if err != nil { + return nil, err + } + + legacyURLs := legacyPerformerURLsFromInput(input, translator) var imageData []byte imageIncluded := translator.hasField("image") @@ -314,17 +382,38 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Performer - if legacyURL.Set || legacyTwitter.Set || legacyInstagram.Set { - if err := r.handleLegacyURLs(ctx, performerID, legacyURL, legacyTwitter, legacyInstagram, &updatedPerformer); err != nil { + if legacyURLs.AnySet() { + if err := r.handleLegacyURLs(ctx, performerID, legacyURLs, updatedPerformer); err != nil { return err } } - if err := performer.ValidateUpdate(ctx, performerID, updatedPerformer, qb); err != nil { + if updatedPerformer.Aliases != nil { + p, err := qb.Find(ctx, performerID) + if err != nil { + return err + } + if p != nil { + if err := p.LoadAliases(ctx, qb); err != nil { + return err + } + + effectiveAliases := updatedPerformer.Aliases.Apply(p.Aliases.List()) + name := p.Name + if updatedPerformer.Name.Set { + name = updatedPerformer.Name.Value + } + + sanitized := stringslice.UniqueExcludeFold(effectiveAliases, name) + updatedPerformer.Aliases.Values = sanitized + updatedPerformer.Aliases.Mode = models.RelationshipUpdateModeSet + } + } + if err := performer.ValidateUpdate(ctx, performerID, *updatedPerformer, qb); err != nil { return err } - _, err = qb.UpdatePartial(ctx, performerID, updatedPerformer) + _, err = qb.UpdatePartial(ctx, performerID, *updatedPerformer) if err != nil { return err } @@ -368,7 +457,28 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx 
context.Context, input BulkPe updatedPerformer.FakeTits = translator.optionalString(input.FakeTits, "fake_tits") updatedPerformer.PenisLength = translator.optionalFloat64(input.PenisLength, "penis_length") updatedPerformer.Circumcised = translator.optionalString((*string)(input.Circumcised), "circumcised") - updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length") + // prefer career_start/career_end over deprecated career_length + if translator.hasField("career_start") || translator.hasField("career_end") { + updatedPerformer.CareerStart, err = translator.optionalDate(input.CareerStart, "career_start") + if err != nil { + return nil, fmt.Errorf("converting career start: %w", err) + } + updatedPerformer.CareerEnd, err = translator.optionalDate(input.CareerEnd, "career_end") + if err != nil { + return nil, fmt.Errorf("converting career end: %w", err) + } + } else if translator.hasField("career_length") && input.CareerLength != nil { + start, end, err := models.ParseYearRangeString(*input.CareerLength) + if err != nil { + return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err) + } + if start != nil { + updatedPerformer.CareerStart = models.NewOptionalDate(*start) + } + if end != nil { + updatedPerformer.CareerEnd = models.NewOptionalDate(*end) + } + } updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos") updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings") @@ -381,16 +491,18 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe if translator.hasField("urls") { // ensure url/twitter/instagram are not included in the input - if err := r.validateNoLegacyURLs(translator); err != nil { + if err := validateNoLegacyURLs(translator); err != nil { return nil, err } updatedPerformer.URLs = translator.updateStringsBulk(input.Urls, "urls") } - legacyURL := translator.optionalString(input.URL, "url") - legacyTwitter := 
translator.optionalString(input.Twitter, "twitter") - legacyInstagram := translator.optionalString(input.Instagram, "instagram") + legacyURLs := legacyPerformerURLs{ + URL: translator.optionalString(input.URL, "url"), + Twitter: translator.optionalString(input.Twitter, "twitter"), + Instagram: translator.optionalString(input.Instagram, "instagram"), + } updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate") if err != nil { @@ -416,6 +528,10 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe return nil, fmt.Errorf("converting tag ids: %w", err) } + if input.CustomFields != nil { + updatedPerformer.CustomFields = handleUpdateCustomFields(*input.CustomFields) + } + ret := []*models.Performer{} // Start the transaction and save the performers @@ -423,8 +539,8 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe qb := r.repository.Performer for _, performerID := range performerIDs { - if legacyURL.Set || legacyTwitter.Set || legacyInstagram.Set { - if err := r.handleLegacyURLs(ctx, performerID, legacyURL, legacyTwitter, legacyInstagram, &updatedPerformer); err != nil { + if legacyURLs.AnySet() { + if err := r.handleLegacyURLs(ctx, performerID, legacyURLs, &updatedPerformer); err != nil { return err } } @@ -504,3 +620,87 @@ func (r *mutationResolver) PerformersDestroy(ctx context.Context, performerIDs [ return true, nil } + +func (r *mutationResolver) PerformerMerge(ctx context.Context, input PerformerMergeInput) (*models.Performer, error) { + srcIDs, err := stringslice.StringSliceToIntSlice(input.Source) + if err != nil { + return nil, fmt.Errorf("converting source ids: %w", err) + } + + // ensure source ids are unique + srcIDs = sliceutil.AppendUniques(nil, srcIDs) + + destID, err := strconv.Atoi(input.Destination) + if err != nil { + return nil, fmt.Errorf("converting destination id: %w", err) + } + + // ensure destination is not in source list + if slices.Contains(srcIDs, 
destID) { + return nil, errors.New("destination performer cannot be in source list") + } + + var values *models.PerformerPartial + var imageData []byte + + if input.Values != nil { + translator := changesetTranslator{ + inputMap: getNamedUpdateInputMap(ctx, "input.values"), + } + + values, err = performerPartialFromInput(*input.Values, translator) + if err != nil { + return nil, err + } + legacyURLs := legacyPerformerURLsFromInput(*input.Values, translator) + if legacyURLs.AnySet() { + return nil, errors.New("Merging legacy performer URLs is not supported") + } + + if input.Values.Image != nil { + var err error + imageData, err = utils.ProcessImageInput(ctx, *input.Values.Image) + if err != nil { + return nil, fmt.Errorf("processing cover image: %w", err) + } + } + } else { + v := models.NewPerformerPartial() + values = &v + } + + var dest *models.Performer + if err := r.withTxn(ctx, func(ctx context.Context) error { + qb := r.repository.Performer + + dest, err = qb.Find(ctx, destID) + if err != nil { + return fmt.Errorf("finding destination performer ID %d: %w", destID, err) + } + + // ensure source performers exist + if _, err := qb.FindMany(ctx, srcIDs); err != nil { + return fmt.Errorf("finding source performers: %w", err) + } + + if _, err := qb.UpdatePartial(ctx, destID, *values); err != nil { + return fmt.Errorf("updating performer: %w", err) + } + + if err := qb.Merge(ctx, srcIDs, destID); err != nil { + return fmt.Errorf("merging performers: %w", err) + } + + if len(imageData) > 0 { + if err := qb.UpdateImage(ctx, destID, imageData); err != nil { + return err + } + } + + return nil + }); err != nil { + return nil, err + } + + return dest, nil +} diff --git a/internal/api/resolver_mutation_saved_filter.go b/internal/api/resolver_mutation_saved_filter.go index e49c1214c..6e825e3d5 100644 --- a/internal/api/resolver_mutation_saved_filter.go +++ b/internal/api/resolver_mutation_saved_filter.go @@ -32,7 +32,7 @@ func (r *mutationResolver) SaveFilter(ctx 
context.Context, input SaveFilterInput f := models.SavedFilter{ Mode: input.Mode, - Name: input.Name, + Name: strings.TrimSpace(input.Name), FindFilter: input.FindFilter, ObjectFilter: input.ObjectFilter, UIOptions: input.UIOptions, diff --git a/internal/api/resolver_mutation_scene.go b/internal/api/resolver_mutation_scene.go index b740955d0..70158fc6f 100644 --- a/internal/api/resolver_mutation_scene.go +++ b/internal/api/resolver_mutation_scene.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "strconv" + "strings" "time" "github.com/stashapp/stash/internal/manager" @@ -62,9 +63,9 @@ func (r *mutationResolver) SceneCreate(ctx context.Context, input models.SceneCr } if input.Urls != nil { - newScene.URLs = models.NewRelatedStrings(input.Urls) + newScene.URLs = models.NewRelatedStrings(stringslice.TrimSpace(input.Urls)) } else if input.URL != nil { - newScene.URLs = models.NewRelatedStrings([]string{*input.URL}) + newScene.URLs = models.NewRelatedStrings([]string{strings.TrimSpace(*input.URL)}) } newScene.PerformerIDs, err = translator.relatedIds(input.PerformerIds) @@ -102,8 +103,15 @@ func (r *mutationResolver) SceneCreate(ctx context.Context, input models.SceneCr } } + customFields := convertMapJSONNumbers(input.CustomFields) + if err := r.withTxn(ctx, func(ctx context.Context) error { - ret, err = r.Resolver.sceneService.Create(ctx, &newScene, fileIDs, coverImageData) + ret, err = r.Resolver.sceneService.Create(ctx, models.CreateSceneInput{ + Scene: &newScene, + FileIDs: fileIDs, + CoverImage: coverImageData, + CustomFields: customFields, + }) return err }); err != nil { return nil, err @@ -296,6 +304,7 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp } var coverImageData []byte + coverImageIncluded := translator.hasField("cover_image") if input.CoverImage != nil { var err error coverImageData, err = utils.ProcessImageInput(ctx, *input.CoverImage) @@ -304,26 +313,41 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input 
models.SceneUp } } + var customFields *models.CustomFieldsInput + if input.CustomFields != nil { + cfCopy := *input.CustomFields + customFields = &cfCopy + // convert json.Numbers to int/float + customFields.Full = convertMapJSONNumbers(customFields.Full) + customFields.Partial = convertMapJSONNumbers(customFields.Partial) + } + scene, err := qb.UpdatePartial(ctx, sceneID, *updatedScene) if err != nil { return nil, err } - if err := r.sceneUpdateCoverImage(ctx, scene, coverImageData); err != nil { - return nil, err + if coverImageIncluded { + if err := r.sceneUpdateCoverImage(ctx, scene, coverImageData); err != nil { + return nil, err + } + } + + if customFields != nil { + if err := qb.SetCustomFields(ctx, scene.ID, *customFields); err != nil { + return nil, err + } } return scene, nil } func (r *mutationResolver) sceneUpdateCoverImage(ctx context.Context, s *models.Scene, coverImageData []byte) error { - if len(coverImageData) > 0 { - qb := r.repository.Scene + qb := r.repository.Scene - // update cover table - if err := qb.UpdateCover(ctx, s.ID, coverImageData); err != nil { - return err - } + // update cover table - empty data will clear the cover + if err := qb.UpdateCover(ctx, s.ID, coverImageData); err != nil { + return err } return nil @@ -385,6 +409,12 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU } } + var customFields *models.CustomFieldsInput + if input.CustomFields != nil { + cf := handleUpdateCustomFields(*input.CustomFields) + customFields = &cf + } + ret := []*models.Scene{} // Start the transaction and save the scenes @@ -397,6 +427,12 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU return err } + if customFields != nil { + if err := qb.SetCustomFields(ctx, scene.ID, *customFields); err != nil { + return err + } + } + ret = append(ret, scene) } @@ -428,16 +464,18 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD } fileNamingAlgo := 
manager.GetInstance().Config.GetVideoFileNamingAlgorithm() + trashPath := manager.GetInstance().Config.GetDeleteTrashPath() var s *models.Scene fileDeleter := &scene.FileDeleter{ - Deleter: file.NewDeleter(), + Deleter: file.NewDeleterWithTrash(trashPath), FileNamingAlgo: fileNamingAlgo, Paths: manager.GetInstance().Paths, } deleteGenerated := utils.IsTrue(input.DeleteGenerated) deleteFile := utils.IsTrue(input.DeleteFile) + destroyFileEntry := utils.IsTrue(input.DestroyFileEntry) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Scene @@ -454,7 +492,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD // kill any running encoders manager.KillRunningStreams(s, fileNamingAlgo) - return r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, deleteFile) + return r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry) }); err != nil { fileDeleter.Rollback() return false, err @@ -482,15 +520,17 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene var scenes []*models.Scene fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm() + trashPath := manager.GetInstance().Config.GetDeleteTrashPath() fileDeleter := &scene.FileDeleter{ - Deleter: file.NewDeleter(), + Deleter: file.NewDeleterWithTrash(trashPath), FileNamingAlgo: fileNamingAlgo, Paths: manager.GetInstance().Paths, } deleteGenerated := utils.IsTrue(input.DeleteGenerated) deleteFile := utils.IsTrue(input.DeleteFile) + destroyFileEntry := utils.IsTrue(input.DestroyFileEntry) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Scene @@ -509,7 +549,7 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene // kill any running encoders manager.KillRunningStreams(scene, fileNamingAlgo) - if err := r.sceneService.Destroy(ctx, scene, fileDeleter, deleteGenerated, deleteFile); err != nil { + if err := 
r.sceneService.Destroy(ctx, scene, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil { return err } } @@ -569,6 +609,7 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput var values *models.ScenePartial var coverImageData []byte + var customFields *models.CustomFieldsInput if input.Values != nil { translator := changesetTranslator{ @@ -587,14 +628,20 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput return nil, fmt.Errorf("processing cover image: %w", err) } } + + if input.Values.CustomFields != nil { + cf := handleUpdateCustomFields(*input.Values.CustomFields) + customFields = &cf + } } else { v := models.NewScenePartial() values = &v } mgr := manager.GetInstance() + trashPath := mgr.Config.GetDeleteTrashPath() fileDeleter := &scene.FileDeleter{ - Deleter: file.NewDeleter(), + Deleter: file.NewDeleterWithTrash(trashPath), FileNamingAlgo: mgr.Config.GetVideoFileNamingAlgorithm(), Paths: mgr.Paths, } @@ -617,7 +664,20 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput return fmt.Errorf("scene with id %d not found", destID) } - return r.sceneUpdateCoverImage(ctx, ret, coverImageData) + // only update cover image if one was provided + if len(coverImageData) > 0 { + if err := r.sceneUpdateCoverImage(ctx, ret, coverImageData); err != nil { + return err + } + } + + if customFields != nil { + if err := r.Resolver.repository.Scene.SetCustomFields(ctx, ret.ID, *customFields); err != nil { + return err + } + } + + return nil }); err != nil { return nil, err } @@ -650,7 +710,7 @@ func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input SceneMar // Populate a new scene marker from the input newMarker := models.NewSceneMarker() - newMarker.Title = input.Title + newMarker.Title = strings.TrimSpace(input.Title) newMarker.Seconds = input.Seconds newMarker.PrimaryTagID = primaryTagID newMarker.SceneID = sceneID @@ -736,9 +796,10 @@ func (r 
*mutationResolver) SceneMarkerUpdate(ctx context.Context, input SceneMar } mgr := manager.GetInstance() + trashPath := mgr.Config.GetDeleteTrashPath() fileDeleter := &scene.FileDeleter{ - Deleter: file.NewDeleter(), + Deleter: file.NewDeleterWithTrash(trashPath), FileNamingAlgo: mgr.Config.GetVideoFileNamingAlgorithm(), Paths: mgr.Paths, } @@ -820,6 +881,123 @@ func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input SceneMar return r.getSceneMarker(ctx, markerID) } +func (r *mutationResolver) BulkSceneMarkerUpdate(ctx context.Context, input BulkSceneMarkerUpdateInput) ([]*models.SceneMarker, error) { + ids, err := stringslice.StringSliceToIntSlice(input.Ids) + if err != nil { + return nil, fmt.Errorf("converting ids: %w", err) + } + + translator := changesetTranslator{ + inputMap: getUpdateInputMap(ctx), + } + + // Populate performer from the input + partial := models.NewSceneMarkerPartial() + + partial.Title = translator.optionalString(input.Title, "title") + + partial.PrimaryTagID, err = translator.optionalIntFromString(input.PrimaryTagID, "primary_tag_id") + if err != nil { + return nil, fmt.Errorf("converting primary tag id: %w", err) + } + + partial.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids") + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) + } + + ret := []*models.SceneMarker{} + + // Start the transaction and save the performers + if err := r.withTxn(ctx, func(ctx context.Context) error { + qb := r.repository.SceneMarker + + for _, id := range ids { + l := partial + + if err := adjustMarkerPartialForTagExclusion(ctx, r.repository.SceneMarker, id, &l); err != nil { + return err + } + + updated, err := qb.UpdatePartial(ctx, id, l) + if err != nil { + return err + } + + ret = append(ret, updated) + } + + return nil + }); err != nil { + return nil, err + } + + // execute post hooks outside of txn + var newRet []*models.SceneMarker + for _, m := range ret { + r.hookExecutor.ExecutePostHooks(ctx, m.ID, 
hook.SceneMarkerUpdatePost, input, translator.getFields()) + + m, err = r.getSceneMarker(ctx, m.ID) + if err != nil { + return nil, err + } + + newRet = append(newRet, m) + } + + return newRet, nil +} + +// adjustMarkerPartialForTagExclusion adjusts the SceneMarkerPartial to exclude the primary tag from tag updates. +func adjustMarkerPartialForTagExclusion(ctx context.Context, r models.SceneMarkerReader, id int, partial *models.SceneMarkerPartial) error { + if partial.TagIDs == nil && !partial.PrimaryTagID.Set { + return nil + } + + // exclude primary tag from tag updates + var primaryTagID int + if partial.PrimaryTagID.Set { + primaryTagID = partial.PrimaryTagID.Value + } else { + existing, err := r.Find(ctx, id) + if err != nil { + return fmt.Errorf("finding existing primary tag id: %w", err) + } + + primaryTagID = existing.PrimaryTagID + } + + existingTagIDs, err := r.GetTagIDs(ctx, id) + if err != nil { + return fmt.Errorf("getting existing tag ids: %w", err) + } + + tagIDAttr := partial.TagIDs + + if tagIDAttr == nil { + tagIDAttr = &models.UpdateIDs{ + IDs: existingTagIDs, + Mode: models.RelationshipUpdateModeSet, + } + } + + newTagIDs := tagIDAttr.Apply(existingTagIDs) + // Remove primary tag from newTagIDs if present + newTagIDs = sliceutil.Exclude(newTagIDs, []int{primaryTagID}) + + if len(existingTagIDs) != len(newTagIDs) { + partial.TagIDs = &models.UpdateIDs{ + IDs: newTagIDs, + Mode: models.RelationshipUpdateModeSet, + } + } else { + // no change to tags required + partial.TagIDs = nil + } + + return nil +} + func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (bool, error) { return r.SceneMarkersDestroy(ctx, []string{id}) } @@ -832,9 +1010,10 @@ func (r *mutationResolver) SceneMarkersDestroy(ctx context.Context, markerIDs [] var markers []*models.SceneMarker fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm() + trashPath := manager.GetInstance().Config.GetDeleteTrashPath() fileDeleter := 
&scene.FileDeleter{ - Deleter: file.NewDeleter(), + Deleter: file.NewDeleterWithTrash(trashPath), FileNamingAlgo: fileNamingAlgo, Paths: manager.GetInstance().Paths, } diff --git a/internal/api/resolver_mutation_stash_box.go b/internal/api/resolver_mutation_stash_box.go index bbfe8b854..6d2ab84fd 100644 --- a/internal/api/resolver_mutation_stash_box.go +++ b/internal/api/resolver_mutation_stash_box.go @@ -39,7 +39,7 @@ func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input } func (r *mutationResolver) StashBoxBatchPerformerTag(ctx context.Context, input manager.StashBoxBatchTagInput) (string, error) { - b, err := resolveStashBoxBatchTagInput(input.Endpoint, input.StashBoxEndpoint) + b, err := resolveStashBoxBatchTagInput(input.Endpoint, input.StashBoxEndpoint) //nolint:staticcheck if err != nil { return "", err } @@ -49,7 +49,7 @@ func (r *mutationResolver) StashBoxBatchPerformerTag(ctx context.Context, input } func (r *mutationResolver) StashBoxBatchStudioTag(ctx context.Context, input manager.StashBoxBatchTagInput) (string, error) { - b, err := resolveStashBoxBatchTagInput(input.Endpoint, input.StashBoxEndpoint) + b, err := resolveStashBoxBatchTagInput(input.Endpoint, input.StashBoxEndpoint) //nolint:staticcheck if err != nil { return "", err } @@ -58,6 +58,16 @@ func (r *mutationResolver) StashBoxBatchStudioTag(ctx context.Context, input man return strconv.Itoa(jobID), nil } +func (r *mutationResolver) StashBoxBatchTagTag(ctx context.Context, input manager.StashBoxBatchTagInput) (string, error) { + b, err := resolveStashBoxBatchTagInput(input.Endpoint, input.StashBoxEndpoint) //nolint:staticcheck + if err != nil { + return "", err + } + + jobID := manager.GetInstance().StashBoxBatchTagTag(ctx, b, input) + return strconv.Itoa(jobID), nil +} + func (r *mutationResolver) SubmitStashBoxSceneDraft(ctx context.Context, input StashBoxDraftSubmissionInput) (*string, error) { b, err := resolveStashBox(input.StashBoxIndex, input.StashBoxEndpoint) 
if err != nil { @@ -153,6 +163,14 @@ func (r *mutationResolver) makeSceneDraft(ctx context.Context, s *models.Scene, return nil, err } + // Load StashIDs for tags + tqb := r.repository.Tag + for _, t := range draft.Tags { + if err := t.LoadStashIDs(ctx, tqb); err != nil { + return nil, err + } + } + draft.Cover = cover return draft, nil diff --git a/internal/api/resolver_mutation_studio.go b/internal/api/resolver_mutation_studio.go index 727951755..c7af918a1 100644 --- a/internal/api/resolver_mutation_studio.go +++ b/internal/api/resolver_mutation_studio.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "strconv" + "strings" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin/hook" @@ -30,19 +31,28 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio } // Populate a new studio from the input - newStudio := models.NewStudio() + newStudio := models.NewCreateStudioInput() - newStudio.Name = input.Name - newStudio.URL = translator.string(input.URL) + newStudio.Name = strings.TrimSpace(input.Name) newStudio.Rating = input.Rating100 newStudio.Favorite = translator.bool(input.Favorite) newStudio.Details = translator.string(input.Details) newStudio.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag) - newStudio.Aliases = models.NewRelatedStrings(input.Aliases) + newStudio.Organized = translator.bool(input.Organized) + newStudio.Aliases = models.NewRelatedStrings(stringslice.UniqueExcludeFold(stringslice.TrimSpace(input.Aliases), newStudio.Name)) newStudio.StashIDs = models.NewRelatedStashIDs(models.StashIDInputs(input.StashIds).ToStashIDs()) var err error + newStudio.URLs = models.NewRelatedStrings([]string{}) + if input.URL != nil { + newStudio.URLs.Add(strings.TrimSpace(*input.URL)) + } + + if input.Urls != nil { + newStudio.URLs.Add(stringslice.TrimSpace(input.Urls)...) 
+ } + newStudio.ParentID, err = translator.intPtrFromString(input.ParentID) if err != nil { return nil, fmt.Errorf("converting parent id: %w", err) @@ -52,6 +62,7 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio if err != nil { return nil, fmt.Errorf("converting tag ids: %w", err) } + newStudio.CustomFields = convertMapJSONNumbers(input.CustomFields) // Process the base 64 encoded image string var imageData []byte @@ -106,11 +117,11 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio updatedStudio.ID = studioID updatedStudio.Name = translator.optionalString(input.Name, "name") - updatedStudio.URL = translator.optionalString(input.URL, "url") updatedStudio.Details = translator.optionalString(input.Details, "details") updatedStudio.Rating = translator.optionalInt(input.Rating100, "rating100") updatedStudio.Favorite = translator.optionalBool(input.Favorite, "favorite") updatedStudio.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") + updatedStudio.Organized = translator.optionalBool(input.Organized, "organized") updatedStudio.Aliases = translator.updateStrings(input.Aliases, "aliases") updatedStudio.StashIDs = translator.updateStashIDs(input.StashIds, "stash_ids") @@ -124,6 +135,31 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio return nil, fmt.Errorf("converting tag ids: %w", err) } + if translator.hasField("urls") { + // ensure url not included in the input + if err := validateNoLegacyURLs(translator); err != nil { + return nil, err + } + + updatedStudio.URLs = translator.updateStrings(input.Urls, "urls") + } else if translator.hasField("url") { + // handle legacy url field + legacyURLs := []string{} + if input.URL != nil { + legacyURLs = append(legacyURLs, *input.URL) + } + + updatedStudio.URLs = &models.UpdateStrings{ + Mode: models.RelationshipUpdateModeSet, + Values: legacyURLs, + } + } + + updatedStudio.CustomFields = 
input.CustomFields + // convert json.Numbers to int/float + updatedStudio.CustomFields.Full = convertMapJSONNumbers(updatedStudio.CustomFields.Full) + updatedStudio.CustomFields.Partial = convertMapJSONNumbers(updatedStudio.CustomFields.Partial) + // Process the base 64 encoded image string var imageData []byte imageIncluded := translator.hasField("image") @@ -139,6 +175,28 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Studio + if updatedStudio.Aliases != nil { + s, err := qb.Find(ctx, studioID) + if err != nil { + return err + } + if s != nil { + if err := s.LoadAliases(ctx, qb); err != nil { + return err + } + + effectiveAliases := updatedStudio.Aliases.Apply(s.Aliases.List()) + name := s.Name + if updatedStudio.Name.Set { + name = updatedStudio.Name.Value + } + + sanitized := stringslice.UniqueExcludeFold(effectiveAliases, name) + updatedStudio.Aliases.Values = sanitized + updatedStudio.Aliases.Mode = models.RelationshipUpdateModeSet + } + } + if err := studio.ValidateModify(ctx, updatedStudio, qb); err != nil { return err } @@ -163,6 +221,97 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio return r.getStudio(ctx, studioID) } +func (r *mutationResolver) BulkStudioUpdate(ctx context.Context, input BulkStudioUpdateInput) ([]*models.Studio, error) { + ids, err := stringslice.StringSliceToIntSlice(input.Ids) + if err != nil { + return nil, fmt.Errorf("converting ids: %w", err) + } + + translator := changesetTranslator{ + inputMap: getUpdateInputMap(ctx), + } + + // Populate performer from the input + partial := models.NewStudioPartial() + + partial.ParentID, err = translator.optionalIntFromString(input.ParentID, "parent_id") + if err != nil { + return nil, fmt.Errorf("converting parent id: %w", err) + } + + if translator.hasField("urls") { + // ensure url/twitter/instagram are not included in the input + if err := 
validateNoLegacyURLs(translator); err != nil { + return nil, err + } + + partial.URLs = translator.updateStringsBulk(input.Urls, "urls") + } else if translator.hasField("url") { + // handle legacy url field + legacyURLs := []string{} + if input.URL != nil { + legacyURLs = append(legacyURLs, *input.URL) + } + + partial.URLs = &models.UpdateStrings{ + Mode: models.RelationshipUpdateModeSet, + Values: legacyURLs, + } + } + + partial.Favorite = translator.optionalBool(input.Favorite, "favorite") + partial.Rating = translator.optionalInt(input.Rating100, "rating100") + partial.Details = translator.optionalString(input.Details, "details") + partial.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") + partial.Organized = translator.optionalBool(input.Organized, "organized") + + partial.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids") + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) + } + + ret := []*models.Studio{} + + // Start the transaction and save the performers + if err := r.withTxn(ctx, func(ctx context.Context) error { + qb := r.repository.Studio + + for _, id := range ids { + local := partial + local.ID = id + if err := studio.ValidateModify(ctx, local, qb); err != nil { + return err + } + + updated, err := qb.UpdatePartial(ctx, local) + if err != nil { + return err + } + + ret = append(ret, updated) + } + + return nil + }); err != nil { + return nil, err + } + + // execute post hooks outside of txn + var newRet []*models.Studio + for _, studio := range ret { + r.hookExecutor.ExecutePostHooks(ctx, studio.ID, hook.StudioUpdatePost, input, translator.getFields()) + + studio, err = r.getStudio(ctx, studio.ID) + if err != nil { + return nil, err + } + + newRet = append(newRet, studio) + } + + return newRet, nil +} + func (r *mutationResolver) StudioDestroy(ctx context.Context, input StudioDestroyInput) (bool, error) { id, err := strconv.Atoi(input.ID) if err != nil { diff --git 
a/internal/api/resolver_mutation_tag.go b/internal/api/resolver_mutation_tag.go index 1e8b6066a..ac0183b74 100644 --- a/internal/api/resolver_mutation_tag.go +++ b/internal/api/resolver_mutation_tag.go @@ -4,8 +4,8 @@ import ( "context" "fmt" "strconv" + "strings" - "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin/hook" "github.com/stashapp/stash/pkg/sliceutil/stringslice" @@ -30,15 +30,26 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) } // Populate a new tag from the input - newTag := models.NewTag() + newTag := models.CreateTagInput{ + Tag: &models.Tag{}, + } + *newTag.Tag = models.NewTag() - newTag.Name = input.Name + newTag.Name = strings.TrimSpace(input.Name) newTag.SortName = translator.string(input.SortName) - newTag.Aliases = models.NewRelatedStrings(input.Aliases) + newTag.Aliases = models.NewRelatedStrings(stringslice.UniqueExcludeFold(stringslice.TrimSpace(input.Aliases), newTag.Name)) newTag.Favorite = translator.bool(input.Favorite) newTag.Description = translator.string(input.Description) newTag.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag) + var stashIDInputs models.StashIDInputs + for _, sid := range input.StashIds { + if sid != nil { + stashIDInputs = append(stashIDInputs, *sid) + } + } + newTag.StashIDs = models.NewRelatedStashIDs(stashIDInputs.ToStashIDs()) + var err error newTag.ParentIDs, err = translator.relatedIds(input.ParentIds) @@ -51,6 +62,8 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) return nil, fmt.Errorf("converting child tag ids: %w", err) } + newTag.CustomFields = convertMapJSONNumbers(input.CustomFields) + // Process the base 64 encoded image string var imageData []byte if input.Image != nil { @@ -64,7 +77,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Tag - if err := 
tag.ValidateCreate(ctx, newTag, qb); err != nil { + if err := tag.ValidateCreate(ctx, *newTag.Tag, qb); err != nil { return err } @@ -89,6 +102,46 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) return r.getTag(ctx, newTag.ID) } +func tagPartialFromInput(input TagUpdateInput, translator changesetTranslator) (*models.TagPartial, error) { + updatedTag := models.NewTagPartial() + + updatedTag.Name = translator.optionalString(input.Name, "name") + updatedTag.SortName = translator.optionalString(input.SortName, "sort_name") + updatedTag.Favorite = translator.optionalBool(input.Favorite, "favorite") + updatedTag.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") + updatedTag.Description = translator.optionalString(input.Description, "description") + + updatedTag.Aliases = translator.updateStrings(input.Aliases, "aliases") + + var updateStashIDInputs models.StashIDInputs + for _, sid := range input.StashIds { + if sid != nil { + updateStashIDInputs = append(updateStashIDInputs, *sid) + } + } + updatedTag.StashIDs = translator.updateStashIDs(updateStashIDInputs, "stash_ids") + + var err error + updatedTag.ParentIDs, err = translator.updateIds(input.ParentIds, "parent_ids") + if err != nil { + return nil, fmt.Errorf("converting parent tag ids: %w", err) + } + + updatedTag.ChildIDs, err = translator.updateIds(input.ChildIds, "child_ids") + if err != nil { + return nil, fmt.Errorf("converting child tag ids: %w", err) + } + + if input.CustomFields != nil { + updatedTag.CustomFields = *input.CustomFields + // convert json.Numbers to int/float + updatedTag.CustomFields.Full = convertMapJSONNumbers(updatedTag.CustomFields.Full) + updatedTag.CustomFields.Partial = convertMapJSONNumbers(updatedTag.CustomFields.Partial) + } + + return &updatedTag, nil +} + func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) (*models.Tag, error) { tagID, err := strconv.Atoi(input.ID) if err != nil { @@ -100,24 
+153,9 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) } // Populate tag from the input - updatedTag := models.NewTagPartial() - - updatedTag.Name = translator.optionalString(input.Name, "name") - updatedTag.SortName = translator.optionalString(input.SortName, "sort_name") - updatedTag.Favorite = translator.optionalBool(input.Favorite, "favorite") - updatedTag.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") - updatedTag.Description = translator.optionalString(input.Description, "description") - - updatedTag.Aliases = translator.updateStrings(input.Aliases, "aliases") - - updatedTag.ParentIDs, err = translator.updateIds(input.ParentIds, "parent_ids") + updatedTag, err := tagPartialFromInput(input, translator) if err != nil { - return nil, fmt.Errorf("converting parent tag ids: %w", err) - } - - updatedTag.ChildIDs, err = translator.updateIds(input.ChildIds, "child_ids") - if err != nil { - return nil, fmt.Errorf("converting child tag ids: %w", err) + return nil, err } var imageData []byte @@ -134,11 +172,33 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Tag - if err := tag.ValidateUpdate(ctx, tagID, updatedTag, qb); err != nil { + if updatedTag.Aliases != nil { + t, err := qb.Find(ctx, tagID) + if err != nil { + return err + } + if t != nil { + if err := t.LoadAliases(ctx, qb); err != nil { + return err + } + + newAliases := updatedTag.Aliases.Apply(t.Aliases.List()) + name := t.Name + if updatedTag.Name.Set { + name = updatedTag.Name.Value + } + + sanitized := stringslice.UniqueExcludeFold(newAliases, name) + updatedTag.Aliases.Values = sanitized + updatedTag.Aliases.Mode = models.RelationshipUpdateModeSet + } + } + + if err := tag.ValidateUpdate(ctx, tagID, *updatedTag, qb); err != nil { return err } - t, err = qb.UpdatePartial(ctx, tagID, updatedTag) + t, err = qb.UpdatePartial(ctx, 
tagID, *updatedTag) if err != nil { return err } @@ -286,6 +346,31 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input TagsMergeInput) return nil, nil } + var values *models.TagPartial + var imageData []byte + + if input.Values != nil { + translator := changesetTranslator{ + inputMap: getNamedUpdateInputMap(ctx, "input.values"), + } + + values, err = tagPartialFromInput(*input.Values, translator) + if err != nil { + return nil, err + } + + if input.Values.Image != nil { + var err error + imageData, err = utils.ProcessImageInput(ctx, *input.Values.Image) + if err != nil { + return nil, fmt.Errorf("processing cover image: %w", err) + } + } + } else { + v := models.NewTagPartial() + values = &v + } + var t *models.Tag if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Tag @@ -300,28 +385,22 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input TagsMergeInput) return fmt.Errorf("tag with id %d not found", destination) } - parents, children, err := tag.MergeHierarchy(ctx, destination, source, qb) - if err != nil { - return err - } - if err = qb.Merge(ctx, source, destination); err != nil { return err } - err = qb.UpdateParentTags(ctx, destination, parents) - if err != nil { - return err - } - err = qb.UpdateChildTags(ctx, destination, children) - if err != nil { + if err := tag.ValidateUpdate(ctx, destination, *values, qb); err != nil { return err } - err = tag.ValidateHierarchyExisting(ctx, t, parents, children, qb) - if err != nil { - logger.Errorf("Error merging tag: %s", err) - return err + if _, err := qb.UpdatePartial(ctx, destination, *values); err != nil { + return fmt.Errorf("updating tag: %w", err) + } + + if len(imageData) > 0 { + if err := qb.UpdateImage(ctx, destination, imageData); err != nil { + return err + } } return nil diff --git a/internal/api/resolver_query_configuration.go b/internal/api/resolver_query_configuration.go index cfa22720b..cf2c0e3cc 100644 --- 
a/internal/api/resolver_query_configuration.go +++ b/internal/api/resolver_query_configuration.go @@ -82,6 +82,7 @@ func makeConfigGeneralResult() *ConfigGeneralResult { Stashes: config.GetStashPaths(), DatabasePath: config.GetDatabasePath(), BackupDirectoryPath: config.GetBackupDirectoryPath(), + DeleteTrashPath: config.GetDeleteTrashPath(), GeneratedPath: config.GetGeneratedPath(), MetadataPath: config.GetMetadataPath(), ConfigFilePath: config.GetConfigFile(), @@ -95,6 +96,11 @@ func makeConfigGeneralResult() *ConfigGeneralResult { CalculateMd5: config.IsCalculateMD5(), VideoFileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(), ParallelTasks: config.GetParallelTasks(), + UseCustomSpriteInterval: config.GetUseCustomSpriteInterval(), + SpriteInterval: config.GetSpriteInterval(), + SpriteScreenshotSize: config.GetSpriteScreenshotSize(), + MinimumSprites: config.GetMinimumSprites(), + MaximumSprites: config.GetMaximumSprites(), PreviewAudio: config.GetPreviewAudio(), PreviewSegments: config.GetPreviewSegments(), PreviewSegmentDuration: config.GetPreviewSegmentDuration(), @@ -115,6 +121,7 @@ func makeConfigGeneralResult() *ConfigGeneralResult { LogOut: config.GetLogOut(), LogLevel: config.GetLogLevel(), LogAccess: config.GetLogAccess(), + LogFileMaxSize: config.GetLogFileMaxSize(), VideoExtensions: config.GetVideoExtensions(), ImageExtensions: config.GetImageExtensions(), GalleryExtensions: config.GetGalleryExtensions(), @@ -154,6 +161,7 @@ func makeConfigInterfaceResult() *ConfigInterfaceResult { javascriptEnabled := config.GetJavascriptEnabled() customLocales := config.GetCustomLocales() customLocalesEnabled := config.GetCustomLocalesEnabled() + disableCustomizations := config.GetDisableCustomizations() language := config.GetLanguage() handyKey := config.GetHandyKey() scriptOffset := config.GetFunscriptOffset() @@ -162,6 +170,7 @@ func makeConfigInterfaceResult() *ConfigInterfaceResult { disableDropdownCreate := config.GetDisableDropdownCreate() return 
&ConfigInterfaceResult{ + SfwContentMode: config.GetSFWContentMode(), MenuItems: menuItems, SoundOnPreview: &soundOnPreview, WallShowTitle: &wallShowTitle, @@ -180,6 +189,7 @@ func makeConfigInterfaceResult() *ConfigInterfaceResult { JavascriptEnabled: &javascriptEnabled, CustomLocales: &customLocales, CustomLocalesEnabled: &customLocalesEnabled, + DisableCustomizations: &disableCustomizations, Language: &language, ImageLightbox: &imageLightboxOptions, diff --git a/internal/api/resolver_query_find_file.go b/internal/api/resolver_query_find_file.go new file mode 100644 index 000000000..01c14b1ed --- /dev/null +++ b/internal/api/resolver_query_find_file.go @@ -0,0 +1,120 @@ +package api + +import ( + "context" + "errors" + "strconv" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/sliceutil/stringslice" +) + +func (r *queryResolver) FindFile(ctx context.Context, id *string, path *string) (BaseFile, error) { + var ret models.File + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + qb := r.repository.File + var err error + switch { + case id != nil: + idInt, err := strconv.Atoi(*id) + if err != nil { + return err + } + var files []models.File + files, err = qb.Find(ctx, models.FileID(idInt)) + if err != nil { + return err + } + if len(files) > 0 { + ret = files[0] + } + case path != nil: + ret, err = qb.FindByPath(ctx, *path, true) + if err == nil && ret == nil { + return errors.New("file not found") + } + default: + return errors.New("either id or path must be provided") + } + + return err + }); err != nil { + return nil, err + } + + return convertBaseFile(ret), nil +} + +func (r *queryResolver) FindFiles( + ctx context.Context, + fileFilter *models.FileFilterType, + filter *models.FindFilterType, + ids []string, +) (ret *FindFilesResultType, err error) { + var fileIDs []models.FileID + if len(ids) > 0 { + fileIDsInt, err := stringslice.StringSliceToIntSlice(ids) + if err != nil { + return nil, err + } + + fileIDs = 
models.FileIDsFromInts(fileIDsInt) + } + + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + var files []models.File + var err error + + fields := collectQueryFields(ctx) + result := &models.FileQueryResult{} + + if len(fileIDs) > 0 { + files, err = r.repository.File.Find(ctx, fileIDs...) + if err == nil { + result.Count = len(files) + for _, f := range files { + if asVideo, ok := f.(*models.VideoFile); ok { + result.TotalDuration += asVideo.Duration + } + if asImage, ok := f.(*models.ImageFile); ok { + result.Megapixels += asImage.Megapixels() + } + + result.TotalSize += f.Base().Size + } + } + } else { + result, err = r.repository.File.Query(ctx, models.FileQueryOptions{ + QueryOptions: models.QueryOptions{ + FindFilter: filter, + Count: fields.Has("count"), + }, + FileFilter: fileFilter, + TotalDuration: fields.Has("duration"), + Megapixels: fields.Has("megapixels"), + TotalSize: fields.Has("size"), + }) + if err == nil { + files, err = result.Resolve(ctx) + } + } + + if err != nil { + return err + } + + ret = &FindFilesResultType{ + Count: result.Count, + Files: convertBaseFiles(files), + Duration: result.TotalDuration, + Megapixels: result.Megapixels, + Size: int(result.TotalSize), + } + + return nil + }); err != nil { + return nil, err + } + + return ret, nil +} diff --git a/internal/api/resolver_query_find_folder.go b/internal/api/resolver_query_find_folder.go new file mode 100644 index 000000000..60088e2a3 --- /dev/null +++ b/internal/api/resolver_query_find_folder.go @@ -0,0 +1,99 @@ +package api + +import ( + "context" + "errors" + "strconv" + + "github.com/stashapp/stash/pkg/models" +) + +func (r *queryResolver) FindFolder(ctx context.Context, id *string, path *string) (*models.Folder, error) { + var ret *models.Folder + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + qb := r.repository.Folder + var err error + switch { + case id != nil: + idInt, err := strconv.Atoi(*id) + if err != nil { + return err + } + ret, err = 
qb.Find(ctx, models.FolderID(idInt)) + if err != nil { + return err + } + case path != nil: + ret, err = qb.FindByPath(ctx, *path, true) + if err == nil && ret == nil { + return errors.New("folder not found") + } + default: + return errors.New("either id or path must be provided") + } + + return err + }); err != nil { + return nil, err + } + + return ret, nil +} + +func (r *queryResolver) FindFolders( + ctx context.Context, + folderFilter *models.FolderFilterType, + filter *models.FindFilterType, + ids []string, +) (ret *FindFoldersResultType, err error) { + var folderIDs []models.FolderID + if len(ids) > 0 { + folderIDsInt, err := handleIDList(ids, "ids") + if err != nil { + return nil, err + } + + folderIDs = models.FolderIDsFromInts(folderIDsInt) + } + + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + var folders []*models.Folder + var err error + + fields := collectQueryFields(ctx) + result := &models.FolderQueryResult{} + + if len(folderIDs) > 0 { + folders, err = r.repository.Folder.FindMany(ctx, folderIDs) + if err == nil { + result.Count = len(folders) + } + } else { + result, err = r.repository.Folder.Query(ctx, models.FolderQueryOptions{ + QueryOptions: models.QueryOptions{ + FindFilter: filter, + Count: fields.Has("count"), + }, + FolderFilter: folderFilter, + }) + if err == nil { + folders, err = result.Resolve(ctx) + } + } + + if err != nil { + return err + } + + ret = &FindFoldersResultType{ + Count: result.Count, + Folders: folders, + } + + return nil + }); err != nil { + return nil, err + } + + return ret, nil +} diff --git a/internal/api/resolver_query_find_gallery.go b/internal/api/resolver_query_find_gallery.go index 724a48b12..09c0387cd 100644 --- a/internal/api/resolver_query_find_gallery.go +++ b/internal/api/resolver_query_find_gallery.go @@ -5,7 +5,6 @@ import ( "strconv" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) func (r *queryResolver) FindGallery(ctx context.Context, 
id string) (ret *models.Gallery, err error) { @@ -25,7 +24,7 @@ func (r *queryResolver) FindGallery(ctx context.Context, id string) (ret *models } func (r *queryResolver) FindGalleries(ctx context.Context, galleryFilter *models.GalleryFilterType, filter *models.FindFilterType, ids []string) (ret *FindGalleriesResultType, err error) { - idInts, err := stringslice.StringSliceToIntSlice(ids) + idInts, err := handleIDList(ids, "ids") if err != nil { return nil, err } diff --git a/internal/api/resolver_query_find_group.go b/internal/api/resolver_query_find_group.go index 6f8a6c6ba..14d282379 100644 --- a/internal/api/resolver_query_find_group.go +++ b/internal/api/resolver_query_find_group.go @@ -5,7 +5,6 @@ import ( "strconv" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) func (r *queryResolver) FindGroup(ctx context.Context, id string) (ret *models.Group, err error) { @@ -25,7 +24,7 @@ func (r *queryResolver) FindGroup(ctx context.Context, id string) (ret *models.G } func (r *queryResolver) FindGroups(ctx context.Context, groupFilter *models.GroupFilterType, filter *models.FindFilterType, ids []string) (ret *FindGroupsResultType, err error) { - idInts, err := stringslice.StringSliceToIntSlice(ids) + idInts, err := handleIDList(ids, "ids") if err != nil { return nil, err } diff --git a/internal/api/resolver_query_find_image.go b/internal/api/resolver_query_find_image.go index 48b926345..90eaf33c0 100644 --- a/internal/api/resolver_query_find_image.go +++ b/internal/api/resolver_query_find_image.go @@ -7,7 +7,6 @@ import ( "github.com/99designs/gqlgen/graphql" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) func (r *queryResolver) FindImage(ctx context.Context, id *string, checksum *string) (*models.Image, error) { @@ -55,7 +54,7 @@ func (r *queryResolver) FindImages( filter *models.FindFilterType, ) (ret *FindImagesResultType, err error) { if len(ids) > 0 { - imageIds, 
err = stringslice.StringSliceToIntSlice(ids) + imageIds, err = handleIDList(ids, "ids") if err != nil { return nil, err } diff --git a/internal/api/resolver_query_find_movie.go b/internal/api/resolver_query_find_movie.go index 2f80d6f59..c9dd3f846 100644 --- a/internal/api/resolver_query_find_movie.go +++ b/internal/api/resolver_query_find_movie.go @@ -5,7 +5,6 @@ import ( "strconv" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) func (r *queryResolver) FindMovie(ctx context.Context, id string) (ret *models.Group, err error) { @@ -25,7 +24,7 @@ func (r *queryResolver) FindMovie(ctx context.Context, id string) (ret *models.G } func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.GroupFilterType, filter *models.FindFilterType, ids []string) (ret *FindMoviesResultType, err error) { - idInts, err := stringslice.StringSliceToIntSlice(ids) + idInts, err := handleIDList(ids, "ids") if err != nil { return nil, err } diff --git a/internal/api/resolver_query_find_performer.go b/internal/api/resolver_query_find_performer.go index 150c99d20..7ea1f90c8 100644 --- a/internal/api/resolver_query_find_performer.go +++ b/internal/api/resolver_query_find_performer.go @@ -5,7 +5,6 @@ import ( "strconv" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) func (r *queryResolver) FindPerformer(ctx context.Context, id string) (ret *models.Performer, err error) { @@ -26,7 +25,7 @@ func (r *queryResolver) FindPerformer(ctx context.Context, id string) (ret *mode func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *models.PerformerFilterType, filter *models.FindFilterType, performerIDs []int, ids []string) (ret *FindPerformersResultType, err error) { if len(ids) > 0 { - performerIDs, err = stringslice.StringSliceToIntSlice(ids) + performerIDs, err = handleIDList(ids, "ids") if err != nil { return nil, err } diff --git 
a/internal/api/resolver_query_find_scene.go b/internal/api/resolver_query_find_scene.go index 44b5cfd5e..135ec43b7 100644 --- a/internal/api/resolver_query_find_scene.go +++ b/internal/api/resolver_query_find_scene.go @@ -9,7 +9,6 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scene" - "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) func (r *queryResolver) FindScene(ctx context.Context, id *string, checksum *string) (*models.Scene, error) { @@ -83,7 +82,7 @@ func (r *queryResolver) FindScenes( filter *models.FindFilterType, ) (ret *FindScenesResultType, err error) { if len(ids) > 0 { - sceneIDs, err = stringslice.StringSliceToIntSlice(ids) + sceneIDs, err = handleIDList(ids, "ids") if err != nil { return nil, err } diff --git a/internal/api/resolver_query_find_scene_marker.go b/internal/api/resolver_query_find_scene_marker.go index d3e47ce8d..e244bafef 100644 --- a/internal/api/resolver_query_find_scene_marker.go +++ b/internal/api/resolver_query_find_scene_marker.go @@ -4,11 +4,10 @@ import ( "context" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) func (r *queryResolver) FindSceneMarkers(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, filter *models.FindFilterType, ids []string) (ret *FindSceneMarkersResultType, err error) { - idInts, err := stringslice.StringSliceToIntSlice(ids) + idInts, err := handleIDList(ids, "ids") if err != nil { return nil, err } diff --git a/internal/api/resolver_query_find_studio.go b/internal/api/resolver_query_find_studio.go index 843592953..636772fe8 100644 --- a/internal/api/resolver_query_find_studio.go +++ b/internal/api/resolver_query_find_studio.go @@ -5,7 +5,6 @@ import ( "strconv" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) func (r *queryResolver) FindStudio(ctx context.Context, id string) (ret *models.Studio, err error) { @@ -26,7 +25,7 @@ func (r 
*queryResolver) FindStudio(ctx context.Context, id string) (ret *models. } func (r *queryResolver) FindStudios(ctx context.Context, studioFilter *models.StudioFilterType, filter *models.FindFilterType, ids []string) (ret *FindStudiosResultType, err error) { - idInts, err := stringslice.StringSliceToIntSlice(ids) + idInts, err := handleIDList(ids, "ids") if err != nil { return nil, err } diff --git a/internal/api/resolver_query_find_tag.go b/internal/api/resolver_query_find_tag.go index f0e1d8b97..7dca0b481 100644 --- a/internal/api/resolver_query_find_tag.go +++ b/internal/api/resolver_query_find_tag.go @@ -5,7 +5,6 @@ import ( "strconv" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) func (r *queryResolver) FindTag(ctx context.Context, id string) (ret *models.Tag, err error) { @@ -25,7 +24,7 @@ func (r *queryResolver) FindTag(ctx context.Context, id string) (ret *models.Tag } func (r *queryResolver) FindTags(ctx context.Context, tagFilter *models.TagFilterType, filter *models.FindFilterType, ids []string) (ret *FindTagsResultType, err error) { - idInts, err := stringslice.StringSliceToIntSlice(ids) + idInts, err := handleIDList(ids, "ids") if err != nil { return nil, err } diff --git a/internal/api/resolver_query_scraper.go b/internal/api/resolver_query_scraper.go index f0e89cd34..353bb1a32 100644 --- a/internal/api/resolver_query_scraper.go +++ b/internal/api/resolver_query_scraper.go @@ -6,6 +6,7 @@ import ( "fmt" "slices" "strconv" + "strings" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" @@ -201,7 +202,7 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source scraper.So } // TODO - this should happen after any scene is scraped - if err := r.matchScenesRelationships(ctx, ret, *source.StashBoxEndpoint); err != nil { + if err := r.matchScenesRelationships(ctx, ret, b.Endpoint); err != nil { return nil, err } default: @@ -245,7 +246,7 @@ func (r *queryResolver) 
ScrapeMultiScenes(ctx context.Context, source scraper.So // just flatten the slice and pass it in flat := sliceutil.Flatten(ret) - if err := r.matchScenesRelationships(ctx, flat, *source.StashBoxEndpoint); err != nil { + if err := r.matchScenesRelationships(ctx, flat, b.Endpoint); err != nil { return nil, err } @@ -335,7 +336,7 @@ func (r *queryResolver) ScrapeSingleStudio(ctx context.Context, source scraper.S if len(ret) > 0 { if err := r.withReadTxn(ctx, func(ctx context.Context) error { for _, studio := range ret { - if err := match.ScrapedStudioHierarchy(ctx, r.repository.Studio, studio, *source.StashBoxEndpoint); err != nil { + if err := match.ScrapedStudioHierarchy(ctx, r.repository.Studio, studio, b.Endpoint); err != nil { return err } } @@ -350,7 +351,63 @@ func (r *queryResolver) ScrapeSingleStudio(ctx context.Context, source scraper.S return nil, nil } - return nil, errors.New("stash_box_index must be set") + return nil, errors.New("stash_box_endpoint must be set") +} + +func (r *queryResolver) ScrapeSingleTag(ctx context.Context, source scraper.Source, input ScrapeSingleTagInput) ([]*models.ScrapedTag, error) { + if source.StashBoxIndex != nil || source.StashBoxEndpoint != nil { + b, err := resolveStashBox(source.StashBoxIndex, source.StashBoxEndpoint) + if err != nil { + return nil, err + } + + client := r.newStashBoxClient(*b) + + var ret []*models.ScrapedTag + query := *input.Query + out, err := client.QueryTag(ctx, query) + + if err != nil { + return nil, err + } else if out != nil { + ret = append(ret, out...) + } + + if len(ret) > 0 { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + for _, tag := range ret { + if err := match.ScrapedTag(ctx, r.repository.Tag, tag, b.Endpoint); err != nil { + return err + } + } + + return nil + }); err != nil { + return nil, err + } + + // tag name query returns results that may not match the query exactly. 
+ // if there is an exact match, it should be first + if query != "" { + for i, result := range ret { + if strings.EqualFold(result.Name, query) { + // prepend exact match to the front of the slice + if i != 0 { + ret = append([]*models.ScrapedTag{result}, append(ret[:i], ret[i+1:]...)...) + } + + break + } + } + } + + return ret, nil + } + + return nil, nil + } + + return nil, errors.New("stash_box_endpoint must be set") } func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source scraper.Source, input ScrapeSinglePerformerInput) ([]*models.ScrapedPerformer, error) { diff --git a/internal/api/routes_performer.go b/internal/api/routes_performer.go index b27fdbd6c..8d5463d63 100644 --- a/internal/api/routes_performer.go +++ b/internal/api/routes_performer.go @@ -18,9 +18,14 @@ type PerformerFinder interface { GetImage(ctx context.Context, performerID int) ([]byte, error) } +type sfwConfig interface { + GetSFWContentMode() bool +} + type performerRoutes struct { routes performerFinder PerformerFinder + sfwConfig sfwConfig } func (rs performerRoutes) Routes() chi.Router { @@ -54,7 +59,7 @@ func (rs performerRoutes) Image(w http.ResponseWriter, r *http.Request) { } if len(image) == 0 { - image = getDefaultPerformerImage(performer.Name, performer.Gender) + image = getDefaultPerformerImage(performer.Name, performer.Gender, rs.sfwConfig.GetSFWContentMode()) } utils.ServeImage(w, r, image) diff --git a/internal/api/routes_scene.go b/internal/api/routes_scene.go index 95e7c9d44..2905bd53a 100644 --- a/internal/api/routes_scene.go +++ b/internal/api/routes_scene.go @@ -12,6 +12,7 @@ import ( "github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager/config" + "github.com/stashapp/stash/internal/static" "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/file/video" "github.com/stashapp/stash/pkg/fsutil" @@ -243,6 +244,12 @@ func (rs sceneRoutes) streamSegment(w http.ResponseWriter, r *http.Request, stre } func 
(rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) { + // if default flag is set, return the default image + if r.URL.Query().Get("default") == "true" { + utils.ServeImage(w, r, static.ReadAll(static.DefaultSceneImage)) + return + } + scene := r.Context().Value(sceneKey).(*models.Scene) ss := manager.SceneServer{ diff --git a/internal/api/scraped_content.go b/internal/api/scraped_content.go index 6288812ef..f7d40c95d 100644 --- a/internal/api/scraped_content.go +++ b/internal/api/scraped_content.go @@ -135,6 +135,13 @@ func marshalScrapedGroups(content []scraper.ScrapedContent) ([]*models.ScrapedGr ret = append(ret, m) case models.ScrapedGroup: ret = append(ret, &m) + // it's possible that a scraper returns models.ScrapedMovie + case *models.ScrapedMovie: + g := m.ScrapedGroup() + ret = append(ret, &g) + case models.ScrapedMovie: + g := m.ScrapedGroup() + ret = append(ret, &g) default: return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedGroup", models.ErrConversion) } diff --git a/internal/api/server.go b/internal/api/server.go index 5059e9a2a..a7516da52 100644 --- a/internal/api/server.go +++ b/internal/api/server.go @@ -11,6 +11,7 @@ import ( "net/http" "os" "path" + "path/filepath" "runtime/debug" "strconv" "strings" @@ -255,6 +256,9 @@ func Initialize() (*Server, error) { staticUI = statigz.FileServer(ui.UIBox.(fs.ReadDirFS)) } + // handle favicon override + r.HandleFunc("/favicon.ico", handleFavicon(staticUI)) + // Serve the web app r.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) { ext := path.Ext(r.URL.Path) @@ -295,6 +299,31 @@ func Initialize() (*Server, error) { return server, nil } +func handleFavicon(staticUI *statigz.Server) func(w http.ResponseWriter, r *http.Request) { + mgr := manager.GetInstance() + cfg := mgr.Config + + // check if favicon.ico exists in the config directory + // if so, use that + // otherwise, use the embedded one + iconPath := filepath.Join(cfg.GetConfigPath(), "favicon.ico") + 
exists, _ := fsutil.FileExists(iconPath) + + if exists { + logger.Debugf("Using custom favicon at %s", iconPath) + } + + return func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Cache-Control", "no-cache") + + if exists { + http.ServeFile(w, r, iconPath) + } else { + staticUI.ServeHTTP(w, r) + } + } +} + // Start starts the server. It listens on the configured address and port. // It calls ListenAndServeTLS if TLS is configured, otherwise it calls ListenAndServe. // Calls to Start are blocked until the server is shutdown. @@ -322,6 +351,7 @@ func (s *Server) getPerformerRoutes() chi.Router { return performerRoutes{ routes: routes{txnManager: repo.TxnManager}, performerFinder: repo.Performer, + sfwConfig: s.manager.Config, }.Routes() } @@ -420,7 +450,7 @@ func cssHandler(c *config.Config) func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) { var paths []string - if c.GetCSSEnabled() { + if c.GetCSSEnabled() && !c.GetDisableCustomizations() { // search for custom.css in current directory, then $HOME/.stash fn := c.GetCSSPath() exists, _ := fsutil.FileExists(fn) @@ -438,7 +468,7 @@ func javascriptHandler(c *config.Config) func(w http.ResponseWriter, r *http.Req return func(w http.ResponseWriter, r *http.Request) { var paths []string - if c.GetJavascriptEnabled() { + if c.GetJavascriptEnabled() && !c.GetDisableCustomizations() { // search for custom.js in current directory, then $HOME/.stash fn := c.GetJavascriptPath() exists, _ := fsutil.FileExists(fn) @@ -456,7 +486,7 @@ func customLocalesHandler(c *config.Config) func(w http.ResponseWriter, r *http. 
return func(w http.ResponseWriter, r *http.Request) { buffer := bytes.Buffer{} - if c.GetCustomLocalesEnabled() { + if c.GetCustomLocalesEnabled() && !c.GetDisableCustomizations() { // search for custom-locales.json in current directory, then $HOME/.stash path := c.GetCustomLocalesPath() exists, _ := fsutil.FileExists(path) diff --git a/internal/api/urlbuilders/gallery.go b/internal/api/urlbuilders/gallery.go index 3e6c5ef08..2723781f2 100644 --- a/internal/api/urlbuilders/gallery.go +++ b/internal/api/urlbuilders/gallery.go @@ -9,12 +9,14 @@ import ( type GalleryURLBuilder struct { BaseURL string GalleryID string + UpdatedAt string } func NewGalleryURLBuilder(baseURL string, gallery *models.Gallery) GalleryURLBuilder { return GalleryURLBuilder{ BaseURL: baseURL, GalleryID: strconv.Itoa(gallery.ID), + UpdatedAt: strconv.FormatInt(gallery.UpdatedAt.Unix(), 10), } } @@ -23,5 +25,5 @@ func (b GalleryURLBuilder) GetPreviewURL() string { } func (b GalleryURLBuilder) GetCoverURL() string { - return b.BaseURL + "/gallery/" + b.GalleryID + "/cover" + return b.BaseURL + "/gallery/" + b.GalleryID + "/cover?t=" + b.UpdatedAt } diff --git a/internal/autotag/integration_test.go b/internal/autotag/integration_test.go index 565d73853..f537ecfe7 100644 --- a/internal/autotag/integration_test.go +++ b/internal/autotag/integration_test.go @@ -101,16 +101,15 @@ func createPerformer(ctx context.Context, pqb models.PerformerWriter) error { func createStudio(ctx context.Context, qb models.StudioWriter, name string) (*models.Studio, error) { // create the studio - studio := models.Studio{ - Name: name, - } + studio := models.NewCreateStudioInput() + studio.Name = name err := qb.Create(ctx, &studio) if err != nil { return nil, err } - return &studio, nil + return studio.Studio, nil } func createTag(ctx context.Context, qb models.TagWriter) error { @@ -119,7 +118,7 @@ func createTag(ctx context.Context, qb models.TagWriter) error { Name: testName, } - err := qb.Create(ctx, &tag) + err := 
qb.Create(ctx, &models.CreateTagInput{Tag: &tag}) if err != nil { return err } @@ -225,7 +224,7 @@ func createSceneFile(ctx context.Context, name string, folderStore models.Folder } func getOrCreateFolder(ctx context.Context, folderStore models.FolderFinderCreator, folderPath string) (*models.Folder, error) { - f, err := folderStore.FindByPath(ctx, folderPath) + f, err := folderStore.FindByPath(ctx, folderPath, true) if err != nil { return nil, fmt.Errorf("getting folder by path: %w", err) } @@ -366,7 +365,10 @@ func makeImage(expectedResult bool) *models.Image { } func createImage(ctx context.Context, w models.ImageWriter, o *models.Image, f *models.ImageFile) error { - err := w.Create(ctx, o, []models.FileID{f.ID}) + err := w.Create(ctx, &models.CreateImageInput{ + Image: o, + FileIDs: []models.FileID{f.ID}, + }) if err != nil { return fmt.Errorf("Failed to create image with path '%s': %s", f.Path, err.Error()) @@ -469,7 +471,10 @@ func makeGallery(expectedResult bool) *models.Gallery { } func createGallery(ctx context.Context, w models.GalleryWriter, o *models.Gallery, f *models.BaseFile) error { - err := w.Create(ctx, o, []models.FileID{f.ID}) + err := w.Create(ctx, &models.CreateGalleryInput{ + Gallery: o, + FileIDs: []models.FileID{f.ID}, + }) if err != nil { return fmt.Errorf("Failed to create gallery with path '%s': %s", f.Path, err.Error()) } diff --git a/internal/desktop/desktop.go b/internal/desktop/desktop.go index a89a3c962..f1ca9bc92 100644 --- a/internal/desktop/desktop.go +++ b/internal/desktop/desktop.go @@ -2,6 +2,7 @@ package desktop import ( + "fmt" "os" "path" "path/filepath" @@ -17,6 +18,16 @@ import ( "golang.org/x/term" ) +var isDesktop bool + +// InitIsDesktop sets the value of isDesktop. +// Changed IsDesktop to be evaluated once at startup because if it is +// checked while there are open terminal sessions (such as the ffmpeg hardware +// encoding checks), it may return false. 
+func InitIsDesktop() { + isDesktop = isDesktopCheck() +} + type FaviconProvider interface { GetFavicon() []byte GetFaviconPng() []byte @@ -59,22 +70,33 @@ func SendNotification(title string, text string) { } func IsDesktop() bool { + return isDesktop +} + +// isDesktop tries to determine if the application is running in a desktop environment +// where desktop features like system tray and notifications should be enabled. +func isDesktopCheck() bool { if isDoubleClickLaunched() { + logger.Debug("Detected double-click launch") return true } // Check if running under root if os.Getuid() == 0 { + logger.Debug("Running as root, disabling desktop features") return false } // Check if stdin is a terminal if term.IsTerminal(int(os.Stdin.Fd())) { + logger.Debug("Running in terminal, disabling desktop features") return false } if isService() { + logger.Debug("Running as a service, disabling desktop features") return false } if IsServerDockerized() { + logger.Debug("Running in docker, disabling desktop features") return false } @@ -134,15 +156,17 @@ func getIconPath() string { return path.Join(config.GetInstance().GetConfigPath(), "icon.png") } -func RevealInFileManager(path string) { - exists, err := fsutil.FileExists(path) +func RevealInFileManager(path string) error { + info, err := os.Stat(path) if err != nil { - logger.Errorf("Error checking file: %s", err) - return + return fmt.Errorf("error checking path: %w", err) } - if exists && IsDesktop() { - revealInFileManager(path) + + absPath, err := filepath.Abs(path) + if err != nil { + return fmt.Errorf("error getting absolute path: %w", err) } + return revealInFileManager(absPath, info) } func getServerURL(path string) string { diff --git a/internal/desktop/desktop_platform_darwin.go b/internal/desktop/desktop_platform_darwin.go index 593e9516f..732009007 100644 --- a/internal/desktop/desktop_platform_darwin.go +++ b/internal/desktop/desktop_platform_darwin.go @@ -4,9 +4,11 @@ package desktop import ( + "fmt" + "os" 
"os/exec" - "github.com/kermieisinthehouse/gosx-notifier" + gosxnotifier "github.com/kermieisinthehouse/gosx-notifier" "github.com/stashapp/stash/pkg/logger" ) @@ -32,8 +34,11 @@ func sendNotification(notificationTitle string, notificationText string) { } } -func revealInFileManager(path string) { - exec.Command(`open`, `-R`, path) +func revealInFileManager(path string, _ os.FileInfo) error { + if err := exec.Command(`open`, `-R`, path).Run(); err != nil { + return fmt.Errorf("error revealing path in Finder: %w", err) + } + return nil } func isDoubleClickLaunched() bool { diff --git a/internal/desktop/desktop_platform_nixes.go b/internal/desktop/desktop_platform_nixes.go index 69c780d3c..f5ab13384 100644 --- a/internal/desktop/desktop_platform_nixes.go +++ b/internal/desktop/desktop_platform_nixes.go @@ -4,8 +4,10 @@ package desktop import ( + "fmt" "os" "os/exec" + "path/filepath" "strings" "github.com/stashapp/stash/pkg/logger" @@ -33,8 +35,15 @@ func sendNotification(notificationTitle string, notificationText string) { } } -func revealInFileManager(path string) { - +func revealInFileManager(path string, info os.FileInfo) error { + dir := path + if !info.IsDir() { + dir = filepath.Dir(path) + } + if err := exec.Command("xdg-open", dir).Run(); err != nil { + return fmt.Errorf("error opening directory in file manager: %w", err) + } + return nil } func isDoubleClickLaunched() bool { diff --git a/internal/desktop/desktop_platform_windows.go b/internal/desktop/desktop_platform_windows.go index ecb4060e6..48feabed5 100644 --- a/internal/desktop/desktop_platform_windows.go +++ b/internal/desktop/desktop_platform_windows.go @@ -4,6 +4,7 @@ package desktop import ( + "os" "os/exec" "syscall" "unsafe" @@ -83,6 +84,10 @@ func sendNotification(notificationTitle string, notificationText string) { } } -func revealInFileManager(path string) { - exec.Command(`explorer`, `\select`, path) +func revealInFileManager(path string, _ os.FileInfo) error { + c := exec.Command(`explorer`, 
`/select,`, path) + logger.Debugf("Running: %s", c.String()) + // explorer seems to return an error code even when it works, so ignore the error + _ = c.Run() + return nil } diff --git a/internal/desktop/systray_nonlinux.go b/internal/desktop/systray_nonlinux.go index 450e503ea..6b6055f11 100644 --- a/internal/desktop/systray_nonlinux.go +++ b/internal/desktop/systray_nonlinux.go @@ -3,6 +3,8 @@ package desktop import ( + "fmt" + "runtime" "strings" "github.com/kermieisinthehouse/systray" @@ -20,7 +22,12 @@ func startSystray(exit chan int, faviconProvider FaviconProvider) { // system is started from a non-terminal method, e.g. double-clicking an icon. c := config.GetInstance() if c.GetShowOneTimeMovedNotification() { - SendNotification("Stash has moved!", "Stash now runs in your tray, instead of a terminal window.") + // Use platform-appropriate terminology + location := "tray" + if runtime.GOOS == "darwin" { + location = "menu bar" + } + SendNotification("Stash has moved!", "Stash now runs in your "+location+", instead of a terminal window.") c.SetBool(config.ShowOneTimeMovedNotification, false) if err := c.Write(); err != nil { logger.Errorf("Error while writing configuration file: %v", err) @@ -52,12 +59,12 @@ func startSystray(exit chan int, faviconProvider FaviconProvider) { func systrayInitialize(exit chan<- int, faviconProvider FaviconProvider) { favicon := faviconProvider.GetFavicon() systray.SetTemplateIcon(favicon, favicon) - systray.SetTooltip("🟢 Stash is Running.") + c := config.GetInstance() + systray.SetTooltip(fmt.Sprintf("🟢 Stash is Running on port %d.", c.GetPort())) openStashButton := systray.AddMenuItem("Open Stash", "Open a browser window to Stash") var menuItems []string systray.AddSeparator() - c := config.GetInstance() if !c.IsNewSystem() { menuItems = c.GetMenuItems() for _, item := range menuItems { diff --git a/internal/dlna/activity.go b/internal/dlna/activity.go new file mode 100644 index 000000000..a9a5d9b2d --- /dev/null +++ 
b/internal/dlna/activity.go @@ -0,0 +1,333 @@ +package dlna + +import ( + "context" + "fmt" + "sync" + "time" + + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/txn" +) + +const ( + // DefaultSessionTimeout is the time after which a session is considered complete + // if no new requests are received. + // This is set high (5 minutes) because DLNA clients buffer aggressively and may not + // send any HTTP requests for extended periods while the user is still watching. + DefaultSessionTimeout = 5 * time.Minute + + // monitorInterval is how often we check for expired sessions. + monitorInterval = 10 * time.Second +) + +// ActivityConfig provides configuration options for DLNA activity tracking. +type ActivityConfig interface { + // GetDLNAActivityTrackingEnabled returns true if activity tracking should be enabled. + // If not implemented, defaults to true. + GetDLNAActivityTrackingEnabled() bool + + // GetMinimumPlayPercent returns the minimum percentage of a video that must be + // watched before incrementing the play count. Uses UI setting if available. + GetMinimumPlayPercent() int +} + +// SceneActivityWriter provides methods for saving scene activity. +type SceneActivityWriter interface { + SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) + AddViews(ctx context.Context, sceneID int, dates []time.Time) ([]time.Time, error) +} + +// streamSession represents an active DLNA streaming session. +type streamSession struct { + SceneID int + ClientIP string + StartTime time.Time + LastActivity time.Time + VideoDuration float64 + PlayCountAdded bool +} + +// sessionKey generates a unique key for a session based on client IP and scene ID. +func sessionKey(clientIP string, sceneID int) string { + return fmt.Sprintf("%s:%d", clientIP, sceneID) +} + +// percentWatched calculates the estimated percentage of video watched. 
+// Uses a time-based approach since DLNA clients buffer aggressively and byte +// positions don't correlate with actual playback position. +// +// The key insight: you cannot have watched more of the video than time has elapsed. +// If the video is 30 minutes and only 1 minute has passed, maximum watched is ~3.3%. +func (s *streamSession) percentWatched() float64 { + if s.VideoDuration <= 0 { + return 0 + } + + // Calculate elapsed time from session start to last activity + elapsed := s.LastActivity.Sub(s.StartTime).Seconds() + if elapsed <= 0 { + return 0 + } + + // Maximum possible percent is based on elapsed time + // You can't watch more of the video than time has passed + timeBasedPercent := (elapsed / s.VideoDuration) * 100 + + // Cap at 100% + if timeBasedPercent > 100 { + return 100 + } + + return timeBasedPercent +} + +// estimatedResumeTime calculates the estimated resume time based on elapsed time. +// Since DLNA clients buffer aggressively, byte positions don't correlate with playback. +// Instead, we estimate based on how long the session has been active. +// Returns the time in seconds, or 0 if the video is nearly complete (>=98%). +func (s *streamSession) estimatedResumeTime() float64 { + if s.VideoDuration <= 0 { + return 0 + } + + // Calculate elapsed time from session start + elapsed := s.LastActivity.Sub(s.StartTime).Seconds() + if elapsed <= 0 { + return 0 + } + + // If elapsed time exceeds 98% of video duration, reset resume time (matches frontend behavior) + if elapsed >= s.VideoDuration*0.98 { + return 0 + } + + // Resume time is approximately where the user was watching + // Capped by video duration + if elapsed > s.VideoDuration { + elapsed = s.VideoDuration + } + + return elapsed +} + +// ActivityTracker tracks DLNA streaming activity and saves it to the database. 
+type ActivityTracker struct { + txnManager txn.Manager + sceneWriter SceneActivityWriter + config ActivityConfig + sessionTimeout time.Duration + + sessions map[string]*streamSession + mutex sync.RWMutex + + ctx context.Context + cancelFunc context.CancelFunc + wg sync.WaitGroup +} + +// NewActivityTracker creates a new ActivityTracker. +func NewActivityTracker( + txnManager txn.Manager, + sceneWriter SceneActivityWriter, + config ActivityConfig, +) *ActivityTracker { + ctx, cancel := context.WithCancel(context.Background()) + + tracker := &ActivityTracker{ + txnManager: txnManager, + sceneWriter: sceneWriter, + config: config, + sessionTimeout: DefaultSessionTimeout, + sessions: make(map[string]*streamSession), + ctx: ctx, + cancelFunc: cancel, + } + + // Start the session monitor goroutine + tracker.wg.Add(1) + go tracker.monitorSessions() + + return tracker +} + +// Stop stops the activity tracker and processes any remaining sessions. +func (t *ActivityTracker) Stop() { + t.cancelFunc() + t.wg.Wait() + + // Process any remaining sessions + t.mutex.Lock() + sessions := make([]*streamSession, 0, len(t.sessions)) + for _, session := range t.sessions { + sessions = append(sessions, session) + } + t.sessions = make(map[string]*streamSession) + t.mutex.Unlock() + + for _, session := range sessions { + t.processCompletedSession(session) + } +} + +// RecordRequest records a streaming request for activity tracking. +// Each request updates the session's LastActivity time, which is used for +// time-based tracking of watch progress. 
+func (t *ActivityTracker) RecordRequest(sceneID int, clientIP string, videoDuration float64) { + if !t.isEnabled() { + return + } + + key := sessionKey(clientIP, sceneID) + now := time.Now() + + t.mutex.Lock() + defer t.mutex.Unlock() + + session, exists := t.sessions[key] + if !exists { + session = &streamSession{ + SceneID: sceneID, + ClientIP: clientIP, + StartTime: now, + VideoDuration: videoDuration, + } + t.sessions[key] = session + logger.Debugf("[DLNA Activity] New session started: scene=%d, client=%s", sceneID, clientIP) + } + + session.LastActivity = now +} + +// monitorSessions periodically checks for expired sessions and processes them. +func (t *ActivityTracker) monitorSessions() { + defer t.wg.Done() + + ticker := time.NewTicker(monitorInterval) + defer ticker.Stop() + + for { + select { + case <-t.ctx.Done(): + return + case <-ticker.C: + t.processExpiredSessions() + } + } +} + +// processExpiredSessions finds and processes sessions that have timed out. +func (t *ActivityTracker) processExpiredSessions() { + now := time.Now() + var expiredSessions []*streamSession + + t.mutex.Lock() + for key, session := range t.sessions { + timeSinceStart := now.Sub(session.StartTime) + timeSinceActivity := now.Sub(session.LastActivity) + + // Must have no HTTP activity for the full timeout period + if timeSinceActivity <= t.sessionTimeout { + continue + } + + // DLNA clients buffer aggressively - they fetch most/all of the video quickly, + // then play from cache with NO further HTTP requests. + // + // Two scenarios: + // 1. User watched the whole video: timeSinceStart >= videoDuration + // -> Set LastActivity to when timeout began (they finished watching) + // 2. 
User stopped early: timeSinceStart < videoDuration + // -> Keep LastActivity as-is (best estimate of when they stopped) + + videoDuration := time.Duration(session.VideoDuration) * time.Second + if timeSinceStart >= videoDuration && videoDuration > 0 { + // User likely watched the whole video, then it timed out + // Estimate they watched until the timeout period started + session.LastActivity = now.Add(-t.sessionTimeout) + } + // else: User stopped early - LastActivity is already our best estimate + + expiredSessions = append(expiredSessions, session) + delete(t.sessions, key) + } + t.mutex.Unlock() + + for _, session := range expiredSessions { + t.processCompletedSession(session) + } +} + +// processCompletedSession saves activity data for a completed streaming session. +func (t *ActivityTracker) processCompletedSession(session *streamSession) { + percentWatched := session.percentWatched() + resumeTime := session.estimatedResumeTime() + + logger.Debugf("[DLNA Activity] Session completed: scene=%d, client=%s, videoDuration=%.1fs, percent=%.1f%%, resume=%.1fs", + session.SceneID, session.ClientIP, session.VideoDuration, percentWatched, resumeTime) + + // Only save if there was meaningful activity (at least 1% watched) + if percentWatched < 1 { + logger.Debugf("[DLNA Activity] Session too short, skipping save") + return + } + + // Skip DB operations if txnManager is nil (for testing) + if t.txnManager == nil { + logger.Debugf("[DLNA Activity] No transaction manager, skipping DB save") + return + } + + // Determine what needs to be saved + shouldSaveResume := resumeTime > 0 + shouldAddView := !session.PlayCountAdded && percentWatched >= float64(t.getMinimumPlayPercent()) + + // Nothing to save + if !shouldSaveResume && !shouldAddView { + return + } + + // Save everything in a single transaction + ctx := context.Background() + if err := txn.WithTxn(ctx, t.txnManager, func(ctx context.Context) error { + // Save resume time only. 
DLNA clients buffer aggressively and don't report + // playback position, so we can't accurately track play duration - saving + // guesses would corrupt analytics. Resume time is still useful as a + // "continue watching" hint even if imprecise. + if shouldSaveResume { + if _, err := t.sceneWriter.SaveActivity(ctx, session.SceneID, &resumeTime, nil); err != nil { + return fmt.Errorf("save resume time: %w", err) + } + } + + // Increment play count (also updates last_played_at via view date) + if shouldAddView { + if _, err := t.sceneWriter.AddViews(ctx, session.SceneID, []time.Time{time.Now()}); err != nil { + return fmt.Errorf("add view: %w", err) + } + session.PlayCountAdded = true + logger.Debugf("[DLNA Activity] Incremented play count for scene %d (%.1f%% watched)", + session.SceneID, percentWatched) + } + + return nil + }); err != nil { + logger.Warnf("[DLNA Activity] Failed to save activity for scene %d: %v", session.SceneID, err) + } +} + +// isEnabled returns true if activity tracking is enabled. +func (t *ActivityTracker) isEnabled() bool { + if t.config == nil { + return true // Default to enabled + } + return t.config.GetDLNAActivityTrackingEnabled() +} + +// getMinimumPlayPercent returns the minimum play percentage for incrementing play count. 
+func (t *ActivityTracker) getMinimumPlayPercent() int { + if t.config == nil { + return 0 // Default: any play increments count (matches frontend default) + } + return t.config.GetMinimumPlayPercent() +} diff --git a/internal/dlna/activity_test.go b/internal/dlna/activity_test.go new file mode 100644 index 000000000..19ae7ebb8 --- /dev/null +++ b/internal/dlna/activity_test.go @@ -0,0 +1,420 @@ +package dlna + +import ( + "context" + "sync" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +// mockSceneWriter is a mock implementation of SceneActivityWriter +type mockSceneWriter struct { + mu sync.Mutex + saveActivityCalls []saveActivityCall + addViewsCalls []addViewsCall +} + +type saveActivityCall struct { + sceneID int + resumeTime *float64 + playDuration *float64 +} + +type addViewsCall struct { + sceneID int + dates []time.Time +} + +func (m *mockSceneWriter) SaveActivity(_ context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) { + m.mu.Lock() + m.saveActivityCalls = append(m.saveActivityCalls, saveActivityCall{ + sceneID: sceneID, + resumeTime: resumeTime, + playDuration: playDuration, + }) + m.mu.Unlock() + return true, nil +} + +func (m *mockSceneWriter) AddViews(_ context.Context, sceneID int, dates []time.Time) ([]time.Time, error) { + m.mu.Lock() + m.addViewsCalls = append(m.addViewsCalls, addViewsCall{ + sceneID: sceneID, + dates: dates, + }) + m.mu.Unlock() + return dates, nil +} + +// mockConfig is a mock implementation of ActivityConfig +type mockConfig struct { + enabled bool + minPlayPercent int +} + +func (c *mockConfig) GetDLNAActivityTrackingEnabled() bool { + return c.enabled +} + +func (c *mockConfig) GetMinimumPlayPercent() int { + return c.minPlayPercent +} + +func TestStreamSession_PercentWatched(t *testing.T) { + now := time.Now() + + tests := []struct { + name string + startTime time.Time + lastActivity time.Time + videoDuration float64 + expected float64 + }{ + { + name: "no video 
duration", + startTime: now.Add(-60 * time.Second), + lastActivity: now, + videoDuration: 0, + expected: 0, + }, + { + name: "half watched", + startTime: now.Add(-60 * time.Second), + lastActivity: now, + videoDuration: 120.0, // 2 minutes, watched for 1 minute = 50% + expected: 50.0, + }, + { + name: "fully watched", + startTime: now.Add(-120 * time.Second), + lastActivity: now, + videoDuration: 120.0, // 2 minutes, watched for 2 minutes = 100% + expected: 100.0, + }, + { + name: "quarter watched", + startTime: now.Add(-30 * time.Second), + lastActivity: now, + videoDuration: 120.0, // 2 minutes, watched for 30 seconds = 25% + expected: 25.0, + }, + { + name: "elapsed exceeds duration - capped at 100%", + startTime: now.Add(-180 * time.Second), + lastActivity: now, + videoDuration: 120.0, // 2 minutes, but 3 minutes elapsed = capped at 100% + expected: 100.0, + }, + { + name: "no elapsed time", + startTime: now, + lastActivity: now, + videoDuration: 120.0, + expected: 0, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + session := &streamSession{ + StartTime: tt.startTime, + LastActivity: tt.lastActivity, + VideoDuration: tt.videoDuration, + } + result := session.percentWatched() + assert.InDelta(t, tt.expected, result, 0.01) + }) + } +} + +func TestStreamSession_EstimatedResumeTime(t *testing.T) { + now := time.Now() + + tests := []struct { + name string + startTime time.Time + lastActivity time.Time + videoDuration float64 + expected float64 + }{ + { + name: "no elapsed time", + startTime: now, + lastActivity: now, + videoDuration: 120.0, + expected: 0, + }, + { + name: "half way through", + startTime: now.Add(-60 * time.Second), + lastActivity: now, + videoDuration: 120.0, // 2 minutes, watched for 1 minute = resume at 60s + expected: 60.0, + }, + { + name: "quarter way through", + startTime: now.Add(-30 * time.Second), + lastActivity: now, + videoDuration: 120.0, // 2 minutes, watched for 30 seconds = resume at 30s + expected: 
30.0, + }, + { + name: "98% complete - should reset to 0", + startTime: now.Add(-118 * time.Second), + lastActivity: now, + videoDuration: 120.0, // 98.3% elapsed, should reset + expected: 0, + }, + { + name: "100% complete - should reset to 0", + startTime: now.Add(-120 * time.Second), + lastActivity: now, + videoDuration: 120.0, + expected: 0, + }, + { + name: "elapsed exceeds duration - capped and reset to 0", + startTime: now.Add(-180 * time.Second), + lastActivity: now, + videoDuration: 120.0, // 150% elapsed, capped at 100%, reset to 0 + expected: 0, + }, + { + name: "no video duration", + startTime: now.Add(-60 * time.Second), + lastActivity: now, + videoDuration: 0, + expected: 0, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + session := &streamSession{ + StartTime: tt.startTime, + LastActivity: tt.lastActivity, + VideoDuration: tt.videoDuration, + } + result := session.estimatedResumeTime() + assert.InDelta(t, tt.expected, result, 1.0) // Allow 1 second tolerance + }) + } +} + +func TestSessionKey(t *testing.T) { + key := sessionKey("192.168.1.100", 42) + assert.Equal(t, "192.168.1.100:42", key) +} + +func TestActivityTracker_RecordRequest(t *testing.T) { + config := &mockConfig{enabled: true, minPlayPercent: 50} + + // Create tracker without starting the goroutine (for unit testing) + tracker := &ActivityTracker{ + txnManager: nil, // Don't need DB for this test + sceneWriter: nil, + config: config, + sessionTimeout: DefaultSessionTimeout, + sessions: make(map[string]*streamSession), + } + + // Record first request - should create new session + tracker.RecordRequest(42, "192.168.1.100", 120.0) + + tracker.mutex.RLock() + session := tracker.sessions["192.168.1.100:42"] + tracker.mutex.RUnlock() + + assert.NotNil(t, session) + assert.Equal(t, 42, session.SceneID) + assert.Equal(t, "192.168.1.100", session.ClientIP) + assert.Equal(t, 120.0, session.VideoDuration) + assert.False(t, session.StartTime.IsZero()) + assert.False(t, 
session.LastActivity.IsZero()) + + // Record second request - should update LastActivity + firstActivity := session.LastActivity + time.Sleep(10 * time.Millisecond) + tracker.RecordRequest(42, "192.168.1.100", 120.0) + + tracker.mutex.RLock() + session = tracker.sessions["192.168.1.100:42"] + tracker.mutex.RUnlock() + + assert.True(t, session.LastActivity.After(firstActivity)) +} + +func TestActivityTracker_DisabledTracking(t *testing.T) { + config := &mockConfig{enabled: false, minPlayPercent: 50} + + // Create tracker without starting the goroutine (for unit testing) + tracker := &ActivityTracker{ + txnManager: nil, + sceneWriter: nil, + config: config, + sessionTimeout: DefaultSessionTimeout, + sessions: make(map[string]*streamSession), + } + + // Record request - should be ignored when tracking is disabled + tracker.RecordRequest(42, "192.168.1.100", 120.0) + + tracker.mutex.RLock() + sessionCount := len(tracker.sessions) + tracker.mutex.RUnlock() + + assert.Equal(t, 0, sessionCount) +} + +func TestActivityTracker_SessionExpiration(t *testing.T) { + // For this test, we'll test the session expiration logic directly + // without the full transaction manager integration + + sceneWriter := &mockSceneWriter{} + config := &mockConfig{enabled: true, minPlayPercent: 10} + + // Create a tracker with nil txnManager - we'll test processCompletedSession separately + // Here we just verify the session management logic + tracker := &ActivityTracker{ + txnManager: nil, // Skip DB calls for this test + sceneWriter: sceneWriter, + config: config, + sessionTimeout: 100 * time.Millisecond, + sessions: make(map[string]*streamSession), + } + + // Manually add a session + // Use a short video duration (1 second) so the test can verify expiration quickly. 
+ now := time.Now() + tracker.sessions["192.168.1.100:42"] = &streamSession{ + SceneID: 42, + ClientIP: "192.168.1.100", + StartTime: now.Add(-5 * time.Second), // Started 5 seconds ago + LastActivity: now.Add(-200 * time.Millisecond), // Last activity 200ms ago (> 100ms timeout) + VideoDuration: 1.0, // Short video so timeSinceStart > videoDuration + } + + // Verify session exists + assert.Len(t, tracker.sessions, 1) + + // Process expired sessions - this will try to save activity but txnManager is nil + // so it will skip the DB calls but still remove the session + tracker.processExpiredSessions() + + // Verify session was removed (even though DB calls were skipped) + assert.Len(t, tracker.sessions, 0) +} + +func TestActivityTracker_SessionExpiration_StoppedEarly(t *testing.T) { + // Test that sessions expire when user stops watching early (before video ends) + // This was a bug where sessions wouldn't expire until video duration passed + + config := &mockConfig{enabled: true, minPlayPercent: 10} + tracker := &ActivityTracker{ + txnManager: nil, + sceneWriter: nil, + config: config, + sessionTimeout: 100 * time.Millisecond, + sessions: make(map[string]*streamSession), + } + + // User started watching a 30-minute video but stopped after 5 seconds + now := time.Now() + tracker.sessions["192.168.1.100:42"] = &streamSession{ + SceneID: 42, + ClientIP: "192.168.1.100", + StartTime: now.Add(-5 * time.Second), // Started 5 seconds ago + LastActivity: now.Add(-200 * time.Millisecond), // Last activity 200ms ago (> 100ms timeout) + VideoDuration: 1800.0, // 30 minute video - much longer than elapsed time + } + + assert.Len(t, tracker.sessions, 1) + + // Session should expire because timeSinceActivity > timeout + // Even though the video is 30 minutes and only 5 seconds have passed + tracker.processExpiredSessions() + + // Verify session was expired + assert.Len(t, tracker.sessions, 0, "Session should expire when user stops early, not wait for video duration") +} + +func 
TestActivityTracker_MinimumPlayPercentThreshold(t *testing.T) { + // Test the threshold logic without full transaction integration + config := &mockConfig{enabled: true, minPlayPercent: 75} // High threshold + + tracker := &ActivityTracker{ + txnManager: nil, + sceneWriter: nil, + config: config, + sessionTimeout: 50 * time.Millisecond, + sessions: make(map[string]*streamSession), + } + + // Test that getMinimumPlayPercent returns the configured value + assert.Equal(t, 75, tracker.getMinimumPlayPercent()) + + // Create a session with 30% watched (36 seconds of a 120 second video) + now := time.Now() + session := &streamSession{ + SceneID: 42, + StartTime: now.Add(-36 * time.Second), + LastActivity: now, + VideoDuration: 120.0, + } + + // 30% is below 75% threshold + percentWatched := session.percentWatched() + assert.InDelta(t, 30.0, percentWatched, 0.1) + assert.False(t, percentWatched >= float64(tracker.getMinimumPlayPercent())) +} + +func TestActivityTracker_MultipleSessions(t *testing.T) { + config := &mockConfig{enabled: true, minPlayPercent: 50} + + // Create tracker without starting the goroutine (for unit testing) + tracker := &ActivityTracker{ + txnManager: nil, + sceneWriter: nil, + config: config, + sessionTimeout: DefaultSessionTimeout, + sessions: make(map[string]*streamSession), + } + + // Different clients watching same scene + tracker.RecordRequest(42, "192.168.1.100", 120.0) + tracker.RecordRequest(42, "192.168.1.101", 120.0) + + // Same client watching different scenes + tracker.RecordRequest(43, "192.168.1.100", 180.0) + + tracker.mutex.RLock() + assert.Len(t, tracker.sessions, 3) + tracker.mutex.RUnlock() +} + +func TestActivityTracker_ShortSessionIgnored(t *testing.T) { + // Test that short sessions are ignored + // Create a session with only ~0.8% watched (1 second of a 120 second video) + now := time.Now() + session := &streamSession{ + SceneID: 42, + ClientIP: "192.168.1.100", + StartTime: now.Add(-1 * time.Second), // Only 1 second + 
LastActivity: now, + VideoDuration: 120.0, // 2 minutes + } + + // Verify percent watched is below threshold (1s / 120s = 0.83%) + assert.InDelta(t, 0.83, session.percentWatched(), 0.1) + + // Verify elapsed time is short + elapsed := session.LastActivity.Sub(session.StartTime).Seconds() + assert.InDelta(t, 1.0, elapsed, 0.5) + + // Both are below the minimum thresholds (1% and 5 seconds) + percentWatched := session.percentWatched() + shouldSkip := percentWatched < 1 && elapsed < 5 + assert.True(t, shouldSkip, "Short session should be skipped") +} diff --git a/internal/dlna/cms.go b/internal/dlna/cms.go index e4a560462..daf43b382 100644 --- a/internal/dlna/cms.go +++ b/internal/dlna/cms.go @@ -27,7 +27,7 @@ import ( // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. -const defaultProtocolInfo = "http-get:*:video/mpeg:*,http-get:*:video/mp4:*,http-get:*:video/vnd.dlna.mpeg-tts:*,http-get:*:video/avi:*,http-get:*:video/x-matroska:*,http-get:*:video/x-ms-wmv:*,http-get:*:video/wtv:*,http-get:*:audio/mpeg:*,http-get:*:audio/mp3:*,http-get:*:audio/mp4:*,http-get:*:audio/x-ms-wma*,http-get:*:audio/wav:*,http-get:*:audio/L16:*,http-get:*image/jpeg:*,http-get:*image/png:*,http-get:*image/gif:*,http-get:*image/tiff:*" +const defaultProtocolInfo = "http-get:*:video/mpeg:*,http-get:*:video/mp4:*,http-get:*:video/vnd.dlna.mpeg-tts:*,http-get:*:video/avi:*,http-get:*:video/x-matroska:*,http-get:*:video/x-ms-wmv:*,http-get:*:video/wtv:*,http-get:*:audio/mpeg:*,http-get:*:audio/mp3:*,http-get:*:audio/mp4:*,http-get:*:audio/x-ms-wma*,http-get:*:audio/wav:*,http-get:*:audio/L16:*,http-get:*image/jpeg:*,http-get:*image/png:*,http-get:*image/gif:*,http-get:*image/tiff:*,http-get:*:image/avif:*" type connectionManagerService struct { *Server diff --git a/internal/dlna/dms.go b/internal/dlna/dms.go index 3b27d607b..d68705f74 100644 --- a/internal/dlna/dms.go +++ b/internal/dlna/dms.go @@ -278,6 +278,7 @@ type Server struct { repository 
Repository sceneServer sceneServer ipWhitelistManager *ipWhitelistManager + activityTracker *ActivityTracker VideoSortOrder string subscribeLock sync.Mutex @@ -596,6 +597,7 @@ func (me *Server) initMux(mux *http.ServeMux) { mux.HandleFunc(resPath, func(w http.ResponseWriter, r *http.Request) { sceneId := r.URL.Query().Get("scene") var scene *models.Scene + var videoDuration float64 repo := me.repository err := repo.WithReadTxn(r.Context(), func(ctx context.Context) error { sceneIdInt, err := strconv.Atoi(sceneId) @@ -603,6 +605,15 @@ func (me *Server) initMux(mux *http.ServeMux) { return nil } scene, _ = repo.SceneFinder.Find(ctx, sceneIdInt) + if scene != nil { + // Load primary file to get duration for activity tracking + if err := scene.LoadPrimaryFile(ctx, repo.FileGetter); err != nil { + logger.Debugf("failed to load primary file for scene %d: %v", sceneIdInt, err) + } + if f := scene.Files.Primary(); f != nil { + videoDuration = f.Duration + } + } return nil }) if err != nil { @@ -615,6 +626,14 @@ func (me *Server) initMux(mux *http.ServeMux) { w.Header().Set("transferMode.dlna.org", "Streaming") w.Header().Set("contentFeatures.dlna.org", "DLNA.ORG_OP=01;DLNA.ORG_CI=0;DLNA.ORG_FLAGS=01500000000000000000000000000000") + + // Track activity - uses time-based tracking, updated on each request + if me.activityTracker != nil { + sceneIdInt, _ := strconv.Atoi(sceneId) + clientIP, _, _ := net.SplitHostPort(r.RemoteAddr) + me.activityTracker.RecordRequest(sceneIdInt, clientIP, videoDuration) + } + me.sceneServer.StreamSceneDirect(scene, w, r) }) mux.HandleFunc(rootDescPath, func(w http.ResponseWriter, r *http.Request) { diff --git a/internal/dlna/service.go b/internal/dlna/service.go index 6ef825bac..98715b1e6 100644 --- a/internal/dlna/service.go +++ b/internal/dlna/service.go @@ -77,13 +77,29 @@ type Config interface { GetDLNADefaultIPWhitelist() []string GetVideoSortOrder() string GetDLNAPortAsString() string + GetDLNAActivityTrackingEnabled() bool +} + +// 
activityConfig wraps Config to implement ActivityConfig. +type activityConfig struct { + config Config + minPlayPercent int // cached from UI config +} + +func (c *activityConfig) GetDLNAActivityTrackingEnabled() bool { + return c.config.GetDLNAActivityTrackingEnabled() +} + +func (c *activityConfig) GetMinimumPlayPercent() int { + return c.minPlayPercent } type Service struct { - repository Repository - config Config - sceneServer sceneServer - ipWhitelistMgr *ipWhitelistManager + repository Repository + config Config + sceneServer sceneServer + ipWhitelistMgr *ipWhitelistManager + activityTracker *ActivityTracker server *Server running bool @@ -155,6 +171,7 @@ func (s *Service) init() error { repository: s.repository, sceneServer: s.sceneServer, ipWhitelistManager: s.ipWhitelistMgr, + activityTracker: s.activityTracker, Interfaces: interfaces, HTTPConn: func() net.Listener { conn, err := net.Listen("tcp", dmsConfig.Http) @@ -215,7 +232,14 @@ func (s *Service) init() error { // } // NewService initialises and returns a new DLNA service. -func NewService(repo Repository, cfg Config, sceneServer sceneServer) *Service { +// The sceneWriter parameter should implement SceneActivityWriter (typically models.SceneReaderWriter). +// The minPlayPercent parameter is the minimum percentage of video that must be played to increment play count. 
+func NewService(repo Repository, cfg Config, sceneServer sceneServer, sceneWriter SceneActivityWriter, minPlayPercent int) *Service { + activityCfg := &activityConfig{ + config: cfg, + minPlayPercent: minPlayPercent, + } + ret := &Service{ repository: repo, sceneServer: sceneServer, @@ -223,7 +247,8 @@ func NewService(repo Repository, cfg Config, sceneServer sceneServer) *Service { ipWhitelistMgr: &ipWhitelistManager{ config: cfg, }, - mutex: sync.Mutex{}, + activityTracker: NewActivityTracker(repo.TxnManager, sceneWriter, activityCfg), + mutex: sync.Mutex{}, } return ret @@ -283,6 +308,12 @@ func (s *Service) Stop(duration *time.Duration) { if s.running { logger.Info("Stopping DLNA") + + // Stop activity tracker first to process any pending sessions + if s.activityTracker != nil { + s.activityTracker.Stop() + } + err := s.server.Close() if err != nil { logger.Error(err) diff --git a/internal/identify/identify.go b/internal/identify/identify.go index 3d4c94467..6dc67dac3 100644 --- a/internal/identify/identify.go +++ b/internal/identify/identify.go @@ -147,6 +147,9 @@ func (t *SceneIdentifier) getOptions(source ScraperSource) MetadataOptions { if source.Options.IncludeMalePerformers != nil { options.IncludeMalePerformers = source.Options.IncludeMalePerformers } + if source.Options.PerformerGenders != nil { + options.PerformerGenders = source.Options.PerformerGenders + } if source.Options.SkipMultipleMatches != nil { options.SkipMultipleMatches = source.Options.SkipMultipleMatches } @@ -204,13 +207,23 @@ func (t *SceneIdentifier) getSceneUpdater(ctx context.Context, s *models.Scene, ret.Partial.StudioID = models.NewOptionalInt(*studioID) } - includeMalePerformers := true - if options.IncludeMalePerformers != nil { - includeMalePerformers = *options.IncludeMalePerformers + // Determine allowed genders for performer filtering + var allowedGenders []models.GenderEnum + if options.PerformerGenders != nil { + // New field takes precedence + allowedGenders = 
options.PerformerGenders + } else if options.IncludeMalePerformers != nil && !*options.IncludeMalePerformers { + // Legacy: if includeMalePerformers is false, include all genders except male + for _, g := range models.AllGenderEnum { + if g != models.GenderEnumMale { + allowedGenders = append(allowedGenders, g) + } + } } + // nil allowedGenders means include all performers addSkipSingleNamePerformerTag := false - performerIDs, err := rel.performers(ctx, !includeMalePerformers) + performerIDs, err := rel.performers(ctx, allowedGenders) if err != nil { if errors.Is(err, ErrSkipSingleNamePerformer) { addSkipSingleNamePerformerTag = true diff --git a/internal/identify/identify_test.go b/internal/identify/identify_test.go index eb646c305..35ad2006d 100644 --- a/internal/identify/identify_test.go +++ b/internal/identify/identify_test.go @@ -60,9 +60,15 @@ func TestSceneIdentifier_Identify(t *testing.T) { ) defaultOptions := &MetadataOptions{ - SetOrganized: &boolFalse, - SetCoverImage: &boolFalse, - IncludeMalePerformers: &boolFalse, + SetOrganized: &boolFalse, + SetCoverImage: &boolFalse, + PerformerGenders: []models.GenderEnum{ + models.GenderEnumFemale, + models.GenderEnumTransgenderFemale, + models.GenderEnumTransgenderMale, + models.GenderEnumIntersex, + models.GenderEnumNonBinary, + }, SkipSingleNamePerformers: &boolFalse, } sources := []ScraperSource{ @@ -216,9 +222,15 @@ func TestSceneIdentifier_modifyScene(t *testing.T) { boolFalse := false defaultOptions := &MetadataOptions{ - SetOrganized: &boolFalse, - SetCoverImage: &boolFalse, - IncludeMalePerformers: &boolFalse, + SetOrganized: &boolFalse, + SetCoverImage: &boolFalse, + PerformerGenders: []models.GenderEnum{ + models.GenderEnumFemale, + models.GenderEnumTransgenderFemale, + models.GenderEnumTransgenderMale, + models.GenderEnumIntersex, + models.GenderEnumNonBinary, + }, SkipSingleNamePerformers: &boolFalse, } tr := &SceneIdentifier{ diff --git a/internal/identify/options.go b/internal/identify/options.go 
index b4954a1f1..9e27a3e39 100644 --- a/internal/identify/options.go +++ b/internal/identify/options.go @@ -5,6 +5,7 @@ import ( "io" "strconv" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scraper" ) @@ -32,7 +33,10 @@ type MetadataOptions struct { SetCoverImage *bool `json:"setCoverImage"` SetOrganized *bool `json:"setOrganized"` // defaults to true if not provided + // Deprecated: use PerformerGenders instead IncludeMalePerformers *bool `json:"includeMalePerformers"` + // Filter to only include performers with these genders. If not provided, all genders are included. + PerformerGenders []models.GenderEnum `json:"performerGenders"` // defaults to true if not provided SkipMultipleMatches *bool `json:"skipMultipleMatches"` // ID of tag to tag skipped multiple matches with diff --git a/internal/identify/scene.go b/internal/identify/scene.go index 847a140c5..00d387c41 100644 --- a/internal/identify/scene.go +++ b/internal/identify/scene.go @@ -5,6 +5,7 @@ import ( "context" "errors" "fmt" + "slices" "strconv" "strings" "time" @@ -69,7 +70,7 @@ func (g sceneRelationships) studio(ctx context.Context) (*int, error) { return nil, nil } -func (g sceneRelationships) performers(ctx context.Context, ignoreMale bool) ([]int, error) { +func (g sceneRelationships) performers(ctx context.Context, allowedGenders []models.GenderEnum) ([]int, error) { fieldStrategy := g.fieldOptions["performers"] scraped := g.result.result.Performers @@ -97,8 +98,11 @@ func (g sceneRelationships) performers(ctx context.Context, ignoreMale bool) ([] singleNamePerformerSkipped := false for _, p := range scraped { - if ignoreMale && p.Gender != nil && strings.EqualFold(*p.Gender, models.GenderEnumMale.String()) { - continue + if allowedGenders != nil && p.Gender != nil { + gender := models.GenderEnum(strings.ToUpper(*p.Gender)) + if !slices.Contains(allowedGenders, gender) { + continue + } } performerID, err := getPerformerID(ctx, endpoint, g.performerCreator, p, 
createMissing, g.skipSingleNamePerformers) @@ -153,6 +157,8 @@ func (g sceneRelationships) tags(ctx context.Context) ([]int, error) { tagIDs = originalTagIDs } + endpoint := g.result.source.RemoteSite + for _, t := range scraped { if t.StoredID != nil { // existing tag, just add it @@ -163,10 +169,11 @@ func (g sceneRelationships) tags(ctx context.Context) ([]int, error) { tagIDs = sliceutil.AppendUnique(tagIDs, int(tagID)) } else if createMissing { - newTag := models.NewTag() - newTag.Name = t.Name + newTag := t.ToTag(endpoint, nil) - err := g.tagCreator.Create(ctx, &newTag) + err := g.tagCreator.Create(ctx, &models.CreateTagInput{ + Tag: newTag, + }) if err != nil { return nil, fmt.Errorf("error creating tag: %w", err) } diff --git a/internal/identify/scene_test.go b/internal/identify/scene_test.go index a76aef516..9a3fcf025 100644 --- a/internal/identify/scene_test.go +++ b/internal/identify/scene_test.go @@ -27,7 +27,7 @@ func Test_sceneRelationships_studio(t *testing.T) { db := mocks.NewDatabase() db.Studio.On("Create", testCtx, mock.Anything).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) + s := args.Get(1).(*models.CreateStudioInput) s.ID = validStoredIDInt }).Return(nil) @@ -183,13 +183,13 @@ func Test_sceneRelationships_performers(t *testing.T) { } tests := []struct { - name string - scene *models.Scene - fieldOptions *FieldOptions - scraped []*models.ScrapedPerformer - ignoreMale bool - want []int - wantErr bool + name string + scene *models.Scene + fieldOptions *FieldOptions + scraped []*models.ScrapedPerformer + allowedGenders []models.GenderEnum + want []int + wantErr bool }{ { "ignore", @@ -202,7 +202,7 @@ func Test_sceneRelationships_performers(t *testing.T) { StoredID: &validStoredID, }, }, - false, + nil, nil, false, }, @@ -211,7 +211,7 @@ func Test_sceneRelationships_performers(t *testing.T) { emptyScene, defaultOptions, []*models.ScrapedPerformer{}, - false, + nil, nil, false, }, @@ -225,7 +225,7 @@ func 
Test_sceneRelationships_performers(t *testing.T) { StoredID: &existingPerformerStr, }, }, - false, + nil, nil, false, }, @@ -239,7 +239,7 @@ func Test_sceneRelationships_performers(t *testing.T) { StoredID: &validStoredID, }, }, - false, + nil, []int{existingPerformerID, validStoredIDInt}, false, }, @@ -254,7 +254,7 @@ func Test_sceneRelationships_performers(t *testing.T) { Gender: &male, }, }, - true, + []models.GenderEnum{models.GenderEnumFemale, models.GenderEnumTransgenderMale, models.GenderEnumTransgenderFemale, models.GenderEnumIntersex, models.GenderEnumNonBinary}, nil, false, }, @@ -270,7 +270,7 @@ func Test_sceneRelationships_performers(t *testing.T) { StoredID: &validStoredID, }, }, - false, + nil, []int{validStoredIDInt}, false, }, @@ -287,7 +287,7 @@ func Test_sceneRelationships_performers(t *testing.T) { Gender: &female, }, }, - true, + []models.GenderEnum{models.GenderEnumFemale, models.GenderEnumTransgenderMale, models.GenderEnumTransgenderFemale, models.GenderEnumIntersex, models.GenderEnumNonBinary}, []int{validStoredIDInt}, false, }, @@ -304,7 +304,7 @@ func Test_sceneRelationships_performers(t *testing.T) { StoredID: &invalidStoredID, }, }, - false, + nil, nil, true, }, @@ -319,7 +319,7 @@ func Test_sceneRelationships_performers(t *testing.T) { }, } - got, err := tr.performers(testCtx, tt.ignoreMale) + got, err := tr.performers(testCtx, tt.allowedGenders) if (err != nil) != tt.wantErr { t.Errorf("sceneRelationships.performers() error = %v, wantErr %v", err, tt.wantErr) return @@ -368,14 +368,14 @@ func Test_sceneRelationships_tags(t *testing.T) { db := mocks.NewDatabase() - db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.Tag) bool { - return p.Name == validName + db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateTagInput) bool { + return p.Tag.Name == validName })).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = validStoredIDInt + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = 
validStoredIDInt }).Return(nil) - db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.Tag) bool { - return p.Name == invalidName + db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateTagInput) bool { + return p.Tag.Name == invalidName })).Return(errors.New("error creating tag")) tr := sceneRelationships{ diff --git a/internal/identify/studio_test.go b/internal/identify/studio_test.go index 5424a6a93..083675650 100644 --- a/internal/identify/studio_test.go +++ b/internal/identify/studio_test.go @@ -21,13 +21,13 @@ func Test_createMissingStudio(t *testing.T) { db := mocks.NewDatabase() - db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.Studio) bool { + db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateStudioInput) bool { return p.Name == validName })).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) + s := args.Get(1).(*models.CreateStudioInput) s.ID = createdID }).Return(nil) - db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.Studio) bool { + db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateStudioInput) bool { return p.Name == invalidName })).Return(errors.New("error creating studio")) diff --git a/internal/log/logger.go b/internal/log/logger.go index 5f686d32d..cb07121a5 100644 --- a/internal/log/logger.go +++ b/internal/log/logger.go @@ -3,12 +3,14 @@ package log import ( "fmt" + "io" "os" "strings" "sync" "time" "github.com/sirupsen/logrus" + lumberjack "gopkg.in/natefinch/lumberjack.v2" ) type LogItem struct { @@ -41,8 +43,8 @@ func NewLogger() *Logger { } // Init initialises the logger based on a logging configuration -func (log *Logger) Init(logFile string, logOut bool, logLevel string) { - var file *os.File +func (log *Logger) Init(logFile string, logOut bool, logLevel string, logFileMaxSize int) { + var logger io.WriteCloser customFormatter := new(logrus.TextFormatter) customFormatter.TimestampFormat = "2006-01-02 15:04:05" customFormatter.ForceColors = true 
@@ -57,30 +59,38 @@ func (log *Logger) Init(logFile string, logOut bool, logLevel string) { // the access log colouring not being applied _, _ = customFormatter.Format(logrus.NewEntry(log.logger)) + // if size is 0, disable rotation if logFile != "" { - var err error - file, err = os.OpenFile(logFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644) - - if err != nil { - fmt.Printf("Could not open '%s' for log output due to error: %s\n", logFile, err.Error()) + if logFileMaxSize == 0 { + var err error + logger, err = os.OpenFile(logFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644) + if err != nil { + fmt.Fprintf(os.Stderr, "unable to open log file %s: %v\n", logFile, err) + } + } else { + logger = &lumberjack.Logger{ + Filename: logFile, + MaxSize: logFileMaxSize, // Megabytes + Compress: true, + } } } - if file != nil { + if logger != nil { if logOut { // log to file separately disabling colours fileFormatter := new(logrus.TextFormatter) fileFormatter.TimestampFormat = customFormatter.TimestampFormat fileFormatter.FullTimestamp = customFormatter.FullTimestamp log.logger.AddHook(&fileLogHook{ - Writer: file, + Writer: logger, Formatter: fileFormatter, }) } else { // logging to file only // turn off the colouring for the file customFormatter.ForceColors = false - log.logger.Out = file + log.logger.Out = logger } } diff --git a/internal/manager/backup.go b/internal/manager/backup.go new file mode 100644 index 000000000..4a41b263b --- /dev/null +++ b/internal/manager/backup.go @@ -0,0 +1,185 @@ +package manager + +import ( + "archive/zip" + "fmt" + "io" + "io/fs" + "os" + "path/filepath" + "strings" + + "github.com/stashapp/stash/internal/manager/config" + "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/logger" +) + +type databaseBackupZip struct { + *zip.Writer +} + +func (z *databaseBackupZip) zipFileRename(fn, outDir, outFn string) error { + p := filepath.Join(outDir, outFn) + p = filepath.ToSlash(p) + + f, err := z.Create(p) + if err != nil { + 
return fmt.Errorf("error creating zip entry for %s: %v", fn, err) + } + + i, err := os.Open(fn) + if err != nil { + return fmt.Errorf("error opening %s: %v", fn, err) + } + + defer i.Close() + + if _, err := io.Copy(f, i); err != nil { + return fmt.Errorf("error writing %s to zip: %v", fn, err) + } + + return nil +} + +func (z *databaseBackupZip) zipFile(fn, outDir string) error { + return z.zipFileRename(fn, outDir, filepath.Base(fn)) +} + +func (s *Manager) BackupDatabase(download bool, includeBlobs bool) (string, string, error) { + var backupPath string + var backupName string + + // if we include blobs, then the output is a zip file + // if not, using the same backup logic as before, which creates a sqlite file + if !includeBlobs || s.Config.GetBlobsStorage() != config.BlobStorageTypeFilesystem { + return s.backupDatabaseOnly(download) + } + + // use tmp directory for the backup + backupDir := s.Paths.Generated.Tmp + if err := fsutil.EnsureDir(backupDir); err != nil { + return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err) + } + f, err := os.CreateTemp(backupDir, "backup*.sqlite") + if err != nil { + return "", "", err + } + + backupPath = f.Name() + backupName = s.Database.DatabaseBackupPath("") + f.Close() + + // delete the temp file so that the backup operation can create it + if err := os.Remove(backupPath); err != nil { + return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err) + } + + if err := s.Database.Backup(backupPath); err != nil { + return "", "", err + } + + // create a zip file + zipFileDir := s.Paths.Generated.Downloads + if !download { + zipFileDir = s.Config.GetBackupDirectoryPathOrDefault() + if zipFileDir != "" { + if err := fsutil.EnsureDir(zipFileDir); err != nil { + return "", "", fmt.Errorf("could not create backup directory %v: %w", zipFileDir, err) + } + } + } + + zipFileName := backupName + ".zip" + zipFilePath := filepath.Join(zipFileDir, zipFileName) + + 
logger.Debugf("Preparing zip file for database backup at %v", zipFilePath) + + zf, err := os.Create(zipFilePath) + if err != nil { + return "", "", fmt.Errorf("could not create zip file %v: %w", zipFilePath, err) + } + defer zf.Close() + + z := databaseBackupZip{ + Writer: zip.NewWriter(zf), + } + + defer z.Close() + + // move the database file into the zip + dbFn := filepath.Base(s.Config.GetDatabasePath()) + if err := z.zipFileRename(backupPath, "", dbFn); err != nil { + return "", "", fmt.Errorf("could not add database backup to zip file: %w", err) + } + + if err := os.Remove(backupPath); err != nil { + return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err) + } + + // walk the blobs directory and add files to the zip + blobsDir := s.Config.GetBlobsPath() + err = filepath.WalkDir(blobsDir, func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + + if d.IsDir() { + return nil + } + + // calculate out dir by removing the blobsDir prefix from the path + outDir := filepath.Join("blobs", strings.TrimPrefix(filepath.Dir(path), blobsDir)) + if err := z.zipFile(path, outDir); err != nil { + return fmt.Errorf("could not add blob %v to zip file: %w", path, err) + } + + return nil + }) + + if err != nil { + return "", "", fmt.Errorf("error walking blobs directory: %w", err) + } + + return zipFilePath, zipFileName, nil +} + +func (s *Manager) backupDatabaseOnly(download bool) (string, string, error) { + var backupPath string + var backupName string + + if download { + backupDir := s.Paths.Generated.Downloads + if err := fsutil.EnsureDir(backupDir); err != nil { + return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err) + } + f, err := os.CreateTemp(backupDir, "backup*.sqlite") + if err != nil { + return "", "", err + } + + backupPath = f.Name() + backupName = s.Database.DatabaseBackupPath("") + f.Close() + + // delete the temp file so that the backup operation can create it + if 
err := os.Remove(backupPath); err != nil { + return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err) + } + } else { + backupDir := s.Config.GetBackupDirectoryPathOrDefault() + if backupDir != "" { + if err := fsutil.EnsureDir(backupDir); err != nil { + return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err) + } + } + backupPath = s.Database.DatabaseBackupPath(backupDir) + backupName = filepath.Base(backupPath) + } + + err := s.Database.Backup(backupPath) + if err != nil { + return "", "", err + } + + return backupPath, backupName, nil +} diff --git a/internal/manager/config/config.go b/internal/manager/config/config.go index 1b627cbdd..19e263810 100644 --- a/internal/manager/config/config.go +++ b/internal/manager/config/config.go @@ -16,9 +16,9 @@ import ( "golang.org/x/crypto/bcrypt" - "github.com/knadh/koanf" "github.com/knadh/koanf/parsers/yaml" "github.com/knadh/koanf/providers/file" + "github.com/knadh/koanf/v2" "github.com/stashapp/stash/internal/identify" "github.com/stashapp/stash/pkg/fsutil" @@ -43,6 +43,9 @@ const ( Password = "password" MaxSessionAge = "max_session_age" + // SFWContentMode mode config key + SFWContentMode = "sfw_content_mode" + FFMpegPath = "ffmpeg_path" FFProbePath = "ffprobe_path" @@ -80,6 +83,21 @@ const ( ParallelTasks = "parallel_tasks" parallelTasksDefault = 1 + UseCustomSpriteInterval = "use_custom_sprite_interval" + UseCustomSpriteIntervalDefault = false + + SpriteInterval = "sprite_interval" + SpriteIntervalDefault = 30 + + MinimumSprites = "minimum_sprites" + MinimumSpritesDefault = 10 + + MaximumSprites = "maximum_sprites" + MaximumSpritesDefault = 500 + + SpriteScreenshotSize = "sprite_screenshot_width" + spriteScreenshotSizeDefault = 160 + PreviewPreset = "preview_preset" TranscodeHardwareAcceleration = "ffmpeg.hardware_acceleration" @@ -191,6 +209,7 @@ const ( CSSEnabled = "cssenabled" JavascriptEnabled = "javascriptenabled" CustomLocalesEnabled = 
"customlocalesenabled" + DisableCustomizations = "disable_customizations" ShowScrubber = "show_scrubber" showScrubberDefault = true @@ -206,6 +225,7 @@ const ( ImageLightboxResetZoomOnNav = "image_lightbox.reset_zoom_on_nav" ImageLightboxScrollModeKey = "image_lightbox.scroll_mode" ImageLightboxScrollAttemptsBeforeChange = "image_lightbox.scroll_attempts_before_change" + ImageLightboxDisableAnimation = "image_lightbox.disable_animation" UI = "ui" @@ -215,6 +235,7 @@ const ( DisableDropdownCreateStudio = "disable_dropdown_create.studio" DisableDropdownCreateTag = "disable_dropdown_create.tag" DisableDropdownCreateMovie = "disable_dropdown_create.movie" + DisableDropdownCreateGallery = "disable_dropdown_create.gallery" HandyKey = "handy_key" FunscriptOffset = "funscript_offset" @@ -249,13 +270,15 @@ const ( DLNAPortDefault = 1338 // Logging options - LogFile = "logfile" - LogOut = "logout" - defaultLogOut = true - LogLevel = "loglevel" - defaultLogLevel = "Info" - LogAccess = "logaccess" - defaultLogAccess = true + LogFile = "logfile" + LogOut = "logout" + defaultLogOut = true + LogLevel = "loglevel" + defaultLogLevel = "Info" + LogAccess = "logaccess" + defaultLogAccess = true + LogFileMaxSize = "logfile_max_size" + defaultLogFileMaxSize = 0 // megabytes, default disabled // Default settings DefaultScanSettings = "defaults.scan_task" @@ -267,6 +290,9 @@ const ( DeleteGeneratedDefault = "defaults.delete_generated" deleteGeneratedDefaultDefault = true + // Trash/Recycle Bin options + DeleteTrashPath = "delete_trash_path" + // Desktop Integration Options NoBrowser = "nobrowser" NoBrowserDefault = false @@ -285,9 +311,9 @@ const ( // slice default values var ( defaultVideoExtensions = []string{"m4v", "mp4", "mov", "wmv", "avi", "mpg", "mpeg", "rmvb", "rm", "flv", "asf", "mkv", "webm", "f4v"} - defaultImageExtensions = []string{"png", "jpg", "jpeg", "gif", "webp"} + defaultImageExtensions = []string{"png", "jpg", "jpeg", "gif", "webp", "avif"} defaultGalleryExtensions = 
[]string{"zip", "cbz"} - defaultMenuItems = []string{"scenes", "images", "movies", "markers", "galleries", "performers", "studios", "tags"} + defaultMenuItems = []string{"scenes", "images", "groups", "markers", "galleries", "performers", "studios", "tags"} ) type MissingConfigError struct { @@ -628,7 +654,15 @@ func (i *Config) getStringMapString(key string) map[string]string { return ret } -// GetStathPaths returns the configured stash library paths. +// GetSFW returns true if SFW mode is enabled. +// Default performer images are changed to more agnostic images when enabled. +func (i *Config) GetSFWContentMode() bool { + i.RLock() + defer i.RUnlock() + return i.getBool(SFWContentMode) +} + +// GetStashPaths returns the configured stash library paths. // Works opposite to the usual case - it will return the override // value only if the main value is not set. func (i *Config) GetStashPaths() StashConfigs { @@ -956,6 +990,50 @@ func (i *Config) GetParallelTasksWithAutoDetection() int { return parallelTasks } +// GetUseCustomSpriteInterval returns true if the sprite minimum, maximum, and interval settings +// should be used instead of the default +func (i *Config) GetUseCustomSpriteInterval() bool { + value := i.getBool(UseCustomSpriteInterval) + return value +} + +// GetSpriteInterval returns the time (in seconds) to be between each scrubber sprite +// A value of 0 indicates that the sprite interval should be automatically determined +// based on the minimum sprite setting. +func (i *Config) GetSpriteInterval() float64 { + value := i.getFloat64(SpriteInterval) + return value +} + +// GetMinimumSprites returns the minimum number of sprites that have to be generated +// A value of 0 will be overridden with the default of 10. 
+func (i *Config) GetMinimumSprites() int { + value := i.getInt(MinimumSprites) + if value <= 0 { + return MinimumSpritesDefault + } + return value +} + +// GetMaximumSprites returns the maximum number of sprites that can be generated +// A value of 0 indicates no maximum. +func (i *Config) GetMaximumSprites() int { + value := i.getInt(MaximumSprites) + return value +} + +// GetSpriteScreenshotSize returns the required size of the screenshots to be taken +// during sprite generation in pixels. This will be the width for landscape scenes +// and the height for portrait scenes, with the other dimension being scaled to maintain +// the aspect ratio. If the value is less than or equal to 0, the default will be used. +func (i *Config) GetSpriteScreenshotSize() int { + value := i.getInt(SpriteScreenshotSize) + if value <= 0 { + return spriteScreenshotSizeDefault + } + return value +} + func (i *Config) GetPreviewAudio() bool { return i.getBool(PreviewAudio) } @@ -1280,6 +1358,10 @@ func (i *Config) GetImageLightboxOptions() ConfigImageLightboxResult { if v := i.with(ImageLightboxScrollAttemptsBeforeChange); v != nil { ret.ScrollAttemptsBeforeChange = v.Int(ImageLightboxScrollAttemptsBeforeChange) } + if v := i.with(ImageLightboxDisableAnimation); v != nil { + value := v.Bool(ImageLightboxDisableAnimation) + ret.DisableAnimation = &value + } return ret } @@ -1290,6 +1372,7 @@ func (i *Config) GetDisableDropdownCreate() *ConfigDisableDropdownCreate { Studio: i.getBool(DisableDropdownCreateStudio), Tag: i.getBool(DisableDropdownCreateTag), Movie: i.getBool(DisableDropdownCreateMovie), + Gallery: i.getBool(DisableDropdownCreateGallery), } } @@ -1300,6 +1383,26 @@ func (i *Config) GetUIConfiguration() map[string]interface{} { return i.forKey(UI).Cut(UI).Raw() } +// GetMinimumPlayPercent returns the minimum percentage of a video that must be +// watched before incrementing the play count. Returns 0 if not configured. 
+func (i *Config) GetMinimumPlayPercent() int { + uiConfig := i.GetUIConfiguration() + if uiConfig == nil { + return 0 + } + if val, ok := uiConfig["minimumPlayPercent"]; ok { + switch v := val.(type) { + case int: + return v + case float64: + return int(v) + case int64: + return int(v) + } + } + return 0 +} + func (i *Config) SetUIConfiguration(v map[string]interface{}) { i.Lock() defer i.Unlock() @@ -1436,6 +1539,13 @@ func (i *Config) GetCustomLocalesEnabled() bool { return i.getBool(CustomLocalesEnabled) } +// GetDisableCustomizations returns true if all customizations (plugins, custom CSS, +// custom JavaScript, and custom locales) should be disabled. This is useful for +// troubleshooting issues without permanently disabling individual customizations. +func (i *Config) GetDisableCustomizations() bool { + return i.getBool(DisableCustomizations) +} + func (i *Config) GetHandyKey() string { return i.getString(HandyKey) } @@ -1456,6 +1566,14 @@ func (i *Config) GetDeleteGeneratedDefault() bool { return i.getBoolDefault(DeleteGeneratedDefault, deleteGeneratedDefaultDefault) } +func (i *Config) GetDeleteTrashPath() string { + return i.getString(DeleteTrashPath) +} + +func (i *Config) SetDeleteTrashPath(value string) { + i.SetString(DeleteTrashPath, value) +} + // GetDefaultIdentifySettings returns the default Identify task settings. // Returns nil if the settings could not be unmarshalled, or if it // has not been set. @@ -1584,6 +1702,22 @@ func (i *Config) GetDLNAPortAsString() string { return ":" + strconv.Itoa(i.GetDLNAPort()) } +// GetDLNAActivityTrackingEnabled returns true if DLNA activity tracking is enabled. +// This uses the same "trackActivity" UI setting that controls frontend play history tracking. +// When enabled, scenes played via DLNA will have their play count and duration tracked. 
+func (i *Config) GetDLNAActivityTrackingEnabled() bool { + uiConfig := i.GetUIConfiguration() + if uiConfig == nil { + return true // Default to enabled + } + if val, ok := uiConfig["trackActivity"]; ok { + if v, ok := val.(bool); ok { + return v + } + } + return true // Default to enabled +} + // GetVideoSortOrder returns the sort order to display videos. If // empty, videos will be sorted by titles. func (i *Config) GetVideoSortOrder() string { @@ -1625,6 +1759,16 @@ func (i *Config) GetLogAccess() bool { return i.getBoolDefault(LogAccess, defaultLogAccess) } +// GetLogFileMaxSize returns the maximum size of the log file in megabytes for lumberjack to rotate +func (i *Config) GetLogFileMaxSize() int { + value := i.getInt(LogFileMaxSize) + if value < 0 { + value = defaultLogFileMaxSize + } + + return value +} + // Max allowed graphql upload size in megabytes func (i *Config) GetMaxUploadSize() int64 { i.RLock() @@ -1776,6 +1920,12 @@ func (i *Config) setDefaultValues() { i.setDefault(PreviewAudio, previewAudioDefault) i.setDefault(SoundOnPreview, false) + i.setDefault(UseCustomSpriteInterval, UseCustomSpriteIntervalDefault) + i.setDefault(SpriteInterval, SpriteIntervalDefault) + i.setDefault(MinimumSprites, MinimumSpritesDefault) + i.setDefault(MaximumSprites, MaximumSpritesDefault) + i.setDefault(SpriteScreenshotSize, spriteScreenshotSizeDefault) + i.setDefault(ThemeColor, DefaultThemeColor) i.setDefault(WriteImageThumbnails, writeImageThumbnailsDefault) diff --git a/internal/manager/config/init.go b/internal/manager/config/init.go index 09f1c18bc..840b50b70 100644 --- a/internal/manager/config/init.go +++ b/internal/manager/config/init.go @@ -8,9 +8,9 @@ import ( "path/filepath" "strings" - "github.com/knadh/koanf" "github.com/knadh/koanf/providers/env" "github.com/knadh/koanf/providers/posflag" + "github.com/knadh/koanf/v2" "github.com/spf13/pflag" "github.com/stashapp/stash/pkg/fsutil" diff --git a/internal/manager/config/stash_config.go 
b/internal/manager/config/stash_config.go index 4a2cc7d60..7a103631c 100644 --- a/internal/manager/config/stash_config.go +++ b/internal/manager/config/stash_config.go @@ -38,3 +38,12 @@ func (s StashConfigs) GetStashFromDirPath(dirPath string) *StashConfig { } return nil } + +func (s StashConfigs) Paths() []string { + paths := make([]string, len(s)) + for i, c := range s { + // #6618 - clean the path to ensure comparison works correctly + paths[i] = filepath.Clean(c.Path) + } + return paths +} diff --git a/internal/manager/config/tasks.go b/internal/manager/config/tasks.go index 0cfabef30..af7d5f674 100644 --- a/internal/manager/config/tasks.go +++ b/internal/manager/config/tasks.go @@ -11,8 +11,10 @@ type ScanMetadataOptions struct { ScanGenerateImagePreviews bool `json:"scanGenerateImagePreviews"` // Generate sprites during scan ScanGenerateSprites bool `json:"scanGenerateSprites"` - // Generate phashes during scan + // Generate video phashes during scan ScanGeneratePhashes bool `json:"scanGeneratePhashes"` + // Generate image phashes during scan + ScanGenerateImagePhashes bool `json:"scanGenerateImagePhashes"` // Generate image thumbnails during scan ScanGenerateThumbnails bool `json:"scanGenerateThumbnails"` // Generate image thumbnails during scan diff --git a/internal/manager/config/ui.go b/internal/manager/config/ui.go index 699091154..de769304f 100644 --- a/internal/manager/config/ui.go +++ b/internal/manager/config/ui.go @@ -13,6 +13,7 @@ type ConfigImageLightboxResult struct { ResetZoomOnNav *bool `json:"resetZoomOnNav"` ScrollMode *ImageLightboxScrollMode `json:"scrollMode"` ScrollAttemptsBeforeChange int `json:"scrollAttemptsBeforeChange"` + DisableAnimation *bool `json:"disableAnimation"` } type ImageLightboxDisplayMode string @@ -104,4 +105,5 @@ type ConfigDisableDropdownCreate struct { Tag bool `json:"tag"` Studio bool `json:"studio"` Movie bool `json:"movie"` + Gallery bool `json:"gallery"` } diff --git a/internal/manager/exclude_files.go 
b/internal/manager/exclude_files.go index 6c5452d0d..7ab24b51c 100644 --- a/internal/manager/exclude_files.go +++ b/internal/manager/exclude_files.go @@ -60,6 +60,10 @@ func generateRegexps(patterns []string) []*regexp.Regexp { var fileRegexps []*regexp.Regexp for _, pattern := range patterns { + if pattern == "" || pattern == " " { + logger.Warnf("Skipping empty exclude pattern") + continue + } if !strings.HasPrefix(pattern, "(?i)") { pattern = "(?i)" + pattern } diff --git a/internal/manager/generator_interactive_heatmap_speed.go b/internal/manager/generator_interactive_heatmap_speed.go index ac6ca53bd..d10ce5b19 100644 --- a/internal/manager/generator_interactive_heatmap_speed.go +++ b/internal/manager/generator_interactive_heatmap_speed.go @@ -28,7 +28,8 @@ type InteractiveHeatmapSpeedGenerator struct { type Script struct { // Version of Launchscript - Version string `json:"version"` + // #5600 - ignore version, don't validate type + Version json.RawMessage `json:"version"` // Inverted causes up and down movement to be flipped. Inverted bool `json:"inverted,omitempty"` // Range is the percentage of a full stroke to use. @@ -40,7 +41,7 @@ type Script struct { // Action is a move at a specific time. type Action struct { // At time in milliseconds the action should fire. - At int64 `json:"at"` + At float64 `json:"at"` // Pos is the place in percent to move to. 
Pos int `json:"pos"` @@ -109,8 +110,8 @@ func (g *InteractiveHeatmapSpeedGenerator) LoadFunscriptData(path string, sceneD // trim actions with negative timestamps to avoid index range errors when generating heatmap // #3181 - also trim actions that occur after the scene duration loggedBadTimestamp := false - sceneDurationMilli := int64(sceneDuration * 1000) - isValid := func(x int64) bool { + sceneDurationMilli := sceneDuration * 1000 + isValid := func(x float64) bool { return x >= 0 && x < sceneDurationMilli } @@ -132,7 +133,7 @@ func (g *InteractiveHeatmapSpeedGenerator) LoadFunscriptData(path string, sceneD func (funscript *Script) UpdateIntensityAndSpeed() { - var t1, t2 int64 + var t1, t2 float64 var p1, p2 int var intensity float64 for i := range funscript.Actions { @@ -241,13 +242,13 @@ func (gt GradientTable) GetYRange(t float64) [2]float64 { func (funscript Script) getGradientTable(numSegments int, sceneDurationMilli int64) GradientTable { const windowSize = 15 - const backfillThreshold = 500 + const backfillThreshold = float64(500) segments := make([]struct { count int intensity int yRange [2]float64 - at int64 + at float64 }, numSegments) gradient := make(GradientTable, numSegments) posList := []int{} @@ -297,7 +298,7 @@ func (funscript Script) getGradientTable(numSegments int, sceneDurationMilli int // Fill in gaps in segments for i := 0; i < numSegments; i++ { - segmentTS := (maxts / int64(numSegments)) * int64(i) + segmentTS := float64((maxts / int64(numSegments)) * int64(i)) // Empty segment - fill it with the previous up to backfillThreshold ms if segments[i].count == 0 { @@ -406,7 +407,8 @@ func ConvertFunscriptToCSV(funscriptPath string) ([]byte, error) { pos = convertRange(pos, 0, funscript.Range, 0, 100) } - buffer.WriteString(fmt.Sprintf("%d,%d\r\n", action.At, pos)) + // I don't know whether the csv format requires int or float, so for now we'll use int + buffer.WriteString(fmt.Sprintf("%d,%d\r\n", int(math.Round(action.At)), pos)) } return 
buffer.Bytes(), nil } diff --git a/internal/manager/generator_sprite.go b/internal/manager/generator_sprite.go index c28d28674..dc56fde88 100644 --- a/internal/manager/generator_sprite.go +++ b/internal/manager/generator_sprite.go @@ -21,8 +21,7 @@ type SpriteGenerator struct { VideoChecksum string ImageOutputPath string VTTOutputPath string - Rows int - Columns int + Config SpriteGeneratorConfig SlowSeek bool // use alternate seek function, very slow! Overwrite bool @@ -30,13 +29,81 @@ type SpriteGenerator struct { g *generate.Generator } -func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageOutputPath string, vttOutputPath string, rows int, cols int) (*SpriteGenerator, error) { +// SpriteGeneratorConfig holds configuration for the SpriteGenerator +type SpriteGeneratorConfig struct { + // MinimumSprites is the minimum number of sprites to generate, even if the video duration is short + // SpriteInterval will be adjusted accordingly to ensure at least this many sprites are generated. + // A value of 0 means no minimum, and the generator will use the provided SpriteInterval or + // calculate it based on the video duration and MaximumSprites + MinimumSprites int + + // MaximumSprites is the maximum number of sprites to generate, even if the video duration is long + // SpriteInterval will be adjusted accordingly to ensure no more than this many sprites are generated + // A value of 0 means no maximum, and the generator will use the provided SpriteInterval or + // calculate it based on the video duration and MinimumSprites + MaximumSprites int + + // SpriteInterval is the default interval in seconds between each sprite. 
+ // If MinimumSprites or MaximumSprites are set, this value will be adjusted accordingly + // to ensure the desired number of sprites are generated + // A value of 0 means the generator will calculate the interval based on the video duration and + // the provided MinimumSprites and MaximumSprites + SpriteInterval float64 + + // SpriteSize is the size in pixels of the longest dimension of each sprite image. + // The other dimension will be automatically calculated to maintain the aspect ratio of the video + SpriteSize int +} + +const ( + // DefaultSpriteAmount is the default number of sprites to generate if no configuration is provided + // This corresponds to the legacy behavior of the generator, which generates 81 sprites at equal + // intervals across the video duration + DefaultSpriteAmount = 81 + + // DefaultSpriteSize is the default size in pixels of the longest dimension of each sprite image + // if no configuration is provided. This corresponds to the legacy behavior of the generator. 
+ DefaultSpriteSize = 160 +) + +var DefaultSpriteGeneratorConfig = SpriteGeneratorConfig{ + MinimumSprites: DefaultSpriteAmount, + MaximumSprites: DefaultSpriteAmount, + SpriteInterval: 0, + SpriteSize: DefaultSpriteSize, +} + +// NewSpriteGenerator creates a new SpriteGenerator for the given video file and configuration +// It calculates the appropriate sprite interval and count based on the video duration and the provided configuration +func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageOutputPath string, vttOutputPath string, config SpriteGeneratorConfig) (*SpriteGenerator, error) { exists, err := fsutil.FileExists(videoFile.Path) if !exists { return nil, err } + + if videoFile.VideoStreamDuration <= 0 { + s := fmt.Sprintf("video %s: duration(%.3f)/frame count(%d) invalid, skipping sprite creation", videoFile.Path, videoFile.VideoStreamDuration, videoFile.FrameCount) + return nil, errors.New(s) + } + + config.SpriteInterval = calculateSpriteInterval(videoFile, config) + chunkCount := int(math.Ceil(videoFile.VideoStreamDuration / config.SpriteInterval)) + + // adjust the chunk count to the next highest perfect square, to ensure the sprite image + // is completely filled (no empty space in the grid) and the grid is as square as possible (minimizing the number of rows/columns) + gridSize := generate.GetSpriteGridSize(chunkCount) + newChunkCount := gridSize * gridSize + + if newChunkCount != chunkCount { + logger.Debugf("[generator] adjusting chunk count from %d to %d to fit a %dx%d grid", chunkCount, newChunkCount, gridSize, gridSize) + chunkCount = newChunkCount + } + + if config.SpriteSize <= 0 { + config.SpriteSize = DefaultSpriteSize + } + slowSeek := false - chunkCount := rows * cols // For files with small duration / low frame count try to seek using frame number intead of seconds if videoFile.VideoStreamDuration < 5 || (0 < videoFile.FrameCount && videoFile.FrameCount <= int64(chunkCount)) { // some files can have FrameCount == 0, 
only use SlowSeek if duration < 5 @@ -71,9 +138,8 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageO VideoChecksum: videoChecksum, ImageOutputPath: imageOutputPath, VTTOutputPath: vttOutputPath, - Rows: rows, + Config: config, SlowSeek: slowSeek, - Columns: cols, g: &generate.Generator{ Encoder: instance.FFMpeg, FFMpegConfig: instance.Config, @@ -83,6 +149,40 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageO }, nil } +func calculateSpriteInterval(videoFile ffmpeg.VideoFile, config SpriteGeneratorConfig) float64 { + // If a custom sprite interval is provided, start with that + spriteInterval := config.SpriteInterval + + // If no custom interval is provided, calculate the interval based on the + // video duration and minimum sprite count + if spriteInterval <= 0 { + minSprites := config.MinimumSprites + if minSprites <= 0 { + panic("invalid configuration: MinimumSprites must be greater than 0 if SpriteInterval is not set") + } + + logger.Debugf("[generator] calculating sprite interval for video duration %.3fs with minimum sprites %d", videoFile.VideoStreamDuration, minSprites) + return videoFile.VideoStreamDuration / float64(minSprites) + } + + // Calculate the number of sprites that would be generated with the provided interval + spriteCount := int(math.Ceil(videoFile.VideoStreamDuration / spriteInterval)) + + // If the calculated sprite count is greater than the maximum, adjust the interval to meet the maximum + if config.MaximumSprites > 0 && spriteCount > int(config.MaximumSprites) { + spriteInterval = videoFile.VideoStreamDuration / float64(config.MaximumSprites) + logger.Debugf("[generator] provided sprite interval %.1fs results in %d sprites, which exceeds the maximum of %d, adjusting interval to %.1fs", config.SpriteInterval, spriteCount, config.MaximumSprites, spriteInterval) + } + + // If the calculated sprite count is less than the minimum, adjust the interval to meet the minimum + if 
config.MinimumSprites > 0 && spriteCount < int(config.MinimumSprites) { + spriteInterval = videoFile.VideoStreamDuration / float64(config.MinimumSprites) + logger.Debugf("[generator] provided sprite interval %.1fs results in %d sprites, which is less than the minimum of %d, adjusting interval to %.1fs", config.SpriteInterval, spriteCount, config.MinimumSprites, spriteInterval) + } + + return spriteInterval +} + func (g *SpriteGenerator) Generate() error { if err := g.generateSpriteImage(); err != nil { return err @@ -100,6 +200,8 @@ func (g *SpriteGenerator) generateSpriteImage() error { var images []image.Image + isPortrait := g.Info.VideoFile.Height > g.Info.VideoFile.Width + if !g.SlowSeek { logger.Infof("[generator] generating sprite image for %s", g.Info.VideoFile.Path) // generate `ChunkCount` thumbnails @@ -107,8 +209,7 @@ func (g *SpriteGenerator) generateSpriteImage() error { for i := 0; i < g.Info.ChunkCount; i++ { time := float64(i) * stepSize - - img, err := g.g.SpriteScreenshot(context.TODO(), g.Info.VideoFile.Path, time) + img, err := g.g.SpriteScreenshot(context.TODO(), g.Info.VideoFile.Path, time, g.Config.SpriteSize, isPortrait) if err != nil { return err } @@ -126,7 +227,7 @@ func (g *SpriteGenerator) generateSpriteImage() error { return errors.New("invalid frame number conversion") } - img, err := g.g.SpriteScreenshotSlow(context.TODO(), g.Info.VideoFile.Path, int(frame)) + img, err := g.g.SpriteScreenshotSlow(context.TODO(), g.Info.VideoFile.Path, int(frame), g.Config.SpriteSize) if err != nil { return err } @@ -158,7 +259,7 @@ func (g *SpriteGenerator) generateSpriteVTT() error { stepSize /= g.Info.FrameRate } - return g.g.SpriteVTT(context.TODO(), g.VTTOutputPath, g.ImageOutputPath, stepSize) + return g.g.SpriteVTT(context.TODO(), g.VTTOutputPath, g.ImageOutputPath, stepSize, g.Info.ChunkCount) } func (g *SpriteGenerator) imageExists() bool { diff --git a/internal/manager/init.go b/internal/manager/init.go index dd1640ed3..b4af5eab7 100644 --- 
a/internal/manager/init.go +++ b/internal/manager/init.go @@ -78,7 +78,7 @@ func Initialize(cfg *config.Config, l *log.Logger) (*Manager, error) { } dlnaRepository := dlna.NewRepository(repo) - dlnaService := dlna.NewService(dlnaRepository, cfg, sceneServer) + dlnaService := dlna.NewService(dlnaRepository, cfg, sceneServer, repo.Scene, cfg.GetMinimumPlayPercent()) mgr := &Manager{ Config: cfg, @@ -313,6 +313,7 @@ func (s *Manager) RefreshFFMpeg(ctx context.Context) { s.FFMpeg = ffmpeg.NewEncoder(ffmpegPath) s.FFProbe = ffmpeg.NewFFProbe(ffprobePath) - s.FFMpeg.InitHWSupport(ctx) + // initialise hardware support with background context + s.FFMpeg.InitHWSupport(context.Background()) } } diff --git a/internal/manager/manager.go b/internal/manager/manager.go index 4827a3e3d..d3b91ec29 100644 --- a/internal/manager/manager.go +++ b/internal/manager/manager.go @@ -219,8 +219,11 @@ func (s *Manager) Setup(ctx context.Context, input SetupInput) error { // paths since they must not be relative. The config file property is // resolved to an absolute path when stash is run normally, so convert // relative paths to absolute paths during setup. - configFile, _ := filepath.Abs(input.ConfigLocation) - + // #6287 - this should no longer be necessary since the ffmpeg code + // converts to absolute paths. Converting the config location to + // absolute means that scraper and plugin paths default to absolute + // which we don't want. 
+ configFile := input.ConfigLocation configDir := filepath.Dir(configFile) if exists, _ := fsutil.DirExists(configDir); !exists { @@ -262,6 +265,10 @@ func (s *Manager) Setup(ctx context.Context, input SetupInput) error { cfg.SetString(config.Cache, input.CacheLocation) } + if input.SFWContentMode { + cfg.SetBool(config.SFWContentMode, true) + } + if input.StoreBlobsInDatabase { cfg.SetInterface(config.BlobsStorage, config.BlobStorageTypeDatabase) } else { @@ -306,41 +313,6 @@ func (s *Manager) validateFFmpeg() error { return nil } -func (s *Manager) BackupDatabase(download bool) (string, string, error) { - var backupPath string - var backupName string - if download { - backupDir := s.Paths.Generated.Downloads - if err := fsutil.EnsureDir(backupDir); err != nil { - return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err) - } - f, err := os.CreateTemp(backupDir, "backup*.sqlite") - if err != nil { - return "", "", err - } - - backupPath = f.Name() - backupName = s.Database.DatabaseBackupPath("") - f.Close() - } else { - backupDir := s.Config.GetBackupDirectoryPathOrDefault() - if backupDir != "" { - if err := fsutil.EnsureDir(backupDir); err != nil { - return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err) - } - } - backupPath = s.Database.DatabaseBackupPath(backupDir) - backupName = filepath.Base(backupPath) - } - - err := s.Database.Backup(backupPath) - if err != nil { - return "", "", err - } - - return backupPath, backupName, nil -} - func (s *Manager) AnonymiseDatabase(download bool) (string, string, error) { var outPath string var outName string diff --git a/internal/manager/manager_tasks.go b/internal/manager/manager_tasks.go index b85a4c2cf..76938e9ff 100644 --- a/internal/manager/manager_tasks.go +++ b/internal/manager/manager_tasks.go @@ -74,6 +74,28 @@ func getScanPaths(inputPaths []string) []*config.StashConfig { return ret } +// Filters the input array for paths that are within the paths 
managed by stash +func filterStashPaths(inputPaths []string) []string { + if len(inputPaths) == 0 { + return inputPaths + } + + stashPaths := config.GetInstance().GetStashPaths() + + var ret []string + for _, p := range inputPaths { + s := stashPaths.GetStashFromDirPath(p) + if s == nil { + logger.Warnf("%s is not in the configured stash paths", p) + continue + } + + ret = append(ret, p) + } + + return ret +} + // ScanSubscribe subscribes to a notification that is triggered when a // scan or clean is complete. func (s *Manager) ScanSubscribe(ctx context.Context) <-chan bool { @@ -100,6 +122,8 @@ func (s *Manager) Scan(ctx context.Context, input ScanMetadataInput) (int, error return 0, err } + cfg := config.GetInstance() + scanner := &file.Scanner{ Repository: file.NewRepository(s.Repository), FileDecorators: []file.Decorator{ @@ -118,6 +142,11 @@ func (s *Manager) Scan(ctx context.Context, input ScanMetadataInput) (int, error }, FingerprintCalculator: &fingerprintCalculator{s.Config}, FS: &file.OsFS{}, + ZipFileExtensions: cfg.GetGalleryExtensions(), + // ScanFilters is set in ScanJob.Execute + // HandlerRequiredFilters is set in ScanJob.Execute + RootPaths: cfg.GetStashPaths().Paths(), + Rescan: input.Rescan, } scanJob := ScanJob{ @@ -285,6 +314,8 @@ type CleanMetadataInput struct { Paths []string `json:"paths"` // Do a dry run. 
Don't delete any files DryRun bool `json:"dryRun"` + + IgnoreZipFileContents bool `json:"ignoreZipFileContents"` } func (s *Manager) Clean(ctx context.Context, input CleanMetadataInput) int { @@ -294,6 +325,7 @@ func (s *Manager) Clean(ctx context.Context, input CleanMetadataInput) int { Handlers: []file.CleanHandler{ &cleanHandler{}, }, + TrashPath: s.Config.GetDeleteTrashPath(), } j := cleanJob{ @@ -364,139 +396,182 @@ func (s *Manager) MigrateHash(ctx context.Context) int { return s.JobManager.Add(ctx, "Migrating scene hashes...", j) } -// If neither ids nor names are set, tag all items +// batchTagType indicates which batch tagging mode to use +type batchTagType int + +const ( + batchTagByIds batchTagType = iota + batchTagByNamesOrStashIds + batchTagAll +) + +// getBatchTagType determines the batch tag mode based on the input +func (input StashBoxBatchTagInput) getBatchTagType(hasPerformerFields bool) batchTagType { + switch { + case len(input.Ids) > 0: + return batchTagByIds + case hasPerformerFields && len(input.PerformerIds) > 0: + return batchTagByIds + case len(input.StashIDs) > 0 || len(input.Names) > 0: + return batchTagByNamesOrStashIds + case hasPerformerFields && len(input.PerformerNames) > 0: + return batchTagByNamesOrStashIds + default: + return batchTagAll + } +} + +// Accepts either ids, or a combination of names and stash_ids. +// If none are set, then all existing items will be tagged. type StashBoxBatchTagInput struct { - // Stash endpoint to use for the tagging - deprecated - use StashBoxEndpoint + // Stash endpoint to use for the tagging + // + // Deprecated: use StashBoxEndpoint Endpoint *int `json:"endpoint"` StashBoxEndpoint *string `json:"stash_box_endpoint"` // Fields to exclude when executing the tagging ExcludeFields []string `json:"exclude_fields"` // Refresh items already tagged by StashBox if true. 
Only tag items with no StashBox tagging if false Refresh bool `json:"refresh"` - // If batch adding studios, should their parent studios also be created? + // If batch adding studios or tags, should their parent entities also be created? CreateParent bool `json:"createParent"` - // If set, only tag these ids + // IDs in stash of the items to update. + // If set, names and stash_ids fields will be ignored. Ids []string `json:"ids"` - // If set, only tag these names + // Names of the items in the stash-box instance to search for and create Names []string `json:"names"` - // If set, only tag these performer ids + // Stash IDs of the items in the stash-box instance to search for and create + StashIDs []string `json:"stash_ids"` + // IDs in stash of the performers to update // - // Deprecated: please use Ids + // Deprecated: use Ids PerformerIds []string `json:"performer_ids"` - // If set, only tag these performer names + // Names of the performers in the stash-box instance to search for and create // - // Deprecated: please use Names + // Deprecated: use Names PerformerNames []string `json:"performer_names"` } +func (s *Manager) batchTagPerformersByIds(ctx context.Context, input StashBoxBatchTagInput, box *models.StashBox) ([]Task, error) { + var tasks []Task + + err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { + performerQuery := s.Repository.Performer + + ids := input.Ids + if len(ids) == 0 { + ids = input.PerformerIds //nolint:staticcheck + } + + for _, performerID := range ids { + if id, err := strconv.Atoi(performerID); err == nil { + performer, err := performerQuery.Find(ctx, id) + if err != nil { + return err + } + + if err := performer.LoadStashIDs(ctx, performerQuery); err != nil { + return fmt.Errorf("loading performer stash ids: %w", err) + } + + hasStashID := performer.StashIDs.ForEndpoint(box.Endpoint) != nil + if (input.Refresh && hasStashID) || (!input.Refresh && !hasStashID) { + tasks = append(tasks, &stashBoxBatchPerformerTagTask{ + 
performer: performer, + box: box, + excludedFields: input.ExcludeFields, + }) + } + } + } + return nil + }) + + return tasks, err +} + +func (s *Manager) batchTagPerformersByNamesOrStashIds(input StashBoxBatchTagInput, box *models.StashBox) []Task { + var tasks []Task + + for i := range input.StashIDs { + stashID := input.StashIDs[i] + if len(stashID) > 0 { + tasks = append(tasks, &stashBoxBatchPerformerTagTask{ + stashID: &stashID, + box: box, + excludedFields: input.ExcludeFields, + }) + } + } + + names := input.Names + if len(names) == 0 { + names = input.PerformerNames //nolint:staticcheck + } + + for i := range names { + name := names[i] + if len(name) > 0 { + tasks = append(tasks, &stashBoxBatchPerformerTagTask{ + name: &name, + box: box, + excludedFields: input.ExcludeFields, + }) + } + } + + return tasks +} + +func (s *Manager) batchTagAllPerformers(ctx context.Context, input StashBoxBatchTagInput, box *models.StashBox) ([]Task, error) { + var tasks []Task + + err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { + performerQuery := s.Repository.Performer + var performers []*models.Performer + var err error + + performers, err = performerQuery.FindByStashIDStatus(ctx, input.Refresh, box.Endpoint) + + if err != nil { + return fmt.Errorf("error querying performers: %v", err) + } + + for _, performer := range performers { + if err := performer.LoadStashIDs(ctx, performerQuery); err != nil { + return fmt.Errorf("error loading stash ids for performer %s: %v", performer.Name, err) + } + + tasks = append(tasks, &stashBoxBatchPerformerTagTask{ + performer: performer, + box: box, + excludedFields: input.ExcludeFields, + }) + } + return nil + }) + + return tasks, err +} + func (s *Manager) StashBoxBatchPerformerTag(ctx context.Context, box *models.StashBox, input StashBoxBatchTagInput) int { j := job.MakeJobExec(func(ctx context.Context, progress *job.Progress) error { logger.Infof("Initiating stash-box batch performer tag") - var tasks 
[]StashBoxBatchTagTask + var tasks []Task + var err error - // The gocritic linter wants to turn this ifElseChain into a switch. - // however, such a switch would contain quite large blocks for each section - // and would arguably be hard to read. - // - // This is why we mark this section nolint. In principle, we should look to - // rewrite the section at some point, to avoid the linter warning. - if len(input.Ids) > 0 || len(input.PerformerIds) > 0 { //nolint:gocritic - // The user has chosen only to tag the items on the current page - if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { - performerQuery := s.Repository.Performer + switch input.getBatchTagType(true) { + case batchTagByIds: + tasks, err = s.batchTagPerformersByIds(ctx, input, box) + case batchTagByNamesOrStashIds: + tasks = s.batchTagPerformersByNamesOrStashIds(input, box) + case batchTagAll: + tasks, err = s.batchTagAllPerformers(ctx, input, box) + } - idsToUse := input.PerformerIds - if len(input.Ids) > 0 { - idsToUse = input.Ids - } - - for _, performerID := range idsToUse { - if id, err := strconv.Atoi(performerID); err == nil { - performer, err := performerQuery.Find(ctx, id) - if err == nil { - if err := performer.LoadStashIDs(ctx, performerQuery); err != nil { - return fmt.Errorf("loading performer stash ids: %w", err) - } - - // Check if the user wants to refresh existing or new items - hasStashID := performer.StashIDs.ForEndpoint(box.Endpoint) != nil - if (input.Refresh && hasStashID) || (!input.Refresh && !hasStashID) { - tasks = append(tasks, StashBoxBatchTagTask{ - performer: performer, - refresh: input.Refresh, - box: box, - excludedFields: input.ExcludeFields, - taskType: Performer, - }) - } - } else { - return err - } - } - } - return nil - }); err != nil { - return err - } - } else if len(input.Names) > 0 || len(input.PerformerNames) > 0 { - // The user is batch adding performers - namesToUse := input.PerformerNames - if len(input.Names) > 0 { - namesToUse = 
input.Names - } - - for i := range namesToUse { - name := namesToUse[i] - if len(name) > 0 { - tasks = append(tasks, StashBoxBatchTagTask{ - name: &name, - refresh: false, - box: box, - excludedFields: input.ExcludeFields, - taskType: Performer, - }) - } - } - } else { //nolint:gocritic - // The gocritic linter wants to fold this if-block into the else on the line above. - // However, this doesn't really help with readability of the current section. Mark it - // as nolint for now. In the future we'd like to rewrite this code by factoring some of - // this into separate functions. - - // The user has chosen to tag every item in their database - if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { - performerQuery := s.Repository.Performer - var performers []*models.Performer - var err error - - if input.Refresh { - performers, err = performerQuery.FindByStashIDStatus(ctx, true, box.Endpoint) - } else { - performers, err = performerQuery.FindByStashIDStatus(ctx, false, box.Endpoint) - } - - if err != nil { - return fmt.Errorf("error querying performers: %v", err) - } - - for _, performer := range performers { - if err := performer.LoadStashIDs(ctx, performerQuery); err != nil { - return fmt.Errorf("error loading stash ids for performer %s: %v", performer.Name, err) - } - - tasks = append(tasks, StashBoxBatchTagTask{ - performer: performer, - refresh: input.Refresh, - box: box, - excludedFields: input.ExcludeFields, - taskType: Performer, - }) - } - return nil - }); err != nil { - return err - } + if err != nil { + return err } if len(tasks) == 0 { @@ -508,7 +583,7 @@ func (s *Manager) StashBoxBatchPerformerTag(ctx context.Context, box *models.Sta logger.Infof("Starting stash-box batch operation for %d performers", len(tasks)) for _, task := range tasks { - progress.ExecuteTask(task.Description(), func() { + progress.ExecuteTask(task.GetDescription(), func() { task.Start(ctx) }) @@ -521,103 +596,116 @@ func (s *Manager) StashBoxBatchPerformerTag(ctx 
context.Context, box *models.Sta return s.JobManager.Add(ctx, "Batch stash-box performer tag...", j) } +func (s *Manager) batchTagStudiosByIds(ctx context.Context, input StashBoxBatchTagInput, box *models.StashBox) ([]Task, error) { + var tasks []Task + + err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { + studioQuery := s.Repository.Studio + + for _, studioID := range input.Ids { + if id, err := strconv.Atoi(studioID); err == nil { + studio, err := studioQuery.Find(ctx, id) + if err != nil { + return err + } + + if err := studio.LoadStashIDs(ctx, studioQuery); err != nil { + return fmt.Errorf("loading studio stash ids: %w", err) + } + + hasStashID := studio.StashIDs.ForEndpoint(box.Endpoint) != nil + if (input.Refresh && hasStashID) || (!input.Refresh && !hasStashID) { + tasks = append(tasks, &stashBoxBatchStudioTagTask{ + studio: studio, + createParent: input.CreateParent, + box: box, + excludedFields: input.ExcludeFields, + }) + } + } + } + return nil + }) + + return tasks, err +} + +func (s *Manager) batchTagStudiosByNamesOrStashIds(input StashBoxBatchTagInput, box *models.StashBox) []Task { + var tasks []Task + + for i := range input.StashIDs { + stashID := input.StashIDs[i] + if len(stashID) > 0 { + tasks = append(tasks, &stashBoxBatchStudioTagTask{ + stashID: &stashID, + createParent: input.CreateParent, + box: box, + excludedFields: input.ExcludeFields, + }) + } + } + + for i := range input.Names { + name := input.Names[i] + if len(name) > 0 { + tasks = append(tasks, &stashBoxBatchStudioTagTask{ + name: &name, + createParent: input.CreateParent, + box: box, + excludedFields: input.ExcludeFields, + }) + } + } + + return tasks +} + +func (s *Manager) batchTagAllStudios(ctx context.Context, input StashBoxBatchTagInput, box *models.StashBox) ([]Task, error) { + var tasks []Task + + err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { + studioQuery := s.Repository.Studio + var studios []*models.Studio + var err error + + studios, 
err = studioQuery.FindByStashIDStatus(ctx, input.Refresh, box.Endpoint) + + if err != nil { + return fmt.Errorf("error querying studios: %v", err) + } + + for _, studio := range studios { + tasks = append(tasks, &stashBoxBatchStudioTagTask{ + studio: studio, + createParent: input.CreateParent, + box: box, + excludedFields: input.ExcludeFields, + }) + } + return nil + }) + + return tasks, err +} + func (s *Manager) StashBoxBatchStudioTag(ctx context.Context, box *models.StashBox, input StashBoxBatchTagInput) int { j := job.MakeJobExec(func(ctx context.Context, progress *job.Progress) error { logger.Infof("Initiating stash-box batch studio tag") - var tasks []StashBoxBatchTagTask + var tasks []Task + var err error - // The gocritic linter wants to turn this ifElseChain into a switch. - // however, such a switch would contain quite large blocks for each section - // and would arguably be hard to read. - // - // This is why we mark this section nolint. In principle, we should look to - // rewrite the section at some point, to avoid the linter warning. 
- if len(input.Ids) > 0 { //nolint:gocritic - // The user has chosen only to tag the items on the current page - if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { - studioQuery := s.Repository.Studio + switch input.getBatchTagType(false) { + case batchTagByIds: + tasks, err = s.batchTagStudiosByIds(ctx, input, box) + case batchTagByNamesOrStashIds: + tasks = s.batchTagStudiosByNamesOrStashIds(input, box) + case batchTagAll: + tasks, err = s.batchTagAllStudios(ctx, input, box) + } - for _, studioID := range input.Ids { - if id, err := strconv.Atoi(studioID); err == nil { - studio, err := studioQuery.Find(ctx, id) - if err == nil { - if err := studio.LoadStashIDs(ctx, studioQuery); err != nil { - return fmt.Errorf("loading studio stash ids: %w", err) - } - - // Check if the user wants to refresh existing or new items - hasStashID := studio.StashIDs.ForEndpoint(box.Endpoint) != nil - if (input.Refresh && hasStashID) || (!input.Refresh && !hasStashID) { - tasks = append(tasks, StashBoxBatchTagTask{ - studio: studio, - refresh: input.Refresh, - createParent: input.CreateParent, - box: box, - excludedFields: input.ExcludeFields, - taskType: Studio, - }) - } - } else { - return err - } - } - } - return nil - }); err != nil { - logger.Error(err.Error()) - } - } else if len(input.Names) > 0 { - // The user is batch adding studios - for i := range input.Names { - name := input.Names[i] - if len(name) > 0 { - tasks = append(tasks, StashBoxBatchTagTask{ - name: &name, - refresh: false, - createParent: input.CreateParent, - box: box, - excludedFields: input.ExcludeFields, - taskType: Studio, - }) - } - } - } else { //nolint:gocritic - // The gocritic linter wants to fold this if-block into the else on the line above. - // However, this doesn't really help with readability of the current section. Mark it - // as nolint for now. In the future we'd like to rewrite this code by factoring some of - // this into separate functions. 
- - // The user has chosen to tag every item in their database - if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { - studioQuery := s.Repository.Studio - var studios []*models.Studio - var err error - - if input.Refresh { - studios, err = studioQuery.FindByStashIDStatus(ctx, true, box.Endpoint) - } else { - studios, err = studioQuery.FindByStashIDStatus(ctx, false, box.Endpoint) - } - - if err != nil { - return fmt.Errorf("error querying studios: %v", err) - } - - for _, studio := range studios { - tasks = append(tasks, StashBoxBatchTagTask{ - studio: studio, - refresh: input.Refresh, - createParent: input.CreateParent, - box: box, - excludedFields: input.ExcludeFields, - taskType: Studio, - }) - } - return nil - }); err != nil { - return err - } + if err != nil { + return err } if len(tasks) == 0 { @@ -629,7 +717,7 @@ func (s *Manager) StashBoxBatchStudioTag(ctx context.Context, box *models.StashB logger.Infof("Starting stash-box batch operation for %d studios", len(tasks)) for _, task := range tasks { - progress.ExecuteTask(task.Description(), func() { + progress.ExecuteTask(task.GetDescription(), func() { task.Start(ctx) }) @@ -641,3 +729,137 @@ func (s *Manager) StashBoxBatchStudioTag(ctx context.Context, box *models.StashB return s.JobManager.Add(ctx, "Batch stash-box studio tag...", j) } + +func (s *Manager) batchTagTagsByIds(ctx context.Context, input StashBoxBatchTagInput, box *models.StashBox) ([]Task, error) { + var tasks []Task + + err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { + tagQuery := s.Repository.Tag + + for _, tagID := range input.Ids { + if id, err := strconv.Atoi(tagID); err == nil { + t, err := tagQuery.Find(ctx, id) + if err != nil { + return err + } + + if err := t.LoadStashIDs(ctx, tagQuery); err != nil { + return fmt.Errorf("loading tag stash ids: %w", err) + } + + hasStashID := t.StashIDs.ForEndpoint(box.Endpoint) != nil + if (input.Refresh && hasStashID) || (!input.Refresh && !hasStashID) { + 
tasks = append(tasks, &stashBoxBatchTagTagTask{ + tag: t, + createParent: input.CreateParent, + box: box, + excludedFields: input.ExcludeFields, + }) + } + } + } + return nil + }) + + return tasks, err +} + +func (s *Manager) batchTagTagsByNamesOrStashIds(input StashBoxBatchTagInput, box *models.StashBox) []Task { + var tasks []Task + + for i := range input.StashIDs { + stashID := input.StashIDs[i] + if len(stashID) > 0 { + tasks = append(tasks, &stashBoxBatchTagTagTask{ + stashID: &stashID, + createParent: input.CreateParent, + box: box, + excludedFields: input.ExcludeFields, + }) + } + } + + for i := range input.Names { + name := input.Names[i] + if len(name) > 0 { + tasks = append(tasks, &stashBoxBatchTagTagTask{ + name: &name, + createParent: input.CreateParent, + box: box, + excludedFields: input.ExcludeFields, + }) + } + } + + return tasks +} + +func (s *Manager) batchTagAllTags(ctx context.Context, input StashBoxBatchTagInput, box *models.StashBox) ([]Task, error) { + var tasks []Task + + err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { + tagQuery := s.Repository.Tag + var tags []*models.Tag + var err error + + tags, err = tagQuery.FindByStashIDStatus(ctx, input.Refresh, box.Endpoint) + + if err != nil { + return fmt.Errorf("error querying tags: %v", err) + } + + for _, t := range tags { + tasks = append(tasks, &stashBoxBatchTagTagTask{ + tag: t, + createParent: input.CreateParent, + box: box, + excludedFields: input.ExcludeFields, + }) + } + return nil + }) + + return tasks, err +} + +func (s *Manager) StashBoxBatchTagTag(ctx context.Context, box *models.StashBox, input StashBoxBatchTagInput) int { + j := job.MakeJobExec(func(ctx context.Context, progress *job.Progress) error { + logger.Infof("Initiating stash-box batch tag tag") + + var tasks []Task + var err error + + switch input.getBatchTagType(false) { + case batchTagByIds: + tasks, err = s.batchTagTagsByIds(ctx, input, box) + case batchTagByNamesOrStashIds: + tasks = 
s.batchTagTagsByNamesOrStashIds(input, box) + case batchTagAll: + tasks, err = s.batchTagAllTags(ctx, input, box) + } + + if err != nil { + return err + } + + if len(tasks) == 0 { + return nil + } + + progress.SetTotal(len(tasks)) + + logger.Infof("Starting stash-box batch operation for %d tags", len(tasks)) + + for _, task := range tasks { + progress.ExecuteTask(task.GetDescription(), func() { + task.Start(ctx) + }) + + progress.Increment() + } + + return nil + }) + + return s.JobManager.Add(ctx, "Batch stash-box tag tag...", j) +} diff --git a/internal/manager/models.go b/internal/manager/models.go index 3e96e6182..b7c7232c5 100644 --- a/internal/manager/models.go +++ b/internal/manager/models.go @@ -21,6 +21,7 @@ type SetupInput struct { // Empty to indicate $HOME/.stash/config.yml default ConfigLocation string `json:"configLocation"` Stashes []*config.StashConfigInput `json:"stashes"` + SFWContentMode bool `json:"sfwContentMode"` // Empty to indicate default DatabaseFile string `json:"databaseFile"` // Empty to indicate default diff --git a/internal/manager/repository.go b/internal/manager/repository.go index 8d4ef1137..65514ed1d 100644 --- a/internal/manager/repository.go +++ b/internal/manager/repository.go @@ -10,17 +10,17 @@ import ( ) type SceneService interface { - Create(ctx context.Context, input *models.Scene, fileIDs []models.FileID, coverImage []byte) (*models.Scene, error) + Create(ctx context.Context, input models.CreateSceneInput) (*models.Scene, error) AssignFile(ctx context.Context, sceneID int, fileID models.FileID) error Merge(ctx context.Context, sourceIDs []int, destinationID int, fileDeleter *scene.FileDeleter, options scene.MergeOptions) error - Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile bool) error + Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error FindByIDs(ctx context.Context, ids 
[]int, load ...scene.LoadRelationshipOption) ([]*models.Scene, error) sceneFingerprintGetter } type ImageService interface { - Destroy(ctx context.Context, image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error + Destroy(ctx context.Context, image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error DestroyZipImages(ctx context.Context, zipFile models.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) } @@ -31,7 +31,7 @@ type GalleryService interface { SetCover(ctx context.Context, g *models.Gallery, coverImageId int) error ResetCover(ctx context.Context, g *models.Gallery) error - Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error) + Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) ([]*models.Image, error) ValidateImageGalleryChange(ctx context.Context, i *models.Image, updateIDs models.UpdateIDs) error @@ -39,7 +39,7 @@ type GalleryService interface { } type GroupService interface { - Create(ctx context.Context, group *models.Group, frontimageData []byte, backimageData []byte) error + Create(ctx context.Context, input *models.CreateGroupInput) error UpdatePartial(ctx context.Context, id int, updatedGroup models.GroupPartial, frontImage group.ImageInput, backImage group.ImageInput) (*models.Group, error) AddSubGroups(ctx context.Context, groupID int, subGroups []models.GroupIDDescription, insertIndex *int) error diff --git a/internal/manager/running_streams.go b/internal/manager/running_streams.go index c6b0c4665..18ac3b042 100644 --- a/internal/manager/running_streams.go +++ b/internal/manager/running_streams.go @@ -3,7 +3,9 @@ package manager import ( "context" "errors" + "mime" "net/http" + "path/filepath" "github.com/stashapp/stash/internal/manager/config" 
"github.com/stashapp/stash/internal/static" @@ -46,14 +48,17 @@ func (s *SceneServer) StreamSceneDirect(scene *models.Scene, w http.ResponseWrit sceneHash := scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()) - filepath := GetInstance().Paths.Scene.GetStreamPath(scene.Path, sceneHash) + fp := GetInstance().Paths.Scene.GetStreamPath(scene.Path, sceneHash) streamRequestCtx := ffmpeg.NewStreamRequestContext(w, r) // #2579 - hijacking and closing the connection here causes video playback to fail in Safari // We trust that the request context will be closed, so we don't need to call Cancel on the // returned context here. - _ = GetInstance().ReadLockManager.ReadLock(streamRequestCtx, filepath) - http.ServeFile(w, r, filepath) + _ = GetInstance().ReadLockManager.ReadLock(streamRequestCtx, fp) + _, filename := filepath.Split(fp) + contentDisposition := mime.FormatMediaType("inline", map[string]string{"filename": filename}) + w.Header().Set("Content-Disposition", contentDisposition) + http.ServeFile(w, r, fp) } func (s *SceneServer) ServeScreenshot(scene *models.Scene, w http.ResponseWriter, r *http.Request) { diff --git a/internal/manager/scan_stashignore_test.go b/internal/manager/scan_stashignore_test.go new file mode 100644 index 000000000..2745ff970 --- /dev/null +++ b/internal/manager/scan_stashignore_test.go @@ -0,0 +1,268 @@ +//go:build integration +// +build integration + +package manager + +import ( + "context" + "io/fs" + "os" + "path/filepath" + "testing" + + "github.com/stashapp/stash/pkg/file" + + // Necessary to register custom migrations. + _ "github.com/stashapp/stash/pkg/sqlite/migrations" +) + +// stashIgnorePathFilter wraps StashIgnoreFilter to implement PathFilter for testing. +// It provides a fixed library root for the filter. 
+type stashIgnorePathFilter struct { + filter *file.StashIgnoreFilter + libraryRoot string +} + +func (f *stashIgnorePathFilter) Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool { + return f.filter.Accept(ctx, path, info, f.libraryRoot, zipFilePath) +} + +// createTestFileOnDisk creates a file with some content. +func createTestFileOnDisk(t *testing.T, dir, name string) string { + t.Helper() + path := filepath.Join(dir, name) + if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil { + t.Fatalf("failed to create directory for %s: %v", path, err) + } + // Write some content so the file has a non-zero size. + if err := os.WriteFile(path, []byte("test content for "+name), 0644); err != nil { + t.Fatalf("failed to create file %s: %v", path, err) + } + return path +} + +// createStashIgnoreFile creates a .stashignore file with the given content. +func createStashIgnoreFile(t *testing.T, dir, content string) { + t.Helper() + path := filepath.Join(dir, ".stashignore") + if err := os.WriteFile(path, []byte(content), 0644); err != nil { + t.Fatalf("failed to create .stashignore: %v", err) + } +} + +func TestScannerWithStashIgnore(t *testing.T) { + // Create temp directory structure. + tmpDir := t.TempDir() + + // Create test files. + createTestFileOnDisk(t, tmpDir, "video1.mp4") + createTestFileOnDisk(t, tmpDir, "video2.mp4") + createTestFileOnDisk(t, tmpDir, "ignore_me.mp4") + createTestFileOnDisk(t, tmpDir, "subdir/video3.mp4") + createTestFileOnDisk(t, tmpDir, "subdir/skip_this.mp4") + createTestFileOnDisk(t, tmpDir, "excluded_dir/video4.mp4") + createTestFileOnDisk(t, tmpDir, "temp/processing.mp4") + + // Create .stashignore file. + stashignore := `# Ignore specific files +ignore_me.mp4 +subdir/skip_this.mp4 + +# Ignore directories +excluded_dir/ +temp/ +` + createStashIgnoreFile(t, tmpDir, stashignore) + + // Create stashignore filter with library root. 
+ stashIgnoreFilter := &stashIgnorePathFilter{ + filter: file.NewStashIgnoreFilter(), + libraryRoot: tmpDir, + } + + // Create scanner. + scanner := &file.Scanner{ + ScanFilters: []file.PathFilter{stashIgnoreFilter}, + } + + testScenarios := []struct { + path string + accepted bool + }{ + {filepath.Join(tmpDir, "video1.mp4"), true}, + {filepath.Join(tmpDir, "video2.mp4"), true}, + {filepath.Join(tmpDir, "ignore_me.mp4"), false}, + {filepath.Join(tmpDir, "subdir/video3.mp4"), true}, + {filepath.Join(tmpDir, "subdir/skip_this.mp4"), false}, + {filepath.Join(tmpDir, "excluded_dir/video4.mp4"), false}, + {filepath.Join(tmpDir, "temp/processing.mp4"), false}, + } + + ctx := context.Background() + + for _, scenario := range testScenarios { + info, err := os.Stat(scenario.path) + if err != nil { + t.Fatalf("failed to stat file %s: %v", scenario.path, err) + } + accepted := scanner.AcceptEntry(ctx, scenario.path, info, "") + + if accepted != scenario.accepted { + t.Errorf("unexpected accept result for %s: expected %v, got %v", + scenario.path, scenario.accepted, accepted) + } + } +} + +func TestScannerWithNestedStashIgnore(t *testing.T) { + // Create temp directory structure. + tmpDir := t.TempDir() + + // Create test files. + createTestFileOnDisk(t, tmpDir, "root.mp4") + createTestFileOnDisk(t, tmpDir, "root.tmp") + createTestFileOnDisk(t, tmpDir, "subdir/sub.mp4") + createTestFileOnDisk(t, tmpDir, "subdir/sub.log") + createTestFileOnDisk(t, tmpDir, "subdir/sub.tmp") + + // Root .stashignore excludes *.tmp. + createStashIgnoreFile(t, tmpDir, "*.tmp\n") + + // Subdir .stashignore excludes *.log. + createStashIgnoreFile(t, filepath.Join(tmpDir, "subdir"), "*.log\n") + + // Create stashignore filter with library root. + stashIgnoreFilter := &stashIgnorePathFilter{ + filter: file.NewStashIgnoreFilter(), + libraryRoot: tmpDir, + } + + // Create scanner. 
+ scanner := &file.Scanner{ + ScanFilters: []file.PathFilter{stashIgnoreFilter}, + } + + testScenarios := []struct { + path string + accepted bool + }{ + {filepath.Join(tmpDir, "root.mp4"), true}, + {filepath.Join(tmpDir, "root.tmp"), false}, + {filepath.Join(tmpDir, "subdir/sub.mp4"), true}, + {filepath.Join(tmpDir, "subdir/sub.log"), false}, + {filepath.Join(tmpDir, "subdir/sub.tmp"), false}, + } + + ctx := context.Background() + + for _, scenario := range testScenarios { + info, err := os.Stat(scenario.path) + if err != nil { + t.Fatalf("failed to stat file %s: %v", scenario.path, err) + } + accepted := scanner.AcceptEntry(ctx, scenario.path, info, "") + + if accepted != scenario.accepted { + t.Errorf("unexpected accept result for %s: expected %v, got %v", + scenario.path, scenario.accepted, accepted) + } + } +} + +func TestScannerWithoutStashIgnore(t *testing.T) { + // Create temp directory structure (no .stashignore). + tmpDir := t.TempDir() + + // Create test files. + createTestFileOnDisk(t, tmpDir, "video1.mp4") + createTestFileOnDisk(t, tmpDir, "video2.mp4") + createTestFileOnDisk(t, tmpDir, "subdir/video3.mp4") + + // Create stashignore filter with library root (but no .stashignore file exists). + stashIgnoreFilter := &stashIgnorePathFilter{ + filter: file.NewStashIgnoreFilter(), + libraryRoot: tmpDir, + } + + // Create scanner. 
+ scanner := &file.Scanner{ + ScanFilters: []file.PathFilter{stashIgnoreFilter}, + } + + testScenarios := []struct { + path string + accepted bool + }{ + {filepath.Join(tmpDir, "video1.mp4"), true}, + {filepath.Join(tmpDir, "video2.mp4"), true}, + {filepath.Join(tmpDir, "subdir/video3.mp4"), true}, + } + + ctx := context.Background() + + for _, scenario := range testScenarios { + info, err := os.Stat(scenario.path) + if err != nil { + t.Fatalf("failed to stat file %s: %v", scenario.path, err) + } + accepted := scanner.AcceptEntry(ctx, scenario.path, info, "") + + if accepted != scenario.accepted { + t.Errorf("unexpected accept result for %s: expected %v, got %v", + scenario.path, scenario.accepted, accepted) + } + } +} + +func TestScannerWithNegationPattern(t *testing.T) { + // Create temp directory structure. + tmpDir := t.TempDir() + + // Create test files. + createTestFileOnDisk(t, tmpDir, "file1.tmp") + createTestFileOnDisk(t, tmpDir, "file2.tmp") + createTestFileOnDisk(t, tmpDir, "keep_this.tmp") + createTestFileOnDisk(t, tmpDir, "video.mp4") + + // Create .stashignore with negation. + stashignore := `*.tmp +!keep_this.tmp +` + createStashIgnoreFile(t, tmpDir, stashignore) + + // Create stashignore filter with library root. + stashIgnoreFilter := &stashIgnorePathFilter{ + filter: file.NewStashIgnoreFilter(), + libraryRoot: tmpDir, + } + + // Create scanner. 
+ scanner := &file.Scanner{ + ScanFilters: []file.PathFilter{stashIgnoreFilter}, + } + + testScenarios := []struct { + path string + accepted bool + }{ + {filepath.Join(tmpDir, "file1.tmp"), false}, + {filepath.Join(tmpDir, "file2.tmp"), false}, + {filepath.Join(tmpDir, "keep_this.tmp"), true}, + {filepath.Join(tmpDir, "video.mp4"), true}, + } + + ctx := context.Background() + + for _, scenario := range testScenarios { + info, err := os.Stat(scenario.path) + if err != nil { + t.Fatalf("failed to stat file %s: %v", scenario.path, err) + } + accepted := scanner.AcceptEntry(ctx, scenario.path, info, "") + + if accepted != scenario.accepted { + t.Errorf("unexpected accept result for %s: expected %v, got %v", + scenario.path, scenario.accepted, accepted) + } + } +} diff --git a/internal/manager/task/clean_generated.go b/internal/manager/task/clean_generated.go index 902989046..a59bda6d1 100644 --- a/internal/manager/task/clean_generated.go +++ b/internal/manager/task/clean_generated.go @@ -565,6 +565,7 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job. j.setProgressFromFilename(sceneHash[0:2], progress) // check if the scene exists + var walkErr error if err := j.Repository.WithReadTxn(ctx, func(ctx context.Context) error { var err error scenes, err = j.getScenesWithHash(ctx, sceneHash) @@ -575,15 +576,18 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job. if len(scenes) == 0 { j.logDelete("deleting unused marker directory: %s", sceneHash) j.deleteDir(path) - } else { - // get the markers now - for _, scene := range scenes { - thisMarkers, err := j.Repository.SceneMarker.FindBySceneID(ctx, scene.ID) - if err != nil { - return fmt.Errorf("error getting markers for scene: %v", err) - } - markers = append(markers, thisMarkers...) 
+ // #5911 - we've just deleted the directory, so skip it in the walk to avoid errors + walkErr = fs.SkipDir + return nil + } + + // get the markers now + for _, scene := range scenes { + thisMarkers, err := j.Repository.SceneMarker.FindBySceneID(ctx, scene.ID) + if err != nil { + return fmt.Errorf("error getting markers for scene: %v", err) } + markers = append(markers, thisMarkers...) } return nil @@ -591,7 +595,7 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job. logger.Error(err.Error()) } - return nil + return walkErr } filename := info.Name() diff --git a/internal/manager/task_clean.go b/internal/manager/task_clean.go index 9690cf4c8..67b7038b6 100644 --- a/internal/manager/task_clean.go +++ b/internal/manager/task_clean.go @@ -40,9 +40,10 @@ func (j *cleanJob) Execute(ctx context.Context, progress *job.Progress) error { } j.cleaner.Clean(ctx, file.CleanOptions{ - Paths: j.input.Paths, - DryRun: j.input.DryRun, - PathFilter: newCleanFilter(instance.Config), + Paths: j.input.Paths, + DryRun: j.input.DryRun, + IgnoreZipFileContents: j.input.IgnoreZipFileContents, + PathFilter: newCleanFilter(instance.Config), }, progress) if job.IsCancelled(ctx) { @@ -154,11 +155,12 @@ func newCleanFilter(c *config.Config) *cleanFilter { generatedPath: c.GetGeneratedPath(), videoExcludeRegex: generateRegexps(c.GetExcludes()), imageExcludeRegex: generateRegexps(c.GetImageExcludes()), + stashIgnoreFilter: file.NewStashIgnoreFilter(), }, } } -func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool { +func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool { // #1102 - clean anything in generated path generatedPath := f.generatedPath @@ -173,12 +175,18 @@ func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) } if stash == nil { - logger.Infof("%s not in any stash library directories. 
Marking to clean: \"%s\"", fileOrFolder, path) + logger.Infof("%s not in any stash library directories. Marking to clean: %q", fileOrFolder, path) return false } if fsutil.IsPathInDir(generatedPath, path) { - logger.Infof("%s is in generated path. Marking to clean: \"%s\"", fileOrFolder, path) + logger.Infof("%s is in generated path. Marking to clean: %q", fileOrFolder, path) + return false + } + + // Check .stashignore files, bounded to the library root. + if !f.stashIgnoreFilter.Accept(ctx, path, info, stash.Path, zipFilePath) { + logger.Infof("%s is excluded due to .stashignore. Marking to clean: %q", fileOrFolder, path) return false } @@ -300,7 +308,10 @@ func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *fil // only delete if the scene has no other files if len(scene.Files.List()) <= 1 { logger.Infof("Deleting scene %q since it has no other related files", scene.DisplayName()) - if err := mgr.SceneService.Destroy(ctx, scene, sceneFileDeleter, true, false); err != nil { + const deleteGenerated = true + const deleteFile = false + const destroyFileEntry = false + if err := mgr.SceneService.Destroy(ctx, scene, sceneFileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil { return err } @@ -421,7 +432,10 @@ func (h *cleanHandler) handleRelatedImages(ctx context.Context, fileDeleter *fil if len(i.Files.List()) <= 1 { logger.Infof("Deleting image %q since it has no other related files", i.DisplayName()) - if err := mgr.ImageService.Destroy(ctx, i, imageFileDeleter, true, false); err != nil { + const deleteGenerated = true + const deleteFile = false + const destroyFileEntry = false + if err := mgr.ImageService.Destroy(ctx, i, imageFileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil { return err } diff --git a/internal/manager/task_export.go b/internal/manager/task_export.go index 5f2897670..01bab9430 100644 --- a/internal/manager/task_export.go +++ b/internal/manager/task_export.go @@ -651,6 +651,7 @@ func 
(t *ExportTask) exportImage(ctx context.Context, wg *sync.WaitGroup, jobCha galleryReader := r.Gallery performerReader := r.Performer tagReader := r.Tag + imageReader := r.Image for s := range jobChan { imageHash := s.Checksum @@ -665,14 +666,17 @@ func (t *ExportTask) exportImage(ctx context.Context, wg *sync.WaitGroup, jobCha continue } - newImageJSON := image.ToBasicJSON(s) + newImageJSON, err := image.ToBasicJSON(ctx, imageReader, s) + if err != nil { + logger.Errorf("[images] <%s> error converting image to JSON: %v", imageHash, err) + continue + } // export files for _, f := range s.Files.List() { t.exportFile(f) } - var err error newImageJSON.Studio, err = image.GetStudioName(ctx, studioReader, s) if err != nil { logger.Errorf("[images] <%s> error getting image studio name: %v", imageHash, err) @@ -779,6 +783,7 @@ func (t *ExportTask) exportGallery(ctx context.Context, wg *sync.WaitGroup, jobC studioReader := r.Studio performerReader := r.Performer tagReader := r.Tag + galleryReader := r.Gallery galleryChapterReader := r.GalleryChapter for g := range jobChan { @@ -847,6 +852,12 @@ func (t *ExportTask) exportGallery(ctx context.Context, wg *sync.WaitGroup, jobC newGalleryJSON.Tags = tag.GetNames(tags) + newGalleryJSON.CustomFields, err = galleryReader.GetCustomFields(ctx, g.ID) + if err != nil { + logger.Errorf("[galleries] <%s> error getting gallery custom fields: %v", g.DisplayName(), err) + continue + } + if t.includeDependencies { if g.StudioID != nil { t.studios.IDs = sliceutil.AppendUnique(t.studios.IDs, *g.StudioID) diff --git a/internal/manager/task_generate.go b/internal/manager/task_generate.go index c28ffe55b..f2aab2b3c 100644 --- a/internal/manager/task_generate.go +++ b/internal/manager/task_generate.go @@ -29,6 +29,7 @@ type GenerateMetadataInput struct { // Generate transcodes even if not required ForceTranscodes bool `json:"forceTranscodes"` Phashes bool `json:"phashes"` + ImagePhashes bool `json:"imagePhashes"` InteractiveHeatmapsSpeeds bool 
`json:"interactiveHeatmapsSpeeds"` ClipPreviews bool `json:"clipPreviews"` ImageThumbnails bool `json:"imageThumbnails"` @@ -36,8 +37,14 @@ type GenerateMetadataInput struct { SceneIDs []string `json:"sceneIDs"` // marker ids to generate for MarkerIDs []string `json:"markerIDs"` + // image ids to generate for + ImageIDs []string `json:"imageIDs"` + // gallery ids to generate for + GalleryIDs []string `json:"galleryIDs"` // overwrite existing media Overwrite bool `json:"overwrite"` + // paths to run generate on, in addition to the other ID lists + Paths []string `json:"paths"` } type GeneratePreviewOptionsInput struct { @@ -73,6 +80,7 @@ type totalsGenerate struct { markers int64 transcodes int64 phashes int64 + imagePhashes int64 interactiveHeatmapSpeeds int64 clipPreviews int64 imageThumbnails int64 @@ -82,8 +90,9 @@ type totalsGenerate struct { func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error { var scenes []*models.Scene - var err error var markers []*models.SceneMarker + var images []*models.Image + var err error j.overwrite = j.input.Overwrite j.fileNamingAlgo = config.GetInstance().GetVideoFileNamingAlgorithm() @@ -105,6 +114,14 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error if err != nil { logger.Error(err.Error()) } + imageIDs, err := stringslice.StringSliceToIntSlice(j.input.ImageIDs) + if err != nil { + logger.Error(err.Error()) + } + galleryIDs, err := stringslice.StringSliceToIntSlice(j.input.GalleryIDs) + if err != nil { + logger.Error(err.Error()) + } g := &generate.Generator{ Encoder: instance.FFMpeg, @@ -118,8 +135,13 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error r := j.repository if err := r.WithReadTxn(ctx, func(ctx context.Context) error { qb := r.Scene - if len(j.input.SceneIDs) == 0 && len(j.input.MarkerIDs) == 0 { - j.queueTasks(ctx, g, queue) + if len(j.input.SceneIDs) == 0 && + len(j.input.MarkerIDs) == 0 && + len(j.input.ImageIDs) == 0 
&& + len(j.input.GalleryIDs) == 0 && + len(j.input.Paths) == 0 { + + j.queueTasks(ctx, g, nil, queue) } else { if len(j.input.SceneIDs) > 0 { scenes, err = qb.FindMany(ctx, sceneIDs) @@ -141,6 +163,38 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error j.queueMarkerJob(g, m, queue) } } + + if len(j.input.ImageIDs) > 0 { + images, err = r.Image.FindMany(ctx, imageIDs) + for _, i := range images { + if err := i.LoadFiles(ctx, r.Image); err != nil { + return err + } + + j.queueImageJob(g, i, queue) + } + } + + if len(j.input.GalleryIDs) > 0 { + for _, galleryID := range galleryIDs { + imgs, err := r.Image.FindByGalleryID(ctx, galleryID) + if err != nil { + return err + } + for _, img := range imgs { + if err := img.LoadFiles(ctx, r.Image); err != nil { + return err + } + + j.queueImageJob(g, img, queue) + } + } + } + + if len(j.input.Paths) > 0 { + paths := filterStashPaths(j.input.Paths) + j.queueTasks(ctx, g, paths, queue) + } } return nil @@ -172,14 +226,17 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error if j.input.Phashes { logMsg += fmt.Sprintf(" %d phashes", totals.phashes) } + if j.input.ImagePhashes { + logMsg += fmt.Sprintf(" %d image phashes", totals.imagePhashes) + } if j.input.InteractiveHeatmapsSpeeds { logMsg += fmt.Sprintf(" %d heatmaps & speeds", totals.interactiveHeatmapSpeeds) } if j.input.ClipPreviews { - logMsg += fmt.Sprintf(" %d Image Clip Previews", totals.clipPreviews) + logMsg += fmt.Sprintf(" %d image clip previews", totals.clipPreviews) } if j.input.ImageThumbnails { - logMsg += fmt.Sprintf(" %d Image Thumbnails", totals.imageThumbnails) + logMsg += fmt.Sprintf(" %d image thumbnails", totals.imageThumbnails) } if logMsg == "Generating" { logMsg = "Nothing selected to generate" @@ -231,17 +288,18 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error return nil } -func (j *GenerateJob) queueTasks(ctx context.Context, g *generate.Generator, queue 
chan<- Task) { +func (j *GenerateJob) queueTasks(ctx context.Context, g *generate.Generator, paths []string, queue chan<- Task) { j.totals = totalsGenerate{} - j.queueScenesTasks(ctx, g, queue) - j.queueImagesTasks(ctx, g, queue) + j.queueScenesTasks(ctx, g, paths, queue) + j.queueImagesTasks(ctx, g, paths, queue) } -func (j *GenerateJob) queueScenesTasks(ctx context.Context, g *generate.Generator, queue chan<- Task) { +func (j *GenerateJob) queueScenesTasks(ctx context.Context, g *generate.Generator, paths []string, queue chan<- Task) { const batchSize = 1000 findFilter := models.BatchFindFilter(batchSize) + sceneFilter := scene.FilterFromPaths(paths) r := j.repository @@ -250,7 +308,7 @@ func (j *GenerateJob) queueScenesTasks(ctx context.Context, g *generate.Generato return } - scenes, err := scene.Query(ctx, r.Scene, nil, findFilter) + scenes, err := scene.Query(ctx, r.Scene, sceneFilter, findFilter) if err != nil { logger.Errorf("Error encountered queuing files to scan: %s", err.Error()) return @@ -277,19 +335,20 @@ func (j *GenerateJob) queueScenesTasks(ctx context.Context, g *generate.Generato } } -func (j *GenerateJob) queueImagesTasks(ctx context.Context, g *generate.Generator, queue chan<- Task) { +func (j *GenerateJob) queueImagesTasks(ctx context.Context, g *generate.Generator, paths []string, queue chan<- Task) { const batchSize = 1000 findFilter := models.BatchFindFilter(batchSize) + imageFilter := image.FilterFromPaths(paths) r := j.repository - for more := j.input.ClipPreviews || j.input.ImageThumbnails; more; { + for more := j.input.ClipPreviews || j.input.ImageThumbnails || j.input.ImagePhashes; more; { if job.IsCancelled(ctx) { return } - images, err := image.Query(ctx, r.Image, nil, findFilter) + images, err := image.Query(ctx, r.Image, imageFilter, findFilter) if err != nil { logger.Errorf("Error encountered queuing files to scan: %s", err.Error()) return @@ -411,12 +470,13 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g 
*generate.Generator, } } - if j.input.Markers { + if j.input.Markers || j.input.MarkerImagePreviews || j.input.MarkerScreenshots { task := &GenerateMarkersTask{ repository: r, Scene: scene, Overwrite: j.overwrite, fileNamingAlgorithm: j.fileNamingAlgo, + VideoPreview: j.input.Markers, ImagePreview: j.input.MarkerImagePreviews, Screenshot: j.input.MarkerScreenshots, @@ -488,6 +548,9 @@ func (j *GenerateJob) queueMarkerJob(g *generate.Generator, marker *models.Scene Marker: marker, Overwrite: j.overwrite, fileNamingAlgorithm: j.fileNamingAlgo, + VideoPreview: j.input.Markers, + ImagePreview: j.input.MarkerImagePreviews, + Screenshot: j.input.MarkerScreenshots, generator: g, } j.totals.markers++ @@ -521,4 +584,23 @@ func (j *GenerateJob) queueImageJob(g *generate.Generator, image *models.Image, queue <- task } } + + if j.input.ImagePhashes { + // generate for all files in image + for _, f := range image.Files.List() { + if imageFile, ok := f.(*models.ImageFile); ok { + task := &GenerateImagePhashTask{ + repository: j.repository, + File: imageFile, + Overwrite: j.overwrite, + } + + if task.required() { + j.totals.imagePhashes++ + j.totals.tasks++ + queue <- task + } + } + } + } } diff --git a/internal/manager/task_generate_image_phash.go b/internal/manager/task_generate_image_phash.go new file mode 100644 index 000000000..a5c764df0 --- /dev/null +++ b/internal/manager/task_generate_image_phash.go @@ -0,0 +1,103 @@ +package manager + +import ( + "context" + "fmt" + + "github.com/stashapp/stash/pkg/hash/imagephash" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" +) + +type GenerateImagePhashTask struct { + repository models.Repository + File *models.ImageFile + Overwrite bool +} + +func (t *GenerateImagePhashTask) GetDescription() string { + return fmt.Sprintf("Generating phash for %s", t.File.Path) +} + +func (t *GenerateImagePhashTask) Start(ctx context.Context) { + if !t.required() { + return + } + + var hash int64 + set := false + + 
// #4393 - if there is a file with the same md5, we can use the same phash + // only use this if we're not overwriting + if !t.Overwrite { + existing, err := t.findExistingPhash(ctx) + if err != nil { + logger.Warnf("Error finding existing phash: %v", err) + } else if existing != nil { + logger.Infof("Using existing phash for %s", t.File.Path) + hash = existing.(int64) + set = true + } + } + + if !set { + generated, err := imagephash.Generate(instance.FFMpeg, t.File) + if err != nil { + logger.Errorf("Error generating phash for %q: %v", t.File.Path, err) + logErrorOutput(err) + return + } + + hash = int64(*generated) + } + + r := t.repository + if err := r.WithTxn(ctx, func(ctx context.Context) error { + t.File.Fingerprints = t.File.Fingerprints.AppendUnique(models.Fingerprint{ + Type: models.FingerprintTypePhash, + Fingerprint: hash, + }) + + return r.File.Update(ctx, t.File) + }); err != nil && ctx.Err() == nil { + logger.Errorf("Error setting phash: %v", err) + } +} + +func (t *GenerateImagePhashTask) findExistingPhash(ctx context.Context) (interface{}, error) { + r := t.repository + var ret interface{} + if err := r.WithReadTxn(ctx, func(ctx context.Context) error { + md5 := t.File.Fingerprints.Get(models.FingerprintTypeMD5) + + // find other files with the same md5 + files, err := r.File.FindByFingerprint(ctx, models.Fingerprint{ + Type: models.FingerprintTypeMD5, + Fingerprint: md5, + }) + if err != nil { + return fmt.Errorf("finding files by md5: %w", err) + } + + // find the first file with a phash + for _, file := range files { + if phash := file.Base().Fingerprints.Get(models.FingerprintTypePhash); phash != nil { + ret = phash + return nil + } + } + return nil + }); err != nil { + return nil, err + } + + return ret, nil +} + +func (t *GenerateImagePhashTask) required() bool { + if t.Overwrite { + return true + } + + return t.File.Fingerprints.Get(models.FingerprintTypePhash) == nil +} diff --git a/internal/manager/task_generate_image_thumbnail.go 
b/internal/manager/task_generate_image_thumbnail.go index 2d32e2d60..14518d2bb 100644 --- a/internal/manager/task_generate_image_thumbnail.go +++ b/internal/manager/task_generate_image_thumbnail.go @@ -4,6 +4,7 @@ import ( "context" "errors" "fmt" + "os/exec" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/image" @@ -20,6 +21,13 @@ func (t *GenerateImageThumbnailTask) GetDescription() string { return fmt.Sprintf("Generating Thumbnail for image %s", t.Image.Path) } +func (t *GenerateImageThumbnailTask) logStderr(err error) { + var exitErr *exec.ExitError + if errors.As(err, &exitErr) { + logger.Debugf("[generator] error output: %s", exitErr.Stderr) + } +} + func (t *GenerateImageThumbnailTask) Start(ctx context.Context) { if !t.required() { return @@ -46,14 +54,15 @@ func (t *GenerateImageThumbnailTask) Start(ctx context.Context) { if err != nil { // don't log for animated images if !errors.Is(err, image.ErrNotSupportedForThumbnail) { - logger.Errorf("[generator] getting thumbnail for image %s: %w", path, err) + logger.Errorf("[generator] getting thumbnail for image %s: %s", path, err.Error()) + t.logStderr(err) } return } err = fsutil.WriteFile(thumbPath, data) if err != nil { - logger.Errorf("[generator] writing thumbnail for image %s: %w", path, err) + logger.Errorf("[generator] writing thumbnail for image %s: %s", path, err.Error()) return } } diff --git a/internal/manager/task_generate_markers.go b/internal/manager/task_generate_markers.go index f37c7aed1..1da458ba8 100644 --- a/internal/manager/task_generate_markers.go +++ b/internal/manager/task_generate_markers.go @@ -18,6 +18,7 @@ type GenerateMarkersTask struct { Overwrite bool fileNamingAlgorithm models.HashAlgorithm + VideoPreview bool ImagePreview bool Screenshot bool @@ -107,11 +108,19 @@ func (t *GenerateMarkersTask) generateMarker(videoFile *models.VideoFile, scene sceneHash := scene.GetHash(t.fileNamingAlgorithm) seconds := float64(sceneMarker.Seconds) + // check if marker past 
duration + if seconds > float64(videoFile.Duration) { + logger.Warnf("[generator] scene marker at %.2f seconds exceeds video duration of %.2f seconds, skipping", seconds, float64(videoFile.Duration)) + return + } + g := t.generator - if err := g.MarkerPreviewVideo(context.TODO(), videoFile.Path, sceneHash, seconds, sceneMarker.EndSeconds, instance.Config.GetPreviewAudio()); err != nil { - logger.Errorf("[generator] failed to generate marker video: %v", err) - logErrorOutput(err) + if t.VideoPreview { + if err := g.MarkerPreviewVideo(context.TODO(), videoFile.Path, sceneHash, seconds, sceneMarker.EndSeconds, instance.Config.GetPreviewAudio()); err != nil { + logger.Errorf("[generator] failed to generate marker video: %v", err) + logErrorOutput(err) + } } if t.ImagePreview { @@ -158,7 +167,7 @@ func (t *GenerateMarkersTask) markerExists(sceneChecksum string, seconds int) bo return false } - videoExists := t.videoExists(sceneChecksum, seconds) + videoExists := !t.VideoPreview || t.videoExists(sceneChecksum, seconds) imageExists := !t.ImagePreview || t.imageExists(sceneChecksum, seconds) screenshotExists := !t.Screenshot || t.screenshotExists(sceneChecksum, seconds) diff --git a/internal/manager/task_generate_phash.go b/internal/manager/task_generate_phash.go index 54dc1a10b..5d35a8738 100644 --- a/internal/manager/task_generate_phash.go +++ b/internal/manager/task_generate_phash.go @@ -44,7 +44,7 @@ func (t *GeneratePhashTask) Start(ctx context.Context) { if !set { generated, err := videophash.Generate(instance.FFMpeg, t.File) if err != nil { - logger.Errorf("Error generating phash: %v", err) + logger.Errorf("Error generating phash for %q: %v", t.File.Path, err) logErrorOutput(err) return } diff --git a/internal/manager/task_generate_screenshot.go b/internal/manager/task_generate_screenshot.go index 77ad2be34..2f4031586 100644 --- a/internal/manager/task_generate_screenshot.go +++ b/internal/manager/task_generate_screenshot.go @@ -32,6 +32,7 @@ func (t 
*GenerateCoverTask) Start(ctx context.Context) { return t.Scene.LoadPrimaryFile(ctx, r.File) }); err != nil { logger.Error(err) + return } if !required { diff --git a/internal/manager/task_generate_sprite.go b/internal/manager/task_generate_sprite.go index 0275830ab..c173147cd 100644 --- a/internal/manager/task_generate_sprite.go +++ b/internal/manager/task_generate_sprite.go @@ -34,7 +34,17 @@ func (t *GenerateSpriteTask) Start(ctx context.Context) { sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm) imagePath := instance.Paths.Scene.GetSpriteImageFilePath(sceneHash) vttPath := instance.Paths.Scene.GetSpriteVttFilePath(sceneHash) - generator, err := NewSpriteGenerator(*videoFile, sceneHash, imagePath, vttPath, 9, 9) + + cfg := DefaultSpriteGeneratorConfig + cfg.SpriteSize = instance.Config.GetSpriteScreenshotSize() + + if instance.Config.GetUseCustomSpriteInterval() { + cfg.MinimumSprites = instance.Config.GetMinimumSprites() + cfg.MaximumSprites = instance.Config.GetMaximumSprites() + cfg.SpriteInterval = instance.Config.GetSpriteInterval() + } + + generator, err := NewSpriteGenerator(*videoFile, sceneHash, imagePath, vttPath, cfg) if err != nil { logger.Errorf("error creating sprite generator: %s", err.Error()) diff --git a/internal/manager/task_scan.go b/internal/manager/task_scan.go index 6f7f34b3c..22849124c 100644 --- a/internal/manager/task_scan.go +++ b/internal/manager/task_scan.go @@ -2,13 +2,17 @@ package manager import ( "context" + "errors" "fmt" "io/fs" "path/filepath" "regexp" + "runtime/debug" + "sync" "time" "github.com/99designs/gqlgen/graphql/handler/lru" + "github.com/remeh/sizedwaitgroup" "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file/video" @@ -22,16 +26,18 @@ import ( "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/scene/generate" "github.com/stashapp/stash/pkg/txn" + "github.com/stashapp/stash/pkg/utils" ) -type scanner interface { - Scan(ctx 
context.Context, handlers []file.Handler, options file.ScanOptions, progressReporter file.ProgressReporter) -} - type ScanJob struct { - scanner scanner + scanner *file.Scanner input ScanMetadataInput subscriptions *subscriptionManager + + fileQueue chan file.ScannedFile + count int + + unmatchedCaptionFiles utils.MutexField[[]string] } func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error { @@ -55,22 +61,24 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error { start := time.Now() + nTasks := cfg.GetParallelTasksWithAutoDetection() + const taskQueueSize = 200000 - taskQueue := job.NewTaskQueue(ctx, progress, taskQueueSize, cfg.GetParallelTasksWithAutoDetection()) + taskQueue := job.NewTaskQueue(ctx, progress, taskQueueSize, nTasks) var minModTime time.Time if j.input.Filter != nil && j.input.Filter.MinModTime != nil { minModTime = *j.input.Filter.MinModTime } - j.scanner.Scan(ctx, getScanHandlers(j.input, taskQueue, progress), file.ScanOptions{ - Paths: paths, - ScanFilters: []file.PathFilter{newScanFilter(c, repo, minModTime)}, - ZipFileExtensions: cfg.GetGalleryExtensions(), - ParallelTasks: cfg.GetParallelTasksWithAutoDetection(), - HandlerRequiredFilters: []file.Filter{newHandlerRequiredFilter(cfg, repo)}, - Rescan: j.input.Rescan, - }, progress) + // HACK - these should really be set in the scanner initialization + j.scanner.FileHandlers = getScanHandlers(j.input, taskQueue, progress) + j.scanner.ScanFilters = []file.PathFilter{newScanFilter(c, repo, minModTime)} + j.scanner.HandlerRequiredFilters = []file.Filter{newHandlerRequiredFilter(cfg, repo)} + + logger.Infof("Starting scan of %d paths with %d parallel tasks", len(paths), nTasks) + + j.runJob(ctx, paths, nTasks, progress) taskQueue.Close() @@ -80,12 +88,336 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error { } elapsed := time.Since(start) - logger.Info(fmt.Sprintf("Scan finished (%s)", elapsed)) + logger.Infof("Scan 
finished (%s)", elapsed) j.subscriptions.notify() return nil } +func (j *ScanJob) runJob(ctx context.Context, paths []string, nTasks int, progress *job.Progress) { + var wg sync.WaitGroup + wg.Add(1) + + j.fileQueue = make(chan file.ScannedFile, scanQueueSize) + + go func() { + defer func() { + wg.Done() + + // handle panics in goroutine + if p := recover(); p != nil { + logger.Errorf("panic while queuing files for scan: %v", p) + logger.Errorf(string(debug.Stack())) + } + }() + + if err := j.queueFiles(ctx, paths, progress); err != nil { + if errors.Is(err, context.Canceled) { + return + } + + logger.Errorf("error queuing files for scan: %v", err) + return + } + + logger.Infof("Finished adding files to queue. %d files queued", j.count) + }() + + defer wg.Wait() + + j.processQueue(ctx, nTasks, progress) +} + +const scanQueueSize = 200000 + +func (j *ScanJob) queueFiles(ctx context.Context, paths []string, progress *job.Progress) error { + fs := &file.OsFS{} + + defer func() { + close(j.fileQueue) + + progress.AddTotal(j.count) + progress.Definite() + }() + + var err error + progress.ExecuteTask("Walking directory tree", func() { + for _, p := range paths { + err = file.SymWalk(fs, p, j.queueFileFunc(ctx, fs, nil, progress)) + if err != nil { + return + } + } + }) + + return err +} + +func (j *ScanJob) queueFileFunc(ctx context.Context, f models.FS, zipFile *file.ScannedFile, progress *job.Progress) fs.WalkDirFunc { + return func(path string, d fs.DirEntry, err error) error { + if err != nil { + // don't let errors prevent scanning + logger.Errorf("error scanning %s: %v", path, err) + return nil + } + + if err = ctx.Err(); err != nil { + return err + } + + info, err := d.Info() + if err != nil { + logger.Errorf("reading info for %q: %v", path, err) + return nil + } + + zipFilePath := "" + if zipFile != nil { + zipFilePath = zipFile.Path + } + + if !j.scanner.AcceptEntry(ctx, path, info, zipFilePath) { + if info.IsDir() { + logger.Debugf("Skipping directory %s", 
path) + return fs.SkipDir + } + + // we don't include caption files in the file scan, but we do need + // to handle them + if fsutil.MatchExtension(path, video.CaptionExts) { + fileRepo := j.scanner.Repository.File + matched := video.AssociateCaptions(ctx, path, j.scanner.Repository.TxnManager, fileRepo, fileRepo) + + if !matched { + logger.Debugf("No matching video file found for caption file %s", path) + j.unmatchedCaptionFiles.SetFunc(func(files []string) []string { + return append(files, path) + }) + } + + return nil + } + + logger.Debugf("Skipping file %s", path) + return nil + } + + size, err := file.GetFileSize(f, path, info) + if err != nil { + return err + } + + ff := file.ScannedFile{ + BaseFile: &models.BaseFile{ + DirEntry: models.DirEntry{ + ModTime: file.ModTime(info), + }, + Path: path, + Basename: filepath.Base(path), + Size: size, + }, + FS: f, + Info: info, + } + + if zipFile != nil { + ff.ZipFileID = &zipFile.ID + ff.ZipFile = zipFile + } + + if info.IsDir() { + // handle folders immediately + if err := j.handleFolder(ctx, ff, progress); err != nil { + if !errors.Is(err, context.Canceled) { + logger.Errorf("error processing %q: %v", path, err) + } + + // skip the directory since we won't be able to process the files anyway + return fs.SkipDir + } + + return nil + } + + // if zip file is present, we handle immediately + if zipFile != nil { + progress.ExecuteTask("Scanning "+path, func() { + // don't increment progress in zip files + if err := j.handleFile(ctx, ff, nil); err != nil { + if !errors.Is(err, context.Canceled) { + logger.Errorf("error processing %q: %v", path, err) + } + // don't return an error, just skip the file + } + }) + + return nil + } + + logger.Tracef("Queueing file %s for scanning", path) + j.fileQueue <- ff + + j.count++ + + return nil + } +} + +func (j *ScanJob) processQueue(ctx context.Context, parallelTasks int, progress *job.Progress) { + if parallelTasks < 1 { + parallelTasks = 1 + } + + wg := 
sizedwaitgroup.New(parallelTasks) + + func() { + defer func() { + wg.Wait() + + // handle panics in goroutine + if p := recover(); p != nil { + logger.Errorf("panic while scanning files: %v", p) + logger.Errorf(string(debug.Stack())) + } + }() + + for f := range j.fileQueue { + logger.Tracef("Processing queued file %s", f.Path) + if err := ctx.Err(); err != nil { + return + } + + wg.Add() + ff := f + go func() { + defer wg.Done() + j.processQueueItem(ctx, ff, progress) + }() + } + }() +} + +func (j *ScanJob) processQueueItem(ctx context.Context, f file.ScannedFile, progress *job.Progress) { + progress.ExecuteTask("Scanning "+f.Path, func() { + var err error + if f.Info.IsDir() { + err = j.handleFolder(ctx, f, progress) + } else { + err = j.handleFile(ctx, f, progress) + } + + if err != nil && !errors.Is(err, context.Canceled) { + logger.Errorf("error processing %q: %v", f.Path, err) + } + }) +} + +func (j *ScanJob) handleFolder(ctx context.Context, f file.ScannedFile, progress *job.Progress) error { + if progress != nil { + defer progress.Increment() + } + + _, err := j.scanner.ScanFolder(ctx, f) + if err != nil { + return err + } + + return nil +} + +func (j *ScanJob) handleFile(ctx context.Context, f file.ScannedFile, progress *job.Progress) error { + if progress != nil { + defer progress.Increment() + } + + r, err := j.scanner.ScanFile(ctx, f) + if err != nil { + return err + } + + // if this is a new video file, match it with any unmatched caption files + if r.New && len(j.unmatchedCaptionFiles.Get()) > 0 { + videoFile, _ := r.File.(*models.VideoFile) + + if videoFile != nil { + // try to match any unmatched caption files to this video file + for _, captionPath := range j.unmatchedCaptionFiles.Get() { + if video.MatchesCaption(videoFile.Path, captionPath) { + video.AssociateCaptions(ctx, captionPath, j.scanner.Repository.TxnManager, j.scanner.Repository.File, j.scanner.Repository.File) + + // remove from the unmatched list + 
j.unmatchedCaptionFiles.SetFunc(func(files []string) []string { + newFiles := make([]string, 0, len(files)-1) + for _, f := range files { + if f != captionPath { + newFiles = append(newFiles, f) + } + } + return newFiles + }) + } + } + } + } + + // clean captions - scene handler handles this as well, but + // unchanged files aren't processed by the scene handler + if r.IsUnchanged() { + videoFile, _ := r.File.(*models.VideoFile) + + if videoFile != nil { + txnMgr := j.scanner.Repository.TxnManager + fileRepo := j.scanner.Repository.File + if err := txn.WithDatabase(ctx, txnMgr, func(ctx context.Context) error { + return video.CleanCaptions(ctx, videoFile, txnMgr, fileRepo) + }); err != nil { + logger.Errorf("Error cleaning captions: %v", err) + } + } + } + + // handle rename should have already handled the contents of the zip file + // so shouldn't need to scan it again. + // Only scan zip contents if the file is new, the fingerprint changed, + // or if a force rescan was requested. + + if j.scanner.IsZipFile(f.Info.Name()) && (r.New || r.FingerprintChanged || j.scanner.Rescan) { + ff := r.File + f.BaseFile = ff.Base() + + // scan zip files with a different context that is not cancellable + // cancelling while scanning zip file contents results in the scan + // contents being partially completed + zipCtx := context.WithoutCancel(ctx) + + if err := j.scanZipFile(zipCtx, f, progress); err != nil { + logger.Errorf("Error scanning zip file %q: %v", f.Path, err) + } + } else if r.Updated && j.scanner.IsZipFile(f.Info.Name()) { + logger.Debugf("Skipping zip file scan for %q: fingerprint unchanged", f.Path) + } + + return nil +} + +func (j *ScanJob) scanZipFile(ctx context.Context, f file.ScannedFile, progress *job.Progress) error { + zipFS, err := f.FS.OpenZip(f.Path, f.Size) + if err != nil { + if errors.Is(err, file.ErrNotReaderAt) { + // can't walk the zip file + // just return + logger.Debugf("Skipping zip file %q as it cannot be opened for walking", f.Path) + return 
nil + } + + return err + } + + defer zipFS.Close() + + return file.SymWalk(zipFS, f.Path, j.queueFileFunc(ctx, zipFS, &f, progress)) +} + type extensionConfig struct { vidExt []string imgExt []string @@ -117,11 +449,10 @@ type sceneFinder interface { // handlerRequiredFilter returns true if a File's handler needs to be executed despite the file not being updated. type handlerRequiredFilter struct { extensionConfig - txnManager txn.Manager - SceneFinder sceneFinder - ImageFinder fileCounter - GalleryFinder galleryFinder - CaptionUpdater video.CaptionUpdater + txnManager txn.Manager + SceneFinder sceneFinder + ImageFinder fileCounter + GalleryFinder galleryFinder FolderCache *lru.LRU[bool] @@ -137,7 +468,6 @@ func newHandlerRequiredFilter(c *config.Config, repo models.Repository) *handler SceneFinder: repo.Scene, ImageFinder: repo.Image, GalleryFinder: repo.Gallery, - CaptionUpdater: repo.File, FolderCache: lru.New[bool](processes * 2), videoFileNamingAlgorithm: c.GetVideoFileNamingAlgorithm(), } @@ -212,65 +542,35 @@ func (f *handlerRequiredFilter) Accept(ctx context.Context, ff models.File) bool } } - if isVideoFile { - // TODO - check if the cover exists - // hash := scene.GetHash(ff, f.videoFileNamingAlgorithm) - // ssPath := instance.Paths.Scene.GetScreenshotPath(hash) - // if exists, _ := fsutil.FileExists(ssPath); !exists { - // // if not, check if the file is a primary file for a scene - // scenes, err := f.SceneFinder.FindByPrimaryFileID(ctx, ff.Base().ID) - // if err != nil { - // // just ignore - // return false - // } - - // if len(scenes) > 0 { - // // if it is, then it needs to be re-generated - // return true - // } - // } - - // clean captions - scene handler handles this as well, but - // unchanged files aren't processed by the scene handler - videoFile, _ := ff.(*models.VideoFile) - if videoFile != nil { - if err := video.CleanCaptions(ctx, videoFile, f.txnManager, f.CaptionUpdater); err != nil { - logger.Errorf("Error cleaning captions: %v", err) - 
} - } - } - return false } type scanFilter struct { extensionConfig - txnManager txn.Manager - FileFinder models.FileFinder - CaptionUpdater video.CaptionUpdater + txnManager txn.Manager stashPaths config.StashConfigs generatedPath string videoExcludeRegex []*regexp.Regexp imageExcludeRegex []*regexp.Regexp minModTime time.Time + stashIgnoreFilter *file.StashIgnoreFilter } func newScanFilter(c *config.Config, repo models.Repository, minModTime time.Time) *scanFilter { return &scanFilter{ extensionConfig: newExtensionConfig(c), txnManager: repo.TxnManager, - FileFinder: repo.File, - CaptionUpdater: repo.File, stashPaths: c.GetStashPaths(), generatedPath: c.GetGeneratedPath(), videoExcludeRegex: generateRegexps(c.GetExcludes()), imageExcludeRegex: generateRegexps(c.GetImageExcludes()), minModTime: minModTime, + stashIgnoreFilter: file.NewStashIgnoreFilter(), } } -func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool { +func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool { if fsutil.IsPathInDir(f.generatedPath, path) { logger.Warnf("Skipping %q as it overlaps with the generated folder", path) return false @@ -287,19 +587,16 @@ func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) return false } + // Check .stashignore files, bounded to the library root. 
+ if !f.stashIgnoreFilter.Accept(ctx, path, info, s.Path, zipFilePath) { + logger.Debugf("Skipping %s due to .stashignore", path) + return false + } + isVideoFile := useAsVideo(path) isImageFile := useAsImage(path) isZipFile := fsutil.MatchExtension(path, f.zipExt) - // handle caption files - if fsutil.MatchExtension(path, video.CaptionExts) { - // we don't include caption files in the file scan, but we do need - // to handle them - video.AssociateCaptions(ctx, path, f.txnManager, f.FileFinder, f.CaptionUpdater) - - return false - } - if !info.IsDir() && !isVideoFile && !isImageFile && !isZipFile { logger.Debugf("Skipping %s as it does not match any known file extensions", path) return false @@ -363,8 +660,9 @@ func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progre &file.FilteredHandler{ Filter: file.FilterFunc(imageFileFilter), Handler: &image.ScanHandler{ - CreatorUpdater: r.Image, - GalleryFinder: r.Gallery, + CreatorUpdater: r.Image, + GalleryFinder: r.Gallery, + SceneFinderUpdater: r.Scene, ScanGenerator: &imageGenerators{ input: options, taskQueue: taskQueue, @@ -393,9 +691,10 @@ func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progre &file.FilteredHandler{ Filter: file.FilterFunc(videoFileFilter), Handler: &scene.ScanHandler{ - CreatorUpdater: r.Scene, - CaptionUpdater: r.File, - PluginCache: pluginCache, + CreatorUpdater: r.Scene, + GalleryFinderUpdater: r.Gallery, + CaptionUpdater: r.File, + PluginCache: pluginCache, ScanGenerator: &sceneGenerators{ input: options, taskQueue: taskQueue, @@ -463,6 +762,29 @@ func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f model } } + if t.ScanGenerateImagePhashes { + progress.AddTotal(1) + phashFn := func(ctx context.Context) { + mgr := GetInstance() + // Only generate phash for image files, not video files + if imageFile, ok := f.(*models.ImageFile); ok { + taskPhash := GenerateImagePhashTask{ + repository: mgr.Repository, + File: imageFile, + 
Overwrite: overwrite, + } + taskPhash.Start(ctx) + } + progress.Increment() + } + + if g.sequentialScanning { + phashFn(ctx) + } else { + g.taskQueue.Add(fmt.Sprintf("Generating phash for %s", path), phashFn) + } + } + return nil } diff --git a/internal/manager/task_stash_box_tag.go b/internal/manager/task_stash_box_tag.go index d20b71f06..264e7e96c 100644 --- a/internal/manager/task_stash_box_tag.go +++ b/internal/manager/task_stash_box_tag.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "strconv" + "strings" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/match" @@ -12,59 +13,36 @@ import ( "github.com/stashapp/stash/pkg/sliceutil" "github.com/stashapp/stash/pkg/stashbox" "github.com/stashapp/stash/pkg/studio" + "github.com/stashapp/stash/pkg/tag" ) -type StashBoxTagTaskType int - -const ( - Performer StashBoxTagTaskType = iota - Studio -) - -type StashBoxBatchTagTask struct { +// stashBoxBatchPerformerTagTask is used to tag or create performers from stash-box. +// +// Two modes of operation: +// - Update existing performer: set performer to update from stash-box data +// - Create new performer: set name or stashID to search stash-box and create locally +type stashBoxBatchPerformerTagTask struct { box *models.StashBox name *string + stashID *string performer *models.Performer - studio *models.Studio - refresh bool - createParent bool excludedFields []string - taskType StashBoxTagTaskType } -func (t *StashBoxBatchTagTask) Start(ctx context.Context) { - switch t.taskType { - case Performer: - t.stashBoxPerformerTag(ctx) - case Studio: - t.stashBoxStudioTag(ctx) +func (t *stashBoxBatchPerformerTagTask) getName() string { + switch { + case t.name != nil: + return *t.name + case t.stashID != nil: + return *t.stashID + case t.performer != nil: + return t.performer.Name default: - logger.Errorf("Error starting batch task, unknown task_type %d", t.taskType) + return "" } } -func (t *StashBoxBatchTagTask) Description() string { - if t.taskType == 
Performer { - var name string - if t.name != nil { - name = *t.name - } else { - name = t.performer.Name - } - return fmt.Sprintf("Tagging performer %s from stash-box", name) - } else if t.taskType == Studio { - var name string - if t.name != nil { - name = *t.name - } else { - name = t.studio.Name - } - return fmt.Sprintf("Tagging studio %s from stash-box", name) - } - return fmt.Sprintf("Unknown tagging task type %d from stash-box", t.taskType) -} - -func (t *StashBoxBatchTagTask) stashBoxPerformerTag(ctx context.Context) { +func (t *stashBoxBatchPerformerTagTask) Start(ctx context.Context) { performer, err := t.findStashBoxPerformer(ctx) if err != nil { logger.Errorf("Error fetching performer data from stash-box: %v", err) @@ -76,21 +54,18 @@ func (t *StashBoxBatchTagTask) stashBoxPerformerTag(ctx context.Context) { excluded[field] = true } - // performer will have a value if pulling from Stash-box by Stash ID or name was successful if performer != nil { t.processMatchedPerformer(ctx, performer, excluded) } else { - var name string - if t.name != nil { - name = *t.name - } else if t.performer != nil { - name = t.performer.Name - } - logger.Infof("No match found for %s", name) + logger.Infof("No match found for %s", t.getName()) } } -func (t *StashBoxBatchTagTask) findStashBoxPerformer(ctx context.Context) (*models.ScrapedPerformer, error) { +func (t *stashBoxBatchPerformerTagTask) GetDescription() string { + return fmt.Sprintf("Tagging performer %s from stash-box", t.getName()) +} + +func (t *stashBoxBatchPerformerTagTask) findStashBoxPerformer(ctx context.Context) (*models.ScrapedPerformer, error) { var performer *models.ScrapedPerformer var err error @@ -98,7 +73,24 @@ func (t *StashBoxBatchTagTask) findStashBoxPerformer(ctx context.Context) (*mode client := stashbox.NewClient(*t.box, stashbox.ExcludeTagPatterns(instance.Config.GetScraperExcludeTagPatterns())) - if t.refresh { + switch { + case t.name != nil: + performer, err = client.FindPerformerByName(ctx, 
*t.name) + case t.stashID != nil: + performer, err = client.FindPerformerByID(ctx, *t.stashID) + + if performer != nil && performer.RemoteMergedIntoId != nil { + mergedPerformer, err := t.handleMergedPerformer(ctx, performer, client) + if err != nil { + return nil, err + } + + if mergedPerformer != nil { + logger.Infof("Performer id %s merged into %s, updating local performer", *t.stashID, *performer.RemoteMergedIntoId) + performer = mergedPerformer + } + } + case t.performer != nil: // tagging or updating existing performer var remoteID string if err := r.WithReadTxn(ctx, func(ctx context.Context) error { qb := r.Performer @@ -118,6 +110,7 @@ func (t *StashBoxBatchTagTask) findStashBoxPerformer(ctx context.Context) (*mode }); err != nil { return nil, err } + if remoteID != "" { performer, err = client.FindPerformerByID(ctx, remoteID) @@ -132,15 +125,10 @@ func (t *StashBoxBatchTagTask) findStashBoxPerformer(ctx context.Context) (*mode performer = mergedPerformer } } - } - } else { - var name string - if t.name != nil { - name = *t.name } else { - name = t.performer.Name + // find by performer name instead + performer, err = client.FindPerformerByName(ctx, t.performer.Name) } - performer, err = client.FindPerformerByName(ctx, name) } if performer != nil { @@ -154,7 +142,7 @@ func (t *StashBoxBatchTagTask) findStashBoxPerformer(ctx context.Context) (*mode return performer, err } -func (t *StashBoxBatchTagTask) handleMergedPerformer(ctx context.Context, performer *models.ScrapedPerformer, client *stashbox.Client) (mergedPerformer *models.ScrapedPerformer, err error) { +func (t *stashBoxBatchPerformerTagTask) handleMergedPerformer(ctx context.Context, performer *models.ScrapedPerformer, client *stashbox.Client) (mergedPerformer *models.ScrapedPerformer, err error) { mergedPerformer, err = client.FindPerformerByID(ctx, *performer.RemoteMergedIntoId) if err != nil { return nil, fmt.Errorf("loading merged performer %s from stashbox", *performer.RemoteMergedIntoId) @@ 
-169,8 +157,7 @@ func (t *StashBoxBatchTagTask) handleMergedPerformer(ctx context.Context, perfor return mergedPerformer, nil } -func (t *StashBoxBatchTagTask) processMatchedPerformer(ctx context.Context, p *models.ScrapedPerformer, excluded map[string]bool) { - // Refreshing an existing performer +func (t *stashBoxBatchPerformerTagTask) processMatchedPerformer(ctx context.Context, p *models.ScrapedPerformer, excluded map[string]bool) { if t.performer != nil { storedID, _ := strconv.Atoi(*p.StoredID) @@ -180,7 +167,6 @@ func (t *StashBoxBatchTagTask) processMatchedPerformer(ctx context.Context, p *m return } - // Start the transaction and update the performer r := instance.Repository err = r.WithTxn(ctx, func(ctx context.Context) error { qb := r.Performer @@ -226,8 +212,8 @@ func (t *StashBoxBatchTagTask) processMatchedPerformer(ctx context.Context, p *m } else { logger.Infof("Updated performer %s", *p.Name) } - } else if t.name != nil && p.Name != nil { - // Creating a new performer + } else { + // no existing performer, create a new one newPerformer := p.ToPerformer(t.box.Endpoint, excluded) image, err := p.GetImage(ctx, excluded) if err != nil { @@ -263,7 +249,40 @@ func (t *StashBoxBatchTagTask) processMatchedPerformer(ctx context.Context, p *m } } -func (t *StashBoxBatchTagTask) stashBoxStudioTag(ctx context.Context) { +// stashBoxBatchStudioTagTask is used to tag or create studios from stash-box. 
+// +// Two modes of operation: +// - Update existing studio: set studio to update from stash-box data +// - Create new studio: set name or stashID to search stash-box and create locally +type stashBoxBatchStudioTagTask struct { + box *models.StashBox + name *string + stashID *string + studio *models.Studio + createParent bool + excludedFields []string +} + +func (t *stashBoxBatchStudioTagTask) getName() string { + switch { + case t.name != nil: + return *t.name + case t.stashID != nil: + return *t.stashID + case t.studio != nil: + return t.studio.Name + default: + return "" + } +} + +func (t *stashBoxBatchStudioTagTask) Start(ctx context.Context) { + // Skip organized studios + if t.studio != nil && t.studio.Organized { + logger.Infof("Skipping organized studio %s", t.studio.Name) + return + } + studio, err := t.findStashBoxStudio(ctx) if err != nil { logger.Errorf("Error fetching studio data from stash-box: %v", err) @@ -275,21 +294,18 @@ func (t *StashBoxBatchTagTask) stashBoxStudioTag(ctx context.Context) { excluded[field] = true } - // studio will have a value if pulling from Stash-box by Stash ID or name was successful if studio != nil { t.processMatchedStudio(ctx, studio, excluded) } else { - var name string - if t.name != nil { - name = *t.name - } else if t.studio != nil { - name = t.studio.Name - } - logger.Infof("No match found for %s", name) + logger.Infof("No match found for %s", t.getName()) } } -func (t *StashBoxBatchTagTask) findStashBoxStudio(ctx context.Context) (*models.ScrapedStudio, error) { +func (t *stashBoxBatchStudioTagTask) GetDescription() string { + return fmt.Sprintf("Tagging studio %s from stash-box", t.getName()) +} + +func (t *stashBoxBatchStudioTagTask) findStashBoxStudio(ctx context.Context) (*models.ScrapedStudio, error) { var studio *models.ScrapedStudio var err error @@ -297,7 +313,12 @@ func (t *StashBoxBatchTagTask) findStashBoxStudio(ctx context.Context) (*models. 
client := stashbox.NewClient(*t.box, stashbox.ExcludeTagPatterns(instance.Config.GetScraperExcludeTagPatterns())) - if t.refresh { + switch { + case t.name != nil: + studio, err = client.FindStudio(ctx, *t.name) + case t.stashID != nil: + studio, err = client.FindStudio(ctx, *t.stashID) + case t.studio != nil: var remoteID string if err := r.WithReadTxn(ctx, func(ctx context.Context) error { if !t.studio.StashIDs.Loaded() { @@ -315,17 +336,13 @@ func (t *StashBoxBatchTagTask) findStashBoxStudio(ctx context.Context) (*models. }); err != nil { return nil, err } + if remoteID != "" { studio, err = client.FindStudio(ctx, remoteID) - } - } else { - var name string - if t.name != nil { - name = *t.name } else { - name = t.studio.Name + // find by studio name instead + studio, err = client.FindStudio(ctx, t.studio.Name) } - studio, err = client.FindStudio(ctx, name) } if err := r.WithReadTxn(ctx, func(ctx context.Context) error { @@ -343,8 +360,7 @@ func (t *StashBoxBatchTagTask) findStashBoxStudio(ctx context.Context) (*models. 
return studio, err } -func (t *StashBoxBatchTagTask) processMatchedStudio(ctx context.Context, s *models.ScrapedStudio, excluded map[string]bool) { - // Refreshing an existing studio +func (t *stashBoxBatchStudioTagTask) processMatchedStudio(ctx context.Context, s *models.ScrapedStudio, excluded map[string]bool) { if t.studio != nil { storedID, _ := strconv.Atoi(*s.StoredID) @@ -361,7 +377,6 @@ func (t *StashBoxBatchTagTask) processMatchedStudio(ctx context.Context, s *mode return } - // Start the transaction and update the studio r := instance.Repository err = r.WithTxn(ctx, func(ctx context.Context) error { qb := r.Studio @@ -394,8 +409,8 @@ func (t *StashBoxBatchTagTask) processMatchedStudio(ctx context.Context, s *mode } else { logger.Infof("Updated studio %s", s.Name) } - } else if t.name != nil && s.Name != "" { - // Creating a new studio + } else if s.Name != "" { + // no existing studio, create a new one if s.Parent != nil && t.createParent { err := t.processParentStudio(ctx, s.Parent, excluded) if err != nil { @@ -410,7 +425,6 @@ func (t *StashBoxBatchTagTask) processMatchedStudio(ctx context.Context, s *mode return } - // Start the transaction and save the studio r := instance.Repository err = r.WithTxn(ctx, func(ctx context.Context) error { qb := r.Studio @@ -439,9 +453,8 @@ func (t *StashBoxBatchTagTask) processMatchedStudio(ctx context.Context, s *mode } } -func (t *StashBoxBatchTagTask) processParentStudio(ctx context.Context, parent *models.ScrapedStudio, excluded map[string]bool) error { +func (t *stashBoxBatchStudioTagTask) processParentStudio(ctx context.Context, parent *models.ScrapedStudio, excluded map[string]bool) error { if parent.StoredID == nil { - // The parent needs to be created newParentStudio := parent.ToStudio(t.box.Endpoint, excluded) image, err := parent.GetImage(ctx, excluded) @@ -450,7 +463,6 @@ func (t *StashBoxBatchTagTask) processParentStudio(ctx context.Context, parent * return err } - // Start the transaction and save the 
studio r := instance.Repository err = r.WithTxn(ctx, func(ctx context.Context) error { qb := r.Studio @@ -476,7 +488,6 @@ func (t *StashBoxBatchTagTask) processParentStudio(ctx context.Context, parent * } return err } else { - // The parent studio matched an existing one and the user has chosen in the UI to link and/or update it storedID, _ := strconv.Atoi(*parent.StoredID) image, err := parent.GetImage(ctx, excluded) @@ -485,7 +496,6 @@ func (t *StashBoxBatchTagTask) processParentStudio(ctx context.Context, parent * return err } - // Start the transaction and update the studio r := instance.Repository err = r.WithTxn(ctx, func(ctx context.Context) error { qb := r.Studio @@ -521,3 +531,235 @@ func (t *StashBoxBatchTagTask) processParentStudio(ctx context.Context, parent * return err } } + +// stashBoxBatchTagTagTask is used to tag or create tags from stash-box. +// +// Two modes of operation: +// - Update existing tag: set tag to update from stash-box data +// - Create new tag: set name or stashID to search stash-box and create locally +type stashBoxBatchTagTagTask struct { + box *models.StashBox + name *string + stashID *string + tag *models.Tag + createParent bool + excludedFields []string +} + +func (t *stashBoxBatchTagTagTask) getName() string { + switch { + case t.name != nil: + return *t.name + case t.stashID != nil: + return *t.stashID + case t.tag != nil: + return t.tag.Name + default: + return "" + } +} + +func (t *stashBoxBatchTagTagTask) Start(ctx context.Context) { + scrapedTag, err := t.findStashBoxTag(ctx) + if err != nil { + logger.Errorf("Error fetching tag data from stash-box: %v", err) + return + } + + excluded := map[string]bool{} + for _, field := range t.excludedFields { + excluded[field] = true + } + + if scrapedTag != nil { + t.processMatchedTag(ctx, scrapedTag, excluded) + } else { + logger.Infof("No match found for %s", t.getName()) + } +} + +func (t *stashBoxBatchTagTagTask) GetDescription() string { + return fmt.Sprintf("Tagging tag %s 
from stash-box", t.getName()) +} + +func (t *stashBoxBatchTagTagTask) findStashBoxTag(ctx context.Context) (*models.ScrapedTag, error) { + var results []*models.ScrapedTag + var err error + + r := instance.Repository + + client := stashbox.NewClient(*t.box, stashbox.ExcludeTagPatterns(instance.Config.GetScraperExcludeTagPatterns())) + + nameQuery := "" + + switch { + case t.name != nil: + nameQuery = *t.name + results, err = client.QueryTag(ctx, *t.name) + case t.stashID != nil: + results, err = client.QueryTag(ctx, *t.stashID) + case t.tag != nil: + var remoteID string + if err := r.WithReadTxn(ctx, func(ctx context.Context) error { + if !t.tag.StashIDs.Loaded() { + err = t.tag.LoadStashIDs(ctx, r.Tag) + if err != nil { + return err + } + } + for _, id := range t.tag.StashIDs.List() { + if id.Endpoint == t.box.Endpoint { + remoteID = id.StashID + } + } + return nil + }); err != nil { + return nil, err + } + + if remoteID != "" { + results, err = client.QueryTag(ctx, remoteID) + } else { + nameQuery = t.tag.Name + results, err = client.QueryTag(ctx, t.tag.Name) + } + } + + if err != nil { + return nil, err + } + + if len(results) == 0 { + return nil, nil + } + + var result *models.ScrapedTag + + // QueryTag returns tags that partially match the name, so find the exact match if searching by name + if nameQuery != "" { + for _, r := range results { + if strings.EqualFold(r.Name, nameQuery) { + result = r + break + } + } + } else { + result = results[0] + } + + if result == nil { + return nil, nil + } + + if err := r.WithReadTxn(ctx, func(ctx context.Context) error { + return match.ScrapedTagHierarchy(ctx, r.Tag, result, t.box.Endpoint) + }); err != nil { + return nil, err + } + + return result, nil +} + +func (t *stashBoxBatchTagTagTask) processParentTag(ctx context.Context, parent *models.ScrapedTag, excluded map[string]bool) error { + if parent.StoredID == nil { + // Create new parent tag + newParentTag := parent.ToTag(t.box.Endpoint, excluded) + + r := 
instance.Repository + err := r.WithTxn(ctx, func(ctx context.Context) error { + qb := r.Tag + + if err := tag.ValidateCreate(ctx, *newParentTag, qb); err != nil { + return err + } + + if err := qb.Create(ctx, &models.CreateTagInput{Tag: newParentTag}); err != nil { + return err + } + + storedID := strconv.Itoa(newParentTag.ID) + parent.StoredID = &storedID + return nil + }) + if err != nil { + logger.Errorf("Failed to create parent tag %s: %v", parent.Name, err) + } else { + logger.Infof("Created parent tag %s", parent.Name) + } + return err + } + + // Parent already exists — nothing to update for categories + return nil +} + +func (t *stashBoxBatchTagTagTask) processMatchedTag(ctx context.Context, s *models.ScrapedTag, excluded map[string]bool) { + // Determine the tag ID to update — either from the task's tag or from the + // StoredID set by match.ScrapedTag (when batch adding by name and the tag + // already exists locally). + tagID := 0 + if t.tag != nil { + tagID = t.tag.ID + } else if s.StoredID != nil { + tagID, _ = strconv.Atoi(*s.StoredID) + } + + if s.Parent != nil && t.createParent { + if err := t.processParentTag(ctx, s.Parent, excluded); err != nil { + return + } + } + + if tagID > 0 { + r := instance.Repository + err := r.WithTxn(ctx, func(ctx context.Context) error { + qb := r.Tag + + existingStashIDs, err := qb.GetStashIDs(ctx, tagID) + if err != nil { + return err + } + + storedID := strconv.Itoa(tagID) + partial := s.ToPartial(storedID, t.box.Endpoint, excluded, existingStashIDs) + + if err := tag.ValidateUpdate(ctx, tagID, partial, qb); err != nil { + return err + } + + if _, err := qb.UpdatePartial(ctx, tagID, partial); err != nil { + return err + } + + return nil + }) + if err != nil { + logger.Errorf("Failed to update tag %s: %v", s.Name, err) + } else { + logger.Infof("Updated tag %s", s.Name) + } + } else if s.Name != "" { + // no existing tag, create a new one + newTag := s.ToTag(t.box.Endpoint, excluded) + + r := instance.Repository + err 
:= r.WithTxn(ctx, func(ctx context.Context) error { + qb := r.Tag + + if err := tag.ValidateCreate(ctx, *newTag, qb); err != nil { + return err + } + + if err := qb.Create(ctx, &models.CreateTagInput{Tag: newTag}); err != nil { + return err + } + + return nil + }) + if err != nil { + logger.Errorf("Failed to create tag %s: %v", s.Name, err) + } else { + logger.Infof("Created tag %s", s.Name) + } + } +} diff --git a/internal/static/embed.go b/internal/static/embed.go index 91437a81f..665c5a892 100644 --- a/internal/static/embed.go +++ b/internal/static/embed.go @@ -8,12 +8,13 @@ import ( "io/fs" ) -//go:embed performer performer_male scene image gallery tag studio group +//go:embed performer performer_male performer_sfw scene image gallery tag studio group var data embed.FS const ( - Performer = "performer" - PerformerMale = "performer_male" + Performer = "performer" + PerformerMale = "performer_male" + DefaultSFWPerformerImage = "performer_sfw/performer.svg" Scene = "scene" DefaultSceneImage = "scene/scene.svg" diff --git a/internal/static/performer/NoName01.png b/internal/static/performer/NoName01.png deleted file mode 100644 index cdcba1db9..000000000 Binary files a/internal/static/performer/NoName01.png and /dev/null differ diff --git a/internal/static/performer/NoName02.png b/internal/static/performer/NoName02.png deleted file mode 100644 index 4687adc08..000000000 Binary files a/internal/static/performer/NoName02.png and /dev/null differ diff --git a/internal/static/performer/NoName02.svg b/internal/static/performer/NoName02.svg new file mode 100644 index 000000000..b5dbaf2b9 --- /dev/null +++ b/internal/static/performer/NoName02.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName03.png b/internal/static/performer/NoName03.png deleted file mode 100644 index 8ac0d13b7..000000000 Binary files a/internal/static/performer/NoName03.png and /dev/null differ diff --git a/internal/static/performer/NoName04.png 
b/internal/static/performer/NoName04.png deleted file mode 100644 index 41b55b816..000000000 Binary files a/internal/static/performer/NoName04.png and /dev/null differ diff --git a/internal/static/performer/NoName05.png b/internal/static/performer/NoName05.png deleted file mode 100644 index 8a49ba6d3..000000000 Binary files a/internal/static/performer/NoName05.png and /dev/null differ diff --git a/internal/static/performer/NoName05.svg b/internal/static/performer/NoName05.svg new file mode 100644 index 000000000..5a26d98d8 --- /dev/null +++ b/internal/static/performer/NoName05.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName06.png b/internal/static/performer/NoName06.png index 4359911ae..f2a8016e2 100644 Binary files a/internal/static/performer/NoName06.png and b/internal/static/performer/NoName06.png differ diff --git a/internal/static/performer/NoName07.png b/internal/static/performer/NoName07.png deleted file mode 100644 index 1bb5f6f82..000000000 Binary files a/internal/static/performer/NoName07.png and /dev/null differ diff --git a/internal/static/performer/NoName07.svg b/internal/static/performer/NoName07.svg new file mode 100644 index 000000000..ac90cf6d1 --- /dev/null +++ b/internal/static/performer/NoName07.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName08.png b/internal/static/performer/NoName08.png deleted file mode 100644 index 8ff7ff734..000000000 Binary files a/internal/static/performer/NoName08.png and /dev/null differ diff --git a/internal/static/performer/NoName09.png b/internal/static/performer/NoName09.png deleted file mode 100644 index 49b54b725..000000000 Binary files a/internal/static/performer/NoName09.png and /dev/null differ diff --git a/internal/static/performer/NoName09.svg b/internal/static/performer/NoName09.svg new file mode 100644 index 000000000..6009133a4 --- /dev/null +++ b/internal/static/performer/NoName09.svg @@ -0,0 +1 @@ + \ No newline 
at end of file diff --git a/internal/static/performer/NoName10.png b/internal/static/performer/NoName10.png deleted file mode 100644 index a2b72043a..000000000 Binary files a/internal/static/performer/NoName10.png and /dev/null differ diff --git a/internal/static/performer/NoName11.png b/internal/static/performer/NoName11.png index 01034c2b0..45158b094 100644 Binary files a/internal/static/performer/NoName11.png and b/internal/static/performer/NoName11.png differ diff --git a/internal/static/performer/NoName12.png b/internal/static/performer/NoName12.png deleted file mode 100644 index 7f48ba39a..000000000 Binary files a/internal/static/performer/NoName12.png and /dev/null differ diff --git a/internal/static/performer/NoName12.svg b/internal/static/performer/NoName12.svg new file mode 100644 index 000000000..89843a774 --- /dev/null +++ b/internal/static/performer/NoName12.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName13.png b/internal/static/performer/NoName13.png deleted file mode 100644 index fdefafb59..000000000 Binary files a/internal/static/performer/NoName13.png and /dev/null differ diff --git a/internal/static/performer/NoName13.svg b/internal/static/performer/NoName13.svg new file mode 100644 index 000000000..fbbacaacf --- /dev/null +++ b/internal/static/performer/NoName13.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName14.png b/internal/static/performer/NoName14.png deleted file mode 100644 index 20a20a209..000000000 Binary files a/internal/static/performer/NoName14.png and /dev/null differ diff --git a/internal/static/performer/NoName14.svg b/internal/static/performer/NoName14.svg new file mode 100644 index 000000000..1d0231ab3 --- /dev/null +++ b/internal/static/performer/NoName14.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName15.png b/internal/static/performer/NoName15.png deleted file mode 100644 index 
cfc9d3a8c..000000000 Binary files a/internal/static/performer/NoName15.png and /dev/null differ diff --git a/internal/static/performer/NoName16.png b/internal/static/performer/NoName16.png deleted file mode 100644 index f54744280..000000000 Binary files a/internal/static/performer/NoName16.png and /dev/null differ diff --git a/internal/static/performer/NoName17.png b/internal/static/performer/NoName17.png deleted file mode 100644 index 068d1cf73..000000000 Binary files a/internal/static/performer/NoName17.png and /dev/null differ diff --git a/internal/static/performer/NoName17.svg b/internal/static/performer/NoName17.svg new file mode 100644 index 000000000..8df98d6c4 --- /dev/null +++ b/internal/static/performer/NoName17.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName18.png b/internal/static/performer/NoName18.png deleted file mode 100644 index 179d1d323..000000000 Binary files a/internal/static/performer/NoName18.png and /dev/null differ diff --git a/internal/static/performer/NoName19.png b/internal/static/performer/NoName19.png deleted file mode 100644 index 7349c26b2..000000000 Binary files a/internal/static/performer/NoName19.png and /dev/null differ diff --git a/internal/static/performer/NoName19.svg b/internal/static/performer/NoName19.svg new file mode 100644 index 000000000..a35c979d6 --- /dev/null +++ b/internal/static/performer/NoName19.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName20.png b/internal/static/performer/NoName20.png deleted file mode 100644 index 86dd404bc..000000000 Binary files a/internal/static/performer/NoName20.png and /dev/null differ diff --git a/internal/static/performer/NoName21.png b/internal/static/performer/NoName21.png deleted file mode 100644 index 7bee5cdb6..000000000 Binary files a/internal/static/performer/NoName21.png and /dev/null differ diff --git a/internal/static/performer/NoName21.svg b/internal/static/performer/NoName21.svg 
new file mode 100644 index 000000000..2d7647c1d --- /dev/null +++ b/internal/static/performer/NoName21.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName22.png b/internal/static/performer/NoName22.png deleted file mode 100644 index d92384f93..000000000 Binary files a/internal/static/performer/NoName22.png and /dev/null differ diff --git a/internal/static/performer/NoName22.svg b/internal/static/performer/NoName22.svg new file mode 100644 index 000000000..c81400587 --- /dev/null +++ b/internal/static/performer/NoName22.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName23.png b/internal/static/performer/NoName23.png deleted file mode 100644 index f28ca89c8..000000000 Binary files a/internal/static/performer/NoName23.png and /dev/null differ diff --git a/internal/static/performer/NoName23.svg b/internal/static/performer/NoName23.svg new file mode 100644 index 000000000..3156c267f --- /dev/null +++ b/internal/static/performer/NoName23.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName24.png b/internal/static/performer/NoName24.png deleted file mode 100644 index 7b9bb42a2..000000000 Binary files a/internal/static/performer/NoName24.png and /dev/null differ diff --git a/internal/static/performer/NoName24.svg b/internal/static/performer/NoName24.svg new file mode 100644 index 000000000..3afd26f25 --- /dev/null +++ b/internal/static/performer/NoName24.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName25.png b/internal/static/performer/NoName25.png deleted file mode 100644 index 1f4864eed..000000000 Binary files a/internal/static/performer/NoName25.png and /dev/null differ diff --git a/internal/static/performer/NoName25.svg b/internal/static/performer/NoName25.svg new file mode 100644 index 000000000..ab040b917 --- /dev/null +++ b/internal/static/performer/NoName25.svg @@ -0,0 +1 @@ + \ No newline at end 
of file diff --git a/internal/static/performer/NoName26.png b/internal/static/performer/NoName26.png deleted file mode 100644 index b63c47ab5..000000000 Binary files a/internal/static/performer/NoName26.png and /dev/null differ diff --git a/internal/static/performer/NoName26.svg b/internal/static/performer/NoName26.svg new file mode 100644 index 000000000..0c1679e16 --- /dev/null +++ b/internal/static/performer/NoName26.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName27.png b/internal/static/performer/NoName27.png deleted file mode 100644 index eb57d9cf4..000000000 Binary files a/internal/static/performer/NoName27.png and /dev/null differ diff --git a/internal/static/performer/NoName27.svg b/internal/static/performer/NoName27.svg new file mode 100644 index 000000000..4bf73d04a --- /dev/null +++ b/internal/static/performer/NoName27.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName28.png b/internal/static/performer/NoName28.png deleted file mode 100644 index c00fb15b5..000000000 Binary files a/internal/static/performer/NoName28.png and /dev/null differ diff --git a/internal/static/performer/NoName28.svg b/internal/static/performer/NoName28.svg new file mode 100644 index 000000000..5af3dbc38 --- /dev/null +++ b/internal/static/performer/NoName28.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName29.png b/internal/static/performer/NoName29.png index 21e9e27fa..8a53967a5 100644 Binary files a/internal/static/performer/NoName29.png and b/internal/static/performer/NoName29.png differ diff --git a/internal/static/performer/NoName30.png b/internal/static/performer/NoName30.png deleted file mode 100644 index ba968026d..000000000 Binary files a/internal/static/performer/NoName30.png and /dev/null differ diff --git a/internal/static/performer/NoName30.svg b/internal/static/performer/NoName30.svg new file mode 100644 index 000000000..c77b1163f --- 
/dev/null +++ b/internal/static/performer/NoName30.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName31.png b/internal/static/performer/NoName31.png deleted file mode 100644 index a4003fa75..000000000 Binary files a/internal/static/performer/NoName31.png and /dev/null differ diff --git a/internal/static/performer/NoName31.svg b/internal/static/performer/NoName31.svg new file mode 100644 index 000000000..5504136d2 --- /dev/null +++ b/internal/static/performer/NoName31.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName32.png b/internal/static/performer/NoName32.png deleted file mode 100644 index 0ca4aca17..000000000 Binary files a/internal/static/performer/NoName32.png and /dev/null differ diff --git a/internal/static/performer/NoName32.svg b/internal/static/performer/NoName32.svg new file mode 100644 index 000000000..ec72d0836 --- /dev/null +++ b/internal/static/performer/NoName32.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName33.png b/internal/static/performer/NoName33.png index 38ae2116c..025a1ff7f 100644 Binary files a/internal/static/performer/NoName33.png and b/internal/static/performer/NoName33.png differ diff --git a/internal/static/performer/NoName34.png b/internal/static/performer/NoName34.png deleted file mode 100644 index c40683098..000000000 Binary files a/internal/static/performer/NoName34.png and /dev/null differ diff --git a/internal/static/performer/NoName34.svg b/internal/static/performer/NoName34.svg new file mode 100644 index 000000000..49086ca8a --- /dev/null +++ b/internal/static/performer/NoName34.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName35.png b/internal/static/performer/NoName35.png index 92d9ad784..70dc81443 100644 Binary files a/internal/static/performer/NoName35.png and b/internal/static/performer/NoName35.png differ diff --git 
a/internal/static/performer/NoName36.png b/internal/static/performer/NoName36.png deleted file mode 100644 index 7796c8b63..000000000 Binary files a/internal/static/performer/NoName36.png and /dev/null differ diff --git a/internal/static/performer/NoName36.svg b/internal/static/performer/NoName36.svg new file mode 100644 index 000000000..b69ce0aa3 --- /dev/null +++ b/internal/static/performer/NoName36.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName37.png b/internal/static/performer/NoName37.png deleted file mode 100644 index c47f0abac..000000000 Binary files a/internal/static/performer/NoName37.png and /dev/null differ diff --git a/internal/static/performer/NoName37.svg b/internal/static/performer/NoName37.svg new file mode 100644 index 000000000..d0053cb58 --- /dev/null +++ b/internal/static/performer/NoName37.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName38.png b/internal/static/performer/NoName38.png deleted file mode 100644 index da9fa37c9..000000000 Binary files a/internal/static/performer/NoName38.png and /dev/null differ diff --git a/internal/static/performer/NoName38.svg b/internal/static/performer/NoName38.svg new file mode 100644 index 000000000..0131c7efe --- /dev/null +++ b/internal/static/performer/NoName38.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName39.png b/internal/static/performer/NoName39.png deleted file mode 100644 index a7921d01d..000000000 Binary files a/internal/static/performer/NoName39.png and /dev/null differ diff --git a/internal/static/performer/NoName39.svg b/internal/static/performer/NoName39.svg new file mode 100644 index 000000000..6cc5080ac --- /dev/null +++ b/internal/static/performer/NoName39.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer/NoName40.png b/internal/static/performer/NoName40.png deleted file mode 100644 index 0214efad4..000000000 Binary 
files a/internal/static/performer/NoName40.png and /dev/null differ diff --git a/internal/static/performer/attribution.md b/internal/static/performer/attribution.md new file mode 100644 index 000000000..3cb40ca04 --- /dev/null +++ b/internal/static/performer/attribution.md @@ -0,0 +1,34 @@ +NoName02.svg - "[Exotic dancer silhouette](https://freesvg.org/exotic-dancer-silhouette)" by OpenClipart-Vectors under CC0 License +NoName05.svg - "[Fashion girl silhouette](https://creazilla.com/media/silhouette/76433/fashion-girl)" by Creazilla under CC0 License +NoName06.png - "[Woman, Female, Girl](https://pixabay.com/illustrations/woman-female-girl-lady-silhouette-163525/)" by No-longer-here under Pixabay License +NoName07.svg - "[Woman Silhouette 11](https://openclipart.org/detail/14083/woman-silhouette-11)" by nicubunu under CC0 License +NoName09.svg - "[Girl, Pose, Posing](https://pixabay.com/vectors/girl-pose-posing-female-woman-311535/)" by Clker-Free-Vector-Images under CC0 License +NoName11.png - "[Alpha Mask, Silhouette, Woman](https://pixabay.com/illustrations/alpha-mask-silhouette-woman-girl-3072470/)" by Wolfgang Eckert under Pixabay License +NoName12.svg - "[Dance, Dancer, Dancing](https://pixabay.com/vectors/dance-dancer-dancing-female-girl-2023863/)" by OpenClipart-Vectors under CC0 License +NoName13.svg - "[Dress, Silhouette, Woman](https://pixabay.com/vectors/dress-silhouette-woman-female-148745/)" by OpenClipart-Vectors under CC0 License +NoName14.svg - "[Woman in long dress silhouette](https://freesvg.org/woman-in-long-dress-silhouette)" by OpenClipart-Vectors under CC0 License +NoName17.svg - "[Female Model silhouette](https://creazilla.com/media/silhouette/2495/female-model)" by Natasha Sinegina under CC-BY-4.0 +NoName19.svg - "[Female, Girl, Heel](https://pixabay.com/vectors/female-girl-heel-silhouette-woman-2023898/)" by OpenClipart-Vectors under CC0 License +NoName21.svg - "[Lady, Silhouette, 
Woman](https://pixabay.com/vectors/lady-silhouette-woman-pink-296698/)" by Clker-Free-Vector-Images under CC0 License +NoName22.svg - "[Female, Girl, Heel](https://pixabay.com/vectors/female-girl-heel-silhouette-woman-2023856/)" by OpenClipart-Vectors under CC0 License +NoName23.svg - "[Woman, Female, Figure](https://pixabay.com/vectors/woman-female-figure-slender-slim-149723/)" by OpenClipart-Vectors under CC0 License +NoName24.svg - "[Silhouette, Woman, Bunny](https://pixabay.com/illustrations/silhouette-woman-bunny-girl-female-3196716/)" by Wolfgang Eckert under Pixabay License +NoName25.svg - "[Female, Girl, Silhouette](https://pixabay.com/vectors/female-girl-silhouette-woman-2023857/)" by OpenClipart-Vectors under CC0 License +NoName26.svg - "[Female, Girl, Silhouette](https://pixabay.com/vectors/female-girl-silhouette-woman-2024047/)" by OpenClipart-Vectors under CC0 License +NoName27.svg - "[Woman, School Clothes, Uniform](https://pixabay.com/illustrations/woman-school-clothes-uniform-644569/)" by Silvia under Pixabay License +NoName28.svg - "[Girl, Woman, Feminine](https://pixabay.com/illustrations/girl-woman-feminine-sensual-1369733/)" by Calzas under Pixabay License +NoName29.png - "[Alpha Mask, Silhouette, Woman](https://pixabay.com/illustrations/alpha-mask-silhouette-woman-girl-3066005/)" by Wolfgang Eckert under Pixabay License +NoName30.svg - "[Architetto](https://openclipart.org/detail/68047)" by Emilie Rollandin under CC0 License +NoName31.svg - "[Model silhouette](https://creazilla.com/media/silhouette/1785/model)" by Bob Comix under CC-BY-4.0 License +NoName32.svg - "[Fashion, Female, Girl](https://pixabay.com/vectors/fashion-female-girl-heel-model-2023859/)" by OpenClipart-Vectors under CC0 License +NoName33.png - "[Silhouette Donna 6](https://www.publicdomainpictures.net/view-image.php?image=82268)" by Tammy Sue under CC0 License +NoName34.svg - "[Donna in piedi 01](https://openclipart.org/detail/33139)" by Emilie Rollandin under CC0 License 
+NoName35.png - "[Silhouette, Woman, Young](https://pixabay.com/illustrations/silhouette-woman-young-move-female-3104942/)" by Wolfgang Eckert under Pixabay License +NoName36.svg - "[Fashion Model silhouette](https://creazilla.com/media/silhouette/2506/fashion-model)" by Natasha Sinegina under CC-BY-4.0 License +NoName37.svg - "[Female, Woman, Standing](https://pixabay.com/vectors/female-woman-standing-confident-2816234/)" by Mohamed Hassan under Pixabay License +NoName38.svg - "[Dress, Silhouette, Women](https://pixabay.com/vectors/dress-silhouette-women-dance-lady-3360422/)" by Mohamed Hassan under Pixabay License +NoName39.svg - "[Woman, Female, Lady](https://pixabay.com/illustrations/woman-female-lady-business-woman-220260/)" by No-longer-here under Pixabay License + +CC0 License: https://creativecommons.org/publicdomain/zero/1.0/ +CC-BY-4.0 License: https://creativecommons.org/licenses/by/4.0/ +Pixabay License: https://pixabay.com/service/license-summary/ \ No newline at end of file diff --git a/internal/static/performer_male/Male01.png b/internal/static/performer_male/Male01.png deleted file mode 100644 index 8a486299a..000000000 Binary files a/internal/static/performer_male/Male01.png and /dev/null differ diff --git a/internal/static/performer_male/Male01.svg b/internal/static/performer_male/Male01.svg new file mode 100644 index 000000000..72599423a --- /dev/null +++ b/internal/static/performer_male/Male01.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer_male/Male02.png b/internal/static/performer_male/Male02.png deleted file mode 100644 index 673b120eb..000000000 Binary files a/internal/static/performer_male/Male02.png and /dev/null differ diff --git a/internal/static/performer_male/Male02.svg b/internal/static/performer_male/Male02.svg new file mode 100644 index 000000000..1f7f4072e --- /dev/null +++ b/internal/static/performer_male/Male02.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git 
a/internal/static/performer_male/Male03.png b/internal/static/performer_male/Male03.png deleted file mode 100644 index 1814d05bb..000000000 Binary files a/internal/static/performer_male/Male03.png and /dev/null differ diff --git a/internal/static/performer_male/Male03.svg b/internal/static/performer_male/Male03.svg new file mode 100644 index 000000000..60e0857ce --- /dev/null +++ b/internal/static/performer_male/Male03.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer_male/Male04.png b/internal/static/performer_male/Male04.png deleted file mode 100644 index 9dd1f0bcc..000000000 Binary files a/internal/static/performer_male/Male04.png and /dev/null differ diff --git a/internal/static/performer_male/Male04.svg b/internal/static/performer_male/Male04.svg new file mode 100644 index 000000000..7e7e29fae --- /dev/null +++ b/internal/static/performer_male/Male04.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer_male/Male05.png b/internal/static/performer_male/Male05.png deleted file mode 100644 index 35231f914..000000000 Binary files a/internal/static/performer_male/Male05.png and /dev/null differ diff --git a/internal/static/performer_male/Male05.svg b/internal/static/performer_male/Male05.svg new file mode 100644 index 000000000..b41f8d1cd --- /dev/null +++ b/internal/static/performer_male/Male05.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer_male/Male06.png b/internal/static/performer_male/Male06.png deleted file mode 100644 index 9530d274a..000000000 Binary files a/internal/static/performer_male/Male06.png and /dev/null differ diff --git a/internal/static/performer_male/Male06.svg b/internal/static/performer_male/Male06.svg new file mode 100644 index 000000000..14578c380 --- /dev/null +++ b/internal/static/performer_male/Male06.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/internal/static/performer_male/attribution.md 
b/internal/static/performer_male/attribution.md new file mode 100644 index 000000000..119d73757 --- /dev/null +++ b/internal/static/performer_male/attribution.md @@ -0,0 +1,8 @@ +Male01.svg - "[Man Silhouette](https://freesvg.org/1528398040)" by "OpenClipart" under CC0 License +Male02.svg - "[Male pose silhouette](https://freesvg.org/male-pose-silhouette)" by OpenClipart under CC0 License +Male03.svg - "[Bald man walking in a suit silhouette vector image](https://freesvg.org/bald-man-walking-in-a-suit-silhouette-vector-image)" by OpenClipart under CC0 License +Male04.svg - "[Man silhouette vector clip art](https://freesvg.org/man-silhouette-vector-clip-art) by OpenClipart under CC0 License +Male05.svg - "[Man, Walking, Confident](https://pixabay.com/vectors/man-walking-confident-silhouette-2759950/)" by Mohamed Hassan under Pixabay License + +CC0 Licence: https://creativecommons.org/public-domain/cc0/ +Pixabay License: https://pixabay.com/service/license-summary/ \ No newline at end of file diff --git a/internal/static/performer_sfw/performer.svg b/internal/static/performer_sfw/performer.svg new file mode 100644 index 000000000..24b444171 --- /dev/null +++ b/internal/static/performer_sfw/performer.svg @@ -0,0 +1,7 @@ + + + \ No newline at end of file diff --git a/pkg/ffmpeg/codec_hardware.go b/pkg/ffmpeg/codec_hardware.go index 5151e7efe..66480c5bb 100644 --- a/pkg/ffmpeg/codec_hardware.go +++ b/pkg/ffmpeg/codec_hardware.go @@ -5,9 +5,11 @@ import ( "context" "fmt" "math" + "os" "regexp" "strconv" "strings" + "time" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -27,12 +29,39 @@ var ( VideoCodecIVP9 = makeVideoCodec("VP9 Intel Quick Sync Video (QSV)", "vp9_qsv") VideoCodecVVP9 = makeVideoCodec("VP9 VAAPI", "vp9_vaapi") VideoCodecVVPX = makeVideoCodec("VP8 VAAPI", "vp8_vaapi") + VideoCodecRK264 = makeVideoCodec("H264 Rockchip MPP (rkmpp)", "h264_rkmpp") ) const minHeight int = 480 // Tests all (given) hardware codec's func (f 
*FFMpeg) InitHWSupport(ctx context.Context) { + // do the hardware codec tests in a separate goroutine to avoid blocking + done := make(chan struct{}) + go func() { + f.initHWSupport(ctx) + close(done) + }() + + // log if the initialization takes too long + const hwInitLogTimeoutSecondsDefault = 5 + hwInitLogTimeoutSeconds := hwInitLogTimeoutSecondsDefault * time.Second + timer := time.NewTimer(hwInitLogTimeoutSeconds) + + go func() { + select { + case <-timer.C: + logger.Warnf("[InitHWSupport] Hardware codec initialization is taking longer than %s...", hwInitLogTimeoutSeconds) + logger.Info("[InitHWSupport] Hardware encoding will not be available until initialization is complete.") + case <-done: + if !timer.Stop() { + <-timer.C + } + } + }() +} + +func (f *FFMpeg) initHWSupport(ctx context.Context) { var hwCodecSupport []VideoCodec // Note that the first compatible codec is returned, so order is important @@ -43,6 +72,7 @@ func (f *FFMpeg) InitHWSupport(ctx context.Context) { VideoCodecI264C, VideoCodecV264, VideoCodecR264, + VideoCodecRK264, VideoCodecIVP9, VideoCodecVVP9, VideoCodecM264, @@ -64,12 +94,33 @@ func (f *FFMpeg) InitHWSupport(ctx context.Context) { args = args.Format("null") args = args.Output("-") - cmd := f.Command(ctx, args) + // #6064 - add timeout to context to prevent hangs + const hwTestTimeoutSecondsDefault = 10 + hwTestTimeoutSeconds := hwTestTimeoutSecondsDefault * time.Second + + // allow timeout to be overridden with environment variable + if timeout := os.Getenv("STASH_HW_TEST_TIMEOUT"); timeout != "" { + if seconds, err := strconv.Atoi(timeout); err == nil { + hwTestTimeoutSeconds = time.Duration(seconds) * time.Second + } + } + + testCtx, cancel := context.WithTimeout(ctx, hwTestTimeoutSeconds) + defer cancel() + + cmd := f.Command(testCtx, args) + cmd.WaitDelay = time.Second + logger.Tracef("[InitHWSupport] Testing codec %s: %v", codec, cmd.Args) var stderr bytes.Buffer cmd.Stderr = &stderr if err := cmd.Run(); err != nil { + if 
testCtx.Err() != nil { + logger.Debugf("[InitHWSupport] Codec %s test timed out after %s", codec, hwTestTimeoutSeconds) + continue + } + errOutput := stderr.String() if len(errOutput) == 0 { @@ -88,6 +139,8 @@ func (f *FFMpeg) InitHWSupport(ctx context.Context) { } logger.Info(outstr) + f.hwCodecSupportMutex.Lock() + defer f.hwCodecSupportMutex.Unlock() f.hwCodecSupport = hwCodecSupport } @@ -132,6 +185,12 @@ func (f *FFMpeg) hwCanFullHWTranscode(ctx context.Context, codec VideoCodec, vf // Prepend input for hardware encoding only func (f *FFMpeg) hwDeviceInit(args Args, toCodec VideoCodec, fullhw bool) Args { + // check for custom /dev/dri device #6435 + driDevice := os.Getenv("STASH_HW_DRI_DEVICE") + if driDevice == "" { + driDevice = "/dev/dri/renderD128" + } + switch toCodec { case VideoCodecN264, VideoCodecN264H: @@ -148,7 +207,7 @@ func (f *FFMpeg) hwDeviceInit(args Args, toCodec VideoCodec, fullhw bool) Args { case VideoCodecV264, VideoCodecVVP9: args = append(args, "-vaapi_device") - args = append(args, "/dev/dri/renderD128") + args = append(args, driDevice) if fullhw { args = append(args, "-hwaccel") args = append(args, "vaapi") @@ -179,6 +238,19 @@ func (f *FFMpeg) hwDeviceInit(args Args, toCodec VideoCodec, fullhw bool) Args { args = append(args, "-init_hw_device") args = append(args, "videotoolbox=vt") } + case VideoCodecRK264: + // Rockchip: always create rkmpp device and make it the filter device, so + // scale_rkrga and subsequent hwupload/hwmap operate in the right context. 
+ args = append(args, "-init_hw_device") + args = append(args, "rkmpp=rk") + args = append(args, "-filter_hw_device") + args = append(args, "rk") + if fullhw { + args = append(args, "-hwaccel") + args = append(args, "rkmpp") + args = append(args, "-hwaccel_output_format") + args = append(args, "drm_prime") + } } return args @@ -211,6 +283,14 @@ func (f *FFMpeg) hwFilterInit(toCodec VideoCodec, fullhw bool) VideoFilter { videoFilter = videoFilter.Append("format=nv12") videoFilter = videoFilter.Append("hwupload") } + case VideoCodecRK264: + // For Rockchip full-hw, do NOT pre-map to rkrga here. scale_rkrga can + // consume DRM_PRIME frames directly when filter_hw_device is set. + // For non-fullhw, keep a sane software format. + if !fullhw { + videoFilter = videoFilter.Append("format=nv12") + videoFilter = videoFilter.Append("hwupload") + } } return videoFilter @@ -288,6 +368,12 @@ func (f *FFMpeg) hwApplyFullHWFilter(args VideoFilter, codec VideoCodec, fullhw if fullhw && f.version.Gteq(Version{major: 3, minor: 3}) { // Added in FFMpeg 3.3 args = args.Append("scale_qsv=format=nv12") } + case VideoCodecRK264: + // Full-hw decode on 10-bit sources often produces DRM_PRIME with sw_pix_fmt=nv15. + // h264_rkmpp does NOT accept nv15, so we must force a conversion to nv12 + if fullhw { + args = args.Append("scale_rkrga=w=iw:h=ih:format=nv12") + } } return args @@ -315,6 +401,14 @@ func (f *FFMpeg) hwApplyScaleTemplate(sargs string, codec VideoCodec, match []in } case VideoCodecM264: template = "scale_vt=$value" + case VideoCodecRK264: + // The original filter chain is a fallback for maximum compatibility: + // "scale_rkrga=$value:format=nv12,hwdownload,format=nv12,hwupload" + // It avoids hwmap(rkrga→rkmpp) failures (-38/-12) seen on some builds + // by downloading the scaled frame to system RAM and re-uploading it. + // The filter chain below uses a zero-copy approach, passing the hardware-scaled + // frame directly to the encoder. 
This is more efficient but may be less stable. + template = "scale_rkrga=$value:format=nv12" default: return VideoFilter(sargs) } @@ -323,12 +417,15 @@ func (f *FFMpeg) hwApplyScaleTemplate(sargs string, codec VideoCodec, match []in isIntel := codec == VideoCodecI264 || codec == VideoCodecI264C || codec == VideoCodecIVP9 // BUG: scale_vt doesn't call ff_scale_adjust_dimensions, thus cant accept negative size values isApple := codec == VideoCodecM264 + // Rockchip's scale_rkrga supports -1/-2; don't apply minus-one hack here. return VideoFilter(templateReplaceScale(sargs, template, match, vf, isIntel || isApple)) } // Returns the max resolution for a given codec, or a default func (f *FFMpeg) hwCodecMaxRes(codec VideoCodec) (int, int) { switch codec { + case VideoCodecRK264: + return 8192, 8192 case VideoCodecN264, VideoCodecN264H, VideoCodecI264, @@ -352,7 +449,7 @@ func (f *FFMpeg) hwMaxResFilter(toCodec VideoCodec, vf *models.VideoFile, reqHei // Return if a hardware accelerated for HLS is available func (f *FFMpeg) hwCodecHLSCompatible() *VideoCodec { - for _, element := range f.hwCodecSupport { + for _, element := range f.getHWCodecSupport() { switch element { case VideoCodecN264, VideoCodecN264H, @@ -360,7 +457,8 @@ func (f *FFMpeg) hwCodecHLSCompatible() *VideoCodec { VideoCodecI264C, VideoCodecV264, VideoCodecR264, - VideoCodecM264: // Note that the Apple encoder sucks at startup, thus HLS quality is crap + VideoCodecM264, // Note that the Apple encoder sucks at startup, thus HLS quality is crap + VideoCodecRK264: return &element } } @@ -369,13 +467,14 @@ func (f *FFMpeg) hwCodecHLSCompatible() *VideoCodec { // Return if a hardware accelerated codec for MP4 is available func (f *FFMpeg) hwCodecMP4Compatible() *VideoCodec { - for _, element := range f.hwCodecSupport { + for _, element := range f.getHWCodecSupport() { switch element { case VideoCodecN264, VideoCodecN264H, VideoCodecI264, VideoCodecI264C, - VideoCodecM264: + VideoCodecM264, + VideoCodecRK264: 
return &element } } @@ -384,7 +483,7 @@ func (f *FFMpeg) hwCodecMP4Compatible() *VideoCodec { // Return if a hardware accelerated codec for WebM is available func (f *FFMpeg) hwCodecWEBMCompatible() *VideoCodec { - for _, element := range f.hwCodecSupport { + for _, element := range f.getHWCodecSupport() { switch element { case VideoCodecIVP9, VideoCodecVVP9: diff --git a/pkg/ffmpeg/ffmpeg.go b/pkg/ffmpeg/ffmpeg.go index ce1232e5d..04c58f04b 100644 --- a/pkg/ffmpeg/ffmpeg.go +++ b/pkg/ffmpeg/ffmpeg.go @@ -10,6 +10,7 @@ import ( "regexp" "strconv" "strings" + "sync" stashExec "github.com/stashapp/stash/pkg/exec" "github.com/stashapp/stash/pkg/fsutil" @@ -216,9 +217,10 @@ func (v Version) String() string { // FFMpeg provides an interface to ffmpeg. type FFMpeg struct { - ffmpeg string - version Version - hwCodecSupport []VideoCodec + ffmpeg string + version Version + hwCodecSupport []VideoCodec + hwCodecSupportMutex sync.RWMutex } // Creates a new FFMpeg encoder @@ -241,3 +243,9 @@ func (f *FFMpeg) Command(ctx context.Context, args []string) *exec.Cmd { func (f *FFMpeg) Path() string { return f.ffmpeg } + +func (f *FFMpeg) getHWCodecSupport() []VideoCodec { + f.hwCodecSupportMutex.RLock() + defer f.hwCodecSupportMutex.RUnlock() + return f.hwCodecSupport +} diff --git a/pkg/ffmpeg/transcoder/screenshot.go b/pkg/ffmpeg/transcoder/screenshot.go index c3343d594..c65f23941 100644 --- a/pkg/ffmpeg/transcoder/screenshot.go +++ b/pkg/ffmpeg/transcoder/screenshot.go @@ -9,7 +9,11 @@ type ScreenshotOptions struct { // Quality is the quality scale. See https://ffmpeg.org/ffmpeg.html#Main-options Quality int + // Width is the width to scale the screenshot to. If 0, no scaling will be applied. Width int + // Height is the height to scale the screenshot to. If 0, no scaling will be applied. + // Not used if Width is set. + Height int // Verbosity is the logging verbosity. Defaults to LogLevelError if not set. 
Verbosity ffmpeg.LogLevel @@ -70,6 +74,9 @@ func ScreenshotTime(input string, t float64, options ScreenshotOptions) ffmpeg.A if options.Width > 0 { vf = vf.ScaleWidth(options.Width) args = args.VideoFilter(vf) + } else if options.Height > 0 { + vf = vf.ScaleHeight(options.Height) + args = args.VideoFilter(vf) } args = args.AppendArgs(options.OutputType) diff --git a/pkg/file/clean.go b/pkg/file/clean.go index 8c54fd0e0..369600f4c 100644 --- a/pkg/file/clean.go +++ b/pkg/file/clean.go @@ -18,7 +18,8 @@ type Cleaner struct { FS models.FS Repository Repository - Handlers []CleanHandler + Handlers []CleanHandler + TrashPath string } type cleanJob struct { @@ -32,6 +33,11 @@ type cleanJob struct { type CleanOptions struct { Paths []string + // IgnoreZipFileContents will skip checking the contents of zip files when determining whether to clean a file. + // This can significantly speed up the clean process, but will potentially miss removed files within zip files. + // Where users do not modify zip file contents directly, this should be safe to use. + IgnoreZipFileContents bool + // Do a dry run. 
Don't delete any files DryRun bool @@ -173,13 +179,16 @@ func (j *cleanJob) assessFiles(ctx context.Context, toDelete *deleteSet) error { more := true r := j.Repository + + includeZipContents := !j.options.IgnoreZipFileContents + if err := r.WithReadTxn(ctx, func(ctx context.Context) error { for more { if job.IsCancelled(ctx) { return nil } - files, err := r.File.FindAllInPaths(ctx, j.options.Paths, batchSize, offset) + files, err := r.File.FindAllInPaths(ctx, j.options.Paths, includeZipContents, batchSize, offset) if err != nil { return fmt.Errorf("error querying for files: %w", err) } @@ -257,6 +266,8 @@ func (j *cleanJob) assessFolders(ctx context.Context, toDelete *deleteSet) error offset := 0 progress := j.progress + includeZipContents := !j.options.IgnoreZipFileContents + more := true r := j.Repository if err := r.WithReadTxn(ctx, func(ctx context.Context) error { @@ -265,7 +276,7 @@ func (j *cleanJob) assessFolders(ctx context.Context, toDelete *deleteSet) error return nil } - folders, err := r.Folder.FindAllInPaths(ctx, j.options.Paths, batchSize, offset) + folders, err := r.Folder.FindAllInPaths(ctx, j.options.Paths, includeZipContents, batchSize, offset) if err != nil { return fmt.Errorf("error querying for folders: %w", err) } @@ -347,8 +358,14 @@ func (j *cleanJob) shouldClean(ctx context.Context, f models.File) bool { // run through path filter, if returns false then the file should be cleaned filter := j.options.PathFilter + // need to get the zip file path if present + zipFilePath := "" + if f.Base().ZipFile != nil { + zipFilePath = f.Base().ZipFile.Base().Path + } + // don't log anything - assume filter will have logged the reason - return !filter.Accept(ctx, path, info) + return !filter.Accept(ctx, path, info, zipFilePath) } func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *models.Folder) bool { @@ -386,13 +403,19 @@ func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *models.Folder) bool // run through path filter, if returns 
false then the file should be cleaned filter := j.options.PathFilter + // need to get the zip file path if present + zipFilePath := "" + if f.ZipFile != nil { + zipFilePath = f.ZipFile.Base().Path + } + // don't log anything - assume filter will have logged the reason - return !filter.Accept(ctx, path, info) + return !filter.Accept(ctx, path, info, zipFilePath) } func (j *cleanJob) deleteFile(ctx context.Context, fileID models.FileID, fn string) { // delete associated objects - fileDeleter := NewDeleter() + fileDeleter := NewDeleterWithTrash(j.TrashPath) r := j.Repository if err := r.WithTxn(ctx, func(ctx context.Context) error { fileDeleter.RegisterHooks(ctx) @@ -410,7 +433,7 @@ func (j *cleanJob) deleteFile(ctx context.Context, fileID models.FileID, fn stri func (j *cleanJob) deleteFolder(ctx context.Context, folderID models.FolderID, fn string) { // delete associated objects - fileDeleter := NewDeleter() + fileDeleter := NewDeleterWithTrash(j.TrashPath) r := j.Repository if err := r.WithTxn(ctx, func(ctx context.Context) error { fileDeleter.RegisterHooks(ctx) diff --git a/pkg/file/delete.go b/pkg/file/delete.go index 88eb5169e..c36068faa 100644 --- a/pkg/file/delete.go +++ b/pkg/file/delete.go @@ -58,20 +58,33 @@ func newRenamerRemoverImpl() renamerRemoverImpl { // Deleter is used to safely delete files and directories from the filesystem. // During a transaction, files and directories are marked for deletion using -// the Files and Dirs methods. This will rename the files/directories to be -// deleted. If the transaction is rolled back, then the files/directories can -// be restored to their original state with the Abort method. If the -// transaction is committed, the marked files are then deleted from the -// filesystem using the Complete method. +// the Files and Dirs methods. If TrashPath is set, files are moved to trash +// immediately. Otherwise, they are renamed with a .delete suffix. 
If the +// transaction is rolled back, then the files/directories can be restored to +// their original state with the Rollback method. If the transaction is +// committed, the marked files are then deleted from the filesystem using the +// Commit method. type Deleter struct { RenamerRemover RenamerRemover files []string dirs []string + TrashPath string // if set, files will be moved to this directory instead of being permanently deleted + trashedPaths map[string]string // map of original path -> trash path (only used when TrashPath is set) } func NewDeleter() *Deleter { return &Deleter{ RenamerRemover: newRenamerRemoverImpl(), + TrashPath: "", + trashedPaths: make(map[string]string), + } +} + +func NewDeleterWithTrash(trashPath string) *Deleter { + return &Deleter{ + RenamerRemover: newRenamerRemoverImpl(), + TrashPath: trashPath, + trashedPaths: make(map[string]string), } } @@ -92,6 +105,17 @@ func (d *Deleter) RegisterHooks(ctx context.Context) { // Abort should be called to restore marked files if this function returns an // error. func (d *Deleter) Files(paths []string) error { + return d.filesInternal(paths, false) +} + +// FilesWithoutTrash designates files to be deleted, bypassing the trash directory. +// Files will be permanently deleted even if TrashPath is configured. +// This is useful for deleting generated files that can be easily recreated. 
+func (d *Deleter) FilesWithoutTrash(paths []string) error { + return d.filesInternal(paths, true) +} + +func (d *Deleter) filesInternal(paths []string, bypassTrash bool) error { for _, p := range paths { // fail silently if the file does not exist if _, err := d.RenamerRemover.Stat(p); err != nil { @@ -103,7 +127,7 @@ func (d *Deleter) Files(paths []string) error { return fmt.Errorf("check file %q exists: %w", p, err) } - if err := d.renameForDelete(p); err != nil { + if err := d.renameForDelete(p, bypassTrash); err != nil { return fmt.Errorf("marking file %q for deletion: %w", p, err) } d.files = append(d.files, p) @@ -118,6 +142,17 @@ func (d *Deleter) Files(paths []string) error { // Abort should be called to restore marked files/directories if this function returns an // error. func (d *Deleter) Dirs(paths []string) error { + return d.dirsInternal(paths, false) +} + +// DirsWithoutTrash designates directories to be deleted, bypassing the trash directory. +// Directories will be permanently deleted even if TrashPath is configured. +// This is useful for deleting generated directories that can be easily recreated. +func (d *Deleter) DirsWithoutTrash(paths []string) error { + return d.dirsInternal(paths, true) +} + +func (d *Deleter) dirsInternal(paths []string, bypassTrash bool) error { for _, p := range paths { // fail silently if the file does not exist if _, err := d.RenamerRemover.Stat(p); err != nil { @@ -129,7 +164,7 @@ func (d *Deleter) Dirs(paths []string) error { return fmt.Errorf("check directory %q exists: %w", p, err) } - if err := d.renameForDelete(p); err != nil { + if err := d.renameForDelete(p, bypassTrash); err != nil { return fmt.Errorf("marking directory %q for deletion: %w", p, err) } d.dirs = append(d.dirs, p) @@ -150,33 +185,65 @@ func (d *Deleter) Rollback() { d.files = nil d.dirs = nil + d.trashedPaths = make(map[string]string) } // Commit deletes all files marked for deletion and clears the marked list. 
+// When using trash, files have already been moved during renameForDelete, so +// this just clears the tracking. Otherwise, permanently delete the .delete files. // Any errors encountered are logged. All files will be attempted, regardless // of the errors encountered. func (d *Deleter) Commit() { - for _, f := range d.files { - if err := d.RenamerRemover.Remove(f + deleteFileSuffix); err != nil { - logger.Warnf("Error deleting file %q: %v", f+deleteFileSuffix, err) + if d.TrashPath != "" { + // Files were already moved to trash during renameForDelete, just clear tracking + logger.Debugf("Commit: %d files and %d directories already in trash, clearing tracking", len(d.files), len(d.dirs)) + } else { + // Permanently delete files and directories marked with .delete suffix + for _, f := range d.files { + if err := d.RenamerRemover.Remove(f + deleteFileSuffix); err != nil { + logger.Warnf("Error deleting file %q: %v", f+deleteFileSuffix, err) + } } - } - for _, f := range d.dirs { - if err := d.RenamerRemover.RemoveAll(f + deleteFileSuffix); err != nil { - logger.Warnf("Error deleting directory %q: %v", f+deleteFileSuffix, err) + for _, f := range d.dirs { + if err := d.RenamerRemover.RemoveAll(f + deleteFileSuffix); err != nil { + logger.Warnf("Error deleting directory %q: %v", f+deleteFileSuffix, err) + } } } d.files = nil d.dirs = nil + d.trashedPaths = make(map[string]string) } -func (d *Deleter) renameForDelete(path string) error { +func (d *Deleter) renameForDelete(path string, bypassTrash bool) error { + if d.TrashPath != "" && !bypassTrash { + // Move file to trash immediately + trashDest, err := fsutil.MoveToTrash(path, d.TrashPath) + if err != nil { + return err + } + d.trashedPaths[path] = trashDest + logger.Infof("Moved %q to trash at %s", path, trashDest) + return nil + } + + // Standard behavior: rename with .delete suffix (or when bypassing trash) return d.RenamerRemover.Rename(path, path+deleteFileSuffix) } func (d *Deleter) renameForRestore(path 
string) error { + if d.TrashPath != "" { + // Restore file from trash + trashPath, ok := d.trashedPaths[path] + if !ok { + return fmt.Errorf("no trash path found for %q", path) + } + return d.RenamerRemover.Rename(trashPath, path) + } + + // Standard behavior: restore from .delete suffix return d.RenamerRemover.Rename(path+deleteFileSuffix, path) } diff --git a/pkg/file/file.go b/pkg/file/file.go index 407949ba1..b93083b35 100644 --- a/pkg/file/file.go +++ b/pkg/file/file.go @@ -3,6 +3,10 @@ package file import ( "context" + "fmt" + "io/fs" + "os" + "time" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" @@ -35,3 +39,23 @@ func (r *Repository) WithReadTxn(ctx context.Context, fn txn.TxnFunc) error { func (r *Repository) WithDB(ctx context.Context, fn txn.TxnFunc) error { return txn.WithDatabase(ctx, r.TxnManager, fn) } + +// ModTime returns the modification time truncated to seconds. +func ModTime(info fs.FileInfo) time.Time { + // truncate to seconds, since we don't store beyond that in the database + return info.ModTime().Truncate(time.Second) +} + +// GetFileSize gets the size of the file, taking into account symlinks. +func GetFileSize(f models.FS, path string, info fs.FileInfo) (int64, error) { + // #2196/#3042 - replace size with target size if file is a symlink + if info.Mode()&os.ModeSymlink == os.ModeSymlink { + targetInfo, err := f.Stat(path) + if err != nil { + return 0, fmt.Errorf("reading info for symlink %q: %w", path, err) + } + return targetInfo.Size(), nil + } + + return info.Size(), nil +} diff --git a/pkg/file/folder.go b/pkg/file/folder.go index 451bb1d93..249f73a7a 100644 --- a/pkg/file/folder.go +++ b/pkg/file/folder.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "path/filepath" + "slices" "strings" "time" @@ -12,26 +13,45 @@ import ( ) // GetOrCreateFolderHierarchy gets the folder for the given path, or creates a folder hierarchy for the given path if one if no existing folder is found. 
-// Does not create any folders in the file system -func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreator, path string) (*models.Folder, error) { +// Creates folder entries for each level of the hierarchy that doesn't already exist, up to the provided root paths. +// Does not create any folders in the file system. +func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreator, path string, rootPaths []string) (*models.Folder, error) { // get or create folder hierarchy - folder, err := fc.FindByPath(ctx, path) + // assume case sensitive when searching for the folder + const caseSensitive = true + folder, err := fc.FindByPath(ctx, path, caseSensitive) if err != nil { return nil, err } if folder == nil { - parentPath := filepath.Dir(path) - parent, err := GetOrCreateFolderHierarchy(ctx, fc, parentPath) - if err != nil { - return nil, err + var parentID *models.FolderID + + if !slices.Contains(rootPaths, path) { + parentPath := filepath.Dir(path) + + // safety check - don't allow parent path to be the same as the current path, + // otherwise we could end up in an infinite loop + if parentPath == path { + // #6618 - log a warning and return nil for the parent ID, + // which will cause the folder to be created with no parent + logger.Warnf("parent path is the same as the current path: %s", path) + return nil, nil + } + + parent, err := GetOrCreateFolderHierarchy(ctx, fc, parentPath, rootPaths) + if err != nil { + return nil, err + } + + parentID = &parent.ID } now := time.Now() folder = &models.Folder{ Path: path, - ParentFolderID: &parent.ID, + ParentFolderID: parentID, DirEntry: models.DirEntry{ // leave mod time empty for now - it will be updated when the folder is scanned }, @@ -39,6 +59,8 @@ func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreat UpdatedAt: now, } + logger.Infof("%s doesn't exist. 
Creating new folder entry...", path) + if err = fc.Create(ctx, folder); err != nil { return nil, fmt.Errorf("creating folder %s: %w", path, err) } @@ -47,12 +69,18 @@ func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreat return folder, nil } -func transferZipHierarchy(ctx context.Context, folderStore models.FolderReaderWriter, files models.FileFinderUpdater, zipFileID models.FileID, oldPath string, newPath string) error { - if err := transferZipFolderHierarchy(ctx, folderStore, zipFileID, oldPath, newPath); err != nil { +type zipHierarchyMover struct { + folderStore models.FolderReaderWriter + files models.FileFinderUpdater + rootPaths []string +} + +func (m zipHierarchyMover) transferZipHierarchy(ctx context.Context, zipFileID models.FileID, oldPath string, newPath string) error { + if err := m.transferZipFolderHierarchy(ctx, zipFileID, oldPath, newPath); err != nil { return fmt.Errorf("moving folder hierarchy for file %s: %w", oldPath, err) } - if err := transferZipFileEntries(ctx, folderStore, files, zipFileID, oldPath, newPath); err != nil { + if err := m.transferZipFileEntries(ctx, zipFileID, oldPath, newPath); err != nil { return fmt.Errorf("moving zip file contents for file %s: %w", oldPath, err) } @@ -61,8 +89,8 @@ func transferZipHierarchy(ctx context.Context, folderStore models.FolderReaderWr // transferZipFolderHierarchy creates the folder hierarchy for zipFileID under newPath, and removes // ZipFileID from folders under oldPath. 
-func transferZipFolderHierarchy(ctx context.Context, folderStore models.FolderReaderWriter, zipFileID models.FileID, oldPath string, newPath string) error { - zipFolders, err := folderStore.FindByZipFileID(ctx, zipFileID) +func (m zipHierarchyMover) transferZipFolderHierarchy(ctx context.Context, zipFileID models.FileID, oldPath string, newPath string) error { + zipFolders, err := m.folderStore.FindByZipFileID(ctx, zipFileID) if err != nil { return err } @@ -81,7 +109,7 @@ func transferZipFolderHierarchy(ctx context.Context, folderStore models.FolderRe } newZfPath := filepath.Join(newPath, relZfPath) - newFolder, err := GetOrCreateFolderHierarchy(ctx, folderStore, newZfPath) + newFolder, err := GetOrCreateFolderHierarchy(ctx, m.folderStore, newZfPath, m.rootPaths) if err != nil { return err } @@ -89,14 +117,14 @@ func transferZipFolderHierarchy(ctx context.Context, folderStore models.FolderRe // add ZipFileID to new folder logger.Debugf("adding zip file %s to folder %s", zipFileID, newFolder.Path) newFolder.ZipFileID = &zipFileID - if err = folderStore.Update(ctx, newFolder); err != nil { + if err = m.folderStore.Update(ctx, newFolder); err != nil { return err } // remove ZipFileID from old folder logger.Debugf("removing zip file %s from folder %s", zipFileID, oldFolder.Path) oldFolder.ZipFileID = nil - if err = folderStore.Update(ctx, oldFolder); err != nil { + if err = m.folderStore.Update(ctx, oldFolder); err != nil { return err } } @@ -104,9 +132,9 @@ func transferZipFolderHierarchy(ctx context.Context, folderStore models.FolderRe return nil } -func transferZipFileEntries(ctx context.Context, folders models.FolderFinderCreator, files models.FileFinderUpdater, zipFileID models.FileID, oldPath, newPath string) error { +func (m zipHierarchyMover) transferZipFileEntries(ctx context.Context, zipFileID models.FileID, oldPath, newPath string) error { // move contained files if file is a zip file - zipFiles, err := files.FindByZipFileID(ctx, zipFileID) + zipFiles, err 
:= m.files.FindByZipFileID(ctx, zipFileID) if err != nil { return fmt.Errorf("finding contained files in file %s: %w", oldPath, err) } @@ -127,7 +155,7 @@ func transferZipFileEntries(ctx context.Context, folders models.FolderFinderCrea newZfDir := filepath.Join(newPath, relZfDir) // folder should have been created by transferZipFolderHierarchy - newZfFolder, err := GetOrCreateFolderHierarchy(ctx, folders, newZfDir) + newZfFolder, err := GetOrCreateFolderHierarchy(ctx, m.folderStore, newZfDir, m.rootPaths) if err != nil { return fmt.Errorf("getting or creating folder hierarchy: %w", err) } @@ -135,7 +163,7 @@ func transferZipFileEntries(ctx context.Context, folders models.FolderFinderCrea // update file parent folder zfBase.ParentFolderID = newZfFolder.ID logger.Debugf("moving %s to folder %s", zfBase.Path, newZfFolder.Path) - if err := files.Update(ctx, zf); err != nil { + if err := m.files.Update(ctx, zf); err != nil { return fmt.Errorf("updating file %s: %w", oldZfPath, err) } } diff --git a/pkg/file/folder_rename_detect.go b/pkg/file/folder_rename_detect.go index 4f6d31bd5..d45593b28 100644 --- a/pkg/file/folder_rename_detect.go +++ b/pkg/file/folder_rename_detect.go @@ -2,7 +2,6 @@ package file import ( "context" - "errors" "fmt" "io/fs" @@ -75,7 +74,7 @@ func (d *folderRenameDetector) bestCandidate() *models.Folder { return best.folder } -func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*models.Folder, error) { +func (s *Scanner) detectFolderMove(ctx context.Context, file ScannedFile) (*models.Folder, error) { // in order for a folder to be considered moved, the existing folder must be // missing, and the majority of the old folder's files must be present, unchanged, // in the new folder. @@ -88,7 +87,12 @@ func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*models. 
r := s.Repository - if err := symWalk(file.fs, file.Path, func(path string, d fs.DirEntry, err error) error { + zipFilePath := "" + if file.ZipFile != nil { + zipFilePath = file.ZipFile.Base().Path + } + + if err := SymWalk(file.FS, file.Path, func(path string, d fs.DirEntry, err error) error { if err != nil { // don't let errors prevent scanning logger.Errorf("error scanning %s: %v", path, err) @@ -107,14 +111,15 @@ func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*models. info, err := d.Info() if err != nil { - return fmt.Errorf("reading info for %q: %w", path, err) - } - - if !s.acceptEntry(ctx, path, info) { + logger.Errorf("reading info for %q: %v", path, err) return nil } - size, err := getFileSize(file.fs, path, info) + if !s.AcceptEntry(ctx, path, info, zipFilePath) { + return nil + } + + size, err := GetFileSize(file.FS, path, info) if err != nil { return fmt.Errorf("getting file size for %q: %w", path, err) } @@ -153,16 +158,14 @@ func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*models. } // parent folder must be missing - _, err = file.fs.Lstat(pf.Path) + _, err = file.FS.Lstat(pf.Path) if err == nil { // parent folder exists, not a candidate detector.reject(parentFolderID) continue } - if !errors.Is(err, fs.ErrNotExist) { - return fmt.Errorf("checking for parent folder %q: %w", pf.Path, err) - } + // treat any error as missing folder // parent folder is missing, possible candidate // count the total number of files in the existing folder diff --git a/pkg/file/handler.go b/pkg/file/handler.go index 10616eefa..b4056f195 100644 --- a/pkg/file/handler.go +++ b/pkg/file/handler.go @@ -9,7 +9,7 @@ import ( // PathFilter provides a filter function for paths. 
type PathFilter interface { - Accept(ctx context.Context, path string, info fs.FileInfo) bool + Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool } type PathFilterFunc func(path string) bool diff --git a/pkg/file/image/orientation.go b/pkg/file/image/orientation.go index 84f5774cf..0d9ebb2e3 100644 --- a/pkg/file/image/orientation.go +++ b/pkg/file/image/orientation.go @@ -4,6 +4,7 @@ import ( "errors" "fmt" "io" + "strings" "github.com/rwcarlsen/goexif/exif" "github.com/stashapp/stash/pkg/logger" @@ -33,7 +34,7 @@ func areDimensionsFlipped(fs models.FS, path string) (bool, error) { x, err := exif.Decode(r) if err != nil { - if errors.Is(err, io.EOF) { + if errors.Is(err, io.EOF) || strings.Contains(err.Error(), "failed to find exif") { // no exif data return false, nil } diff --git a/pkg/file/image/scan.go b/pkg/file/image/scan.go index a1d63f649..7ac69480c 100644 --- a/pkg/file/image/scan.go +++ b/pkg/file/image/scan.go @@ -2,8 +2,11 @@ package image import ( "context" + "errors" "fmt" "image" + "path/filepath" + "strings" _ "image/gif" _ "image/jpeg" @@ -17,6 +20,8 @@ import ( _ "golang.org/x/image/webp" ) +var ErrUnsupportedAVIFInZip = errors.New("AVIF images in zip files is unsupported") + // Decorator adds image specific fields to a File. 
type Decorator struct { FFProbe *ffmpeg.FFProbe @@ -28,6 +33,10 @@ func (d *Decorator) Decorate(ctx context.Context, fs models.FS, f models.File) ( // ignore clips in non-OsFS filesystems as ffprobe cannot read them // TODO - copy to temp file if not an OsFS if _, isOs := fs.(*file.OsFS); !isOs { + // AVIF images inside zip files are not supported + if strings.ToLower(filepath.Ext(base.Path)) == ".avif" { + return nil, fmt.Errorf("%w: %s", ErrUnsupportedAVIFInZip, base.Path) + } logger.Debugf("assuming ImageFile for non-OsFS file %q", base.Path) return decorateFallback(fs, f) } @@ -50,7 +59,7 @@ func (d *Decorator) Decorate(ctx context.Context, fs models.FS, f models.File) ( isClip := true // This list is derived from ffmpegImageThumbnail in pkg/image/thumbnail. If one gets updated, the other should be as well - for _, item := range []string{"png", "mjpeg", "webp", "bmp"} { + for _, item := range []string{"png", "mjpeg", "webp", "bmp", "jpegxl"} { if item == probe.VideoCodec { isClip = false } @@ -67,6 +76,25 @@ func (d *Decorator) Decorate(ctx context.Context, fs models.FS, f models.File) ( Height: probe.Height, } + // FFprobe has a known bug where it returns 0x0 dimensions for some animated WebP files + // Fall back to image.DecodeConfig in this case. + // See: https://trac.ffmpeg.org/ticket/4907 + if ret.Width == 0 || ret.Height == 0 { + logger.Warnf("FFprobe returned invalid dimensions (%dx%d) for %q, trying fallback decoder", ret.Width, ret.Height, base.Path) + c, format, err := decodeConfig(fs, base.Path) + if err != nil { + logger.Warnf("Fallback decoder failed for %q: %s. 
Proceeding with original FFprobe result", base.Path, err) + } else { + ret.Width = c.Width + ret.Height = c.Height + // Update format if it differs (fallback decoder may be more accurate) + if format != "" && format != ret.Format { + logger.Debugf("Updating format from %q to %q for %q", ret.Format, format, base.Path) + ret.Format = format + } + } + } + adjustForOrientation(fs, base.Path, ret) return ret, nil diff --git a/pkg/file/import.go b/pkg/file/import.go index 7c28197b8..8ca7487cb 100644 --- a/pkg/file/import.go +++ b/pkg/file/import.go @@ -120,7 +120,7 @@ func (i *Importer) baseFileJSONToBaseFile(ctx context.Context, baseJSON *jsonsch func (i *Importer) populateZipFileID(ctx context.Context, f *models.DirEntry) error { zipFilePath := i.Input.DirEntry().ZipFile if zipFilePath != "" { - zf, err := i.ReaderWriter.FindByPath(ctx, zipFilePath) + zf, err := i.ReaderWriter.FindByPath(ctx, zipFilePath, true) if err != nil { return fmt.Errorf("error finding file by path %q: %v", zipFilePath, err) } @@ -146,7 +146,7 @@ func (i *Importer) Name() string { func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { path := i.Input.DirEntry().Path - existing, err := i.ReaderWriter.FindByPath(ctx, path) + existing, err := i.ReaderWriter.FindByPath(ctx, path, true) if err != nil { return nil, err } @@ -176,7 +176,7 @@ func (i *Importer) createFolderHierarchy(ctx context.Context, p string) (*models } func (i *Importer) getOrCreateFolder(ctx context.Context, path string, parent *models.Folder) (*models.Folder, error) { - folder, err := i.FolderStore.FindByPath(ctx, path) + folder, err := i.FolderStore.FindByPath(ctx, path, true) if err != nil { return nil, err } diff --git a/pkg/file/move.go b/pkg/file/move.go index ba2a496bb..1f0a5012c 100644 --- a/pkg/file/move.go +++ b/pkg/file/move.go @@ -45,9 +45,12 @@ type Mover struct { moved map[string]string foldersCreated []string + + // needed for creating folder hierarchy when moving zip file entries + rootPaths 
[]string } -func NewMover(fileStore models.FileFinderUpdater, folderStore models.FolderReaderWriter) *Mover { +func NewMover(fileStore models.FileFinderUpdater, folderStore models.FolderReaderWriter, rootPaths []string) *Mover { return &Mover{ Files: fileStore, Folders: folderStore, @@ -55,6 +58,7 @@ func NewMover(fileStore models.FileFinderUpdater, folderStore models.FolderReade renamerRemoverImpl: newRenamerRemoverImpl(), mkDirFn: os.Mkdir, }, + rootPaths: rootPaths, } } @@ -87,7 +91,13 @@ func (m *Mover) Move(ctx context.Context, f models.File, folder *models.Folder, return fmt.Errorf("file %s already exists", newPath) } - if err := transferZipHierarchy(ctx, m.Folders, m.Files, fBase.ID, oldPath, newPath); err != nil { + zipMover := zipHierarchyMover{ + folderStore: m.Folders, + files: m.Files, + rootPaths: m.rootPaths, + } + + if err := zipMover.transferZipHierarchy(ctx, fBase.ID, oldPath, newPath); err != nil { return fmt.Errorf("moving folder hierarchy for file %s: %w", fBase.Path, err) } @@ -195,6 +205,25 @@ func correctSubFolderHierarchy(ctx context.Context, rw models.FolderReaderWriter logger.Debugf("updating folder %s to %s", oldPath, correctPath) + // #6427 - ensure folder entry with new path doesn't already exist + const caseSensitive = true + existing, err := rw.FindByPath(ctx, correctPath, caseSensitive) + if err != nil { + return fmt.Errorf("finding folder by path %s: %w", correctPath, err) + } + + if existing != nil { + // this should no longer be possible, but if it does happen, log a warning + // and skip updating this folder and its subfolders + logger.Warnf("folder with path %s already exists, setting parent_folder_id of %s to NULL and skipping", correctPath, oldPath) + f.ParentFolderID = nil + if err := rw.Update(ctx, f); err != nil { + return fmt.Errorf("updating folder parent id to NULL for folder %s: %w", oldPath, err) + } + + continue + } + f.Path = correctPath if err := rw.Update(ctx, f); err != nil { return fmt.Errorf("updating folder 
path %s -> %s: %w", oldPath, f.Path, err) diff --git a/pkg/file/scan.go b/pkg/file/scan.go index 8b0ec956e..4cfcaf7ae 100644 --- a/pkg/file/scan.go +++ b/pkg/file/scan.go @@ -2,27 +2,17 @@ package file import ( "context" - "errors" "fmt" "io/fs" - "os" "path/filepath" + "slices" "strings" "sync" "time" - "github.com/remeh/sizedwaitgroup" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" - "github.com/stashapp/stash/pkg/utils" -) - -const ( - scanQueueSize = 200000 - // maximum number of times to retry in the event of a locked database - // use -1 to retry forever - maxRetries = -1 ) // Scanner scans files into the database. @@ -55,8 +45,30 @@ type Scanner struct { Repository Repository FingerprintCalculator FingerprintCalculator + // ZipFileExtensions is a list of file extensions that are considered zip files. + // Extension does not include the . character. + ZipFileExtensions []string + + // ScanFilters are used to determine if a file should be scanned. + ScanFilters []PathFilter + + // HandlerRequiredFilters are used to determine if an unchanged file needs to be handled + HandlerRequiredFilters []Filter + // FileDecorators are applied to files as they are scanned. FileDecorators []Decorator + + // handlers are called after a file has been scanned. + FileHandlers []Handler + + // RootPaths form the top-level paths for the library. + // Used to determine the root of the folder hierarchy when creating folders. + RootPaths []string + + // Rescan indicates whether files should be rescanned even if they haven't changed. + Rescan bool + + folderPathToID sync.Map } // FingerprintCalculator calculates a fingerprint for the provided file. @@ -91,247 +103,20 @@ func (d *FilteredDecorator) IsMissingMetadata(ctx context.Context, fs models.FS, return false } -// ProgressReporter is used to report progress of the scan. 
-type ProgressReporter interface { - AddTotal(total int) - Increment() - Definite() - ExecuteTask(description string, fn func()) -} - -type scanJob struct { - *Scanner - - // handlers are called after a file has been scanned. - handlers []Handler - - ProgressReports ProgressReporter - options ScanOptions - - startTime time.Time - fileQueue chan scanFile - retryList []scanFile - retrying bool - folderPathToID sync.Map - zipPathToID sync.Map - count int - - txnRetryer txn.Retryer -} - -// ScanOptions provides options for scanning files. -type ScanOptions struct { - Paths []string - - // ZipFileExtensions is a list of file extensions that are considered zip files. - // Extension does not include the . character. - ZipFileExtensions []string - - // ScanFilters are used to determine if a file should be scanned. - ScanFilters []PathFilter - - // HandlerRequiredFilters are used to determine if an unchanged file needs to be handled - HandlerRequiredFilters []Filter - - ParallelTasks int - - // When true files in path will be rescanned even if they haven't changed - Rescan bool -} - -// Scan starts the scanning process. -func (s *Scanner) Scan(ctx context.Context, handlers []Handler, options ScanOptions, progressReporter ProgressReporter) { - job := &scanJob{ - Scanner: s, - handlers: handlers, - ProgressReports: progressReporter, - options: options, - txnRetryer: txn.Retryer{ - Manager: s.Repository.TxnManager, - Retries: maxRetries, - }, - } - - job.execute(ctx) -} - -type scanFile struct { +// ScannedFile represents a file being scanned. 
+type ScannedFile struct { *models.BaseFile - fs models.FS - info fs.FileInfo + FS models.FS + Info fs.FileInfo } -func (s *scanJob) withTxn(ctx context.Context, fn func(ctx context.Context) error) error { - return s.txnRetryer.WithTxn(ctx, fn) -} - -func (s *scanJob) withDB(ctx context.Context, fn func(ctx context.Context) error) error { - return s.Repository.WithDB(ctx, fn) -} - -func (s *scanJob) execute(ctx context.Context) { - paths := s.options.Paths - logger.Infof("scanning %d paths", len(paths)) - s.startTime = time.Now() - - s.fileQueue = make(chan scanFile, scanQueueSize) - var wg sync.WaitGroup - wg.Add(1) - - go func() { - defer wg.Done() - if err := s.queueFiles(ctx, paths); err != nil { - if errors.Is(err, context.Canceled) { - return - } - - logger.Errorf("error queuing files for scan: %v", err) - return - } - - logger.Infof("Finished adding files to queue. %d files queued", s.count) - }() - - defer wg.Wait() - - if err := s.processQueue(ctx); err != nil { - if errors.Is(err, context.Canceled) { - return - } - - logger.Errorf("error scanning files: %v", err) - return - } -} - -func (s *scanJob) queueFiles(ctx context.Context, paths []string) error { - var err error - s.ProgressReports.ExecuteTask("Walking directory tree", func() { - for _, p := range paths { - err = symWalk(s.FS, p, s.queueFileFunc(ctx, s.FS, nil)) - if err != nil { - return - } - } - }) - - close(s.fileQueue) - - if s.ProgressReports != nil { - s.ProgressReports.AddTotal(s.count) - s.ProgressReports.Definite() - } - - return err -} - -func (s *scanJob) queueFileFunc(ctx context.Context, f models.FS, zipFile *scanFile) fs.WalkDirFunc { - return func(path string, d fs.DirEntry, err error) error { - if err != nil { - // don't let errors prevent scanning - logger.Errorf("error scanning %s: %v", path, err) - return nil - } - - if err = ctx.Err(); err != nil { - return err - } - - info, err := d.Info() - if err != nil { - return fmt.Errorf("reading info for %q: %w", path, err) - } - - if 
!s.acceptEntry(ctx, path, info) { - if info.IsDir() { - return fs.SkipDir - } - - return nil - } - - size, err := getFileSize(f, path, info) - if err != nil { - return err - } - - ff := scanFile{ - BaseFile: &models.BaseFile{ - DirEntry: models.DirEntry{ - ModTime: modTime(info), - }, - Path: path, - Basename: filepath.Base(path), - Size: size, - }, - fs: f, - info: info, - } - - if zipFile != nil { - zipFileID, err := s.getZipFileID(ctx, zipFile) - if err != nil { - return err - } - ff.ZipFileID = zipFileID - ff.ZipFile = zipFile - } - - if info.IsDir() { - // handle folders immediately - if err := s.handleFolder(ctx, ff); err != nil { - if !errors.Is(err, context.Canceled) { - logger.Errorf("error processing %q: %v", path, err) - } - - // skip the directory since we won't be able to process the files anyway - return fs.SkipDir - } - - return nil - } - - // if zip file is present, we handle immediately - if zipFile != nil { - s.ProgressReports.ExecuteTask("Scanning "+path, func() { - if err := s.handleFile(ctx, ff); err != nil { - if !errors.Is(err, context.Canceled) { - logger.Errorf("error processing %q: %v", path, err) - } - // don't return an error, just skip the file - } - }) - - return nil - } - - s.fileQueue <- ff - - s.count++ - - return nil - } -} - -func getFileSize(f models.FS, path string, info fs.FileInfo) (int64, error) { - // #2196/#3042 - replace size with target size if file is a symlink - if info.Mode()&os.ModeSymlink == os.ModeSymlink { - targetInfo, err := f.Stat(path) - if err != nil { - return 0, fmt.Errorf("reading info for symlink %q: %w", path, err) - } - return targetInfo.Size(), nil - } - - return info.Size(), nil -} - -func (s *scanJob) acceptEntry(ctx context.Context, path string, info fs.FileInfo) bool { +// AcceptEntry determines if the file entry should be accepted for scanning +func (s *Scanner) AcceptEntry(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool { // always accept if there's no filters - accept 
:= len(s.options.ScanFilters) == 0 - for _, filter := range s.options.ScanFilters { + accept := len(s.ScanFilters) == 0 + for _, filter := range s.ScanFilters { // accept if any filter accepts the file - if filter.Accept(ctx, path, info) { + if filter.Accept(ctx, path, info, zipFilePath) { accept = true break } @@ -340,109 +125,17 @@ func (s *scanJob) acceptEntry(ctx context.Context, path string, info fs.FileInfo return accept } -func (s *scanJob) scanZipFile(ctx context.Context, f scanFile) error { - zipFS, err := f.fs.OpenZip(f.Path, f.Size) - if err != nil { - if errors.Is(err, errNotReaderAt) { - // can't walk the zip file - // just return - return nil - } - - return err - } - - defer zipFS.Close() - - return symWalk(zipFS, f.Path, s.queueFileFunc(ctx, zipFS, &f)) -} - -func (s *scanJob) processQueue(ctx context.Context) error { - parallelTasks := s.options.ParallelTasks - if parallelTasks < 1 { - parallelTasks = 1 - } - - wg := sizedwaitgroup.New(parallelTasks) - - if err := func() error { - defer wg.Wait() - - for f := range s.fileQueue { - if err := ctx.Err(); err != nil { - return err - } - - wg.Add() - ff := f - go func() { - defer wg.Done() - s.processQueueItem(ctx, ff) - }() - } - - return nil - }(); err != nil { - return err - } - - s.retrying = true - - if err := func() error { - defer wg.Wait() - - for _, f := range s.retryList { - if err := ctx.Err(); err != nil { - return err - } - - wg.Add() - ff := f - go func() { - defer wg.Done() - s.processQueueItem(ctx, ff) - }() - } - - return nil - }(); err != nil { - return err - } - - return nil -} - -func (s *scanJob) incrementProgress(f scanFile) { - // don't increment for files inside zip files since these aren't - // counted during the initial walking - if s.ProgressReports != nil && f.ZipFile == nil { - s.ProgressReports.Increment() - } -} - -func (s *scanJob) processQueueItem(ctx context.Context, f scanFile) { - s.ProgressReports.ExecuteTask("Scanning "+f.Path, func() { - var err error - if 
f.info.IsDir() { - err = s.handleFolder(ctx, f) - } else { - err = s.handleFile(ctx, f) - } - - if err != nil && !errors.Is(err, context.Canceled) { - logger.Errorf("error processing %q: %v", f.Path, err) - } - }) -} - -func (s *scanJob) getFolderID(ctx context.Context, path string) (*models.FolderID, error) { +func (s *Scanner) getFolderID(ctx context.Context, path string) (*models.FolderID, error) { // check the folder cache first if f, ok := s.folderPathToID.Load(path); ok { v := f.(models.FolderID) return &v, nil } - ret, err := s.Repository.Folder.FindByPath(ctx, path) + // assume case sensitive when searching for the folder + const caseSensitive = true + + ret, err := s.Repository.Folder.FindByPath(ctx, path, caseSensitive) if err != nil { return nil, err } @@ -455,48 +148,35 @@ func (s *scanJob) getFolderID(ctx context.Context, path string) (*models.FolderI return &ret.ID, nil } -func (s *scanJob) getZipFileID(ctx context.Context, zipFile *scanFile) (*models.FileID, error) { - if zipFile == nil { - return nil, nil - } - - if zipFile.ID != 0 { - return &zipFile.ID, nil - } - - path := zipFile.Path - - // check the folder cache first - if f, ok := s.zipPathToID.Load(path); ok { - v := f.(models.FileID) - return &v, nil - } - - ret, err := s.Repository.File.FindByPath(ctx, path) - if err != nil { - return nil, fmt.Errorf("getting zip file ID for %q: %w", path, err) - } - - if ret == nil { - return nil, fmt.Errorf("zip file %q doesn't exist in database", zipFile.Path) - } - - s.zipPathToID.Store(path, ret.Base().ID) - return &ret.Base().ID, nil -} - -func (s *scanJob) handleFolder(ctx context.Context, file scanFile) error { +// ScanFolder scans the provided folder into the database, returning the folder entry. +// If the folder already exists, it is updated if necessary. 
+func (s *Scanner) ScanFolder(ctx context.Context, file ScannedFile) (*models.Folder, error) { + var f *models.Folder + var err error path := file.Path - return s.withTxn(ctx, func(ctx context.Context) error { - defer s.incrementProgress(file) - + err = s.Repository.WithTxn(ctx, func(ctx context.Context) error { // determine if folder already exists in data store (by path) - f, err := s.Repository.Folder.FindByPath(ctx, path) + // assume case sensitive by default + f, err = s.Repository.Folder.FindByPath(ctx, path, true) if err != nil { return fmt.Errorf("checking for existing folder %q: %w", path, err) } + // #1426 / #6326 - if folder is in a case-insensitive filesystem, then try + // case insensitive searching + // assume case sensitive if in zip + if f == nil && file.ZipFileID == nil { + caseSensitive, _ := file.FS.IsPathCaseSensitive(file.Path) + + if !caseSensitive { + f, err = s.Repository.Folder.FindByPath(ctx, path, false) + if err != nil { + return fmt.Errorf("checking for existing folder %q: %w", path, err) + } + } + } + // if folder not exists, create it if f == nil { f, err = s.onNewFolder(ctx, file) @@ -514,9 +194,15 @@ func (s *scanJob) handleFolder(ctx context.Context, file scanFile) error { return nil }) + + return f, err } -func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*models.Folder, error) { +func (s *Scanner) isRootPath(path string) bool { + return path == "." || slices.Contains(s.RootPaths, path) +} + +func (s *Scanner) onNewFolder(ctx context.Context, file ScannedFile) (*models.Folder, error) { renamed, err := s.handleFolderRename(ctx, file) if err != nil { return nil, err @@ -535,18 +221,16 @@ func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*models.Folde UpdatedAt: now, } - dir := filepath.Dir(file.Path) - if dir != "." 
{ - parentFolderID, err := s.getFolderID(ctx, dir) + if !s.isRootPath(file.Path) { + dir := filepath.Dir(file.Path) + + // create full folder hierarchy if parent folder doesn't exist, and set parent folder ID + parentFolder, err := GetOrCreateFolderHierarchy(ctx, s.Repository.Folder, dir, s.RootPaths) if err != nil { return nil, fmt.Errorf("getting parent folder %q: %w", dir, err) } - // if parent folder doesn't exist, assume it's a top-level folder - // this may not be true if we're using multiple goroutines - if parentFolderID != nil { - toCreate.ParentFolderID = parentFolderID - } + toCreate.ParentFolderID = &parentFolder.ID } txn.AddPostCommitHook(ctx, func(ctx context.Context) { @@ -563,7 +247,7 @@ func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*models.Folde return toCreate, nil } -func (s *scanJob) handleFolderRename(ctx context.Context, file scanFile) (*models.Folder, error) { +func (s *Scanner) handleFolderRename(ctx context.Context, file ScannedFile) (*models.Folder, error) { // ignore folders in zip files if file.ZipFileID != nil { return nil, nil @@ -604,16 +288,24 @@ func (s *scanJob) handleFolderRename(ctx context.Context, file scanFile) (*model return renamedFrom, nil } -func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *models.Folder) (*models.Folder, error) { +func (s *Scanner) onExistingFolder(ctx context.Context, f ScannedFile, existing *models.Folder) (*models.Folder, error) { update := false // update if mod time is changed entryModTime := f.ModTime if !entryModTime.Equal(existing.ModTime) { + existing.Path = f.Path existing.ModTime = entryModTime update = true } + // #6326 - update if path has changed - should only happen if case is + // changed and filesystem is case insensitive + if existing.Path != f.Path { + existing.Path = f.Path + update = true + } + // update if zip file ID has changed fZfID := f.ZipFileID existingZfID := existing.ZipFileID @@ -627,6 +319,19 @@ func (s *scanJob) 
onExistingFolder(ctx context.Context, f scanFile, existing *mo } } + // handle case where parent folder was not previously set + if existing.ParentFolderID == nil && !s.isRootPath(existing.Path) { + logger.Infof("Existing folder entry %q has no parent folder. Creating folder hierarchy and setting parent ID...", existing.Path) + + // create full folder hierarchy if parent folder doesn't exist, and set parent folder ID + parentFolder, err := GetOrCreateFolderHierarchy(ctx, s.Repository.Folder, filepath.Dir(f.Path), s.RootPaths) + if err != nil { + return nil, fmt.Errorf("getting parent folder for %q: %w", f.Path, err) + } + existing.ParentFolderID = &parentFolder.ID + update = true + } + if update { var err error if err = s.Repository.Folder.Update(ctx, existing); err != nil { @@ -637,55 +342,64 @@ func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *mo return existing, nil } -func modTime(info fs.FileInfo) time.Time { - // truncate to seconds, since we don't store beyond that in the database - return info.ModTime().Truncate(time.Second) +type ScanFileResult struct { + File models.File + New bool + Renamed bool + Updated bool + FingerprintChanged bool } -func (s *scanJob) handleFile(ctx context.Context, f scanFile) error { - defer s.incrementProgress(f) +func (r ScanFileResult) IsUnchanged() bool { + return !r.New && !r.Renamed && !r.Updated +} + +// ScanFile scans the provided file into the database, returning the scan result. 
+func (s *Scanner) ScanFile(ctx context.Context, f ScannedFile) (*ScanFileResult, error) { + var r *ScanFileResult - var ff models.File // don't use a transaction to check if new or existing - if err := s.withDB(ctx, func(ctx context.Context) error { + if err := s.Repository.WithDB(ctx, func(ctx context.Context) error { // determine if file already exists in data store - var err error - ff, err = s.Repository.File.FindByPath(ctx, f.Path) + // assume case sensitive when searching for the file to begin with + ff, err := s.Repository.File.FindByPath(ctx, f.Path, true) if err != nil { return fmt.Errorf("checking for existing file %q: %w", f.Path, err) } + // #1426 / #6326 - if file is in a case-insensitive filesystem, then try + // case insensitive search + // assume case sensitive if in zip + if ff == nil && f.ZipFileID != nil { + caseSensitive, _ := f.FS.IsPathCaseSensitive(f.Path) + + if !caseSensitive { + ff, err = s.Repository.File.FindByPath(ctx, f.Path, false) + if err != nil { + return fmt.Errorf("checking for existing file %q: %w", f.Path, err) + } + } + } + if ff == nil { // returns a file only if it is actually new - ff, err = s.onNewFile(ctx, f) + r, err = s.onNewFile(ctx, f) return err } - ff, err = s.onExistingFile(ctx, f, ff) + r, err = s.onExistingFile(ctx, f, ff) return err }); err != nil { - return err + return nil, err } - if ff != nil && s.isZipFile(f.info.Name()) { - f.BaseFile = ff.Base() - - // scan zip files with a different context that is not cancellable - // cancelling while scanning zip file contents results in the scan - // contents being partially completed - zipCtx := utils.ValueOnlyContext{Context: ctx} - - if err := s.scanZipFile(zipCtx, f); err != nil { - logger.Errorf("Error scanning zip file %q: %v", f.Path, err) - } - } - - return nil + return r, nil } -func (s *scanJob) isZipFile(path string) bool { +// IsZipFile determines if the provided path is a zip file based on its extension. 
+func (s *Scanner) IsZipFile(path string) bool { fExt := filepath.Ext(path) - for _, ext := range s.options.ZipFileExtensions { + for _, ext := range s.ZipFileExtensions { if strings.EqualFold(fExt, "."+ext) { return true } @@ -694,7 +408,7 @@ func (s *scanJob) isZipFile(path string) bool { return false } -func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (models.File, error) { +func (s *Scanner) onNewFile(ctx context.Context, f ScannedFile) (*ScanFileResult, error) { now := time.Now() baseFile := f.BaseFile @@ -704,54 +418,71 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (models.File, error baseFile.UpdatedAt = now // find the parent folder - parentFolderID, err := s.getFolderID(ctx, filepath.Dir(path)) + folderPath := filepath.Dir(path) + parentFolderID, err := s.getFolderID(ctx, folderPath) if err != nil { return nil, fmt.Errorf("getting parent folder for %q: %w", path, err) } if parentFolderID == nil { - // if parent folder doesn't exist, assume it's not yet created - // add this file to the queue to be created later - if s.retrying { - // if we're retrying and the folder still doesn't exist, then it's a problem - return nil, fmt.Errorf("parent folder for %q doesn't exist", path) - } + // parent folders should have been created before scanning this file in a recursive scan + // assume that we are scanning specifically and only this file, + // so we should create the parent folder hierarchy if it doesn't exist + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { + parentFolder, err := GetOrCreateFolderHierarchy(ctx, s.Repository.Folder, folderPath, s.RootPaths) + if err != nil { + return fmt.Errorf("getting parent folder for %q: %w", f.Path, err) + } - s.retryList = append(s.retryList, f) - return nil, nil + parentFolderID = &parentFolder.ID + return nil + }); err != nil { + return nil, err + } + } + if parentFolderID == nil { + // shouldn't happen + return nil, fmt.Errorf("parent folder ID is nil for %q", path) } 
baseFile.ParentFolderID = *parentFolderID const useExisting = false - fp, err := s.calculateFingerprints(f.fs, baseFile, path, useExisting) + fp, err := s.calculateFingerprints(f.FS, baseFile, path, useExisting) if err != nil { return nil, err } baseFile.SetFingerprints(fp) - file, err := s.fireDecorators(ctx, f.fs, baseFile) + file, err := s.fireDecorators(ctx, f.FS, baseFile) if err != nil { return nil, err } // determine if the file is renamed from an existing file in the store // do this after decoration so that missing fields can be populated - renamed, err := s.handleRename(ctx, file, fp) + zipFilePath := "" + if f.ZipFile != nil { + zipFilePath = f.ZipFile.Base().Path + } + renamed, err := s.handleRename(ctx, file, fp, zipFilePath) if err != nil { return nil, err } if renamed != nil { + return &ScanFileResult{ + File: renamed, + Renamed: true, + }, nil // handle rename should have already handled the contents of the zip file // so shouldn't need to scan it again // return nil so it doesn't - return nil, nil } // if not renamed, queue file for creation - if err := s.withTxn(ctx, func(ctx context.Context) error { + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.Repository.File.Create(ctx, file); err != nil { return fmt.Errorf("creating file %q: %w", path, err) } @@ -765,10 +496,13 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (models.File, error return nil, err } - return file, nil + return &ScanFileResult{ + File: file, + New: true, + }, nil } -func (s *scanJob) fireDecorators(ctx context.Context, fs models.FS, f models.File) (models.File, error) { +func (s *Scanner) fireDecorators(ctx context.Context, fs models.FS, f models.File) (models.File, error) { for _, h := range s.FileDecorators { var err error f, err = h.Decorate(ctx, fs, f) @@ -780,8 +514,8 @@ func (s *scanJob) fireDecorators(ctx context.Context, fs models.FS, f models.Fil return f, nil } -func (s *scanJob) fireHandlers(ctx context.Context, f 
models.File, oldFile models.File) error { - for _, h := range s.handlers { +func (s *Scanner) fireHandlers(ctx context.Context, f models.File, oldFile models.File) error { + for _, h := range s.FileHandlers { if err := h.Handle(ctx, f, oldFile); err != nil { return err } @@ -790,7 +524,7 @@ func (s *scanJob) fireHandlers(ctx context.Context, f models.File, oldFile model return nil } -func (s *scanJob) calculateFingerprints(fs models.FS, f *models.BaseFile, path string, useExisting bool) (models.Fingerprints, error) { +func (s *Scanner) calculateFingerprints(fs models.FS, f *models.BaseFile, path string, useExisting bool) (models.Fingerprints, error) { // only log if we're (re)calculating fingerprints if !useExisting { logger.Infof("Calculating fingerprints for %s ...", path) @@ -827,7 +561,7 @@ func appendFileUnique(v []models.File, toAdd []models.File) []models.File { return v } -func (s *scanJob) getFileFS(f *models.BaseFile) (models.FS, error) { +func (s *Scanner) getFileFS(f *models.BaseFile) (models.FS, error) { if f.ZipFile == nil { return s.FS, nil } @@ -838,10 +572,11 @@ func (s *scanJob) getFileFS(f *models.BaseFile) (models.FS, error) { } zipPath := f.ZipFile.Base().Path - return fs.OpenZip(zipPath, f.Size) + zipSize := f.ZipFile.Base().Size + return fs.OpenZip(zipPath, zipSize) } -func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.Fingerprint) (models.File, error) { +func (s *Scanner) handleRename(ctx context.Context, f models.File, fp []models.Fingerprint, zipFilePath string) (models.File, error) { var others []models.File for _, tfp := range fp { @@ -878,11 +613,12 @@ func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.F // #1426 - if file exists but is a case-insensitive match for the // original filename, and the filesystem is case-insensitive // then treat it as a move + // #6326 - this should now be handled earlier, and this shouldn't be necessary if caseSensitive, _ := 
fs.IsPathCaseSensitive(other.Base().Path); !caseSensitive { // treat as a move missing = append(missing, other) } - case !s.acceptEntry(ctx, other.Base().Path, info): + case !s.AcceptEntry(ctx, other.Base().Path, info, zipFilePath): // #4393 - if the file is no longer in the configured library paths, treat it as a move logger.Debugf("File %q no longer in library paths. Treating as a move.", other.Base().Path) missing = append(missing, other) @@ -915,13 +651,19 @@ func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.F fBaseCopy.Fingerprints = updatedBase.Fingerprints *updatedBase = fBaseCopy - if err := s.withTxn(ctx, func(ctx context.Context) error { + zipMover := zipHierarchyMover{ + folderStore: s.Repository.Folder, + files: s.Repository.File, + rootPaths: s.RootPaths, + } + + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.Repository.File.Update(ctx, updated); err != nil { return fmt.Errorf("updating file for rename %q: %w", newPath, err) } - if s.isZipFile(updatedBase.Basename) { - if err := transferZipHierarchy(ctx, s.Repository.Folder, s.Repository.File, updatedBase.ID, oldPath, newPath); err != nil { + if s.IsZipFile(updatedBase.Basename) { + if err := zipMover.transferZipHierarchy(ctx, updatedBase.ID, oldPath, newPath); err != nil { return fmt.Errorf("moving zip hierarchy for renamed zip file %q: %w", newPath, err) } } @@ -938,9 +680,9 @@ func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.F return updated, nil } -func (s *scanJob) isHandlerRequired(ctx context.Context, f models.File) bool { - accept := len(s.options.HandlerRequiredFilters) == 0 - for _, filter := range s.options.HandlerRequiredFilters { +func (s *Scanner) isHandlerRequired(ctx context.Context, f models.File) bool { + accept := len(s.HandlerRequiredFilters) == 0 + for _, filter := range s.HandlerRequiredFilters { // accept if any filter accepts the file if filter.Accept(ctx, f) { accept = true @@ -959,9 
+701,9 @@ func (s *scanJob) isHandlerRequired(ctx context.Context, f models.File) bool { // - file size // - image format, width or height // - video codec, audio codec, format, width, height, framerate or bitrate -func (s *scanJob) isMissingMetadata(ctx context.Context, f scanFile, existing models.File) bool { +func (s *Scanner) isMissingMetadata(ctx context.Context, f ScannedFile, existing models.File) bool { for _, h := range s.FileDecorators { - if h.IsMissingMetadata(ctx, f.fs, existing) { + if h.IsMissingMetadata(ctx, f.FS, existing) { return true } } @@ -969,20 +711,20 @@ func (s *scanJob) isMissingMetadata(ctx context.Context, f scanFile, existing mo return false } -func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing models.File) (models.File, error) { +func (s *Scanner) setMissingMetadata(ctx context.Context, f ScannedFile, existing models.File) (models.File, error) { path := existing.Base().Path logger.Infof("Updating metadata for %s", path) existing.Base().Size = f.Size var err error - existing, err = s.fireDecorators(ctx, f.fs, existing) + existing, err = s.fireDecorators(ctx, f.FS, existing) if err != nil { return nil, err } // queue file for update - if err := s.withTxn(ctx, func(ctx context.Context) error { + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.Repository.File.Update(ctx, existing); err != nil { return fmt.Errorf("updating file %q: %w", path, err) } @@ -995,9 +737,9 @@ func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing m return existing, nil } -func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existing models.File) (models.File, error) { +func (s *Scanner) setMissingFingerprints(ctx context.Context, f ScannedFile, existing models.File) (models.File, error) { const useExisting = true - fp, err := s.calculateFingerprints(f.fs, existing.Base(), f.Path, useExisting) + fp, err := s.calculateFingerprints(f.FS, existing.Base(), 
f.Path, useExisting) if err != nil { return nil, err } @@ -1005,7 +747,7 @@ func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existi if fp.ContentsChanged(existing.Base().Fingerprints) { existing.SetFingerprints(fp) - if err := s.withTxn(ctx, func(ctx context.Context) error { + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.Repository.File.Update(ctx, existing); err != nil { return fmt.Errorf("updating file %q: %w", f.Path, err) } @@ -1020,13 +762,14 @@ func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existi } // returns a file only if it was updated -func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing models.File) (models.File, error) { +func (s *Scanner) onExistingFile(ctx context.Context, f ScannedFile, existing models.File) (*ScanFileResult, error) { base := existing.Base() path := base.Path fileModTime := f.ModTime - updated := !fileModTime.Equal(base.ModTime) - forceRescan := s.options.Rescan + // #6326 - also force a rescan if the basename changed + updated := !fileModTime.Equal(base.ModTime) || base.Basename != f.Basename + forceRescan := s.Rescan if !updated && !forceRescan { return s.onUnchangedFile(ctx, f, existing) @@ -1040,27 +783,32 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing model logger.Infof("%s has been updated: rescanning", path) } + // #6326 - update basename in case it changed + base.Basename = f.Basename base.ModTime = fileModTime base.Size = f.Size base.UpdatedAt = time.Now() // calculate and update fingerprints for the file const useExisting = false - fp, err := s.calculateFingerprints(f.fs, base, path, useExisting) + fp, err := s.calculateFingerprints(f.FS, base, path, useExisting) if err != nil { return nil, err } + oldFingerprints := existing.Base().Fingerprints + fingerprintChanged := fp.ContentsChanged(oldFingerprints) + s.removeOutdatedFingerprints(existing, fp) existing.SetFingerprints(fp) - 
existing, err = s.fireDecorators(ctx, f.fs, existing) + existing, err = s.fireDecorators(ctx, f.FS, existing) if err != nil { return nil, err } // queue file for update - if err := s.withTxn(ctx, func(ctx context.Context) error { + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.Repository.File.Update(ctx, existing); err != nil { return fmt.Errorf("updating file %q: %w", path, err) } @@ -1073,11 +821,14 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing model }); err != nil { return nil, err } - - return existing, nil + return &ScanFileResult{ + File: existing, + Updated: true, + FingerprintChanged: fingerprintChanged, + }, nil } -func (s *scanJob) removeOutdatedFingerprints(existing models.File, fp models.Fingerprints) { +func (s *Scanner) removeOutdatedFingerprints(existing models.File, fp models.Fingerprints) { // HACK - if no MD5 fingerprint was returned, and the oshash is changed // then remove the MD5 fingerprint oshash := fp.For(models.FingerprintTypeOshash) @@ -1105,7 +856,7 @@ func (s *scanJob) removeOutdatedFingerprints(existing models.File, fp models.Fin } // returns a file only if it was updated -func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing models.File) (models.File, error) { +func (s *Scanner) onUnchangedFile(ctx context.Context, f ScannedFile, existing models.File) (*ScanFileResult, error) { var err error isMissingMetdata := s.isMissingMetadata(ctx, f, existing) @@ -1124,7 +875,7 @@ func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing mode } handlerRequired := false - if err := s.withDB(ctx, func(ctx context.Context) error { + if err := s.Repository.WithDB(ctx, func(ctx context.Context) error { // check if the handler needs to be run handlerRequired = s.isHandlerRequired(ctx, existing) return nil @@ -1134,15 +885,20 @@ func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing mode if !handlerRequired { // if this file is 
a zip file, then we need to rescan the contents - // as well. We do this by returning the file, instead of nil. + // as well. We do this by indicating that the file is updated. if isMissingMetdata { - return existing, nil + return &ScanFileResult{ + File: existing, + Updated: true, + }, nil } - return nil, nil + return &ScanFileResult{ + File: existing, + }, nil } - if err := s.withTxn(ctx, func(ctx context.Context) error { + if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error { if err := s.fireHandlers(ctx, existing, nil); err != nil { return err } @@ -1153,6 +909,9 @@ func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing mode } // if this file is a zip file, then we need to rescan the contents - // as well. We do this by returning the file, instead of nil. - return existing, nil + // as well. We do this by indicating that the file is updated. + return &ScanFileResult{ + File: existing, + Updated: true, + }, nil } diff --git a/pkg/file/stashignore.go b/pkg/file/stashignore.go new file mode 100644 index 000000000..681ccf795 --- /dev/null +++ b/pkg/file/stashignore.go @@ -0,0 +1,262 @@ +package file + +import ( + "context" + "io/fs" + "os" + "path/filepath" + "strings" + "sync" + + lru "github.com/hashicorp/golang-lru/v2" + ignore "github.com/sabhiram/go-gitignore" + "github.com/stashapp/stash/pkg/logger" +) + +const stashIgnoreFilename = ".stashignore" + +// entriesCacheSize is the size of the LRU cache for collected ignore entries. +// This cache stores the computed list of ignore entries per directory, avoiding +// repeated directory tree walks for files in the same directory. +const entriesCacheSize = 500 + +// StashIgnoreFilter implements PathFilter to exclude files/directories +// based on .stashignore files with gitignore-style patterns. +type StashIgnoreFilter struct { + // cache stores compiled ignore patterns per directory. 
+ cache sync.Map // map[string]*ignoreEntry + // entriesCache stores collected ignore entries per (dir, libraryRoot) pair. + // This avoids recomputing the entry list for every file in the same directory. + entriesCache *lru.Cache[string, []*ignoreEntry] +} + +// ignoreEntry holds the compiled ignore patterns for a directory. +type ignoreEntry struct { + // patterns is the compiled gitignore matcher for this directory. + patterns *ignore.GitIgnore + // dir is the directory this entry applies to. + dir string +} + +// NewStashIgnoreFilter creates a new StashIgnoreFilter. +func NewStashIgnoreFilter() *StashIgnoreFilter { + // Create the LRU cache for collected entries. + // Ignore error as it only fails if size <= 0. + entriesCache, _ := lru.New[string, []*ignoreEntry](entriesCacheSize) + return &StashIgnoreFilter{ + entriesCache: entriesCache, + } +} + +// Accept returns true if the path should be included in the scan. +// It checks for .stashignore files in the directory hierarchy and +// applies gitignore-style pattern matching. +// The libraryRoot parameter bounds the search for .stashignore files - +// only directories within the library root are checked. +// zipFilepath is the path of the zip file if the file is inside a zip. +// .stashignore files will not be read within zip files. +func (f *StashIgnoreFilter) Accept(ctx context.Context, path string, info fs.FileInfo, libraryRoot string, zipFilePath string) bool { + // If no library root provided, accept the file (safety fallback). + if libraryRoot == "" { + return true + } + + // Get the directory containing this path. + dir := filepath.Dir(path) + + // If the file is inside a zip, use the zip file's directory as the base for .stashignore lookup. + if zipFilePath != "" { + dir = filepath.Dir(zipFilePath) + } + + // Collect all applicable ignore entries from library root to this directory. + entries := f.collectIgnoreEntries(dir, libraryRoot) + + // If no .stashignore files found, accept the file. 
+ if len(entries) == 0 { + return true + } + + // Check each ignore entry in order (from root to most specific). + // Later entries can override earlier ones with negation patterns. + ignored := false + for _, entry := range entries { + // Get path relative to the ignore file's directory. + entryRelPath, err := filepath.Rel(entry.dir, path) + if err != nil { + continue + } + entryRelPath = filepath.ToSlash(entryRelPath) + if info.IsDir() { + entryRelPath += "/" + } + + if entry.patterns.MatchesPath(entryRelPath) { + ignored = true + } + } + + return !ignored +} + +// collectIgnoreEntries gathers all ignore entries from library root to the given directory. +// It walks up the directory tree from dir to libraryRoot and returns entries in order +// from root to most specific. Results are cached to avoid repeated computation for +// files in the same directory. +func (f *StashIgnoreFilter) collectIgnoreEntries(dir string, libraryRoot string) []*ignoreEntry { + // Clean paths for consistent comparison and cache key generation. + dir = filepath.Clean(dir) + libraryRoot = filepath.Clean(libraryRoot) + + // Build cache key from dir and libraryRoot. + cacheKey := dir + "\x00" + libraryRoot + + // Check the entries cache first. + if cached, ok := f.entriesCache.Get(cacheKey); ok { + return cached + } + + // Try subdirectory shortcut: if parent's entries are cached, extend them. + if dir != libraryRoot { + parent := filepath.Dir(dir) + if isPathInOrEqual(libraryRoot, parent) { + parentKey := parent + "\x00" + libraryRoot + if parentEntries, ok := f.entriesCache.Get(parentKey); ok { + // Parent is cached - just check if current dir has a .stashignore. + entries := parentEntries + if entry := f.getOrLoadIgnoreEntry(dir); entry != nil { + // Copy parent slice and append to avoid mutating cached slice. 
+ entries = make([]*ignoreEntry, len(parentEntries), len(parentEntries)+1) + copy(entries, parentEntries) + entries = append(entries, entry) + } + f.entriesCache.Add(cacheKey, entries) + return entries + } + } + } + + // No cache hit - compute from scratch. + // Walk up from dir to library root, collecting directories. + var dirs []string + current := dir + for { + // Check if we're still within the library root. + if !isPathInOrEqual(libraryRoot, current) { + break + } + + dirs = append(dirs, current) + + // Stop if we've reached the library root. + if current == libraryRoot { + break + } + + parent := filepath.Dir(current) + if parent == current { + // Reached filesystem root without finding library root. + break + } + current = parent + } + + // Reverse to get root-to-leaf order. + for i, j := 0, len(dirs)-1; i < j; i, j = i+1, j-1 { + dirs[i], dirs[j] = dirs[j], dirs[i] + } + + // Check each directory for .stashignore files. + var entries []*ignoreEntry + for _, d := range dirs { + if entry := f.getOrLoadIgnoreEntry(d); entry != nil { + entries = append(entries, entry) + } + } + + // Cache the result. + f.entriesCache.Add(cacheKey, entries) + + return entries +} + +// isPathInOrEqual checks if path is equal to or inside root. +func isPathInOrEqual(root, path string) bool { + if path == root { + return true + } + // Check if path starts with root + separator. + return strings.HasPrefix(path, root+string(filepath.Separator)) +} + +// getOrLoadIgnoreEntry returns the cached ignore entry for a directory, or loads it. +func (f *StashIgnoreFilter) getOrLoadIgnoreEntry(dir string) *ignoreEntry { + // Check cache first. + if cached, ok := f.cache.Load(dir); ok { + entry := cached.(*ignoreEntry) + if entry.patterns == nil { + return nil // Cached negative result. + } + return entry + } + + // Try to load .stashignore from this directory. 
+ stashIgnorePath := filepath.Join(dir, stashIgnoreFilename) + patterns, err := f.loadIgnoreFile(stashIgnorePath) + if err != nil { + if !os.IsNotExist(err) { + logger.Warnf("Failed to load .stashignore from %s: %v", dir, err) + } + f.cache.Store(dir, &ignoreEntry{patterns: nil, dir: dir}) + return nil + } + if patterns == nil { + // File exists but has no patterns (empty or only comments). + f.cache.Store(dir, &ignoreEntry{patterns: nil, dir: dir}) + return nil + } + + logger.Debugf("Loaded .stashignore from %s", dir) + + entry := &ignoreEntry{ + patterns: patterns, + dir: dir, + } + f.cache.Store(dir, entry) + return entry +} + +// loadIgnoreFile loads and compiles a .stashignore file. +func (f *StashIgnoreFilter) loadIgnoreFile(path string) (*ignore.GitIgnore, error) { + data, err := os.ReadFile(path) + if err != nil { + return nil, err + } + + lines := strings.Split(string(data), "\n") + var patterns []string + + for _, line := range lines { + // Trim trailing whitespace (but preserve leading for patterns). + line = strings.TrimRight(line, " \t\r") + + // Skip empty lines. + if line == "" { + continue + } + + // Skip comments (but not escaped #). + if strings.HasPrefix(line, "#") && !strings.HasPrefix(line, "\\#") { + continue + } + + patterns = append(patterns, line) + } + + if len(patterns) == 0 { + // File exists but has no patterns (e.g., only comments). + return nil, nil + } + + return ignore.CompileIgnoreLines(patterns...), nil +} diff --git a/pkg/file/stashignore_test.go b/pkg/file/stashignore_test.go new file mode 100644 index 000000000..41668b51b --- /dev/null +++ b/pkg/file/stashignore_test.go @@ -0,0 +1,523 @@ +package file + +import ( + "context" + "io/fs" + "os" + "path/filepath" + "sort" + "testing" +) + +// Helper to create an empty file. 
+func createTestFile(t *testing.T, dir, name string) { + t.Helper() + path := filepath.Join(dir, name) + if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil { + t.Fatalf("failed to create directory for %s: %v", path, err) + } + if err := os.WriteFile(path, []byte{}, 0644); err != nil { + t.Fatalf("failed to create file %s: %v", path, err) + } +} + +// Helper to create a file with content. +func createTestFileWithContent(t *testing.T, dir, name, content string) { + t.Helper() + path := filepath.Join(dir, name) + if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil { + t.Fatalf("failed to create directory for %s: %v", path, err) + } + if err := os.WriteFile(path, []byte(content), 0644); err != nil { + t.Fatalf("failed to create file %s: %v", path, err) + } +} + +// Helper to create a directory. +func createTestDir(t *testing.T, dir, name string) { + t.Helper() + path := filepath.Join(dir, name) + if err := os.MkdirAll(path, 0755); err != nil { + t.Fatalf("failed to create directory %s: %v", path, err) + } +} + +// walkAndFilter walks the directory tree and returns paths accepted by the filter. +// Returns paths relative to root for easier assertion. +func walkAndFilter(t *testing.T, root string, filter *StashIgnoreFilter) []string { + t.Helper() + var accepted []string + ctx := context.Background() + + err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + + // Skip the root directory itself. + if path == root { + return nil + } + + info, err := d.Info() + if err != nil { + return err + } + + if filter.Accept(ctx, path, info, root, "") { + relPath, _ := filepath.Rel(root, path) + accepted = append(accepted, relPath) + } else if info.IsDir() { + // If directory is rejected, skip it. 
+ return filepath.SkipDir + } + + return nil + }) + + if err != nil { + t.Fatalf("walk failed: %v", err) + } + + sort.Strings(accepted) + return accepted +} + +// assertPathsEqual checks that the accepted paths match expected. +func assertPathsEqual(t *testing.T, expected, actual []string) { + t.Helper() + sort.Strings(expected) + + if len(expected) != len(actual) { + t.Errorf("path count mismatch:\nexpected %d: %v\nactual %d: %v", len(expected), expected, len(actual), actual) + return + } + + for i := range expected { + if expected[i] != actual[i] { + t.Errorf("path mismatch at index %d:\nexpected: %s\nactual: %s", i, expected[i], actual[i]) + } + } +} + +func TestStashIgnore_ExactFilename(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. + createTestFile(t, tmpDir, "video1.mp4") + createTestFile(t, tmpDir, "video2.mp4") + createTestFile(t, tmpDir, "ignore_me.mp4") + + // Create .stashignore that excludes exact filename. + createTestFileWithContent(t, tmpDir, ".stashignore", "ignore_me.mp4\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "video1.mp4", + "video2.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_WildcardPattern(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. + createTestFile(t, tmpDir, "video1.mp4") + createTestFile(t, tmpDir, "video2.mp4") + createTestFile(t, tmpDir, "temp1.tmp") + createTestFile(t, tmpDir, "temp2.tmp") + createTestFile(t, tmpDir, "notes.log") + + // Create .stashignore that excludes by extension. + createTestFileWithContent(t, tmpDir, ".stashignore", "*.tmp\n*.log\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "video1.mp4", + "video2.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_DirectoryExclusion(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. 
+ createTestFile(t, tmpDir, "video1.mp4") + createTestDir(t, tmpDir, "excluded_dir") + createTestFile(t, tmpDir, "excluded_dir/video2.mp4") + createTestFile(t, tmpDir, "excluded_dir/video3.mp4") + createTestDir(t, tmpDir, "included_dir") + createTestFile(t, tmpDir, "included_dir/video4.mp4") + + // Create .stashignore that excludes a directory. + createTestFileWithContent(t, tmpDir, ".stashignore", "excluded_dir/\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "included_dir", + "included_dir/video4.mp4", + "video1.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_NegationPattern(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. + createTestFile(t, tmpDir, "file1.tmp") + createTestFile(t, tmpDir, "file2.tmp") + createTestFile(t, tmpDir, "keep_this.tmp") + + // Create .stashignore that excludes *.tmp but keeps one. + createTestFileWithContent(t, tmpDir, ".stashignore", "*.tmp\n!keep_this.tmp\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "keep_this.tmp", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_CommentsAndEmptyLines(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. + createTestFile(t, tmpDir, "video1.mp4") + createTestFile(t, tmpDir, "ignore_me.mp4") + + // Create .stashignore with comments and empty lines. + stashignore := `# This is a comment +ignore_me.mp4 + +# Another comment + +` + createTestFileWithContent(t, tmpDir, ".stashignore", stashignore) + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "video1.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_NestedStashIgnoreFiles(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. 
+ createTestFile(t, tmpDir, "root_video.mp4") + createTestFile(t, tmpDir, "root_ignore.tmp") + createTestDir(t, tmpDir, "subdir") + createTestFile(t, tmpDir, "subdir/sub_video.mp4") + createTestFile(t, tmpDir, "subdir/sub_ignore.log") + createTestFile(t, tmpDir, "subdir/also_tmp.tmp") + + // Root .stashignore excludes *.tmp. + createTestFileWithContent(t, tmpDir, ".stashignore", "*.tmp\n") + + // Subdir .stashignore excludes *.log. + createTestFileWithContent(t, tmpDir, "subdir/.stashignore", "*.log\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + // *.tmp from root should apply everywhere. + // *.log from subdir should only apply in subdir. + expected := []string{ + ".stashignore", + "root_video.mp4", + "subdir", + "subdir/.stashignore", + "subdir/sub_video.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_PathPattern(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. + createTestFile(t, tmpDir, "video1.mp4") + createTestDir(t, tmpDir, "subdir") + createTestFile(t, tmpDir, "subdir/video2.mp4") + createTestFile(t, tmpDir, "subdir/skip_this.mp4") + + // Create .stashignore that excludes a specific path. + createTestFileWithContent(t, tmpDir, ".stashignore", "subdir/skip_this.mp4\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "subdir", + "subdir/video2.mp4", + "video1.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_DoubleStarPattern(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. 
+ createTestFile(t, tmpDir, "video1.mp4") + createTestDir(t, tmpDir, "a") + createTestFile(t, tmpDir, "a/video2.mp4") + createTestDir(t, tmpDir, "a/temp") + createTestFile(t, tmpDir, "a/temp/video3.mp4") + createTestDir(t, tmpDir, "a/b") + createTestDir(t, tmpDir, "a/b/temp") + createTestFile(t, tmpDir, "a/b/temp/video4.mp4") + + // Create .stashignore that excludes temp directories at any level. + createTestFileWithContent(t, tmpDir, ".stashignore", "**/temp/\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "a", + "a/b", + "a/video2.mp4", + "video1.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_LeadingSlashPattern(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. + createTestFile(t, tmpDir, "ignore.mp4") + createTestDir(t, tmpDir, "subdir") + createTestFile(t, tmpDir, "subdir/ignore.mp4") + + // Create .stashignore that excludes only at root level. + createTestFileWithContent(t, tmpDir, ".stashignore", "/ignore.mp4\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + // Only root ignore.mp4 should be excluded. + expected := []string{ + ".stashignore", + "subdir", + "subdir/ignore.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_NoStashIgnoreFile(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files without any .stashignore. + createTestFile(t, tmpDir, "video1.mp4") + createTestFile(t, tmpDir, "video2.mp4") + createTestDir(t, tmpDir, "subdir") + createTestFile(t, tmpDir, "subdir/video3.mp4") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + // All files should be accepted. 
+ expected := []string{ + "subdir", + "subdir/video3.mp4", + "video1.mp4", + "video2.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_HiddenDirectories(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files including hidden directory. + createTestFile(t, tmpDir, "video1.mp4") + createTestDir(t, tmpDir, ".hidden") + createTestFile(t, tmpDir, ".hidden/video2.mp4") + + // Create .stashignore that excludes hidden directories. + createTestFileWithContent(t, tmpDir, ".stashignore", ".*\n!.stashignore\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "video1.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_MultiplePatternsSameLine(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. + createTestFile(t, tmpDir, "video1.mp4") + createTestFile(t, tmpDir, "file.tmp") + createTestFile(t, tmpDir, "file.log") + createTestFile(t, tmpDir, "file.bak") + + // Each pattern should be on its own line. + createTestFileWithContent(t, tmpDir, ".stashignore", "*.tmp\n*.log\n*.bak\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "video1.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_TrailingSpaces(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. + createTestFile(t, tmpDir, "video1.mp4") + createTestFile(t, tmpDir, "ignore_me.mp4") + + // Pattern with trailing spaces (should be trimmed). + createTestFileWithContent(t, tmpDir, ".stashignore", "ignore_me.mp4 \n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "video1.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_EscapedHash(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files. 
+ createTestFile(t, tmpDir, "video1.mp4") + createTestFile(t, tmpDir, "#filename.mp4") + + // Escaped hash should match literal # character. + createTestFileWithContent(t, tmpDir, ".stashignore", "\\#filename.mp4\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "video1.mp4", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_CaseSensitiveMatching(t *testing.T) { + tmpDir := t.TempDir() + + // Create test files - use distinct names that work on all filesystems. + createTestFile(t, tmpDir, "video_lower.mp4") + createTestFile(t, tmpDir, "VIDEO_UPPER.mp4") + createTestFile(t, tmpDir, "other.avi") + + // Pattern should match exactly (case-sensitive). + createTestFileWithContent(t, tmpDir, ".stashignore", "video_lower.mp4\n") + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + // Only exact match is excluded. + expected := []string{ + ".stashignore", + "VIDEO_UPPER.mp4", + "other.avi", + } + + assertPathsEqual(t, expected, accepted) +} + +func TestStashIgnore_ComplexScenario(t *testing.T) { + tmpDir := t.TempDir() + + // Create a complex directory structure. + createTestFile(t, tmpDir, "video1.mp4") + createTestFile(t, tmpDir, "video2.avi") + createTestFile(t, tmpDir, "thumbnail.jpg") + createTestFile(t, tmpDir, "metadata.nfo") + createTestDir(t, tmpDir, "movies") + createTestFile(t, tmpDir, "movies/movie1.mp4") + createTestFile(t, tmpDir, "movies/movie1.nfo") + createTestDir(t, tmpDir, "movies/.thumbnails") + createTestFile(t, tmpDir, "movies/.thumbnails/thumb1.jpg") + createTestDir(t, tmpDir, "temp") + createTestFile(t, tmpDir, "temp/processing.mp4") + createTestDir(t, tmpDir, "backup") + createTestFile(t, tmpDir, "backup/video1.mp4.bak") + + // Complex .stashignore. 
+ stashignore := `# Ignore metadata files +*.nfo + +# Ignore hidden directories +.* +!.stashignore + +# Ignore temp and backup directories +temp/ +backup/ + +# But keep thumbnails in specific location +!movies/.thumbnails/ +` + createTestFileWithContent(t, tmpDir, ".stashignore", stashignore) + + filter := NewStashIgnoreFilter() + accepted := walkAndFilter(t, tmpDir, filter) + + expected := []string{ + ".stashignore", + "movies", + "movies/.thumbnails", + "movies/.thumbnails/thumb1.jpg", + "movies/movie1.mp4", + "thumbnail.jpg", + "video1.mp4", + "video2.avi", + } + + assertPathsEqual(t, expected, accepted) +} diff --git a/pkg/file/video/caption.go b/pkg/file/video/caption.go index bec3db6fd..46317d90c 100644 --- a/pkg/file/video/caption.go +++ b/pkg/file/video/caption.go @@ -90,14 +90,23 @@ type CaptionUpdater interface { UpdateCaptions(ctx context.Context, fileID models.FileID, captions []*models.VideoCaption) error } +// MatchesCaption returns true if the caption file matches the video file based on the filename +func MatchesCaption(videoPath, captionPath string) bool { + captionPrefix := getCaptionPrefix(captionPath) + videoPrefix := strings.TrimSuffix(videoPath, filepath.Ext(videoPath)) + "." 
+ return captionPrefix == videoPrefix +} + // associates captions to scene/s with the same basename -func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manager, fqb models.FileFinder, w CaptionUpdater) { +// returns true if the caption file was matched to a video file and processed, false otherwise +func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manager, fqb models.FileFinder, w CaptionUpdater) bool { captionLang := getCaptionsLangFromPath(captionPath) captionPrefix := getCaptionPrefix(captionPath) + matched := false if err := txn.WithTxn(ctx, txnMgr, func(ctx context.Context) error { var err error - files, er := fqb.FindAllByPath(ctx, captionPrefix+"*") + files, er := fqb.FindAllByPath(ctx, captionPrefix+"*", true) if er != nil { return fmt.Errorf("searching for scene %s: %w", captionPrefix, er) @@ -117,28 +126,36 @@ func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manag path := f.Base().Path logger.Debugf("Matched captions to file %s", path) + matched = true + captions, er := w.GetCaptions(ctx, fileID) - if er == nil { - fileExt := filepath.Ext(captionPath) - ext := fileExt[1:] - if !IsLangInCaptions(captionLang, ext, captions) { // only update captions if language code is not present - newCaption := &models.VideoCaption{ - LanguageCode: captionLang, - Filename: filepath.Base(captionPath), - CaptionType: ext, - } - captions = append(captions, newCaption) - er = w.UpdateCaptions(ctx, fileID, captions) - if er == nil { - logger.Debugf("Updated captions for file %s. 
Added %s", path, captionLang) - } + if er != nil { + return fmt.Errorf("getting captions for file %s: %w", path, er) + } + + fileExt := filepath.Ext(captionPath) + ext := fileExt[1:] + if !IsLangInCaptions(captionLang, ext, captions) { // only update captions if language code is not present + newCaption := &models.VideoCaption{ + LanguageCode: captionLang, + Filename: filepath.Base(captionPath), + CaptionType: ext, } + captions = append(captions, newCaption) + er = w.UpdateCaptions(ctx, fileID, captions) + if er != nil { + return fmt.Errorf("updating captions for file %s: %w", path, er) + } + + logger.Debugf("Updated captions for file %s. Added %s", path, captionLang) } } return err }); err != nil { logger.Error(err.Error()) } + + return matched } // CleanCaptions removes non existent/accessible language codes from captions diff --git a/pkg/file/walk.go b/pkg/file/walk.go index 3c6a157b7..bd33f42c3 100644 --- a/pkg/file/walk.go +++ b/pkg/file/walk.go @@ -81,8 +81,8 @@ func walkSym(f models.FS, filename string, linkDirname string, walkFn fs.WalkDir return fsWalk(f, filename, symWalkFunc) } -// symWalk extends filepath.Walk to also follow symlinks -func symWalk(fs models.FS, path string, walkFn fs.WalkDirFunc) error { +// SymWalk extends filepath.Walk to also follow symlinks +func SymWalk(fs models.FS, path string, walkFn fs.WalkDirFunc) error { return walkSym(fs, path, path, walkFn) } diff --git a/pkg/file/zip.go b/pkg/file/zip.go index 4df2453dc..6d00c7e35 100644 --- a/pkg/file/zip.go +++ b/pkg/file/zip.go @@ -18,7 +18,7 @@ import ( ) var ( - errNotReaderAt = errors.New("not a ReaderAt") + ErrNotReaderAt = errors.New("invalid reader: does not implement io.ReaderAt") errZipFSOpenZip = errors.New("cannot open zip file inside zip file") ) @@ -38,7 +38,7 @@ func newZipFS(fs models.FS, path string, size int64) (*zipFS, error) { asReaderAt, _ := reader.(io.ReaderAt) if asReaderAt == nil { reader.Close() - return nil, errNotReaderAt + return nil, ErrNotReaderAt } 
zipReader, err := zip.NewReader(asReaderAt, size) @@ -99,7 +99,9 @@ func (f *zipFS) rel(name string) (string, error) { relName, err := filepath.Rel(f.zipPath, name) if err != nil { - return "", fmt.Errorf("internal error getting relative path: %w", err) + // if the path is not relative to the zip path, then it's not found in the zip file, + // so treat this as a file not found + return "", fs.ErrNotExist } // convert relName to use slash, since zip files do so regardless diff --git a/pkg/fsutil/file.go b/pkg/fsutil/file.go index 1d0c0c473..05a127129 100644 --- a/pkg/fsutil/file.go +++ b/pkg/fsutil/file.go @@ -148,7 +148,7 @@ func Touch(path string) error { var ( replaceCharsRE = regexp.MustCompile(`[&=\\/:*"?_ ]`) - removeCharsRE = regexp.MustCompile(`[^[:alnum:]-.]`) + removeCharsRE = regexp.MustCompile(`[^\p{L}\p{N}\-.]`) multiHyphenRE = regexp.MustCompile(`\-+`) ) diff --git a/pkg/fsutil/file_test.go b/pkg/fsutil/file_test.go index 4d84f8a47..df1077df2 100644 --- a/pkg/fsutil/file_test.go +++ b/pkg/fsutil/file_test.go @@ -15,6 +15,9 @@ func TestSanitiseBasename(t *testing.T) { {"multi-hyphen", `hyphened--name`, "hyphened-name-2da2a58f"}, {"replaced characters", `a&b=c\d/:e*"f?_ g`, "a-b-c-d-e-f-g-ffca6fb0"}, {"removed characters", `foo!!bar@@and, more`, "foobarand-more-7cee02ab"}, + {"unicode cjk", `テスト`, "テスト-63b560db"}, + {"unicode korean", `시험`, "시험-3fcc7beb"}, + {"mixed unicode", `Test テスト`, "Test-テスト-366aff1e"}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { diff --git a/pkg/fsutil/fs.go b/pkg/fsutil/fs.go index 2b5c37f62..032bec53c 100644 --- a/pkg/fsutil/fs.go +++ b/pkg/fsutil/fs.go @@ -5,7 +5,6 @@ import ( "fmt" "os" "path/filepath" - "strings" "unicode" ) @@ -27,23 +26,15 @@ func IsFsPathCaseSensitive(path string) (bool, error) { if err != nil { // cannot be case flipped return false, err } - i := strings.LastIndex(path, base) - if i < 0 { // shouldn't happen - return false, fmt.Errorf("could not case flip path %s", path) - } - flipped 
:= []byte(path) - for _, c := range []byte(fBase) { // replace base of path with the flipped one ( we need to flip the base or last dir part ) - flipped[i] = c - i++ - } + flippedPath := filepath.Join(filepath.Dir(path), fBase) - fiCase, err := os.Stat(string(flipped)) + fiCase, err := os.Stat(flippedPath) if err != nil { // cannot stat the case flipped path return true, nil // fs of path should be case sensitive } - if fiCase.ModTime() == fi.ModTime() { // file path exists and is the same + if fiCase.ModTime().Equal(fi.ModTime()) { // file path exists and is the same return false, nil // fs of path is not case sensitive } return false, fmt.Errorf("can not determine case sensitivity of path %s", path) diff --git a/pkg/fsutil/fs_test.go b/pkg/fsutil/fs_test.go new file mode 100644 index 000000000..155e76ba5 --- /dev/null +++ b/pkg/fsutil/fs_test.go @@ -0,0 +1,55 @@ +package fsutil + +import ( + "os" + "path/filepath" + "testing" +) + +func TestIsFsPathCaseSensitive_UnicodeByteLength(t *testing.T) { + // Ⱥ (U+023A) is 2 bytes in UTF-8 + // Its lowercase ⱥ (U+2C65) is 3 bytes in UTF-8 + + dir := t.TempDir() + makeDir := func(path string) { + // Create the directory so os.Stat succeeds + if err := os.Mkdir(path, 0755); err != nil { + t.Fatal(err) + } + } + + path := filepath.Join(dir, "Ⱥtest") + makeDir(path) + + // ensure the test does not panic due to byte length differences in the case flipped path + _, err := IsFsPathCaseSensitive(path) + if err != nil { + t.Fatal(err) + } + + // no guarantee about case sensitivity of the fs running the tests, + // so we just want to ensure the function works and does not panic + // assert.True(t, r, "expected fs to be case sensitive") + + // test regular ASCII paths still work + path2 := filepath.Join(dir, "Test") + makeDir(path2) + + _, err = IsFsPathCaseSensitive(path2) + if err != nil { + t.Fatal(err) + } + + // assert.True(t, r, "expected fs to be case sensitive") + + // Ensure that subfolders of a folder with multi-byte chars 
is not causing a panic + path3 := filepath.Join(dir, "NoPanic ❤️") + makeDir(path3) + path4 := filepath.Join(path3, "Test") + makeDir(path4) + + _, err = IsFsPathCaseSensitive(path4) + if err != nil { + t.Fatal(err) + } +} diff --git a/pkg/fsutil/trash.go b/pkg/fsutil/trash.go new file mode 100644 index 000000000..9a3bed835 --- /dev/null +++ b/pkg/fsutil/trash.go @@ -0,0 +1,43 @@ +package fsutil + +import ( + "fmt" + "os" + "path/filepath" + "time" +) + +// MoveToTrash moves a file or directory to a custom trash directory. +// If a file with the same name already exists in the trash, a timestamp is appended. +// Returns the destination path where the file was moved to. +func MoveToTrash(sourcePath string, trashPath string) (string, error) { + // Get absolute path for the source + absSourcePath, err := filepath.Abs(sourcePath) + if err != nil { + return "", fmt.Errorf("failed to get absolute path: %w", err) + } + + // Ensure trash directory exists + if err := os.MkdirAll(trashPath, 0755); err != nil { + return "", fmt.Errorf("failed to create trash directory: %w", err) + } + + // Get the base name of the file/directory + baseName := filepath.Base(absSourcePath) + destPath := filepath.Join(trashPath, baseName) + + // If a file with the same name already exists in trash, append timestamp + if _, err := os.Stat(destPath); err == nil { + ext := filepath.Ext(baseName) + nameWithoutExt := baseName[:len(baseName)-len(ext)] + timestamp := time.Now().Format("20060102-150405") + destPath = filepath.Join(trashPath, fmt.Sprintf("%s_%s%s", nameWithoutExt, timestamp, ext)) + } + + // Move the file to trash using SafeMove to support cross-filesystem moves + if err := SafeMove(absSourcePath, destPath); err != nil { + return "", fmt.Errorf("failed to move to trash: %w", err) + } + + return destPath, nil +} diff --git a/pkg/gallery/delete.go b/pkg/gallery/delete.go index f5186f948..4bc2e2492 100644 --- a/pkg/gallery/delete.go +++ b/pkg/gallery/delete.go @@ -8,13 +8,13 @@ import ( 
"github.com/stashapp/stash/pkg/models" ) -func (s *Service) Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error) { +func (s *Service) Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) ([]*models.Image, error) { var imgsDestroyed []*models.Image // chapter deletion is done via delete cascade, so we don't need to do anything here // if this is a zip-based gallery, delete the images as well first - zipImgsDestroyed, err := s.destroyZipFileImages(ctx, i, fileDeleter, deleteGenerated, deleteFile) + zipImgsDestroyed, err := s.destroyZipFileImages(ctx, i, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry) if err != nil { return nil, err } @@ -45,7 +45,7 @@ func DestroyChapter(ctx context.Context, galleryChapter *models.GalleryChapter, return qb.Destroy(ctx, galleryChapter.ID) } -func (s *Service) destroyZipFileImages(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error) { +func (s *Service) destroyZipFileImages(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) ([]*models.Image, error) { if err := i.LoadFiles(ctx, s.Repository); err != nil { return nil, err } @@ -81,6 +81,12 @@ func (s *Service) destroyZipFileImages(ctx context.Context, i *models.Gallery, f if err := destroyer.DestroyZip(ctx, f, fileDeleter.Deleter, deleteFile); err != nil { return nil, err } + } else if destroyFileEntry { + // destroy file DB entry without deleting filesystem file + const deleteFileFromFS = false + if err := destroyer.DestroyZip(ctx, f, nil, deleteFileFromFS); err != nil { + return nil, err + } } } diff --git a/pkg/gallery/import.go b/pkg/gallery/import.go index 7cdf53691..e33297bdb 100644 --- a/pkg/gallery/import.go +++ b/pkg/gallery/import.go @@ -28,8 +28,9 @@ type 
Importer struct { Input jsonschema.Gallery MissingRefBehaviour models.ImportMissingRefEnum - ID int - gallery models.Gallery + ID int + gallery models.Gallery + customFields map[string]interface{} } func (i *Importer) PreImport(ctx context.Context) error { @@ -51,6 +52,8 @@ func (i *Importer) PreImport(ctx context.Context) error { return err } + i.customFields = i.Input.CustomFields + return nil } @@ -126,7 +129,7 @@ func (i *Importer) populateStudio(ctx context.Context) error { } func (i *Importer) createStudio(ctx context.Context, name string) (int, error) { - newStudio := models.NewStudio() + newStudio := models.NewCreateStudioInput() newStudio.Name = name err := i.StudioWriter.Create(ctx, &newStudio) @@ -249,7 +252,9 @@ func (i *Importer) createTags(ctx context.Context, names []string) ([]*models.Ta newTag := models.NewTag() newTag.Name = name - err := i.TagWriter.Create(ctx, &newTag) + err := i.TagWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return nil, err } @@ -265,7 +270,7 @@ func (i *Importer) populateFilesFolder(ctx context.Context) error { for _, ref := range i.Input.ZipFiles { path := ref - f, err := i.FileFinder.FindByPath(ctx, path) + f, err := i.FileFinder.FindByPath(ctx, path, true) if err != nil { return fmt.Errorf("error finding file: %w", err) } @@ -281,7 +286,7 @@ func (i *Importer) populateFilesFolder(ctx context.Context) error { if i.Input.FolderPath != "" { path := i.Input.FolderPath - f, err := i.FolderFinder.FindByPath(ctx, path) + f, err := i.FolderFinder.FindByPath(ctx, path, true) if err != nil { return fmt.Errorf("error finding folder: %w", err) } @@ -354,7 +359,11 @@ func (i *Importer) Create(ctx context.Context) (*int, error) { for _, f := range i.gallery.Files.List() { fileIDs = append(fileIDs, f.Base().ID) } - err := i.ReaderWriter.Create(ctx, &i.gallery, fileIDs) + err := i.ReaderWriter.Create(ctx, &models.CreateGalleryInput{ + Gallery: &i.gallery, + FileIDs: fileIDs, + CustomFields: 
i.customFields, + }) if err != nil { return nil, fmt.Errorf("error creating gallery: %v", err) } @@ -366,7 +375,12 @@ func (i *Importer) Create(ctx context.Context) (*int, error) { func (i *Importer) Update(ctx context.Context, id int) error { gallery := i.gallery gallery.ID = id - err := i.ReaderWriter.Update(ctx, &gallery) + err := i.ReaderWriter.Update(ctx, &models.UpdateGalleryInput{ + Gallery: &gallery, + CustomFields: models.CustomFieldsInput{ + Full: i.customFields, + }, + }) if err != nil { return fmt.Errorf("error updating existing gallery: %v", err) } diff --git a/pkg/gallery/import_test.go b/pkg/gallery/import_test.go index b64f80d8f..932f84d48 100644 --- a/pkg/gallery/import_test.go +++ b/pkg/gallery/import_test.go @@ -115,9 +115,9 @@ func TestImporterPreImportWithMissingStudio(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3) - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) - s.ID = existingStudioID + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Run(func(args mock.Arguments) { + s := args.Get(1).(*models.CreateStudioInput) + s.Studio.ID = existingStudioID }).Return(nil) err := i.PreImport(testCtx) @@ -147,7 +147,7 @@ func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once() - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Return(errors.New("Create error")) + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -289,9 +289,9 @@ func TestImporterPreImportWithMissingTag(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) - db.Tag.On("Create", testCtx, 
mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = existingTagID + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = existingTagID }).Return(nil) err := i.PreImport(testCtx) @@ -323,7 +323,7 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once() - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) diff --git a/pkg/gallery/scan.go b/pkg/gallery/scan.go index 9d0313b17..7689bb9b6 100644 --- a/pkg/gallery/scan.go +++ b/pkg/gallery/scan.go @@ -17,14 +17,13 @@ type ScanCreatorUpdater interface { FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Gallery, error) GetFiles(ctx context.Context, relatedID int) ([]models.File, error) - Create(ctx context.Context, newGallery *models.Gallery, fileIDs []models.FileID) error + models.GalleryCreator UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error) AddFileID(ctx context.Context, id int, fileID models.FileID) error } type ScanSceneFinderUpdater interface { FindByPath(ctx context.Context, p string) ([]*models.Scene, error) - Update(ctx context.Context, updatedScene *models.Scene) error AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error } @@ -80,7 +79,10 @@ func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models. logger.Infof("%s doesn't exist. 
Creating new gallery...", f.Base().Path) - if err := h.CreatorUpdater.Create(ctx, &newGallery, []models.FileID{baseFile.ID}); err != nil { + if err := h.CreatorUpdater.Create(ctx, &models.CreateGalleryInput{ + Gallery: &newGallery, + FileIDs: []models.FileID{baseFile.ID}, + }); err != nil { return fmt.Errorf("creating new gallery: %w", err) } @@ -132,13 +134,14 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. if err := h.CreatorUpdater.AddFileID(ctx, i.ID, f.Base().ID); err != nil { return fmt.Errorf("adding file to gallery: %w", err) } - // update updated_at time - if _, err := h.CreatorUpdater.UpdatePartial(ctx, i.ID, models.NewGalleryPartial()); err != nil { - return fmt.Errorf("updating gallery: %w", err) - } } if !found || updateExisting { + // update updated_at time when file association or content changes + if _, err := h.CreatorUpdater.UpdatePartial(ctx, i.ID, models.NewGalleryPartial()); err != nil { + return fmt.Errorf("updating gallery: %w", err) + } + h.PluginCache.RegisterPostHooks(ctx, i.ID, hook.GalleryUpdatePost, nil, nil) } } diff --git a/pkg/gallery/scan_test.go b/pkg/gallery/scan_test.go new file mode 100644 index 000000000..4a89206e3 --- /dev/null +++ b/pkg/gallery/scan_test.go @@ -0,0 +1,108 @@ +package gallery + +import ( + "context" + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/plugin" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestAssociateExisting_UpdatePartialOnContentChange(t *testing.T) { + const ( + testGalleryID = 1 + testFileID = 100 + ) + + existingFile := &models.BaseFile{ID: models.FileID(testFileID), Path: "test.zip"} + + makeGallery := func() *models.Gallery { + return &models.Gallery{ + ID: testGalleryID, + Files: models.NewRelatedFiles([]models.File{existingFile}), + } + } + + tests := []struct { + name string + updateExisting bool + expectUpdate bool + }{ + { + 
name: "calls UpdatePartial when file content changed", + updateExisting: true, + expectUpdate: true, + }, + { + name: "skips UpdatePartial when file unchanged and already associated", + updateExisting: false, + expectUpdate: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + db := mocks.NewDatabase() + db.Gallery.On("GetFiles", mock.Anything, testGalleryID).Return([]models.File{existingFile}, nil) + + if tt.expectUpdate { + db.Gallery.On("UpdatePartial", mock.Anything, testGalleryID, mock.Anything). + Return(&models.Gallery{ID: testGalleryID}, nil) + } + + h := &ScanHandler{ + CreatorUpdater: db.Gallery, + PluginCache: &plugin.Cache{}, + } + + db.WithTxnCtx(func(ctx context.Context) { + err := h.associateExisting(ctx, []*models.Gallery{makeGallery()}, existingFile, tt.updateExisting) + assert.NoError(t, err) + }) + + if tt.expectUpdate { + db.Gallery.AssertCalled(t, "UpdatePartial", mock.Anything, testGalleryID, mock.Anything) + } else { + db.Gallery.AssertNotCalled(t, "UpdatePartial", mock.Anything, mock.Anything, mock.Anything) + } + }) + } +} + +func TestAssociateExisting_UpdatePartialOnNewFile(t *testing.T) { + const ( + testGalleryID = 1 + existFileID = 100 + newFileID = 200 + ) + + existingFile := &models.BaseFile{ID: models.FileID(existFileID), Path: "existing.zip"} + newFile := &models.BaseFile{ID: models.FileID(newFileID), Path: "new.zip"} + + gallery := &models.Gallery{ + ID: testGalleryID, + Files: models.NewRelatedFiles([]models.File{existingFile}), + } + + db := mocks.NewDatabase() + db.Gallery.On("GetFiles", mock.Anything, testGalleryID).Return([]models.File{existingFile}, nil) + db.Gallery.On("AddFileID", mock.Anything, testGalleryID, models.FileID(newFileID)).Return(nil) + db.Gallery.On("UpdatePartial", mock.Anything, testGalleryID, mock.Anything). 
+ Return(&models.Gallery{ID: testGalleryID}, nil) + + h := &ScanHandler{ + CreatorUpdater: db.Gallery, + PluginCache: &plugin.Cache{}, + } + + db.WithTxnCtx(func(ctx context.Context) { + err := h.associateExisting(ctx, []*models.Gallery{gallery}, newFile, false) + assert.NoError(t, err) + }) + + db.Gallery.AssertCalled(t, "AddFileID", mock.Anything, testGalleryID, models.FileID(newFileID)) + db.Gallery.AssertCalled(t, "UpdatePartial", mock.Anything, testGalleryID, mock.Anything) +} diff --git a/pkg/gallery/service.go b/pkg/gallery/service.go index 62604e0c5..5b2678480 100644 --- a/pkg/gallery/service.go +++ b/pkg/gallery/service.go @@ -16,7 +16,7 @@ type ImageFinder interface { } type ImageService interface { - Destroy(ctx context.Context, i *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error + Destroy(ctx context.Context, i *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error DestroyZipImages(ctx context.Context, zipFile models.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) DestroyFolderImages(ctx context.Context, folderID models.FolderID, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error) } diff --git a/pkg/group/create.go b/pkg/group/create.go index 56d6b7a4e..9cc578b23 100644 --- a/pkg/group/create.go +++ b/pkg/group/create.go @@ -12,27 +12,37 @@ var ( ErrHierarchyLoop = errors.New("a group cannot be contained by one of its subgroups") ) -func (s *Service) Create(ctx context.Context, group *models.Group, frontimageData []byte, backimageData []byte) error { +func (s *Service) Create(ctx context.Context, input *models.CreateGroupInput) error { r := s.Repository + group := input.Group if err := s.validateCreate(ctx, group); err != nil { return err } - err := r.Create(ctx, group) + err := r.Create(ctx, input.Group) if err != nil { return err } - // update image table - if len(frontimageData) > 0 { - if 
err := r.UpdateFrontImage(ctx, group.ID, frontimageData); err != nil { + // set custom fields + if len(input.CustomFields) > 0 { + if err := r.SetCustomFields(ctx, group.ID, models.CustomFieldsInput{ + Full: input.CustomFields, + }); err != nil { return err } } - if len(backimageData) > 0 { - if err := r.UpdateBackImage(ctx, group.ID, backimageData); err != nil { + // update image table + if len(input.FrontImageData) > 0 { + if err := r.UpdateFrontImage(ctx, group.ID, input.FrontImageData); err != nil { + return err + } + } + + if len(input.BackImageData) > 0 { + if err := r.UpdateBackImage(ctx, group.ID, input.BackImageData); err != nil { return err } } diff --git a/pkg/group/export.go b/pkg/group/export.go index 418ce7bed..0a56fbdbb 100644 --- a/pkg/group/export.go +++ b/pkg/group/export.go @@ -11,61 +11,67 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -type ImageGetter interface { - GetFrontImage(ctx context.Context, movieID int) ([]byte, error) - GetBackImage(ctx context.Context, movieID int) ([]byte, error) +type GroupExportReader interface { + GetFrontImage(ctx context.Context, groupID int) ([]byte, error) + GetBackImage(ctx context.Context, groupID int) ([]byte, error) + GetCustomFields(ctx context.Context, groupID int) (map[string]interface{}, error) } -// ToJSON converts a Movie into its JSON equivalent. -func ToJSON(ctx context.Context, reader ImageGetter, studioReader models.StudioGetter, movie *models.Group) (*jsonschema.Group, error) { - newMovieJSON := jsonschema.Group{ - Name: movie.Name, - Aliases: movie.Aliases, - Director: movie.Director, - Synopsis: movie.Synopsis, - URLs: movie.URLs.List(), - CreatedAt: json.JSONTime{Time: movie.CreatedAt}, - UpdatedAt: json.JSONTime{Time: movie.UpdatedAt}, +// ToJSON converts a Group into its JSON equivalent. 
+func ToJSON(ctx context.Context, reader GroupExportReader, studioReader models.StudioGetter, group *models.Group) (*jsonschema.Group, error) { + newGroupJSON := jsonschema.Group{ + Name: group.Name, + Aliases: group.Aliases, + Director: group.Director, + Synopsis: group.Synopsis, + URLs: group.URLs.List(), + CreatedAt: json.JSONTime{Time: group.CreatedAt}, + UpdatedAt: json.JSONTime{Time: group.UpdatedAt}, } - if movie.Date != nil { - newMovieJSON.Date = movie.Date.String() + if group.Date != nil { + newGroupJSON.Date = group.Date.String() } - if movie.Rating != nil { - newMovieJSON.Rating = *movie.Rating + if group.Rating != nil { + newGroupJSON.Rating = *group.Rating } - if movie.Duration != nil { - newMovieJSON.Duration = *movie.Duration + if group.Duration != nil { + newGroupJSON.Duration = *group.Duration } - if movie.StudioID != nil { - studio, err := studioReader.Find(ctx, *movie.StudioID) + if group.StudioID != nil { + studio, err := studioReader.Find(ctx, *group.StudioID) if err != nil { return nil, fmt.Errorf("error getting movie studio: %v", err) } if studio != nil { - newMovieJSON.Studio = studio.Name + newGroupJSON.Studio = studio.Name } } - frontImage, err := reader.GetFrontImage(ctx, movie.ID) + frontImage, err := reader.GetFrontImage(ctx, group.ID) if err != nil { logger.Errorf("Error getting movie front image: %v", err) } if len(frontImage) > 0 { - newMovieJSON.FrontImage = utils.GetBase64StringFromData(frontImage) + newGroupJSON.FrontImage = utils.GetBase64StringFromData(frontImage) } - backImage, err := reader.GetBackImage(ctx, movie.ID) + backImage, err := reader.GetBackImage(ctx, group.ID) if err != nil { logger.Errorf("Error getting movie back image: %v", err) } if len(backImage) > 0 { - newMovieJSON.BackImage = utils.GetBase64StringFromData(backImage) + newGroupJSON.BackImage = utils.GetBase64StringFromData(backImage) } - return &newMovieJSON, nil + newGroupJSON.CustomFields, err = reader.GetCustomFields(ctx, group.ID) + if err != nil { + 
return nil, fmt.Errorf("getting group custom fields: %v", err) + } + + return &newGroupJSON, nil } diff --git a/pkg/group/export_test.go b/pkg/group/export_test.go index 5f8d9f7dc..bff50de5e 100644 --- a/pkg/group/export_test.go +++ b/pkg/group/export_test.go @@ -8,24 +8,26 @@ import ( "github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/models/mocks" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" "testing" "time" ) const ( - movieID = 1 - emptyID = 2 - errFrontImageID = 3 - errBackImageID = 4 - errStudioMovieID = 5 - missingStudioMovieID = 6 + movieID = iota + 1 + emptyID + errFrontImageID + errBackImageID + errStudioMovieID + missingStudioMovieID + errCustomFieldsID ) const ( - studioID = 1 - missingStudioID = 2 - errStudioID = 3 + studioID = iota + 1 + missingStudioID + errStudioID ) const movieName = "testMovie" @@ -51,6 +53,11 @@ const ( var ( frontImageBytes = []byte("frontImageBytes") backImageBytes = []byte("backImageBytes") + + emptyCustomFields = make(map[string]interface{}) + customFields = map[string]interface{}{ + "customField1": "customValue1", + } ) var movieStudio models.Studio = models.Studio{ @@ -88,7 +95,7 @@ func createEmptyMovie(id int) models.Group { } } -func createFullJSONMovie(studio, frontImage, backImage string) *jsonschema.Group { +func createFullJSONMovie(studio, frontImage, backImage string, customFields map[string]interface{}) *jsonschema.Group { return &jsonschema.Group{ Name: movieName, Aliases: movieAliases, @@ -107,6 +114,7 @@ func createFullJSONMovie(studio, frontImage, backImage string) *jsonschema.Group UpdatedAt: json.JSONTime{ Time: updateTime, }, + CustomFields: customFields, } } @@ -119,13 +127,15 @@ func createEmptyJSONMovie() *jsonschema.Group { UpdatedAt: json.JSONTime{ Time: updateTime, }, + CustomFields: emptyCustomFields, } } type testScenario struct { - movie models.Group - expected *jsonschema.Group - err bool + movie models.Group + customFields 
map[string]interface{} + expected *jsonschema.Group + err bool } var scenarios []testScenario @@ -134,36 +144,48 @@ func initTestTable() { scenarios = []testScenario{ { createFullMovie(movieID, studioID), - createFullJSONMovie(studioName, frontImage, backImage), + customFields, + createFullJSONMovie(studioName, frontImage, backImage, customFields), false, }, { createEmptyMovie(emptyID), + emptyCustomFields, createEmptyJSONMovie(), false, }, { createFullMovie(errFrontImageID, studioID), - createFullJSONMovie(studioName, "", backImage), + emptyCustomFields, + createFullJSONMovie(studioName, "", backImage, emptyCustomFields), // failure to get front image should not cause error false, }, { createFullMovie(errBackImageID, studioID), - createFullJSONMovie(studioName, frontImage, ""), + emptyCustomFields, + createFullJSONMovie(studioName, frontImage, "", emptyCustomFields), // failure to get back image should not cause error false, }, { createFullMovie(errStudioMovieID, errStudioID), + emptyCustomFields, nil, true, }, { createFullMovie(missingStudioMovieID, missingStudioID), - createFullJSONMovie("", frontImage, backImage), + emptyCustomFields, + createFullJSONMovie("", frontImage, backImage, emptyCustomFields), false, }, + { + createFullMovie(errCustomFieldsID, studioID), + customFields, + nil, + true, + }, } } @@ -179,6 +201,7 @@ func TestToJSON(t *testing.T) { db.Group.On("GetFrontImage", testCtx, emptyID).Return(nil, nil).Once().Maybe() db.Group.On("GetFrontImage", testCtx, errFrontImageID).Return(nil, imageErr).Once() db.Group.On("GetFrontImage", testCtx, errBackImageID).Return(frontImageBytes, nil).Once() + db.Group.On("GetFrontImage", testCtx, errCustomFieldsID).Return(nil, nil).Once() db.Group.On("GetBackImage", testCtx, movieID).Return(backImageBytes, nil).Once() db.Group.On("GetBackImage", testCtx, missingStudioMovieID).Return(backImageBytes, nil).Once() @@ -186,6 +209,11 @@ func TestToJSON(t *testing.T) { db.Group.On("GetBackImage", testCtx, 
errBackImageID).Return(nil, imageErr).Once() db.Group.On("GetBackImage", testCtx, errFrontImageID).Return(backImageBytes, nil).Maybe() db.Group.On("GetBackImage", testCtx, errStudioMovieID).Return(backImageBytes, nil).Maybe() + db.Group.On("GetBackImage", testCtx, errCustomFieldsID).Return(nil, nil).Once() + + db.Group.On("GetCustomFields", testCtx, movieID).Return(customFields, nil).Once() + db.Group.On("GetCustomFields", testCtx, errCustomFieldsID).Return(nil, errors.New("error getting custom fields")).Once() + db.Group.On("GetCustomFields", testCtx, mock.Anything).Return(emptyCustomFields, nil).Times(4) studioErr := errors.New("error getting studio") diff --git a/pkg/group/import.go b/pkg/group/import.go index 3fc7db8f1..1a332bac2 100644 --- a/pkg/group/import.go +++ b/pkg/group/import.go @@ -14,6 +14,7 @@ import ( type ImporterReaderWriter interface { models.GroupCreatorUpdater + models.CustomFieldsWriter FindByName(ctx context.Context, name string, nocase bool) (*models.Group, error) } @@ -126,7 +127,9 @@ func createTags(ctx context.Context, tagWriter models.TagFinderCreator, names [] newTag := models.NewTag() newTag.Name = name - err := tagWriter.Create(ctx, &newTag) + err := tagWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return nil, err } @@ -203,7 +206,7 @@ func (i *Importer) populateStudio(ctx context.Context) error { } func (i *Importer) createStudio(ctx context.Context, name string) (int, error) { - newStudio := models.NewStudio() + newStudio := models.NewCreateStudioInput() newStudio.Name = name err := i.StudioWriter.Create(ctx, &newStudio) @@ -231,6 +234,14 @@ func (i *Importer) PostImport(ctx context.Context, id int) error { } } + if len(i.Input.CustomFields) > 0 { + if err := i.ReaderWriter.SetCustomFields(ctx, id, models.CustomFieldsInput{ + Full: i.Input.CustomFields, + }); err != nil { + return fmt.Errorf("error setting custom fields: %v", err) + } + } + if len(i.frontImageData) > 0 { if err := 
i.ReaderWriter.UpdateFrontImage(ctx, id, i.frontImageData); err != nil { return fmt.Errorf("error setting group front image: %v", err) diff --git a/pkg/group/import_test.go b/pkg/group/import_test.go index c4ca47442..006c91327 100644 --- a/pkg/group/import_test.go +++ b/pkg/group/import_test.go @@ -121,9 +121,9 @@ func TestImporterPreImportWithMissingStudio(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3) - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) - s.ID = existingStudioID + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Run(func(args mock.Arguments) { + s := args.Get(1).(*models.CreateStudioInput) + s.Studio.ID = existingStudioID }).Return(nil) err := i.PreImport(testCtx) @@ -156,7 +156,7 @@ func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once() - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Return(errors.New("Create error")) + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -212,9 +212,9 @@ func TestImporterPreImportWithMissingTag(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = existingTagID + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = existingTagID }).Return(nil) err := i.PreImport(testCtx) @@ -247,7 +247,7 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { } db.Tag.On("FindByNames", testCtx, 
[]string{missingTagName}, false).Return(nil, nil).Once() - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -259,17 +259,29 @@ func TestImporterPostImport(t *testing.T) { db := mocks.NewDatabase() i := Importer{ - ReaderWriter: db.Group, - StudioWriter: db.Studio, + ReaderWriter: db.Group, + StudioWriter: db.Studio, + Input: jsonschema.Group{ + CustomFields: customFields, + }, frontImageData: frontImageBytes, backImageData: backImageBytes, } updateMovieImageErr := errors.New("UpdateImages error") + customFieldsErr := errors.New("SetCustomFields error") + + customFieldsInput := models.CustomFieldsInput{ + Full: customFields, + } db.Group.On("UpdateFrontImage", testCtx, movieID, frontImageBytes).Return(nil).Once() - db.Group.On("UpdateBackImage", testCtx, movieID, backImageBytes).Return(nil).Once() db.Group.On("UpdateFrontImage", testCtx, errImageID, frontImageBytes).Return(updateMovieImageErr).Once() + db.Group.On("UpdateBackImage", testCtx, movieID, backImageBytes).Return(nil).Once() + + db.Group.On("SetCustomFields", testCtx, movieID, customFieldsInput).Return(nil).Once() + db.Group.On("SetCustomFields", testCtx, errImageID, customFieldsInput).Return(nil).Once() + db.Group.On("SetCustomFields", testCtx, errCustomFieldsID, customFieldsInput).Return(customFieldsErr).Once() err := i.PostImport(testCtx, movieID) assert.Nil(t, err) @@ -277,6 +289,9 @@ func TestImporterPostImport(t *testing.T) { err = i.PostImport(testCtx, errImageID) assert.NotNil(t, err) + err = i.PostImport(testCtx, errCustomFieldsID) + assert.NotNil(t, err) + db.AssertExpectations(t) } diff --git a/pkg/group/service.go b/pkg/group/service.go index ff6e03541..37094665a 100644 --- a/pkg/group/service.go +++ b/pkg/group/service.go @@ -10,6 +10,7 @@ type CreatorUpdater interface { 
models.GroupGetter models.GroupCreator models.GroupUpdater + models.CustomFieldsWriter models.ContainingGroupLoader models.SubGroupLoader diff --git a/pkg/hash/imagephash/phash.go b/pkg/hash/imagephash/phash.go new file mode 100644 index 000000000..0af5adec9 --- /dev/null +++ b/pkg/hash/imagephash/phash.go @@ -0,0 +1,84 @@ +package imagephash + +import ( + "bytes" + "context" + "errors" + "fmt" + "image" + + "github.com/corona10/goimagehash" + "github.com/stashapp/stash/pkg/ffmpeg" + "github.com/stashapp/stash/pkg/ffmpeg/transcoder" + "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" +) + +// Generate computes a perceptual hash for an image file. +func Generate(encoder *ffmpeg.FFMpeg, imageFile *models.ImageFile) (*uint64, error) { + img, err := loadImage(encoder, imageFile) + if err != nil { + return nil, fmt.Errorf("loading image: %w", err) + } + + hash, err := goimagehash.PerceptionHash(img) + if err != nil { + return nil, fmt.Errorf("computing phash from image: %w", err) + } + + hashValue := hash.GetHash() + return &hashValue, nil +} + +// loadImage loads an image from disk and decodes it. +// Where Go has no built-in decoder for a specific format, ffmpeg is used to convert to BMP first. 
+func loadImage(encoder *ffmpeg.FFMpeg, imageFile *models.ImageFile) (image.Image, error) { + // try to load with Go's built-in decoders first for better performance + reader, err := imageFile.Open(&file.OsFS{}) + if err != nil { + return nil, err + } + defer reader.Close() + + buf := new(bytes.Buffer) + if _, err := buf.ReadFrom(reader); err != nil { + return nil, err + } + + img, _, err := image.Decode(buf) + if errors.Is(err, image.ErrFormat) { + // try ffmpeg as a fallback for unsupported formats + // ffmpeg cannot read files inside zips + if imageFile.Base().ZipFileID != nil { + return nil, fmt.Errorf("ffmpeg fallback unsupported for images in zip files") + } + return loadImageFFmpeg(encoder, imageFile.Path) + } + + if err != nil { + return nil, fmt.Errorf("decoding image: %w", err) + } + + return img, nil +} + +// loadImageFFmpeg uses ffmpeg to convert an image to BMP and then decodes it. +func loadImageFFmpeg(encoder *ffmpeg.FFMpeg, path string) (image.Image, error) { + options := transcoder.ScreenshotOptions{ + OutputPath: "-", + OutputType: transcoder.ScreenshotOutputTypeBMP, + } + + args := transcoder.ScreenshotTime(path, 0, options) + data, err := encoder.GenerateOutput(context.Background(), args, nil) + if err != nil { + return nil, fmt.Errorf("converting image with ffmpeg: %w", err) + } + + img, _, err := image.Decode(bytes.NewReader(data)) + if err != nil { + return nil, fmt.Errorf("decoding ffmpeg output: %w", err) + } + + return img, nil +} diff --git a/pkg/image/delete.go b/pkg/image/delete.go index 69fba9bd6..28bb54a59 100644 --- a/pkg/image/delete.go +++ b/pkg/image/delete.go @@ -19,6 +19,7 @@ type FileDeleter struct { } // MarkGeneratedFiles marks for deletion the generated files for the provided image. +// Generated files bypass trash and are permanently deleted since they can be regenerated. 
func (d *FileDeleter) MarkGeneratedFiles(image *models.Image) error { var files []string thumbPath := d.Paths.Generated.GetThumbnailPath(image.Checksum, models.DefaultGthumbWidth) @@ -32,12 +33,12 @@ func (d *FileDeleter) MarkGeneratedFiles(image *models.Image) error { files = append(files, prevPath) } - return d.Files(files) + return d.FilesWithoutTrash(files) } // Destroy destroys an image, optionally marking the file and generated files for deletion. -func (s *Service) Destroy(ctx context.Context, i *models.Image, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error { - return s.destroyImage(ctx, i, fileDeleter, deleteGenerated, deleteFile) +func (s *Service) Destroy(ctx context.Context, i *models.Image, fileDeleter *FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error { + return s.destroyImage(ctx, i, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry) } // DestroyZipImages destroys all images in zip, optionally marking the files and generated files for deletion. @@ -74,7 +75,8 @@ func (s *Service) DestroyZipImages(ctx context.Context, zipFile models.File, fil } const deleteFileInZip = false - if err := s.destroyImage(ctx, img, fileDeleter, deleteGenerated, deleteFileInZip); err != nil { + const destroyFileEntry = false + if err := s.destroyImage(ctx, img, fileDeleter, deleteGenerated, deleteFileInZip, destroyFileEntry); err != nil { return nil, err } @@ -134,7 +136,8 @@ func (s *Service) DestroyFolderImages(ctx context.Context, folderID models.Folde continue } - if err := s.Destroy(ctx, img, fileDeleter, deleteGenerated, deleteFile); err != nil { + const destroyFileEntry = false + if err := s.Destroy(ctx, img, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil { return nil, err } @@ -145,11 +148,15 @@ func (s *Service) DestroyFolderImages(ctx context.Context, folderID models.Folde } // Destroy destroys an image, optionally marking the file and generated files for deletion. 
-func (s *Service) destroyImage(ctx context.Context, i *models.Image, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error { +func (s *Service) destroyImage(ctx context.Context, i *models.Image, fileDeleter *FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error { if deleteFile { if err := s.deleteFiles(ctx, i, fileDeleter); err != nil { return err } + } else if destroyFileEntry { + if err := s.destroyFileEntries(ctx, i); err != nil { + return err + } } if deleteGenerated { @@ -191,3 +198,35 @@ func (s *Service) deleteFiles(ctx context.Context, i *models.Image, fileDeleter return nil } + +// destroyFileEntries destroys file entries from the database without deleting +// the files from the filesystem +func (s *Service) destroyFileEntries(ctx context.Context, i *models.Image) error { + if err := i.LoadFiles(ctx, s.Repository); err != nil { + return err + } + + for _, f := range i.Files.List() { + // only destroy file entries where there is no other associated image + otherImages, err := s.Repository.FindByFileID(ctx, f.Base().ID) + if err != nil { + return err + } + + if len(otherImages) > 1 { + // other image associated, don't remove + continue + } + + // don't destroy files in zip archives + if f.Base().ZipFileID == nil { + const deleteFile = false + logger.Info("Destroying image file entry: ", f.Base().Path) + if err := file.Destroy(ctx, s.File, f, nil, deleteFile); err != nil { + return err + } + } + } + + return nil +} diff --git a/pkg/image/export.go b/pkg/image/export.go index fdba6165c..eb5d5da27 100644 --- a/pkg/image/export.go +++ b/pkg/image/export.go @@ -2,16 +2,21 @@ package image import ( "context" + "fmt" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" ) +type ExportReader interface { + models.CustomFieldsReader +} + // ToBasicJSON converts a image object into its JSON object equivalent. 
It // does not convert the relationships to other objects, with the exception // of cover image. -func ToBasicJSON(image *models.Image) *jsonschema.Image { +func ToBasicJSON(ctx context.Context, reader ExportReader, image *models.Image) (*jsonschema.Image, error) { newImageJSON := jsonschema.Image{ Title: image.Title, Code: image.Code, @@ -33,11 +38,17 @@ func ToBasicJSON(image *models.Image) *jsonschema.Image { newImageJSON.Organized = image.Organized newImageJSON.OCounter = image.OCounter + var err error + newImageJSON.CustomFields, err = reader.GetCustomFields(ctx, image.ID) + if err != nil { + return nil, fmt.Errorf("getting image custom fields: %v", err) + } + for _, f := range image.Files.List() { newImageJSON.Files = append(newImageJSON.Files, f.Base().Path) } - return &newImageJSON + return &newImageJSON, nil } // GetStudioName returns the name of the provided image's studio. It returns an diff --git a/pkg/image/export_test.go b/pkg/image/export_test.go index 6adaf1d33..d0d36afbb 100644 --- a/pkg/image/export_test.go +++ b/pkg/image/export_test.go @@ -29,6 +29,10 @@ var ( dateObj, _ = models.ParseDate(date) organized = true ocounter = 2 + + customFields = map[string]interface{}{ + "customField1": "customValue1", + } ) const ( @@ -60,7 +64,7 @@ func createFullImage(id int) models.Image { } } -func createFullJSONImage() *jsonschema.Image { +func createFullJSONImage(customFields map[string]interface{}) *jsonschema.Image { return &jsonschema.Image{ Title: title, OCounter: ocounter, @@ -75,28 +79,40 @@ func createFullJSONImage() *jsonschema.Image { UpdatedAt: json.JSONTime{ Time: updateTime, }, + CustomFields: customFields, } } type basicTestScenario struct { - input models.Image - expected *jsonschema.Image + input models.Image + customFields map[string]interface{} + expected *jsonschema.Image } var scenarios = []basicTestScenario{ { createFullImage(imageID), - createFullJSONImage(), + customFields, + createFullJSONImage(customFields), }, } func TestToJSON(t 
*testing.T) { + db := mocks.NewDatabase() + db.Image.On("GetCustomFields", testCtx, imageID).Return(customFields, nil).Once() + for i, s := range scenarios { image := s.input - json := ToBasicJSON(&image) + json, err := ToBasicJSON(testCtx, db.Image, &image) + if err != nil { + t.Errorf("[%d] unexpected error: %s", i, err.Error()) + continue + } assert.Equal(t, s.expected, json, "[%d]", i) } + + db.AssertExpectations(t) } func createStudioImage(studioID int) models.Image { diff --git a/pkg/image/import.go b/pkg/image/import.go index ec200af04..d8dfa987f 100644 --- a/pkg/image/import.go +++ b/pkg/image/import.go @@ -31,8 +31,9 @@ type Importer struct { Input jsonschema.Image MissingRefBehaviour models.ImportMissingRefEnum - ID int - image models.Image + ID int + image models.Image + customFields map[string]interface{} } func (i *Importer) PreImport(ctx context.Context) error { @@ -58,6 +59,8 @@ func (i *Importer) PreImport(ctx context.Context) error { return err } + i.customFields = i.Input.CustomFields + return nil } @@ -110,7 +113,7 @@ func (i *Importer) populateFiles(ctx context.Context) error { for _, ref := range i.Input.Files { path := ref - f, err := i.FileFinder.FindByPath(ctx, path) + f, err := i.FileFinder.FindByPath(ctx, path, true) if err != nil { return fmt.Errorf("error finding file: %w", err) } @@ -159,7 +162,7 @@ func (i *Importer) populateStudio(ctx context.Context) error { } func (i *Importer) createStudio(ctx context.Context, name string) (int, error) { - newStudio := models.NewStudio() + newStudio := models.NewCreateStudioInput() newStudio.Name = name err := i.StudioWriter.Create(ctx, &newStudio) @@ -344,7 +347,11 @@ func (i *Importer) Create(ctx context.Context) (*int, error) { fileIDs = append(fileIDs, f.Base().ID) } - err := i.ReaderWriter.Create(ctx, &i.image, fileIDs) + err := i.ReaderWriter.Create(ctx, &models.CreateImageInput{ + Image: &i.image, + FileIDs: fileIDs, + CustomFields: i.customFields, + }) if err != nil { return nil, 
fmt.Errorf("error creating image: %v", err) } @@ -407,7 +414,9 @@ func createTags(ctx context.Context, tagWriter models.TagCreator, names []string newTag := models.NewTag() newTag.Name = name - err := tagWriter.Create(ctx, &newTag) + err := tagWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return nil, err } diff --git a/pkg/image/import_test.go b/pkg/image/import_test.go index 286e51fe3..a693c4568 100644 --- a/pkg/image/import_test.go +++ b/pkg/image/import_test.go @@ -45,7 +45,8 @@ func TestImporterPreImportWithStudio(t *testing.T) { i := Importer{ StudioWriter: db.Studio, Input: jsonschema.Image{ - Studio: existingStudioName, + Studio: existingStudioName, + CustomFields: customFields, }, } @@ -57,6 +58,7 @@ func TestImporterPreImportWithStudio(t *testing.T) { err := i.PreImport(testCtx) assert.Nil(t, err) assert.Equal(t, existingStudioID, *i.image.StudioID) + assert.Equal(t, customFields, i.customFields) i.Input.Studio = existingStudioErr err = i.PreImport(testCtx) @@ -77,9 +79,9 @@ func TestImporterPreImportWithMissingStudio(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3) - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Run(func(args mock.Arguments) { - s := args.Get(1).(*models.Studio) - s.ID = existingStudioID + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Run(func(args mock.Arguments) { + s := args.Get(1).(*models.CreateStudioInput) + s.Studio.ID = existingStudioID }).Return(nil) err := i.PreImport(testCtx) @@ -109,7 +111,7 @@ func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) { } db.Studio.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once() - db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.Studio")).Return(errors.New("Create error")) + db.Studio.On("Create", testCtx, mock.AnythingOfType("*models.CreateStudioInput")).Return(errors.New("Create error")) err := 
i.PreImport(testCtx) assert.NotNil(t, err) @@ -251,9 +253,9 @@ func TestImporterPreImportWithMissingTag(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = existingTagID + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = existingTagID }).Return(nil) err := i.PreImport(testCtx) @@ -285,7 +287,7 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once() - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) diff --git a/pkg/image/query.go b/pkg/image/query.go index b9b9e6628..958c9de9b 100644 --- a/pkg/image/query.go +++ b/pkg/image/query.go @@ -2,7 +2,9 @@ package image import ( "context" + "path/filepath" "strconv" + "strings" "github.com/stashapp/stash/pkg/models" ) @@ -46,6 +48,35 @@ func Query(ctx context.Context, qb Queryer, imageFilter *models.ImageFilterType, return images, nil } +// FilterFromPaths creates a ImageFilterType that filters using the provided +// paths. 
+func FilterFromPaths(paths []string) *models.ImageFilterType { + ret := &models.ImageFilterType{} + or := ret + sep := string(filepath.Separator) + + for _, p := range paths { + if !strings.HasSuffix(p, sep) { + p += sep + } + + if ret.Path == nil { + or = ret + } else { + newOr := &models.ImageFilterType{} + or.Or = newOr + or = newOr + } + + or.Path = &models.StringCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: p + "%", + } + } + + return ret +} + func CountByPerformerID(ctx context.Context, r QueryCounter, id int) (int, error) { filter := &models.ImageFilterType{ Performers: &models.MultiCriterionInput{ diff --git a/pkg/image/scan.go b/pkg/image/scan.go index a6002057f..682641e66 100644 --- a/pkg/image/scan.go +++ b/pkg/image/scan.go @@ -7,6 +7,7 @@ import ( "os" "path/filepath" "slices" + "strings" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -27,7 +28,7 @@ type ScanCreatorUpdater interface { GetFiles(ctx context.Context, relatedID int) ([]models.File, error) GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error) - Create(ctx context.Context, newImage *models.Image, fileIDs []models.FileID) error + Create(ctx context.Context, newImage *models.CreateImageInput) error UpdatePartial(ctx context.Context, id int, updatedImage models.ImagePartial) (*models.Image, error) AddFileID(ctx context.Context, id int, fileID models.FileID) error } @@ -35,10 +36,15 @@ type ScanCreatorUpdater interface { type GalleryFinderCreator interface { FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Gallery, error) FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Gallery, error) - Create(ctx context.Context, newObject *models.Gallery, fileIDs []models.FileID) error + models.GalleryCreator UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error) } +type ScanSceneFinderUpdater interface { + FindByPath(ctx context.Context, p string) 
([]*models.Scene, error) + AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error +} + type ScanConfig interface { GetCreateGalleriesFromFolders() bool } @@ -48,8 +54,9 @@ type ScanGenerator interface { } type ScanHandler struct { - CreatorUpdater ScanCreatorUpdater - GalleryFinder GalleryFinderCreator + CreatorUpdater ScanCreatorUpdater + GalleryFinder GalleryFinderCreator + SceneFinderUpdater ScanSceneFinderUpdater ScanGenerator ScanGenerator @@ -124,7 +131,10 @@ func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models. logger.Infof("Adding %s to gallery %s", f.Base().Path, g.Path) } - if err := h.CreatorUpdater.Create(ctx, &newImage, []models.FileID{imageFile.ID}); err != nil { + if err := h.CreatorUpdater.Create(ctx, &models.CreateImageInput{ + Image: &newImage, + FileIDs: []models.FileID{imageFile.ID}, + }); err != nil { return fmt.Errorf("creating new image: %w", err) } @@ -207,8 +217,8 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. changed = true } - if changed { - // always update updated_at time + if changed || updateExisting { + // update updated_at time when file association or content changes imagePartial := models.NewImagePartial() imagePartial.GalleryIDs = galleryIDs @@ -226,9 +236,7 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. 
return fmt.Errorf("updating gallery updated at timestamp: %w", err) } } - } - if changed || updateExisting { h.PluginCache.RegisterPostHooks(ctx, i.ID, hook.ImageUpdatePost, nil, nil) } } @@ -252,9 +260,13 @@ func (h *ScanHandler) getOrCreateFolderBasedGallery(ctx context.Context, f model newGallery := models.NewGallery() newGallery.FolderID = &folderID + input := models.CreateGalleryInput{ + Gallery: &newGallery, + } + logger.Infof("Creating folder-based gallery for %s", filepath.Dir(f.Base().Path)) - if err := h.GalleryFinder.Create(ctx, &newGallery, nil); err != nil { + if err := h.GalleryFinder.Create(ctx, &input); err != nil { return nil, fmt.Errorf("creating folder based gallery: %w", err) } @@ -308,15 +320,48 @@ func (h *ScanHandler) getOrCreateZipBasedGallery(ctx context.Context, zipFile mo logger.Infof("%s doesn't exist. Creating new gallery...", zipFile.Base().Path) - if err := h.GalleryFinder.Create(ctx, &newGallery, []models.FileID{zipFile.Base().ID}); err != nil { + input := models.CreateGalleryInput{ + Gallery: &newGallery, + FileIDs: []models.FileID{zipFile.Base().ID}, + } + + if err := h.GalleryFinder.Create(ctx, &input); err != nil { return nil, fmt.Errorf("creating zip-based gallery: %w", err) } + // try to associate with scene + if err := h.associateScene(ctx, &newGallery, zipFile); err != nil { + return nil, fmt.Errorf("associating scene: %w", err) + } + h.PluginCache.RegisterPostHooks(ctx, newGallery.ID, hook.GalleryCreatePost, nil, nil) return &newGallery, nil } +func (h *ScanHandler) associateScene(ctx context.Context, existing *models.Gallery, zipFile models.File) error { + galleryIDs := []int{existing.ID} + + path := zipFile.Base().Path + withoutExt := strings.TrimSuffix(path, filepath.Ext(path)) + ".*" + + // find scenes with a file that matches + scenes, err := h.SceneFinderUpdater.FindByPath(ctx, withoutExt) + if err != nil { + return err + } + + for _, scene := range scenes { + // found related Scene + logger.Infof("associate: Gallery 
%s is related to scene: %d", path, scene.ID) + if err := h.SceneFinderUpdater.AddGalleryIDs(ctx, scene.ID, galleryIDs); err != nil { + return err + } + } + + return nil +} + func (h *ScanHandler) getOrCreateGallery(ctx context.Context, f models.File) (*models.Gallery, error) { // don't create folder-based galleries for files in zip file if f.Base().ZipFile != nil { diff --git a/pkg/image/scan_test.go b/pkg/image/scan_test.go new file mode 100644 index 000000000..f48c188ee --- /dev/null +++ b/pkg/image/scan_test.go @@ -0,0 +1,120 @@ +package image + +import ( + "context" + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/plugin" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +type mockScanConfig struct{} + +func (m *mockScanConfig) GetCreateGalleriesFromFolders() bool { return false } + +func TestAssociateExisting_UpdatePartialOnContentChange(t *testing.T) { + const ( + testImageID = 1 + testFileID = 100 + ) + + existingFile := &models.BaseFile{ID: models.FileID(testFileID), Path: "/images/test.jpg"} + + makeImage := func() *models.Image { + return &models.Image{ + ID: testImageID, + Files: models.NewRelatedFiles([]models.File{existingFile}), + GalleryIDs: models.NewRelatedIDs([]int{}), + } + } + + tests := []struct { + name string + updateExisting bool + expectUpdate bool + }{ + { + name: "calls UpdatePartial when file content changed", + updateExisting: true, + expectUpdate: true, + }, + { + name: "skips UpdatePartial when file unchanged and already associated", + updateExisting: false, + expectUpdate: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + db := mocks.NewDatabase() + db.Image.On("GetFiles", mock.Anything, testImageID).Return([]models.File{existingFile}, nil) + db.Image.On("GetGalleryIDs", mock.Anything, testImageID).Return([]int{}, nil) + + if tt.expectUpdate { + db.Image.On("UpdatePartial", 
mock.Anything, testImageID, mock.Anything). + Return(&models.Image{ID: testImageID}, nil) + } + + h := &ScanHandler{ + CreatorUpdater: db.Image, + GalleryFinder: db.Gallery, + ScanConfig: &mockScanConfig{}, + PluginCache: &plugin.Cache{}, + } + + db.WithTxnCtx(func(ctx context.Context) { + err := h.associateExisting(ctx, []*models.Image{makeImage()}, existingFile, tt.updateExisting) + assert.NoError(t, err) + }) + + if tt.expectUpdate { + db.Image.AssertCalled(t, "UpdatePartial", mock.Anything, testImageID, mock.Anything) + } else { + db.Image.AssertNotCalled(t, "UpdatePartial", mock.Anything, mock.Anything, mock.Anything) + } + }) + } +} + +func TestAssociateExisting_UpdatePartialOnNewFile(t *testing.T) { + const ( + testImageID = 1 + existFileID = 100 + newFileID = 200 + ) + + existingFile := &models.BaseFile{ID: models.FileID(existFileID), Path: "/images/existing.jpg"} + newFile := &models.BaseFile{ID: models.FileID(newFileID), Path: "/images/new.jpg"} + + image := &models.Image{ + ID: testImageID, + Files: models.NewRelatedFiles([]models.File{existingFile}), + GalleryIDs: models.NewRelatedIDs([]int{}), + } + + db := mocks.NewDatabase() + db.Image.On("GetFiles", mock.Anything, testImageID).Return([]models.File{existingFile}, nil) + db.Image.On("GetGalleryIDs", mock.Anything, testImageID).Return([]int{}, nil) + db.Image.On("AddFileID", mock.Anything, testImageID, models.FileID(newFileID)).Return(nil) + db.Image.On("UpdatePartial", mock.Anything, testImageID, mock.Anything). 
+ Return(&models.Image{ID: testImageID}, nil) + + h := &ScanHandler{ + CreatorUpdater: db.Image, + GalleryFinder: db.Gallery, + ScanConfig: &mockScanConfig{}, + PluginCache: &plugin.Cache{}, + } + + db.WithTxnCtx(func(ctx context.Context) { + err := h.associateExisting(ctx, []*models.Image{image}, newFile, false) + assert.NoError(t, err) + }) + + db.Image.AssertCalled(t, "AddFileID", mock.Anything, testImageID, models.FileID(newFileID)) + db.Image.AssertCalled(t, "UpdatePartial", mock.Anything, testImageID, mock.Anything) +} diff --git a/pkg/image/thumbnail.go b/pkg/image/thumbnail.go index c65cfc77e..d0fba0f60 100644 --- a/pkg/image/thumbnail.go +++ b/pkg/image/thumbnail.go @@ -22,12 +22,8 @@ const ffmpegImageQuality = 5 var vipsPath string var once sync.Once -var ( - ErrUnsupportedImageFormat = errors.New("unsupported image format") - - // ErrNotSupportedForThumbnail is returned if the image format is not supported for thumbnail generation - ErrNotSupportedForThumbnail = errors.New("unsupported image format for thumbnail") -) +// ErrNotSupportedForThumbnail is returned if the image format is not supported for thumbnail generation +var ErrNotSupportedForThumbnail = errors.New("unsupported image format for thumbnail") type ThumbnailEncoder struct { FFMpeg *ffmpeg.FFMpeg @@ -83,8 +79,9 @@ func (e *ThumbnailEncoder) GetThumbnail(f models.File, maxSize int) ([]byte, err data := buf.Bytes() + format := "" if imageFile, ok := f.(*models.ImageFile); ok { - format := imageFile.Format + format = imageFile.Format animated := imageFile.Format == formatGif // #2266 - if image is webp, then determine if it is animated @@ -96,6 +93,19 @@ func (e *ThumbnailEncoder) GetThumbnail(f models.File, maxSize int) ([]byte, err if animated { return nil, fmt.Errorf("%w: %s", ErrNotSupportedForThumbnail, format) } + + // AVIF cannot be read from stdin, must use file path + // AVIF in zip files is not supported + // Note: No Windows check needed here since we use file path, not stdin + if 
format == "avif" { + if f.Base().ZipFileID != nil { + return nil, fmt.Errorf("%w: AVIF in zip file", ErrNotSupportedForThumbnail) + } + if e.vips != nil { + return e.vips.ImageThumbnailPath(f.Base().Path, maxSize) + } + return e.ffmpegImageThumbnailPath(f.Base().Path, maxSize) + } } // Videofiles can only be thumbnailed with ffmpeg @@ -104,11 +114,15 @@ func (e *ThumbnailEncoder) GetThumbnail(f models.File, maxSize int) ([]byte, err } // vips has issues loading files from stdin on Windows - if e.vips != nil && runtime.GOOS != "windows" { - return e.vips.ImageThumbnail(buf, maxSize) - } else { - return e.ffmpegImageThumbnail(buf, maxSize) + if e.vips != nil { + if runtime.GOOS == "windows" && f.Base().ZipFileID == nil { + return e.vips.ImageThumbnailPath(f.Base().Path, maxSize) + } + if runtime.GOOS != "windows" { + return e.vips.ImageThumbnail(buf, maxSize) + } } + return e.ffmpegImageThumbnail(buf, maxSize) } // GetPreview returns the preview clip of the provided image clip resized to @@ -130,16 +144,32 @@ func (e *ThumbnailEncoder) GetPreview(inPath string, outPath string, maxSize int } func (e *ThumbnailEncoder) ffmpegImageThumbnail(image *bytes.Buffer, maxSize int) ([]byte, error) { - args := transcoder.ImageThumbnail("-", transcoder.ImageThumbnailOptions{ + options := transcoder.ImageThumbnailOptions{ OutputFormat: ffmpeg.ImageFormatJpeg, OutputPath: "-", MaxDimensions: maxSize, Quality: ffmpegImageQuality, - }) + } + + args := transcoder.ImageThumbnail("-", options) return e.FFMpeg.GenerateOutput(context.TODO(), args, image) } +// ffmpegImageThumbnailPath generates a thumbnail from a file path (used for AVIF which can't be piped) +func (e *ThumbnailEncoder) ffmpegImageThumbnailPath(inputPath string, maxSize int) ([]byte, error) { + options := transcoder.ImageThumbnailOptions{ + OutputFormat: ffmpeg.ImageFormatJpeg, + OutputPath: "-", + MaxDimensions: maxSize, + Quality: ffmpegImageQuality, + } + + args := transcoder.ImageThumbnail(inputPath, options) + + 
return e.FFMpeg.GenerateOutput(context.TODO(), args, nil) +} + func (e *ThumbnailEncoder) getClipPreview(inPath string, outPath string, maxSize int, clipDuration float64, frameRate float64) error { var thumbFilter ffmpeg.VideoFilter thumbFilter = thumbFilter.ScaleMaxSize(maxSize) diff --git a/pkg/image/vips.go b/pkg/image/vips.go index 39809dc18..0a0350aa8 100644 --- a/pkg/image/vips.go +++ b/pkg/image/vips.go @@ -24,6 +24,38 @@ func (e *vipsEncoder) ImageThumbnail(image *bytes.Buffer, maxSize int) ([]byte, return []byte(data), err } +// ImageThumbnailPath generates a thumbnail from a file path instead of stdin. +// This is required for formats like AVIF that need random file access (seeking) +// which stdin cannot provide. +func (e *vipsEncoder) ImageThumbnailPath(path string, maxSize int) ([]byte, error) { + // vips thumbnail syntax: thumbnail input output width [options] + // Using .jpg[Q=70,strip] as output writes to stdout + args := []string{ + "thumbnail", + path, + ".jpg[Q=70,strip]", + fmt.Sprint(maxSize), + "--size", "down", + } + + cmd := exec.Command(string(*e), args...) + + var stdout, stderr bytes.Buffer + cmd.Stdout = &stdout + cmd.Stderr = &stderr + + if err := cmd.Start(); err != nil { + return nil, err + } + + if err := cmd.Wait(); err != nil { + logger.Errorf("image encoder error when running command <%s>: %s", strings.Join(cmd.Args, " "), stderr.String()) + return nil, err + } + + return stdout.Bytes(), nil +} + func (e *vipsEncoder) run(args []string, stdin *bytes.Buffer) (string, error) { cmd := exec.Command(string(*e), args...) 
diff --git a/pkg/job/manager.go b/pkg/job/manager.go index 983d88cc0..3e47d842b 100644 --- a/pkg/job/manager.go +++ b/pkg/job/manager.go @@ -7,7 +7,6 @@ import ( "time" "github.com/stashapp/stash/pkg/logger" - "github.com/stashapp/stash/pkg/utils" ) const maxGraveyardSize = 10 @@ -179,7 +178,8 @@ func (m *Manager) dispatch(ctx context.Context, j *Job) (done chan struct{}) { j.StartTime = &t j.Status = StatusRunning - ctx, cancelFunc := context.WithCancel(utils.ValueOnlyContext{Context: ctx}) + // create a cancellable context for the job that is not canceled by the outer context + ctx, cancelFunc := context.WithCancel(context.WithoutCancel(ctx)) j.cancelFunc = cancelFunc done = make(chan struct{}) diff --git a/pkg/match/scraped.go b/pkg/match/scraped.go index b66f39a35..a6683ff52 100644 --- a/pkg/match/scraped.go +++ b/pkg/match/scraped.go @@ -45,7 +45,7 @@ func (r SceneRelationships) MatchRelationships(ctx context.Context, s *models.Sc } for _, t := range s.Tags { - err := ScrapedTag(ctx, r.TagFinder, t) + err := ScrapedTag(ctx, r.TagFinder, t, endpoint) if err != nil { return err } @@ -188,13 +188,45 @@ func ScrapedGroup(ctx context.Context, qb GroupNamesFinder, storedID *string, na return } +// ScrapedTagHierarchy executes ScrapedTag for the provided tag and its parent. +func ScrapedTagHierarchy(ctx context.Context, qb models.TagQueryer, s *models.ScrapedTag, stashBoxEndpoint string) error { + if err := ScrapedTag(ctx, qb, s, stashBoxEndpoint); err != nil { + return err + } + + if s.Parent == nil { + return nil + } + + // Match parent by name only (categories don't have StashDB tag IDs) + return ScrapedTag(ctx, qb, s.Parent, "") +} + // ScrapedTag matches the provided tag with the tags // in the database and sets the ID field if one is found. 
-func ScrapedTag(ctx context.Context, qb models.TagQueryer, s *models.ScrapedTag) error { +func ScrapedTag(ctx context.Context, qb models.TagQueryer, s *models.ScrapedTag, stashBoxEndpoint string) error { if s.StoredID != nil { return nil } + // Check if a tag with the StashID already exists + if stashBoxEndpoint != "" && s.RemoteSiteID != nil { + if finder, ok := qb.(models.TagFinder); ok { + tags, err := finder.FindByStashID(ctx, models.StashID{ + StashID: *s.RemoteSiteID, + Endpoint: stashBoxEndpoint, + }) + if err != nil { + return err + } + if len(tags) > 0 { + id := strconv.Itoa(tags[0].ID) + s.StoredID = &id + return nil + } + } + } + t, err := tag.ByName(ctx, qb, s.Name) if err != nil { diff --git a/pkg/models/custom_fields.go b/pkg/models/custom_fields.go index 977c2fe89..3212d676f 100644 --- a/pkg/models/custom_fields.go +++ b/pkg/models/custom_fields.go @@ -9,9 +9,15 @@ type CustomFieldsInput struct { Full map[string]interface{} `json:"full"` // If populated, only the keys in this map will be updated Partial map[string]interface{} `json:"partial"` + // Remove any keys in this list + Remove []string `json:"remove"` } type CustomFieldsReader interface { GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]CustomFieldMap, error) } + +type CustomFieldsWriter interface { + SetCustomFields(ctx context.Context, id int, fields CustomFieldsInput) error +} diff --git a/pkg/models/date.go b/pkg/models/date.go index 151e32c1d..912361507 100644 --- a/pkg/models/date.go +++ b/pkg/models/date.go @@ -1,31 +1,175 @@ package models import ( + "fmt" + "strings" "time" "github.com/stashapp/stash/pkg/utils" ) +type DatePrecision int + +const ( + // default precision is day + DatePrecisionDay DatePrecision = iota + DatePrecisionMonth + DatePrecisionYear +) + // Date wraps a time.Time with a format of "YYYY-MM-DD" type Date struct { time.Time + Precision DatePrecision } -const dateFormat = 
"2006-01-02" +var dateFormatPrecision = []string{ + "2006-01-02", + "2006-01", + "2006", +} func (d Date) String() string { - return d.Format(dateFormat) + return d.Format(dateFormatPrecision[d.Precision]) } func (d Date) After(o Date) bool { return d.Time.After(o.Time) } -// ParseDate uses utils.ParseDateStringAsTime to parse a string into a date. +// ParseDate tries to parse the input string into a date using utils.ParseDateStringAsTime. +// If that fails, it attempts to parse the string with decreasing precision (month, then year). +// It returns a Date struct with the appropriate precision set, or an error if all parsing attempts fail. func ParseDate(s string) (Date, error) { + var errs []error + + // default parse to day precision ret, err := utils.ParseDateStringAsTime(s) - if err != nil { - return Date{}, err + if err == nil { + return Date{Time: ret, Precision: DatePrecisionDay}, nil } - return Date{Time: ret}, nil + + errs = append(errs, err) + + // try month and year precision + for i, format := range dateFormatPrecision[1:] { + ret, err := time.Parse(format, s) + if err == nil { + return Date{Time: ret, Precision: DatePrecision(i + 1)}, nil + } + errs = append(errs, err) + } + + return Date{}, fmt.Errorf("failed to parse date %q: %v", s, errs) +} + +func DateFromYear(year int) Date { + return Date{ + Time: time.Date(year, 1, 1, 0, 0, 0, 0, time.UTC), + Precision: DatePrecisionYear, + } +} + +func FormatYearRange(start *Date, end *Date) string { + var ( + startStr, endStr string + ) + + if start != nil { + startStr = start.Format(dateFormatPrecision[DatePrecisionYear]) + } + + if end != nil { + endStr = end.Format(dateFormatPrecision[DatePrecisionYear]) + } + + switch { + case startStr == "" && endStr == "": + return "" + case endStr == "": + return fmt.Sprintf("%s -", startStr) + case startStr == "": + return fmt.Sprintf("- %s", endStr) + default: + return fmt.Sprintf("%s - %s", startStr, endStr) + } +} + +func FormatYearRangeString(start *string, end 
*string) string { + switch { + case start == nil && end == nil: + return "" + case end == nil: + return fmt.Sprintf("%s -", *start) + case start == nil: + return fmt.Sprintf("- %s", *end) + default: + return fmt.Sprintf("%s - %s", *start, *end) + } +} + +// ParseYearRangeString parses a year range string into start and end year integers. +// Supported formats: "YYYY", "YYYY - YYYY", "YYYY-YYYY", "YYYY -", "- YYYY", "YYYY-present". +// Returns nil for start/end if not present in the string. +func ParseYearRangeString(s string) (start *Date, end *Date, err error) { + s = strings.TrimSpace(s) + if s == "" { + return nil, nil, fmt.Errorf("empty year range string") + } + + // normalize "present" to empty end + lower := strings.ToLower(s) + lower = strings.ReplaceAll(lower, "present", "") + + // split on "-" if it contains one + var parts []string + if strings.Contains(lower, "-") { + parts = strings.SplitN(lower, "-", 2) + } else { + // single value, treat as start year + year, err := parseYear(lower) + if err != nil { + return nil, nil, fmt.Errorf("invalid year range %q: %w", s, err) + } + return year, nil, nil + } + + startStr := strings.TrimSpace(parts[0]) + endStr := strings.TrimSpace(parts[1]) + + if startStr != "" { + y, err := parseYear(startStr) + if err != nil { + return nil, nil, fmt.Errorf("invalid start year in %q: %w", s, err) + } + start = y + } + + if endStr != "" { + y, err := parseYear(endStr) + if err != nil { + return nil, nil, fmt.Errorf("invalid end year in %q: %w", s, err) + } + end = y + } + + if start == nil && end == nil { + return nil, nil, fmt.Errorf("could not parse year range %q", s) + } + + return start, end, nil +} + +func parseYear(s string) (*Date, error) { + ret, err := ParseDate(s) + if err != nil { + return nil, fmt.Errorf("parsing year %q: %w", s, err) + } + + year := ret.Time.Year() + if year < 1900 || year > 2200 { + return nil, fmt.Errorf("year %d out of reasonable range", year) + } + + return &ret, nil } diff --git 
a/pkg/models/date_test.go b/pkg/models/date_test.go new file mode 100644 index 000000000..3b2962e28 --- /dev/null +++ b/pkg/models/date_test.go @@ -0,0 +1,151 @@ +package models + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestParseDateStringAsTime(t *testing.T) { + tests := []struct { + name string + input string + output Date + expectError bool + }{ + // Full date formats (existing support) + {"RFC3339", "2014-01-02T15:04:05Z", Date{Time: time.Date(2014, 1, 2, 15, 4, 5, 0, time.UTC), Precision: DatePrecisionDay}, false}, + {"Date only", "2014-01-02", Date{Time: time.Date(2014, 1, 2, 0, 0, 0, 0, time.UTC), Precision: DatePrecisionDay}, false}, + {"Date with time", "2014-01-02 15:04:05", Date{Time: time.Date(2014, 1, 2, 15, 4, 5, 0, time.UTC), Precision: DatePrecisionDay}, false}, + + // Partial date formats (new support) + {"Year-Month", "2006-08", Date{Time: time.Date(2006, 8, 1, 0, 0, 0, 0, time.UTC), Precision: DatePrecisionMonth}, false}, + {"Year only", "2014", Date{Time: time.Date(2014, 1, 1, 0, 0, 0, 0, time.UTC), Precision: DatePrecisionYear}, false}, + + // Invalid formats + {"Invalid format", "not-a-date", Date{}, true}, + {"Empty string", "", Date{}, true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, err := ParseDate(tt.input) + + if tt.expectError { + if err == nil { + t.Errorf("Expected error for input %q, but got none", tt.input) + } + return + } + + if err != nil { + t.Errorf("Unexpected error for input %q: %v", tt.input, err) + return + } + + if !result.Time.Equal(tt.output.Time) || result.Precision != tt.output.Precision { + t.Errorf("For input %q, expected output %+v, got %+v", tt.input, tt.output, result) + } + }) + } +} + +func TestFormatYearRange(t *testing.T) { + datePtr := func(v int) *Date { + date := DateFromYear(v) + return &date + } + + tests := []struct { + name string + start *Date + end *Date + want string + }{ + {"both nil", nil, nil, ""}, + {"only start", 
datePtr(2005), nil, "2005 -"}, + {"only end", nil, datePtr(2010), "- 2010"}, + {"start and end", datePtr(2005), datePtr(2010), "2005 - 2010"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := FormatYearRange(tt.start, tt.end) + assert.Equal(t, tt.want, got) + }) + } +} + +func TestFormatYearRangeString(t *testing.T) { + stringPtr := func(v string) *string { return &v } + + tests := []struct { + name string + start *string + end *string + want string + }{ + {"both nil", nil, nil, ""}, + {"only start", stringPtr("2005"), nil, "2005 -"}, + {"only end", nil, stringPtr("2010"), "- 2010"}, + {"start and end", stringPtr("2005"), stringPtr("2010"), "2005 - 2010"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := FormatYearRangeString(tt.start, tt.end) + assert.Equal(t, tt.want, got) + }) + } +} + +func TestParseYearRangeString(t *testing.T) { + intPtr := func(v int) *int { return &v } + + tests := []struct { + name string + input string + wantStart *int + wantEnd *int + wantErr bool + }{ + {"single year", "2005", intPtr(2005), nil, false}, + {"year range with spaces", "2005 - 2010", intPtr(2005), intPtr(2010), false}, + {"year range no spaces", "2005-2010", intPtr(2005), intPtr(2010), false}, + {"year dash open", "2005 -", intPtr(2005), nil, false}, + {"year dash open no space", "2005-", intPtr(2005), nil, false}, + {"dash year", "- 2010", nil, intPtr(2010), false}, + {"year present", "2005-present", intPtr(2005), nil, false}, + {"year Present caps", "2005 - Present", intPtr(2005), nil, false}, + {"whitespace padding", " 2005 - 2010 ", intPtr(2005), intPtr(2010), false}, + {"empty string", "", nil, nil, true}, + {"garbage", "not a year", nil, nil, true}, + {"partial garbage start", "abc - 2010", nil, nil, true}, + {"partial garbage end", "2005 - abc", nil, nil, true}, + {"year out of range", "1800", nil, nil, true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + start, end, err := 
ParseYearRangeString(tt.input) + if tt.wantErr { + assert.Error(t, err) + return + } + assert.NoError(t, err) + if tt.wantStart != nil { + assert.NotNil(t, start) + assert.Equal(t, *tt.wantStart, start.Time.Year()) + } else { + assert.Nil(t, start) + } + if tt.wantEnd != nil { + assert.NotNil(t, end) + assert.Equal(t, *tt.wantEnd, end.Time.Year()) + } else { + assert.Nil(t, end) + } + }) + } +} diff --git a/pkg/models/file.go b/pkg/models/file.go index e6ce41d1e..32263319c 100644 --- a/pkg/models/file.go +++ b/pkg/models/file.go @@ -9,15 +9,35 @@ import ( type FileQueryOptions struct { QueryOptions FileFilter *FileFilterType + + TotalDuration bool + Megapixels bool + TotalSize bool } type FileFilterType struct { - And *FileFilterType `json:"AND"` - Or *FileFilterType `json:"OR"` - Not *FileFilterType `json:"NOT"` + OperatorFilter[FileFilterType] // Filter by path Path *StringCriterionInput `json:"path"` + + Basename *StringCriterionInput `json:"basename"` + Dir *StringCriterionInput `json:"dir"` + ParentFolder *HierarchicalMultiCriterionInput `json:"parent_folder"` + ZipFile *MultiCriterionInput `json:"zip_file"` + ModTime *TimestampCriterionInput `json:"mod_time"` + Duplicated *FileDuplicationCriterionInput `json:"duplicated"` + Hashes []*FingerprintFilterInput `json:"hashes"` + VideoFileFilter *VideoFileFilterInput `json:"video_file_filter"` + ImageFileFilter *ImageFileFilterInput `json:"image_file_filter"` + SceneCount *IntCriterionInput `json:"scene_count"` + ImageCount *IntCriterionInput `json:"image_count"` + GalleryCount *IntCriterionInput `json:"gallery_count"` + ScenesFilter *SceneFilterType `json:"scenes_filter"` + ImagesFilter *ImageFilterType `json:"images_filter"` + GalleriesFilter *GalleryFilterType `json:"galleries_filter"` + CreatedAt *TimestampCriterionInput `json:"created_at"` + UpdatedAt *TimestampCriterionInput `json:"updated_at"` } func PathsFileFilter(paths []string) *FileFilterType { @@ -53,10 +73,10 @@ func PathsFileFilter(paths []string) 
*FileFilterType { } type FileQueryResult struct { - // can't use QueryResult because id type is wrong - - IDs []FileID - Count int + QueryResult[FileID] + TotalDuration float64 + Megapixels float64 + TotalSize int64 getter FileGetter files []File diff --git a/pkg/models/filter.go b/pkg/models/filter.go index 2d25f6516..97d850a55 100644 --- a/pkg/models/filter.go +++ b/pkg/models/filter.go @@ -200,3 +200,31 @@ type CustomFieldCriterionInput struct { Value []any `json:"value"` Modifier CriterionModifier `json:"modifier"` } + +type FingerprintFilterInput struct { + Type string `json:"type"` + Value string `json:"value"` + // Hamming distance - defaults to 0 + Distance *int `json:"distance,omitempty"` +} + +type VideoFileFilterInput struct { + Format *StringCriterionInput `json:"format,omitempty"` + Resolution *ResolutionCriterionInput `json:"resolution,omitempty"` + Orientation *OrientationCriterionInput `json:"orientation,omitempty"` + Framerate *IntCriterionInput `json:"framerate,omitempty"` + Bitrate *IntCriterionInput `json:"bitrate,omitempty"` + VideoCodec *StringCriterionInput `json:"video_codec,omitempty"` + AudioCodec *StringCriterionInput `json:"audio_codec,omitempty"` + // in seconds + Duration *IntCriterionInput `json:"duration,omitempty"` + Captions *StringCriterionInput `json:"captions,omitempty"` + Interactive *bool `json:"interactive,omitempty"` + InteractiveSpeed *IntCriterionInput `json:"interactive_speed,omitempty"` +} + +type ImageFileFilterInput struct { + Format *StringCriterionInput `json:"format,omitempty"` + Resolution *ResolutionCriterionInput `json:"resolution,omitempty"` + Orientation *OrientationCriterionInput `json:"orientation,omitempty"` +} diff --git a/pkg/models/folder.go b/pkg/models/folder.go new file mode 100644 index 000000000..e9e9a3971 --- /dev/null +++ b/pkg/models/folder.go @@ -0,0 +1,90 @@ +package models + +import ( + "context" + "path/filepath" + "strings" +) + +type FolderQueryOptions struct { + QueryOptions + FolderFilter 
*FolderFilterType + + TotalDuration bool + Megapixels bool + TotalSize bool +} + +type FolderFilterType struct { + OperatorFilter[FolderFilterType] + + Path *StringCriterionInput `json:"path,omitempty"` + Basename *StringCriterionInput `json:"basename,omitempty"` + ParentFolder *HierarchicalMultiCriterionInput `json:"parent_folder,omitempty"` + ZipFile *MultiCriterionInput `json:"zip_file,omitempty"` + // Filter by modification time + ModTime *TimestampCriterionInput `json:"mod_time,omitempty"` + GalleryCount *IntCriterionInput `json:"gallery_count,omitempty"` + // Filter by files that meet this criteria + FilesFilter *FileFilterType `json:"files_filter,omitempty"` + // Filter by related galleries that meet this criteria + GalleriesFilter *GalleryFilterType `json:"galleries_filter,omitempty"` + // Filter by creation time + CreatedAt *TimestampCriterionInput `json:"created_at,omitempty"` + // Filter by last update time + UpdatedAt *TimestampCriterionInput `json:"updated_at,omitempty"` +} + +func PathsFolderFilter(paths []string) *FileFilterType { + if paths == nil { + return nil + } + + sep := string(filepath.Separator) + + var ret *FileFilterType + var or *FileFilterType + for _, p := range paths { + newOr := &FileFilterType{} + if or != nil { + or.Or = newOr + } else { + ret = newOr + } + + or = newOr + + if !strings.HasSuffix(p, sep) { + p += sep + } + + or.Path = &StringCriterionInput{ + Modifier: CriterionModifierEquals, + Value: p + "%", + } + } + + return ret +} + +type FolderQueryResult struct { + QueryResult[FolderID] + + getter FolderGetter + folders []*Folder + resolveErr error +} + +func NewFolderQueryResult(folderGetter FolderGetter) *FolderQueryResult { + return &FolderQueryResult{ + getter: folderGetter, + } +} + +func (r *FolderQueryResult) Resolve(ctx context.Context) ([]*Folder, error) { + // cache results + if r.folders == nil && r.resolveErr == nil { + r.folders, r.resolveErr = r.getter.FindMany(ctx, r.IDs) + } + return r.folders, r.resolveErr +} 
diff --git a/pkg/models/gallery.go b/pkg/models/gallery.go index 73fa287d2..3bf70b754 100644 --- a/pkg/models/gallery.go +++ b/pkg/models/gallery.go @@ -11,6 +11,8 @@ type GalleryFilterType struct { Checksum *StringCriterionInput `json:"checksum"` // Filter by path Path *StringCriterionInput `json:"path"` + // Filter by parent folder + ParentFolder *HierarchicalMultiCriterionInput `json:"parent_folder,omitempty"` // Filter by zip file count FileCount *IntCriterionInput `json:"file_count"` // Filter to only include galleries missing this property @@ -59,10 +61,17 @@ type GalleryFilterType struct { StudiosFilter *StudioFilterType `json:"studios_filter"` // Filter by related tags that meet this criteria TagsFilter *TagFilterType `json:"tags_filter"` + // Filter by related files that meet this criteria + FilesFilter *FileFilterType `json:"files_filter"` + // Filter by related folders that meet this criteria + FoldersFilter *FolderFilterType `json:"folders_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` + + // Filter by custom fields + CustomFields []CustomFieldCriterionInput `json:"custom_fields"` } type GalleryUpdateInput struct { @@ -82,6 +91,8 @@ type GalleryUpdateInput struct { PerformerIds []string `json:"performer_ids"` PrimaryFileID *string `json:"primary_file_id"` + CustomFields *CustomFieldsInput `json:"custom_fields"` + // deprecated URL *string `json:"url"` } @@ -91,6 +102,7 @@ type GalleryDestroyInput struct { // If true, then the zip file will be deleted if the gallery is zip-file-based. // If gallery is folder-based, then any files not associated with other // galleries will be deleted, along with the folder, if it is not empty. 
- DeleteFile *bool `json:"delete_file"` - DeleteGenerated *bool `json:"delete_generated"` + DeleteFile *bool `json:"delete_file"` + DeleteGenerated *bool `json:"delete_generated"` + DestroyFileEntry *bool `json:"destroy_file_entry"` } diff --git a/pkg/models/group.go b/pkg/models/group.go index 6afda3f48..396384b51 100644 --- a/pkg/models/group.go +++ b/pkg/models/group.go @@ -23,6 +23,8 @@ type GroupFilterType struct { TagCount *IntCriterionInput `json:"tag_count"` // Filter by date Date *DateCriterionInput `json:"date"` + // Filter by O counter + OCounter *IntCriterionInput `json:"o_counter"` // Filter by containing groups ContainingGroups *HierarchicalMultiCriterionInput `json:"containing_groups"` // Filter by sub groups @@ -31,6 +33,8 @@ type GroupFilterType struct { ContainingGroupCount *IntCriterionInput `json:"containing_group_count"` // Filter by number of sub-groups the group has SubGroupCount *IntCriterionInput `json:"sub_group_count"` + // Filter by number of scenes the group has + SceneCount *IntCriterionInput `json:"scene_count"` // Filter by related scenes that meet this criteria ScenesFilter *SceneFilterType `json:"scenes_filter"` // Filter by related studios that meet this criteria @@ -39,4 +43,6 @@ type GroupFilterType struct { CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` + // Filter by custom fields + CustomFields []CustomFieldCriterionInput `json:"custom_fields"` } diff --git a/pkg/models/image.go b/pkg/models/image.go index 370315159..b99267e8c 100644 --- a/pkg/models/image.go +++ b/pkg/models/image.go @@ -1,6 +1,8 @@ package models -import "context" +import ( + "context" +) type ImageFilterType struct { OperatorFilter[ImageFilterType] @@ -11,6 +13,8 @@ type ImageFilterType struct { Photographer *StringCriterionInput `json:"photographer"` // Filter by file checksum Checksum *StringCriterionInput `json:"checksum"` + // Filter by phash distance + PhashDistance 
*PhashDistanceCriterionInput `json:"phash_distance"` // Filter by path Path *StringCriterionInput `json:"path"` // Filter by file count @@ -57,44 +61,51 @@ type ImageFilterType struct { StudiosFilter *StudioFilterType `json:"studios_filter"` // Filter by related tags that meet this criteria TagsFilter *TagFilterType `json:"tags_filter"` + // Filter by related files that meet this criteria + FilesFilter *FileFilterType `json:"files_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` + // Filter by custom fields + CustomFields []CustomFieldCriterionInput `json:"custom_fields"` } type ImageUpdateInput struct { - ClientMutationID *string `json:"clientMutationId"` - ID string `json:"id"` - Title *string `json:"title"` - Code *string `json:"code"` - Urls []string `json:"urls"` - Date *string `json:"date"` - Details *string `json:"details"` - Photographer *string `json:"photographer"` - Rating100 *int `json:"rating100"` - Organized *bool `json:"organized"` - SceneIds []string `json:"scene_ids"` - StudioID *string `json:"studio_id"` - TagIds []string `json:"tag_ids"` - PerformerIds []string `json:"performer_ids"` - GalleryIds []string `json:"gallery_ids"` - PrimaryFileID *string `json:"primary_file_id"` + ClientMutationID *string `json:"clientMutationId"` + ID string `json:"id"` + Title *string `json:"title"` + Code *string `json:"code"` + Urls []string `json:"urls"` + Date *string `json:"date"` + Details *string `json:"details"` + Photographer *string `json:"photographer"` + Rating100 *int `json:"rating100"` + Organized *bool `json:"organized"` + SceneIds []string `json:"scene_ids"` + StudioID *string `json:"studio_id"` + TagIds []string `json:"tag_ids"` + PerformerIds []string `json:"performer_ids"` + GalleryIds []string `json:"gallery_ids"` + PrimaryFileID *string `json:"primary_file_id"` + CustomFields *CustomFieldsInput `json:"custom_fields"` // 
deprecated URL *string `json:"url"` } type ImageDestroyInput struct { - ID string `json:"id"` - DeleteFile *bool `json:"delete_file"` - DeleteGenerated *bool `json:"delete_generated"` + ID string `json:"id"` + DeleteFile *bool `json:"delete_file"` + DeleteGenerated *bool `json:"delete_generated"` + DestroyFileEntry *bool `json:"destroy_file_entry"` } type ImagesDestroyInput struct { - Ids []string `json:"ids"` - DeleteFile *bool `json:"delete_file"` - DeleteGenerated *bool `json:"delete_generated"` + Ids []string `json:"ids"` + DeleteFile *bool `json:"delete_file"` + DeleteGenerated *bool `json:"delete_generated"` + DestroyFileEntry *bool `json:"destroy_file_entry"` } type ImageQueryOptions struct { @@ -106,7 +117,7 @@ type ImageQueryOptions struct { } type ImageQueryResult struct { - QueryResult + QueryResult[int] Megapixels float64 TotalSize float64 diff --git a/pkg/models/jsonschema/gallery.go b/pkg/models/jsonschema/gallery.go index 7323e37ba..5fb6e16ab 100644 --- a/pkg/models/jsonschema/gallery.go +++ b/pkg/models/jsonschema/gallery.go @@ -18,22 +18,23 @@ type GalleryChapter struct { } type Gallery struct { - ZipFiles []string `json:"zip_files,omitempty"` - FolderPath string `json:"folder_path,omitempty"` - Title string `json:"title,omitempty"` - Code string `json:"code,omitempty"` - URLs []string `json:"urls,omitempty"` - Date string `json:"date,omitempty"` - Details string `json:"details,omitempty"` - Photographer string `json:"photographer,omitempty"` - Rating int `json:"rating,omitempty"` - Organized bool `json:"organized,omitempty"` - Chapters []GalleryChapter `json:"chapters,omitempty"` - Studio string `json:"studio,omitempty"` - Performers []string `json:"performers,omitempty"` - Tags []string `json:"tags,omitempty"` - CreatedAt json.JSONTime `json:"created_at,omitempty"` - UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + ZipFiles []string `json:"zip_files,omitempty"` + FolderPath string `json:"folder_path,omitempty"` + Title string 
`json:"title,omitempty"` + Code string `json:"code,omitempty"` + URLs []string `json:"urls,omitempty"` + Date string `json:"date,omitempty"` + Details string `json:"details,omitempty"` + Photographer string `json:"photographer,omitempty"` + Rating int `json:"rating,omitempty"` + Organized bool `json:"organized,omitempty"` + Chapters []GalleryChapter `json:"chapters,omitempty"` + Studio string `json:"studio,omitempty"` + Performers []string `json:"performers,omitempty"` + Tags []string `json:"tags,omitempty"` + CreatedAt json.JSONTime `json:"created_at,omitempty"` + UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + CustomFields map[string]interface{} `json:"custom_fields,omitempty"` // deprecated - for import only URL string `json:"url,omitempty"` diff --git a/pkg/models/jsonschema/group.go b/pkg/models/jsonschema/group.go index b284dab6e..357ac70bc 100644 --- a/pkg/models/jsonschema/group.go +++ b/pkg/models/jsonschema/group.go @@ -33,6 +33,8 @@ type Group struct { CreatedAt json.JSONTime `json:"created_at,omitempty"` UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + CustomFields map[string]interface{} `json:"custom_fields,omitempty"` + // deprecated - for import only URL string `json:"url,omitempty"` } diff --git a/pkg/models/jsonschema/image.go b/pkg/models/jsonschema/image.go index 1bdac8770..168ea9eec 100644 --- a/pkg/models/jsonschema/image.go +++ b/pkg/models/jsonschema/image.go @@ -18,18 +18,19 @@ type Image struct { // deprecated - for import only URL string `json:"url,omitempty"` - URLs []string `json:"urls,omitempty"` - Date string `json:"date,omitempty"` - Details string `json:"details,omitempty"` - Photographer string `json:"photographer,omitempty"` - Organized bool `json:"organized,omitempty"` - OCounter int `json:"o_counter,omitempty"` - Galleries []GalleryRef `json:"galleries,omitempty"` - Performers []string `json:"performers,omitempty"` - Tags []string `json:"tags,omitempty"` - Files []string `json:"files,omitempty"` - CreatedAt 
json.JSONTime `json:"created_at,omitempty"` - UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + URLs []string `json:"urls,omitempty"` + Date string `json:"date,omitempty"` + Details string `json:"details,omitempty"` + Photographer string `json:"photographer,omitempty"` + Organized bool `json:"organized,omitempty"` + OCounter int `json:"o_counter,omitempty"` + Galleries []GalleryRef `json:"galleries,omitempty"` + Performers []string `json:"performers,omitempty"` + Tags []string `json:"tags,omitempty"` + Files []string `json:"files,omitempty"` + CreatedAt json.JSONTime `json:"created_at,omitempty"` + UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + CustomFields map[string]interface{} `json:"custom_fields,omitempty"` } func (s Image) Filename(basename string, hash string) string { diff --git a/pkg/models/jsonschema/performer.go b/pkg/models/jsonschema/performer.go index 5edd5724c..1a8acd5f3 100644 --- a/pkg/models/jsonschema/performer.go +++ b/pkg/models/jsonschema/performer.go @@ -48,7 +48,9 @@ type Performer struct { FakeTits string `json:"fake_tits,omitempty"` PenisLength float64 `json:"penis_length,omitempty"` Circumcised string `json:"circumcised,omitempty"` - CareerLength string `json:"career_length,omitempty"` + CareerLength string `json:"career_length,omitempty"` // deprecated - for import only + CareerStart string `json:"career_start,omitempty"` + CareerEnd string `json:"career_end,omitempty"` Tattoos string `json:"tattoos,omitempty"` Piercings string `json:"piercings,omitempty"` Aliases StringOrStringList `json:"aliases,omitempty"` diff --git a/pkg/models/jsonschema/scene.go b/pkg/models/jsonschema/scene.go index c2f266d5c..8f15b9c5d 100644 --- a/pkg/models/jsonschema/scene.go +++ b/pkg/models/jsonschema/scene.go @@ -80,6 +80,8 @@ type Scene struct { PlayDuration float64 `json:"play_duration,omitempty"` StashIDs []models.StashID `json:"stash_ids,omitempty"` + + CustomFields map[string]interface{} `json:"custom_fields,omitempty"` } func (s Scene) 
Filename(id int, basename string, hash string) string { diff --git a/pkg/models/jsonschema/studio.go b/pkg/models/jsonschema/studio.go index 80ed97d92..12a797c13 100644 --- a/pkg/models/jsonschema/studio.go +++ b/pkg/models/jsonschema/studio.go @@ -12,7 +12,7 @@ import ( type Studio struct { Name string `json:"name,omitempty"` - URL string `json:"url,omitempty"` + URLs []string `json:"urls,omitempty"` ParentStudio string `json:"parent_studio,omitempty"` Image string `json:"image,omitempty"` CreatedAt json.JSONTime `json:"created_at,omitempty"` @@ -24,6 +24,12 @@ type Studio struct { StashIDs []models.StashID `json:"stash_ids,omitempty"` Tags []string `json:"tags,omitempty"` IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"` + Organized bool `json:"organized,omitempty"` + + CustomFields map[string]interface{} `json:"custom_fields,omitempty"` + + // deprecated - for import only + URL string `json:"url,omitempty"` } func (s Studio) Filename() string { diff --git a/pkg/models/jsonschema/tag.go b/pkg/models/jsonschema/tag.go index ed2bc1c9c..e7b16b13f 100644 --- a/pkg/models/jsonschema/tag.go +++ b/pkg/models/jsonschema/tag.go @@ -6,20 +6,23 @@ import ( jsoniter "github.com/json-iterator/go" "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" ) type Tag struct { - Name string `json:"name,omitempty"` - SortName string `json:"sort_name,omitempty"` - Description string `json:"description,omitempty"` - Favorite bool `json:"favorite,omitempty"` - Aliases []string `json:"aliases,omitempty"` - Image string `json:"image,omitempty"` - Parents []string `json:"parents,omitempty"` - IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"` - CreatedAt json.JSONTime `json:"created_at,omitempty"` - UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + Name string `json:"name,omitempty"` + SortName string `json:"sort_name,omitempty"` + Description string `json:"description,omitempty"` + Favorite bool 
`json:"favorite,omitempty"` + Aliases []string `json:"aliases,omitempty"` + Image string `json:"image,omitempty"` + Parents []string `json:"parents,omitempty"` + IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"` + StashIDs []models.StashID `json:"stash_ids,omitempty"` + CreatedAt json.JSONTime `json:"created_at,omitempty"` + UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + CustomFields map[string]interface{} `json:"custom_fields,omitempty"` } func (s Tag) Filename() string { diff --git a/pkg/models/mocks/FileReaderWriter.go b/pkg/models/mocks/FileReaderWriter.go index 12a1b3075..4b370459e 100644 --- a/pkg/models/mocks/FileReaderWriter.go +++ b/pkg/models/mocks/FileReaderWriter.go @@ -130,13 +130,13 @@ func (_m *FileReaderWriter) Find(ctx context.Context, id ...models.FileID) ([]mo return r0, r1 } -// FindAllByPath provides a mock function with given fields: ctx, path -func (_m *FileReaderWriter) FindAllByPath(ctx context.Context, path string) ([]models.File, error) { - ret := _m.Called(ctx, path) +// FindAllByPath provides a mock function with given fields: ctx, path, caseSensitive +func (_m *FileReaderWriter) FindAllByPath(ctx context.Context, path string, caseSensitive bool) ([]models.File, error) { + ret := _m.Called(ctx, path, caseSensitive) var r0 []models.File - if rf, ok := ret.Get(0).(func(context.Context, string) []models.File); ok { - r0 = rf(ctx, path) + if rf, ok := ret.Get(0).(func(context.Context, string, bool) []models.File); ok { + r0 = rf(ctx, path, caseSensitive) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]models.File) @@ -144,8 +144,8 @@ func (_m *FileReaderWriter) FindAllByPath(ctx context.Context, path string) ([]m } var r1 error - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, path) + if rf, ok := ret.Get(1).(func(context.Context, string, bool) error); ok { + r1 = rf(ctx, path, caseSensitive) } else { r1 = ret.Error(1) } @@ -153,13 +153,13 @@ func (_m *FileReaderWriter) FindAllByPath(ctx 
context.Context, path string) ([]m return r0, r1 } -// FindAllInPaths provides a mock function with given fields: ctx, p, limit, offset -func (_m *FileReaderWriter) FindAllInPaths(ctx context.Context, p []string, limit int, offset int) ([]models.File, error) { - ret := _m.Called(ctx, p, limit, offset) +// FindAllInPaths provides a mock function with given fields: ctx, p, includeZipContents, limit, offset +func (_m *FileReaderWriter) FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit int, offset int) ([]models.File, error) { + ret := _m.Called(ctx, p, includeZipContents, limit, offset) var r0 []models.File - if rf, ok := ret.Get(0).(func(context.Context, []string, int, int) []models.File); ok { - r0 = rf(ctx, p, limit, offset) + if rf, ok := ret.Get(0).(func(context.Context, []string, bool, int, int) []models.File); ok { + r0 = rf(ctx, p, includeZipContents, limit, offset) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]models.File) @@ -167,8 +167,8 @@ func (_m *FileReaderWriter) FindAllInPaths(ctx context.Context, p []string, limi } var r1 error - if rf, ok := ret.Get(1).(func(context.Context, []string, int, int) error); ok { - r1 = rf(ctx, p, limit, offset) + if rf, ok := ret.Get(1).(func(context.Context, []string, bool, int, int) error); ok { + r1 = rf(ctx, p, includeZipContents, limit, offset) } else { r1 = ret.Error(1) } @@ -222,13 +222,13 @@ func (_m *FileReaderWriter) FindByFingerprint(ctx context.Context, fp models.Fin return r0, r1 } -// FindByPath provides a mock function with given fields: ctx, path -func (_m *FileReaderWriter) FindByPath(ctx context.Context, path string) (models.File, error) { - ret := _m.Called(ctx, path) +// FindByPath provides a mock function with given fields: ctx, path, caseSensitive +func (_m *FileReaderWriter) FindByPath(ctx context.Context, path string, caseSensitive bool) (models.File, error) { + ret := _m.Called(ctx, path, caseSensitive) var r0 models.File - if rf, ok := 
ret.Get(0).(func(context.Context, string) models.File); ok { - r0 = rf(ctx, path) + if rf, ok := ret.Get(0).(func(context.Context, string, bool) models.File); ok { + r0 = rf(ctx, path, caseSensitive) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(models.File) @@ -236,8 +236,8 @@ func (_m *FileReaderWriter) FindByPath(ctx context.Context, path string) (models } var r1 error - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, path) + if rf, ok := ret.Get(1).(func(context.Context, string, bool) error); ok { + r1 = rf(ctx, path, caseSensitive) } else { r1 = ret.Error(1) } diff --git a/pkg/models/mocks/FolderReaderWriter.go b/pkg/models/mocks/FolderReaderWriter.go index 968bed4ad..d2230c645 100644 --- a/pkg/models/mocks/FolderReaderWriter.go +++ b/pkg/models/mocks/FolderReaderWriter.go @@ -86,13 +86,13 @@ func (_m *FolderReaderWriter) Find(ctx context.Context, id models.FolderID) (*mo return r0, r1 } -// FindAllInPaths provides a mock function with given fields: ctx, p, limit, offset -func (_m *FolderReaderWriter) FindAllInPaths(ctx context.Context, p []string, limit int, offset int) ([]*models.Folder, error) { - ret := _m.Called(ctx, p, limit, offset) +// FindAllInPaths provides a mock function with given fields: ctx, p, includeZipContents, limit, offset +func (_m *FolderReaderWriter) FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit int, offset int) ([]*models.Folder, error) { + ret := _m.Called(ctx, p, includeZipContents, limit, offset) var r0 []*models.Folder - if rf, ok := ret.Get(0).(func(context.Context, []string, int, int) []*models.Folder); ok { - r0 = rf(ctx, p, limit, offset) + if rf, ok := ret.Get(0).(func(context.Context, []string, bool, int, int) []*models.Folder); ok { + r0 = rf(ctx, p, includeZipContents, limit, offset) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]*models.Folder) @@ -100,8 +100,8 @@ func (_m *FolderReaderWriter) FindAllInPaths(ctx context.Context, p []string, li } var 
r1 error - if rf, ok := ret.Get(1).(func(context.Context, []string, int, int) error); ok { - r1 = rf(ctx, p, limit, offset) + if rf, ok := ret.Get(1).(func(context.Context, []string, bool, int, int) error); ok { + r1 = rf(ctx, p, includeZipContents, limit, offset) } else { r1 = ret.Error(1) } @@ -132,13 +132,13 @@ func (_m *FolderReaderWriter) FindByParentFolderID(ctx context.Context, parentFo return r0, r1 } -// FindByPath provides a mock function with given fields: ctx, path -func (_m *FolderReaderWriter) FindByPath(ctx context.Context, path string) (*models.Folder, error) { - ret := _m.Called(ctx, path) +// FindByPath provides a mock function with given fields: ctx, path, caseSensitive +func (_m *FolderReaderWriter) FindByPath(ctx context.Context, path string, caseSensitive bool) (*models.Folder, error) { + ret := _m.Called(ctx, path, caseSensitive) var r0 *models.Folder - if rf, ok := ret.Get(0).(func(context.Context, string) *models.Folder); ok { - r0 = rf(ctx, path) + if rf, ok := ret.Get(0).(func(context.Context, string, bool) *models.Folder); ok { + r0 = rf(ctx, path, caseSensitive) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.Folder) @@ -146,8 +146,8 @@ func (_m *FolderReaderWriter) FindByPath(ctx context.Context, path string) (*mod } var r1 error - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, path) + if rf, ok := ret.Get(1).(func(context.Context, string, bool) error); ok { + r1 = rf(ctx, path, caseSensitive) } else { r1 = ret.Error(1) } @@ -178,6 +178,98 @@ func (_m *FolderReaderWriter) FindByZipFileID(ctx context.Context, zipFileID mod return r0, r1 } +// FindMany provides a mock function with given fields: ctx, id +func (_m *FolderReaderWriter) FindMany(ctx context.Context, id []models.FolderID) ([]*models.Folder, error) { + ret := _m.Called(ctx, id) + + var r0 []*models.Folder + if rf, ok := ret.Get(0).(func(context.Context, []models.FolderID) []*models.Folder); ok { + r0 = rf(ctx, id) + } else { + if 
ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Folder) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []models.FolderID) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetManyParentFolderIDs provides a mock function with given fields: ctx, folderIDs +func (_m *FolderReaderWriter) GetManyParentFolderIDs(ctx context.Context, folderIDs []models.FolderID) ([][]models.FolderID, error) { + ret := _m.Called(ctx, folderIDs) + + var r0 [][]models.FolderID + if rf, ok := ret.Get(0).(func(context.Context, []models.FolderID) [][]models.FolderID); ok { + r0 = rf(ctx, folderIDs) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]models.FolderID) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []models.FolderID) error); ok { + r1 = rf(ctx, folderIDs) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetManySubFolderIDs provides a mock function with given fields: ctx, folderIDs +func (_m *FolderReaderWriter) GetManySubFolderIDs(ctx context.Context, folderIDs []models.FolderID) ([][]models.FolderID, error) { + ret := _m.Called(ctx, folderIDs) + + var r0 [][]models.FolderID + if rf, ok := ret.Get(0).(func(context.Context, []models.FolderID) [][]models.FolderID); ok { + r0 = rf(ctx, folderIDs) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]models.FolderID) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []models.FolderID) error); ok { + r1 = rf(ctx, folderIDs) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Query provides a mock function with given fields: ctx, options +func (_m *FolderReaderWriter) Query(ctx context.Context, options models.FolderQueryOptions) (*models.FolderQueryResult, error) { + ret := _m.Called(ctx, options) + + var r0 *models.FolderQueryResult + if rf, ok := ret.Get(0).(func(context.Context, models.FolderQueryOptions) *models.FolderQueryResult); ok { + r0 = rf(ctx, options) + } else { + if 
ret.Get(0) != nil { + r0 = ret.Get(0).(*models.FolderQueryResult) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FolderQueryOptions) error); ok { + r1 = rf(ctx, options) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Update provides a mock function with given fields: ctx, f func (_m *FolderReaderWriter) Update(ctx context.Context, f *models.Folder) error { ret := _m.Called(ctx, f) diff --git a/pkg/models/mocks/GalleryReaderWriter.go b/pkg/models/mocks/GalleryReaderWriter.go index f07f8a7d9..e835ea2bc 100644 --- a/pkg/models/mocks/GalleryReaderWriter.go +++ b/pkg/models/mocks/GalleryReaderWriter.go @@ -49,6 +49,20 @@ func (_m *GalleryReaderWriter) AddImages(ctx context.Context, galleryID int, ima return r0 } +// AddSceneIDs provides a mock function with given fields: ctx, galleryID, sceneIDs +func (_m *GalleryReaderWriter) AddSceneIDs(ctx context.Context, galleryID int, sceneIDs []int) error { + ret := _m.Called(ctx, galleryID, sceneIDs) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, []int) error); ok { + r0 = rf(ctx, galleryID, sceneIDs) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // All provides a mock function with given fields: ctx func (_m *GalleryReaderWriter) All(ctx context.Context) ([]*models.Gallery, error) { ret := _m.Called(ctx) @@ -114,13 +128,13 @@ func (_m *GalleryReaderWriter) CountByFileID(ctx context.Context, fileID models. 
return r0, r1 } -// Create provides a mock function with given fields: ctx, newGallery, fileIDs -func (_m *GalleryReaderWriter) Create(ctx context.Context, newGallery *models.Gallery, fileIDs []models.FileID) error { - ret := _m.Called(ctx, newGallery, fileIDs) +// Create provides a mock function with given fields: ctx, newGallery +func (_m *GalleryReaderWriter) Create(ctx context.Context, newGallery *models.CreateGalleryInput) error { + ret := _m.Called(ctx, newGallery) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Gallery, []models.FileID) error); ok { - r0 = rf(ctx, newGallery, fileIDs) + if rf, ok := ret.Get(0).(func(context.Context, *models.CreateGalleryInput) error); ok { + r0 = rf(ctx, newGallery) } else { r0 = ret.Error(0) } @@ -395,6 +409,52 @@ func (_m *GalleryReaderWriter) FindUserGalleryByTitle(ctx context.Context, title return r0, r1 } +// GetCustomFields provides a mock function with given fields: ctx, id +func (_m *GalleryReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) { + ret := _m.Called(ctx, id) + + var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids +func (_m *GalleryReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + ret := _m.Called(ctx, ids) + + var r0 []models.CustomFieldMap + if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.CustomFieldMap) + } + } + + var r1 error + if rf, ok := 
ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetFiles provides a mock function with given fields: ctx, relatedID func (_m *GalleryReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]models.File, error) { ret := _m.Called(ctx, relatedID) @@ -656,12 +716,26 @@ func (_m *GalleryReaderWriter) SetCover(ctx context.Context, galleryID int, cove return r0 } +// SetCustomFields provides a mock function with given fields: ctx, id, fields +func (_m *GalleryReaderWriter) SetCustomFields(ctx context.Context, id int, fields models.CustomFieldsInput) error { + ret := _m.Called(ctx, id, fields) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.CustomFieldsInput) error); ok { + r0 = rf(ctx, id, fields) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Update provides a mock function with given fields: ctx, updatedGallery -func (_m *GalleryReaderWriter) Update(ctx context.Context, updatedGallery *models.Gallery) error { +func (_m *GalleryReaderWriter) Update(ctx context.Context, updatedGallery *models.UpdateGalleryInput) error { ret := _m.Called(ctx, updatedGallery) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Gallery) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.UpdateGalleryInput) error); ok { r0 = rf(ctx, updatedGallery) } else { r0 = ret.Error(0) diff --git a/pkg/models/mocks/GroupReaderWriter.go b/pkg/models/mocks/GroupReaderWriter.go index dc745d094..ac9e513f4 100644 --- a/pkg/models/mocks/GroupReaderWriter.go +++ b/pkg/models/mocks/GroupReaderWriter.go @@ -312,6 +312,52 @@ func (_m *GroupReaderWriter) GetContainingGroupDescriptions(ctx context.Context, return r0, r1 } +// GetCustomFields provides a mock function with given fields: ctx, id +func (_m *GroupReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) { + ret := _m.Called(ctx, id) + + var r0 
map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids +func (_m *GroupReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + ret := _m.Called(ctx, ids) + + var r0 []models.CustomFieldMap + if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.CustomFieldMap) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetFrontImage provides a mock function with given fields: ctx, groupID func (_m *GroupReaderWriter) GetFrontImage(ctx context.Context, groupID int) ([]byte, error) { ret := _m.Called(ctx, groupID) @@ -497,6 +543,20 @@ func (_m *GroupReaderWriter) QueryCount(ctx context.Context, groupFilter *models return r0, r1 } +// SetCustomFields provides a mock function with given fields: ctx, id, fields +func (_m *GroupReaderWriter) SetCustomFields(ctx context.Context, id int, fields models.CustomFieldsInput) error { + ret := _m.Called(ctx, id, fields) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.CustomFieldsInput) error); ok { + r0 = rf(ctx, id, fields) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Update provides a mock function with given fields: ctx, updatedGroup func (_m *GroupReaderWriter) Update(ctx context.Context, updatedGroup *models.Group) error { ret := _m.Called(ctx, updatedGroup) diff --git a/pkg/models/mocks/ImageReaderWriter.go 
b/pkg/models/mocks/ImageReaderWriter.go index 2bbf4ceeb..f2c9934be 100644 --- a/pkg/models/mocks/ImageReaderWriter.go +++ b/pkg/models/mocks/ImageReaderWriter.go @@ -137,13 +137,13 @@ func (_m *ImageReaderWriter) CoverByGalleryID(ctx context.Context, galleryId int return r0, r1 } -// Create provides a mock function with given fields: ctx, newImage, fileIDs -func (_m *ImageReaderWriter) Create(ctx context.Context, newImage *models.Image, fileIDs []models.FileID) error { - ret := _m.Called(ctx, newImage, fileIDs) +// Create provides a mock function with given fields: ctx, newImage +func (_m *ImageReaderWriter) Create(ctx context.Context, newImage *models.CreateImageInput) error { + ret := _m.Called(ctx, newImage) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Image, []models.FileID) error); ok { - r0 = rf(ctx, newImage, fileIDs) + if rf, ok := ret.Get(0).(func(context.Context, *models.CreateImageInput) error); ok { + r0 = rf(ctx, newImage) } else { r0 = ret.Error(0) } @@ -393,6 +393,52 @@ func (_m *ImageReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models return r0, r1 } +// GetCustomFields provides a mock function with given fields: ctx, id +func (_m *ImageReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) { + ret := _m.Called(ctx, id) + + var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids +func (_m *ImageReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + ret := _m.Called(ctx, ids) + + var r0 []models.CustomFieldMap + if rf, ok := 
ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.CustomFieldMap) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetFiles provides a mock function with given fields: ctx, relatedID func (_m *ImageReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]models.File, error) { ret := _m.Called(ctx, relatedID) @@ -594,6 +640,27 @@ func (_m *ImageReaderWriter) OCountByPerformerID(ctx context.Context, performerI return r0, r1 } +// OCountByStudioID provides a mock function with given fields: ctx, studioID +func (_m *ImageReaderWriter) OCountByStudioID(ctx context.Context, studioID int) (int, error) { + ret := _m.Called(ctx, studioID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, int) int); ok { + r0 = rf(ctx, studioID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, studioID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Query provides a mock function with given fields: ctx, options func (_m *ImageReaderWriter) Query(ctx context.Context, options models.ImageQueryOptions) (*models.ImageQueryResult, error) { ret := _m.Called(ctx, options) @@ -673,6 +740,20 @@ func (_m *ImageReaderWriter) ResetOCounter(ctx context.Context, id int) (int, er return r0, r1 } +// SetCustomFields provides a mock function with given fields: ctx, id, fields +func (_m *ImageReaderWriter) SetCustomFields(ctx context.Context, id int, fields models.CustomFieldsInput) error { + ret := _m.Called(ctx, id, fields) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.CustomFieldsInput) error); ok { + r0 = rf(ctx, id, fields) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Size provides a mock function with given 
fields: ctx func (_m *ImageReaderWriter) Size(ctx context.Context) (float64, error) { ret := _m.Called(ctx) diff --git a/pkg/models/mocks/PerformerReaderWriter.go b/pkg/models/mocks/PerformerReaderWriter.go index dbf19a3cd..6487bc5a5 100644 --- a/pkg/models/mocks/PerformerReaderWriter.go +++ b/pkg/models/mocks/PerformerReaderWriter.go @@ -473,6 +473,20 @@ func (_m *PerformerReaderWriter) HasImage(ctx context.Context, performerID int) return r0, r1 } +// Merge provides a mock function with given fields: ctx, source, destination +func (_m *PerformerReaderWriter) Merge(ctx context.Context, source []int, destination int) error { + ret := _m.Called(ctx, source, destination) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, []int, int) error); ok { + r0 = rf(ctx, source, destination) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Query provides a mock function with given fields: ctx, performerFilter, findFilter func (_m *PerformerReaderWriter) Query(ctx context.Context, performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) { ret := _m.Called(ctx, performerFilter, findFilter) diff --git a/pkg/models/mocks/SceneReaderWriter.go b/pkg/models/mocks/SceneReaderWriter.go index 8e4e5ae5a..0053ad6f8 100644 --- a/pkg/models/mocks/SceneReaderWriter.go +++ b/pkg/models/mocks/SceneReaderWriter.go @@ -754,6 +754,52 @@ func (_m *SceneReaderWriter) GetCover(ctx context.Context, sceneID int) ([]byte, return r0, r1 } +// GetCustomFields provides a mock function with given fields: ctx, id +func (_m *SceneReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) { + ret := _m.Called(ctx, id) + + var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := 
ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids +func (_m *SceneReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + ret := _m.Called(ctx, ids) + + var r0 []models.CustomFieldMap + if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.CustomFieldMap) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetFiles provides a mock function with given fields: ctx, relatedID func (_m *SceneReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]*models.VideoFile, error) { ret := _m.Called(ctx, relatedID) @@ -1141,6 +1187,27 @@ func (_m *SceneReaderWriter) HasCover(ctx context.Context, sceneID int) (bool, e return r0, r1 } +// OCountByGroupID provides a mock function with given fields: ctx, groupID +func (_m *SceneReaderWriter) OCountByGroupID(ctx context.Context, groupID int) (int, error) { + ret := _m.Called(ctx, groupID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, int) int); ok { + r0 = rf(ctx, groupID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, groupID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // OCountByPerformerID provides a mock function with given fields: ctx, performerID func (_m *SceneReaderWriter) OCountByPerformerID(ctx context.Context, performerID int) (int, error) { ret := _m.Called(ctx, performerID) @@ -1162,6 +1229,27 @@ func (_m *SceneReaderWriter) OCountByPerformerID(ctx context.Context, performerI return r0, r1 } +// OCountByStudioID provides a mock function with given fields: ctx, 
studioID +func (_m *SceneReaderWriter) OCountByStudioID(ctx context.Context, studioID int) (int, error) { + ret := _m.Called(ctx, studioID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, int) int); ok { + r0 = rf(ctx, studioID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, studioID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // PlayDuration provides a mock function with given fields: ctx func (_m *SceneReaderWriter) PlayDuration(ctx context.Context) (float64, error) { ret := _m.Called(ctx) @@ -1290,6 +1378,20 @@ func (_m *SceneReaderWriter) SaveActivity(ctx context.Context, sceneID int, resu return r0, r1 } +// SetCustomFields provides a mock function with given fields: ctx, id, fields +func (_m *SceneReaderWriter) SetCustomFields(ctx context.Context, id int, fields models.CustomFieldsInput) error { + ret := _m.Called(ctx, id, fields) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.CustomFieldsInput) error); ok { + r0 = rf(ctx, id, fields) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Size provides a mock function with given fields: ctx func (_m *SceneReaderWriter) Size(ctx context.Context) (float64, error) { ret := _m.Called(ctx) diff --git a/pkg/models/mocks/StudioReaderWriter.go b/pkg/models/mocks/StudioReaderWriter.go index d4932ca71..f57a73aa1 100644 --- a/pkg/models/mocks/StudioReaderWriter.go +++ b/pkg/models/mocks/StudioReaderWriter.go @@ -80,11 +80,11 @@ func (_m *StudioReaderWriter) CountByTagID(ctx context.Context, tagID int) (int, } // Create provides a mock function with given fields: ctx, newStudio -func (_m *StudioReaderWriter) Create(ctx context.Context, newStudio *models.Studio) error { +func (_m *StudioReaderWriter) Create(ctx context.Context, newStudio *models.CreateStudioInput) error { ret := _m.Called(ctx, newStudio) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, 
*models.Studio) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.CreateStudioInput) error); ok { r0 = rf(ctx, newStudio) } else { r0 = ret.Error(0) @@ -291,6 +291,52 @@ func (_m *StudioReaderWriter) GetAliases(ctx context.Context, relatedID int) ([] return r0, r1 } +// GetCustomFields provides a mock function with given fields: ctx, id +func (_m *StudioReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) { + ret := _m.Called(ctx, id) + + var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids +func (_m *StudioReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + ret := _m.Called(ctx, ids) + + var r0 []models.CustomFieldMap + if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.CustomFieldMap) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetImage provides a mock function with given fields: ctx, studioID func (_m *StudioReaderWriter) GetImage(ctx context.Context, studioID int) ([]byte, error) { ret := _m.Called(ctx, studioID) @@ -360,6 +406,29 @@ func (_m *StudioReaderWriter) GetTagIDs(ctx context.Context, relatedID int) ([]i return r0, r1 } +// GetURLs provides a mock function with given fields: ctx, relatedID +func (_m *StudioReaderWriter) GetURLs(ctx context.Context, relatedID int) ([]string, error) { + ret := 
_m.Called(ctx, relatedID) + + var r0 []string + if rf, ok := ret.Get(0).(func(context.Context, int) []string); ok { + r0 = rf(ctx, relatedID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // HasImage provides a mock function with given fields: ctx, studioID func (_m *StudioReaderWriter) HasImage(ctx context.Context, studioID int) (bool, error) { ret := _m.Called(ctx, studioID) @@ -456,11 +525,11 @@ func (_m *StudioReaderWriter) QueryForAutoTag(ctx context.Context, words []strin } // Update provides a mock function with given fields: ctx, updatedStudio -func (_m *StudioReaderWriter) Update(ctx context.Context, updatedStudio *models.Studio) error { +func (_m *StudioReaderWriter) Update(ctx context.Context, updatedStudio *models.UpdateStudioInput) error { ret := _m.Called(ctx, updatedStudio) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Studio) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.UpdateStudioInput) error); ok { r0 = rf(ctx, updatedStudio) } else { r0 = ret.Error(0) diff --git a/pkg/models/mocks/TagReaderWriter.go b/pkg/models/mocks/TagReaderWriter.go index a285b97bf..c4423ee52 100644 --- a/pkg/models/mocks/TagReaderWriter.go +++ b/pkg/models/mocks/TagReaderWriter.go @@ -101,11 +101,11 @@ func (_m *TagReaderWriter) CountByParentTagID(ctx context.Context, parentID int) } // Create provides a mock function with given fields: ctx, newTag -func (_m *TagReaderWriter) Create(ctx context.Context, newTag *models.Tag) error { +func (_m *TagReaderWriter) Create(ctx context.Context, newTag *models.CreateTagInput) error { ret := _m.Called(ctx, newTag) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Tag) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.CreateTagInput) error); ok { r0 = 
rf(ctx, newTag) } else { r0 = ret.Error(0) @@ -427,6 +427,52 @@ func (_m *TagReaderWriter) FindBySceneMarkerID(ctx context.Context, sceneMarkerI return r0, r1 } +// FindByStashID provides a mock function with given fields: ctx, stashID +func (_m *TagReaderWriter) FindByStashID(ctx context.Context, stashID models.StashID) ([]*models.Tag, error) { + ret := _m.Called(ctx, stashID) + + var r0 []*models.Tag + if rf, ok := ret.Get(0).(func(context.Context, models.StashID) []*models.Tag); ok { + r0 = rf(ctx, stashID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Tag) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.StashID) error); ok { + r1 = rf(ctx, stashID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByStashIDStatus provides a mock function with given fields: ctx, hasStashID, stashboxEndpoint +func (_m *TagReaderWriter) FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*models.Tag, error) { + ret := _m.Called(ctx, hasStashID, stashboxEndpoint) + + var r0 []*models.Tag + if rf, ok := ret.Get(0).(func(context.Context, bool, string) []*models.Tag); ok { + r0 = rf(ctx, hasStashID, stashboxEndpoint) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Tag) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, bool, string) error); ok { + r1 = rf(ctx, hasStashID, stashboxEndpoint) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByStudioID provides a mock function with given fields: ctx, studioID func (_m *TagReaderWriter) FindByStudioID(ctx context.Context, studioID int) ([]*models.Tag, error) { ret := _m.Called(ctx, studioID) @@ -519,6 +565,52 @@ func (_m *TagReaderWriter) GetChildIDs(ctx context.Context, relatedID int) ([]in return r0, r1 } +// GetCustomFields provides a mock function with given fields: ctx, id +func (_m *TagReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) { + 
ret := _m.Called(ctx, id) + + var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids +func (_m *TagReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + ret := _m.Called(ctx, ids) + + var r0 []models.CustomFieldMap + if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.CustomFieldMap) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetImage provides a mock function with given fields: ctx, tagID func (_m *TagReaderWriter) GetImage(ctx context.Context, tagID int) ([]byte, error) { ret := _m.Called(ctx, tagID) @@ -565,6 +657,29 @@ func (_m *TagReaderWriter) GetParentIDs(ctx context.Context, relatedID int) ([]i return r0, r1 } +// GetStashIDs provides a mock function with given fields: ctx, relatedID +func (_m *TagReaderWriter) GetStashIDs(ctx context.Context, relatedID int) ([]models.StashID, error) { + ret := _m.Called(ctx, relatedID) + + var r0 []models.StashID + if rf, ok := ret.Get(0).(func(context.Context, int) []models.StashID); ok { + r0 = rf(ctx, relatedID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.StashID) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // HasImage provides a mock function with given fields: ctx, tagID func (_m 
*TagReaderWriter) HasImage(ctx context.Context, tagID int) (bool, error) { ret := _m.Called(ctx, tagID) @@ -653,12 +768,26 @@ func (_m *TagReaderWriter) QueryForAutoTag(ctx context.Context, words []string) return r0, r1 } +// SetCustomFields provides a mock function with given fields: ctx, id, fields +func (_m *TagReaderWriter) SetCustomFields(ctx context.Context, id int, fields models.CustomFieldsInput) error { + ret := _m.Called(ctx, id, fields) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.CustomFieldsInput) error); ok { + r0 = rf(ctx, id, fields) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Update provides a mock function with given fields: ctx, updatedTag -func (_m *TagReaderWriter) Update(ctx context.Context, updatedTag *models.Tag) error { +func (_m *TagReaderWriter) Update(ctx context.Context, updatedTag *models.UpdateTagInput) error { ret := _m.Called(ctx, updatedTag) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Tag) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.UpdateTagInput) error); ok { r0 = rf(ctx, updatedTag) } else { r0 = ret.Error(0) diff --git a/pkg/models/mocks/database.go b/pkg/models/mocks/database.go index ec4177b30..88f106e19 100644 --- a/pkg/models/mocks/database.go +++ b/pkg/models/mocks/database.go @@ -3,6 +3,7 @@ package mocks import ( "context" + "errors" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" @@ -89,6 +90,16 @@ func (db *Database) AssertExpectations(t mock.TestingT) { db.SavedFilter.AssertExpectations(t) } +// WithTxnCtx runs fn with a context that has a transaction hook manager registered, +// so code that calls txn.AddPostCommitHook (e.g. plugin cache) won't nil-panic. +// Always rolls back to avoid executing the registered hooks. 
+func (db *Database) WithTxnCtx(fn func(ctx context.Context)) { + _ = txn.WithTxn(context.Background(), db, func(ctx context.Context) error { + fn(ctx) + return errors.New("rollback") + }) +} + func (db *Database) Repository() models.Repository { return models.Repository{ TxnManager: db, diff --git a/pkg/models/model_file.go b/pkg/models/model_file.go index e9df57990..f6b8bdc51 100644 --- a/pkg/models/model_file.go +++ b/pkg/models/model_file.go @@ -79,6 +79,14 @@ func (i FileID) MarshalGQL(w io.Writer) { fmt.Fprint(w, strconv.Quote(i.String())) } +func FileIDsFromInts(ids []int) []FileID { + ret := make([]FileID, len(ids)) + for i, id := range ids { + ret[i] = FileID(id) + } + return ret +} + // DirEntry represents a file or directory in the file system. type DirEntry struct { ZipFileID *FileID `json:"zip_file_id"` @@ -252,6 +260,10 @@ func (f ImageFile) GetHeight() int { return f.Height } +func (f ImageFile) Megapixels() float64 { + return float64(f.Width*f.Height) / 1e6 +} + func (f ImageFile) GetFormat() string { return f.Format } diff --git a/pkg/models/model_folder.go b/pkg/models/model_folder.go index 590cdd7bd..39897aa60 100644 --- a/pkg/models/model_folder.go +++ b/pkg/models/model_folder.go @@ -35,6 +35,14 @@ func (i FolderID) MarshalGQL(w io.Writer) { fmt.Fprint(w, strconv.Quote(i.String())) } +func FolderIDsFromInts(ids []int) []FolderID { + ret := make([]FolderID, len(ids)) + for i, id := range ids { + ret[i] = FolderID(id) + } + return ret +} + // Folder represents a folder in the file system. 
type Folder struct { ID FolderID `json:"id"` diff --git a/pkg/models/model_gallery.go b/pkg/models/model_gallery.go index 4b6a3183d..bbdba46a6 100644 --- a/pkg/models/model_gallery.go +++ b/pkg/models/model_gallery.go @@ -46,6 +46,20 @@ func NewGallery() Gallery { } } +type CreateGalleryInput struct { + *Gallery + + FileIDs []FileID + CustomFields map[string]interface{} `json:"custom_fields"` +} + +type UpdateGalleryInput struct { + *Gallery + + FileIDs []FileID + CustomFields CustomFieldsInput `json:"custom_fields"` +} + // GalleryPartial represents part of a Gallery object. It is used to update // the database entry. Only non-nil fields will be updated. type GalleryPartial struct { @@ -70,6 +84,8 @@ type GalleryPartial struct { TagIDs *UpdateIDs PerformerIDs *UpdateIDs PrimaryFileID *FileID + + CustomFields CustomFieldsInput } func NewGalleryPartial() GalleryPartial { diff --git a/pkg/models/model_group.go b/pkg/models/model_group.go index 82c71996a..5bfb42c44 100644 --- a/pkg/models/model_group.go +++ b/pkg/models/model_group.go @@ -34,6 +34,14 @@ func NewGroup() Group { } } +type CreateGroupInput struct { + *Group + + CustomFields map[string]interface{} `json:"custom_fields"` + FrontImageData []byte + BackImageData []byte +} + func (m *Group) LoadURLs(ctx context.Context, l URLLoader) error { return m.URLs.load(func() ([]string, error) { return l.GetURLs(ctx, m.ID) @@ -74,6 +82,8 @@ type GroupPartial struct { SubGroups *UpdateGroupDescriptions CreatedAt OptionalTime UpdatedAt OptionalTime + + CustomFields CustomFieldsInput } func NewGroupPartial() GroupPartial { diff --git a/pkg/models/model_image.go b/pkg/models/model_image.go index 1d0993536..72ca61826 100644 --- a/pkg/models/model_image.go +++ b/pkg/models/model_image.go @@ -47,6 +47,13 @@ func NewImage() Image { } } +type CreateImageInput struct { + *Image + + FileIDs []FileID + CustomFields map[string]interface{} `json:"custom_fields"` +} + type ImagePartial struct { Title OptionalString Code 
OptionalString @@ -66,6 +73,7 @@ type ImagePartial struct { TagIDs *UpdateIDs PerformerIDs *UpdateIDs PrimaryFileID *FileID + CustomFields CustomFieldsInput } func NewImagePartial() ImagePartial { diff --git a/pkg/models/model_performer.go b/pkg/models/model_performer.go index 566dcae1e..7bc3b3174 100644 --- a/pkg/models/model_performer.go +++ b/pkg/models/model_performer.go @@ -6,25 +6,26 @@ import ( ) type Performer struct { - ID int `json:"id"` - Name string `json:"name"` - Disambiguation string `json:"disambiguation"` - Gender *GenderEnum `json:"gender"` - Birthdate *Date `json:"birthdate"` - Ethnicity string `json:"ethnicity"` - Country string `json:"country"` - EyeColor string `json:"eye_color"` - Height *int `json:"height"` - Measurements string `json:"measurements"` - FakeTits string `json:"fake_tits"` - PenisLength *float64 `json:"penis_length"` - Circumcised *CircumisedEnum `json:"circumcised"` - CareerLength string `json:"career_length"` - Tattoos string `json:"tattoos"` - Piercings string `json:"piercings"` - Favorite bool `json:"favorite"` - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` + ID int `json:"id"` + Name string `json:"name"` + Disambiguation string `json:"disambiguation"` + Gender *GenderEnum `json:"gender"` + Birthdate *Date `json:"birthdate"` + Ethnicity string `json:"ethnicity"` + Country string `json:"country"` + EyeColor string `json:"eye_color"` + Height *int `json:"height"` + Measurements string `json:"measurements"` + FakeTits string `json:"fake_tits"` + PenisLength *float64 `json:"penis_length"` + Circumcised *CircumcisedEnum `json:"circumcised"` + CareerStart *Date `json:"career_start"` + CareerEnd *Date `json:"career_end"` + Tattoos string `json:"tattoos"` + Piercings string `json:"piercings"` + Favorite bool `json:"favorite"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` // Rating expressed in 1-100 scale Rating *int `json:"rating"` Details string 
`json:"details"` @@ -75,7 +76,8 @@ type PerformerPartial struct { FakeTits OptionalString PenisLength OptionalFloat64 Circumcised OptionalString - CareerLength OptionalString + CareerStart OptionalDate + CareerEnd OptionalDate Tattoos OptionalString Piercings OptionalString Favorite OptionalBool diff --git a/pkg/models/model_scene.go b/pkg/models/model_scene.go index cf0499388..64ad34b9c 100644 --- a/pkg/models/model_scene.go +++ b/pkg/models/model_scene.go @@ -53,6 +53,20 @@ func NewScene() Scene { } } +type CreateSceneInput struct { + *Scene + + FileIDs []FileID + CoverImage []byte + CustomFields CustomFieldMap `json:"custom_fields"` +} + +type UpdateSceneInput struct { + *Scene + + CustomFields CustomFieldsInput `json:"custom_fields"` +} + // ScenePartial represents part of a Scene object. It is used to update // the database entry. type ScenePartial struct { diff --git a/pkg/models/model_scene_marker.go b/pkg/models/model_scene_marker.go index 778603315..8d723b391 100644 --- a/pkg/models/model_scene_marker.go +++ b/pkg/models/model_scene_marker.go @@ -30,6 +30,7 @@ type SceneMarkerPartial struct { Seconds OptionalFloat64 EndSeconds OptionalFloat64 PrimaryTagID OptionalInt + TagIDs *UpdateIDs SceneID OptionalInt CreatedAt OptionalTime UpdatedAt OptionalTime diff --git a/pkg/models/model_scraped_item.go b/pkg/models/model_scraped_item.go index 008a05c3d..d20fbd589 100644 --- a/pkg/models/model_scraped_item.go +++ b/pkg/models/model_scraped_item.go @@ -14,21 +14,25 @@ type ScrapedStudio struct { // Set if studio matched StoredID *string `json:"stored_id"` Name string `json:"name"` - URL *string `json:"url"` + URL *string `json:"url"` // deprecated + URLs []string `json:"urls"` Parent *ScrapedStudio `json:"parent"` Image *string `json:"image"` Images []string `json:"images"` + Details *string `json:"details"` + Aliases *string `json:"aliases"` + Tags []*ScrapedTag `json:"tags"` RemoteSiteID *string `json:"remote_site_id"` } func (ScrapedStudio) IsScrapedContent() 
{} -func (s *ScrapedStudio) ToStudio(endpoint string, excluded map[string]bool) *Studio { +func (s *ScrapedStudio) ToStudio(endpoint string, excluded map[string]bool) *CreateStudioInput { // Populate a new studio from the input - ret := NewStudio() - ret.Name = s.Name + ret := NewCreateStudioInput() + ret.Name = strings.TrimSpace(s.Name) - if s.RemoteSiteID != nil && endpoint != "" { + if s.RemoteSiteID != nil && endpoint != "" && *s.RemoteSiteID != "" { ret.StashIDs = NewRelatedStashIDs([]StashID{ { Endpoint: endpoint, @@ -38,8 +42,28 @@ func (s *ScrapedStudio) ToStudio(endpoint string, excluded map[string]bool) *Stu }) } - if s.URL != nil && !excluded["url"] { - ret.URL = *s.URL + // if URLs are provided, only use those + if len(s.URLs) > 0 { + if !excluded["urls"] { + ret.URLs = NewRelatedStrings(s.URLs) + } + } else { + urls := []string{} + if s.URL != nil && !excluded["url"] { + urls = append(urls, *s.URL) + } + + if len(urls) > 0 { + ret.URLs = NewRelatedStrings(urls) + } + } + + if s.Details != nil && !excluded["details"] { + ret.Details = *s.Details + } + + if s.Aliases != nil && *s.Aliases != "" && !excluded["aliases"] { + ret.Aliases = NewRelatedStrings(stringslice.FromString(*s.Aliases, ",")) } if s.Parent != nil && s.Parent.StoredID != nil && !excluded["parent"] && !excluded["parent_studio"] { @@ -71,11 +95,40 @@ func (s *ScrapedStudio) ToPartial(id string, endpoint string, excluded map[strin currentTime := time.Now() if s.Name != "" && !excluded["name"] { - ret.Name = NewOptionalString(s.Name) + ret.Name = NewOptionalString(strings.TrimSpace(s.Name)) } - if s.URL != nil && !excluded["url"] { - ret.URL = NewOptionalString(*s.URL) + if len(s.URLs) > 0 { + if !excluded["urls"] { + + ret.URLs = &UpdateStrings{ + Values: stringslice.TrimSpace(s.URLs), + Mode: RelationshipUpdateModeSet, + } + } + } else { + urls := []string{} + if s.URL != nil && !excluded["url"] { + urls = append(urls, strings.TrimSpace(*s.URL)) + } + + if len(urls) > 0 { + ret.URLs = 
&UpdateStrings{ + Values: stringslice.TrimSpace(urls), + Mode: RelationshipUpdateModeSet, + } + } + } + + if s.Details != nil && !excluded["details"] { + ret.Details = NewOptionalString(strings.TrimSpace(*s.Details)) + } + + if s.Aliases != nil && *s.Aliases != "" && !excluded["aliases"] { + ret.Aliases = &UpdateStrings{ + Values: stringslice.TrimSpace(stringslice.FromString(*s.Aliases, ",")), + Mode: RelationshipUpdateModeSet, + } } if s.Parent != nil && !excluded["parent"] { @@ -88,7 +141,7 @@ func (s *ScrapedStudio) ToPartial(id string, endpoint string, excluded map[strin } } - if s.RemoteSiteID != nil && endpoint != "" { + if s.RemoteSiteID != nil && endpoint != "" && *s.RemoteSiteID != "" { ret.StashIDs = &UpdateStashIDs{ StashIDs: existingStashIDs, Mode: RelationshipUpdateModeSet, @@ -123,7 +176,9 @@ type ScrapedPerformer struct { FakeTits *string `json:"fake_tits"` PenisLength *string `json:"penis_length"` Circumcised *string `json:"circumcised"` - CareerLength *string `json:"career_length"` + CareerLength *string `json:"career_length"` // deprecated: use CareerStart/CareerEnd + CareerStart *string `json:"career_start"` + CareerEnd *string `json:"career_end"` Tattoos *string `json:"tattoos"` Piercings *string `json:"piercings"` Aliases *string `json:"aliases"` @@ -145,10 +200,14 @@ func (ScrapedPerformer) IsScrapedContent() {} func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool) *Performer { ret := NewPerformer() currentTime := time.Now() - ret.Name = *p.Name + ret.Name = strings.TrimSpace(*p.Name) if p.Aliases != nil && !excluded["aliases"] { - ret.Aliases = NewRelatedStrings(stringslice.FromString(*p.Aliases, ",")) + aliases := stringslice.FromString(*p.Aliases, ",") + for i, alias := range aliases { + aliases[i] = strings.TrimSpace(alias) + } + ret.Aliases = NewRelatedStrings(aliases) } if p.Birthdate != nil && !excluded["birthdate"] { date, err := ParseDate(*p.Birthdate) @@ -162,8 +221,20 @@ func (p *ScrapedPerformer) 
ToPerformer(endpoint string, excluded map[string]bool ret.DeathDate = &date } } - if p.CareerLength != nil && !excluded["career_length"] { - ret.CareerLength = *p.CareerLength + + // assume that career length is _not_ populated in favour of start/end + + if p.CareerStart != nil && !excluded["career_start"] { + date, err := ParseDate(*p.CareerStart) + if err == nil { + ret.CareerStart = &date + } + } + if p.CareerEnd != nil && !excluded["career_end"] { + date, err := ParseDate(*p.CareerEnd) + if err == nil { + ret.CareerEnd = &date + } } if p.Country != nil && !excluded["country"] { ret.Country = *p.Country @@ -221,7 +292,7 @@ func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool } } if p.Circumcised != nil && !excluded["circumcised"] { - v := CircumisedEnum(*p.Circumcised) + v := CircumcisedEnum(*p.Circumcised) if v.IsValid() { ret.Circumcised = &v } @@ -249,7 +320,7 @@ func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool } } - if p.RemoteSiteID != nil && endpoint != "" { + if p.RemoteSiteID != nil && endpoint != "" && *p.RemoteSiteID != "" { ret.StashIDs = NewRelatedStashIDs([]StashID{ { Endpoint: endpoint, @@ -299,7 +370,16 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool, } } if p.CareerLength != nil && !excluded["career_length"] { - ret.CareerLength = NewOptionalString(*p.CareerLength) + // parse career_length into career_start/career_end + start, end, err := ParseYearRangeString(*p.CareerLength) + if err == nil { + if start != nil { + ret.CareerStart = NewOptionalDate(*start) + } + if end != nil { + ret.CareerEnd = NewOptionalDate(*end) + } + } } if p.Country != nil && !excluded["country"] { ret.Country = NewOptionalString(*p.Country) @@ -378,7 +458,7 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool, } } - if p.RemoteSiteID != nil && endpoint != "" { + if p.RemoteSiteID != nil && endpoint != "" && *p.RemoteSiteID != "" { ret.StashIDs = 
&UpdateStashIDs{ StashIDs: existingStashIDs, Mode: RelationshipUpdateModeSet, @@ -395,12 +475,95 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool, type ScrapedTag struct { // Set if tag matched - StoredID *string `json:"stored_id"` - Name string `json:"name"` + StoredID *string `json:"stored_id"` + Name string `json:"name"` + Description *string `json:"description"` + AliasList []string `json:"alias_list"` + RemoteSiteID *string `json:"remote_site_id"` + Parent *ScrapedTag `json:"parent"` } func (ScrapedTag) IsScrapedContent() {} +func (t *ScrapedTag) ToTag(endpoint string, excluded map[string]bool) *Tag { + currentTime := time.Now() + ret := NewTag() + ret.Name = t.Name + ret.ParentIDs = NewRelatedIDs([]int{}) + ret.ChildIDs = NewRelatedIDs([]int{}) + ret.Aliases = NewRelatedStrings([]string{}) + + if t.Description != nil && !excluded["description"] { + ret.Description = *t.Description + } + + if len(t.AliasList) > 0 && !excluded["aliases"] { + ret.Aliases = NewRelatedStrings(t.AliasList) + } + + if t.Parent != nil && t.Parent.StoredID != nil { + parentID, err := strconv.Atoi(*t.Parent.StoredID) + if err == nil && parentID > 0 { + ret.ParentIDs = NewRelatedIDs([]int{parentID}) + } + } + + if t.RemoteSiteID != nil && endpoint != "" && *t.RemoteSiteID != "" { + ret.StashIDs = NewRelatedStashIDs([]StashID{ + { + Endpoint: endpoint, + StashID: *t.RemoteSiteID, + UpdatedAt: currentTime, + }, + }) + } + + return &ret +} + +func (t *ScrapedTag) ToPartial(storedID string, endpoint string, excluded map[string]bool, existingStashIDs []StashID) TagPartial { + ret := NewTagPartial() + + if t.Name != "" && !excluded["name"] { + ret.Name = NewOptionalString(t.Name) + } + + if t.Description != nil && !excluded["description"] { + ret.Description = NewOptionalString(*t.Description) + } + + if len(t.AliasList) > 0 && !excluded["aliases"] { + ret.Aliases = &UpdateStrings{ + Values: t.AliasList, + Mode: RelationshipUpdateModeSet, + } + } + + if 
t.Parent != nil && t.Parent.StoredID != nil { + parentID, err := strconv.Atoi(*t.Parent.StoredID) + if err == nil && parentID > 0 { + ret.ParentIDs = &UpdateIDs{ + IDs: []int{parentID}, + Mode: RelationshipUpdateModeAdd, + } + } + } + + if t.RemoteSiteID != nil && endpoint != "" && *t.RemoteSiteID != "" { + ret.StashIDs = &UpdateStashIDs{ + StashIDs: existingStashIDs, + Mode: RelationshipUpdateModeSet, + } + ret.StashIDs.Set(StashID{ + Endpoint: endpoint, + StashID: *t.RemoteSiteID, + UpdatedAt: time.Now(), + }) + } + + return ret +} + func ScrapedTagSortFunction(a, b *ScrapedTag) int { return strings.Compare(strings.ToLower(a.Name), strings.ToLower(b.Name)) } @@ -462,6 +625,7 @@ type ScrapedGroup struct { Date *string `json:"date"` Rating *string `json:"rating"` Director *string `json:"director"` + URL *string `json:"url"` // included for backward compatibility URLs []string `json:"urls"` Synopsis *string `json:"synopsis"` Studio *ScrapedStudio `json:"studio"` diff --git a/pkg/models/model_scraped_item_test.go b/pkg/models/model_scraped_item_test.go index 1e8edccb4..1956d8a0b 100644 --- a/pkg/models/model_scraped_item_test.go +++ b/pkg/models/model_scraped_item_test.go @@ -11,6 +11,7 @@ import ( func Test_scrapedToStudioInput(t *testing.T) { const name = "name" url := "url" + url2 := "url2" emptyEndpoint := "" endpoint := "endpoint" remoteSiteID := "remoteSiteID" @@ -25,13 +26,33 @@ func Test_scrapedToStudioInput(t *testing.T) { "set all", &ScrapedStudio{ Name: name, + URLs: []string{url, url2}, URL: &url, RemoteSiteID: &remoteSiteID, }, endpoint, &Studio{ Name: name, - URL: url, + URLs: NewRelatedStrings([]string{url, url2}), + StashIDs: NewRelatedStashIDs([]StashID{ + { + Endpoint: endpoint, + StashID: remoteSiteID, + }, + }), + }, + }, + { + "set url instead of urls", + &ScrapedStudio{ + Name: name, + URL: &url, + RemoteSiteID: &remoteSiteID, + }, + endpoint, + &Studio{ + Name: name, + URLs: NewRelatedStrings([]string{url}), StashIDs: 
NewRelatedStashIDs([]StashID{ { Endpoint: endpoint, @@ -92,7 +113,7 @@ func Test_scrapedToStudioInput(t *testing.T) { got.StashIDs.List()[stid].UpdatedAt = time.Time{} } } - assert.Equal(t, tt.want, got) + assert.Equal(t, tt.want, got.Studio) }) } } @@ -103,9 +124,10 @@ func Test_scrapedToPerformerInput(t *testing.T) { endpoint := "endpoint" remoteSiteID := "remoteSiteID" - var stringValues []string - for i := 0; i < 20; i++ { - stringValues = append(stringValues, strconv.Itoa(i)) + const nValues = 19 + stringValues := make([]string, nValues) + for i := 0; i < nValues; i++ { + stringValues[i] = strconv.Itoa(i) } upTo := 0 @@ -162,7 +184,8 @@ func Test_scrapedToPerformerInput(t *testing.T) { Weight: nextVal(), Measurements: nextVal(), FakeTits: nextVal(), - CareerLength: nextVal(), + CareerStart: dateStrFromInt(2005), + CareerEnd: dateStrFromInt(2015), Tattoos: nextVal(), Piercings: nextVal(), Aliases: nextVal(), @@ -187,8 +210,9 @@ func Test_scrapedToPerformerInput(t *testing.T) { Weight: nextIntVal(), Measurements: *nextVal(), FakeTits: *nextVal(), - CareerLength: *nextVal(), - Tattoos: *nextVal(), + CareerStart: dateFromInt(2005), + CareerEnd: dateFromInt(2015), + Tattoos: *nextVal(), // skip CareerLength counter slot Piercings: *nextVal(), Aliases: NewRelatedStrings([]string{*nextVal()}), URLs: NewRelatedStrings([]string{*nextVal(), *nextVal(), *nextVal()}), @@ -321,9 +345,12 @@ func TestScrapedStudio_ToPartial(t *testing.T) { fullStudio, stdArgs, StudioPartial{ - ID: id, - Name: NewOptionalString(name), - URL: NewOptionalString(url), + ID: id, + Name: NewOptionalString(name), + URLs: &UpdateStrings{ + Values: []string{url}, + Mode: RelationshipUpdateModeSet, + }, ParentID: NewOptionalInt(parentStoredID), StashIDs: &UpdateStashIDs{ StashIDs: append(existingStashIDs, StashID{ diff --git a/pkg/models/model_studio.go b/pkg/models/model_studio.go index 0f4a09bc2..ec81aac0e 100644 --- a/pkg/models/model_studio.go +++ b/pkg/models/model_studio.go @@ -8,7 +8,6 @@ 
import ( type Studio struct { ID int `json:"id"` Name string `json:"name"` - URL string `json:"url"` ParentID *int `json:"parent_id"` CreatedAt time.Time `json:"created_at"` UpdatedAt time.Time `json:"updated_at"` @@ -17,12 +16,26 @@ type Studio struct { Favorite bool `json:"favorite"` Details string `json:"details"` IgnoreAutoTag bool `json:"ignore_auto_tag"` + Organized bool `json:"organized"` Aliases RelatedStrings `json:"aliases"` + URLs RelatedStrings `json:"urls"` TagIDs RelatedIDs `json:"tag_ids"` StashIDs RelatedStashIDs `json:"stash_ids"` } +type CreateStudioInput struct { + *Studio + + CustomFields map[string]interface{} `json:"custom_fields"` +} + +type UpdateStudioInput struct { + *Studio + + CustomFields CustomFieldsInput `json:"custom_fields"` +} + func NewStudio() Studio { currentTime := time.Now() return Studio{ @@ -31,11 +44,17 @@ func NewStudio() Studio { } } +func NewCreateStudioInput() CreateStudioInput { + s := NewStudio() + return CreateStudioInput{ + Studio: &s, + } +} + // StudioPartial represents part of a Studio object. It is used to update the database entry. 
type StudioPartial struct { ID int Name OptionalString - URL OptionalString ParentID OptionalInt // Rating expressed in 1-100 scale Rating OptionalInt @@ -44,10 +63,14 @@ type StudioPartial struct { CreatedAt OptionalTime UpdatedAt OptionalTime IgnoreAutoTag OptionalBool + Organized OptionalBool Aliases *UpdateStrings + URLs *UpdateStrings TagIDs *UpdateIDs StashIDs *UpdateStashIDs + + CustomFields CustomFieldsInput } func NewStudioPartial() StudioPartial { @@ -63,6 +86,12 @@ func (s *Studio) LoadAliases(ctx context.Context, l AliasLoader) error { }) } +func (s *Studio) LoadURLs(ctx context.Context, l URLLoader) error { + return s.URLs.load(func() ([]string, error) { + return l.GetURLs(ctx, s.ID) + }) +} + func (s *Studio) LoadTagIDs(ctx context.Context, l TagIDLoader) error { return s.TagIDs.load(func() ([]int, error) { return l.GetTagIDs(ctx, s.ID) diff --git a/pkg/models/model_tag.go b/pkg/models/model_tag.go index 0d845750f..aee468639 100644 --- a/pkg/models/model_tag.go +++ b/pkg/models/model_tag.go @@ -15,9 +15,10 @@ type Tag struct { CreatedAt time.Time `json:"created_at"` UpdatedAt time.Time `json:"updated_at"` - Aliases RelatedStrings `json:"aliases"` - ParentIDs RelatedIDs `json:"parent_ids"` - ChildIDs RelatedIDs `json:"tag_ids"` + Aliases RelatedStrings `json:"aliases"` + ParentIDs RelatedIDs `json:"parent_ids"` + ChildIDs RelatedIDs `json:"tag_ids"` + StashIDs RelatedStashIDs `json:"stash_ids"` } func NewTag() Tag { @@ -28,6 +29,18 @@ func NewTag() Tag { } } +type CreateTagInput struct { + *Tag + + CustomFields map[string]interface{} `json:"custom_fields"` +} + +type UpdateTagInput struct { + *Tag + + CustomFields CustomFieldsInput `json:"custom_fields"` +} + func (s *Tag) LoadAliases(ctx context.Context, l AliasLoader) error { return s.Aliases.load(func() ([]string, error) { return l.GetAliases(ctx, s.ID) @@ -46,6 +59,12 @@ func (s *Tag) LoadChildIDs(ctx context.Context, l TagRelationLoader) error { }) } +func (s *Tag) LoadStashIDs(ctx 
context.Context, l StashIDLoader) error { + return s.StashIDs.load(func() ([]StashID, error) { + return l.GetStashIDs(ctx, s.ID) + }) +} + type TagPartial struct { Name OptionalString SortName OptionalString @@ -58,6 +77,9 @@ type TagPartial struct { Aliases *UpdateStrings ParentIDs *UpdateIDs ChildIDs *UpdateIDs + StashIDs *UpdateStashIDs + + CustomFields CustomFieldsInput } func NewTagPartial() TagPartial { diff --git a/pkg/models/paths/paths_generated.go b/pkg/models/paths/paths_generated.go index d87e1eed6..2b5f5003e 100644 --- a/pkg/models/paths/paths_generated.go +++ b/pkg/models/paths/paths_generated.go @@ -43,6 +43,9 @@ func (gp *generatedPaths) GetTmpPath(fileName string) string { // TempFile creates a temporary file using os.CreateTemp. // It is the equivalent of calling os.CreateTemp using Tmp and pattern. func (gp *generatedPaths) TempFile(pattern string) (*os.File, error) { + if err := gp.EnsureTmpDir(); err != nil { + logger.Warnf("Could not ensure existence of a temporary directory: %v", err) + } return os.CreateTemp(gp.Tmp, pattern) } diff --git a/pkg/models/performer.go b/pkg/models/performer.go index 239d8347f..606b87f9f 100644 --- a/pkg/models/performer.go +++ b/pkg/models/performer.go @@ -61,49 +61,49 @@ type GenderCriterionInput struct { Modifier CriterionModifier `json:"modifier"` } -type CircumisedEnum string +type CircumcisedEnum string const ( - CircumisedEnumCut CircumisedEnum = "CUT" - CircumisedEnumUncut CircumisedEnum = "UNCUT" + CircumcisedEnumCut CircumcisedEnum = "CUT" + CircumcisedEnumUncut CircumcisedEnum = "UNCUT" ) -var AllCircumcisionEnum = []CircumisedEnum{ - CircumisedEnumCut, - CircumisedEnumUncut, +var AllCircumcisionEnum = []CircumcisedEnum{ + CircumcisedEnumCut, + CircumcisedEnumUncut, } -func (e CircumisedEnum) IsValid() bool { +func (e CircumcisedEnum) IsValid() bool { switch e { - case CircumisedEnumCut, CircumisedEnumUncut: + case CircumcisedEnumCut, CircumcisedEnumUncut: return true } return false } -func (e 
CircumisedEnum) String() string { +func (e CircumcisedEnum) String() string { return string(e) } -func (e *CircumisedEnum) UnmarshalGQL(v interface{}) error { +func (e *CircumcisedEnum) UnmarshalGQL(v interface{}) error { str, ok := v.(string) if !ok { return fmt.Errorf("enums must be strings") } - *e = CircumisedEnum(str) + *e = CircumcisedEnum(str) if !e.IsValid() { - return fmt.Errorf("%s is not a valid CircumisedEnum", str) + return fmt.Errorf("%s is not a valid CircumcisedEnum", str) } return nil } -func (e CircumisedEnum) MarshalGQL(w io.Writer) { +func (e CircumcisedEnum) MarshalGQL(w io.Writer) { fmt.Fprint(w, strconv.Quote(e.String())) } type CircumcisionCriterionInput struct { - Value []CircumisedEnum `json:"value"` + Value []CircumcisedEnum `json:"value"` Modifier CriterionModifier `json:"modifier"` } @@ -137,7 +137,11 @@ type PerformerFilterType struct { // Filter by circumcision Circumcised *CircumcisionCriterionInput `json:"circumcised"` // Filter by career length - CareerLength *StringCriterionInput `json:"career_length"` + CareerLength *StringCriterionInput `json:"career_length"` // deprecated + // Filter by career start year + CareerStart *DateCriterionInput `json:"career_start"` + // Filter by career end year + CareerEnd *DateCriterionInput `json:"career_end"` // Filter by tattoos Tattoos *StringCriterionInput `json:"tattoos"` // Filter by piercings @@ -154,6 +158,8 @@ type PerformerFilterType struct { TagCount *IntCriterionInput `json:"tag_count"` // Filter by scene count SceneCount *IntCriterionInput `json:"scene_count"` + // Filter by scene marker count (via scene) + MarkerCount *IntCriterionInput `json:"marker_count"` // Filter by image count ImageCount *IntCriterionInput `json:"image_count"` // Filter by gallery count @@ -166,6 +172,8 @@ type PerformerFilterType struct { StashID *StringCriterionInput `json:"stash_id"` // Filter by StashID Endpoint StashIDEndpoint *StashIDCriterionInput `json:"stash_id_endpoint"` + // Filter by StashIDs 
Endpoint + StashIDsEndpoint *StashIDsCriterionInput `json:"stash_ids_endpoint"` // Filter by rating expressed as 1-100 Rating100 *IntCriterionInput `json:"rating100"` // Filter by url @@ -196,6 +204,8 @@ type PerformerFilterType struct { GalleriesFilter *GalleryFilterType `json:"galleries_filter"` // Filter by related tags that meet this criteria TagsFilter *TagFilterType `json:"tags_filter"` + // Filter by related scene markers (via scene) that meet this criteria + MarkersFilter *SceneMarkerFilterType `json:"markers_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at @@ -206,30 +216,32 @@ type PerformerFilterType struct { } type PerformerCreateInput struct { - Name string `json:"name"` - Disambiguation *string `json:"disambiguation"` - URL *string `json:"url"` // deprecated - Urls []string `json:"urls"` - Gender *GenderEnum `json:"gender"` - Birthdate *string `json:"birthdate"` - Ethnicity *string `json:"ethnicity"` - Country *string `json:"country"` - EyeColor *string `json:"eye_color"` - Height *string `json:"height"` - HeightCm *int `json:"height_cm"` - Measurements *string `json:"measurements"` - FakeTits *string `json:"fake_tits"` - PenisLength *float64 `json:"penis_length"` - Circumcised *CircumisedEnum `json:"circumcised"` - CareerLength *string `json:"career_length"` - Tattoos *string `json:"tattoos"` - Piercings *string `json:"piercings"` - Aliases *string `json:"aliases"` - AliasList []string `json:"alias_list"` - Twitter *string `json:"twitter"` // deprecated - Instagram *string `json:"instagram"` // deprecated - Favorite *bool `json:"favorite"` - TagIds []string `json:"tag_ids"` + Name string `json:"name"` + Disambiguation *string `json:"disambiguation"` + URL *string `json:"url"` // deprecated + Urls []string `json:"urls"` + Gender *GenderEnum `json:"gender"` + Birthdate *string `json:"birthdate"` + Ethnicity *string `json:"ethnicity"` + Country *string `json:"country"` + EyeColor *string 
`json:"eye_color"` + Height *string `json:"height"` + HeightCm *int `json:"height_cm"` + Measurements *string `json:"measurements"` + FakeTits *string `json:"fake_tits"` + PenisLength *float64 `json:"penis_length"` + Circumcised *CircumcisedEnum `json:"circumcised"` + CareerLength *string `json:"career_length"` + CareerStart *string `json:"career_start"` + CareerEnd *string `json:"career_end"` + Tattoos *string `json:"tattoos"` + Piercings *string `json:"piercings"` + Aliases *string `json:"aliases"` + AliasList []string `json:"alias_list"` + Twitter *string `json:"twitter"` // deprecated + Instagram *string `json:"instagram"` // deprecated + Favorite *bool `json:"favorite"` + TagIds []string `json:"tag_ids"` // This should be a URL or a base64 encoded data URL Image *string `json:"image"` StashIds []StashIDInput `json:"stash_ids"` @@ -244,31 +256,33 @@ type PerformerCreateInput struct { } type PerformerUpdateInput struct { - ID string `json:"id"` - Name *string `json:"name"` - Disambiguation *string `json:"disambiguation"` - URL *string `json:"url"` // deprecated - Urls []string `json:"urls"` - Gender *GenderEnum `json:"gender"` - Birthdate *string `json:"birthdate"` - Ethnicity *string `json:"ethnicity"` - Country *string `json:"country"` - EyeColor *string `json:"eye_color"` - Height *string `json:"height"` - HeightCm *int `json:"height_cm"` - Measurements *string `json:"measurements"` - FakeTits *string `json:"fake_tits"` - PenisLength *float64 `json:"penis_length"` - Circumcised *CircumisedEnum `json:"circumcised"` - CareerLength *string `json:"career_length"` - Tattoos *string `json:"tattoos"` - Piercings *string `json:"piercings"` - Aliases *string `json:"aliases"` - AliasList []string `json:"alias_list"` - Twitter *string `json:"twitter"` // deprecated - Instagram *string `json:"instagram"` // deprecated - Favorite *bool `json:"favorite"` - TagIds []string `json:"tag_ids"` + ID string `json:"id"` + Name *string `json:"name"` + Disambiguation *string 
`json:"disambiguation"` + URL *string `json:"url"` // deprecated + Urls []string `json:"urls"` + Gender *GenderEnum `json:"gender"` + Birthdate *string `json:"birthdate"` + Ethnicity *string `json:"ethnicity"` + Country *string `json:"country"` + EyeColor *string `json:"eye_color"` + Height *string `json:"height"` + HeightCm *int `json:"height_cm"` + Measurements *string `json:"measurements"` + FakeTits *string `json:"fake_tits"` + PenisLength *float64 `json:"penis_length"` + Circumcised *CircumcisedEnum `json:"circumcised"` + CareerLength *string `json:"career_length"` + CareerStart *string `json:"career_start"` + CareerEnd *string `json:"career_end"` + Tattoos *string `json:"tattoos"` + Piercings *string `json:"piercings"` + Aliases *string `json:"aliases"` + AliasList []string `json:"alias_list"` + Twitter *string `json:"twitter"` // deprecated + Instagram *string `json:"instagram"` // deprecated + Favorite *bool `json:"favorite"` + TagIds []string `json:"tag_ids"` // This should be a URL or a base64 encoded data URL Image *string `json:"image"` StashIds []StashIDInput `json:"stash_ids"` diff --git a/pkg/models/query.go b/pkg/models/query.go index 1b2d347b9..a6e15bc4e 100644 --- a/pkg/models/query.go +++ b/pkg/models/query.go @@ -5,7 +5,7 @@ type QueryOptions struct { Count bool } -type QueryResult struct { - IDs []int +type QueryResult[T comparable] struct { + IDs []T Count int } diff --git a/pkg/models/repository_file.go b/pkg/models/repository_file.go index 0819b25a5..e1ac0b213 100644 --- a/pkg/models/repository_file.go +++ b/pkg/models/repository_file.go @@ -13,9 +13,9 @@ type FileGetter interface { // FileFinder provides methods to find files. 
type FileFinder interface { FileGetter - FindAllByPath(ctx context.Context, path string) ([]File, error) - FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]File, error) - FindByPath(ctx context.Context, path string) (File, error) + FindAllByPath(ctx context.Context, path string, caseSensitive bool) ([]File, error) + FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit, offset int) ([]File, error) + FindByPath(ctx context.Context, path string, caseSensitive bool) (File, error) FindByFingerprint(ctx context.Context, fp Fingerprint) ([]File, error) FindByZipFileID(ctx context.Context, zipFileID FileID) ([]File, error) FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]File, error) diff --git a/pkg/models/repository_folder.go b/pkg/models/repository_folder.go index c3f82f529..1169e53ac 100644 --- a/pkg/models/repository_folder.go +++ b/pkg/models/repository_folder.go @@ -5,15 +5,22 @@ import "context" // FolderGetter provides methods to get folders by ID. type FolderGetter interface { Find(ctx context.Context, id FolderID) (*Folder, error) + FindMany(ctx context.Context, id []FolderID) ([]*Folder, error) } // FolderFinder provides methods to find folders. 
type FolderFinder interface { FolderGetter - FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*Folder, error) - FindByPath(ctx context.Context, path string) (*Folder, error) + FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit, offset int) ([]*Folder, error) + FindByPath(ctx context.Context, path string, caseSensitive bool) (*Folder, error) FindByZipFileID(ctx context.Context, zipFileID FileID) ([]*Folder, error) FindByParentFolderID(ctx context.Context, parentFolderID FolderID) ([]*Folder, error) + GetManyParentFolderIDs(ctx context.Context, folderIDs []FolderID) ([][]FolderID, error) + GetManySubFolderIDs(ctx context.Context, folderIDs []FolderID) ([][]FolderID, error) +} + +type FolderQueryer interface { + Query(ctx context.Context, options FolderQueryOptions) (*FolderQueryResult, error) } type FolderCounter interface { @@ -47,6 +54,7 @@ type FolderFinderDestroyer interface { // FolderReader provides all methods to read folders. type FolderReader interface { FolderFinder + FolderQueryer FolderCounter } diff --git a/pkg/models/repository_gallery.go b/pkg/models/repository_gallery.go index 0cfb9964f..8fc3b29d5 100644 --- a/pkg/models/repository_gallery.go +++ b/pkg/models/repository_gallery.go @@ -37,12 +37,12 @@ type GalleryCounter interface { // GalleryCreator provides methods to create galleries. type GalleryCreator interface { - Create(ctx context.Context, newGallery *Gallery, fileIDs []FileID) error + Create(ctx context.Context, newGallery *CreateGalleryInput) error } // GalleryUpdater provides methods to update galleries. 
type GalleryUpdater interface { - Update(ctx context.Context, updatedGallery *Gallery) error + Update(ctx context.Context, updatedGallery *UpdateGalleryInput) error UpdatePartial(ctx context.Context, id int, updatedGallery GalleryPartial) (*Gallery, error) UpdateImages(ctx context.Context, galleryID int, imageIDs []int) error } @@ -70,6 +70,7 @@ type GalleryReader interface { PerformerIDLoader TagIDLoader FileLoader + CustomFieldsReader All(ctx context.Context) ([]*Gallery, error) } @@ -80,6 +81,9 @@ type GalleryWriter interface { GalleryUpdater GalleryDestroyer + CustomFieldsWriter + + AddSceneIDs(ctx context.Context, galleryID int, sceneIDs []int) error AddFileID(ctx context.Context, id int, fileID FileID) error AddImages(ctx context.Context, galleryID int, imageIDs ...int) error RemoveImages(ctx context.Context, galleryID int, imageIDs ...int) error diff --git a/pkg/models/repository_group.go b/pkg/models/repository_group.go index 704390d77..d7f74de64 100644 --- a/pkg/models/repository_group.go +++ b/pkg/models/repository_group.go @@ -68,6 +68,7 @@ type GroupReader interface { TagIDLoader ContainingGroupLoader SubGroupLoader + CustomFieldsReader All(ctx context.Context) ([]*Group, error) GetFrontImage(ctx context.Context, groupID int) ([]byte, error) @@ -81,6 +82,7 @@ type GroupWriter interface { GroupCreator GroupUpdater GroupDestroyer + CustomFieldsWriter } // GroupReaderWriter provides all group methods. diff --git a/pkg/models/repository_image.go b/pkg/models/repository_image.go index 1455d7762..99dab3479 100644 --- a/pkg/models/repository_image.go +++ b/pkg/models/repository_image.go @@ -38,11 +38,12 @@ type ImageCounter interface { CountByGalleryID(ctx context.Context, galleryID int) (int, error) OCount(ctx context.Context) (int, error) OCountByPerformerID(ctx context.Context, performerID int) (int, error) + OCountByStudioID(ctx context.Context, studioID int) (int, error) } // ImageCreator provides methods to create images. 
type ImageCreator interface { - Create(ctx context.Context, newImage *Image, fileIDs []FileID) error + Create(ctx context.Context, newImage *CreateImageInput) error } // ImageUpdater provides methods to update images. @@ -77,6 +78,7 @@ type ImageReader interface { FileLoader GalleryCoverFinder + CustomFieldsReader All(ctx context.Context) ([]*Image, error) Size(ctx context.Context) (float64, error) @@ -87,6 +89,7 @@ type ImageWriter interface { ImageCreator ImageUpdater ImageDestroyer + CustomFieldsWriter AddFileID(ctx context.Context, id int, fileID FileID) error RemoveFileID(ctx context.Context, id int, fileID FileID) error diff --git a/pkg/models/repository_performer.go b/pkg/models/repository_performer.go index ad0b61da0..175208c9d 100644 --- a/pkg/models/repository_performer.go +++ b/pkg/models/repository_performer.go @@ -92,6 +92,8 @@ type PerformerWriter interface { PerformerCreator PerformerUpdater PerformerDestroyer + + Merge(ctx context.Context, source []int, destination int) error } // PerformerReaderWriter provides all performer methods. diff --git a/pkg/models/repository_scene.go b/pkg/models/repository_scene.go index f0fff4ac7..6b795c3af 100644 --- a/pkg/models/repository_scene.go +++ b/pkg/models/repository_scene.go @@ -44,6 +44,8 @@ type SceneCounter interface { CountMissingChecksum(ctx context.Context) (int, error) CountMissingOSHash(ctx context.Context) (int, error) OCountByPerformerID(ctx context.Context, performerID int) (int, error) + OCountByGroupID(ctx context.Context, groupID int) (int, error) + OCountByStudioID(ctx context.Context, studioID int) (int, error) } // SceneCreator provides methods to create scenes. 
@@ -102,6 +104,7 @@ type SceneReader interface { SceneGroupLoader StashIDLoader VideoFileLoader + CustomFieldsReader All(ctx context.Context) ([]*Scene, error) Wall(ctx context.Context, q *string) ([]*Scene, error) @@ -138,6 +141,7 @@ type SceneWriter interface { ViewHistoryWriter SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) ResetActivity(ctx context.Context, sceneID int, resetResume bool, resetDuration bool) (bool, error) + CustomFieldsWriter } // SceneReaderWriter provides all scene methods. diff --git a/pkg/models/repository_studio.go b/pkg/models/repository_studio.go index a2b9202f3..54fb6ed47 100644 --- a/pkg/models/repository_studio.go +++ b/pkg/models/repository_studio.go @@ -42,12 +42,12 @@ type StudioCounter interface { // StudioCreator provides methods to create studios. type StudioCreator interface { - Create(ctx context.Context, newStudio *Studio) error + Create(ctx context.Context, newStudio *CreateStudioInput) error } // StudioUpdater provides methods to update studios. 
type StudioUpdater interface { - Update(ctx context.Context, updatedStudio *Studio) error + Update(ctx context.Context, updatedStudio *UpdateStudioInput) error UpdatePartial(ctx context.Context, updatedStudio StudioPartial) (*Studio, error) UpdateImage(ctx context.Context, studioID int, image []byte) error } @@ -77,6 +77,9 @@ type StudioReader interface { AliasLoader StashIDLoader TagIDLoader + URLLoader + + CustomFieldsReader All(ctx context.Context) ([]*Studio, error) GetImage(ctx context.Context, studioID int) ([]byte, error) diff --git a/pkg/models/repository_tag.go b/pkg/models/repository_tag.go index 2b073cae0..02dfe0cb6 100644 --- a/pkg/models/repository_tag.go +++ b/pkg/models/repository_tag.go @@ -25,6 +25,8 @@ type TagFinder interface { FindByStudioID(ctx context.Context, studioID int) ([]*Tag, error) FindByName(ctx context.Context, name string, nocase bool) (*Tag, error) FindByNames(ctx context.Context, names []string, nocase bool) ([]*Tag, error) + FindByStashID(ctx context.Context, stashID StashID) ([]*Tag, error) + FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*Tag, error) } // TagQueryer provides methods to query tags. @@ -50,12 +52,12 @@ type TagCounter interface { // TagCreator provides methods to create tags. type TagCreator interface { - Create(ctx context.Context, newTag *Tag) error + Create(ctx context.Context, newTag *CreateTagInput) error } // TagUpdater provides methods to update tags. 
type TagUpdater interface { - Update(ctx context.Context, updatedTag *Tag) error + Update(ctx context.Context, updatedTag *UpdateTagInput) error UpdatePartial(ctx context.Context, id int, updateTag TagPartial) (*Tag, error) UpdateAliases(ctx context.Context, tagID int, aliases []string) error UpdateImage(ctx context.Context, tagID int, image []byte) error @@ -76,6 +78,7 @@ type TagFinderCreator interface { type TagCreatorUpdater interface { TagCreator TagUpdater + CustomFieldsWriter } // TagReader provides all methods to read tags. @@ -87,6 +90,8 @@ type TagReader interface { AliasLoader TagRelationLoader + StashIDLoader + CustomFieldsReader All(ctx context.Context) ([]*Tag, error) GetImage(ctx context.Context, tagID int) ([]byte, error) @@ -98,6 +103,7 @@ type TagWriter interface { TagCreator TagUpdater TagDestroyer + CustomFieldsWriter Merge(ctx context.Context, source []int, destination int) error } diff --git a/pkg/models/scene.go b/pkg/models/scene.go index c7be343d9..839452501 100644 --- a/pkg/models/scene.go +++ b/pkg/models/scene.go @@ -2,10 +2,28 @@ package models import "context" -type PHashDuplicationCriterionInput struct { +type DuplicationCriterionInput struct { + // Deprecated: Use Phash field instead. Kept for backwards compatibility. Duplicated *bool `json:"duplicated"` - // Currently unimplemented + // Currently unimplemented. Intended for phash distance matching. Distance *int `json:"distance"` + // Filter by phash duplication + Phash *bool `json:"phash"` + // Filter by URL duplication + URL *bool `json:"url"` + // Filter by Stash ID duplication + StashID *bool `json:"stash_id"` + // Filter by title duplication + Title *bool `json:"title"` +} + +type FileDuplicationCriterionInput struct { + // Deprecated: Use Phash field instead. Kept for backwards compatibility. + Duplicated *bool `json:"duplicated"` + // Currently unimplemented. Intended for phash distance matching. 
+ Distance *int `json:"distance"` + // Filter by phash duplication + Phash *bool `json:"phash"` } type SceneFilterType struct { @@ -33,8 +51,8 @@ type SceneFilterType struct { Organized *bool `json:"organized"` // Filter by o-counter OCounter *IntCriterionInput `json:"o_counter"` - // Filter Scenes that have an exact phash match available - Duplicated *PHashDuplicationCriterionInput `json:"duplicated"` + // Filter Scenes by duplication criteria + Duplicated *DuplicationCriterionInput `json:"duplicated"` // Filter by resolution Resolution *ResolutionCriterionInput `json:"resolution"` // Filter by orientation @@ -79,6 +97,10 @@ type SceneFilterType struct { StashID *StringCriterionInput `json:"stash_id"` // Filter by StashID Endpoint StashIDEndpoint *StashIDCriterionInput `json:"stash_id_endpoint"` + // Filter by StashIDs Endpoint + StashIDsEndpoint *StashIDsCriterionInput `json:"stash_ids_endpoint"` + // Filter by StashID count + StashIDCount *IntCriterionInput `json:"stash_id_count"` // Filter by url URL *StringCriterionInput `json:"url"` // Filter by interactive @@ -111,10 +133,15 @@ type SceneFilterType struct { MoviesFilter *GroupFilterType `json:"movies_filter"` // Filter by related markers that meet this criteria MarkersFilter *SceneMarkerFilterType `json:"markers_filter"` + // Filter by related files that meet this criteria + FilesFilter *FileFilterType `json:"files_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` + + // Filter by custom fields + CustomFields []CustomFieldCriterionInput `json:"custom_fields"` } type SceneQueryOptions struct { @@ -126,7 +153,7 @@ type SceneQueryOptions struct { } type SceneQueryResult struct { - QueryResult + QueryResult[int] TotalDuration float64 TotalSize float64 @@ -168,7 +195,8 @@ type SceneCreateInput struct { // The first id will be assigned as primary. 
// Files will be reassigned from existing scenes if applicable. // Files must not already be primary for another scene. - FileIds []string `json:"file_ids"` + FileIds []string `json:"file_ids"` + CustomFields map[string]any `json:"custom_fields,omitempty"` } type SceneUpdateInput struct { @@ -197,18 +225,21 @@ type SceneUpdateInput struct { PlayDuration *float64 `json:"play_duration"` PlayCount *int `json:"play_count"` PrimaryFileID *string `json:"primary_file_id"` + CustomFields *CustomFieldsInput } type SceneDestroyInput struct { - ID string `json:"id"` - DeleteFile *bool `json:"delete_file"` - DeleteGenerated *bool `json:"delete_generated"` + ID string `json:"id"` + DeleteFile *bool `json:"delete_file"` + DeleteGenerated *bool `json:"delete_generated"` + DestroyFileEntry *bool `json:"destroy_file_entry"` } type ScenesDestroyInput struct { - Ids []string `json:"ids"` - DeleteFile *bool `json:"delete_file"` - DeleteGenerated *bool `json:"delete_generated"` + Ids []string `json:"ids"` + DeleteFile *bool `json:"delete_file"` + DeleteGenerated *bool `json:"delete_generated"` + DestroyFileEntry *bool `json:"destroy_file_entry"` } func NewSceneQueryResult(getter SceneGetter) *SceneQueryResult { diff --git a/pkg/models/stash_ids.go b/pkg/models/stash_ids.go index 7751c2ef0..d73bfd880 100644 --- a/pkg/models/stash_ids.go +++ b/pkg/models/stash_ids.go @@ -79,10 +79,23 @@ func (s StashIDInputs) ToStashIDs() StashIDs { return nil } - ret := make(StashIDs, len(s)) - for i, v := range s { - ret[i] = v.ToStashID() + // #2800 - deduplicate StashIDs based on endpoint and stash_id + ret := make(StashIDs, 0, len(s)) + seen := make(map[string]map[string]bool) + + for _, v := range s { + stashID := v.ToStashID() + + if seen[stashID.Endpoint] == nil { + seen[stashID.Endpoint] = make(map[string]bool) + } + + if !seen[stashID.Endpoint][stashID.StashID] { + seen[stashID.Endpoint][stashID.StashID] = true + ret = append(ret, stashID) + } } + return ret } @@ -116,8 +129,16 @@ func (u 
*UpdateStashIDs) Set(v StashID) { type StashIDCriterionInput struct { // If present, this value is treated as a predicate. - // That is, it will filter based on stash_ids with the matching endpoint + // That is, it will filter based on stash_id with the matching endpoint Endpoint *string `json:"endpoint"` StashID *string `json:"stash_id"` Modifier CriterionModifier `json:"modifier"` } + +type StashIDsCriterionInput struct { + // If present, this value is treated as a predicate. + // That is, it will filter based on stash_ids with the matching endpoint + Endpoint *string `json:"endpoint"` + StashIDs []*string `json:"stash_ids"` + Modifier CriterionModifier `json:"modifier"` +} diff --git a/pkg/models/studio.go b/pkg/models/studio.go index 03ea8a84d..7ad8719ac 100644 --- a/pkg/models/studio.go +++ b/pkg/models/studio.go @@ -10,6 +10,8 @@ type StudioFilterType struct { StashID *StringCriterionInput `json:"stash_id"` // Filter by StashID Endpoint StashIDEndpoint *StashIDCriterionInput `json:"stash_id_endpoint"` + // Filter by StashIDs Endpoint + StashIDsEndpoint *StashIDsCriterionInput `json:"stash_ids_endpoint"` // Filter to only include studios missing this property IsMissing *string `json:"is_missing"` // Filter by rating expressed as 1-100 @@ -26,6 +28,8 @@ type StudioFilterType struct { ImageCount *IntCriterionInput `json:"image_count"` // Filter by gallery count GalleryCount *IntCriterionInput `json:"gallery_count"` + // Filter by group count + GroupCount *IntCriterionInput `json:"group_count"` // Filter by url URL *StringCriterionInput `json:"url"` // Filter by studio aliases @@ -34,22 +38,30 @@ type StudioFilterType struct { ChildCount *IntCriterionInput `json:"child_count"` // Filter by autotag ignore value IgnoreAutoTag *bool `json:"ignore_auto_tag"` + // Filter by organized + Organized *bool `json:"organized"` // Filter by related scenes that meet this criteria ScenesFilter *SceneFilterType `json:"scenes_filter"` // Filter by related images that meet this 
criteria ImagesFilter *ImageFilterType `json:"images_filter"` // Filter by related galleries that meet this criteria GalleriesFilter *GalleryFilterType `json:"galleries_filter"` + // Filter by related groups that meet this criteria + GroupsFilter *GroupFilterType `json:"groups_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` + + // Filter by custom fields + CustomFields []CustomFieldCriterionInput `json:"custom_fields"` } type StudioCreateInput struct { - Name string `json:"name"` - URL *string `json:"url"` - ParentID *string `json:"parent_id"` + Name string `json:"name"` + URL *string `json:"url"` // deprecated + Urls []string `json:"urls"` + ParentID *string `json:"parent_id"` // This should be a URL or a base64 encoded data URL Image *string `json:"image"` StashIds []StashIDInput `json:"stash_ids"` @@ -59,13 +71,17 @@ type StudioCreateInput struct { Aliases []string `json:"aliases"` TagIds []string `json:"tag_ids"` IgnoreAutoTag *bool `json:"ignore_auto_tag"` + Organized *bool `json:"organized"` + + CustomFields map[string]interface{} `json:"custom_fields"` } type StudioUpdateInput struct { - ID string `json:"id"` - Name *string `json:"name"` - URL *string `json:"url"` - ParentID *string `json:"parent_id"` + ID string `json:"id"` + Name *string `json:"name"` + URL *string `json:"url"` // deprecated + Urls []string `json:"urls"` + ParentID *string `json:"parent_id"` // This should be a URL or a base64 encoded data URL Image *string `json:"image"` StashIds []StashIDInput `json:"stash_ids"` @@ -75,4 +91,7 @@ type StudioUpdateInput struct { Aliases []string `json:"aliases"` TagIds []string `json:"tag_ids"` IgnoreAutoTag *bool `json:"ignore_auto_tag"` + Organized *bool `json:"organized"` + + CustomFields CustomFieldsInput `json:"custom_fields"` } diff --git a/pkg/models/tag.go b/pkg/models/tag.go index 1971a8bb6..b166e5a69 100644 --- 
a/pkg/models/tag.go +++ b/pkg/models/tag.go @@ -40,14 +40,29 @@ type TagFilterType struct { ChildCount *IntCriterionInput `json:"child_count"` // Filter by autotag ignore value IgnoreAutoTag *bool `json:"ignore_auto_tag"` + // Filter by StashID Endpoint + StashIDEndpoint *StashIDCriterionInput `json:"stash_id_endpoint"` + // Filter by StashIDs Endpoint + StashIDsEndpoint *StashIDsCriterionInput `json:"stash_ids_endpoint"` // Filter by related scenes that meet this criteria ScenesFilter *SceneFilterType `json:"scenes_filter"` // Filter by related images that meet this criteria ImagesFilter *ImageFilterType `json:"images_filter"` // Filter by related galleries that meet this criteria GalleriesFilter *GalleryFilterType `json:"galleries_filter"` + // Filter by related groups that meet this criteria + GroupsFilter *GroupFilterType `json:"groups_filter"` + // Filter by related performers that meet this criteria + PerformersFilter *PerformerFilterType `json:"performers_filter"` + // Filter by related studios that meet this criteria + StudiosFilter *StudioFilterType `json:"studios_filter"` + // Filter by related scene markers that meet this criteria + MarkersFilter *SceneMarkerFilterType `json:"markers_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` + + // Filter by custom fields + CustomFields []CustomFieldCriterionInput `json:"custom_fields"` } diff --git a/pkg/performer/export.go b/pkg/performer/export.go index 1455fb7bf..d7807f651 100644 --- a/pkg/performer/export.go +++ b/pkg/performer/export.go @@ -30,7 +30,6 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode EyeColor: performer.EyeColor, Measurements: performer.Measurements, FakeTits: performer.FakeTits, - CareerLength: performer.CareerLength, Tattoos: performer.Tattoos, Piercings: performer.Piercings, Favorite: performer.Favorite, @@ -71,6 +70,13 @@ func ToJSON(ctx 
context.Context, reader ImageAliasStashIDGetter, performer *mode newPerformerJSON.PenisLength = *performer.PenisLength } + if performer.CareerStart != nil { + newPerformerJSON.CareerStart = performer.CareerStart.String() + } + if performer.CareerEnd != nil { + newPerformerJSON.CareerEnd = performer.CareerEnd.String() + } + if err := performer.LoadAliases(ctx, reader); err != nil { return nil, fmt.Errorf("loading performer aliases: %w", err) } diff --git a/pkg/performer/export_test.go b/pkg/performer/export_test.go index e51049e14..2cf476321 100644 --- a/pkg/performer/export_test.go +++ b/pkg/performer/export_test.go @@ -26,7 +26,6 @@ const ( performerName = "testPerformer" disambiguation = "disambiguation" url = "url" - careerLength = "careerLength" country = "country" ethnicity = "ethnicity" eyeColor = "eyeColor" @@ -49,8 +48,10 @@ var ( rating = 5 height = 123 weight = 60 + careerStart, _ = models.ParseDate("2005") + careerEnd, _ = models.ParseDate("2015") penisLength = 1.23 - circumcisedEnum = models.CircumisedEnumCut + circumcisedEnum = models.CircumcisedEnumCut circumcised = circumcisedEnum.String() emptyCustomFields = make(map[string]interface{}) @@ -87,7 +88,8 @@ func createFullPerformer(id int, name string) *models.Performer { URLs: models.NewRelatedStrings([]string{url, twitter, instagram}), Aliases: models.NewRelatedStrings(aliases), Birthdate: &birthDate, - CareerLength: careerLength, + CareerStart: &careerStart, + CareerEnd: &careerEnd, Country: country, Ethnicity: ethnicity, EyeColor: eyeColor, @@ -132,7 +134,8 @@ func createFullJSONPerformer(name string, image string, withCustomFields bool) * URLs: []string{url, twitter, instagram}, Aliases: aliases, Birthdate: birthDate.String(), - CareerLength: careerLength, + CareerStart: careerStart.String(), + CareerEnd: careerEnd.String(), Country: country, Ethnicity: ethnicity, EyeColor: eyeColor, diff --git a/pkg/performer/import.go b/pkg/performer/import.go index 3aaacdb8b..62b4d87d0 100644 --- 
a/pkg/performer/import.go +++ b/pkg/performer/import.go @@ -32,14 +32,17 @@ type Importer struct { } func (i *Importer) PreImport(ctx context.Context) error { - i.performer = performerJSONToPerformer(i.Input) + var err error + i.performer, err = performerJSONToPerformer(i.Input) + if err != nil { + return err + } i.customFields = i.Input.CustomFields if err := i.populateTags(ctx); err != nil { return err } - var err error if len(i.Input.Image) > 0 { i.imageData, err = utils.ProcessBase64Image(i.Input.Image) if err != nil { @@ -107,7 +110,9 @@ func createTags(ctx context.Context, tagWriter models.TagFinderCreator, names [] newTag := models.NewTag() newTag.Name = name - err := tagWriter.Create(ctx, &newTag) + err := tagWriter.Create(ctx, &models.CreateTagInput{ + Tag: &newTag, + }) if err != nil { return nil, err } @@ -194,7 +199,7 @@ func (i *Importer) Update(ctx context.Context, id int) error { return nil } -func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Performer { +func performerJSONToPerformer(performerJSON jsonschema.Performer) (models.Performer, error) { newPerformer := models.Performer{ Name: performerJSON.Name, Disambiguation: performerJSON.Disambiguation, @@ -203,7 +208,6 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform EyeColor: performerJSON.EyeColor, Measurements: performerJSON.Measurements, FakeTits: performerJSON.FakeTits, - CareerLength: performerJSON.CareerLength, Tattoos: performerJSON.Tattoos, Piercings: performerJSON.Piercings, Aliases: models.NewRelatedStrings(performerJSON.Aliases), @@ -233,7 +237,7 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform } if len(urls) > 0 { - newPerformer.URLs = models.NewRelatedStrings([]string{performerJSON.URL}) + newPerformer.URLs = models.NewRelatedStrings(urls) } } @@ -243,7 +247,7 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform } if performerJSON.Circumcised != "" { - v := 
models.CircumisedEnum(performerJSON.Circumcised) + v := models.CircumcisedEnum(performerJSON.Circumcised) newPerformer.Circumcised = &v } @@ -280,5 +284,24 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform } } - return newPerformer + // prefer explicit career_start/career_end, fall back to parsing legacy career_length + if performerJSON.CareerStart != "" || performerJSON.CareerEnd != "" { + careerStart, err := models.ParseDate(performerJSON.CareerStart) + if err == nil { + newPerformer.CareerStart = &careerStart + } + careerEnd, err := models.ParseDate(performerJSON.CareerEnd) + if err == nil { + newPerformer.CareerEnd = &careerEnd + } + } else if performerJSON.CareerLength != "" { + start, end, err := models.ParseYearRangeString(performerJSON.CareerLength) + if err != nil { + return models.Performer{}, fmt.Errorf("invalid career_length %q: %w", performerJSON.CareerLength, err) + } + newPerformer.CareerStart = start + newPerformer.CareerEnd = end + } + + return newPerformer, nil } diff --git a/pkg/performer/import_test.go b/pkg/performer/import_test.go index 0a3f86291..0d5f80d01 100644 --- a/pkg/performer/import_test.go +++ b/pkg/performer/import_test.go @@ -111,9 +111,9 @@ func TestImporterPreImportWithMissingTag(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) - db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { - t := args.Get(1).(*models.Tag) - t.ID = existingTagID + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.CreateTagInput) + t.Tag.ID = existingTagID }).Return(nil) err := i.PreImport(testCtx) @@ -146,7 +146,7 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { } db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once() - db.Tag.On("Create", testCtx, 
mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.CreateTagInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) @@ -315,3 +315,86 @@ func TestUpdate(t *testing.T) { db.AssertExpectations(t) } + +func TestImportCareerFields(t *testing.T) { + startYear, _ := models.ParseDate("2005") + endYear, _ := models.ParseDate("2015") + + // explicit career_start/career_end should be used directly + t.Run("explicit fields", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + CareerStart: startYear.String(), + CareerEnd: endYear.String(), + } + + p, err := performerJSONToPerformer(input) + assert.Nil(t, err) + assert.Equal(t, &startYear, p.CareerStart) + assert.Equal(t, &endYear, p.CareerEnd) + }) + + // explicit fields take priority over legacy career_length + t.Run("explicit fields override legacy", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + CareerStart: startYear.String(), + CareerEnd: endYear.String(), + CareerLength: "1990 - 1995", + } + + p, err := performerJSONToPerformer(input) + assert.Nil(t, err) + assert.Equal(t, &startYear, p.CareerStart) + assert.Equal(t, &endYear, p.CareerEnd) + }) + + // legacy career_length should be parsed when explicit fields are absent + t.Run("legacy career_length fallback", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + CareerLength: "2005 - 2015", + } + + p, err := performerJSONToPerformer(input) + assert.Nil(t, err) + assert.Equal(t, &startYear, p.CareerStart) + assert.Equal(t, &endYear, p.CareerEnd) + }) + + // legacy career_length with only start year + t.Run("legacy career_length start only", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + CareerLength: "2005 -", + } + + p, err := performerJSONToPerformer(input) + assert.Nil(t, err) + assert.Equal(t, &startYear, p.CareerStart) + assert.Nil(t, p.CareerEnd) + }) + + // 
unparseable career_length should return an error + t.Run("legacy career_length unparseable", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + CareerLength: "not a year range", + } + + _, err := performerJSONToPerformer(input) + assert.NotNil(t, err) + }) + + // no career fields at all + t.Run("no career fields", func(t *testing.T) { + input := jsonschema.Performer{ + Name: "test", + } + + p, err := performerJSONToPerformer(input) + assert.Nil(t, err) + assert.Nil(t, p.CareerStart) + assert.Nil(t, p.CareerEnd) + }) +} diff --git a/pkg/performer/validate.go b/pkg/performer/validate.go index 68f7a8ef5..3baaa182b 100644 --- a/pkg/performer/validate.go +++ b/pkg/performer/validate.go @@ -225,6 +225,11 @@ func ValidateUpdateAliases(existing models.Performer, name models.OptionalString newName = name.Value } + // If aliases is nil, we're only changing the name - check existing aliases against new name + if aliases == nil { + return ValidateAliases(newName, existing.Aliases) + } + newAliases := aliases.Apply(existing.Aliases.List()) return ValidateAliases(newName, models.NewRelatedStrings(newAliases)) diff --git a/pkg/performer/validate_test.go b/pkg/performer/validate_test.go index 33f4b1cec..afd9c01c5 100644 --- a/pkg/performer/validate_test.go +++ b/pkg/performer/validate_test.go @@ -213,12 +213,12 @@ func TestValidateUpdateAliases(t *testing.T) { want error }{ {"both unset", osUnset, nil, nil}, - {"invalid name set", os2, nil, &DuplicateAliasError{name2}}, + {"name conflicts with alias", os2, nil, &DuplicateAliasError{name2}}, {"valid name set", os3, nil, nil}, {"valid aliases empty", os1, []string{}, nil}, - {"invalid aliases set", osUnset, []string{name1U}, &DuplicateAliasError{name1U}}, + {"alias matches name", osUnset, []string{name1U}, &DuplicateAliasError{name1U}}, {"valid aliases set", osUnset, []string{name3, name2}, nil}, - {"invalid both set", os4, []string{name4}, &DuplicateAliasError{name4}}, + {"alias matches new name", os4, 
[]string{name4}, &DuplicateAliasError{name4}}, {"valid both set", os2, []string{name1}, nil}, } diff --git a/pkg/pkg/cache.go b/pkg/pkg/cache.go index 9d36bdd1d..e94b2cb41 100644 --- a/pkg/pkg/cache.go +++ b/pkg/pkg/cache.go @@ -1,6 +1,7 @@ package pkg import ( + "sync" "time" ) @@ -10,22 +11,23 @@ type cacheEntry struct { } type repositoryCache struct { + mu sync.RWMutex // cache maps the URL to the last modified time and the data cache map[string]cacheEntry } -func (c *repositoryCache) ensureCache() { - if c.cache == nil { - c.cache = make(map[string]cacheEntry) - } -} - func (c *repositoryCache) lastModified(url string) *time.Time { if c == nil { return nil } - c.ensureCache() + c.mu.RLock() + defer c.mu.RUnlock() + + if c.cache == nil { + return nil + } + e, found := c.cache[url] if !found { @@ -36,7 +38,13 @@ func (c *repositoryCache) lastModified(url string) *time.Time { } func (c *repositoryCache) getPackageList(url string) []RemotePackage { - c.ensureCache() + c.mu.RLock() + defer c.mu.RUnlock() + + if c.cache == nil { + return nil + } + e, found := c.cache[url] if !found { @@ -51,7 +59,13 @@ func (c *repositoryCache) cacheList(url string, lastModified time.Time, data []R return } - c.ensureCache() + c.mu.Lock() + defer c.mu.Unlock() + + if c.cache == nil { + c.cache = make(map[string]cacheEntry) + } + c.cache[url] = cacheEntry{ lastModified: lastModified, data: data, diff --git a/pkg/pkg/manager.go b/pkg/pkg/manager.go index 18fa4e0d1..4024191ad 100644 --- a/pkg/pkg/manager.go +++ b/pkg/pkg/manager.go @@ -10,6 +10,7 @@ import ( "net/http" "net/url" "path/filepath" + "sync" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -31,13 +32,14 @@ type Manager struct { Client *http.Client - cache *repositoryCache + cacheOnce sync.Once + cache *repositoryCache } func (m *Manager) getCache() *repositoryCache { - if m.cache == nil { + m.cacheOnce.Do(func() { m.cache = &repositoryCache{} - } + }) return m.cache } diff --git 
a/pkg/plugin/examples/react-component/README.md b/pkg/plugin/examples/react-component/README.md index 5a42a3749..383b47235 100644 --- a/pkg/plugin/examples/react-component/README.md +++ b/pkg/plugin/examples/react-component/README.md @@ -1,7 +1,7 @@ This is a reference React component plugin. It replaces the `details` part of scene cards with a list of performers and tags. To build: -- run `yarn install --frozen-lockfile` -- run `yarn run build` +- run `pnpm install --frozen-lockfile` +- run `npm run build` This will copy the plugin files into the `dist` directory. These files can be copied to a `plugins` directory. diff --git a/pkg/plugin/examples/react-component/package.json b/pkg/plugin/examples/react-component/package.json index b37205d9c..1c07e5774 100644 --- a/pkg/plugin/examples/react-component/package.json +++ b/pkg/plugin/examples/react-component/package.json @@ -5,11 +5,11 @@ "author": "WithoutPants", "license": "AGPL-3.0", "scripts": { - "compile:ts": "yarn tsc", - "compile:sass": "yarn sass src/testReact.scss dist/testReact.css", + "compile:ts": "npm run tsc", + "compile:sass": "npm run sass src/testReact.scss dist/testReact.css", "copy:yml": "cpx \"src/testReact.yml\" \"dist\"", - "compile": "yarn run compile:ts && yarn run compile:sass", - "build": "yarn run compile && yarn run copy:yml" + "compile": "npm run compile:ts && npm run compile:sass", + "build": "npm run compile && npm run copy:yml" }, "devDependencies": { "@types/react": "^18.2.31", diff --git a/pkg/plugin/examples/react-component/pnpm-lock.yaml b/pkg/plugin/examples/react-component/pnpm-lock.yaml new file mode 100644 index 000000000..d19c17347 --- /dev/null +++ b/pkg/plugin/examples/react-component/pnpm-lock.yaml @@ -0,0 +1,1578 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + devDependencies: + '@types/react': + specifier: ^18.2.31 + version: 18.3.26 + '@types/react-dom': + specifier: ^18.2.14 + version: 
18.3.7(@types/react@18.3.26) + cpx: + specifier: ^1.5.0 + version: 1.5.0 + sass: + specifier: ^1.69.4 + version: 1.93.2 + typescript: + specifier: ^5.2.2 + version: 5.9.3 + +packages: + + '@parcel/watcher-android-arm64@2.5.1': + resolution: {integrity: sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [android] + + '@parcel/watcher-darwin-arm64@2.5.1': + resolution: {integrity: sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [darwin] + + '@parcel/watcher-darwin-x64@2.5.1': + resolution: {integrity: sha512-1ZXDthrnNmwv10A0/3AJNZ9JGlzrF82i3gNQcWOzd7nJ8aj+ILyW1MTxVk35Db0u91oD5Nlk9MBiujMlwmeXZg==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [darwin] + + '@parcel/watcher-freebsd-x64@2.5.1': + resolution: {integrity: sha512-SI4eljM7Flp9yPuKi8W0ird8TI/JK6CSxju3NojVI6BjHsTyK7zxA9urjVjEKJ5MBYC+bLmMcbAWlZ+rFkLpJQ==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [freebsd] + + '@parcel/watcher-linux-arm-glibc@2.5.1': + resolution: {integrity: sha512-RCdZlEyTs8geyBkkcnPWvtXLY44BCeZKmGYRtSgtwwnHR4dxfHRG3gR99XdMEdQ7KeiDdasJwwvNSF5jKtDwdA==} + engines: {node: '>= 10.0.0'} + cpu: [arm] + os: [linux] + + '@parcel/watcher-linux-arm-musl@2.5.1': + resolution: {integrity: sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q==} + engines: {node: '>= 10.0.0'} + cpu: [arm] + os: [linux] + + '@parcel/watcher-linux-arm64-glibc@2.5.1': + resolution: {integrity: sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [linux] + + '@parcel/watcher-linux-arm64-musl@2.5.1': + resolution: {integrity: sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [linux] + + 
'@parcel/watcher-linux-x64-glibc@2.5.1': + resolution: {integrity: sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [linux] + + '@parcel/watcher-linux-x64-musl@2.5.1': + resolution: {integrity: sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [linux] + + '@parcel/watcher-win32-arm64@2.5.1': + resolution: {integrity: sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [win32] + + '@parcel/watcher-win32-ia32@2.5.1': + resolution: {integrity: sha512-c2KkcVN+NJmuA7CGlaGD1qJh1cLfDnQsHjE89E60vUEMlqduHGCdCLJCID5geFVM0dOtA3ZiIO8BoEQmzQVfpQ==} + engines: {node: '>= 10.0.0'} + cpu: [ia32] + os: [win32] + + '@parcel/watcher-win32-x64@2.5.1': + resolution: {integrity: sha512-9lHBdJITeNR++EvSQVUcaZoWupyHfXe1jZvGZ06O/5MflPcuPLtEphScIBL+AiCWBO46tDSHzWyD0uDmmZqsgA==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [win32] + + '@parcel/watcher@2.5.1': + resolution: {integrity: sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg==} + engines: {node: '>= 10.0.0'} + + '@types/prop-types@15.7.15': + resolution: {integrity: sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==} + + '@types/react-dom@18.3.7': + resolution: {integrity: sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==} + peerDependencies: + '@types/react': ^18.0.0 + + '@types/react@18.3.26': + resolution: {integrity: sha512-RFA/bURkcKzx/X9oumPG9Vp3D3JUgus/d0b67KB0t5S/raciymilkOa66olh78MUI92QLbEJevO7rvqU/kjwKA==} + + anymatch@1.3.2: + resolution: {integrity: sha512-0XNayC8lTHQ2OI8aljNCN3sSx6hsr/1+rlcDAotXJR7C1oZZHCNsfpbKwMjRA3Uqb5tF1Rae2oloTr4xpq+WjA==} + + arr-diff@2.0.0: + resolution: {integrity: 
sha512-dtXTVMkh6VkEEA7OhXnN1Ecb8aAGFdZ1LFxtOCoqj4qkyOJMt7+qs6Ahdy6p/NQCPYsRSXXivhSB/J5E9jmYKA==} + engines: {node: '>=0.10.0'} + + arr-diff@4.0.0: + resolution: {integrity: sha512-YVIQ82gZPGBebQV/a8dar4AitzCQs0jjXwMPZllpXMaGjXPYVUawSxQrRsjhjupyVxEvbHgUmIhKVlND+j02kA==} + engines: {node: '>=0.10.0'} + + arr-flatten@1.1.0: + resolution: {integrity: sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==} + engines: {node: '>=0.10.0'} + + arr-union@3.1.0: + resolution: {integrity: sha512-sKpyeERZ02v1FeCZT8lrfJq5u6goHCtpTAzPwJYe7c8SPFOboNjNg1vz2L4VTn9T4PQxEx13TbXLmYUcS6Ug7Q==} + engines: {node: '>=0.10.0'} + + array-unique@0.2.1: + resolution: {integrity: sha512-G2n5bG5fSUCpnsXz4+8FUkYsGPkNfLn9YvS66U5qbTIXI2Ynnlo4Bi42bWv+omKUCqz+ejzfClwne0alJWJPhg==} + engines: {node: '>=0.10.0'} + + array-unique@0.3.2: + resolution: {integrity: sha512-SleRWjh9JUud2wH1hPs9rZBZ33H6T9HOiL0uwGnGx9FpE6wKGyfWugmbkEOIs6qWrZhg0LWeLziLrEwQJhs5mQ==} + engines: {node: '>=0.10.0'} + + assign-symbols@1.0.0: + resolution: {integrity: sha512-Q+JC7Whu8HhmTdBph/Tq59IoRtoy6KAm5zzPv00WdujX82lbAL8K7WVjne7vdCsAmbF4AYaDOPyO3k0kl8qIrw==} + engines: {node: '>=0.10.0'} + + async-each@1.0.6: + resolution: {integrity: sha512-c646jH1avxr+aVpndVMeAfYw7wAa6idufrlN3LPA4PmKS0QEGp6PIC9nwz0WQkkvBGAMEki3pFdtxaF39J9vvg==} + + atob@2.1.2: + resolution: {integrity: sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==} + engines: {node: '>= 4.5.0'} + hasBin: true + + babel-runtime@6.26.0: + resolution: {integrity: sha512-ITKNuq2wKlW1fJg9sSW52eepoYgZBggvOAHC0u/CYu/qxQ9EVzThCgR69BnSXLHjy2f7SY5zaQ4yt7H9ZVxY2g==} + + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + base@0.11.2: + resolution: {integrity: sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==} + engines: {node: '>=0.10.0'} + + 
binary-extensions@1.13.1: + resolution: {integrity: sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==} + engines: {node: '>=0.10.0'} + + bindings@1.5.0: + resolution: {integrity: sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==} + + brace-expansion@1.1.12: + resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} + + braces@1.8.5: + resolution: {integrity: sha512-xU7bpz2ytJl1bH9cgIurjpg/n8Gohy9GTw81heDYLJQ4RU60dlyJsa+atVF2pI0yMMvKxI9HkKwjePCj5XI1hw==} + engines: {node: '>=0.10.0'} + + braces@2.3.2: + resolution: {integrity: sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==} + engines: {node: '>=0.10.0'} + + braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + + cache-base@1.0.1: + resolution: {integrity: sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==} + engines: {node: '>=0.10.0'} + + chokidar@1.7.0: + resolution: {integrity: sha512-mk8fAWcRUOxY7btlLtitj3A45jOwSAxH4tOFOoEGbVsl6cL6pPMWUy7dwZ/canfj3QEdP6FHSnf/l1c6/WkzVg==} + + chokidar@4.0.3: + resolution: {integrity: sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==} + engines: {node: '>= 14.16.0'} + + class-utils@0.3.6: + resolution: {integrity: sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==} + engines: {node: '>=0.10.0'} + + collection-visit@1.0.0: + resolution: {integrity: sha512-lNkKvzEeMBBjUGHZ+q6z9pSJla0KWAQPvtzhEV9+iGyQYG+pBpl7xKDhxoNSOZH2hhv0v5k0y2yAM4o4SjoSkw==} + engines: {node: '>=0.10.0'} + + component-emitter@1.3.1: + resolution: {integrity: sha512-T0+barUSQRTUQASh8bx02dl+DhF54GtIDY13Y3m9oWTklKbb3Wv974meRpeZ3lp1JpLVECWWNHC4vaG2XHXouQ==} + + 
concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + + copy-descriptor@0.1.1: + resolution: {integrity: sha512-XgZ0pFcakEUlbwQEVNg3+QAis1FyTL3Qel9FYy8pSkQqoG3PNoT0bOCQtOXcOkur21r2Eq2kI+IE+gsmAEVlYw==} + engines: {node: '>=0.10.0'} + + core-js@2.6.12: + resolution: {integrity: sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==} + deprecated: core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. Please, upgrade your dependencies to the actual version of core-js. + + core-util-is@1.0.3: + resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} + + cpx@1.5.0: + resolution: {integrity: sha512-jHTjZhsbg9xWgsP2vuNW2jnnzBX+p4T+vNI9Lbjzs1n4KhOfa22bQppiFYLsWQKd8TzmL5aSP/Me3yfsCwXbDA==} + hasBin: true + + csstype@3.1.3: + resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==} + + debug@2.6.9: + resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + decode-uri-component@0.2.2: + resolution: {integrity: sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==} + engines: {node: '>=0.10'} + + define-property@0.2.5: + resolution: {integrity: sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA==} + engines: {node: '>=0.10.0'} + + define-property@1.0.0: + resolution: {integrity: 
sha512-cZTYKFWspt9jZsMscWo8sc/5lbPC9Q0N5nBLgb+Yd915iL3udB1uFgS3B8YCx66UVHq018DAVFoee7x+gxggeA==} + engines: {node: '>=0.10.0'} + + define-property@2.0.2: + resolution: {integrity: sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==} + engines: {node: '>=0.10.0'} + + detect-libc@1.0.3: + resolution: {integrity: sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==} + engines: {node: '>=0.10'} + hasBin: true + + duplexer@0.1.2: + resolution: {integrity: sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==} + + expand-brackets@0.1.5: + resolution: {integrity: sha512-hxx03P2dJxss6ceIeri9cmYOT4SRs3Zk3afZwWpOsRqLqprhTR8u++SlC+sFGsQr7WGFPdMF7Gjc1njDLDK6UA==} + engines: {node: '>=0.10.0'} + + expand-brackets@2.1.4: + resolution: {integrity: sha512-w/ozOKR9Obk3qoWeY/WDi6MFta9AoMR+zud60mdnbniMcBxRuFJyDt2LdX/14A1UABeqk+Uk+LDfUpvoGKppZA==} + engines: {node: '>=0.10.0'} + + expand-range@1.8.2: + resolution: {integrity: sha512-AFASGfIlnIbkKPQwX1yHaDjFvh/1gyKJODme52V6IORh69uEYgZp0o9C+qsIGNVEiuuhQU0CSSl++Rlegg1qvA==} + engines: {node: '>=0.10.0'} + + extend-shallow@2.0.1: + resolution: {integrity: sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==} + engines: {node: '>=0.10.0'} + + extend-shallow@3.0.2: + resolution: {integrity: sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q==} + engines: {node: '>=0.10.0'} + + extglob@0.3.2: + resolution: {integrity: sha512-1FOj1LOwn42TMrruOHGt18HemVnbwAmAak7krWk+wa93KXxGbK+2jpezm+ytJYDaBX0/SPLZFHKM7m+tKobWGg==} + engines: {node: '>=0.10.0'} + + extglob@2.0.4: + resolution: {integrity: sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==} + engines: {node: '>=0.10.0'} + + file-uri-to-path@1.0.0: + resolution: {integrity: 
sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==} + + filename-regex@2.0.1: + resolution: {integrity: sha512-BTCqyBaWBTsauvnHiE8i562+EdJj+oUpkqWp2R1iCoR8f6oo8STRu3of7WJJ0TqWtxN50a5YFpzYK4Jj9esYfQ==} + engines: {node: '>=0.10.0'} + + fill-range@2.2.4: + resolution: {integrity: sha512-cnrcCbj01+j2gTG921VZPnHbjmdAf8oQV/iGeV2kZxGSyfYjjTyY79ErsK1WJWMpw6DaApEX72binqJE+/d+5Q==} + engines: {node: '>=0.10.0'} + + fill-range@4.0.0: + resolution: {integrity: sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==} + engines: {node: '>=0.10.0'} + + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + + find-index@0.1.1: + resolution: {integrity: sha512-uJ5vWrfBKMcE6y2Z8834dwEZj9mNGxYa3t3I53OwFeuZ8D9oc2E5zcsrkuhX6h4iYrjhiv0T3szQmxlAV9uxDg==} + + for-in@1.0.2: + resolution: {integrity: sha512-7EwmXrOjyL+ChxMhmG5lnW9MPt1aIeZEwKhQzoBUdTV0N3zuwWDZYVJatDvZ2OyzPUvdIAZDsCetk3coyMfcnQ==} + engines: {node: '>=0.10.0'} + + for-own@0.1.5: + resolution: {integrity: sha512-SKmowqGTJoPzLO1T0BBJpkfp3EMacCMOuH40hOUbrbzElVktk4DioXVM99QkLCyKoiuOmyjgcWMpVz2xjE7LZw==} + engines: {node: '>=0.10.0'} + + fragment-cache@0.2.1: + resolution: {integrity: sha512-GMBAbW9antB8iZRHLoGw0b3HANt57diZYFO/HL1JGIC1MjKrdmhxvrJbupnVvpys0zsz7yBApXdQyfepKly2kA==} + engines: {node: '>=0.10.0'} + + fs.realpath@1.0.0: + resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} + + fsevents@1.2.13: + resolution: {integrity: sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==} + engines: {node: '>= 4.0'} + os: [darwin] + deprecated: Upgrade to fsevents v2 to mitigate potential security issues + + function-bind@1.1.2: + resolution: {integrity: 
sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + + get-value@2.0.6: + resolution: {integrity: sha512-Ln0UQDlxH1BapMu3GPtf7CuYNwRZf2gwCuPqbyG6pB8WfmFpzqcy4xtAaAMUhnNqjMKTiCPZG2oMT3YSx8U2NA==} + engines: {node: '>=0.10.0'} + + glob-base@0.3.0: + resolution: {integrity: sha512-ab1S1g1EbO7YzauaJLkgLp7DZVAqj9M/dvKlTt8DkXA2tiOIcSMrlVI2J1RZyB5iJVccEscjGn+kpOG9788MHA==} + engines: {node: '>=0.10.0'} + + glob-parent@2.0.0: + resolution: {integrity: sha512-JDYOvfxio/t42HKdxkAYaCiBN7oYiuxykOxKxdaUW5Qn0zaYN3gRQWolrwdnf0shM9/EP0ebuuTmyoXNr1cC5w==} + + glob2base@0.0.12: + resolution: {integrity: sha512-ZyqlgowMbfj2NPjxaZZ/EtsXlOch28FRXgMd64vqZWk1bT9+wvSRLYD1om9M7QfQru51zJPAT17qXm4/zd+9QA==} + engines: {node: '>= 0.10'} + + glob@7.2.3: + resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} + deprecated: Glob versions prior to v9 are no longer supported + + graceful-fs@4.2.11: + resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + + has-value@0.3.1: + resolution: {integrity: sha512-gpG936j8/MzaeID5Yif+577c17TxaDmhuyVgSwtnL/q8UUTySg8Mecb+8Cf1otgLoD7DDH75axp86ER7LFsf3Q==} + engines: {node: '>=0.10.0'} + + has-value@1.0.0: + resolution: {integrity: sha512-IBXk4GTsLYdQ7Rvt+GRBrFSVEkmuOUy4re0Xjd9kJSUQpnTrWR4/y9RpfexN9vkAPMFuQoeWKwqzPozRTlasGw==} + engines: {node: '>=0.10.0'} + + has-values@0.1.4: + resolution: {integrity: sha512-J8S0cEdWuQbqD9//tlZxiMuMNmxB8PlEwvYwuxsTmR1G5RXUePEX/SJn7aD0GMLieuZYSwNH0cQuJGwnYunXRQ==} + engines: {node: '>=0.10.0'} + + has-values@1.0.0: + resolution: {integrity: sha512-ODYZC64uqzmtfGMEAX/FvZiRyWLpAC3vYnNunURUnkGVTS+mI0smVsWaPydRBsE3g+ok7h960jChO8mFcWlHaQ==} + engines: {node: '>=0.10.0'} + + hasown@2.0.2: + resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + + 
immutable@5.1.4: + resolution: {integrity: sha512-p6u1bG3YSnINT5RQmx/yRZBpenIl30kVxkTLDyHLIMk0gict704Q9n+thfDI7lTRm9vXdDYutVzXhzcThxTnXA==} + + inflight@1.0.6: + resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} + deprecated: This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful. + + inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + + is-accessor-descriptor@1.0.1: + resolution: {integrity: sha512-YBUanLI8Yoihw923YeFUS5fs0fF2f5TSFTNiYAAzhhDscDa3lEqYuz1pDOEP5KvX94I9ey3vsqjJcLVFVU+3QA==} + engines: {node: '>= 0.10'} + + is-binary-path@1.0.1: + resolution: {integrity: sha512-9fRVlXc0uCxEDj1nQzaWONSpbTfx0FmJfzHF7pwlI8DkWGoHBBea4Pg5Ky0ojwwxQmnSifgbKkI06Qv0Ljgj+Q==} + engines: {node: '>=0.10.0'} + + is-buffer@1.1.6: + resolution: {integrity: sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==} + + is-core-module@2.16.1: + resolution: {integrity: sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==} + engines: {node: '>= 0.4'} + + is-data-descriptor@1.0.1: + resolution: {integrity: sha512-bc4NlCDiCr28U4aEsQ3Qs2491gVq4V8G7MQyws968ImqjKuYtTJXrl7Vq7jsN7Ly/C3xj5KWFrY7sHNeDkAzXw==} + engines: {node: '>= 0.4'} + + is-descriptor@0.1.7: + resolution: {integrity: sha512-C3grZTvObeN1xud4cRWl366OMXZTj0+HGyk4hvfpx4ZHt1Pb60ANSXqCK7pdOTeUQpRzECBSTphqvD7U+l22Eg==} + engines: {node: '>= 0.4'} + + is-descriptor@1.0.3: + resolution: {integrity: sha512-JCNNGbwWZEVaSPtS45mdtrneRWJFp07LLmykxeFV5F6oBvNF8vHSfJuJgoT472pSfk+Mf8VnlrspaFBHWM8JAw==} + engines: {node: '>= 0.4'} + + is-dotfile@1.0.3: + resolution: {integrity: 
sha512-9YclgOGtN/f8zx0Pr4FQYMdibBiTaH3sn52vjYip4ZSf6C4/6RfTEZ+MR4GvKhCxdPh21Bg42/WL55f6KSnKpg==} + engines: {node: '>=0.10.0'} + + is-equal-shallow@0.1.3: + resolution: {integrity: sha512-0EygVC5qPvIyb+gSz7zdD5/AAoS6Qrx1e//6N4yv4oNm30kqvdmG66oZFWVlQHUWe5OjP08FuTw2IdT0EOTcYA==} + engines: {node: '>=0.10.0'} + + is-extendable@0.1.1: + resolution: {integrity: sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==} + engines: {node: '>=0.10.0'} + + is-extendable@1.0.1: + resolution: {integrity: sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==} + engines: {node: '>=0.10.0'} + + is-extglob@1.0.0: + resolution: {integrity: sha512-7Q+VbVafe6x2T+Tu6NcOf6sRklazEPmBoB3IWk3WdGZM2iGUwU/Oe3Wtq5lSEkDTTlpp8yx+5t4pzO/i9Ty1ww==} + engines: {node: '>=0.10.0'} + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + is-glob@2.0.1: + resolution: {integrity: sha512-a1dBeB19NXsf/E0+FHqkagizel/LQw2DjSQpvQrj3zT+jYPpaUCryPnrQajXKFLCMuf4I6FhRpaGtw4lPrG6Eg==} + engines: {node: '>=0.10.0'} + + is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + is-number@2.1.0: + resolution: {integrity: sha512-QUzH43Gfb9+5yckcrSA0VBDwEtDUchrk4F6tfJZQuNzDJbEDB9cZNzSfXGQ1jqmdDY/kl41lUOWM9syA8z8jlg==} + engines: {node: '>=0.10.0'} + + is-number@3.0.0: + resolution: {integrity: sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==} + engines: {node: '>=0.10.0'} + + is-number@4.0.0: + resolution: {integrity: sha512-rSklcAIlf1OmFdyAqbnWTLVelsQ58uvZ66S/ZyawjWqIviTWCjg2PzVGw8WUA+nNuPTqb4wgA+NszrJ+08LlgQ==} + engines: {node: '>=0.10.0'} + + is-number@7.0.0: + resolution: {integrity: 
sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + + is-plain-object@2.0.4: + resolution: {integrity: sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==} + engines: {node: '>=0.10.0'} + + is-posix-bracket@0.1.1: + resolution: {integrity: sha512-Yu68oeXJ7LeWNmZ3Zov/xg/oDBnBK2RNxwYY1ilNJX+tKKZqgPK+qOn/Gs9jEu66KDY9Netf5XLKNGzas/vPfQ==} + engines: {node: '>=0.10.0'} + + is-primitive@2.0.0: + resolution: {integrity: sha512-N3w1tFaRfk3UrPfqeRyD+GYDASU3W5VinKhlORy8EWVf/sIdDL9GAcew85XmktCfH+ngG7SRXEVDoO18WMdB/Q==} + engines: {node: '>=0.10.0'} + + is-windows@1.0.2: + resolution: {integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==} + engines: {node: '>=0.10.0'} + + isarray@1.0.0: + resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==} + + isobject@2.1.0: + resolution: {integrity: sha512-+OUdGJlgjOBZDfxnDjYYG6zp487z0JGNQq3cYQYg5f5hKR+syHMsaztzGeml/4kGG55CSpKSpWTY+jYGgsHLgA==} + engines: {node: '>=0.10.0'} + + isobject@3.0.1: + resolution: {integrity: sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==} + engines: {node: '>=0.10.0'} + + kind-of@3.2.2: + resolution: {integrity: sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==} + engines: {node: '>=0.10.0'} + + kind-of@4.0.0: + resolution: {integrity: sha512-24XsCxmEbRwEDbz/qz3stgin8TTzZ1ESR56OMCN0ujYg+vRutNSiOj9bHH9u85DKgXguraugV5sFuvbD4FW/hw==} + engines: {node: '>=0.10.0'} + + kind-of@6.0.3: + resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==} + engines: {node: '>=0.10.0'} + + map-cache@0.2.2: + resolution: {integrity: sha512-8y/eV9QQZCiyn1SprXSrCmqJN0yNRATe+PO8ztwqrvrbdRLA3eYJF0yaR0YayLWkMbsQSKWS9N2gPcGEc4UsZg==} + engines: 
{node: '>=0.10.0'} + + map-visit@1.0.0: + resolution: {integrity: sha512-4y7uGv8bd2WdM9vpQsiQNo41Ln1NvhvDRuVt0k2JZQ+ezN2uaQes7lZeZ+QQUHOLQAtDaBJ+7wCbi+ab/KFs+w==} + engines: {node: '>=0.10.0'} + + math-random@1.0.4: + resolution: {integrity: sha512-rUxjysqif/BZQH2yhd5Aaq7vXMSx9NdEsQcyA07uEzIvxgI7zIr33gGsh+RU0/XjmQpCW7RsVof1vlkvQVCK5A==} + + micromatch@2.3.11: + resolution: {integrity: sha512-LnU2XFEk9xxSJ6rfgAry/ty5qwUTyHYOBU0g4R6tIw5ljwgGIBmiKhRWLw5NpMOnrgUNcDJ4WMp8rl3sYVHLNA==} + engines: {node: '>=0.10.0'} + + micromatch@3.1.10: + resolution: {integrity: sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==} + engines: {node: '>=0.10.0'} + + micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} + engines: {node: '>=8.6'} + + minimatch@3.1.2: + resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} + + minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + + mixin-deep@1.3.2: + resolution: {integrity: sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==} + engines: {node: '>=0.10.0'} + + mkdirp@0.5.6: + resolution: {integrity: sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==} + hasBin: true + + ms@2.0.0: + resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==} + + nan@2.23.0: + resolution: {integrity: sha512-1UxuyYGdoQHcGg87Lkqm3FzefucTa0NAiOcuRsDmysep3c1LVCRK2krrUDafMWtjSG04htvAmvg96+SDknOmgQ==} + + nanomatch@1.2.13: + resolution: {integrity: sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==} + engines: {node: '>=0.10.0'} + + node-addon-api@7.1.1: + resolution: {integrity: 
sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==} + + normalize-path@2.1.1: + resolution: {integrity: sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==} + engines: {node: '>=0.10.0'} + + object-copy@0.1.0: + resolution: {integrity: sha512-79LYn6VAb63zgtmAteVOWo9Vdj71ZVBy3Pbse+VqxDpEP83XuujMrGqHIwAXJ5I/aM0zU7dIyIAhifVTPrNItQ==} + engines: {node: '>=0.10.0'} + + object-visit@1.0.1: + resolution: {integrity: sha512-GBaMwwAVK9qbQN3Scdo0OyvgPW7l3lnaVMj84uTOZlswkX0KpF6fyDBJhtTthf7pymztoN36/KEr1DyhF96zEA==} + engines: {node: '>=0.10.0'} + + object.omit@2.0.1: + resolution: {integrity: sha512-UiAM5mhmIuKLsOvrL+B0U2d1hXHF3bFYWIuH1LMpuV2EJEHG1Ntz06PgLEHjm6VFd87NpH8rastvPoyv6UW2fA==} + engines: {node: '>=0.10.0'} + + object.pick@1.3.0: + resolution: {integrity: sha512-tqa/UMy/CCoYmj+H5qc07qvSL9dqcs/WZENZ1JbtWBlATP+iVOe778gE6MSijnyCnORzDuX6hU+LA4SZ09YjFQ==} + engines: {node: '>=0.10.0'} + + once@1.4.0: + resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + + parse-glob@3.0.4: + resolution: {integrity: sha512-FC5TeK0AwXzq3tUBFtH74naWkPQCEWs4K+xMxWZBlKDWu0bVHXGZa+KKqxKidd7xwhdZ19ZNuF2uO1M/r196HA==} + engines: {node: '>=0.10.0'} + + pascalcase@0.1.1: + resolution: {integrity: sha512-XHXfu/yOQRy9vYOtUDVMN60OEJjW013GoObG1o+xwQTpB9eYJX/BjXMsdW13ZDPruFhYYn0AG22w0xgQMwl3Nw==} + engines: {node: '>=0.10.0'} + + path-is-absolute@1.0.1: + resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} + engines: {node: '>=0.10.0'} + + path-parse@1.0.7: + resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} + + picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + + 
posix-character-classes@0.1.1: + resolution: {integrity: sha512-xTgYBc3fuo7Yt7JbiuFxSYGToMoz8fLoE6TC9Wx1P/u+LfeThMOAqmuyECnlBaaJb+u1m9hHiXUEtwW4OzfUJg==} + engines: {node: '>=0.10.0'} + + preserve@0.2.0: + resolution: {integrity: sha512-s/46sYeylUfHNjI+sA/78FAHlmIuKqI9wNnzEOGehAlUUYeObv5C2mOinXBjyUyWmJ2SfcS2/ydApH4hTF4WXQ==} + engines: {node: '>=0.10.0'} + + process-nextick-args@2.0.1: + resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} + + randomatic@3.1.1: + resolution: {integrity: sha512-TuDE5KxZ0J461RVjrJZCJc+J+zCkTb1MbH9AQUq68sMhOMcy9jLcb3BrZKgp9q9Ncltdg4QVqWrH02W2EFFVYw==} + engines: {node: '>= 0.10.0'} + + readable-stream@2.3.8: + resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==} + + readdirp@2.2.1: + resolution: {integrity: sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==} + engines: {node: '>=0.10'} + + readdirp@4.1.2: + resolution: {integrity: sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==} + engines: {node: '>= 14.18.0'} + + regenerator-runtime@0.11.1: + resolution: {integrity: sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==} + + regex-cache@0.4.4: + resolution: {integrity: sha512-nVIZwtCjkC9YgvWkpM55B5rBhBYRZhAaJbgcFYXXsHnbZ9UZI9nnVWYZpBlCqv9ho2eZryPnWrZGsOdPwVWXWQ==} + engines: {node: '>=0.10.0'} + + regex-not@1.0.2: + resolution: {integrity: sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==} + engines: {node: '>=0.10.0'} + + remove-trailing-separator@1.1.0: + resolution: {integrity: sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw==} + + repeat-element@1.1.4: + resolution: {integrity: sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ==} + 
engines: {node: '>=0.10.0'} + + repeat-string@1.6.1: + resolution: {integrity: sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==} + engines: {node: '>=0.10'} + + resolve-url@0.2.1: + resolution: {integrity: sha512-ZuF55hVUQaaczgOIwqWzkEcEidmlD/xl44x1UZnhOXcYuFN2S6+rcxpG+C1N3So0wvNI3DmJICUFfu2SxhBmvg==} + deprecated: https://github.com/lydell/resolve-url#deprecated + + resolve@1.22.11: + resolution: {integrity: sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==} + engines: {node: '>= 0.4'} + hasBin: true + + ret@0.1.15: + resolution: {integrity: sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==} + engines: {node: '>=0.12'} + + safe-buffer@5.1.2: + resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==} + + safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + + safe-regex@1.1.0: + resolution: {integrity: sha512-aJXcif4xnaNUzvUuC5gcb46oTS7zvg4jpMTnuqtrEPlR3vFr4pxtdTwaF1Qs3Enjn9HK+ZlwQui+a7z0SywIzg==} + + sass@1.93.2: + resolution: {integrity: sha512-t+YPtOQHpGW1QWsh1CHQ5cPIr9lbbGZLZnbihP/D/qZj/yuV68m8qarcV17nvkOX81BCrvzAlq2klCQFZghyTg==} + engines: {node: '>=14.0.0'} + hasBin: true + + set-value@2.0.1: + resolution: {integrity: sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==} + engines: {node: '>=0.10.0'} + + shell-quote@1.8.3: + resolution: {integrity: sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==} + engines: {node: '>= 0.4'} + + snapdragon-node@2.1.1: + resolution: {integrity: sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==} + engines: {node: '>=0.10.0'} + + snapdragon-util@3.0.1: + resolution: {integrity: 
sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==} + engines: {node: '>=0.10.0'} + + snapdragon@0.8.2: + resolution: {integrity: sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==} + engines: {node: '>=0.10.0'} + + source-map-js@1.2.1: + resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} + engines: {node: '>=0.10.0'} + + source-map-resolve@0.5.3: + resolution: {integrity: sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==} + deprecated: See https://github.com/lydell/source-map-resolve#deprecated + + source-map-url@0.4.1: + resolution: {integrity: sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw==} + deprecated: See https://github.com/lydell/source-map-url#deprecated + + source-map@0.5.7: + resolution: {integrity: sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==} + engines: {node: '>=0.10.0'} + + split-string@3.1.0: + resolution: {integrity: sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==} + engines: {node: '>=0.10.0'} + + static-extend@0.1.2: + resolution: {integrity: sha512-72E9+uLc27Mt718pMHt9VMNiAL4LMsmDbBva8mxWUCkT07fSzEGMYUCk0XWY6lp0j6RBAG4cJ3mWuZv2OE3s0g==} + engines: {node: '>=0.10.0'} + + string_decoder@1.1.1: + resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==} + + subarg@1.0.0: + resolution: {integrity: sha512-RIrIdRY0X1xojthNcVtgT9sjpOGagEUKpZdgBUi054OEPFo282yg+zE+t1Rj3+RqKq2xStL7uUHhY+AjbC4BXg==} + + supports-preserve-symlinks-flag@1.0.0: + resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} + engines: {node: '>= 0.4'} + + to-object-path@0.3.0: + resolution: {integrity: 
sha512-9mWHdnGRuh3onocaHzukyvCZhzvr6tiflAy/JRFXcJX0TjgfWA9pk9t8CMbzmBE4Jfw58pXbkngtBtqYxzNEyg==} + engines: {node: '>=0.10.0'} + + to-regex-range@2.1.1: + resolution: {integrity: sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==} + engines: {node: '>=0.10.0'} + + to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + + to-regex@3.0.2: + resolution: {integrity: sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==} + engines: {node: '>=0.10.0'} + + typescript@5.9.3: + resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} + engines: {node: '>=14.17'} + hasBin: true + + union-value@1.0.1: + resolution: {integrity: sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==} + engines: {node: '>=0.10.0'} + + unset-value@1.0.0: + resolution: {integrity: sha512-PcA2tsuGSF9cnySLHTLSh2qrQiJ70mn+r+Glzxv2TWZblxsxCC52BDlZoPCsz7STd9pN7EZetkWZBAvk4cgZdQ==} + engines: {node: '>=0.10.0'} + + urix@0.1.0: + resolution: {integrity: sha512-Am1ousAhSLBeB9cG/7k7r2R0zj50uDRlZHPGbazid5s9rlF1F/QKYObEKSIunSjIOkJZqwRRLpvewjEkM7pSqg==} + deprecated: Please see https://github.com/lydell/urix#deprecated + + use@3.1.1: + resolution: {integrity: sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==} + engines: {node: '>=0.10.0'} + + util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + + wrappy@1.0.2: + resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + +snapshots: + + '@parcel/watcher-android-arm64@2.5.1': + optional: true + + '@parcel/watcher-darwin-arm64@2.5.1': + optional: true + + 
'@parcel/watcher-darwin-x64@2.5.1': + optional: true + + '@parcel/watcher-freebsd-x64@2.5.1': + optional: true + + '@parcel/watcher-linux-arm-glibc@2.5.1': + optional: true + + '@parcel/watcher-linux-arm-musl@2.5.1': + optional: true + + '@parcel/watcher-linux-arm64-glibc@2.5.1': + optional: true + + '@parcel/watcher-linux-arm64-musl@2.5.1': + optional: true + + '@parcel/watcher-linux-x64-glibc@2.5.1': + optional: true + + '@parcel/watcher-linux-x64-musl@2.5.1': + optional: true + + '@parcel/watcher-win32-arm64@2.5.1': + optional: true + + '@parcel/watcher-win32-ia32@2.5.1': + optional: true + + '@parcel/watcher-win32-x64@2.5.1': + optional: true + + '@parcel/watcher@2.5.1': + dependencies: + detect-libc: 1.0.3 + is-glob: 4.0.3 + micromatch: 4.0.8 + node-addon-api: 7.1.1 + optionalDependencies: + '@parcel/watcher-android-arm64': 2.5.1 + '@parcel/watcher-darwin-arm64': 2.5.1 + '@parcel/watcher-darwin-x64': 2.5.1 + '@parcel/watcher-freebsd-x64': 2.5.1 + '@parcel/watcher-linux-arm-glibc': 2.5.1 + '@parcel/watcher-linux-arm-musl': 2.5.1 + '@parcel/watcher-linux-arm64-glibc': 2.5.1 + '@parcel/watcher-linux-arm64-musl': 2.5.1 + '@parcel/watcher-linux-x64-glibc': 2.5.1 + '@parcel/watcher-linux-x64-musl': 2.5.1 + '@parcel/watcher-win32-arm64': 2.5.1 + '@parcel/watcher-win32-ia32': 2.5.1 + '@parcel/watcher-win32-x64': 2.5.1 + optional: true + + '@types/prop-types@15.7.15': {} + + '@types/react-dom@18.3.7(@types/react@18.3.26)': + dependencies: + '@types/react': 18.3.26 + + '@types/react@18.3.26': + dependencies: + '@types/prop-types': 15.7.15 + csstype: 3.1.3 + + anymatch@1.3.2: + dependencies: + micromatch: 2.3.11 + normalize-path: 2.1.1 + + arr-diff@2.0.0: + dependencies: + arr-flatten: 1.1.0 + + arr-diff@4.0.0: {} + + arr-flatten@1.1.0: {} + + arr-union@3.1.0: {} + + array-unique@0.2.1: {} + + array-unique@0.3.2: {} + + assign-symbols@1.0.0: {} + + async-each@1.0.6: {} + + atob@2.1.2: {} + + babel-runtime@6.26.0: + dependencies: + core-js: 2.6.12 + regenerator-runtime: 
0.11.1 + + balanced-match@1.0.2: {} + + base@0.11.2: + dependencies: + cache-base: 1.0.1 + class-utils: 0.3.6 + component-emitter: 1.3.1 + define-property: 1.0.0 + isobject: 3.0.1 + mixin-deep: 1.3.2 + pascalcase: 0.1.1 + + binary-extensions@1.13.1: {} + + bindings@1.5.0: + dependencies: + file-uri-to-path: 1.0.0 + optional: true + + brace-expansion@1.1.12: + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + + braces@1.8.5: + dependencies: + expand-range: 1.8.2 + preserve: 0.2.0 + repeat-element: 1.1.4 + + braces@2.3.2: + dependencies: + arr-flatten: 1.1.0 + array-unique: 0.3.2 + extend-shallow: 2.0.1 + fill-range: 4.0.0 + isobject: 3.0.1 + repeat-element: 1.1.4 + snapdragon: 0.8.2 + snapdragon-node: 2.1.1 + split-string: 3.1.0 + to-regex: 3.0.2 + transitivePeerDependencies: + - supports-color + + braces@3.0.3: + dependencies: + fill-range: 7.1.1 + optional: true + + cache-base@1.0.1: + dependencies: + collection-visit: 1.0.0 + component-emitter: 1.3.1 + get-value: 2.0.6 + has-value: 1.0.0 + isobject: 3.0.1 + set-value: 2.0.1 + to-object-path: 0.3.0 + union-value: 1.0.1 + unset-value: 1.0.0 + + chokidar@1.7.0: + dependencies: + anymatch: 1.3.2 + async-each: 1.0.6 + glob-parent: 2.0.0 + inherits: 2.0.4 + is-binary-path: 1.0.1 + is-glob: 2.0.1 + path-is-absolute: 1.0.1 + readdirp: 2.2.1 + optionalDependencies: + fsevents: 1.2.13 + transitivePeerDependencies: + - supports-color + + chokidar@4.0.3: + dependencies: + readdirp: 4.1.2 + + class-utils@0.3.6: + dependencies: + arr-union: 3.1.0 + define-property: 0.2.5 + isobject: 3.0.1 + static-extend: 0.1.2 + + collection-visit@1.0.0: + dependencies: + map-visit: 1.0.0 + object-visit: 1.0.1 + + component-emitter@1.3.1: {} + + concat-map@0.0.1: {} + + copy-descriptor@0.1.1: {} + + core-js@2.6.12: {} + + core-util-is@1.0.3: {} + + cpx@1.5.0: + dependencies: + babel-runtime: 6.26.0 + chokidar: 1.7.0 + duplexer: 0.1.2 + glob: 7.2.3 + glob2base: 0.0.12 + minimatch: 3.1.2 + mkdirp: 0.5.6 + resolve: 1.22.11 + 
safe-buffer: 5.2.1 + shell-quote: 1.8.3 + subarg: 1.0.0 + transitivePeerDependencies: + - supports-color + + csstype@3.1.3: {} + + debug@2.6.9: + dependencies: + ms: 2.0.0 + + decode-uri-component@0.2.2: {} + + define-property@0.2.5: + dependencies: + is-descriptor: 0.1.7 + + define-property@1.0.0: + dependencies: + is-descriptor: 1.0.3 + + define-property@2.0.2: + dependencies: + is-descriptor: 1.0.3 + isobject: 3.0.1 + + detect-libc@1.0.3: + optional: true + + duplexer@0.1.2: {} + + expand-brackets@0.1.5: + dependencies: + is-posix-bracket: 0.1.1 + + expand-brackets@2.1.4: + dependencies: + debug: 2.6.9 + define-property: 0.2.5 + extend-shallow: 2.0.1 + posix-character-classes: 0.1.1 + regex-not: 1.0.2 + snapdragon: 0.8.2 + to-regex: 3.0.2 + transitivePeerDependencies: + - supports-color + + expand-range@1.8.2: + dependencies: + fill-range: 2.2.4 + + extend-shallow@2.0.1: + dependencies: + is-extendable: 0.1.1 + + extend-shallow@3.0.2: + dependencies: + assign-symbols: 1.0.0 + is-extendable: 1.0.1 + + extglob@0.3.2: + dependencies: + is-extglob: 1.0.0 + + extglob@2.0.4: + dependencies: + array-unique: 0.3.2 + define-property: 1.0.0 + expand-brackets: 2.1.4 + extend-shallow: 2.0.1 + fragment-cache: 0.2.1 + regex-not: 1.0.2 + snapdragon: 0.8.2 + to-regex: 3.0.2 + transitivePeerDependencies: + - supports-color + + file-uri-to-path@1.0.0: + optional: true + + filename-regex@2.0.1: {} + + fill-range@2.2.4: + dependencies: + is-number: 2.1.0 + isobject: 2.1.0 + randomatic: 3.1.1 + repeat-element: 1.1.4 + repeat-string: 1.6.1 + + fill-range@4.0.0: + dependencies: + extend-shallow: 2.0.1 + is-number: 3.0.0 + repeat-string: 1.6.1 + to-regex-range: 2.1.1 + + fill-range@7.1.1: + dependencies: + to-regex-range: 5.0.1 + optional: true + + find-index@0.1.1: {} + + for-in@1.0.2: {} + + for-own@0.1.5: + dependencies: + for-in: 1.0.2 + + fragment-cache@0.2.1: + dependencies: + map-cache: 0.2.2 + + fs.realpath@1.0.0: {} + + fsevents@1.2.13: + dependencies: + bindings: 1.5.0 + nan: 
2.23.0 + optional: true + + function-bind@1.1.2: {} + + get-value@2.0.6: {} + + glob-base@0.3.0: + dependencies: + glob-parent: 2.0.0 + is-glob: 2.0.1 + + glob-parent@2.0.0: + dependencies: + is-glob: 2.0.1 + + glob2base@0.0.12: + dependencies: + find-index: 0.1.1 + + glob@7.2.3: + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.2 + once: 1.4.0 + path-is-absolute: 1.0.1 + + graceful-fs@4.2.11: {} + + has-value@0.3.1: + dependencies: + get-value: 2.0.6 + has-values: 0.1.4 + isobject: 2.1.0 + + has-value@1.0.0: + dependencies: + get-value: 2.0.6 + has-values: 1.0.0 + isobject: 3.0.1 + + has-values@0.1.4: {} + + has-values@1.0.0: + dependencies: + is-number: 3.0.0 + kind-of: 4.0.0 + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + + immutable@5.1.4: {} + + inflight@1.0.6: + dependencies: + once: 1.4.0 + wrappy: 1.0.2 + + inherits@2.0.4: {} + + is-accessor-descriptor@1.0.1: + dependencies: + hasown: 2.0.2 + + is-binary-path@1.0.1: + dependencies: + binary-extensions: 1.13.1 + + is-buffer@1.1.6: {} + + is-core-module@2.16.1: + dependencies: + hasown: 2.0.2 + + is-data-descriptor@1.0.1: + dependencies: + hasown: 2.0.2 + + is-descriptor@0.1.7: + dependencies: + is-accessor-descriptor: 1.0.1 + is-data-descriptor: 1.0.1 + + is-descriptor@1.0.3: + dependencies: + is-accessor-descriptor: 1.0.1 + is-data-descriptor: 1.0.1 + + is-dotfile@1.0.3: {} + + is-equal-shallow@0.1.3: + dependencies: + is-primitive: 2.0.0 + + is-extendable@0.1.1: {} + + is-extendable@1.0.1: + dependencies: + is-plain-object: 2.0.4 + + is-extglob@1.0.0: {} + + is-extglob@2.1.1: + optional: true + + is-glob@2.0.1: + dependencies: + is-extglob: 1.0.0 + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + optional: true + + is-number@2.1.0: + dependencies: + kind-of: 3.2.2 + + is-number@3.0.0: + dependencies: + kind-of: 3.2.2 + + is-number@4.0.0: {} + + is-number@7.0.0: + optional: true + + is-plain-object@2.0.4: + dependencies: + isobject: 3.0.1 + + 
is-posix-bracket@0.1.1: {} + + is-primitive@2.0.0: {} + + is-windows@1.0.2: {} + + isarray@1.0.0: {} + + isobject@2.1.0: + dependencies: + isarray: 1.0.0 + + isobject@3.0.1: {} + + kind-of@3.2.2: + dependencies: + is-buffer: 1.1.6 + + kind-of@4.0.0: + dependencies: + is-buffer: 1.1.6 + + kind-of@6.0.3: {} + + map-cache@0.2.2: {} + + map-visit@1.0.0: + dependencies: + object-visit: 1.0.1 + + math-random@1.0.4: {} + + micromatch@2.3.11: + dependencies: + arr-diff: 2.0.0 + array-unique: 0.2.1 + braces: 1.8.5 + expand-brackets: 0.1.5 + extglob: 0.3.2 + filename-regex: 2.0.1 + is-extglob: 1.0.0 + is-glob: 2.0.1 + kind-of: 3.2.2 + normalize-path: 2.1.1 + object.omit: 2.0.1 + parse-glob: 3.0.4 + regex-cache: 0.4.4 + + micromatch@3.1.10: + dependencies: + arr-diff: 4.0.0 + array-unique: 0.3.2 + braces: 2.3.2 + define-property: 2.0.2 + extend-shallow: 3.0.2 + extglob: 2.0.4 + fragment-cache: 0.2.1 + kind-of: 6.0.3 + nanomatch: 1.2.13 + object.pick: 1.3.0 + regex-not: 1.0.2 + snapdragon: 0.8.2 + to-regex: 3.0.2 + transitivePeerDependencies: + - supports-color + + micromatch@4.0.8: + dependencies: + braces: 3.0.3 + picomatch: 2.3.1 + optional: true + + minimatch@3.1.2: + dependencies: + brace-expansion: 1.1.12 + + minimist@1.2.8: {} + + mixin-deep@1.3.2: + dependencies: + for-in: 1.0.2 + is-extendable: 1.0.1 + + mkdirp@0.5.6: + dependencies: + minimist: 1.2.8 + + ms@2.0.0: {} + + nan@2.23.0: + optional: true + + nanomatch@1.2.13: + dependencies: + arr-diff: 4.0.0 + array-unique: 0.3.2 + define-property: 2.0.2 + extend-shallow: 3.0.2 + fragment-cache: 0.2.1 + is-windows: 1.0.2 + kind-of: 6.0.3 + object.pick: 1.3.0 + regex-not: 1.0.2 + snapdragon: 0.8.2 + to-regex: 3.0.2 + transitivePeerDependencies: + - supports-color + + node-addon-api@7.1.1: + optional: true + + normalize-path@2.1.1: + dependencies: + remove-trailing-separator: 1.1.0 + + object-copy@0.1.0: + dependencies: + copy-descriptor: 0.1.1 + define-property: 0.2.5 + kind-of: 3.2.2 + + object-visit@1.0.1: + 
dependencies: + isobject: 3.0.1 + + object.omit@2.0.1: + dependencies: + for-own: 0.1.5 + is-extendable: 0.1.1 + + object.pick@1.3.0: + dependencies: + isobject: 3.0.1 + + once@1.4.0: + dependencies: + wrappy: 1.0.2 + + parse-glob@3.0.4: + dependencies: + glob-base: 0.3.0 + is-dotfile: 1.0.3 + is-extglob: 1.0.0 + is-glob: 2.0.1 + + pascalcase@0.1.1: {} + + path-is-absolute@1.0.1: {} + + path-parse@1.0.7: {} + + picomatch@2.3.1: + optional: true + + posix-character-classes@0.1.1: {} + + preserve@0.2.0: {} + + process-nextick-args@2.0.1: {} + + randomatic@3.1.1: + dependencies: + is-number: 4.0.0 + kind-of: 6.0.3 + math-random: 1.0.4 + + readable-stream@2.3.8: + dependencies: + core-util-is: 1.0.3 + inherits: 2.0.4 + isarray: 1.0.0 + process-nextick-args: 2.0.1 + safe-buffer: 5.1.2 + string_decoder: 1.1.1 + util-deprecate: 1.0.2 + + readdirp@2.2.1: + dependencies: + graceful-fs: 4.2.11 + micromatch: 3.1.10 + readable-stream: 2.3.8 + transitivePeerDependencies: + - supports-color + + readdirp@4.1.2: {} + + regenerator-runtime@0.11.1: {} + + regex-cache@0.4.4: + dependencies: + is-equal-shallow: 0.1.3 + + regex-not@1.0.2: + dependencies: + extend-shallow: 3.0.2 + safe-regex: 1.1.0 + + remove-trailing-separator@1.1.0: {} + + repeat-element@1.1.4: {} + + repeat-string@1.6.1: {} + + resolve-url@0.2.1: {} + + resolve@1.22.11: + dependencies: + is-core-module: 2.16.1 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + + ret@0.1.15: {} + + safe-buffer@5.1.2: {} + + safe-buffer@5.2.1: {} + + safe-regex@1.1.0: + dependencies: + ret: 0.1.15 + + sass@1.93.2: + dependencies: + chokidar: 4.0.3 + immutable: 5.1.4 + source-map-js: 1.2.1 + optionalDependencies: + '@parcel/watcher': 2.5.1 + + set-value@2.0.1: + dependencies: + extend-shallow: 2.0.1 + is-extendable: 0.1.1 + is-plain-object: 2.0.4 + split-string: 3.1.0 + + shell-quote@1.8.3: {} + + snapdragon-node@2.1.1: + dependencies: + define-property: 1.0.0 + isobject: 3.0.1 + snapdragon-util: 3.0.1 + + 
snapdragon-util@3.0.1: + dependencies: + kind-of: 3.2.2 + + snapdragon@0.8.2: + dependencies: + base: 0.11.2 + debug: 2.6.9 + define-property: 0.2.5 + extend-shallow: 2.0.1 + map-cache: 0.2.2 + source-map: 0.5.7 + source-map-resolve: 0.5.3 + use: 3.1.1 + transitivePeerDependencies: + - supports-color + + source-map-js@1.2.1: {} + + source-map-resolve@0.5.3: + dependencies: + atob: 2.1.2 + decode-uri-component: 0.2.2 + resolve-url: 0.2.1 + source-map-url: 0.4.1 + urix: 0.1.0 + + source-map-url@0.4.1: {} + + source-map@0.5.7: {} + + split-string@3.1.0: + dependencies: + extend-shallow: 3.0.2 + + static-extend@0.1.2: + dependencies: + define-property: 0.2.5 + object-copy: 0.1.0 + + string_decoder@1.1.1: + dependencies: + safe-buffer: 5.1.2 + + subarg@1.0.0: + dependencies: + minimist: 1.2.8 + + supports-preserve-symlinks-flag@1.0.0: {} + + to-object-path@0.3.0: + dependencies: + kind-of: 3.2.2 + + to-regex-range@2.1.1: + dependencies: + is-number: 3.0.0 + repeat-string: 1.6.1 + + to-regex-range@5.0.1: + dependencies: + is-number: 7.0.0 + optional: true + + to-regex@3.0.2: + dependencies: + define-property: 2.0.2 + extend-shallow: 3.0.2 + regex-not: 1.0.2 + safe-regex: 1.1.0 + + typescript@5.9.3: {} + + union-value@1.0.1: + dependencies: + arr-union: 3.1.0 + get-value: 2.0.6 + is-extendable: 0.1.1 + set-value: 2.0.1 + + unset-value@1.0.0: + dependencies: + has-value: 0.3.1 + isobject: 3.0.1 + + urix@0.1.0: {} + + use@3.1.1: {} + + util-deprecate@1.0.2: {} + + wrappy@1.0.2: {} diff --git a/pkg/plugin/examples/react-component/src/testReact.tsx b/pkg/plugin/examples/react-component/src/testReact.tsx index d2733fb26..677e13aac 100644 --- a/pkg/plugin/examples/react-component/src/testReact.tsx +++ b/pkg/plugin/examples/react-component/src/testReact.tsx @@ -192,7 +192,7 @@ interface IPluginApi { ); }; - PluginApi.register.route("/plugin/test-react", TestPage); + PluginApi.register.route("/plugins/test-react", TestPage); PluginApi.patch.before("SettingsToolsSection", function 
(props: any) { const { @@ -206,7 +206,7 @@ interface IPluginApi { {props.children} + @@ -232,7 +232,7 @@ interface IPluginApi { + {isNew ? ( + formik.submitForm()} + > + onSaveAndNewClick()}> + + + + ) : ( + + )} + + + ); +}; interface IGalleryList { filterHook?: (filter: ListFilterModel) => ListFilterModel; view?: View; alterQuery?: boolean; + extraOperations?: IItemListOperation[]; } -export const GalleryList: React.FC = ({ - filterHook, - view, - alterQuery, -}) => { - const intl = useIntl(); +function useViewRandom(filter: ListFilterModel, count: number) { const history = useHistory(); - const [isExportDialogOpen, setIsExportDialogOpen] = useState(false); - const [isExportAll, setIsExportAll] = useState(false); - const filterMode = GQL.FilterMode.Galleries; + const viewRandom = useCallback(async () => { + // query for a random scene + if (count === 0) { + return; + } - const otherOperations = [ - { - text: intl.formatMessage({ id: "actions.view_random" }), - onClick: viewRandom, - }, - { - text: intl.formatMessage({ id: "actions.export" }), - onClick: onExport, - isDisplayed: showWhenSelected, - }, - { - text: intl.formatMessage({ id: "actions.export_all" }), - onClick: onExportAll, - }, - ]; + const index = Math.floor(Math.random() * count); + const filterCopy = cloneDeep(filter); + filterCopy.itemsPerPage = 1; + filterCopy.currentPage = index + 1; + const singleResult = await queryFindGalleries(filterCopy); + if (singleResult.data.findGalleries.galleries.length === 1) { + const { id } = singleResult.data.findGalleries.galleries[0]; + // navigate to the image player page + history.push(`/galleries/${id}`); + } + }, [history, filter, count]); - function addKeybinds( - result: GQL.FindGalleriesQueryResult, - filter: ListFilterModel - ) { + return viewRandom; +} + +function useAddKeybinds(filter: ListFilterModel, count: number) { + const viewRandom = useViewRandom(filter, count); + + useEffect(() => { Mousetrap.bind("p r", () => { - viewRandom(result, filter); + 
viewRandom(); }); return () => { Mousetrap.unbind("p r"); }; - } + }, [viewRandom]); +} - async function viewRandom( - result: GQL.FindGalleriesQueryResult, - filter: ListFilterModel - ) { - // query for a random image - if (result.data?.findGalleries) { - const { count } = result.data.findGalleries; +export const FilteredGalleryList = PatchComponent( + "FilteredGalleryList", + (props: IGalleryList) => { + const intl = useIntl(); - const index = Math.floor(Math.random() * count); - const filterCopy = cloneDeep(filter); - filterCopy.itemsPerPage = 1; - filterCopy.currentPage = index + 1; - const singleResult = await queryFindGalleries(filterCopy); - if (singleResult.data.findGalleries.galleries.length === 1) { - const { id } = singleResult.data.findGalleries.galleries[0]; - // navigate to the image player page - history.push(`/galleries/${id}`); - } - } - } + const searchFocus = useFocus(); - async function onExport() { - setIsExportAll(false); - setIsExportDialogOpen(true); - } + const { filterHook, view, alterQuery, extraOperations = [] } = props; - async function onExportAll() { - setIsExportAll(true); - setIsExportDialogOpen(true); - } + // States + const { + showSidebar, + setShowSidebar, + sectionOpen, + setSectionOpen, + loading: sidebarStateLoading, + } = useSidebarState(view); - function renderContent( - result: GQL.FindGalleriesQueryResult, - filter: ListFilterModel, - selectedIds: Set, - onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void - ) { - function maybeRenderGalleryExportDialog() { - if (isExportDialogOpen) { - return ( - setIsExportDialogOpen(false)} - /> - ); - } + const { filterState, queryResult, modalState, listSelect, showEditFilter } = + useFilteredItemList({ + filterStateProps: { + filterMode: GQL.FilterMode.Galleries, + view, + useURL: alterQuery, + }, + queryResultProps: { + useResult: useFindGalleries, + getCount: (r) => r.data?.findGalleries.count ?? 0, + getItems: (r) => r.data?.findGalleries.galleries ?? 
[], + filterHook, + }, + }); + + const { filter, setFilter } = filterState; + + const { effectiveFilter, result, cachedResult, items, totalCount } = + queryResult; + + const { + selectedIds, + selectedItems, + onSelectChange, + onSelectAll, + onSelectNone, + onInvertSelection, + hasSelection, + } = listSelect; + + const { modal, showModal, closeModal } = modalState; + + // Utility hooks + const { setPage, removeCriterion, clearAllCriteria } = useFilterOperations({ + filter, + setFilter, + }); + + useAddKeybinds(effectiveFilter, totalCount); + useFilteredSidebarKeybinds({ + showSidebar, + setShowSidebar, + }); + + useEffect(() => { + Mousetrap.bind("e", () => { + if (hasSelection) { + onEdit?.(); + } + }); + + Mousetrap.bind("d d", () => { + if (hasSelection) { + onDelete?.(); + } + }); + + return () => { + Mousetrap.unbind("e"); + Mousetrap.unbind("d d"); + }; + }); + + const onCloseEditDelete = useCloseEditDelete({ + closeModal, + onSelectNone, + result, + }); + + const viewRandom = useViewRandom(effectiveFilter, totalCount); + + function onExport(all: boolean) { + showModal( + closeModal()} + /> + ); } - function renderGalleries() { - if (!result.data?.findGalleries) return; - - if (filter.displayMode === DisplayMode.Grid) { - return ( - - ); - } - if (filter.displayMode === DisplayMode.List) { - return ( - - ); - } - if (filter.displayMode === DisplayMode.Wall) { - return ( -
-
- {result.data.findGalleries.galleries.map((gallery) => ( - - ))} -
-
- ); - } + function onEdit() { + showModal( + + ); } - return ( - <> - {maybeRenderGalleryExportDialog()} - {renderGalleries()} - - ); - } + function onDelete() { + showModal( + + ); + } - function renderEditDialog( - selectedImages: GQL.SlimGalleryDataFragment[], - onClose: (applied: boolean) => void - ) { - return ; - } + function onGenerate() { + showModal( + closeModal()} + /> + ); + } - function renderDeleteDialog( - selectedImages: GQL.SlimGalleryDataFragment[], - onClose: (confirmed: boolean) => void - ) { - return ( - - ); - } + const convertedExtraOperations: IListFilterOperation[] = + extraOperations.map((o) => ({ + ...o, + isDisplayed: o.isDisplayed + ? () => o.isDisplayed!(result, filter, selectedIds) + : undefined, + onClick: () => { + o.onClick(result, filter, selectedIds); + }, + })); - return ( - - onSelectAll(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.select_none" }), + onClick: () => onSelectNone(), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.invert_selection" }), + onClick: () => onInvertSelection(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.view_random" }), + onClick: viewRandom, + }, + { + text: `${intl.formatMessage({ id: "actions.generate" })}…`, + onClick: onGenerate, + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.export" }), + onClick: () => onExport(false), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.export_all" }), + onClick: () => onExport(true), + }, + ]; + + // render + if (sidebarStateLoading) return null; + + const operations = ( + - - ); -}; + ); + + return ( +
+ {modal} + + + + setShowSidebar(false)}> + setShowSidebar(false)} + count={cachedResult.loading ? undefined : totalCount} + focus={searchFocus} + /> + + setShowSidebar(!showSidebar)} + > + + + showEditFilter(c.criterionOption.type)} + onRemoveCriterion={removeCriterion} + onRemoveAll={clearAllCriteria} + /> + +
+ setFilter(filter.changePage(page))} + /> + +
+ + + + + + {totalCount > filter.itemsPerPage && ( +
+
+ +
+
+ )} +
+
+
+
+ ); + } +); diff --git a/ui/v2.5/src/components/Galleries/GalleryPreviewScrubber.tsx b/ui/v2.5/src/components/Galleries/GalleryPreviewScrubber.tsx index ef47782bf..5c0a07356 100644 --- a/ui/v2.5/src/components/Galleries/GalleryPreviewScrubber.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryPreviewScrubber.tsx @@ -10,6 +10,7 @@ export const GalleryPreviewScrubber: React.FC<{ imageCount: number; onClick?: (imageIndex: number) => void; onPathChanged: React.Dispatch>; + disabled?: boolean; }> = ({ className, previewPath, @@ -17,6 +18,7 @@ export const GalleryPreviewScrubber: React.FC<{ imageCount, onClick, onPathChanged, + disabled, }) => { const [activeIndex, setActiveIndex] = useState(); const debounceSetActiveIndex = useThrottle(setActiveIndex, 50); @@ -48,6 +50,7 @@ export const GalleryPreviewScrubber: React.FC<{ activeIndex={activeIndex} setActiveIndex={(i) => debounceSetActiveIndex(i)} onClick={onScrubberClick} + disabled={disabled} /> ); diff --git a/ui/v2.5/src/components/Galleries/GalleryRecommendationRow.tsx b/ui/v2.5/src/components/Galleries/GalleryRecommendationRow.tsx index ee94d6da2..3df07b643 100644 --- a/ui/v2.5/src/components/Galleries/GalleryRecommendationRow.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryRecommendationRow.tsx @@ -1,12 +1,9 @@ import React from "react"; -import { Link } from "react-router-dom"; import { useFindGalleries } from "src/core/StashService"; -import Slider from "@ant-design/react-slick"; import { GalleryCard } from "./GalleryCard"; import { ListFilterModel } from "src/models/list-filter/filter"; -import { getSlickSliderSettings } from "src/core/recommendations"; -import { RecommendationRow } from "../FrontPage/RecommendationRow"; -import { FormattedMessage } from "react-intl"; +import { PatchComponent } from "src/patch"; +import { FilteredRecommendationRow } from "../FrontPage/FilteredRecommendationRow"; interface IProps { isTouch: boolean; @@ -14,29 +11,21 @@ interface IProps { header: string; } -export const 
GalleryRecommendationRow: React.FC = (props) => { - const result = useFindGalleries(props.filter); - const cardCount = result.data?.findGalleries.count; +export const GalleryRecommendationRow: React.FC = PatchComponent( + "GalleryRecommendationRow", + (props) => { + const result = useFindGalleries(props.filter); + const count = result.data?.findGalleries.count ?? 0; - if (!result.loading && !cardCount) { - return null; - } - - return ( - - - - } - > - {result.loading ? [...Array(props.filter.itemsPerPage)].map((i) => ( @@ -48,7 +37,7 @@ export const GalleryRecommendationRow: React.FC = (props) => { : result.data?.findGalleries.galleries.map((g) => ( ))} - - - ); -}; + + ); + } +); diff --git a/ui/v2.5/src/components/Galleries/GallerySelect.tsx b/ui/v2.5/src/components/Galleries/GallerySelect.tsx index 4cd8825bb..0e02b8cb3 100644 --- a/ui/v2.5/src/components/Galleries/GallerySelect.tsx +++ b/ui/v2.5/src/components/Galleries/GallerySelect.tsx @@ -11,8 +11,9 @@ import * as GQL from "src/core/generated-graphql"; import { queryFindGalleriesForSelect, queryFindGalleriesByIDForSelect, + useGalleryCreate, } from "src/core/StashService"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { useIntl } from "react-intl"; import { defaultMaxOptionsShown } from "src/core/config"; import { ListFilterModel } from "src/models/list-filter/filter"; @@ -70,10 +71,14 @@ const gallerySelectSort = PatchFunction( const _GallerySelect: React.FC< IFilterProps & IFilterValueProps & ExtraGalleryProps > = (props) => { - const { configuration } = React.useContext(ConfigurationContext); + const [createGallery] = useGalleryCreate(); + + const { configuration } = useConfigurationContext(); const intl = useIntl(); const maxOptionsShown = configuration?.ui.maxOptionsShown ?? 
defaultMaxOptionsShown; + const defaultCreatable = + !configuration?.interface.disableDropdownCreate.gallery; const exclude = useMemo(() => props.excludeIds ?? [], [props.excludeIds]); @@ -203,6 +208,42 @@ const _GallerySelect: React.FC< return ; }; + const onCreate = async (name: string) => { + const result = await createGallery({ + variables: { input: { title: name } }, + }); + return { + value: result.data!.galleryCreate!.id, + item: result.data!.galleryCreate!, + message: "Created gallery", + }; + }; + + const getNamedObject = (id: string, name: string): Gallery => { + return { + id, + title: name, + files: [], + folder: null, + }; + }; + + const isValidNewOption = (inputValue: string, options: Gallery[]) => { + if (!inputValue) { + return false; + } + + if ( + options.some((o) => { + return galleryTitle(o).toLowerCase() === inputValue.toLowerCase(); + }) + ) { + return false; + } + + return true; + }; + return ( {...props} @@ -214,12 +255,16 @@ const _GallerySelect: React.FC< props.className )} loadOptions={loadGalleries} + getNamedObject={getNamedObject} + isValidNewOption={isValidNewOption} components={{ Option: GalleryOption, MultiValueLabel: GalleryMultiValueLabel, SingleValue: GalleryValueLabel, }} isMulti={props.isMulti ?? false} + creatable={props.creatable ?? defaultCreatable} + onCreate={onCreate} placeholder={ props.noSelectionString ?? intl.formatMessage( diff --git a/ui/v2.5/src/components/Galleries/GalleryViewer.tsx b/ui/v2.5/src/components/Galleries/GalleryViewer.tsx index 7ebb679fd..f570f9990 100644 --- a/ui/v2.5/src/components/Galleries/GalleryViewer.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryViewer.tsx @@ -67,8 +67,8 @@ export const GalleryViewer: React.FC = ({ galleryId }) => { images.forEach((image, index) => { let imageData = { src: image.paths.thumbnail!, - width: image.visual_files[0].width, - height: image.visual_files[0].height, + width: image.visual_files[0]?.width ?? 0, + height: image.visual_files[0]?.height ?? 
0, tabIndex: index, key: image.id ?? index, loading: "lazy", diff --git a/ui/v2.5/src/components/Galleries/GalleryWallCard.tsx b/ui/v2.5/src/components/Galleries/GalleryWallCard.tsx index c794ddc14..c79000783 100644 --- a/ui/v2.5/src/components/Galleries/GalleryWallCard.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryWallCard.tsx @@ -1,4 +1,5 @@ import React, { useState } from "react"; +import { Form } from "react-bootstrap"; import { useIntl } from "react-intl"; import { Link } from "react-router-dom"; import * as GQL from "src/core/generated-graphql"; @@ -8,6 +9,7 @@ import { useGalleryLightbox } from "src/hooks/Lightbox/hooks"; import { galleryTitle } from "src/core/galleries"; import { RatingSystem } from "../Shared/Rating/RatingSystem"; import { GalleryPreviewScrubber } from "./GalleryPreviewScrubber"; +import { useDragMoveSelect } from "../Shared/GridCard/dragMoveSelect"; import cx from "classnames"; const CLASSNAME = "GalleryWallCard"; @@ -18,6 +20,9 @@ const CLASSNAME_IMG_CONTAIN = `${CLASSNAME}-img-contain`; interface IProps { gallery: GQL.SlimGalleryDataFragment; + selected?: boolean; + onSelectedChanged?: (selected: boolean, shiftKey: boolean) => void; + selecting?: boolean; } type Orientation = "landscape" | "portrait"; @@ -26,7 +31,12 @@ function getOrientation(width: number, height: number): Orientation { return width > height ? 
"landscape" : "portrait"; } -const GalleryWallCard: React.FC = ({ gallery }) => { +const GalleryWallCard: React.FC = ({ + gallery, + selected, + onSelectedChanged, + selecting, +}) => { const intl = useIntl(); const [coverOrientation, setCoverOrientation] = React.useState("landscape"); @@ -34,6 +44,12 @@ const GalleryWallCard: React.FC = ({ gallery }) => { React.useState("landscape"); const showLightbox = useGalleryLightbox(gallery.id, gallery.chapters); + const { dragProps } = useDragMoveSelect({ + selecting: selecting || false, + selected: selected || false, + onSelectedChanged: onSelectedChanged, + }); + const cover = gallery?.paths.cover; function onCoverLoad(e: React.SyntheticEvent) { @@ -58,6 +74,14 @@ const GalleryWallCard: React.FC = ({ gallery }) => { ? [...performerNames.slice(0, -2), performerNames.slice(-2).join(" & ")] : performerNames; + function handleCardClick(event: React.MouseEvent) { + if (selecting && onSelectedChanged) { + onSelectedChanged(!selected, event.shiftKey); + return; + } + showLightboxStart(); + } + async function showLightboxStart() { if (gallery.image_count === 0) { return; @@ -69,15 +93,32 @@ const GalleryWallCard: React.FC = ({ gallery }) => { const imgClassname = imageOrientation !== coverOrientation ? CLASSNAME_IMG_CONTAIN : ""; + let shiftKey = false; + return ( <>
showLightboxStart()} role="button" tabIndex={0} + {...dragProps} > + {onSelectedChanged && ( + onSelectedChanged(!selected, shiftKey)} + onClick={( + event: React.MouseEvent + ) => { + shiftKey = event.shiftKey; + event.stopPropagation(); + }} + /> + )} = ({ gallery }) => {
e.stopPropagation()} + onClick={(e) => { + if (selecting) { + e.preventDefault(); + handleCardClick(e); + } + e.stopPropagation(); + }} > {title && ( = ({ gallery }) => { )}
- {gallery.date && TextUtils.formatDate(intl, gallery.date)} + {gallery.date && TextUtils.formatFuzzyDate(intl, gallery.date)}
diff --git a/ui/v2.5/src/components/Galleries/styles.scss b/ui/v2.5/src/components/Galleries/styles.scss index 12439a94d..b05be7856 100644 --- a/ui/v2.5/src/components/Galleries/styles.scss +++ b/ui/v2.5/src/components/Galleries/styles.scss @@ -17,7 +17,7 @@ order: 1; } - .gallery-studio-image { + .studio-logo { flex: 0 0 25%; order: 2; } @@ -182,6 +182,11 @@ $galleryTabWidth: 450px; width: 100%; } +@media (min-width: 1200px) { + .gallery-container .image-list .filtered-list-toolbar.has-selection { + top: 0; + } +} @media (min-width: 1200px), (max-width: 575px) { .gallery-performers { .performer-card { @@ -204,13 +209,45 @@ $galleryTabWidth: 450px; font-size: 1.3em; height: calc(1.5em + 0.75rem + 2px); } + + .form-group[data-field="urls"] .string-list-input input.form-control { + font-size: 0.85em; + } + + @include media-breakpoint-up(xl) { + .custom-fields-input { + .custom-fields-field { + flex: 0 0 25%; + max-width: 25%; + } + + .custom-fields-value { + flex: 0 0 75%; + max-width: 75%; + } + } + } } -.GalleryWall { +.gallery-cover { + aspect-ratio: 4 / 3; + display: block; + height: auto; + width: 100%; +} + +.gallery-cover img { + height: auto; + max-height: 100%; + max-width: 100%; + object-fit: contain; + width: auto; +} + +div.GalleryWall { display: flex; flex-wrap: wrap; margin: 0 auto; - width: 96vw; /* Prevents last row from consuming all space and stretching images to oblivion */ &::after { @@ -249,28 +286,6 @@ $galleryTabWidth: 450px; z-index: 1; } - @mixin galleryWidth($width) { - height: math.div($width, 3) * 2; - - &-landscape { - width: $width; - } - - &-portrait { - width: math.div($width, 2); - } - } - - @media (min-width: 576px) { - @include galleryWidth(96vw); - } - @media (min-width: 768px) { - @include galleryWidth(48vw); - } - @media (min-width: 1200px) { - @include galleryWidth(32vw); - } - &-img { height: 100%; object-fit: cover; @@ -355,6 +370,62 @@ $galleryTabWidth: 450px; } } +div.GalleryWall { + @mixin galleryWidth($width) { + height: 
math.div($width, 3) * 2; + + &-landscape { + width: $width; + } + + &-portrait { + width: math.div($width, 2); + } + } + + .GalleryWallCard { + @media (min-width: 576px) { + @include galleryWidth(96vw); + } + } + + &.zoom-0 .GalleryWallCard { + @media (min-width: 768px) { + @include galleryWidth(16vw); + } + @media (min-width: 1200px) { + @include galleryWidth(10vw); + } + } + + &.zoom-1 .GalleryWallCard { + @media (min-width: 768px) { + @include galleryWidth(24vw); + } + @media (min-width: 1200px) { + @include galleryWidth(16vw); + } + } + + &.zoom-2 .GalleryWallCard { + @media (min-width: 768px) { + @include galleryWidth(32vw); + } + @media (min-width: 1200px) { + @include galleryWidth(24vw); + } + } + + &.zoom-3 .GalleryWallCard { + @media (min-width: 768px) { + @include galleryWidth(48vw); + } + @media (min-width: 1200px) { + @include galleryWidth(32vw); + } + } +} + .gallery-file-card.card { margin: 0; padding: 0; diff --git a/ui/v2.5/src/components/Groups/EditGroupsDialog.tsx b/ui/v2.5/src/components/Groups/EditGroupsDialog.tsx index efd14e757..99c482aba 100644 --- a/ui/v2.5/src/components/Groups/EditGroupsDialog.tsx +++ b/ui/v2.5/src/components/Groups/EditGroupsDialog.tsx @@ -1,34 +1,34 @@ -import React, { useEffect, useState } from "react"; -import { Form, Col, Row } from "react-bootstrap"; -import { FormattedMessage, useIntl } from "react-intl"; +import React, { useEffect, useMemo, useState } from "react"; +import { Form } from "react-bootstrap"; +import { useIntl } from "react-intl"; import { useBulkGroupUpdate } from "src/core/StashService"; import * as GQL from "src/core/generated-graphql"; -import { ModalComponent } from "../Shared/Modal"; import { StudioSelect } from "../Shared/Select"; +import { ModalComponent } from "../Shared/Modal"; +import { MultiSet } from "../Shared/MultiSet"; import { useToast } from "src/hooks/Toast"; -import * as FormUtils from "src/utils/form"; import { RatingSystem } from "../Shared/Rating/RatingSystem"; import { - 
getAggregateIds, - getAggregateInputIDs, getAggregateInputValue, - getAggregateRating, - getAggregateStudioId, + getAggregateStateObject, getAggregateTagIds, + getAggregateStudioId, + getAggregateIds, } from "src/utils/bulkUpdate"; import { faPencilAlt } from "@fortawesome/free-solid-svg-icons"; -import { isEqual } from "lodash-es"; -import { MultiSet } from "../Shared/MultiSet"; -import { ContainingGroupsMultiSet } from "./ContainingGroupsMultiSet"; +import { BulkUpdateFormGroup, BulkUpdateTextInput } from "../Shared/BulkUpdate"; +import { BulkUpdateDateInput } from "../Shared/DateInput"; import { IRelatedGroupEntry } from "./GroupDetails/RelatedGroupTable"; +import { ContainingGroupsMultiSet } from "./ContainingGroupsMultiSet"; +import { getDateError } from "src/utils/yup"; interface IListOperationProps { - selected: GQL.GroupDataFragment[]; + selected: GQL.ListGroupDataFragment[]; onClose: (applied: boolean) => void; } export function getAggregateContainingGroups( - state: Pick[] + state: Pick[] ) { const sortedLists: IRelatedGroupEntry[][] = state.map((o) => o.containing_groups @@ -67,50 +67,86 @@ function getAggregateContainingGroupInput( return undefined; } +const groupFields = ["rating100", "synopsis", "director", "date"]; + export const EditGroupsDialog: React.FC = ( props: IListOperationProps ) => { const intl = useIntl(); const Toast = useToast(); - const [rating100, setRating] = useState(); - const [studioId, setStudioId] = useState(); - const [director, setDirector] = useState(); - const [tagMode, setTagMode] = React.useState( - GQL.BulkUpdateIdMode.Add - ); - const [tagIds, setTagIds] = useState(); - const [existingTagIds, setExistingTagIds] = useState(); + const [updateInput, setUpdateInput] = useState({ + ids: props.selected.map((group) => { + return group.id; + }), + }); + const [tagIds, setTagIds] = useState({ + mode: GQL.BulkUpdateIdMode.Add, + }); const [containingGroupsMode, setGroupMode] = React.useState(GQL.BulkUpdateIdMode.Add); const 
[containingGroups, setGroups] = useState(); - const [existingContainingGroups, setExistingContainingGroups] = - useState(); - const [updateGroups] = useBulkGroupUpdate(getGroupInput()); + const unsetDisabled = props.selected.length < 2; + const [updateGroups] = useBulkGroupUpdate(); + + const [dateError, setDateError] = useState(); + + // Network state const [isUpdating, setIsUpdating] = useState(false); - function getGroupInput(): GQL.BulkGroupUpdateInput { - const aggregateRating = getAggregateRating(props.selected); - const aggregateStudioId = getAggregateStudioId(props.selected); - const aggregateTagIds = getAggregateTagIds(props.selected); + const aggregateState = useMemo(() => { + const updateState: Partial = {}; + const state = props.selected; + updateState.studio_id = getAggregateStudioId(props.selected); + const updateTagIds = getAggregateTagIds(props.selected); const aggregateGroups = getAggregateContainingGroups(props.selected); + let first = true; + state.forEach((group: GQL.ListGroupDataFragment) => { + getAggregateStateObject(updateState, group, groupFields, first); + first = false; + }); + + return { + state: updateState, + tagIds: updateTagIds, + containingGroups: aggregateGroups, + }; + }, [props.selected]); + + // update initial state from aggregate + useEffect(() => { + setUpdateInput((current) => ({ ...current, ...aggregateState.state })); + }, [aggregateState]); + + useEffect(() => { + setDateError(getDateError(updateInput.date ?? 
"", intl)); + }, [updateInput.date, intl]); + + function setUpdateField(input: Partial) { + setUpdateInput((current) => ({ ...current, ...input })); + } + + function getGroupInput(): GQL.BulkGroupUpdateInput { const groupInput: GQL.BulkGroupUpdateInput = { - ids: props.selected.map((group) => group.id), - director, + ...updateInput, + tag_ids: tagIds, }; - groupInput.rating100 = getAggregateInputValue(rating100, aggregateRating); - groupInput.studio_id = getAggregateInputValue(studioId, aggregateStudioId); - groupInput.tag_ids = getAggregateInputIDs(tagMode, tagIds, aggregateTagIds); + // we don't have unset functionality for the rating star control + // so need to determine if we are setting a rating or not + groupInput.rating100 = getAggregateInputValue( + updateInput.rating100, + aggregateState.state.rating100 + ); groupInput.containing_groups = getAggregateContainingGroupInput( containingGroupsMode, containingGroups, - aggregateGroups + aggregateState.containingGroups ); return groupInput; @@ -119,13 +155,11 @@ export const EditGroupsDialog: React.FC = ( async function onSave() { setIsUpdating(true); try { - await updateGroups(); + await updateGroups({ variables: { input: getGroupInput() } }); Toast.success( intl.formatMessage( { id: "toast.updated_entity" }, - { - entity: intl.formatMessage({ id: "groups" }).toLocaleLowerCase(), - } + { entity: intl.formatMessage({ id: "groups" }).toLocaleLowerCase() } ) ); props.onClose(true); @@ -135,67 +169,24 @@ export const EditGroupsDialog: React.FC = ( setIsUpdating(false); } - useEffect(() => { - const state = props.selected; - let updateRating: number | undefined; - let updateStudioId: string | undefined; - let updateTagIds: string[] = []; - let updateContainingGroupIds: IRelatedGroupEntry[] = []; - let updateDirector: string | undefined; - let first = true; - - state.forEach((group: GQL.GroupDataFragment) => { - const groupTagIDs = (group.tags ?? 
[]).map((p) => p.id).sort(); - const groupContainingGroupIDs = (group.containing_groups ?? []).sort( - (a, b) => a.group.id.localeCompare(b.group.id) - ); - - if (first) { - first = false; - updateRating = group.rating100 ?? undefined; - updateStudioId = group.studio?.id ?? undefined; - updateTagIds = groupTagIDs; - updateContainingGroupIds = groupContainingGroupIDs; - updateDirector = group.director ?? undefined; - } else { - if (group.rating100 !== updateRating) { - updateRating = undefined; - } - if (group.studio?.id !== updateStudioId) { - updateStudioId = undefined; - } - if (group.director !== updateDirector) { - updateDirector = undefined; - } - if (!isEqual(groupTagIDs, updateTagIds)) { - updateTagIds = []; - } - if (!isEqual(groupContainingGroupIDs, updateContainingGroupIds)) { - updateTagIds = []; - } - } - }); - - setRating(updateRating); - setStudioId(updateStudioId); - setExistingTagIds(updateTagIds); - setExistingContainingGroups(updateContainingGroupIds); - setDirector(updateDirector); - }, [props.selected]); - function render() { return ( props.onClose(false), text: intl.formatMessage({ id: "actions.cancel" }), @@ -204,74 +195,90 @@ export const EditGroupsDialog: React.FC = ( isRunning={isUpdating} >
- - {FormUtils.renderLabel({ - title: intl.formatMessage({ id: "rating" }), - })} - - setRating(value ?? undefined)} - disabled={isUpdating} - /> - - - - {FormUtils.renderLabel({ - title: intl.formatMessage({ id: "studio" }), - })} - - - setStudioId(items.length > 0 ? items[0]?.id : undefined) - } - ids={studioId ? [studioId] : []} - isDisabled={isUpdating} - menuPortalTarget={document.body} - /> - - - - - - + + + setUpdateField({ rating100: value ?? undefined }) + } + disabled={isUpdating} + /> + + + + setUpdateField({ date: newValue })} + unsetDisabled={unsetDisabled} + error={dateError} + /> + + + + + setUpdateField({ director: newValue }) + } + unsetDisabled={unsetDisabled} + /> + + + + setUpdateField({ + studio_id: items.length > 0 ? items[0]?.id : undefined, + }) + } + ids={updateInput.studio_id ? [updateInput.studio_id] : []} + isDisabled={isUpdating} + menuPortalTarget={document.body} + /> + + + setGroups(v)} onSetMode={(newMode) => setGroupMode(newMode)} - existingValue={existingContainingGroups ?? []} + existingValue={aggregateState.containingGroups ?? []} value={containingGroups ?? []} mode={containingGroupsMode} menuPortalTarget={document.body} /> - - - - - - setDirector(event.currentTarget.value)} - placeholder={intl.formatMessage({ id: "director" })} - /> - - - - - + + + setTagIds(itemIDs)} - onSetMode={(newMode) => setTagMode(newMode)} - existingIds={existingTagIds ?? []} - ids={tagIds ?? []} - mode={tagMode} + onUpdate={(itemIDs) => { + setTagIds((c) => ({ ...c, ids: itemIDs })); + }} + onSetMode={(newMode) => { + setTagIds((c) => ({ ...c, mode: newMode })); + }} + ids={tagIds.ids ?? []} + existingIds={aggregateState.tagIds} + mode={tagIds.mode} menuPortalTarget={document.body} /> - + + + + + setUpdateField({ synopsis: newValue }) + } + unsetDisabled={unsetDisabled} + as="textarea" + /> +
); diff --git a/ui/v2.5/src/components/Groups/GroupCard.tsx b/ui/v2.5/src/components/Groups/GroupCard.tsx index f1d6089d0..5bc1b5d7f 100644 --- a/ui/v2.5/src/components/Groups/GroupCard.tsx +++ b/ui/v2.5/src/components/Groups/GroupCard.tsx @@ -1,6 +1,7 @@ import React, { useMemo } from "react"; import { Button, ButtonGroup } from "react-bootstrap"; import * as GQL from "src/core/generated-graphql"; +import { PatchComponent } from "src/patch"; import { GridCard } from "../Shared/GridCard/GridCard"; import { HoverPopover } from "../Shared/HoverPopover"; import { Icon } from "../Shared/Icon"; @@ -10,6 +11,7 @@ import { FormattedMessage } from "react-intl"; import { RatingBanner } from "../Shared/RatingBanner"; import { faPlayCircle, faTag } from "@fortawesome/free-solid-svg-icons"; import { RelatedGroupPopoverButton } from "./RelatedGroupPopover"; +import { OCounterButton } from "../Shared/CountButton"; const Description: React.FC<{ sceneNumber?: number; @@ -35,7 +37,7 @@ const Description: React.FC<{ }; interface IProps { - group: GQL.GroupDataFragment; + group: GQL.ListGroupDataFragment; cardWidth?: number; sceneNumber?: number; selecting?: boolean; @@ -46,130 +48,140 @@ interface IProps { onMove?: (srcIds: string[], targetId: string, after: boolean) => void; } -export const GroupCard: React.FC = ({ - group, - sceneNumber, - cardWidth, - selecting, - selected, - zoomIndex, - onSelectedChanged, - fromGroupId, - onMove, -}) => { - const groupDescription = useMemo(() => { - if (!fromGroupId) { - return undefined; - } +export const GroupCard: React.FC = PatchComponent( + "GroupCard", + ({ + group, + sceneNumber, + cardWidth, + selecting, + selected, + zoomIndex, + onSelectedChanged, + fromGroupId, + onMove, + }) => { + const groupDescription = useMemo(() => { + if (!fromGroupId) { + return undefined; + } - const containingGroup = group.containing_groups.find( - (cg) => cg.group.id === fromGroupId - ); + const containingGroup = group.containing_groups.find( + (cg) => 
cg.group.id === fromGroupId + ); - return containingGroup?.description ?? undefined; - }, [fromGroupId, group.containing_groups]); + return containingGroup?.description ?? undefined; + }, [fromGroupId, group.containing_groups]); - function maybeRenderScenesPopoverButton() { - if (group.scenes.length === 0) return; + function maybeRenderScenesPopoverButton() { + if (group.scenes.length === 0) return; - const popoverContent = group.scenes.map((scene) => ( - - )); + const popoverContent = group.scenes.map((scene) => ( + + )); - return ( - - - - ); - } - - function maybeRenderTagPopoverButton() { - if (group.tags.length <= 0) return; - - const popoverContent = group.tags.map((tag) => ( - - )); - - return ( - - - - ); - } - - function maybeRenderPopoverButtonGroup() { - if ( - sceneNumber || - groupDescription || - group.scenes.length > 0 || - group.tags.length > 0 || - group.containing_groups.length > 0 || - group.sub_group_count > 0 - ) { return ( - <> - -
- - {maybeRenderScenesPopoverButton()} - {maybeRenderTagPopoverButton()} - {(group.sub_group_count > 0 || - group.containing_groups.length > 0) && ( - - )} - - + + + ); } - } - return ( - - {group.name - - + function maybeRenderTagPopoverButton() { + if (group.tags.length <= 0) return; + + const popoverContent = group.tags.map((tag) => ( + + )); + + return ( + + + + ); + } + + function maybeRenderOCounter() { + if (!group.o_counter) return; + + return ; + } + + function maybeRenderPopoverButtonGroup() { + if ( + sceneNumber || + groupDescription || + group.scenes.length > 0 || + group.tags.length > 0 || + group.containing_groups.length > 0 || + group.sub_group_count > 0 + ) { + return ( + <> + +
+ + {maybeRenderScenesPopoverButton()} + {maybeRenderTagPopoverButton()} + {(group.sub_group_count > 0 || + group.containing_groups.length > 0) && ( + + )} + {maybeRenderOCounter()} + + + ); } - details={ -
- {group.date} - -
- } - selected={selected} - selecting={selecting} - onSelectedChanged={onSelectedChanged} - popovers={maybeRenderPopoverButtonGroup()} - /> - ); -}; + } + + return ( + + {group.name + + + } + details={ +
+ {group.date} + +
+ } + selected={selected} + selecting={selecting} + onSelectedChanged={onSelectedChanged} + popovers={maybeRenderPopoverButtonGroup()} + /> + ); + } +); diff --git a/ui/v2.5/src/components/Groups/GroupCardGrid.tsx b/ui/v2.5/src/components/Groups/GroupCardGrid.tsx index b73919e64..e3b70c75f 100644 --- a/ui/v2.5/src/components/Groups/GroupCardGrid.tsx +++ b/ui/v2.5/src/components/Groups/GroupCardGrid.tsx @@ -5,9 +5,10 @@ import { useCardWidth, useContainerDimensions, } from "../Shared/GridCard/GridCard"; +import { PatchComponent } from "src/patch"; interface IGroupCardGrid { - groups: GQL.GroupDataFragment[]; + groups: GQL.ListGroupDataFragment[]; selectedIds: Set; zoomIndex: number; onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; @@ -17,34 +18,30 @@ interface IGroupCardGrid { const zoomWidths = [210, 250, 300, 375]; -export const GroupCardGrid: React.FC = ({ - groups, - selectedIds, - zoomIndex, - onSelectChange, - fromGroupId, - onMove, -}) => { - const [componentRef, { width: containerWidth }] = useContainerDimensions(); - const cardWidth = useCardWidth(containerWidth, zoomIndex, zoomWidths); +export const GroupCardGrid: React.FC = PatchComponent( + "GroupCardGrid", + ({ groups, selectedIds, zoomIndex, onSelectChange, fromGroupId, onMove }) => { + const [componentRef, { width: containerWidth }] = useContainerDimensions(); + const cardWidth = useCardWidth(containerWidth, zoomIndex, zoomWidths); - return ( -
- {groups.map((p) => ( - 0} - selected={selectedIds.has(p.id)} - onSelectedChanged={(selected: boolean, shiftKey: boolean) => - onSelectChange(p.id, selected, shiftKey) - } - fromGroupId={fromGroupId} - onMove={onMove} - /> - ))} -
- ); -}; + return ( +
+ {groups.map((p) => ( + 0} + selected={selectedIds.has(p.id)} + onSelectedChanged={(selected: boolean, shiftKey: boolean) => + onSelectChange(p.id, selected, shiftKey) + } + fromGroupId={fromGroupId} + onMove={onMove} + /> + ))} +
+ ); + } +); diff --git a/ui/v2.5/src/components/Groups/GroupDetails/AddGroupsDialog.tsx b/ui/v2.5/src/components/Groups/GroupDetails/AddGroupsDialog.tsx index b89356810..79c6075c0 100644 --- a/ui/v2.5/src/components/Groups/GroupDetails/AddGroupsDialog.tsx +++ b/ui/v2.5/src/components/Groups/GroupDetails/AddGroupsDialog.tsx @@ -114,6 +114,7 @@ export const AddSubGroupsDialog: React.FC = ( onUpdate={(input) => setEntries(input)} excludeIDs={excludeIDs} filterHook={filterHook} + menuPortalTarget={document.body} /> diff --git a/ui/v2.5/src/components/Groups/GroupDetails/Group.tsx b/ui/v2.5/src/components/Groups/GroupDetails/Group.tsx index bd58a6682..b2b3d8176 100644 --- a/ui/v2.5/src/components/Groups/GroupDetails/Group.tsx +++ b/ui/v2.5/src/components/Groups/GroupDetails/Group.tsx @@ -23,7 +23,7 @@ import { import { GroupEditPanel } from "./GroupEditPanel"; import { faRefresh, faTrashAlt } from "@fortawesome/free-solid-svg-icons"; import { RatingSystem } from "src/components/Shared/Rating/RatingSystem"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { DetailImage } from "src/components/Shared/DetailImage"; import { useRatingKeybinds } from "src/hooks/keybinds"; import { useLoadStickyHeader } from "src/hooks/detailsPanel"; @@ -43,6 +43,7 @@ import { Button, Tab, Tabs } from "react-bootstrap"; import { GroupSubGroupsPanel } from "./GroupSubGroupsPanel"; import { GroupPerformersPanel } from "./GroupPerformersPanel"; import { Icon } from "src/components/Shared/Icon"; +import { goBackOrReplace } from "src/utils/history"; const validTabs = ["default", "scenes", "performers", "subgroups"] as const; type TabKey = (typeof validTabs)[number]; @@ -145,7 +146,7 @@ const GroupPage: React.FC = ({ group, tabKey }) => { const Toast = useToast(); // Configuration settings - const { configuration } = React.useContext(ConfigurationContext); + const { configuration } = useConfigurationContext(); const 
uiConfig = configuration?.ui; const enableBackgroundImage = uiConfig?.enableMovieBackgroundImage ?? false; const compactExpandedDetails = uiConfig?.compactExpandedDetails ?? false; @@ -276,7 +277,7 @@ const GroupPage: React.FC = ({ group, tabKey }) => { return; } - history.goBack(); + goBackOrReplace(history, "/groups"); } function toggleEditing(value?: boolean) { diff --git a/ui/v2.5/src/components/Groups/GroupDetails/GroupCreate.tsx b/ui/v2.5/src/components/Groups/GroupDetails/GroupCreate.tsx index 9dd3e22b9..5026d5b6e 100644 --- a/ui/v2.5/src/components/Groups/GroupDetails/GroupCreate.tsx +++ b/ui/v2.5/src/components/Groups/GroupDetails/GroupCreate.tsx @@ -25,12 +25,14 @@ const GroupCreate: React.FC = () => { const [createGroup] = useGroupCreate(); - async function onSave(input: GQL.GroupCreateInput) { + async function onSave(input: GQL.GroupCreateInput, andNew?: boolean) { const result = await createGroup({ variables: { input }, }); if (result.data?.groupCreate?.id) { - history.push(`/groups/${result.data.groupCreate.id}`); + if (!andNew) { + history.push(`/groups/${result.data.groupCreate.id}`); + } Toast.success( intl.formatMessage( { id: "toast.created_entity" }, diff --git a/ui/v2.5/src/components/Groups/GroupDetails/GroupDetailsPanel.tsx b/ui/v2.5/src/components/Groups/GroupDetails/GroupDetailsPanel.tsx index d93b06466..8ae4b16a9 100644 --- a/ui/v2.5/src/components/Groups/GroupDetails/GroupDetailsPanel.tsx +++ b/ui/v2.5/src/components/Groups/GroupDetails/GroupDetailsPanel.tsx @@ -6,6 +6,7 @@ import { DetailItem } from "src/components/Shared/DetailItem"; import { Link } from "react-router-dom"; import { DirectorLink } from "src/components/Shared/Link"; import { GroupLink, TagLink } from "src/components/Shared/TagLink"; +import { CustomFields } from "src/components/Shared/CustomFields"; interface IGroupDescription { group: GQL.SlimGroupDataFragment; @@ -65,7 +66,7 @@ export const GroupDetailsPanel: React.FC = ({ /> = ({ fullWidth={fullWidth} /> )} + ); }; 
diff --git a/ui/v2.5/src/components/Groups/GroupDetails/GroupEditPanel.tsx b/ui/v2.5/src/components/Groups/GroupDetails/GroupEditPanel.tsx index 0b94baf27..6401738fa 100644 --- a/ui/v2.5/src/components/Groups/GroupDetails/GroupEditPanel.tsx +++ b/ui/v2.5/src/components/Groups/GroupDetails/GroupEditPanel.tsx @@ -28,10 +28,15 @@ import { Studio, StudioSelect } from "src/components/Studios/StudioSelect"; import { useTagsEdit } from "src/hooks/tagsEdit"; import { Group } from "src/components/Groups/GroupSelect"; import { RelatedGroupTable, IRelatedGroupEntry } from "./RelatedGroupTable"; +import { + CustomFieldsInput, + formatCustomFieldInput, +} from "src/components/Shared/CustomFields"; +import { cloneDeep } from "@apollo/client/utilities"; interface IGroupEditPanel { group: Partial; - onSubmit: (group: GQL.GroupCreateInput) => Promise; + onSubmit: (group: GQL.GroupCreateInput, andNew?: boolean) => Promise; onCancel: () => void; onDelete: () => void; setFrontImage: (image?: string | null) => void; @@ -84,6 +89,7 @@ export const GroupEditPanel: React.FC = ({ synopsis: yup.string().ensure(), front_image: yup.string().nullable().optional(), back_image: yup.string().nullable().optional(), + custom_fields: yup.object().required().defined(), }); const initialValues = { @@ -99,15 +105,26 @@ export const GroupEditPanel: React.FC = ({ director: group?.director ?? "", urls: group?.urls ?? [], synopsis: group?.synopsis ?? "", + custom_fields: cloneDeep(group?.custom_fields ?? 
{}), }; type InputValues = yup.InferType; + const [customFieldsError, setCustomFieldsError] = useState(); + + function submit(values: InputValues) { + const input = { + ...schema.cast(values), + custom_fields: formatCustomFieldInput(isNew, values.custom_fields), + }; + onSave(input); + } + const formik = useFormik({ initialValues, enableReinitialize: true, validate: yupFormikValidate(schema), - onSubmit: (values) => onSave(schema.cast(values)), + onSubmit: submit, }); const { tags, updateTagsStateFromScraper, tagsControl } = useTagsEdit( @@ -208,10 +225,10 @@ export const GroupEditPanel: React.FC = ({ } } - async function onSave(input: InputValues) { + async function onSave(input: InputValues, andNew?: boolean) { setIsLoading(true); try { - await onSubmit(input); + await onSubmit(input, andNew); formik.resetForm(); } catch (e) { Toast.error(e); @@ -219,6 +236,14 @@ export const GroupEditPanel: React.FC = ({ setIsLoading(false); } + async function onSaveAndNewClick() { + const input = { + ...schema.cast(formik.values), + custom_fields: formatCustomFieldInput(isNew, formik.values.custom_fields), + }; + onSave(input, true); + } + async function onScrapeGroupURL(url: string) { if (!url) return; setIsLoading(true); @@ -453,6 +478,13 @@ export const GroupEditPanel: React.FC = ({ {renderURLListField("urls", onScrapeGroupURL, urlScrapable)} {renderInputField("synopsis", "textarea")} {renderTagsField()} + + formik.setFieldValue("custom_fields", v)} + error={customFieldsError} + setError={(e) => setCustomFieldsError(e)} + /> = ({ isEditing onToggleEdit={onCancel} onSave={formik.handleSubmit} - saveDisabled={(!isNew && !formik.dirty) || !isEqual(formik.errors, {})} + onSaveAndNew={isNew ? 
onSaveAndNewClick : undefined} + saveDisabled={ + (!isNew && !formik.dirty) || + !isEqual(formik.errors, {}) || + customFieldsError !== undefined + } onImageChange={onFrontImageChange} onImageChangeURL={onFrontImageLoad} onClearImage={() => onFrontImageLoad(null)} diff --git a/ui/v2.5/src/components/Groups/GroupDetails/GroupPerformersPanel.tsx b/ui/v2.5/src/components/Groups/GroupDetails/GroupPerformersPanel.tsx index 057b99f2a..3ec78084a 100644 --- a/ui/v2.5/src/components/Groups/GroupDetails/GroupPerformersPanel.tsx +++ b/ui/v2.5/src/components/Groups/GroupDetails/GroupPerformersPanel.tsx @@ -1,7 +1,7 @@ import React from "react"; import * as GQL from "src/core/generated-graphql"; import { useGroupFilterHook } from "src/core/groups"; -import { PerformerList } from "src/components/Performers/PerformerList"; +import { FilteredPerformerList } from "src/components/Performers/PerformerList"; import { View } from "src/components/List/views"; interface IGroupPerformersPanel { @@ -18,7 +18,7 @@ export const GroupPerformersPanel: React.FC = ({ const filterHook = useGroupFilterHook(group, showChildGroupContent); return ( - = ({ setNewObject: setNewStudio, }); - const { tags, newTags, scrapedTagsRow } = useScrapedTags( + const { tags, newTags, scrapedTagsRow, linkDialog } = useScrapedTags( groupTags, scraped.tags ); @@ -149,37 +149,44 @@ export const GroupScrapeDialog: React.FC = ({ return ( <> setName(value)} /> setAliases(value)} /> setDuration(value)} /> setDate(value)} /> setDirector(value)} /> setSynopsis(value)} /> setStudio(value)} @@ -187,18 +194,21 @@ export const GroupScrapeDialog: React.FC = ({ onCreateNew={createNewStudio} /> setURLs(value)} /> {scrapedTagsRow} setFrontImage(value)} /> = ({ ); } + if (linkDialog) { + return linkDialog; + } + return ( { onClose(apply ? 
makeNewScrapedItem() : undefined); }} - /> + > + {renderScrapeRows()} + ); }; diff --git a/ui/v2.5/src/components/Groups/GroupDetails/GroupSubGroupsPanel.tsx b/ui/v2.5/src/components/Groups/GroupDetails/GroupSubGroupsPanel.tsx index a2bb26e95..6a11f7004 100644 --- a/ui/v2.5/src/components/Groups/GroupDetails/GroupSubGroupsPanel.tsx +++ b/ui/v2.5/src/components/Groups/GroupDetails/GroupSubGroupsPanel.tsx @@ -1,6 +1,6 @@ -import React, { useMemo } from "react"; +import React from "react"; import * as GQL from "src/core/generated-graphql"; -import { GroupList } from "../GroupList"; +import { FilteredGroupList } from "../GroupList"; import { ListFilterModel } from "src/models/list-filter/filter"; import { ContainingGroupsCriterionOption, @@ -10,15 +10,7 @@ import { useRemoveSubGroups, useReorderSubGroupsMutation, } from "src/core/StashService"; -import { ButtonToolbar } from "react-bootstrap"; -import { ListOperationButtons } from "src/components/List/ListOperationButtons"; -import { useListContext } from "src/components/List/ListProvider"; -import { - PageSizeSelector, - SearchTermInput, -} from "src/components/List/ListFilter"; -import { useFilter } from "src/components/List/FilterProvider"; -import { IFilteredListToolbar } from "src/components/List/FilteredListToolbar"; +import { IItemListOperation } from "src/components/List/FilteredListToolbar"; import { showWhenNoneSelected, showWhenSelected, @@ -28,6 +20,8 @@ import { useIntl } from "react-intl"; import { useToast } from "src/hooks/Toast"; import { useModal } from "src/hooks/modal"; import { AddSubGroupsDialog } from "./AddGroupsDialog"; +import { PatchComponent } from "src/patch"; +import { View } from "src/components/List/views"; const useContainingGroupFilterHook = ( group: Pick, @@ -67,138 +61,117 @@ const useContainingGroupFilterHook = ( }; }; -const Toolbar: React.FC = ({ - onEdit, - onDelete, - operations, -}) => { - const { getSelected, onSelectAll, onSelectNone } = useListContext(); - const { filter, 
setFilter } = useFilter(); - - return ( - -
- -
- setFilter(filter.setPageSize(size))} - /> - 0} - otherOperations={operations} - onEdit={onEdit} - onDelete={onDelete} - /> -
- ); -}; - interface IGroupSubGroupsPanel { active: boolean; group: GQL.GroupDataFragment; + extraOperations?: IItemListOperation[]; } -export const GroupSubGroupsPanel: React.FC = ({ - active, - group, -}) => { - const intl = useIntl(); - const Toast = useToast(); - const { modal, showModal, closeModal } = useModal(); +const defaultFilter = (() => { + const sortBy = "sub_group_order"; + const ret = new ListFilterModel(GQL.FilterMode.Groups, undefined, { + defaultSortBy: sortBy, + }); - const [reorderSubGroups] = useReorderSubGroupsMutation(); - const mutateRemoveSubGroups = useRemoveSubGroups(); + // unset the sort by so that its not included in the URL + ret.sortBy = undefined; - const filterHook = useContainingGroupFilterHook(group); + return ret; +})(); - const defaultFilter = useMemo(() => { - const sortBy = "sub_group_order"; - const ret = new ListFilterModel(GQL.FilterMode.Groups, undefined, { - defaultSortBy: sortBy, - }); +export const GroupSubGroupsPanel: React.FC = + PatchComponent( + "GroupSubGroupsPanel", + ({ active, group, extraOperations = [] }) => { + const intl = useIntl(); + const Toast = useToast(); + const { modal, showModal, closeModal } = useModal(); - // unset the sort by so that its not included in the URL - ret.sortBy = undefined; + const [reorderSubGroups] = useReorderSubGroupsMutation(); + const mutateRemoveSubGroups = useRemoveSubGroups(); - return ret; - }, []); + const filterHook = useContainingGroupFilterHook(group); - async function removeSubGroups( - result: GQL.FindGroupsQueryResult, - filter: ListFilterModel, - selectedIds: Set - ) { - try { - await mutateRemoveSubGroups(group.id, Array.from(selectedIds.values())); + async function removeSubGroups( + result: GQL.FindGroupsQueryResult, + filter: ListFilterModel, + selectedIds: Set + ) { + try { + await mutateRemoveSubGroups( + group.id, + Array.from(selectedIds.values()) + ); - Toast.success( - intl.formatMessage( - { id: "toast.removed_entity" }, - { - count: selectedIds.size, - 
singularEntity: intl.formatMessage({ id: "group" }), - pluralEntity: intl.formatMessage({ id: "groups" }), - } - ) - ); - } catch (e) { - Toast.error(e); - } - } + Toast.success( + intl.formatMessage( + { id: "toast.removed_entity" }, + { + count: selectedIds.size, + singularEntity: intl.formatMessage({ id: "group" }), + pluralEntity: intl.formatMessage({ id: "groups" }), + } + ) + ); + } catch (e) { + Toast.error(e); + } + } - async function onAddSubGroups() { - showModal( - - ); - } + async function onAddSubGroups() { + showModal( + + ); + } - const otherOperations = [ - { - text: intl.formatMessage({ id: "actions.add_sub_groups" }), - onClick: onAddSubGroups, - isDisplayed: showWhenNoneSelected, - postRefetch: true, - icon: faPlus, - buttonVariant: "secondary", - }, - { - text: intl.formatMessage({ id: "actions.remove_from_containing_group" }), - onClick: removeSubGroups, - isDisplayed: showWhenSelected, - postRefetch: true, - icon: faMinus, - buttonVariant: "danger", - }, - ]; - - function onMove(srcIds: string[], targetId: string, after: boolean) { - reorderSubGroups({ - variables: { - input: { - group_id: group.id, - sub_group_ids: srcIds, - insert_at_id: targetId, - insert_after: after, + const otherOperations = [ + ...extraOperations, + { + text: intl.formatMessage({ id: "actions.add_sub_groups" }), + onClick: onAddSubGroups, + isDisplayed: showWhenNoneSelected, + postRefetch: true, + icon: faPlus, + buttonVariant: "secondary", }, - }, - }); - } + { + text: intl.formatMessage({ + id: "actions.remove_from_containing_group", + }), + onClick: removeSubGroups, + isDisplayed: showWhenSelected, + postRefetch: true, + icon: faMinus, + buttonVariant: "danger", + }, + ]; - return ( - <> - {modal} - } - /> - + function onMove(srcIds: string[], targetId: string, after: boolean) { + reorderSubGroups({ + variables: { + input: { + group_id: group.id, + sub_group_ids: srcIds, + insert_at_id: targetId, + insert_after: after, + }, + }, + }); + } + + return ( + <> + {modal} 
+ + + ); + } ); -}; diff --git a/ui/v2.5/src/components/Groups/GroupList.tsx b/ui/v2.5/src/components/Groups/GroupList.tsx index 707b6fa4b..69961f783 100644 --- a/ui/v2.5/src/components/Groups/GroupList.tsx +++ b/ui/v2.5/src/components/Groups/GroupList.tsx @@ -1,5 +1,5 @@ -import React, { PropsWithChildren, useState } from "react"; -import { useIntl } from "react-intl"; +import React, { useCallback, useEffect } from "react"; +import { FormattedMessage, useIntl } from "react-intl"; import cloneDeep from "lodash-es/cloneDeep"; import Mousetrap from "mousetrap"; import { useHistory } from "react-router-dom"; @@ -11,230 +11,485 @@ import { useFindGroups, useGroupsDestroy, } from "src/core/StashService"; -import { ItemList, ItemListContext, showWhenSelected } from "../List/ItemList"; +import { useFilteredItemList } from "../List/ItemList"; import { ExportDialog } from "../Shared/ExportDialog"; import { DeleteEntityDialog } from "../Shared/DeleteEntityDialog"; import { GroupCardGrid } from "./GroupCardGrid"; import { EditGroupsDialog } from "./EditGroupsDialog"; import { View } from "../List/views"; import { - IFilteredListToolbar, + FilteredListToolbar, IItemListOperation, } from "../List/FilteredListToolbar"; +import { PatchComponent, PatchContainerComponent } from "src/patch"; +import useFocus from "src/utils/focus"; +import { + Sidebar, + SidebarPane, + SidebarPaneContent, + SidebarStateContext, + useSidebarState, +} from "../Shared/Sidebar"; +import { useCloseEditDelete, useFilterOperations } from "../List/util"; +import { + FilteredSidebarHeader, + useFilteredSidebarKeybinds, +} from "../List/Filters/FilterSidebar"; +import { + IListFilterOperation, + ListOperations, +} from "../List/ListOperationButtons"; +import cx from "classnames"; +import { FilterTags } from "../List/FilterTags"; +import { Pagination, PaginationIndex } from "../List/Pagination"; +import { LoadedContent } from "../List/PagedList"; +import { SidebarStudiosFilter } from 
"../List/Filters/StudiosFilter"; +import { SidebarTagsFilter } from "../List/Filters/TagsFilter"; +import { SidebarRatingFilter } from "../List/Filters/RatingFilter"; +import { Button } from "react-bootstrap"; -const GroupExportDialog: React.FC<{ - open?: boolean; +const GroupList: React.FC<{ + groups: GQL.ListGroupDataFragment[]; + filter: ListFilterModel; selectedIds: Set; - isExportAll?: boolean; - onClose: () => void; -}> = ({ open = false, selectedIds, isExportAll = false, onClose }) => { - if (!open) { + onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; + fromGroupId?: string; + onMove?: (srcIds: string[], targetId: string, after: boolean) => void; +}> = PatchComponent( + "GroupList", + ({ groups, filter, selectedIds, onSelectChange, fromGroupId, onMove }) => { + if (groups.length === 0) { + return null; + } + + if (filter.displayMode === DisplayMode.Grid) { + return ( + + ); + } + return null; } +); + +const GroupFilterSidebarSections = PatchContainerComponent( + "FilteredGroupList.SidebarSections" +); + +const SidebarContent: React.FC<{ + filter: ListFilterModel; + setFilter: (filter: ListFilterModel) => void; + filterHook?: (filter: ListFilterModel) => ListFilterModel; + view?: View; + sidebarOpen: boolean; + onClose?: () => void; + showEditFilter: (editingCriterion?: string) => void; + count?: number; + focus?: ReturnType; +}> = ({ + filter, + setFilter, + filterHook, + view, + showEditFilter, + sidebarOpen, + onClose, + count, + focus, +}) => { + const showResultsId = + count !== undefined ? "actions.show_count_results" : "actions.show_results"; + + const hideStudios = view === View.StudioScenes; return ( - + <> + + + + {!hideStudios && ( + + )} + + + + +
+ +
+ ); }; -const filterMode = GQL.FilterMode.Groups; - -function getItems(result: GQL.FindGroupsQueryResult) { - return result?.data?.findGroups?.groups ?? []; -} - -function getCount(result: GQL.FindGroupsQueryResult) { - return result?.data?.findGroups?.count ?? 0; -} - interface IGroupListContext { filterHook?: (filter: ListFilterModel) => ListFilterModel; defaultFilter?: ListFilterModel; view?: View; alterQuery?: boolean; - selectable?: boolean; } -export const GroupListContext: React.FC< - PropsWithChildren -> = ({ alterQuery, filterHook, defaultFilter, view, selectable, children }) => { - return ( - - {children} - - ); -}; - interface IGroupList extends IGroupListContext { fromGroupId?: string; onMove?: (srcIds: string[], targetId: string, after: boolean) => void; - renderToolbar?: (props: IFilteredListToolbar) => React.ReactNode; otherOperations?: IItemListOperation[]; } -export const GroupList: React.FC = ({ - filterHook, - alterQuery, - defaultFilter, - view, - fromGroupId, - onMove, - selectable, - renderToolbar, - otherOperations: providedOperations = [], -}) => { - const intl = useIntl(); +function useViewRandom(filter: ListFilterModel, count: number) { const history = useHistory(); - const [isExportDialogOpen, setIsExportDialogOpen] = useState(false); - const [isExportAll, setIsExportAll] = useState(false); - const otherOperations = [ - { - text: intl.formatMessage({ id: "actions.view_random" }), - onClick: viewRandom, - }, - { - text: intl.formatMessage({ id: "actions.export" }), - onClick: onExport, - isDisplayed: showWhenSelected, - }, - { - text: intl.formatMessage({ id: "actions.export_all" }), - onClick: onExportAll, - }, - ...providedOperations, - ]; + const viewRandom = useCallback(async () => { + // query for a random scene + if (count === 0) { + return; + } - function addKeybinds( - result: GQL.FindGroupsQueryResult, - filter: ListFilterModel - ) { + const index = Math.floor(Math.random() * count); + const filterCopy = cloneDeep(filter); + 
filterCopy.itemsPerPage = 1; + filterCopy.currentPage = index + 1; + const singleResult = await queryFindGroups(filterCopy); + if (singleResult.data.findGroups.groups.length === 1) { + const { id } = singleResult.data.findGroups.groups[0]; + // navigate to the image player page + history.push(`/groups/${id}`); + } + }, [history, filter, count]); + + return viewRandom; +} + +function useAddKeybinds(filter: ListFilterModel, count: number) { + const viewRandom = useViewRandom(filter, count); + + useEffect(() => { Mousetrap.bind("p r", () => { - viewRandom(result, filter); + viewRandom(); }); return () => { Mousetrap.unbind("p r"); }; - } + }, [viewRandom]); +} - async function viewRandom( - result: GQL.FindGroupsQueryResult, - filter: ListFilterModel - ) { - // query for a random image - if (result.data?.findGroups) { - const { count } = result.data.findGroups; +export const FilteredGroupList = PatchComponent( + "FilteredGroupList", + (props: IGroupList) => { + const intl = useIntl(); - const index = Math.floor(Math.random() * count); - const filterCopy = cloneDeep(filter); - filterCopy.itemsPerPage = 1; - filterCopy.currentPage = index + 1; - const singleResult = await queryFindGroups(filterCopy); - if (singleResult.data.findGroups.groups.length === 1) { - const { id } = singleResult.data.findGroups.groups[0]; - // navigate to the group page - history.push(`/groups/${id}`); - } - } - } + const searchFocus = useFocus(); - async function onExport() { - setIsExportAll(false); - setIsExportDialogOpen(true); - } + const { + filterHook, + view, + alterQuery, + onMove, + fromGroupId, + otherOperations: providedOperations = [], + defaultFilter, + } = props; - async function onExportAll() { - setIsExportAll(true); - setIsExportDialogOpen(true); - } + const withSidebar = view !== View.GroupSubGroups; + const filterable = view !== View.GroupSubGroups; + const sortable = view !== View.GroupSubGroups; - function renderContent( - result: GQL.FindGroupsQueryResult, - filter: 
ListFilterModel, - selectedIds: Set, - onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void - ) { - return ( - <> - setIsExportDialogOpen(false)} + // States + const { + showSidebar, + setShowSidebar, + sectionOpen, + setSectionOpen, + loading: sidebarStateLoading, + } = useSidebarState(view); + + const { filterState, queryResult, modalState, listSelect, showEditFilter } = + useFilteredItemList({ + filterStateProps: { + filterMode: GQL.FilterMode.Groups, + defaultFilter, + view, + useURL: alterQuery, + }, + queryResultProps: { + useResult: useFindGroups, + getCount: (r) => r.data?.findGroups.count ?? 0, + getItems: (r) => r.data?.findGroups.groups ?? [], + filterHook, + }, + }); + + const { filter, setFilter } = filterState; + + const { effectiveFilter, result, cachedResult, items, totalCount } = + queryResult; + + const { + selectedIds, + selectedItems, + onSelectChange, + onSelectAll, + onSelectNone, + onInvertSelection, + hasSelection, + } = listSelect; + + const { modal, showModal, closeModal } = modalState; + + // Utility hooks + const { setPage, removeCriterion, clearAllCriteria } = useFilterOperations({ + filter, + setFilter, + }); + + useAddKeybinds(effectiveFilter, totalCount); + useFilteredSidebarKeybinds({ + showSidebar, + setShowSidebar, + }); + + useEffect(() => { + Mousetrap.bind("e", () => { + if (hasSelection) { + onEdit?.(); + } + }); + + Mousetrap.bind("d d", () => { + if (hasSelection) { + onDelete?.(); + } + }); + + return () => { + Mousetrap.unbind("e"); + Mousetrap.unbind("d d"); + }; + }); + + const onCloseEditDelete = useCloseEditDelete({ + closeModal, + onSelectNone, + result, + }); + + const viewRandom = useViewRandom(effectiveFilter, totalCount); + + function onExport(all: boolean) { + showModal( + closeModal()} /> - {filter.displayMode === DisplayMode.Grid && ( - + ); + } + + function onDelete() { + showModal( + + ); + } + + const convertedExtraOperations: IListFilterOperation[] = + providedOperations.map((o) => ({ 
+ ...o, + isDisplayed: o.isDisplayed + ? () => o.isDisplayed!(result, filter, selectedIds) + : undefined, + onClick: () => { + o.onClick(result, filter, selectedIds); + }, + })); + + const otherOperations = [ + ...convertedExtraOperations, + { + text: intl.formatMessage({ id: "actions.select_all" }), + onClick: () => onSelectAll(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.select_none" }), + onClick: () => onSelectNone(), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.invert_selection" }), + onClick: () => onInvertSelection(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.view_random" }), + onClick: viewRandom, + }, + { + text: intl.formatMessage({ id: "actions.export" }), + onClick: () => onExport(false), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.export_all" }), + onClick: () => onExport(true), + }, + ]; + + // render + if (sidebarStateLoading) return null; + + const operations = ( + + ); + + const content = ( + <> + + + showEditFilter(c.criterionOption.type)} + onRemoveCriterion={removeCriterion} + onRemoveAll={clearAllCriteria} + /> + +
+ setFilter(filter.changePage(page))} + /> + +
+ + + + + + {totalCount > filter.itemsPerPage && ( +
+
+ +
+
)} ); - } - function renderEditDialog( - selectedGroups: GQL.GroupDataFragment[], - onClose: (applied: boolean) => void - ) { - return ; - } + if (!withSidebar) { + return content; + } - function renderDeleteDialog( - selectedGroups: GQL.SlimGroupDataFragment[], - onClose: (confirmed: boolean) => void - ) { return ( - +
+ {modal} + + + + setShowSidebar(false)}> + setShowSidebar(false)} + count={cachedResult.loading ? undefined : totalCount} + focus={searchFocus} + /> + + setShowSidebar(!showSidebar)} + > + {content} + + + +
); } - - return ( - - - - ); -}; +); diff --git a/ui/v2.5/src/components/Groups/GroupRecommendationRow.tsx b/ui/v2.5/src/components/Groups/GroupRecommendationRow.tsx index 3a8fee856..b9e523b34 100644 --- a/ui/v2.5/src/components/Groups/GroupRecommendationRow.tsx +++ b/ui/v2.5/src/components/Groups/GroupRecommendationRow.tsx @@ -1,12 +1,9 @@ import React from "react"; -import { Link } from "react-router-dom"; import { useFindGroups } from "src/core/StashService"; -import Slider from "@ant-design/react-slick"; import { GroupCard } from "./GroupCard"; import { ListFilterModel } from "src/models/list-filter/filter"; -import { getSlickSliderSettings } from "src/core/recommendations"; -import { RecommendationRow } from "../FrontPage/RecommendationRow"; -import { FormattedMessage } from "react-intl"; +import { PatchComponent } from "src/patch"; +import { FilteredRecommendationRow } from "../FrontPage/FilteredRecommendationRow"; interface IProps { isTouch: boolean; @@ -14,29 +11,21 @@ interface IProps { header: string; } -export const GroupRecommendationRow: React.FC = (props: IProps) => { - const result = useFindGroups(props.filter); - const cardCount = result.data?.findGroups.count; +export const GroupRecommendationRow: React.FC = PatchComponent( + "GroupRecommendationRow", + (props: IProps) => { + const result = useFindGroups(props.filter); + const count = result.data?.findGroups.count ?? 0; - if (!result.loading && !cardCount) { - return null; - } - - return ( - - - - } - > - {result.loading ? 
[...Array(props.filter.itemsPerPage)].map((i) => ( @@ -45,7 +34,7 @@ export const GroupRecommendationRow: React.FC = (props: IProps) => { : result.data?.findGroups.groups.map((g) => ( ))} - - - ); -}; + + ); + } +); diff --git a/ui/v2.5/src/components/Groups/GroupSelect.tsx b/ui/v2.5/src/components/Groups/GroupSelect.tsx index dd16088e9..a904e2223 100644 --- a/ui/v2.5/src/components/Groups/GroupSelect.tsx +++ b/ui/v2.5/src/components/Groups/GroupSelect.tsx @@ -13,7 +13,7 @@ import { queryFindGroupsByIDForSelect, useGroupCreate, } from "src/core/StashService"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { useIntl } from "react-intl"; import { defaultMaxOptionsShown } from "src/core/config"; import { ListFilterModel } from "src/models/list-filter/filter"; @@ -66,12 +66,12 @@ export const GroupSelect: React.FC< > = PatchComponent("GroupSelect", (props) => { const [createGroup] = useGroupCreate(); - const { configuration } = React.useContext(ConfigurationContext); + const { configuration } = useConfigurationContext(); const intl = useIntl(); const maxOptionsShown = configuration?.ui.maxOptionsShown ?? defaultMaxOptionsShown; const defaultCreatable = - !configuration?.interface.disableDropdownCreate.movie ?? true; + !configuration?.interface.disableDropdownCreate.movie; const exclude = useMemo(() => props.excludeIds ?? 
[], [props.excludeIds]); diff --git a/ui/v2.5/src/components/Groups/Groups.tsx b/ui/v2.5/src/components/Groups/Groups.tsx index 5ec7b4eaf..1a89444b0 100644 --- a/ui/v2.5/src/components/Groups/Groups.tsx +++ b/ui/v2.5/src/components/Groups/Groups.tsx @@ -4,11 +4,11 @@ import { Helmet } from "react-helmet"; import { useTitleProps } from "src/hooks/title"; import Group from "./GroupDetails/Group"; import GroupCreate from "./GroupDetails/GroupCreate"; -import { GroupList } from "./GroupList"; +import { FilteredGroupList } from "./GroupList"; import { View } from "../List/views"; const Groups: React.FC = () => { - return ; + return ; }; const GroupRoutes: React.FC = () => { diff --git a/ui/v2.5/src/components/Groups/RelatedGroupPopover.tsx b/ui/v2.5/src/components/Groups/RelatedGroupPopover.tsx index 03095f284..a2eea9975 100644 --- a/ui/v2.5/src/components/Groups/RelatedGroupPopover.tsx +++ b/ui/v2.5/src/components/Groups/RelatedGroupPopover.tsx @@ -16,7 +16,7 @@ import { GroupTag } from "./GroupTag"; interface IProps { group: Pick< - GQL.GroupDataFragment, + GQL.ListGroupDataFragment, "id" | "name" | "containing_groups" | "sub_group_count" >; } diff --git a/ui/v2.5/src/components/Help/Manual.tsx b/ui/v2.5/src/components/Help/Manual.tsx index d8fc1dbed..e90e2e5ac 100644 --- a/ui/v2.5/src/components/Help/Manual.tsx +++ b/ui/v2.5/src/components/Help/Manual.tsx @@ -23,6 +23,7 @@ import Interactive from "src/docs/en/Manual/Interactive.md"; import Captions from "src/docs/en/Manual/Captions.md"; import Identify from "src/docs/en/Manual/Identify.md"; import Browsing from "src/docs/en/Manual/Browsing.md"; +import TroubleshootingMode from "src/docs/en/Manual/TroubleshootingMode.md"; import { MarkdownPage } from "../Shared/MarkdownPage"; interface IManualProps { @@ -152,6 +153,11 @@ export const Manual: React.FC = ({ title: "Keyboard Shortcuts", content: KeyboardShortcuts, }, + { + key: "TroubleshootingMode.md", + title: "Troubleshooting Mode", + content: TroubleshootingMode, + 
}, { key: "Contributing.md", title: "Contributing", diff --git a/ui/v2.5/src/components/Images/DeleteImagesDialog.tsx b/ui/v2.5/src/components/Images/DeleteImagesDialog.tsx index 36a3ead3c..d57c60ab4 100644 --- a/ui/v2.5/src/components/Images/DeleteImagesDialog.tsx +++ b/ui/v2.5/src/components/Images/DeleteImagesDialog.tsx @@ -4,7 +4,7 @@ import { useImagesDestroy } from "src/core/StashService"; import * as GQL from "src/core/generated-graphql"; import { ModalComponent } from "src/components/Shared/Modal"; import { useToast } from "src/hooks/Toast"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { FormattedMessage, useIntl } from "react-intl"; import { faTrashAlt } from "@fortawesome/free-solid-svg-icons"; @@ -33,7 +33,7 @@ export const DeleteImagesDialog: React.FC = ( { count: props.selected.length, singularEntity, pluralEntity } ); - const { configuration: config } = React.useContext(ConfigurationContext); + const { configuration: config } = useConfigurationContext(); const [deleteFile, setDeleteFile] = useState( config?.defaults.deleteFile ?? false @@ -80,6 +80,11 @@ export const DeleteImagesDialog: React.FC = ( deletedFiles.push(...paths); }); + const deleteTrashPath = config?.general.deleteTrashPath; + const deleteAlertId = deleteTrashPath + ? "dialogs.delete_alert_to_trash" + : "dialogs.delete_alert"; + return (

@@ -89,7 +94,7 @@ export const DeleteImagesDialog: React.FC = ( singularEntity: intl.formatMessage({ id: "file" }), pluralEntity: intl.formatMessage({ id: "files" }), }} - id="dialogs.delete_alert" + id={deleteAlertId} />

    diff --git a/ui/v2.5/src/components/Images/EditImagesDialog.tsx b/ui/v2.5/src/components/Images/EditImagesDialog.tsx index 275ff1556..a90ef922e 100644 --- a/ui/v2.5/src/components/Images/EditImagesDialog.tsx +++ b/ui/v2.5/src/components/Images/EditImagesDialog.tsx @@ -1,96 +1,121 @@ -import React, { useEffect, useState } from "react"; -import { Form, Col, Row } from "react-bootstrap"; -import { FormattedMessage, useIntl } from "react-intl"; -import isEqual from "lodash-es/isEqual"; +import React, { useEffect, useMemo, useState } from "react"; +import { Form } from "react-bootstrap"; +import { useIntl } from "react-intl"; import { useBulkImageUpdate } from "src/core/StashService"; import * as GQL from "src/core/generated-graphql"; -import { StudioSelect } from "src/components/Shared/Select"; -import { ModalComponent } from "src/components/Shared/Modal"; -import { useToast } from "src/hooks/Toast"; -import * as FormUtils from "src/utils/form"; +import { StudioSelect } from "../Shared/Select"; +import { ModalComponent } from "../Shared/Modal"; import { MultiSet } from "../Shared/MultiSet"; +import { useToast } from "src/hooks/Toast"; import { RatingSystem } from "../Shared/Rating/RatingSystem"; import { - getAggregateGalleryIds, - getAggregateInputIDs, getAggregateInputValue, getAggregatePerformerIds, - getAggregateRating, - getAggregateStudioId, + getAggregateStateObject, getAggregateTagIds, + getAggregateStudioId, + getAggregateGalleryIds, } from "src/utils/bulkUpdate"; import { faPencilAlt } from "@fortawesome/free-solid-svg-icons"; +import { IndeterminateCheckbox } from "../Shared/IndeterminateCheckbox"; +import { BulkUpdateFormGroup, BulkUpdateTextInput } from "../Shared/BulkUpdate"; +import { BulkUpdateDateInput } from "../Shared/DateInput"; +import { getDateError } from "src/utils/yup"; interface IListOperationProps { selected: GQL.SlimImageDataFragment[]; onClose: (applied: boolean) => void; } +const imageFields = [ + "code", + "rating100", + "details", + 
"organized", + "photographer", + "date", +]; + export const EditImagesDialog: React.FC = ( props: IListOperationProps ) => { const intl = useIntl(); const Toast = useToast(); - const [rating100, setRating] = useState(); - const [studioId, setStudioId] = useState(); - const [performerMode, setPerformerMode] = - React.useState(GQL.BulkUpdateIdMode.Add); - const [performerIds, setPerformerIds] = useState(); - const [existingPerformerIds, setExistingPerformerIds] = useState(); - const [tagMode, setTagMode] = React.useState( - GQL.BulkUpdateIdMode.Add - ); - const [tagIds, setTagIds] = useState(); - const [existingTagIds, setExistingTagIds] = useState(); + const [updateInput, setUpdateInput] = useState({ + ids: props.selected.map((image) => { + return image.id; + }), + }); - const [galleryMode, setGalleryMode] = React.useState( - GQL.BulkUpdateIdMode.Add - ); - const [galleryIds, setGalleryIds] = useState(); - const [existingGalleryIds, setExistingGalleryIds] = useState(); + const [performerIds, setPerformerIds] = useState({ + mode: GQL.BulkUpdateIdMode.Add, + }); + const [tagIds, setTagIds] = useState({ + mode: GQL.BulkUpdateIdMode.Add, + }); + const [galleryIds, setGalleryIds] = useState({ + mode: GQL.BulkUpdateIdMode.Add, + }); - const [organized, setOrganized] = useState(); + const unsetDisabled = props.selected.length < 2; + + const [dateError, setDateError] = useState(); const [updateImages] = useBulkImageUpdate(); // Network state const [isUpdating, setIsUpdating] = useState(false); - const checkboxRef = React.createRef(); + const aggregateState = useMemo(() => { + const updateState: Partial = {}; + const state = props.selected; + updateState.studio_id = getAggregateStudioId(props.selected); + const updateTagIds = getAggregateTagIds(props.selected); + const updatePerformerIds = getAggregatePerformerIds(props.selected); + const updateGalleryIds = getAggregateGalleryIds(props.selected); + let first = true; + + state.forEach((image: GQL.SlimImageDataFragment) => { + 
getAggregateStateObject(updateState, image, imageFields, first); + first = false; + }); + + return { + state: updateState, + tagIds: updateTagIds, + performerIds: updatePerformerIds, + galleryIds: updateGalleryIds, + }; + }, [props.selected]); + + // update initial state from aggregate + useEffect(() => { + setUpdateInput((current) => ({ ...current, ...aggregateState.state })); + }, [aggregateState]); + + useEffect(() => { + setDateError(getDateError(updateInput.date ?? "", intl)); + }, [updateInput.date, intl]); + + function setUpdateField(input: Partial) { + setUpdateInput((current) => ({ ...current, ...input })); + } function getImageInput(): GQL.BulkImageUpdateInput { - // need to determine what we are actually setting on each image - const aggregateRating = getAggregateRating(props.selected); - const aggregateStudioId = getAggregateStudioId(props.selected); - const aggregatePerformerIds = getAggregatePerformerIds(props.selected); - const aggregateTagIds = getAggregateTagIds(props.selected); - const aggregateGalleryIds = getAggregateGalleryIds(props.selected); - const imageInput: GQL.BulkImageUpdateInput = { - ids: props.selected.map((image) => { - return image.id; - }), + ...updateInput, + tag_ids: tagIds, + performer_ids: performerIds, + gallery_ids: galleryIds, }; - imageInput.rating100 = getAggregateInputValue(rating100, aggregateRating); - imageInput.studio_id = getAggregateInputValue(studioId, aggregateStudioId); - - imageInput.performer_ids = getAggregateInputIDs( - performerMode, - performerIds, - aggregatePerformerIds + // we don't have unset functionality for the rating star control + // so need to determine if we are setting a rating or not + imageInput.rating100 = getAggregateInputValue( + updateInput.rating100, + aggregateState.state.rating100 ); - imageInput.tag_ids = getAggregateInputIDs(tagMode, tagIds, aggregateTagIds); - imageInput.gallery_ids = getAggregateInputIDs( - galleryMode, - galleryIds, - aggregateGalleryIds - ); - - if (organized !== 
undefined) { - imageInput.organized = organized; - } return imageInput; } @@ -98,11 +123,7 @@ export const EditImagesDialog: React.FC = ( async function onSave() { setIsUpdating(true); try { - await updateImages({ - variables: { - input: getImageInput(), - }, - }); + await updateImages({ variables: { input: getImageInput() } }); Toast.success( intl.formatMessage( { id: "toast.updated_entity" }, @@ -116,86 +137,13 @@ export const EditImagesDialog: React.FC = ( setIsUpdating(false); } - useEffect(() => { - const state = props.selected; - let updateRating: number | undefined; - let updateStudioID: string | undefined; - let updatePerformerIds: string[] = []; - let updateTagIds: string[] = []; - let updateGalleryIds: string[] = []; - let updateOrganized: boolean | undefined; - let first = true; - - state.forEach((image: GQL.SlimImageDataFragment) => { - const imageRating = image.rating100; - const imageStudioID = image?.studio?.id; - const imagePerformerIDs = (image.performers ?? []) - .map((p) => p.id) - .sort(); - const imageTagIDs = (image.tags ?? []).map((p) => p.id).sort(); - const imageGalleryIDs = (image.galleries ?? []).map((p) => p.id).sort(); - - if (first) { - updateRating = imageRating ?? 
undefined; - updateStudioID = imageStudioID; - updatePerformerIds = imagePerformerIDs; - updateTagIds = imageTagIDs; - updateGalleryIds = imageGalleryIDs; - updateOrganized = image.organized; - first = false; - } else { - if (imageRating !== updateRating) { - updateRating = undefined; - } - if (imageStudioID !== updateStudioID) { - updateStudioID = undefined; - } - if (!isEqual(imagePerformerIDs, updatePerformerIds)) { - updatePerformerIds = []; - } - if (!isEqual(imageTagIDs, updateTagIds)) { - updateTagIds = []; - } - if (!isEqual(imageGalleryIDs, updateGalleryIds)) { - updateGalleryIds = []; - } - if (image.organized !== updateOrganized) { - updateOrganized = undefined; - } - } - }); - - setRating(updateRating); - setStudioId(updateStudioID); - setExistingPerformerIds(updatePerformerIds); - setExistingTagIds(updateTagIds); - setExistingGalleryIds(updateGalleryIds); - setOrganized(updateOrganized); - }, [props.selected]); - - useEffect(() => { - if (checkboxRef.current) { - checkboxRef.current.indeterminate = organized === undefined; - } - }, [organized, checkboxRef]); - - function cycleOrganized() { - if (organized) { - setOrganized(undefined); - } else if (organized === undefined) { - setOrganized(false); - } else { - setOrganized(true); - } - } - function render() { return ( = ( onClick: onSave, text: intl.formatMessage({ id: "actions.apply" }), }} + disabled={isUpdating || !!dateError} cancel={{ onClick: () => props.onClose(false), text: intl.formatMessage({ id: "actions.cancel" }), @@ -214,89 +163,120 @@ export const EditImagesDialog: React.FC = ( isRunning={isUpdating} >
    - - {FormUtils.renderLabel({ - title: intl.formatMessage({ id: "rating" }), - })} - - setRating(value ?? undefined)} - disabled={isUpdating} - /> - - - - {FormUtils.renderLabel({ - title: intl.formatMessage({ id: "studio" }), - })} - - - setStudioId(items.length > 0 ? items[0]?.id : undefined) - } - ids={studioId ? [studioId] : []} - isDisabled={isUpdating} - menuPortalTarget={document.body} - /> - - - - - - - - + + setUpdateField({ rating100: value ?? undefined }) + } disabled={isUpdating} - onUpdate={(itemIDs) => setPerformerIds(itemIDs)} - onSetMode={(newMode) => setPerformerMode(newMode)} - existingIds={existingPerformerIds ?? []} - ids={performerIds ?? []} - mode={performerMode} + /> + + + + setUpdateField({ code: newValue })} + unsetDisabled={unsetDisabled} + /> + + + + setUpdateField({ date: newValue })} + unsetDisabled={unsetDisabled} + error={dateError} + /> + + + + + setUpdateField({ photographer: newValue }) + } + unsetDisabled={unsetDisabled} + /> + + + + setUpdateField({ + studio_id: items.length > 0 ? items[0]?.id : undefined, + }) + } + ids={updateInput.studio_id ? [updateInput.studio_id] : []} + isDisabled={isUpdating} menuPortalTarget={document.body} /> - + - - - - + setTagIds(itemIDs)} - onSetMode={(newMode) => setTagMode(newMode)} - existingIds={existingTagIds ?? []} - ids={tagIds ?? []} - mode={tagMode} + onUpdate={(itemIDs) => { + setPerformerIds((c) => ({ ...c, ids: itemIDs })); + }} + onSetMode={(newMode) => { + setPerformerIds((c) => ({ ...c, mode: newMode })); + }} + ids={performerIds.ids ?? []} + existingIds={aggregateState.performerIds} + mode={performerIds.mode} menuPortalTarget={document.body} /> - + - - - - + setGalleryIds(itemIDs)} - onSetMode={(newMode) => setGalleryMode(newMode)} - existingIds={existingGalleryIds ?? []} - ids={galleryIds ?? 
[]} - mode={galleryMode} + onUpdate={(itemIDs) => { + setGalleryIds((c) => ({ ...c, ids: itemIDs })); + }} + onSetMode={(newMode) => { + setGalleryIds((c) => ({ ...c, mode: newMode })); + }} + ids={galleryIds.ids ?? []} + existingIds={aggregateState.galleryIds} + mode={galleryIds.mode} menuPortalTarget={document.body} /> - + + + + { + setTagIds((c) => ({ ...c, ids: itemIDs })); + }} + onSetMode={(newMode) => { + setTagIds((c) => ({ ...c, mode: newMode })); + }} + ids={tagIds.ids ?? []} + existingIds={aggregateState.tagIds} + mode={tagIds.mode} + menuPortalTarget={document.body} + /> + + + + setUpdateField({ details: newValue })} + unsetDisabled={unsetDisabled} + as="textarea" + /> + - cycleOrganized()} + setChecked={(checked) => setUpdateField({ organized: checked })} + checked={updateInput.organized ?? undefined} />
    diff --git a/ui/v2.5/src/components/Images/ImageCard.tsx b/ui/v2.5/src/components/Images/ImageCard.tsx index d530b253e..a1189c844 100644 --- a/ui/v2.5/src/components/Images/ImageCard.tsx +++ b/ui/v2.5/src/components/Images/ImageCard.tsx @@ -5,7 +5,6 @@ import * as GQL from "src/core/generated-graphql"; import { Icon } from "src/components/Shared/Icon"; import { GalleryLink, TagLink } from "src/components/Shared/TagLink"; import { HoverPopover } from "src/components/Shared/HoverPopover"; -import { SweatDrops } from "src/components/Shared/SweatDrops"; import { PerformerPopoverButton } from "src/components/Shared/PerformerPopoverButton"; import { GridCard } from "src/components/Shared/GridCard/GridCard"; import { RatingBanner } from "src/components/Shared/RatingBanner"; @@ -16,8 +15,10 @@ import { faTag, } from "@fortawesome/free-solid-svg-icons"; import { imageTitle } from "src/core/files"; +import { PatchComponent } from "src/patch"; import { TruncatedText } from "../Shared/TruncatedText"; import { StudioOverlay } from "../Shared/GridCard/StudioOverlay"; +import { OCounterButton } from "../Shared/CountButton"; interface IImageCardProps { image: GQL.SlimImageDataFragment; @@ -29,98 +30,80 @@ interface IImageCardProps { onPreview?: (ev: MouseEvent) => void; } -export const ImageCard: React.FC = ( - props: IImageCardProps -) => { - const file = useMemo( - () => - props.image.visual_files.length > 0 - ? 
props.image.visual_files[0] - : undefined, - [props.image] - ); +const ImageCardPopovers = PatchComponent( + "ImageCard.Popovers", + (props: IImageCardProps) => { + function maybeRenderTagPopoverButton() { + if (props.image.tags.length <= 0) return; - function maybeRenderTagPopoverButton() { - if (props.image.tags.length <= 0) return; + const popoverContent = props.image.tags.map((tag) => ( + + )); - const popoverContent = props.image.tags.map((tag) => ( - - )); - - return ( - - - - ); - } - - function maybeRenderPerformerPopoverButton() { - if (props.image.performers.length <= 0) return; - - return ( - - ); - } - - function maybeRenderOCounter() { - if (props.image.o_counter) { return ( -
    + -
    + ); } - } - function maybeRenderGallery() { - if (props.image.galleries.length <= 0) return; + function maybeRenderPerformerPopoverButton() { + if (props.image.performers.length <= 0) return; - const popoverContent = props.image.galleries.map((gallery) => ( - - )); - - return ( - - - - ); - } - - function maybeRenderOrganized() { - if (props.image.organized) { return ( -
    - -
    + ); } - } - function maybeRenderPopoverButtonGroup() { + function maybeRenderOCounter() { + if (props.image.o_counter) { + return ; + } + } + + function maybeRenderGallery() { + if (props.image.galleries.length <= 0) return; + + const popoverContent = props.image.galleries.map((gallery) => ( + + )); + + return ( + + + + ); + } + + function maybeRenderOrganized() { + if (props.image.organized) { + return ( +
    + +
    + ); + } + } + if ( props.image.tags.length > 0 || props.image.performers.length > 0 || @@ -141,64 +124,107 @@ export const ImageCard: React.FC = ( ); } + + return null; } +); - function isPortrait() { - const width = file?.width ? file.width : 0; - const height = file?.height ? file.height : 0; - return height > width; +const ImageCardDetails = PatchComponent( + "ImageCard.Details", + (props: IImageCardProps) => { + return ( +
    + {props.image.date} + +
    + ); } +); - const source = - props.image.paths.preview != "" - ? props.image.paths.preview ?? "" - : props.image.paths.thumbnail ?? ""; - const video = source.includes("preview"); - const ImagePreview = video ? "video" : "img"; +const ImageCardOverlays = PatchComponent( + "ImageCard.Overlays", + (props: IImageCardProps) => { + const ret = useMemo(() => { + return ( + + ); + }, [props.image.studio, props.selecting]); - return ( - -
    - - {props.onPreview ? ( -
    - -
    - ) : undefined} -
    - - - } - details={ -
    - {props.image.date} - { + const file = useMemo( + () => + props.image.visual_files.length > 0 + ? props.image.visual_files[0] + : undefined, + [props.image] + ); + + function isPortrait() { + const width = file?.width ? file.width : 0; + const height = file?.height ? file.height : 0; + return height > width; + } + + const source = + props.image.paths.preview != "" + ? props.image.paths.preview ?? "" + : props.image.paths.thumbnail ?? ""; + const video = source.includes("preview"); + const ImagePreview = video ? "video" : "img"; + + return ( + <> +
    + + {props.onPreview ? ( +
    + +
    + ) : undefined}
    - } - overlays={} - popovers={maybeRenderPopoverButtonGroup()} - selected={props.selected} - selecting={props.selecting} - onSelectedChanged={props.onSelectedChanged} - /> - ); -}; + + + ); + } +); + +export const ImageCard: React.FC = PatchComponent( + "ImageCard", + (props: IImageCardProps) => { + return ( + } + details={} + overlays={} + popovers={} + selected={props.selected} + selecting={props.selecting} + onSelectedChanged={props.onSelectedChanged} + /> + ); + } +); diff --git a/ui/v2.5/src/components/Images/ImageCardGrid.tsx b/ui/v2.5/src/components/Images/ImageCardGrid.tsx new file mode 100644 index 000000000..dadab571b --- /dev/null +++ b/ui/v2.5/src/components/Images/ImageCardGrid.tsx @@ -0,0 +1,47 @@ +import React from "react"; +import * as GQL from "src/core/generated-graphql"; +import { ImageCard } from "./ImageCard"; +import { + useCardWidth, + useContainerDimensions, +} from "../Shared/GridCard/GridCard"; +import { PatchComponent } from "src/patch"; + +interface IImageCardGrid { + images: GQL.SlimImageDataFragment[]; + selectedIds: Set; + zoomIndex: number; + onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; + onPreview: (index: number, ev: React.MouseEvent) => void; +} + +const zoomWidths = [280, 340, 480, 640]; + +export const ImageCardGrid: React.FC = PatchComponent( + "ImageCardGrid", + ({ images, selectedIds, zoomIndex, onSelectChange, onPreview }) => { + const [componentRef, { width: containerWidth }] = useContainerDimensions(); + const cardWidth = useCardWidth(containerWidth, zoomIndex, zoomWidths); + + return ( +
    + {images.map((image, index) => ( + 0} + selected={selectedIds.has(image.id)} + onSelectedChanged={(selected: boolean, shiftKey: boolean) => + onSelectChange(image.id, selected, shiftKey) + } + onPreview={ + selectedIds.size < 1 ? (ev) => onPreview(index, ev) : undefined + } + /> + ))} +
    + ); + } +); diff --git a/ui/v2.5/src/components/Images/ImageDetails/Image.tsx b/ui/v2.5/src/components/Images/ImageDetails/Image.tsx index 4ab6641d7..f885c21bb 100644 --- a/ui/v2.5/src/components/Images/ImageDetails/Image.tsx +++ b/ui/v2.5/src/components/Images/ImageDetails/Image.tsx @@ -1,7 +1,7 @@ import { Tab, Nav, Dropdown } from "react-bootstrap"; -import React, { useContext, useEffect, useMemo, useState } from "react"; -import { FormattedDate, FormattedMessage, useIntl } from "react-intl"; -import { useHistory, Link, RouteComponentProps } from "react-router-dom"; +import React, { useEffect, useMemo, useState } from "react"; +import { FormattedMessage, useIntl } from "react-intl"; +import { useHistory, RouteComponentProps } from "react-router-dom"; import { Helmet } from "react-helmet"; import { useFindImage, @@ -29,11 +29,15 @@ import { imagePath, imageTitle } from "src/core/files"; import { isVideo } from "src/utils/visualFile"; import { useScrollToTopOnMount } from "src/hooks/scrollToTop"; import { useRatingKeybinds } from "src/hooks/keybinds"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import TextUtils from "src/utils/text"; import { RatingSystem } from "src/components/Shared/Rating/RatingSystem"; import cx from "classnames"; import { TruncatedText } from "src/components/Shared/TruncatedText"; +import { goBackOrReplace } from "src/utils/history"; +import { FormattedDate } from "src/components/Shared/Date"; +import { GenerateDialog } from "src/components/Dialogs/GenerateDialog"; +import { StudioLogo } from "src/components/Shared/StudioLogo"; interface IProps { image: GQL.ImageDataFragment; @@ -47,7 +51,8 @@ const ImagePage: React.FC = ({ image }) => { const history = useHistory(); const Toast = useToast(); const intl = useIntl(); - const { configuration } = useContext(ConfigurationContext); + const { configuration } = useConfigurationContext(); + const { showStudioText } = 
configuration?.ui ?? {}; const [incrementO] = useImageIncrementO(image.id); const [decrementO] = useImageDecrementO(image.id); @@ -60,6 +65,7 @@ const ImagePage: React.FC = ({ image }) => { const [activeTabKey, setActiveTabKey] = useState("image-details-panel"); const [isDeleteAlertOpen, setIsDeleteAlertOpen] = useState(false); + const [isGenerateDialogOpen, setIsGenerateDialogOpen] = useState(false); async function onSave(input: GQL.ImageUpdateInput) { await updateImage({ @@ -156,7 +162,7 @@ const ImagePage: React.FC = ({ image }) => { function onDeleteDialogClosed(deleted: boolean) { setIsDeleteAlertOpen(false); if (deleted) { - history.goBack(); + goBackOrReplace(history, "/images"); } } @@ -168,6 +174,20 @@ const ImagePage: React.FC = ({ image }) => { } } + function maybeRenderSceneGenerateDialog() { + if (isGenerateDialogOpen) { + return ( + { + setIsGenerateDialogOpen(false); + }} + type="image" + /> + ); + } + } + function renderOperations() { return ( @@ -187,6 +207,13 @@ const ImagePage: React.FC = ({ image }) => { > + setIsGenerateDialogOpen(true)} + > + … + = ({ image }) => { {maybeRenderDeleteDialog()} + {maybeRenderSceneGenerateDialog()}
    - {image.studio && ( -

    - - {`${image.studio.name} - -

    - )} +

    @@ -318,13 +336,7 @@ const ImagePage: React.FC = ({ image }) => {
    - {!!image.date && ( - - )} + {!!image.date && } {resolution ? ( @@ -369,6 +381,7 @@ const ImagePage: React.FC = ({ image }) => { = PatchComponent( {renderDetails()} {renderTags()} {renderPerformers()} +
    diff --git a/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx b/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx index f2771f542..94dddac4b 100644 --- a/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx +++ b/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx @@ -35,6 +35,11 @@ import { } from "src/components/Galleries/GallerySelect"; import { useTagsEdit } from "src/hooks/tagsEdit"; import { ScraperMenu } from "src/components/Shared/ScraperMenu"; +import { + CustomFieldsInput, + formatCustomFieldInput, +} from "src/components/Shared/CustomFields"; +import { cloneDeep } from "@apollo/client/utilities"; interface IProps { image: GQL.ImageDataFragment; @@ -86,6 +91,7 @@ export const ImageEditPanel: React.FC = ({ studio_id: yup.string().required().nullable(), performer_ids: yup.array(yup.string().required()).defined(), tag_ids: yup.array(yup.string().required()).defined(), + custom_fields: yup.object().required().defined(), }); const initialValues = { @@ -99,15 +105,26 @@ export const ImageEditPanel: React.FC = ({ studio_id: image.studio?.id ?? null, performer_ids: (image.performers ?? []).map((p) => p.id), tag_ids: (image.tags ?? []).map((t) => t.id), + custom_fields: cloneDeep(image.custom_fields ?? {}), }; type InputValues = yup.InferType; + const [customFieldsError, setCustomFieldsError] = useState(); + + function submit(values: InputValues) { + const input = { + ...schema.cast(values), + custom_fields: formatCustomFieldInput(isNew, values.custom_fields), + }; + onSave(input); + } + const formik = useFormik({ initialValues, enableReinitialize: true, validate: yupFormikValidate(schema), - onSubmit: (values) => onSave(schema.cast(values)), + onSubmit: submit, }); const { tags, updateTagsStateFromScraper, tagsControl } = useTagsEdit( @@ -320,6 +337,19 @@ export const ImageEditPanel: React.FC = ({ xl: 12, }, }; + const urlProps = isNew + ? 
splitProps + : { + labelProps: { + column: true, + md: 3, + lg: 12, + }, + fieldProps: { + md: 9, + lg: 12, + }, + }; const { renderField, renderInputField, renderDateField, renderURLListField } = formikUtils(intl, formik, splitProps); @@ -431,7 +461,9 @@ export const ImageEditPanel: React.FC = ({ className="edit-button" variant="primary" disabled={ - (!isNew && !formik.dirty) || !isEqual(formik.errors, {}) + (!isNew && !formik.dirty) || + !isEqual(formik.errors, {}) || + customFieldsError !== undefined } onClick={() => formik.submitForm()} > @@ -461,7 +493,13 @@ export const ImageEditPanel: React.FC = ({ {renderInputField("title")} {renderInputField("code", "text", "scene_code")} - {renderURLListField("urls", onScrapeImageURL, urlScrapable)} + {renderURLListField( + "urls", + onScrapeImageURL, + urlScrapable, + "urls", + urlProps + )} {renderDateField("date")} {renderInputField("photographer")} @@ -473,6 +511,13 @@ export const ImageEditPanel: React.FC = ({ {renderDetailsField()} + + formik.setFieldValue("custom_fields", v)} + error={customFieldsError} + setError={(e) => setCustomFieldsError(e)} + /> diff --git a/ui/v2.5/src/components/Images/ImageDetails/ImageFileInfoPanel.tsx b/ui/v2.5/src/components/Images/ImageDetails/ImageFileInfoPanel.tsx index 4e566a626..097a64340 100644 --- a/ui/v2.5/src/components/Images/ImageDetails/ImageFileInfoPanel.tsx +++ b/ui/v2.5/src/components/Images/ImageDetails/ImageFileInfoPanel.tsx @@ -1,14 +1,16 @@ import React, { useState } from "react"; import { Accordion, Button, Card } from "react-bootstrap"; -import { FormattedMessage, FormattedTime } from "react-intl"; +import { FormattedMessage, FormattedTime, useIntl } from "react-intl"; import { TruncatedText } from "src/components/Shared/TruncatedText"; import { DeleteFilesDialog } from "src/components/Shared/DeleteFilesDialog"; +import { RevealInFilesystemButton } from "src/components/Shared/RevealInFilesystemButton"; import * as GQL from "src/core/generated-graphql"; import { 
mutateImageSetPrimaryFile } from "src/core/StashService"; import { useToast } from "src/hooks/Toast"; import TextUtils from "src/utils/text"; import { TextField, URLField, URLsField } from "src/utils/field"; import { FileSize } from "src/components/Shared/FileSize"; +import NavUtils from "src/utils/navigation"; interface IFileInfoPanelProps { file: GQL.ImageFileDataFragment | GQL.VideoFileDataFragment; @@ -22,7 +24,9 @@ interface IFileInfoPanelProps { const FileInfoPanel: React.FC = ( props: IFileInfoPanelProps ) => { + const intl = useIntl(); const checksum = props.file.fingerprints.find((f) => f.type === "md5"); + const phash = props.file.fingerprints.find((f) => f.type === "phash"); return (
    @@ -35,13 +39,22 @@ const FileInfoPanel: React.FC = ( )} - + + + + + + + diff --git a/ui/v2.5/src/components/Images/ImageDetails/ImageScrapeDialog.tsx b/ui/v2.5/src/components/Images/ImageDetails/ImageScrapeDialog.tsx index cc7dffe66..aa1b4633c 100644 --- a/ui/v2.5/src/components/Images/ImageDetails/ImageScrapeDialog.tsx +++ b/ui/v2.5/src/components/Images/ImageDetails/ImageScrapeDialog.tsx @@ -2,11 +2,11 @@ import React, { useState } from "react"; import { useIntl } from "react-intl"; import * as GQL from "src/core/generated-graphql"; import { - ScrapeDialog, ScrapedInputGroupRow, ScrapedStringListRow, ScrapedTextAreaRow, -} from "src/components/Shared/ScrapeDialog/ScrapeDialog"; +} from "src/components/Shared/ScrapeDialog/ScrapeDialogRow"; +import { ScrapeDialog } from "src/components/Shared/ScrapeDialog/ScrapeDialog"; import { ObjectListScrapeResult, ObjectScrapeResult, @@ -100,7 +100,7 @@ export const ImageScrapeDialog: React.FC = ({ scraped.performers?.filter((t) => !t.stored_id) ?? [] ); - const { tags, newTags, scrapedTagsRow } = useScrapedTags( + const { tags, newTags, scrapedTagsRow, linkDialog } = useScrapedTags( imageTags, scraped.tags ); @@ -163,32 +163,38 @@ export const ImageScrapeDialog: React.FC = ({ return ( <> setTitle(value)} /> setCode(value)} /> setURLs(value)} /> setDate(value)} /> setPhotographer(value)} /> setStudio(value)} @@ -196,6 +202,7 @@ export const ImageScrapeDialog: React.FC = ({ onCreateNew={createNewStudio} /> setPerformers(value)} @@ -204,6 +211,7 @@ export const ImageScrapeDialog: React.FC = ({ /> {scrapedTagsRow} setDetails(value)} @@ -212,16 +220,21 @@ export const ImageScrapeDialog: React.FC = ({ ); } + if (linkDialog) { + return linkDialog; + } + return ( { onClose(apply ? 
makeNewScrapedItem() : undefined); }} - /> + > + {renderScrapeRows()} + ); }; diff --git a/ui/v2.5/src/components/Images/ImageGridCard.tsx b/ui/v2.5/src/components/Images/ImageGridCard.tsx deleted file mode 100644 index cbb76d853..000000000 --- a/ui/v2.5/src/components/Images/ImageGridCard.tsx +++ /dev/null @@ -1,49 +0,0 @@ -import React from "react"; -import * as GQL from "src/core/generated-graphql"; -import { ImageCard } from "./ImageCard"; -import { - useCardWidth, - useContainerDimensions, -} from "../Shared/GridCard/GridCard"; - -interface IImageCardGrid { - images: GQL.SlimImageDataFragment[]; - selectedIds: Set; - zoomIndex: number; - onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; - onPreview: (index: number, ev: React.MouseEvent) => void; -} - -const zoomWidths = [280, 340, 480, 640]; - -export const ImageGridCard: React.FC = ({ - images, - selectedIds, - zoomIndex, - onSelectChange, - onPreview, -}) => { - const [componentRef, { width: containerWidth }] = useContainerDimensions(); - const cardWidth = useCardWidth(containerWidth, zoomIndex, zoomWidths); - - return ( -
    - {images.map((image, index) => ( - 0} - selected={selectedIds.has(image.id)} - onSelectedChanged={(selected: boolean, shiftKey: boolean) => - onSelectChange(image.id, selected, shiftKey) - } - onPreview={ - selectedIds.size < 1 ? (ev) => onPreview(index, ev) : undefined - } - /> - ))} -
    - ); -}; diff --git a/ui/v2.5/src/components/Images/ImageList.tsx b/ui/v2.5/src/components/Images/ImageList.tsx index 12eb264b1..00b23b0aa 100644 --- a/ui/v2.5/src/components/Images/ImageList.tsx +++ b/ui/v2.5/src/components/Images/ImageList.tsx @@ -3,31 +3,70 @@ import React, { useState, useMemo, MouseEvent, - useContext, + useEffect, } from "react"; -import { FormattedNumber, useIntl } from "react-intl"; +import { FormattedMessage, FormattedNumber, useIntl } from "react-intl"; import cloneDeep from "lodash-es/cloneDeep"; import { useHistory } from "react-router-dom"; import Mousetrap from "mousetrap"; import * as GQL from "src/core/generated-graphql"; -import { queryFindImages, useFindImages } from "src/core/StashService"; -import { ItemList, ItemListContext, showWhenSelected } from "../List/ItemList"; +import { + queryFindImages, + useFindImages, + useFindImagesMetadata, +} from "src/core/StashService"; +import { useFilteredItemList } from "../List/ItemList"; import { useLightbox } from "src/hooks/Lightbox/hooks"; import { ListFilterModel } from "src/models/list-filter/filter"; import { DisplayMode } from "src/models/list-filter/types"; - import { ImageWallItem } from "./ImageWallItem"; import { EditImagesDialog } from "./EditImagesDialog"; import { DeleteImagesDialog } from "./DeleteImagesDialog"; import "flexbin/flexbin.css"; -import Gallery from "react-photo-gallery"; +import Gallery, { RenderImageProps } from "react-photo-gallery"; import { ExportDialog } from "../Shared/ExportDialog"; import { objectTitle } from "src/core/files"; -import { ConfigurationContext } from "src/hooks/Config"; -import { ImageGridCard } from "./ImageGridCard"; +import { useConfigurationContext } from "src/hooks/Config"; +import { ImageCardGrid } from "./ImageCardGrid"; import { View } from "../List/views"; -import { IItemListOperation } from "../List/FilteredListToolbar"; +import { + FilteredListToolbar, + IItemListOperation, +} from "../List/FilteredListToolbar"; import { 
FileSize } from "../Shared/FileSize"; +import { PatchComponent, PatchContainerComponent } from "src/patch"; +import { GenerateDialog } from "../Dialogs/GenerateDialog"; +import { + Sidebar, + SidebarPane, + SidebarPaneContent, + SidebarStateContext, + useSidebarState, +} from "../Shared/Sidebar"; +import { useCloseEditDelete, useFilterOperations } from "../List/util"; +import { + FilteredSidebarHeader, + useFilteredSidebarKeybinds, +} from "../List/Filters/FilterSidebar"; +import { + IListFilterOperation, + ListOperations, +} from "../List/ListOperationButtons"; +import { FilterTags } from "../List/FilterTags"; +import { Pagination, PaginationIndex } from "../List/Pagination"; +import { LoadedContent } from "../List/PagedList"; +import useFocus from "src/utils/focus"; +import cx from "classnames"; +import { SidebarStudiosFilter } from "../List/Filters/StudiosFilter"; +import { SidebarPerformersFilter } from "../List/Filters/PerformersFilter"; +import { SidebarTagsFilter } from "../List/Filters/TagsFilter"; +import { SidebarRatingFilter } from "../List/Filters/RatingFilter"; +import { SidebarBooleanFilter } from "../List/Filters/BooleanFilter"; +import { Button } from "react-bootstrap"; +import { OrganizedCriterionOption } from "src/models/list-filter/criteria/organized"; +import { SidebarAgeFilter } from "../List/Filters/SidebarAgeFilter"; +import { PerformerAgeCriterionOption } from "src/models/list-filter/images"; +import { SidebarFolderFilter } from "../List/Filters/FolderFilter"; interface IImageWallProps { images: GQL.SlimImageDataFragment[]; @@ -35,12 +74,33 @@ interface IImageWallProps { currentPage: number; pageCount: number; handleImageOpen: (index: number) => void; + zoomIndex: number; + selectedIds?: Set; + onSelectChange?: (id: string, selected: boolean, shiftKey: boolean) => void; + selecting?: boolean; } -const ImageWall: React.FC = ({ images, handleImageOpen }) => { - const { configuration } = useContext(ConfigurationContext); +const zoomWidths = 
[280, 340, 480, 640]; +const breakpointZoomHeights = [ + { minWidth: 576, heights: [100, 120, 240, 360] }, + { minWidth: 768, heights: [120, 160, 240, 480] }, + { minWidth: 1200, heights: [120, 160, 240, 300] }, + { minWidth: 1400, heights: [160, 240, 300, 480] }, +]; + +const ImageWall: React.FC = ({ + images, + zoomIndex, + handleImageOpen, + selectedIds, + onSelectChange, + selecting, +}) => { + const { configuration } = useConfigurationContext(); const uiConfig = configuration?.ui; + const containerRef = React.useRef(null); + let photos: { src: string; srcSet?: string | string[] | undefined; @@ -57,8 +117,8 @@ const ImageWall: React.FC = ({ images, handleImageOpen }) => { image.paths.preview != "" ? image.paths.preview! : image.paths.thumbnail!, - width: image.visual_files[0].width, - height: image.visual_files[0].height, + width: image.visual_files?.[0]?.width ?? 0, + height: image.visual_files?.[0]?.height ?? 0, tabIndex: index, key: image.id, loading: "lazy", @@ -76,21 +136,70 @@ const ImageWall: React.FC = ({ images, handleImageOpen }) => { ); function columns(containerWidth: number) { - let preferredSize = 300; + let preferredSize = zoomWidths[zoomIndex]; let columnCount = containerWidth / preferredSize; return Math.round(columnCount); } + const targetRowHeight = useCallback( + (containerWidth: number) => { + let zoomHeight = 280; + breakpointZoomHeights.forEach((e) => { + if (containerWidth >= e.minWidth) { + zoomHeight = e.heights[zoomIndex]; + } + }); + return zoomHeight; + }, + [zoomIndex] + ); + + // set the max height as a factor of the targetRowHeight + // this allows some images to be taller than the target row height + // but prevents images from becoming too tall when there is a small number of items + const maxHeightFactor = 1.3; + + const renderImage = useCallback( + (props: RenderImageProps) => { + // #6165 - only use targetRowHeight in row direction + const maxHeight = + props.direction === "column" + ? 
props.photo.height + : targetRowHeight(containerRef.current?.offsetWidth ?? 0) * + maxHeightFactor; + const imageId = props.photo.key; + if (!imageId) { + return null; + } + return ( + + onSelectChange(imageId, selected, shiftKey) + : undefined + } + selecting={selecting} + /> + ); + }, + [targetRowHeight, selectedIds, onSelectChange, selecting] + ); + return ( -
    +
    {photos.length ? ( ) : null}
    @@ -109,127 +218,133 @@ interface IImageListImages { chapters?: GQL.GalleryChapterDataFragment[]; } -const ImageListImages: React.FC = ({ - images, - filter, - selectedIds, - onChangePage, - pageCount, - onSelectChange, - slideshowRunning, - setSlideshowRunning, - chapters = [], -}) => { - const handleLightBoxPage = useCallback( - (props: { direction?: number; page?: number }) => { - const { direction, page: newPage } = props; - - if (direction !== undefined) { - if (direction < 0) { - if (filter.currentPage === 1) { - onChangePage(pageCount); - } else { - onChangePage(filter.currentPage + direction); - } - } else if (direction > 0) { - if (filter.currentPage === pageCount) { - // return to the first page - onChangePage(1); - } else { - onChangePage(filter.currentPage + direction); - } - } - } else if (newPage !== undefined) { - onChangePage(newPage); - } - }, - [onChangePage, filter.currentPage, pageCount] - ); - - const handleClose = useCallback(() => { - setSlideshowRunning(false); - }, [setSlideshowRunning]); - - const lightboxState = useMemo(() => { - return { - images, - showNavigation: false, - pageCallback: pageCount > 1 ? handleLightBoxPage : undefined, - page: filter.currentPage, - pages: pageCount, - pageSize: filter.itemsPerPage, - slideshowEnabled: slideshowRunning, - onClose: handleClose, - }; - }, [ +const ImageList: React.FC = PatchComponent( + "ImageList", + ({ images, + filter, + selectedIds, + onChangePage, pageCount, - filter.currentPage, - filter.itemsPerPage, + onSelectChange, slideshowRunning, - handleClose, - handleLightBoxPage, - ]); + setSlideshowRunning, + chapters = [], + }) => { + const handleLightBoxPage = useCallback( + (props: { direction?: number; page?: number }) => { + const { direction, page: newPage } = props; - const showLightbox = useLightbox( - lightboxState, - filter.sortBy === "path" && - filter.sortDirection === GQL.SortDirectionEnum.Asc - ? 
chapters - : [] - ); - - const handleImageOpen = useCallback( - (index) => { - setSlideshowRunning(true); - showLightbox({ initialIndex: index, slideshowEnabled: true }); - }, - [showLightbox, setSlideshowRunning] - ); - - function onPreview(index: number, ev: MouseEvent) { - handleImageOpen(index); - ev.preventDefault(); - } - - if (filter.displayMode === DisplayMode.Grid) { - return ( - + if (direction !== undefined) { + if (direction < 0) { + if (filter.currentPage === 1) { + onChangePage(pageCount); + } else { + onChangePage(filter.currentPage + direction); + } + } else if (direction > 0) { + if (filter.currentPage === pageCount) { + // return to the first page + onChangePage(1); + } else { + onChangePage(filter.currentPage + direction); + } + } + } else if (newPage !== undefined) { + onChangePage(newPage); + } + }, + [onChangePage, filter.currentPage, pageCount] ); - } - if (filter.displayMode === DisplayMode.Wall) { - return ( - + + const handleClose = useCallback(() => { + setSlideshowRunning(false); + }, [setSlideshowRunning]); + + const lightboxState = useMemo(() => { + return { + images, + showNavigation: false, + pageCallback: pageCount > 1 ? handleLightBoxPage : undefined, + page: filter.currentPage, + pages: pageCount, + pageSize: filter.itemsPerPage, + slideshowEnabled: slideshowRunning, + onClose: handleClose, + }; + }, [ + images, + pageCount, + filter.currentPage, + filter.itemsPerPage, + slideshowRunning, + handleClose, + handleLightBoxPage, + ]); + + const showLightbox = useLightbox( + lightboxState, + filter.sortBy === "path" && + filter.sortDirection === GQL.SortDirectionEnum.Asc + ? 
chapters + : [] ); + + const handleImageOpen = useCallback( + (index) => { + setSlideshowRunning(true); + showLightbox({ initialIndex: index, slideshowEnabled: true }); + }, + [showLightbox, setSlideshowRunning] + ); + + function onPreview(index: number, ev: MouseEvent) { + handleImageOpen(index); + ev.preventDefault(); + } + + if (filter.displayMode === DisplayMode.Grid) { + return ( + + ); + } + if (filter.displayMode === DisplayMode.Wall) { + return ( + 0} + /> + ); + } + + // should not happen + return <>; } +); - // should not happen - return <>; -}; +function renderMetadataByline( + metadataInfo: GQL.FindImagesMetadataQueryResult | undefined +) { + const megapixels = metadataInfo?.data?.findImages?.megapixels; + const size = metadataInfo?.data?.findImages?.filesize; -function getItems(result: GQL.FindImagesQueryResult) { - return result?.data?.findImages?.images ?? []; -} - -function getCount(result: GQL.FindImagesQueryResult) { - return result?.data?.findImages?.count ?? 0; -} - -function renderMetadataByline(result: GQL.FindImagesQueryResult) { - const megapixels = result?.data?.findImages?.megapixels; - const size = result?.data?.findImages?.filesize; + if (metadataInfo?.loading) { + // return ellipsis + return  (...); + } if (!megapixels && !size) { return; @@ -256,6 +371,136 @@ function renderMetadataByline(result: GQL.FindImagesQueryResult) { ); } +const ImageFilterSidebarSections = PatchContainerComponent( + "FilteredImageList.SidebarSections" +); + +const SidebarContent: React.FC<{ + filter: ListFilterModel; + setFilter: (filter: ListFilterModel) => void; + filterHook?: (filter: ListFilterModel) => ListFilterModel; + view?: View; + sidebarOpen: boolean; + onClose?: () => void; + showEditFilter: (editingCriterion?: string) => void; + count?: number; + focus?: ReturnType; +}> = ({ + filter, + setFilter, + filterHook, + view, + showEditFilter, + sidebarOpen, + onClose, + count, + focus, +}) => { + const showResultsId = + count !== undefined ? 
"actions.show_count_results" : "actions.show_results"; + + const hideStudios = view === View.StudioScenes; + + return ( + <> + + + + {!hideStudios && ( + + )} + + + + } + filter={filter} + setFilter={setFilter} + sectionID="folder" + /> + } + data-type={OrganizedCriterionOption.type} + option={OrganizedCriterionOption} + filter={filter} + setFilter={setFilter} + sectionID="organized" + /> + } + option={PerformerAgeCriterionOption} + filter={filter} + setFilter={setFilter} + sectionID="performer_age" + /> + + +
    + +
    + + ); +}; + +function useViewRandom(filter: ListFilterModel, count: number) { + const history = useHistory(); + + const viewRandom = useCallback(async () => { + // query for a random image + if (count === 0) { + return; + } + + const index = Math.floor(Math.random() * count); + const filterCopy = cloneDeep(filter); + filterCopy.itemsPerPage = 1; + filterCopy.currentPage = index + 1; + const singleResult = await queryFindImages(filterCopy); + if (singleResult.data.findImages.images.length === 1) { + const { id } = singleResult.data.findImages.images[0]; + // navigate to the image player page + history.push(`/images/${id}`); + } + }, [history, filter, count]); + + return viewRandom; +} + +function useAddKeybinds(filter: ListFilterModel, count: number) { + const viewRandom = useViewRandom(filter, count); + + useEffect(() => { + Mousetrap.bind("p r", () => { + viewRandom(); + }); + + return () => { + Mousetrap.unbind("p r"); + }; + }, [viewRandom]); +} + interface IImageList { filterHook?: (filter: ListFilterModel) => ListFilterModel; view?: View; @@ -264,167 +509,331 @@ interface IImageList { chapters?: GQL.GalleryChapterDataFragment[]; } -export const ImageList: React.FC = ({ - filterHook, - view, - alterQuery, - extraOperations, - chapters = [], -}) => { - const intl = useIntl(); - const history = useHistory(); - const [isExportDialogOpen, setIsExportDialogOpen] = useState(false); - const [isExportAll, setIsExportAll] = useState(false); - const [slideshowRunning, setSlideshowRunning] = useState(false); +export const FilteredImageList = PatchComponent( + "FilteredImageList", + (props: IImageList) => { + const intl = useIntl(); - const filterMode = GQL.FilterMode.Images; + const [slideshowRunning, setSlideshowRunning] = useState(false); - const otherOperations = [ - ...(extraOperations ?? 
[]), - { - text: intl.formatMessage({ id: "actions.view_random" }), - onClick: viewRandom, - }, - { - text: intl.formatMessage({ id: "actions.export" }), - onClick: onExport, - isDisplayed: showWhenSelected, - }, - { - text: intl.formatMessage({ id: "actions.export_all" }), - onClick: onExportAll, - }, - ]; + const searchFocus = useFocus(); - function addKeybinds( - result: GQL.FindImagesQueryResult, - filter: ListFilterModel - ) { - Mousetrap.bind("p r", () => { - viewRandom(result, filter); + const withSidebar = props.view !== View.GalleryImages; + + const { + filterHook, + view, + alterQuery, + extraOperations: providedOperations = [], + chapters, + } = props; + + // States + const { + showSidebar, + setShowSidebar, + sectionOpen, + setSectionOpen, + loading: sidebarStateLoading, + } = useSidebarState(view); + + const { + filterState, + queryResult, + metadataInfo, + modalState, + listSelect, + showEditFilter, + } = useFilteredItemList({ + filterStateProps: { + filterMode: GQL.FilterMode.Images, + view, + useURL: alterQuery, + }, + queryResultProps: { + useResult: useFindImages, + useMetadataInfo: useFindImagesMetadata, + getCount: (r) => r.data?.findImages.count ?? 0, + getItems: (r) => r.data?.findImages.images ?? 
[], + filterHook, + }, }); - return () => { - Mousetrap.unbind("p r"); - }; - } + const { filter, setFilter } = filterState; - async function viewRandom( - result: GQL.FindImagesQueryResult, - filter: ListFilterModel - ) { - // query for a random image - if (result.data?.findImages) { - const { count } = result.data.findImages; + const { effectiveFilter, result, cachedResult, items, totalCount } = + queryResult; - const index = Math.floor(Math.random() * count); - const filterCopy = cloneDeep(filter); - filterCopy.itemsPerPage = 1; - filterCopy.currentPage = index + 1; - const singleResult = await queryFindImages(filterCopy); - if (singleResult.data.findImages.images.length === 1) { - const { id } = singleResult.data.findImages.images[0]; - // navigate to the image player page - history.push(`/images/${id}`); - } - } - } + const metadataByline = useMemo(() => { + if (cachedResult.loading) return null; - async function onExport() { - setIsExportAll(false); - setIsExportDialogOpen(true); - } + return renderMetadataByline(metadataInfo) ?? 
null; + }, [cachedResult.loading, metadataInfo]); - async function onExportAll() { - setIsExportAll(true); - setIsExportDialogOpen(true); - } + const { + selectedIds, + selectedItems, + onSelectChange, + onSelectAll, + onSelectNone, + onInvertSelection, + hasSelection, + } = listSelect; - function renderContent( - result: GQL.FindImagesQueryResult, - filter: ListFilterModel, - selectedIds: Set, - onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void, - onChangePage: (page: number) => void, - pageCount: number - ) { - function maybeRenderImageExportDialog() { - if (isExportDialogOpen) { - return ( - setIsExportDialogOpen(false)} - /> - ); - } - } + const { modal, showModal, closeModal } = modalState; - function renderImages() { - if (!result.data?.findImages) return; + // Utility hooks + const { setPage, removeCriterion, clearAllCriteria } = useFilterOperations({ + filter, + setFilter, + }); - return ( - closeModal()} /> ); } + const onEdit = useCallback(() => { + showModal( + + ); + }, [showModal, selectedItems, onCloseEditDelete]); + + const onDelete = useCallback(() => { + showModal( + + ); + }, [showModal, selectedItems, onCloseEditDelete]); + + useEffect(() => { + Mousetrap.bind("e", () => { + if (hasSelection) { + onEdit?.(); + } + }); + + Mousetrap.bind("d d", () => { + if (hasSelection) { + onDelete?.(); + } + }); + + return () => { + Mousetrap.unbind("e"); + Mousetrap.unbind("d d"); + }; + }, [hasSelection, onEdit, onDelete]); + + const convertedExtraOperations: IListFilterOperation[] = + providedOperations.map((o) => ({ + ...o, + isDisplayed: o.isDisplayed + ? 
() => o.isDisplayed!(result, filter, selectedIds) + : undefined, + onClick: () => { + o.onClick(result, filter, selectedIds); + }, + })); + + const otherOperations: IListFilterOperation[] = [ + ...convertedExtraOperations, + { + text: intl.formatMessage({ id: "actions.select_all" }), + onClick: () => onSelectAll(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.select_none" }), + onClick: () => onSelectNone(), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.invert_selection" }), + onClick: () => onInvertSelection(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.view_random" }), + onClick: viewRandom, + }, + { + text: `${intl.formatMessage({ id: "actions.generate" })}…`, + onClick: () => { + showModal( + closeModal()} + /> + ); + }, + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.export" }), + onClick: () => onExport(false), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.export_all" }), + onClick: () => onExport(true), + }, + ]; + + // render + if (sidebarStateLoading) return null; + + const operations = ( + + ); + + const pageCount = Math.ceil(totalCount / filter.itemsPerPage); + + const content = ( + <> + + + showEditFilter(c.criterionOption.type)} + onRemoveCriterion={removeCriterion} + onRemoveAll={clearAllCriteria} + /> + +
    + + +
    + + + + + + {totalCount > filter.itemsPerPage && ( +
    +
    + +
    +
    + )} + + ); + return ( <> - {maybeRenderImageExportDialog()} - {renderImages()} + {modal} + {!withSidebar ? ( +
    {content}
    + ) : ( +
    + + + setShowSidebar(false)} + > + setShowSidebar(false)} + count={cachedResult.loading ? undefined : totalCount} + focus={searchFocus} + /> + + setShowSidebar(!showSidebar)} + > + {content} + + + +
    + )} ); } - - function renderEditDialog( - selectedImages: GQL.SlimImageDataFragment[], - onClose: (applied: boolean) => void - ) { - return ; - } - - function renderDeleteDialog( - selectedImages: GQL.SlimImageDataFragment[], - onClose: (confirmed: boolean) => void - ) { - return ; - } - - return ( - - - - ); -}; +); diff --git a/ui/v2.5/src/components/Images/ImageRecommendationRow.tsx b/ui/v2.5/src/components/Images/ImageRecommendationRow.tsx index f0fc84493..0541e5934 100644 --- a/ui/v2.5/src/components/Images/ImageRecommendationRow.tsx +++ b/ui/v2.5/src/components/Images/ImageRecommendationRow.tsx @@ -1,12 +1,9 @@ import React from "react"; -import { Link } from "react-router-dom"; import { useFindImages } from "src/core/StashService"; -import Slider from "@ant-design/react-slick"; import { ListFilterModel } from "src/models/list-filter/filter"; -import { getSlickSliderSettings } from "src/core/recommendations"; -import { RecommendationRow } from "../FrontPage/RecommendationRow"; -import { FormattedMessage } from "react-intl"; import { ImageCard } from "./ImageCard"; +import { PatchComponent } from "src/patch"; +import { FilteredRecommendationRow } from "../FrontPage/FilteredRecommendationRow"; interface IProps { isTouch: boolean; @@ -14,29 +11,21 @@ interface IProps { header: string; } -export const ImageRecommendationRow: React.FC = (props: IProps) => { - const result = useFindImages(props.filter); - const cardCount = result.data?.findImages.count; +export const ImageRecommendationRow: React.FC = PatchComponent( + "ImageRecommendationRow", + (props: IProps) => { + const result = useFindImages(props.filter); + const count = result.data?.findImages.count ?? 0; - if (!result.loading && !cardCount) { - return null; - } - - return ( - - - - } - > - {result.loading ? 
[...Array(props.filter.itemsPerPage)].map((i) => ( @@ -45,7 +34,7 @@ export const ImageRecommendationRow: React.FC = (props: IProps) => { : result.data?.findImages.images.map((i) => ( ))} - - - ); -}; + + ); + } +); diff --git a/ui/v2.5/src/components/Images/ImageWallItem.tsx b/ui/v2.5/src/components/Images/ImageWallItem.tsx index 8403b3a98..a9f681474 100644 --- a/ui/v2.5/src/components/Images/ImageWallItem.tsx +++ b/ui/v2.5/src/components/Images/ImageWallItem.tsx @@ -1,38 +1,50 @@ import React from "react"; -import type { - RenderImageProps, - renderImageClickHandler, - PhotoProps, -} from "react-photo-gallery"; +import { Form } from "react-bootstrap"; +import type { RenderImageProps } from "react-photo-gallery"; +import { useDragMoveSelect } from "../Shared/GridCard/dragMoveSelect"; -interface IImageWallProps { - margin?: string; - index: number; - photo: PhotoProps; - onClick: renderImageClickHandler | null; - direction: "row" | "column"; - top?: number; - left?: number; +interface IExtraProps { + maxHeight: number; + selected?: boolean; + onSelectedChanged?: (selected: boolean, shiftKey: boolean) => void; + selecting?: boolean; } -export const ImageWallItem: React.FC = ( - props: IImageWallProps +export const ImageWallItem: React.FC = ( + props: RenderImageProps & IExtraProps ) => { + const { dragProps } = useDragMoveSelect({ + selecting: props.selecting || false, + selected: props.selected || false, + onSelectedChanged: props.onSelectedChanged, + }); + + const height = Math.min(props.maxHeight, props.photo.height); + const zoomFactor = height / props.photo.height; + const width = props.photo.width * zoomFactor; + type style = Record; - var imgStyle: style = { + var divStyle: style = { margin: props.margin, display: "block", + position: "relative", }; if (props.direction === "column") { - imgStyle.position = "absolute"; - imgStyle.left = props.left; - imgStyle.top = props.top; + divStyle.position = "absolute"; + divStyle.left = props.left; + divStyle.top = 
props.top; } var handleClick = function handleClick( event: React.MouseEvent ) { + if (props.selecting && props.onSelectedChanged) { + props.onSelectedChanged(!props.selected, event.shiftKey); + event.preventDefault(); + event.stopPropagation(); + return; + } if (props.onClick) { props.onClick(event, { index: props.index }); } @@ -41,18 +53,39 @@ export const ImageWallItem: React.FC = ( const video = props.photo.src.includes("preview"); const ImagePreview = video ? "video" : "img"; + let shiftKey = false; + return ( - + {...dragProps} + > + {props.onSelectedChanged && ( + props.onSelectedChanged!(!props.selected, shiftKey)} + onClick={(event: React.MouseEvent) => { + shiftKey = event.shiftKey; + event.stopPropagation(); + }} + /> + )} + +
    ); }; diff --git a/ui/v2.5/src/components/Images/Images.tsx b/ui/v2.5/src/components/Images/Images.tsx index 91edfdf79..932bbc2c1 100644 --- a/ui/v2.5/src/components/Images/Images.tsx +++ b/ui/v2.5/src/components/Images/Images.tsx @@ -3,11 +3,11 @@ import { Route, Switch } from "react-router-dom"; import { Helmet } from "react-helmet"; import { useTitleProps } from "src/hooks/title"; import Image from "./ImageDetails/Image"; -import { ImageList } from "./ImageList"; +import { FilteredImageList } from "./ImageList"; import { View } from "../List/views"; const Images: React.FC = () => { - return ; + return ; }; const ImageRoutes: React.FC = () => { diff --git a/ui/v2.5/src/components/Images/styles.scss b/ui/v2.5/src/components/Images/styles.scss index 936947bc3..0a8ca760e 100644 --- a/ui/v2.5/src/components/Images/styles.scss +++ b/ui/v2.5/src/components/Images/styles.scss @@ -9,7 +9,7 @@ order: 1; } - .image-studio-image { + .studio-logo { flex: 0 0 25%; order: 2; } @@ -86,6 +86,7 @@ } &-preview { + align-items: center; display: flex; justify-content: center; margin-bottom: 5px; @@ -94,7 +95,6 @@ &-image { height: 100%; object-fit: contain; - object-position: top; width: 100%; } @@ -175,6 +175,24 @@ $imageTabWidth: 450px; font-size: 1.3em; height: calc(1.5em + 0.75rem + 2px); } + + .form-group[data-field="urls"] .string-list-input input.form-control { + font-size: 0.85em; + } + + @include media-breakpoint-up(xl) { + .custom-fields-input { + .custom-fields-field { + flex: 0 0 25%; + max-width: 25%; + } + + .custom-fields-value { + flex: 0 0 75%; + max-width: 75%; + } + } + } } .image-file-card.card { diff --git a/ui/v2.5/src/components/List/CriterionEditor.tsx b/ui/v2.5/src/components/List/CriterionEditor.tsx index eba212223..8a72d6e43 100644 --- a/ui/v2.5/src/components/List/CriterionEditor.tsx +++ b/ui/v2.5/src/components/List/CriterionEditor.tsx @@ -42,12 +42,21 @@ import { StudiosCriterion } from "src/models/list-filter/criteria/studios"; import StudiosFilter 
from "./Filters/StudiosFilter"; import { TagsCriterion } from "src/models/list-filter/criteria/tags"; import TagsFilter from "./Filters/TagsFilter"; -import { PhashCriterion } from "src/models/list-filter/criteria/phash"; +import { + PhashCriterion, + DuplicatedCriterion, +} from "src/models/list-filter/criteria/phash"; import { PhashFilter } from "./Filters/PhashFilter"; +import { DuplicatedFilter } from "./Filters/DuplicateFilter"; import { PathCriterion } from "src/models/list-filter/criteria/path"; import { ModifierSelectorButtons } from "./ModifierSelect"; import { CustomFieldsCriterion } from "src/models/list-filter/criteria/custom-fields"; import { CustomFieldsFilter } from "./Filters/CustomFieldsFilter"; +import { FolderFilter } from "./Filters/FolderFilter"; +import { + FolderCriterion, + ParentFolderCriterion, +} from "src/models/list-filter/criteria/folder"; interface IGenericCriterionEditor { criterion: ModifierCriterion; @@ -64,7 +73,9 @@ const GenericCriterionEditor: React.FC = ({ if ( criterion instanceof PerformersCriterion || criterion instanceof StudiosCriterion || - criterion instanceof TagsCriterion + criterion instanceof TagsCriterion || + criterion instanceof FolderCriterion || + criterion instanceof ParentFolderCriterion ) { return false; } @@ -159,6 +170,18 @@ const GenericCriterionEditor: React.FC = ({ ); } + if ( + criterion instanceof FolderCriterion || + criterion instanceof ParentFolderCriterion + ) { + return ( + setCriterion(c)} + /> + ); + } + if (criterion instanceof ILabeledIdCriterion) { return ( = ({ ); } + if (criterion instanceof DuplicatedCriterion) { + return ( + + ); + } + if (criterion instanceof CustomFieldsCriterion) { return ( diff --git a/ui/v2.5/src/components/List/EditFilterDialog.tsx b/ui/v2.5/src/components/List/EditFilterDialog.tsx index 4b31ac31a..3f0f486b8 100644 --- a/ui/v2.5/src/components/List/EditFilterDialog.tsx +++ b/ui/v2.5/src/components/List/EditFilterDialog.tsx @@ -1,7 +1,6 @@ import cloneDeep from 
"lodash-es/cloneDeep"; import React, { useCallback, - useContext, useEffect, useMemo, useRef, @@ -14,7 +13,7 @@ import { CriterionOption, } from "src/models/list-filter/criteria/criterion"; import { FormattedMessage, useIntl } from "react-intl"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { ListFilterModel } from "src/models/list-filter/filter"; import { getFilterOptions } from "src/models/list-filter/factory"; import { FilterTags } from "./FilterTags"; @@ -29,11 +28,16 @@ import { import { useCompare, usePrevious } from "src/hooks/state"; import { CriterionType } from "src/models/list-filter/types"; import { useToast } from "src/hooks/Toast"; -import { useConfigureUI } from "src/core/StashService"; -import { FilterMode } from "src/core/generated-graphql"; +import { useConfigureUI, useSaveFilter } from "src/core/StashService"; +import { + FilterMode, + SavedFilterDataFragment, +} from "src/core/generated-graphql"; import { useFocusOnce } from "src/utils/focus"; import Mousetrap from "mousetrap"; import ScreenUtils from "src/utils/screen"; +import { LoadFilterDialog, SaveFilterDialog } from "./SavedFilterList"; +import { SearchTermInput } from "./ListFilter"; interface ICriterionList { criteria: string[]; @@ -45,6 +49,7 @@ interface ICriterionList { optionSelected: (o?: CriterionOption) => void; onRemoveCriterion: (c: string) => void; onTogglePin: (c: CriterionOption) => void; + externallySelected?: boolean; } const CriterionOptionList: React.FC = ({ @@ -57,7 +62,11 @@ const CriterionOptionList: React.FC = ({ optionSelected, onRemoveCriterion, onTogglePin, + externallySelected = false, }) => { + const { configuration } = useConfigurationContext(); + const { sfwContentMode } = configuration.interface; + const prevCriterion = usePrevious(currentCriterion); const scrolled = useRef(false); @@ -96,14 +105,19 @@ const CriterionOptionList: React.FC = ({ // scrolling to the current criterion 
doesn't work well when the // dialog is already open, so limit to when we click on the // criterion from the external tags - if (!scrolled.current && type && criteriaRefs[type]?.current) { + if ( + externallySelected && + !scrolled.current && + type && + criteriaRefs[type]?.current + ) { criteriaRefs[type].current!.scrollIntoView({ behavior: "smooth", block: "start", }); scrolled.current = true; } - }, [currentCriterion, criteriaRefs, type]); + }, [externallySelected, currentCriterion, criteriaRefs, type]); function getReleventCriterion(t: CriterionType) { if (currentCriterion?.criterionOption.type === t) { @@ -136,7 +150,9 @@ const CriterionOptionList: React.FC = ({ className="collapse-icon fa-fw" icon={type === c.type ? faChevronDown : faChevronRight} /> - +
    {criteria.some((cc) => c.type === cc) && ( - +
    + + +
    +
    + + +
    diff --git a/ui/v2.5/src/components/List/FilterTags.tsx b/ui/v2.5/src/components/List/FilterTags.tsx index a384f05ca..28c9f77fa 100644 --- a/ui/v2.5/src/components/List/FilterTags.tsx +++ b/ui/v2.5/src/components/List/FilterTags.tsx @@ -1,32 +1,64 @@ -import React, { PropsWithChildren } from "react"; -import { Badge, BadgeProps, Button } from "react-bootstrap"; -import { Criterion } from "src/models/list-filter/criteria/criterion"; +import React, { + PropsWithChildren, + useEffect, + useLayoutEffect, + useReducer, + useRef, +} from "react"; +import { Badge, BadgeProps, Button, Overlay, Popover } from "react-bootstrap"; +import { + Criterion, + UnsupportedCriterion, +} from "src/models/list-filter/criteria/criterion"; import { FormattedMessage, useIntl } from "react-intl"; import { Icon } from "../Shared/Icon"; -import { faTimes } from "@fortawesome/free-solid-svg-icons"; +import { + faExclamationTriangle, + faMagnifyingGlass, + faTimes, +} from "@fortawesome/free-solid-svg-icons"; import { BsPrefixProps, ReplaceProps } from "react-bootstrap/esm/helpers"; import { CustomFieldsCriterion } from "src/models/list-filter/criteria/custom-fields"; +import { useDebounce } from "src/hooks/debounce"; +import cx from "classnames"; +import { useConfigurationContext } from "src/hooks/Config"; type TagItemProps = PropsWithChildren< ReplaceProps<"span", BsPrefixProps<"span"> & BadgeProps> >; export const TagItem: React.FC = (props) => { - const { children } = props; + const { className, children, ...others } = props; return ( - + {children} ); }; export const FilterTag: React.FC<{ + className?: string; label: React.ReactNode; onClick: React.MouseEventHandler; onRemove: React.MouseEventHandler; -}> = ({ label, onClick, onRemove }) => { + unsupported?: boolean; +}> = ({ className, label, onClick, onRemove, unsupported }) => { + function handleClick(e: React.MouseEvent) { + if (unsupported) { + return; + } + onClick(e); + } + return ( - + + {unsupported && ( + + )} {label} + 
{selected} + +
    + ); +}; export interface IItemListOperation { text: string; @@ -42,8 +78,10 @@ export interface IFilteredListToolbar { onEdit?: () => void; onDelete?: () => void; operations?: IListFilterOperation[]; + operationComponent?: React.ReactNode; zoomable?: boolean; - onToggleSidebar?: () => void; + filterable?: boolean; + sortable?: boolean; } export const FilteredListToolbar: React.FC = ({ @@ -55,63 +93,96 @@ export const FilteredListToolbar: React.FC = ({ onEdit, onDelete, operations, + operationComponent, zoomable = false, - onToggleSidebar, + filterable = true, + sortable = true, }) => { - const intl = useIntl(); const filterOptions = filter.options; const { setDisplayMode, setZoom } = useFilterOperations({ filter, setFilter, }); - const { selectedIds, onSelectAll, onSelectNone } = listSelect; + const { selectedIds, onSelectAll, onSelectNone, onInvertSelection } = + listSelect; + const hasSelection = selectedIds.size > 0; + + const renderOperations = operationComponent ?? ( + 0} + onEdit={onEdit} + onDelete={onDelete} + /> + ); return ( - - - {onToggleSidebar && ( - - - - )} - - - - {showEditFilter && ( - showEditFilter()} - view={view} - withSidebar={!!onToggleSidebar} - /> - )} - + {hasSelection ? ( + 0} - onEdit={onEdit} - onDelete={onDelete} /> - - + {filterable && ( + + )} + + {filterable && ( + + + showEditFilter()} + count={filter.count()} + /> + + )} + + {sortable && ( + + setFilter(filter.setSortBy(e ?? 
undefined)) + } + onChangeSortDirection={() => + setFilter(filter.toggleSortDirection()) + } + onReshuffleRandomSort={() => + setFilter(filter.reshuffleRandomSort()) + } + /> + )} + + setFilter(filter.setPageSize(size))} /> - - - + + )} + + {renderOperations} + + ); }; diff --git a/ui/v2.5/src/components/List/Filters/BooleanFilter.tsx b/ui/v2.5/src/components/List/Filters/BooleanFilter.tsx index 18df1b9f1..657e9ddbd 100644 --- a/ui/v2.5/src/components/List/Filters/BooleanFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/BooleanFilter.tsx @@ -54,6 +54,7 @@ interface ISidebarFilter { option: CriterionOption; filter: ListFilterModel; setFilter: (f: ListFilterModel) => void; + sectionID?: string; } export const SidebarBooleanFilter: React.FC = ({ @@ -61,6 +62,7 @@ export const SidebarBooleanFilter: React.FC = ({ option, filter, setFilter, + sectionID, }) => { const intl = useIntl(); @@ -127,6 +129,7 @@ export const SidebarBooleanFilter: React.FC = ({ onUnselect={onUnselect} selected={selected} singleValue + sectionID={sectionID} /> ); diff --git a/ui/v2.5/src/components/List/Filters/DuplicateFilter.tsx b/ui/v2.5/src/components/List/Filters/DuplicateFilter.tsx new file mode 100644 index 000000000..819d5b885 --- /dev/null +++ b/ui/v2.5/src/components/List/Filters/DuplicateFilter.tsx @@ -0,0 +1,227 @@ +import React, { useCallback, useMemo, useState } from "react"; +import { useIntl } from "react-intl"; +import { ListFilterModel } from "src/models/list-filter/filter"; +import { Option, SelectedList } from "./SidebarListFilter"; +import { + DuplicatedCriterion, + DuplicatedCriterionOption, + DuplicationFieldId, + DUPLICATION_FIELD_IDS, + DUPLICATION_FIELD_MESSAGE_IDS, +} from "src/models/list-filter/criteria/phash"; +import { IndeterminateCheckbox } from "src/components/Shared/IndeterminateCheckbox"; +import { SidebarSection } from "src/components/Shared/Sidebar"; +import { Icon } from "src/components/Shared/Icon"; +import { faPlus } from 
"@fortawesome/free-solid-svg-icons"; +import { keyboardClickHandler } from "src/utils/keyboard"; + +interface IDuplicatedFilter { + criterion: DuplicatedCriterion; + setCriterion: (c: DuplicatedCriterion) => void; +} + +export const DuplicatedFilter: React.FC = ({ + criterion, + setCriterion, +}) => { + const intl = useIntl(); + + function onFieldChange( + fieldId: DuplicationFieldId, + value: boolean | undefined + ) { + const c = criterion.clone(); + if (value === undefined) { + delete c.value[fieldId]; + } else { + c.value[fieldId] = value; + } + setCriterion(c); + } + + return ( +
    + {DUPLICATION_FIELD_IDS.map((fieldId) => ( + onFieldChange(fieldId, v)} + /> + ))} +
    + ); +}; + +interface ISidebarDuplicateFilterProps { + title?: React.ReactNode; + filter: ListFilterModel; + setFilter: (f: ListFilterModel) => void; + sectionID?: string; +} + +export const SidebarDuplicateFilter: React.FC = ({ + title, + filter, + setFilter, + sectionID, +}) => { + const intl = useIntl(); + const [expandedType, setExpandedType] = useState(null); + + const trueLabel = intl.formatMessage({ id: "true" }); + const falseLabel = intl.formatMessage({ id: "false" }); + + // Get label for a duplicate type + const getLabel = useCallback( + (typeId: DuplicationFieldId) => + intl.formatMessage({ id: DUPLICATION_FIELD_MESSAGE_IDS[typeId] }), + [intl] + ); + + // Get the single duplicated criterion from the filter + const getCriterion = useCallback((): DuplicatedCriterion | null => { + const criteria = filter.criteriaFor( + DuplicatedCriterionOption.type + ) as DuplicatedCriterion[]; + return criteria.length > 0 ? criteria[0] : null; + }, [filter]); + + // Get value for a specific type from the criterion + const getTypeValue = useCallback( + (typeId: DuplicationFieldId): boolean | undefined => { + const criterion = getCriterion(); + if (!criterion) return undefined; + return criterion.value[typeId]; + }, + [getCriterion] + ); + + // Build selected items list + const selected: Option[] = useMemo(() => { + const result: Option[] = []; + const criterion = getCriterion(); + if (!criterion) return result; + + for (const typeId of DUPLICATION_FIELD_IDS) { + const value = criterion.value[typeId]; + if (value !== undefined) { + const valueLabel = value ? 
trueLabel : falseLabel; + result.push({ + id: typeId, + label: `${getLabel(typeId)}: ${valueLabel}`, + }); + } + } + + return result; + }, [getCriterion, trueLabel, falseLabel, getLabel]); + + // Available options - show options that aren't already selected + const options = useMemo(() => { + const result: { id: DuplicationFieldId; label: string }[] = []; + + for (const typeId of DUPLICATION_FIELD_IDS) { + if (getTypeValue(typeId) === undefined) { + result.push({ id: typeId, label: getLabel(typeId) }); + } + } + + return result; + }, [getTypeValue, getLabel]); + + function onToggleExpand(id: string) { + setExpandedType(expandedType === id ? null : id); + } + + function onUnselect(item: Option) { + const typeId = item.id as DuplicationFieldId; + const criterion = getCriterion(); + + if (!criterion) return; + + const newCriterion = criterion.clone(); + delete newCriterion.value[typeId]; + + // If no fields are set, remove the criterion entirely + const hasAnyValue = DUPLICATION_FIELD_IDS.some( + (id) => newCriterion.value[id] !== undefined + ); + + if (!hasAnyValue) { + setFilter(filter.removeCriterion(DuplicatedCriterionOption.type)); + } else { + setFilter( + filter.replaceCriteria(DuplicatedCriterionOption.type, [newCriterion]) + ); + } + setExpandedType(null); + } + + function onSelectValue(typeId: string, value: boolean) { + const criterion = getCriterion(); + const newCriterion = criterion + ? criterion.clone() + : (DuplicatedCriterionOption.makeCriterion() as DuplicatedCriterion); + + newCriterion.value[typeId as DuplicationFieldId] = value; + setFilter( + filter.replaceCriteria(DuplicatedCriterionOption.type, [newCriterion]) + ); + setExpandedType(null); + } + + return ( + onUnselect(i)} /> + } + > +
    + +
    +
    + ); +}; diff --git a/ui/v2.5/src/components/List/Filters/FilterButton.tsx b/ui/v2.5/src/components/List/Filters/FilterButton.tsx index b92ddcf0d..63e026df2 100644 --- a/ui/v2.5/src/components/List/Filters/FilterButton.tsx +++ b/ui/v2.5/src/components/List/Filters/FilterButton.tsx @@ -1,28 +1,32 @@ -import React, { useMemo } from "react"; +import React from "react"; import { Badge, Button } from "react-bootstrap"; -import { ListFilterModel } from "src/models/list-filter/filter"; import { faFilter } from "@fortawesome/free-solid-svg-icons"; import { Icon } from "src/components/Shared/Icon"; import { useIntl } from "react-intl"; interface IFilterButtonProps { - filter: ListFilterModel; + count?: number; onClick: () => void; + title?: string; } export const FilterButton: React.FC = ({ - filter, + count = 0, onClick, + title, }) => { const intl = useIntl(); - const count = useMemo(() => filter.count(), [filter]); + + if (!title) { + title = intl.formatMessage({ id: "search_filter.edit_filter" }); + } return ( +
    + } + sectionID={savedFiltersSectionID} > { - Mousetrap.bind("/", (e) => { - if (!showSidebar) { - setShowSidebar(true); - e.preventDefault(); - } - }); - - return () => { - Mousetrap.unbind("/"); - }; - }, [showSidebar, setShowSidebar]); - // Hide the sidebar when the user presses the "Esc" key useEffect(() => { Mousetrap.bind("esc", (e) => { diff --git a/ui/v2.5/src/components/List/Filters/FolderFilter.tsx b/ui/v2.5/src/components/List/Filters/FolderFilter.tsx new file mode 100644 index 000000000..3eaaf0427 --- /dev/null +++ b/ui/v2.5/src/components/List/Filters/FolderFilter.tsx @@ -0,0 +1,767 @@ +import React, { useCallback, useEffect, useMemo, useState } from "react"; +import { + CriterionModifier, + FilterMode, + FolderDataFragment, + MultiCriterionInput, + useFindFolderHierarchyForIDsQuery, + useFindFoldersForQueryQuery, + useFindRootFoldersForSelectQuery, +} from "src/core/generated-graphql"; +import { + ISidebarSectionProps, + SidebarSection, +} from "src/components/Shared/Sidebar"; +import { + faChevronDown, + faChevronRight, + faMinus, + faPlus, +} from "@fortawesome/free-solid-svg-icons"; +import { ExpandCollapseButton } from "src/components/Shared/CollapseButton"; +import cx from "classnames"; +import { queryFindSubFolders } from "src/core/StashService"; +import { keyboardClickHandler } from "src/utils/keyboard"; +import { ListFilterModel } from "src/models/list-filter/filter"; +import { + FolderCriterion, + FolderCriterionOption, +} from "src/models/list-filter/criteria/folder"; +import { Option, SelectedList } from "./SidebarListFilter"; +import { + defineMessages, + FormattedMessage, + MessageDescriptor, + useIntl, +} from "react-intl"; +import { Icon } from "src/components/Shared/Icon"; +import { Button, Form } from "react-bootstrap"; +import { DepthSelector } from "./SelectableFilter"; +import ClearableInput from "src/components/Shared/ClearableInput"; +import { useDebouncedState } from "src/hooks/debounce"; +import { ModifierCriterionOption } 
from "src/models/list-filter/criteria/criterion"; + +interface IFolder extends FolderDataFragment { + children?: IFolder[]; + expanded: boolean; +} + +const FolderRow: React.FC<{ + folder: IFolder; + level?: number; + canExclude?: boolean; + toggleExpanded: (folder: IFolder) => void; + onSelect: (folder: IFolder, exclude?: boolean) => void; +}> = ({ folder, level, toggleExpanded, onSelect, canExclude }) => { + return ( + <> +
  • + onSelect(folder)} + onKeyDown={keyboardClickHandler(() => onSelect(folder))} + tabIndex={0} + > + + + toggleExpanded(folder)} + collapsedIcon={faChevronRight} + notCollapsedIcon={faChevronDown} + /> + + {folder.basename} + + {canExclude && ( + + )} + +
  • + {folder.expanded && + folder.children?.map((child) => ( + + ))} + + ); +}; + +function toggleExpandedFn(object: IFolder): (f: IFolder) => IFolder { + return (f: IFolder) => { + if (f.id === object.id) { + return { ...f, expanded: !f.expanded }; + } + + if (f.children) { + return { + ...f, + children: f.children.map(toggleExpandedFn(object)), + }; + } + + return f; + }; +} + +function replaceFolder(folder: IFolder): (f: IFolder) => IFolder { + return (f: IFolder) => { + if (f.id === folder.id) { + return folder; + } + + if (f.children) { + return { + ...f, + children: f.children.map(replaceFolder(folder)), + }; + } + + return f; + }; +} + +function mergeFolderMaps(base: IFolder[], update: IFolder[]): IFolder[] { + const ret = [...base]; + + update.forEach((updateFolder) => { + const existingIndex = ret.findIndex((f) => f.id === updateFolder.id); + if (existingIndex === -1) { + // not found, add to the end + ret.push(updateFolder); + } else { + // found, replace + ret[existingIndex] = updateFolder; + } + }); + + return ret; +} + +function useFolderMap(props: { + query: string; + skip?: boolean; + initialSelected?: string[]; + mode?: FilterMode; +}) { + const { query, skip = false, initialSelected, mode } = props; + + const [cachedInitialSelected] = useState(initialSelected ?? []); + + // exclude zip folders for scenes and galleries + const excludeZipFolders = + mode === FilterMode.Scenes || mode === FilterMode.Galleries; + + const zipFileFilter: MultiCriterionInput | undefined = useMemo( + () => + excludeZipFolders + ? { + modifier: CriterionModifier.IsNull, + } + : undefined, + [excludeZipFolders] + ); + + const folderFilterForQuery = useMemo( + () => (zipFileFilter ? 
{ zip_file: zipFileFilter } : undefined), + [zipFileFilter] + ); + + const { data: rootFoldersResult } = useFindRootFoldersForSelectQuery({ + skip, + variables: { + zip_file_filter: zipFileFilter, + }, + }); + + const { data: queryFoldersResult } = useFindFoldersForQueryQuery({ + skip: !query, + variables: { + filter: { q: query, per_page: 200 }, + folder_filter: folderFilterForQuery, + }, + }); + + const { data: initialSelectedResult } = useFindFolderHierarchyForIDsQuery({ + skip: !initialSelected || cachedInitialSelected.length === 0, + variables: { + ids: cachedInitialSelected ?? [], + }, + }); + + const rootFolders: IFolder[] = useMemo(() => { + const ret = rootFoldersResult?.findFolders.folders ?? []; + return ret.map((f) => ({ ...f, expanded: false, children: undefined })); + }, [rootFoldersResult]); + + const initialSelectedFolders: IFolder[] = useMemo(() => { + const ret: IFolder[] = []; + (initialSelectedResult?.findFolders.folders ?? []).forEach((folder) => { + if (!folder.parent_folders.length) { + // add root folder if not present + if (!ret.find((f) => f.id === folder.id)) { + ret.push({ ...folder, expanded: true, children: [] }); + } + return; + } + + let currentParent: IFolder | undefined; + + for (let i = folder.parent_folders.length - 1; i >= 0; i--) { + const thisFolder = folder.parent_folders[i]; + let existing: IFolder | undefined; + + if (i === folder.parent_folders.length - 1) { + // last parent, add the folder as root if not present + existing = ret.find((f) => f.id === thisFolder.id); + if (!existing) { + existing = { + ...folder.parent_folders[i], + expanded: true, + children: folder.parent_folders[i].sub_folders + // filter out zip folders if needed + .filter((f) => f.zip_file === null || !excludeZipFolders) + .map((f) => ({ + ...f, + expanded: false, + children: undefined, + })), + }; + ret.push(existing); + } + currentParent = existing; + continue; + } + + const existingIndex = + currentParent!.children?.findIndex((f) => f.id === 
thisFolder.id) ?? + -1; + if (existingIndex === -1) { + // should be guaranteed + throw new Error( + `Parent folder ${thisFolder.id} not found in children of ${ + currentParent!.id + }` + ); + } + + existing = currentParent!.children![existingIndex]; + + // replace children + existing = { + ...existing, + expanded: true, + // filter out zip folders if needed + children: thisFolder.sub_folders + .filter((f) => f.zip_file === null || !excludeZipFolders) + .map((f) => ({ + ...f, + expanded: false, + children: undefined, + })), + }; + + currentParent!.children![existingIndex] = existing; + currentParent = existing; + } + }); + return ret; + }, [initialSelectedResult, excludeZipFolders]); + + const mergedRootFolders = useMemo(() => { + if (query) { + return rootFolders; + } + + return mergeFolderMaps(rootFolders, initialSelectedFolders); + }, [rootFolders, initialSelectedFolders, query]); + + const queryFolders: IFolder[] = useMemo(() => { + // construct the folder list from the query result + const ret: IFolder[] = []; + + (queryFoldersResult?.findFolders.folders ?? 
[]).forEach((folder) => { + if (!folder.parent_folders.length) { + // no parents, just add it if not present + if (!ret.find((f) => f.id === folder.id)) { + ret.push({ ...folder, expanded: true, children: [] }); + } + return; + } + + // expand the parent folders + let currentParent: IFolder | undefined; + for (let i = folder.parent_folders.length - 1; i >= 0; i--) { + const thisFolder = folder.parent_folders[i]; + let existing: IFolder | undefined; + + if (i === folder.parent_folders.length - 1) { + // last parent, add the folder as root + existing = ret.find((f) => f.id === thisFolder.id); + if (!existing) { + existing = { + ...folder.parent_folders[i], + expanded: true, + children: [], + }; + ret.push(existing); + } + currentParent = existing; + continue; + } + + // find folder in current parent's children + // currentParent is guaranteed to be defined here + existing = currentParent!.children?.find((f) => f.id === thisFolder.id); + if (!existing) { + // add to current parent's children + existing = { + ...thisFolder, + expanded: true, + children: [], + }; + currentParent!.children!.push(existing); + } + currentParent = existing; + } + + if (!currentParent) { + return; + } + + if (!currentParent.children) { + currentParent.children = []; + } + + // currentParent is now the immediate parent folder + currentParent!.children!.push({ + ...folder, + expanded: false, + children: undefined, + }); + }); + return ret; + }, [queryFoldersResult]); + + const [folderMap, setFolderMap] = React.useState([]); + + useEffect(() => { + if (!query) { + setFolderMap(mergedRootFolders); + } else { + setFolderMap(queryFolders); + } + }, [query, mergedRootFolders, queryFolders]); + + async function onToggleExpanded(folder: IFolder) { + setFolderMap(folderMap.map(toggleExpandedFn(folder))); + + // query children folders if not already loaded + if (folder.children === undefined) { + const subFolderResult = await queryFindSubFolders( + folder.id, + excludeZipFolders + ); + 
setFolderMap((current) => + current.map( + replaceFolder({ + ...folder, + expanded: true, + children: subFolderResult.data.findFolders.folders.map((f) => ({ + ...f, + expanded: false, + })), + }) + ) + ); + } + } + + return { folderMap, onToggleExpanded }; +} + +function getMatchingFolders(folders: IFolder[], query: string): IFolder[] { + let matches: IFolder[] = []; + + const queryLower = query.toLowerCase(); + + folders.forEach((folder) => { + if ( + folder.basename.toLowerCase().includes(queryLower) || + folder.path.toLowerCase() === queryLower + ) { + matches.push(folder); + } + + if (folder.children) { + matches = matches.concat(getMatchingFolders(folder.children, query)); + } + }); + + return matches; +} + +export const FolderSelector: React.FC<{ + onSelect: (folder: IFolder, exclude?: boolean) => void; + canExclude?: boolean; + preListContent?: React.ReactNode; + folderMap: IFolder[]; + onToggleExpanded: (folder: IFolder) => void; +}> = ({ + onSelect, + preListContent, + canExclude = false, + folderMap, + onToggleExpanded, +}) => { + return ( +
      + {preListContent} + {folderMap.map((folder) => ( + onSelect(f, exclude)} + toggleExpanded={onToggleExpanded} + canExclude={canExclude} + /> + ))} +
    + ); +}; + +interface IInputFilterProps { + criterion: FolderCriterion; + setCriterion: (c: FolderCriterion) => void; + mode?: FilterMode; +} + +export const FolderFilter: React.FC = ({ + criterion, + setCriterion, + mode, +}) => { + const intl = useIntl(); + const [query, setQuery] = useState(""); + const [displayQuery, onQueryChange] = useDebouncedState(query, setQuery, 250); + + const { folderMap, onToggleExpanded } = useFolderMap({ query, mode }); + + const messages = defineMessages({ + sub_folder_depth: { + id: "sub_folder_depth", + defaultMessage: "Levels (empty for all)", + }, + }); + + function criterionOptionTypeToIncludeID(): string { + return "include-sub-folders"; + } + + function criterionOptionTypeToIncludeUIString(): MessageDescriptor { + const optionType = "include_sub_folders"; + + return { + id: optionType, + }; + } + + function onDepthChanged(depth: number) { + // this could be ParentFolderCriterion, but the types are the same + const newValue = criterion.clone() as FolderCriterion; + newValue.value.depth = depth; + setCriterion(newValue); + } + + function onSelect(folder: IFolder, exclude: boolean = false) { + // toggle selection + const newValue = criterion.clone() as FolderCriterion; + + if (!exclude) { + if (newValue.value.items.find((i) => i.id === folder.id)) { + return; + } + + newValue.value.items.push({ id: folder.id, label: folder.path }); + } else { + if (newValue.value.excluded.find((i) => i.id === folder.id)) { + return; + } + + newValue.value.excluded.push({ id: folder.id, label: folder.path }); + } + + setCriterion(newValue); + } + + const onUnselect = useCallback( + (i: Option, excluded?: boolean) => { + const newValue = criterion.clone() as FolderCriterion; + + if (!excluded) { + newValue.value.items = newValue.value.items.filter( + (item) => item.id !== i.id + ); + } else { + newValue.value.excluded = newValue.value.excluded.filter( + (item) => item.id !== i.id + ); + } + setCriterion(newValue); + }, + [criterion, 
setCriterion] + ); + + function onEnter() { + if (!query) return; + + // if there is a single folder that matches the query, select it + const matchingFolders = getMatchingFolders(folderMap, query); + if (matchingFolders.length === 1) { + onSelect(matchingFolders[0]); + } + } + + const selectedList = useMemo(() => { + const selected: Option[] = + criterion.value?.items.map((item) => ({ + id: item.id, + label: item.label, + })) ?? []; + + return ; + }, [criterion, onUnselect]); + + const excludedList = useMemo(() => { + const selected: Option[] = + criterion.value?.excluded.map((item) => ({ + id: item.id, + label: item.label, + })) ?? []; + + return ( + onUnselect(i, true)} + /> + ); + }, [criterion, onUnselect]); + + return ( +
    + + + + {selectedList} + {excludedList} + onQueryChange(v)} + placeholder={`${intl.formatMessage({ id: "actions.search" })}…`} + onEnter={onEnter} + /> + + +
    + ); +}; + +export const SidebarFolderFilter: React.FC< + ISidebarSectionProps & { + filter: ListFilterModel; + setFilter: (f: ListFilterModel) => void; + criterionOption?: ModifierCriterionOption; + } +> = (props) => { + const intl = useIntl(); + const [skip, setSkip] = useState(true); + const [query, setQuery] = useState(""); + const [displayQuery, onQueryChange] = useDebouncedState(query, setQuery, 250); + + function onOpen() { + setSkip(false); + props.onOpen?.(); + } + + const option = props.criterionOption ?? FolderCriterionOption; + const { filter, setFilter } = props; + + const criterion = useMemo(() => { + const ret = filter.criteria.find( + (c) => c.criterionOption.type === option.type + ); + if (ret) return ret as FolderCriterion; + + const newCriterion = filter.makeCriterion(option.type) as FolderCriterion; + return newCriterion; + }, [option.type, filter]); + + const subDirsSelected = criterion.value?.depth === -1; + + // if there are multiple values or excluded values, then we show none of the + // current values + const multipleSelected = + criterion.value.items.length > 1 || criterion.value.excluded.length > 0; + + const { folderMap, onToggleExpanded } = useFolderMap({ + query, + skip, + initialSelected: criterion.value.items.map((i) => i.id), + mode: filter.mode, + }); + + function onSelect(folder: IFolder) { + // maintain sub-folder select if present + const depth = subDirsSelected ? -1 : 0; + + const c = criterion.clone() as FolderCriterion; + c.value = { + items: [{ id: folder.id, label: folder.path }], + depth, + excluded: [], + }; + + const newCriteria = props.filter.criteria.filter( + (cc) => cc.criterionOption.type !== option.type + ); + + if (c.isValid()) newCriteria.push(c); + + setFilter(props.filter.setCriteria(newCriteria)); + } + + function onSelectSubfolders() { + const c = criterion.clone() as FolderCriterion; + c.value = { + items: c.value?.items ?? [], + depth: -1, + excluded: c.value?.excluded ?? 
[], + }; + + setFilter(props.filter.replaceCriteria(option.type, [c])); + } + + const onUnselect = useCallback( + (i: Option) => { + if (i.className === "modifier-object") { + // subfolders option + const c = criterion.clone() as FolderCriterion; + c.value = { + items: c.value?.items ?? [], + depth: 0, + excluded: c.value?.excluded ?? [], + }; + + setFilter(props.filter.replaceCriteria(option.type, [c])); + return; + } + + setFilter(props.filter.removeCriterion(option.type)); + }, + [props.filter, setFilter, option.type, criterion] + ); + + function onEnter() { + if (!query) return; + + // if there is a single folder that matches the query, select it + const matchingFolders = getMatchingFolders(folderMap, query); + if (matchingFolders.length === 1) { + onSelect(matchingFolders[0]); + } + } + + const selectedList = useMemo(() => { + if (multipleSelected) { + return null; + } + + const selected: Option[] = + criterion.value?.items.map((item) => ({ + id: item.id, + label: item.label, + })) ?? []; + + if (subDirsSelected) { + selected.push({ + id: "subfolders", + label: "(" + intl.formatMessage({ id: "sub_folders" }) + ")", + className: "modifier-object", + }); + } + + return ; + }, [intl, multipleSelected, subDirsSelected, criterion, onUnselect]); + + const modifierItem = criterion.value.items.length > 0 && + !multipleSelected && + !subDirsSelected && ( +
  • + + + + () + + +
  • + ); + + return ( + + onQueryChange(v)} + placeholder={`${intl.formatMessage({ id: "actions.search" })}…`} + onEnter={onEnter} + /> + + onSelect(f)} + /> + + ); +}; diff --git a/ui/v2.5/src/components/List/Filters/LabeledIdFilter.tsx b/ui/v2.5/src/components/List/Filters/LabeledIdFilter.tsx index ef309ecde..355a85d67 100644 --- a/ui/v2.5/src/components/List/Filters/LabeledIdFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/LabeledIdFilter.tsx @@ -18,9 +18,15 @@ import { Option } from "./SidebarListFilter"; import { CriterionModifier, FilterMode, + GalleryFilterType, + GroupFilterType, + ImageFilterType, InputMaybe, IntCriterionInput, + PerformerFilterType, SceneFilterType, + SceneMarkerFilterType, + StudioFilterType, } from "src/core/generated-graphql"; import { useIntl } from "react-intl"; @@ -82,7 +88,7 @@ export const LabeledIdFilter: React.FC = ({ ); }; -type ModifierValue = "any" | "none" | "any_of" | "only" | "include_subs"; +export type ModifierValue = "any" | "none" | "any_of" | "only" | "include_subs"; export function getModifierCandidates(props: { modifier: CriterionModifier; @@ -321,18 +327,24 @@ export function useCriterion( return { criterion, setCriterion }; } +export interface IUseQueryHookProps { + q: string; + filter?: ListFilterModel; + filterHook?: (filter: ListFilterModel) => ListFilterModel; + skip: boolean; +} + export function useQueryState( - useQuery: ( - q: string, - filter: ListFilterModel, - skip: boolean - ) => ILoadResults, + useQuery: (props: IUseQueryHookProps) => ILoadResults, filter: ListFilterModel, - skip: boolean + skip: boolean, + options?: { + filterHook?: (filter: ListFilterModel) => ListFilterModel; + } ) { const [query, setQuery] = useState(""); const { results: queryResults } = useCacheResults( - useQuery(query, filter, skip) + useQuery({ q: query, filter, filterHook: options?.filterHook, skip }) ); return { query, setQuery, queryResults }; @@ -379,10 +391,17 @@ export function useCandidates(props: { const 
defaultModifier = getDefaultModifier(singleValue); const candidates = useMemo(() => { + return (results ?? []).map((r) => ({ + id: r.id, + label: r.label, + })); + }, [results]); + + const modifierCandidates = useMemo(() => { const hierarchicalCandidate = hierarchical && (criterion.value as IHierarchicalLabelValue).depth !== -1; - const modifierCandidates: Option[] = getModifierCandidates({ + return getModifierCandidates({ modifier, defaultModifier, hasSelected: selected.length > 0, @@ -404,19 +423,11 @@ export function useCandidates(props: { canExclude: false, }; }); - - return modifierCandidates.concat( - (results ?? []).map((r) => ({ - id: r.id, - label: r.label, - })) - ); }, [ defaultModifier, intl, modifier, singleValue, - results, selected, excluded, criterion.value, @@ -424,18 +435,15 @@ export function useCandidates(props: { includeSubMessageID, ]); - return candidates; + return { candidates, modifierCandidates }; } export function useLabeledIdFilterState(props: { option: CriterionOption; filter: ListFilterModel; setFilter: (f: ListFilterModel) => void; - useQuery: ( - q: string, - filter: ListFilterModel, - skip: boolean - ) => ILoadResults; + filterHook?: (filter: ListFilterModel) => ListFilterModel; + useQuery: (props: IUseQueryHookProps) => ILoadResults; singleValue?: boolean; hierarchical?: boolean; includeSubMessageID?: string; @@ -444,6 +452,7 @@ export function useLabeledIdFilterState(props: { option, filter, setFilter, + filterHook, useQuery, singleValue = false, hierarchical = false, @@ -456,7 +465,8 @@ export function useLabeledIdFilterState(props: { const { query, setQuery, queryResults } = useQueryState( useQuery, filter, - skip + skip, + { filterHook } ); const { criterion, setCriterion } = useCriterion(option, filter, setFilter); @@ -470,7 +480,7 @@ export function useLabeledIdFilterState(props: { includeSubMessageID, }); - const candidates = useCandidates({ + const { candidates, modifierCandidates } = useCandidates({ criterion, 
queryResults, selected, @@ -486,6 +496,7 @@ export function useLabeledIdFilterState(props: { return { candidates, + modifierCandidates, onSelect, onUnselect, selected, @@ -510,12 +521,29 @@ export function makeQueryVariables(query: string, extraProps: {}) { interface IFilterType { scenes_filter?: InputMaybe; scene_count?: InputMaybe; + performers_filter?: InputMaybe; + performer_count?: InputMaybe; + galleries_filter?: InputMaybe; + gallery_count?: InputMaybe; + images_filter?: InputMaybe; + image_count?: InputMaybe; + groups_filter?: InputMaybe; + group_count?: InputMaybe; + studios_filter?: InputMaybe; + studio_count?: InputMaybe; + marker_count?: InputMaybe; + markers_filter?: InputMaybe; } export function setObjectFilter( out: IFilterType, mode: FilterMode, - relatedFilterOutput: SceneFilterType + relatedFilterOutput: + | SceneFilterType + | PerformerFilterType + | GalleryFilterType + | GroupFilterType + | StudioFilterType ) { const empty = Object.keys(relatedFilterOutput).length === 0; @@ -527,8 +555,77 @@ export function setObjectFilter( modifier: CriterionModifier.GreaterThan, value: 0, }; + break; } - out.scenes_filter = relatedFilterOutput; + out.scenes_filter = relatedFilterOutput as SceneFilterType; break; + case FilterMode.Performers: + // if empty, only get objects with performers + if (empty) { + out.performer_count = { + modifier: CriterionModifier.GreaterThan, + value: 0, + }; + break; + } + out.performers_filter = relatedFilterOutput as PerformerFilterType; + break; + case FilterMode.Galleries: + // if empty, only get objects with galleries + if (empty) { + out.gallery_count = { + modifier: CriterionModifier.GreaterThan, + value: 0, + }; + break; + } + out.galleries_filter = relatedFilterOutput as GalleryFilterType; + break; + case FilterMode.Images: + // if empty, only get objects with galleries + if (empty) { + out.image_count = { + modifier: CriterionModifier.GreaterThan, + value: 0, + }; + break; + } + out.images_filter = relatedFilterOutput as 
ImageFilterType; + break; + case FilterMode.Groups: + // if empty, only get objects with groups + if (empty) { + out.group_count = { + modifier: CriterionModifier.GreaterThan, + value: 0, + }; + break; + } + out.groups_filter = relatedFilterOutput as GroupFilterType; + break; + case FilterMode.Studios: + // if empty, only get objects with studios + if (empty) { + out.studio_count = { + modifier: CriterionModifier.GreaterThan, + value: 0, + }; + break; + } + out.studios_filter = relatedFilterOutput as StudioFilterType; + break; + case FilterMode.SceneMarkers: + // if empty, only get objects with scene markers + if (empty) { + out.marker_count = { + modifier: CriterionModifier.GreaterThan, + value: 0, + }; + break; + } + out.markers_filter = relatedFilterOutput as SceneMarkerFilterType; + break; + default: + throw new Error("Invalid filter mode"); } } diff --git a/ui/v2.5/src/components/List/Filters/OptionFilter.tsx b/ui/v2.5/src/components/List/Filters/OptionFilter.tsx index d9cfaf733..6753df09d 100644 --- a/ui/v2.5/src/components/List/Filters/OptionFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/OptionFilter.tsx @@ -1,10 +1,20 @@ import cloneDeep from "lodash-es/cloneDeep"; -import React from "react"; +import React, { useMemo } from "react"; import { Form } from "react-bootstrap"; import { CriterionValue, ModifierCriterion, + ModifierCriterionOption, } from "src/models/list-filter/criteria/criterion"; +import { ListFilterModel } from "src/models/list-filter/filter"; +import { Option, SidebarListFilter } from "./SidebarListFilter"; +import { CriterionModifier } from "src/core/generated-graphql"; +import { + getModifierCandidates, + ModifierValue, + modifierValueToModifier, +} from "./LabeledIdFilter"; +import { useIntl } from "react-intl"; interface IOptionsFilter { criterion: ModifierCriterion; @@ -83,3 +93,142 @@ export const OptionListFilter: React.FC = ({
    ); }; + +interface ISidebarFilter { + title?: React.ReactNode; + option: ModifierCriterionOption; + filter: ListFilterModel; + setFilter: (f: ListFilterModel) => void; + sectionID?: string; +} + +export const SidebarOptionFilter: React.FC = ({ + title, + option, + filter, + setFilter, + sectionID, +}) => { + const intl = useIntl(); + + const criteria = filter.criteriaFor( + option.type + ) as ModifierCriterion[]; + const criterion = criteria.length > 0 ? criteria[0] : null; + const { options: criterionOptions = [] } = option; + const currentValues = criteria.flatMap((c) => c.value as string[]); + + const hasNullModifiers = + option.modifierOptions.includes(CriterionModifier.IsNull) && + option.modifierOptions.includes(CriterionModifier.NotNull); + + const selected: Option[] = useMemo(() => { + if (!criterion) return []; + + if (criterion.modifier === CriterionModifier.IsNull) { + return [ + { + id: "none", + label: intl.formatMessage({ id: "criterion_modifier_values.none" }), + }, + ]; + } else if (criterion.modifier === CriterionModifier.NotNull) { + return [ + { + id: "any", + label: intl.formatMessage({ id: "criterion_modifier_values.any" }), + }, + ]; + } + + return criterionOptions + .filter((o) => currentValues.includes(o.toString())) + .map((o) => ({ + id: o.toString(), + label: o.toLocaleString(), + })); + }, [criterion, currentValues, criterionOptions, intl]); + + const modifierCandidates: Option[] = useMemo(() => { + if (!hasNullModifiers) return []; + + const c = getModifierCandidates({ + modifier: criterion?.modifier ?? 
option.defaultModifier, + defaultModifier: option.defaultModifier, + hasExcluded: false, + hasSelected: selected.length > 0, + singleValue: true, // so that it doesn't include any_of + }); + + return c.map((v) => { + const messageID = `criterion_modifier_values.${v}`; + + return { + id: v, + label: `(${intl.formatMessage({ + id: messageID, + })})`, + className: "modifier-object", + canExclude: false, + }; + }); + }, [criterion, option, selected, hasNullModifiers, intl]); + + const options = useMemo(() => { + const o = criterionOptions + .filter((oo) => !currentValues.includes(oo.toString())) + .map((oo) => ({ + id: oo.toString(), + label: oo.toString(), + })); + + return [...modifierCandidates, ...o]; + }, [criterionOptions, currentValues, modifierCandidates]); + + function onSelect(item: Option) { + const newCriterion = criterion ? criterion.clone() : option.makeCriterion(); + + if (item.className === "modifier-object") { + newCriterion.modifier = modifierValueToModifier(item.id as ModifierValue); + newCriterion.value = []; + setFilter(filter.replaceCriteria(option.type, [newCriterion])); + return; + } + + const cv = newCriterion.value as string[]; + if (cv.includes(item.id)) { + return; + } else { + newCriterion.value = [...cv, item.id]; + } + + setFilter(filter.replaceCriteria(option.type, [newCriterion])); + } + + function onUnselect(item: Option) { + if (item.className === "modifier-object") { + const newCriterion = criterion + ? 
criterion.clone() + : option.makeCriterion(); + newCriterion.modifier = option.defaultModifier; + setFilter(filter.replaceCriteria(option.type, [newCriterion])); + return; + } + + setFilter(filter.removeCriterion(option.type)); + } + + return ( + <> + + + ); +}; diff --git a/ui/v2.5/src/components/List/Filters/PathFilter.tsx b/ui/v2.5/src/components/List/Filters/PathFilter.tsx index 97711ebef..ac44302c5 100644 --- a/ui/v2.5/src/components/List/Filters/PathFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/PathFilter.tsx @@ -2,7 +2,7 @@ import React from "react"; import { Form } from "react-bootstrap"; import { FolderSelect } from "src/components/Shared/FolderSelect/FolderSelect"; import { CriterionModifier } from "src/core/generated-graphql"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { ModifierCriterion, CriterionValue, @@ -17,7 +17,7 @@ export const PathFilter: React.FC = ({ criterion, onValueChanged, }) => { - const { configuration } = React.useContext(ConfigurationContext); + const { configuration } = useConfigurationContext(); const libraryPaths = configuration?.general.stashes.map((s) => s.path); // don't show folder select for regex diff --git a/ui/v2.5/src/components/List/Filters/PerformersFilter.tsx b/ui/v2.5/src/components/List/Filters/PerformersFilter.tsx index 84e3dd19b..7e0dee855 100644 --- a/ui/v2.5/src/components/List/Filters/PerformersFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/PerformersFilter.tsx @@ -1,5 +1,8 @@ import React, { ReactNode, useMemo } from "react"; -import { PerformersCriterion } from "src/models/list-filter/criteria/performers"; +import { + PerformersCriterion, + PerformersCriterionOption, +} from "src/models/list-filter/criteria/performers"; import { CriterionModifier, FindPerformersForSelectQueryVariables, @@ -12,11 +15,13 @@ import { sortByRelevance } from "src/utils/query"; import { ListFilterModel } from 
"src/models/list-filter/filter"; import { CriterionOption } from "src/models/list-filter/criteria/criterion"; import { + IUseQueryHookProps, makeQueryVariables, setObjectFilter, useLabeledIdFilterState, } from "./LabeledIdFilter"; import { SidebarListFilter } from "./SidebarListFilter"; +import { FormattedMessage } from "react-intl"; interface IPerformersFilter { criterion: PerformersCriterion; @@ -69,13 +74,12 @@ function sortResults( }); } -function usePerformerQueryFilter( - query: string, - f?: ListFilterModel, - skip?: boolean -) { +function usePerformerQueryFilter(props: IUseQueryHookProps) { + const { q: query, filter: f, skip, filterHook } = props; + const appliedFilter = filterHook && f ? filterHook(f.clone()) : f; + const { data, loading } = useFindPerformersForSelectQuery({ - variables: queryVariables(query, f), + variables: queryVariables(query, appliedFilter), skip, }); @@ -88,7 +92,7 @@ function usePerformerQueryFilter( } function usePerformerQuery(query: string, skip?: boolean) { - return usePerformerQueryFilter(query, undefined, skip); + return usePerformerQueryFilter({ q: query, skip: !!skip }); } const PerformersFilter: React.FC = ({ @@ -106,18 +110,35 @@ const PerformersFilter: React.FC = ({ export const SidebarPerformersFilter: React.FC<{ title?: ReactNode; - option: CriterionOption; + option?: CriterionOption; filter: ListFilterModel; setFilter: (f: ListFilterModel) => void; -}> = ({ title, option, filter, setFilter }) => { + filterHook?: (f: ListFilterModel) => ListFilterModel; + sectionID?: string; +}> = ({ + title = , + option = PerformersCriterionOption, + filter, + setFilter, + filterHook, + sectionID = "performers", +}) => { const state = useLabeledIdFilterState({ filter, setFilter, + filterHook, option, useQuery: usePerformerQueryFilter, }); - return ; + return ( + + ); }; export default PerformersFilter; diff --git a/ui/v2.5/src/components/List/Filters/RatingFilter.tsx b/ui/v2.5/src/components/List/Filters/RatingFilter.tsx index 
86d6a905b..8a07d54f9 100644 --- a/ui/v2.5/src/components/List/Filters/RatingFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/RatingFilter.tsx @@ -12,8 +12,11 @@ import { defaultRatingStarPrecision, defaultRatingSystemOptions, } from "src/utils/rating"; -import { ConfigurationContext } from "src/hooks/Config"; -import { RatingCriterion } from "src/models/list-filter/criteria/rating"; +import { useConfigurationContext } from "src/hooks/Config"; +import { + RatingCriterion, + RatingCriterionOption, +} from "src/models/list-filter/criteria/rating"; import { ListFilterModel } from "src/models/list-filter/filter"; import { Option, SidebarListFilter } from "./SidebarListFilter"; @@ -74,19 +77,21 @@ export const RatingFilter: React.FC = ({ interface ISidebarFilter { title?: React.ReactNode; - option: CriterionOption; + option?: CriterionOption; filter: ListFilterModel; setFilter: (f: ListFilterModel) => void; + sectionID?: string; } const any = "any"; const none = "none"; export const SidebarRatingFilter: React.FC = ({ - title, - option, + title = , + option = RatingCriterionOption, filter, setFilter, + sectionID = "rating", }) => { const intl = useIntl(); @@ -115,7 +120,7 @@ export const SidebarRatingFilter: React.FC = ({ [noneLabel] ); - const { configuration: config } = React.useContext(ConfigurationContext); + const { configuration: config } = useConfigurationContext(); const ratingSystemOptions = config?.ui.ratingSystemOptions ?? defaultRatingSystemOptions; @@ -191,6 +196,7 @@ export const SidebarRatingFilter: React.FC = ({ return ( <> = ({ singleValue preCandidates={ratingValue === null ? ratingStars : undefined} preSelected={ratingValue !== null ? ratingStars : undefined} + sectionID={sectionID} />
    diff --git a/ui/v2.5/src/components/List/Filters/SelectableFilter.tsx b/ui/v2.5/src/components/List/Filters/SelectableFilter.tsx index 9ea4333da..e599f3a87 100644 --- a/ui/v2.5/src/components/List/Filters/SelectableFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/SelectableFilter.tsx @@ -19,7 +19,12 @@ import { ModifierCriterion, IHierarchicalLabeledIdCriterion, } from "src/models/list-filter/criteria/criterion"; -import { defineMessages, MessageDescriptor, useIntl } from "react-intl"; +import { + defineMessages, + FormattedMessage, + MessageDescriptor, + useIntl, +} from "react-intl"; import { CriterionModifier } from "src/core/generated-graphql"; import { keyboardClickHandler } from "src/utils/keyboard"; import { useDebounce } from "src/hooks/debounce"; @@ -118,7 +123,9 @@ const UnselectedItem: React.FC<{ onKeyDown={(e) => e.stopPropagation()} className="minimal exclude-button" > - exclude + + + {excludeIcon} )} @@ -240,12 +247,19 @@ const SelectableFilter: React.FC = ({ onSetModifier(defaultModifier); } + function onEnter() { + if (objects.length === 1) { + onSelect(objects[0], false); + } + } + return (
    onQueryChange(v)} + onEnter={onEnter} placeholder={`${intl.formatMessage({ id: "actions.search" })}…`} />
      @@ -450,6 +464,42 @@ export const ObjectsFilter = < ); }; +export const DepthSelector: React.FC<{ + depth: number | undefined; + onDepthChanged: (depth: number) => void; + id: string; + label?: React.ReactNode; + placeholder?: string; + disabled?: boolean; +}> = ({ depth, onDepthChanged, id, label, disabled, placeholder }) => { + return ( + + + onDepthChanged(depth !== 0 ? 0 : -1)} + disabled={disabled} + /> + + {depth !== 0 && ( + + + onDepthChanged(e.target.value ? parseInt(e.target.value, 10) : -1) + } + defaultValue={depth !== -1 ? depth : ""} + min="1" + /> + + )} + + ); +}; + interface IHierarchicalObjectsFilter extends IObjectsFilter {} @@ -497,38 +547,15 @@ export const HierarchicalObjectsFilter = < } return ( -
      - - onDepthChanged(criterion.value.depth !== 0 ? 0 : -1)} - disabled={criterion.modifier === CriterionModifier.Equals} - /> - - - {criterion.value.depth !== 0 && ( - - - onDepthChanged(e.target.value ? parseInt(e.target.value, 10) : -1) - } - defaultValue={ - criterion.value && criterion.value.depth !== -1 - ? criterion.value.depth - : "" - } - min="1" - /> - - )} +
      + - +
      ); }; diff --git a/ui/v2.5/src/components/List/Filters/SidebarAgeFilter.tsx b/ui/v2.5/src/components/List/Filters/SidebarAgeFilter.tsx new file mode 100644 index 000000000..3a6449ab6 --- /dev/null +++ b/ui/v2.5/src/components/List/Filters/SidebarAgeFilter.tsx @@ -0,0 +1,310 @@ +import React, { useEffect, useMemo, useState } from "react"; +import { CriterionModifier } from "../../../core/generated-graphql"; +import { CriterionOption } from "../../../models/list-filter/criteria/criterion"; +import { NumberCriterion } from "src/models/list-filter/criteria/criterion"; +import { ListFilterModel } from "src/models/list-filter/filter"; +import { Option, SidebarListFilter } from "./SidebarListFilter"; +import { DoubleRangeInput } from "src/components/Shared/DoubleRangeInput"; +import { useDebounce } from "src/hooks/debounce"; + +interface ISidebarFilter { + title?: React.ReactNode; + option: CriterionOption; + filter: ListFilterModel; + setFilter: (f: ListFilterModel) => void; + sectionID?: string; +} + +// Age presets +const AGE_PRESETS = [ + { id: "18-25", label: "18-25", min: 18, max: 25 }, + { id: "25-35", label: "25-35", min: 25, max: 35 }, + { id: "35-45", label: "35-45", min: 35, max: 45 }, + { id: "45-60", label: "45-60", min: 45, max: 60 }, + { id: "60+", label: "60+", min: 60, max: null }, +]; + +const MAX_AGE = 60; // Maximum age for the slider +const MAX_LABEL = "60+"; // Display label for maximum age + +export const SidebarAgeFilter: React.FC = ({ + title, + option, + filter, + setFilter, + sectionID, +}) => { + const criteria = filter.criteriaFor(option.type) as NumberCriterion[]; + const criterion = criteria.length > 0 ? criteria[0] : null; + + // Get current values from criterion + const currentMin = criterion?.value?.value ?? 18; + const currentMax = criterion?.value?.value2 ?? 
MAX_AGE; + + const [sliderMin, setSliderMin] = useState(currentMin); + const [sliderMax, setSliderMax] = useState(currentMax); + const [minInput, setMinInput] = useState(currentMin.toString()); + const [maxInput, setMaxInput] = useState( + currentMax >= MAX_AGE ? MAX_LABEL : currentMax.toString() + ); + + // Reset slider when criterion is removed externally (via filter tag X) + useEffect(() => { + if (!criterion) { + setSliderMin(18); + setSliderMax(MAX_AGE); + setMinInput("18"); + setMaxInput(MAX_LABEL); + } + }, [criterion]); + + // Determine which preset is selected + const selectedPreset = useMemo(() => { + if (!criterion) return null; + + // Check if current values match any preset + for (const preset of AGE_PRESETS) { + if (preset.max === null) { + // For "60+" preset + if ( + criterion.modifier === CriterionModifier.GreaterThan && + criterion.value.value === preset.min + ) { + return preset.id; + } + } else { + // For range presets + if ( + criterion.modifier === CriterionModifier.Between && + criterion.value.value === preset.min && + criterion.value.value2 === preset.max + ) { + return preset.id; + } + } + } + + // Check if it's a custom range or custom GreaterThan + if ( + criterion.modifier === CriterionModifier.Between || + criterion.modifier === CriterionModifier.GreaterThan + ) { + return "custom"; + } + + return null; + }, [criterion]); + + const options: Option[] = useMemo(() => { + return AGE_PRESETS.map((preset) => ({ + id: preset.id, + label: preset.label, + className: "age-preset", + })); + }, []); + + const selected: Option[] = useMemo(() => { + if (!selectedPreset) return []; + if (selectedPreset === "custom") return []; + + const preset = AGE_PRESETS.find((p) => p.id === selectedPreset); + if (preset) { + return [ + { + id: preset.id, + label: preset.label, + className: "age-preset", + }, + ]; + } + return []; + }, [selectedPreset]); + + function onSelectPreset(item: Option) { + const preset = AGE_PRESETS.find((p) => p.id === item.id); + if 
(!preset) return; + + setSliderMin(preset.min); + setSliderMax(preset.max ?? MAX_AGE); + setMinInput(preset.min.toString()); + setMaxInput(preset.max === null ? MAX_LABEL : preset.max.toString()); + + const currentCriteria = filter.criteriaFor( + option.type + ) as NumberCriterion[]; + const currentCriterion = + currentCriteria.length > 0 ? currentCriteria[0] : null; + const newCriterion = currentCriterion + ? currentCriterion.clone() + : option.makeCriterion(); + + if (preset.max === null) { + // "60+" - use GreaterThan + newCriterion.modifier = CriterionModifier.GreaterThan; + newCriterion.value.value = preset.min; + newCriterion.value.value2 = undefined; + } else { + // Range preset - use Between + newCriterion.modifier = CriterionModifier.Between; + newCriterion.value.value = preset.min; + newCriterion.value.value2 = preset.max; + } + + setFilter(filter.replaceCriteria(option.type, [newCriterion])); + } + + function onUnselectPreset() { + setSliderMin(18); + setSliderMax(MAX_AGE); + setMinInput("18"); + setMaxInput(MAX_LABEL); + setFilter(filter.removeCriterion(option.type)); + } + + // Parse age input (supports formats like "25", "100+") + function parseAgeInput(input: string): number | null { + const trimmed = input.trim().toLowerCase(); + + if (trimmed === "max" || trimmed === MAX_LABEL.toLowerCase()) { + return MAX_AGE; + } + + const age = parseInt(trimmed); + if (isNaN(age) || age < 18 || age > MAX_AGE) { + return null; + } + + return age; + } + + // Filter update + function updateFilter(min: number, max: number) { + // If slider is at full range (18 to max), remove the filter entirely + if (min === 18 && max >= MAX_AGE) { + setFilter(filter.removeCriterion(option.type)); + return; + } + + const currentCriteria = filter.criteriaFor( + option.type + ) as NumberCriterion[]; + const currentCriterion = + currentCriteria.length > 0 ? currentCriteria[0] : null; + const newCriterion = currentCriterion + ? 
currentCriterion.clone() + : option.makeCriterion(); + + // If max is at MAX_AGE (but min > 18), use GreaterThan + if (max >= MAX_AGE) { + newCriterion.modifier = CriterionModifier.GreaterThan; + newCriterion.value.value = min; + newCriterion.value.value2 = undefined; + } else { + newCriterion.modifier = CriterionModifier.Between; + newCriterion.value.value = min; + newCriterion.value.value2 = max; + } + + setFilter(filter.replaceCriteria(option.type, [newCriterion])); + } + + const updateFilterDebounceMS = 300; + const debounceUpdateFilter = useDebounce( + updateFilter, + updateFilterDebounceMS + ); + + function handleSliderChange(min: number, max: number) { + setSliderMin(min); + setSliderMax(max); + setMinInput(min.toString()); + setMaxInput(max >= MAX_AGE ? MAX_LABEL : max.toString()); + + debounceUpdateFilter(min, max); + } + + function handleMinInputChange(value: string) { + setMinInput(value); + } + + function handleMaxInputChange(value: string) { + setMaxInput(value); + } + + function handleMinInputBlur() { + const parsed = parseAgeInput(minInput); + if (parsed !== null && parsed >= 18 && parsed < sliderMax) { + handleSliderChange(parsed, sliderMax); + } else { + // Reset to current value if invalid + setMinInput(sliderMin.toString()); + } + } + + function handleMaxInputBlur() { + const parsed = parseAgeInput(maxInput); + if (parsed !== null && parsed > sliderMin && parsed <= MAX_AGE) { + handleSliderChange(sliderMin, parsed); + } else { + // Reset to current value if invalid + setMaxInput(sliderMax >= MAX_AGE ? MAX_LABEL : sliderMax.toString()); + } + } + + const customSlider = ( +
      + handleSliderChange(min, max)} + minInput={ + handleMinInputChange(e.target.value)} + onBlur={handleMinInputBlur} + onKeyDown={(e) => { + if (e.key === "Enter") { + e.currentTarget.blur(); + } + }} + placeholder="18" + /> + } + maxInput={ + handleMaxInputChange(e.target.value)} + onBlur={handleMaxInputBlur} + onKeyDown={(e) => { + if (e.key === "Enter") { + e.currentTarget.blur(); + } + }} + placeholder={MAX_LABEL} + /> + } + /> +
      + ); + + return ( + + ); +}; diff --git a/ui/v2.5/src/components/List/Filters/SidebarDurationFilter.tsx b/ui/v2.5/src/components/List/Filters/SidebarDurationFilter.tsx new file mode 100644 index 000000000..29463bfa6 --- /dev/null +++ b/ui/v2.5/src/components/List/Filters/SidebarDurationFilter.tsx @@ -0,0 +1,362 @@ +import React, { useEffect, useMemo, useState } from "react"; +import { CriterionModifier } from "../../../core/generated-graphql"; +import { CriterionOption } from "../../../models/list-filter/criteria/criterion"; +import { DurationCriterion } from "src/models/list-filter/criteria/criterion"; +import { ListFilterModel } from "src/models/list-filter/filter"; +import { Option, SidebarListFilter } from "./SidebarListFilter"; +import TextUtils from "src/utils/text"; +import { DoubleRangeInput } from "src/components/Shared/DoubleRangeInput"; +import { useDebounce } from "src/hooks/debounce"; +import { FormattedMessage } from "react-intl"; +import { DurationCriterionOption } from "src/models/list-filter/scenes"; + +interface ISidebarFilter { + title?: React.ReactNode; + option?: CriterionOption; + filter: ListFilterModel; + setFilter: (f: ListFilterModel) => void; + sectionID?: string; +} + +// Duration presets in seconds +const DURATION_PRESETS = [ + { id: "0-5", label: "0-5 min", min: 0, max: 300 }, + { id: "5-10", label: "5-10 min", min: 300, max: 600 }, + { id: "10-20", label: "10-20 min", min: 600, max: 1200 }, + { id: "20-40", label: "20-40 min", min: 1200, max: 2400 }, + { id: "40+", label: "40+ min", min: 2400, max: null }, +]; + +const MAX_DURATION = 7200; // 2 hours in seconds for the slider +const MAX_LABEL = "2+ hrs"; // Display label for maximum duration + +// Custom step values: 0, 2min (120s), 5min (300s), then 5 minute intervals +const DURATION_STEPS = [ + 0, 120, 300, 600, 900, 1200, 1500, 1800, 2100, 2400, 2700, 3000, 3300, 3600, + 3900, 4200, 4500, 4800, 5100, 5400, 5700, 6000, 6300, 6600, 6900, 7200, +]; + +// Snap a value to the 
nearest valid step +function snapToStep(value: number): number { + if (value <= 0) return 0; + if (value >= MAX_DURATION) return MAX_DURATION; + + // Find the closest step + let closest = DURATION_STEPS[0]; + let minDiff = Math.abs(value - closest); + + for (const step of DURATION_STEPS) { + const diff = Math.abs(value - step); + if (diff < minDiff) { + minDiff = diff; + closest = step; + } + } + + return closest; +} + +export const SidebarDurationFilter: React.FC = ({ + title = , + option = DurationCriterionOption, + filter, + setFilter, + sectionID = "duration", +}) => { + const criteria = filter.criteriaFor(option.type) as DurationCriterion[]; + const criterion = criteria.length > 0 ? criteria[0] : null; + + // Get current values from criterion + const currentMin = criterion?.value?.value ?? 0; + const currentMax = criterion?.value?.value2 ?? MAX_DURATION; + + const [sliderMin, setSliderMin] = useState(currentMin); + const [sliderMax, setSliderMax] = useState(currentMax); + const [minInput, setMinInput] = useState( + currentMin === 0 ? "0m" : TextUtils.secondsAsTimeString(currentMin) + ); + const [maxInput, setMaxInput] = useState( + currentMax >= MAX_DURATION + ? 
MAX_LABEL + : TextUtils.secondsAsTimeString(currentMax) + ); + + // Reset slider when criterion is removed externally (via filter tag X) + useEffect(() => { + if (!criterion) { + setSliderMin(0); + setSliderMax(MAX_DURATION); + setMinInput("0m"); + setMaxInput(MAX_LABEL); + } + }, [criterion]); + + // Determine which preset is selected + const selectedPreset = useMemo(() => { + if (!criterion) return null; + + // Check if current values match any preset + for (const preset of DURATION_PRESETS) { + if (preset.max === null) { + // For "40+ min" preset + if ( + criterion.modifier === CriterionModifier.GreaterThan && + criterion.value.value === preset.min + ) { + return preset.id; + } + } else { + // For range presets + if ( + criterion.modifier === CriterionModifier.Between && + criterion.value.value === preset.min && + criterion.value.value2 === preset.max + ) { + return preset.id; + } + } + } + + // Check if it's a custom range or custom GreaterThan + if ( + criterion.modifier === CriterionModifier.Between || + criterion.modifier === CriterionModifier.GreaterThan + ) { + return "custom"; + } + + return null; + }, [criterion]); + + const options: Option[] = useMemo(() => { + return DURATION_PRESETS.map((preset) => ({ + id: preset.id, + label: preset.label, + className: "duration-preset", + })); + }, []); + + const selected: Option[] = useMemo(() => { + if (!selectedPreset) return []; + if (selectedPreset === "custom") return []; + + const preset = DURATION_PRESETS.find((p) => p.id === selectedPreset); + if (preset) { + return [ + { + id: preset.id, + label: preset.label, + className: "duration-preset", + }, + ]; + } + return []; + }, [selectedPreset]); + + function onSelectPreset(item: Option) { + const preset = DURATION_PRESETS.find((p) => p.id === item.id); + if (!preset) return; + + const newCriterion = criterion ? 
criterion.clone() : option.makeCriterion(); + + if (preset.max === null) { + // "40+ min" - use GreaterThan + newCriterion.modifier = CriterionModifier.GreaterThan; + newCriterion.value.value = preset.min; + newCriterion.value.value2 = undefined; + } else { + // Range preset - use Between + newCriterion.modifier = CriterionModifier.Between; + newCriterion.value.value = preset.min; + newCriterion.value.value2 = preset.max; + } + + setSliderMin(preset.min); + setSliderMax(preset.max ?? MAX_DURATION); + setMinInput( + preset.min === 0 ? "0m" : TextUtils.secondsAsTimeString(preset.min) + ); + setMaxInput( + preset.max === null + ? MAX_LABEL + : TextUtils.secondsAsTimeString(preset.max) + ); + setFilter(filter.replaceCriteria(option.type, [newCriterion])); + } + + function onUnselectPreset() { + setFilter(filter.removeCriterion(option.type)); + setSliderMin(0); + setSliderMax(MAX_DURATION); + setMinInput("0m"); + setMaxInput(MAX_LABEL); + } + + // Parse time input (supports formats like "10", "1:30", "1:30:00", "2+ hrs") + function parseTimeInput(input: string): number | null { + const trimmed = input.trim().toLowerCase(); + + if (trimmed === "max" || trimmed === MAX_LABEL.toLowerCase()) { + return MAX_DURATION; + } + + // Try to parse as pure number (minutes) + const minutesOnly = parseFloat(trimmed); + if (!isNaN(minutesOnly) && trimmed.indexOf(":") === -1) { + return Math.round(minutesOnly * 60); + } + + // Parse HH:MM:SS or MM:SS format + const parts = trimmed.split(":").map((p) => parseInt(p)); + if (parts.some(isNaN)) { + return null; + } + + if (parts.length === 2) { + // MM:SS + return parts[0] * 60 + parts[1]; + } else if (parts.length === 3) { + // HH:MM:SS + return parts[0] * 3600 + parts[1] * 60 + parts[2]; + } + + return null; + } + + // Debounced filter update + function updateFilter(min: number, max: number) { + // If slider is at full range (0 to max), remove the filter entirely + if (min === 0 && max >= MAX_DURATION) { + 
setFilter(filter.removeCriterion(option.type)); + return; + } + + const newCriterion = criterion ? criterion.clone() : option.makeCriterion(); + + // If max is at MAX_DURATION (but min > 0), use GreaterThan + if (max >= MAX_DURATION) { + newCriterion.modifier = CriterionModifier.GreaterThan; + newCriterion.value.value = min; + newCriterion.value.value2 = undefined; + } else { + newCriterion.modifier = CriterionModifier.Between; + newCriterion.value.value = min; + newCriterion.value.value2 = max; + } + + setFilter(filter.replaceCriteria(option.type, [newCriterion])); + } + + const updateFilterDebounceMS = 300; + const debounceUpdateFilter = useDebounce( + updateFilter, + updateFilterDebounceMS + ); + + function handleSliderChange(min: number, max: number) { + if (min < 0 || max > MAX_DURATION || min >= max) { + return; + } + + setSliderMin(min); + setSliderMax(max); + setMinInput(min === 0 ? "0m" : TextUtils.secondsAsTimeString(min)); + setMaxInput( + max >= MAX_DURATION ? MAX_LABEL : TextUtils.secondsAsTimeString(max) + ); + + debounceUpdateFilter(min, max); + } + + function handleMinInputChange(value: string) { + setMinInput(value); + } + + function handleMaxInputChange(value: string) { + setMaxInput(value); + } + + function handleMinInputBlur() { + const parsed = parseTimeInput(minInput); + if (parsed !== null && parsed >= 0 && parsed < sliderMax) { + handleSliderChange(parsed, sliderMax); + } else { + // Reset to current value if invalid + setMinInput( + sliderMin === 0 ? "0m" : TextUtils.secondsAsTimeString(sliderMin) + ); + } + } + + function handleMaxInputBlur() { + const parsed = parseTimeInput(maxInput); + if (parsed !== null && parsed > sliderMin && parsed <= MAX_DURATION) { + handleSliderChange(sliderMin, parsed); + } else { + // Reset to current value if invalid + setMaxInput( + sliderMax >= MAX_DURATION + ? 
MAX_LABEL + : TextUtils.secondsAsTimeString(sliderMax) + ); + } + } + + const customSlider = ( + handleMinInputChange(e.target.value)} + onBlur={handleMinInputBlur} + onKeyDown={(e) => { + if (e.key === "Enter") { + e.currentTarget.blur(); + } + }} + placeholder="0:00" + /> + } + maxInput={ + handleMaxInputChange(e.target.value)} + onBlur={handleMaxInputBlur} + onKeyDown={(e) => { + if (e.key === "Enter") { + e.currentTarget.blur(); + } + }} + placeholder={MAX_LABEL} + /> + } + min={0} + max={MAX_DURATION} + value={[sliderMin, sliderMax]} + onChange={(vals) => { + handleSliderChange(snapToStep(vals[0]), snapToStep(vals[1])); + }} + /> + ); + + return ( + + ); +}; diff --git a/ui/v2.5/src/components/List/Filters/SidebarListFilter.tsx b/ui/v2.5/src/components/List/Filters/SidebarListFilter.tsx index 71a56f23d..14e11e968 100644 --- a/ui/v2.5/src/components/List/Filters/SidebarListFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/SidebarListFilter.tsx @@ -182,7 +182,8 @@ const QueryField: React.FC<{ focus: ReturnType; value: string; setValue: (query: string) => void; -}> = ({ focus, value, setValue }) => { + onEnter?: () => void; +}> = ({ focus, value, setValue, onEnter }) => { const intl = useIntl(); const [displayQuery, setDisplayQuery] = useState(value); @@ -206,6 +207,7 @@ const QueryField: React.FC<{ value={displayQuery} setValue={(v) => onQueryChange(v)} placeholder={`${intl.formatMessage({ id: "actions.search" })}…`} + onEnter={onEnter} /> ); }; @@ -214,6 +216,7 @@ interface IQueryableProps { inputFocus?: ReturnType; query?: string; setQuery?: (query: string) => void; + onEnter?: () => void; } export const CandidateList: React.FC< @@ -227,6 +230,7 @@ export const CandidateList: React.FC< inputFocus, query, setQuery, + onEnter, items, onSelect, canExclude, @@ -242,6 +246,7 @@ export const CandidateList: React.FC< focus={inputFocus} value={query} setValue={(v) => setQuery(v)} + onEnter={onEnter} /> )}
        @@ -265,6 +270,7 @@ export const SidebarListFilter: React.FC<{ selected: Option[]; excluded?: Option[]; candidates: Option[]; + modifierCandidates?: Option[]; singleValue?: boolean; onSelect: (item: Option, exclude: boolean) => void; onUnselect: (item: Option, exclude: boolean) => void; @@ -276,11 +282,14 @@ export const SidebarListFilter: React.FC<{ preCandidates?: React.ReactNode; postCandidates?: React.ReactNode; onOpen?: () => void; + // used to store open/closed state in SidebarStateContext + sectionID?: string; }> = ({ title, selected, excluded, candidates, + modifierCandidates, onSelect, onUnselect, canExclude, @@ -292,6 +301,7 @@ export const SidebarListFilter: React.FC<{ preSelected, postSelected, onOpen, + sectionID, }) => { // TODO - sort items? @@ -321,10 +331,25 @@ export const SidebarListFilter: React.FC<{ } } + function onEnter() { + if (candidates && candidates.length === 1) { + selectHook(candidates[0], false); + } + } + + const items = useMemo(() => { + if (!modifierCandidates) { + return candidates; + } + + return [...modifierCandidates, ...candidates]; + }, [candidates, modifierCandidates]); + return ( {preSelected ?
        {preSelected}
        : null} @@ -346,13 +371,14 @@ export const SidebarListFilter: React.FC<{ > {preCandidates ?
        {preCandidates}
        : null} {postCandidates ?
        {postCandidates}
        : null}
        diff --git a/ui/v2.5/src/components/List/Filters/StudiosFilter.tsx b/ui/v2.5/src/components/List/Filters/StudiosFilter.tsx index e9c05013d..3e28bd927 100644 --- a/ui/v2.5/src/components/List/Filters/StudiosFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/StudiosFilter.tsx @@ -5,16 +5,21 @@ import { useFindStudiosForSelectQuery, } from "src/core/generated-graphql"; import { HierarchicalObjectsFilter } from "./SelectableFilter"; -import { StudiosCriterion } from "src/models/list-filter/criteria/studios"; +import { + StudiosCriterion, + StudiosCriterionOption, +} from "src/models/list-filter/criteria/studios"; import { sortByRelevance } from "src/utils/query"; import { CriterionOption } from "src/models/list-filter/criteria/criterion"; import { ListFilterModel } from "src/models/list-filter/filter"; import { + IUseQueryHookProps, makeQueryVariables, setObjectFilter, useLabeledIdFilterState, } from "./LabeledIdFilter"; import { SidebarListFilter } from "./SidebarListFilter"; +import { FormattedMessage } from "react-intl"; interface IStudiosFilter { criterion: StudiosCriterion; @@ -56,13 +61,12 @@ function sortResults( }); } -function useStudioQueryFilter( - query: string, - filter?: ListFilterModel, - skip?: boolean -) { +function useStudioQueryFilter(props: IUseQueryHookProps) { + const { q: query, filter: f, skip, filterHook } = props; + const appliedFilter = filterHook && f ? 
filterHook(f.clone()) : f; + const { data, loading } = useFindStudiosForSelectQuery({ - variables: queryVariables(query, filter), + variables: queryVariables(query, appliedFilter), skip, }); @@ -75,7 +79,7 @@ function useStudioQueryFilter( } function useStudioQuery(query: string, skip?: boolean) { - return useStudioQueryFilter(query, undefined, skip); + return useStudioQueryFilter({ q: query, skip: !!skip }); } const StudiosFilter: React.FC = ({ @@ -94,13 +98,23 @@ const StudiosFilter: React.FC = ({ export const SidebarStudiosFilter: React.FC<{ title?: ReactNode; - option: CriterionOption; + option?: CriterionOption; filter: ListFilterModel; setFilter: (f: ListFilterModel) => void; -}> = ({ title, option, filter, setFilter }) => { + filterHook?: (f: ListFilterModel) => ListFilterModel; + sectionID?: string; +}> = ({ + title = , + option = StudiosCriterionOption, + filter, + setFilter, + filterHook, + sectionID = "studios", +}) => { const state = useLabeledIdFilterState({ filter, setFilter, + filterHook, option, useQuery: useStudioQueryFilter, singleValue: true, @@ -108,7 +122,14 @@ export const SidebarStudiosFilter: React.FC<{ includeSubMessageID: "subsidiary_studios", }); - return ; + return ( + + ); }; export default StudiosFilter; diff --git a/ui/v2.5/src/components/List/Filters/TagsFilter.tsx b/ui/v2.5/src/components/List/Filters/TagsFilter.tsx index 0a3f8c942..446a90331 100644 --- a/ui/v2.5/src/components/List/Filters/TagsFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/TagsFilter.tsx @@ -10,12 +10,17 @@ import { sortByRelevance } from "src/utils/query"; import { CriterionOption } from "src/models/list-filter/criteria/criterion"; import { ListFilterModel } from "src/models/list-filter/filter"; import { + IUseQueryHookProps, makeQueryVariables, setObjectFilter, useLabeledIdFilterState, } from "./LabeledIdFilter"; import { SidebarListFilter } from "./SidebarListFilter"; -import { TagsCriterion } from "src/models/list-filter/criteria/tags"; +import { + 
TagsCriterion, + TagsCriterionOption, +} from "src/models/list-filter/criteria/tags"; +import { FormattedMessage } from "react-intl"; interface ITagsFilter { criterion: TagsCriterion; @@ -65,13 +70,12 @@ function sortResults( }); } -function useTagQueryFilter( - query: string, - filter?: ListFilterModel, - skip?: boolean -) { +function useTagQueryFilter(props: IUseQueryHookProps) { + const { q: query, filter: f, skip, filterHook } = props; + const appliedFilter = filterHook && f ? filterHook(f.clone()) : f; + const { data, loading } = useFindTagsForSelectQuery({ - variables: queryVariables(query, filter), + variables: queryVariables(query, appliedFilter), skip, }); @@ -84,7 +88,7 @@ function useTagQueryFilter( } function useTagQuery(query: string, skip?: boolean) { - return useTagQueryFilter(query, undefined, skip); + return useTagQueryFilter({ q: query, skip: !!skip }); } const TagsFilter: React.FC = ({ criterion, setCriterion }) => { @@ -99,20 +103,37 @@ const TagsFilter: React.FC = ({ criterion, setCriterion }) => { export const SidebarTagsFilter: React.FC<{ title?: ReactNode; - option: CriterionOption; + option?: CriterionOption; filter: ListFilterModel; setFilter: (f: ListFilterModel) => void; -}> = ({ title, option, filter, setFilter }) => { + filterHook?: (f: ListFilterModel) => ListFilterModel; + sectionID?: string; +}> = ({ + title = , + option = TagsCriterionOption, + filter, + setFilter, + filterHook, + sectionID = "tags", +}) => { const state = useLabeledIdFilterState({ filter, setFilter, + filterHook, option, useQuery: useTagQueryFilter, hierarchical: true, includeSubMessageID: "sub_tags", }); - return ; + return ( + + ); }; export default TagsFilter; diff --git a/ui/v2.5/src/components/List/ItemList.tsx b/ui/v2.5/src/components/List/ItemList.tsx index 4ffeff766..893f1d005 100644 --- a/ui/v2.5/src/components/List/ItemList.tsx +++ b/ui/v2.5/src/components/List/ItemList.tsx @@ -1,34 +1,11 @@ -import React, { - PropsWithChildren, - useCallback, - 
useContext, - useEffect, - useMemo, - useState, -} from "react"; -import * as GQL from "src/core/generated-graphql"; import { QueryResult } from "@apollo/client"; -import { Criterion } from "src/models/list-filter/criteria/criterion"; import { ListFilterModel } from "src/models/list-filter/filter"; -import { - EditFilterDialog, - useShowEditFilter, -} from "src/components/List/EditFilterDialog"; -import { FilterTags } from "./FilterTags"; -import { View } from "./views"; +import { useShowEditFilter } from "src/components/List/EditFilterDialog"; import { IHasID } from "src/utils/data"; -import { - ListContext, - QueryResultContext, - useListContext, - useQueryResultContext, -} from "./ListProvider"; -import { FilterContext, SetFilterURL, useFilter } from "./FilterProvider"; import { useModal } from "src/hooks/modal"; import { IFilterStateHook, IQueryResultHook, - useDefaultFilter, useEnsureValidPage, useFilterOperations, useFilterState, @@ -37,25 +14,24 @@ import { useQueryResult, useScrollToTopOnPageChange, } from "./util"; -import { - FilteredListToolbar, - IFilteredListToolbar, - IItemListOperation, -} from "./FilteredListToolbar"; -import { PagedList } from "./PagedList"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; -interface IFilteredItemList { +interface IFilteredItemList< + T extends QueryResult, + E extends IHasID = IHasID, + M = unknown +> { filterStateProps: IFilterStateHook; - queryResultProps: IQueryResultHook; + queryResultProps: IQueryResultHook; } // Provides the common state and behaviour for filtered item list components export function useFilteredItemList< T extends QueryResult, - E extends IHasID = IHasID ->(props: IFilteredItemList) { - const { configuration: config } = useContext(ConfigurationContext); + E extends IHasID = IHasID, + M = unknown +>(props: IFilteredItemList) { + const { configuration: config } = useConfigurationContext(); // States const filterState = 
useFilterState({ @@ -69,10 +45,10 @@ export function useFilteredItemList< filter, ...props.queryResultProps, }); - const { result, items, totalCount, pages } = queryResult; + const { result, items, totalCount, pages, metadataInfo } = queryResult; const listSelect = useListSelect(items); - const { onSelectAll, onSelectNone } = listSelect; + const { onSelectAll, onSelectNone, onInvertSelection } = listSelect; const modalState = useModal(); const { showModal, closeModal } = modalState; @@ -98,6 +74,7 @@ export function useFilteredItemList< onChangePage: setPage, onSelectAll, onSelectNone, + onInvertSelection, pages, showEditFilter, }); @@ -105,337 +82,13 @@ export function useFilteredItemList< return { filterState, queryResult, + metadataInfo, listSelect, modalState, showEditFilter, }; } -interface IItemListProps { - view?: View; - zoomable?: boolean; - otherOperations?: IItemListOperation[]; - renderContent: ( - result: T, - filter: ListFilterModel, - selectedIds: Set, - onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void, - onChangePage: (page: number) => void, - pageCount: number - ) => React.ReactNode; - renderMetadataByline?: (data: T) => React.ReactNode; - renderEditDialog?: ( - selected: E[], - onClose: (applied: boolean) => void - ) => React.ReactNode; - renderDeleteDialog?: ( - selected: E[], - onClose: (confirmed: boolean) => void - ) => React.ReactNode; - addKeybinds?: ( - result: T, - filter: ListFilterModel, - selectedIds: Set - ) => () => void; - renderToolbar?: (props: IFilteredListToolbar) => React.ReactNode; -} - -export const ItemList = ( - props: IItemListProps -) => { - const { - view, - zoomable, - otherOperations, - renderContent, - renderEditDialog, - renderDeleteDialog, - renderMetadataByline, - addKeybinds, - renderToolbar: providedToolbar, - } = props; - - const { filter, setFilter: updateFilter } = useFilter(); - const { effectiveFilter, result, cachedResult, totalCount } = - useQueryResultContext(); - const listSelect 
= useListContext(); - const { - selectedIds, - getSelected, - onSelectChange, - onSelectAll, - onSelectNone, - } = listSelect; - - // scroll to the top of the page when the page changes - useScrollToTopOnPageChange(filter.currentPage, result.loading); - - const { modal, showModal, closeModal } = useModal(); - - const metadataByline = useMemo(() => { - if (cachedResult.loading) return ""; - - return renderMetadataByline?.(cachedResult) ?? ""; - }, [renderMetadataByline, cachedResult]); - - const pages = Math.ceil(totalCount / filter.itemsPerPage); - - const onChangePage = useCallback( - (p: number) => { - updateFilter(filter.changePage(p)); - }, - [filter, updateFilter] - ); - - useEnsureValidPage(filter, totalCount, updateFilter); - - const showEditFilter = useCallback( - (editingCriterion?: string) => { - function onApplyEditFilter(f: ListFilterModel) { - closeModal(); - updateFilter(f); - } - - showModal( - closeModal()} - editingCriterion={editingCriterion} - /> - ); - }, - [filter, updateFilter, showModal, closeModal] - ); - - useListKeyboardShortcuts({ - currentPage: filter.currentPage, - onChangePage, - onSelectAll, - onSelectNone, - pages, - showEditFilter, - }); - - useEffect(() => { - if (addKeybinds) { - const unbindExtras = addKeybinds(result, effectiveFilter, selectedIds); - return () => { - unbindExtras(); - }; - } - }, [addKeybinds, result, effectiveFilter, selectedIds]); - - const operations = useMemo(() => { - async function onOperationClicked(o: IItemListOperation) { - await o.onClick(result, effectiveFilter, selectedIds); - if (o.postRefetch) { - result.refetch(); - } - } - - return otherOperations?.map((o) => ({ - text: o.text, - onClick: () => { - onOperationClicked(o); - }, - isDisplayed: () => { - if (o.isDisplayed) { - return o.isDisplayed(result, effectiveFilter, selectedIds); - } - - return true; - }, - icon: o.icon, - buttonVariant: o.buttonVariant, - })); - }, [result, effectiveFilter, selectedIds, otherOperations]); - - function onEdit() 
{ - if (!renderEditDialog) { - return; - } - - showModal( - renderEditDialog(getSelected(), (applied) => onEditDialogClosed(applied)) - ); - } - - function onEditDialogClosed(applied: boolean) { - if (applied) { - onSelectNone(); - } - closeModal(); - - // refetch - result.refetch(); - } - - function onDelete() { - if (!renderDeleteDialog) { - return; - } - - showModal( - renderDeleteDialog(getSelected(), (deleted) => - onDeleteDialogClosed(deleted) - ) - ); - } - - function onDeleteDialogClosed(deleted: boolean) { - if (deleted) { - onSelectNone(); - } - closeModal(); - - // refetch - result.refetch(); - } - - function onRemoveCriterion(removedCriterion: Criterion, valueIndex?: number) { - if (valueIndex === undefined) { - updateFilter( - filter.removeCriterion(removedCriterion.criterionOption.type) - ); - } else { - updateFilter( - filter.removeCustomFieldCriterion( - removedCriterion.criterionOption.type, - valueIndex - ) - ); - } - } - - function onClearAllCriteria() { - updateFilter(filter.clearCriteria()); - } - - const filterListToolbarProps: IFilteredListToolbar = { - filter, - setFilter: updateFilter, - listSelect, - showEditFilter, - view: view, - operations: operations, - zoomable: zoomable, - onEdit: renderEditDialog ? onEdit : undefined, - onDelete: renderDeleteDialog ? onDelete : undefined, - }; - - return ( -
        - {providedToolbar ? ( - providedToolbar(filterListToolbarProps) - ) : ( - - )} - showEditFilter(c.criterionOption.type)} - onRemoveCriterion={onRemoveCriterion} - onRemoveAll={() => onClearAllCriteria()} - /> - {modal} - - - {renderContent( - result, - // #4780 - use effectiveFilter to ensure filterHook is applied - effectiveFilter, - selectedIds, - onSelectChange, - onChangePage, - pages - )} - -
        - ); -}; - -interface IItemListContextProps { - filterMode: GQL.FilterMode; - defaultSort?: string; - defaultFilter?: ListFilterModel; - useResult: (filter: ListFilterModel) => T; - getCount: (data: T) => number; - getItems: (data: T) => E[]; - filterHook?: (filter: ListFilterModel) => ListFilterModel; - view?: View; - alterQuery?: boolean; - selectable?: boolean; -} - -// Provides the contexts for the ItemList component. Includes functionality to scroll -// to top on page change. -export const ItemListContext = ( - props: PropsWithChildren> -) => { - const { - filterMode, - defaultSort, - defaultFilter: providedDefaultFilter, - useResult, - getCount, - getItems, - view, - filterHook, - alterQuery = true, - selectable, - children, - } = props; - - const { configuration: config } = useContext(ConfigurationContext); - - const emptyFilter = useMemo( - () => - providedDefaultFilter?.clone() ?? - new ListFilterModel(filterMode, config, { - defaultSortBy: defaultSort, - }), - [config, filterMode, defaultSort, providedDefaultFilter] - ); - - const [filter, setFilterState] = useState( - () => - new ListFilterModel(filterMode, config, { defaultSortBy: defaultSort }) - ); - - const { defaultFilter, loading: defaultFilterLoading } = useDefaultFilter( - emptyFilter, - view - ); - - if (defaultFilterLoading) return null; - - return ( - - - - {({ items }) => ( - - {children} - - )} - - - - ); -}; - export const showWhenSelected = ( result: T, filter: ListFilterModel, diff --git a/ui/v2.5/src/components/List/ListFilter.tsx b/ui/v2.5/src/components/List/ListFilter.tsx index 4933c7e75..ff3be0360 100644 --- a/ui/v2.5/src/components/List/ListFilter.tsx +++ b/ui/v2.5/src/components/List/ListFilter.tsx @@ -1,4 +1,3 @@ -import cloneDeep from "lodash-es/cloneDeep"; import React, { useCallback, useEffect, @@ -23,19 +22,18 @@ import { import { Icon } from "../Shared/Icon"; import { ListFilterModel } from "src/models/list-filter/filter"; import useFocus from "src/utils/focus"; 
-import { FormattedMessage, useIntl } from "react-intl"; -import { SavedFilterDropdown } from "./SavedFilterList"; +import { useIntl } from "react-intl"; import { faCaretDown, faCaretUp, faCheck, faRandom, } from "@fortawesome/free-solid-svg-icons"; -import { FilterButton } from "./Filters/FilterButton"; import { useDebounce } from "src/hooks/debounce"; -import { View } from "./views"; import { ClearableInput } from "../Shared/ClearableInput"; import { useStopWheelScroll } from "src/utils/form"; +import { ISortByOption } from "src/models/list-filter/filter-options"; +import { useConfigurationContext } from "src/hooks/Config"; export function useDebouncedSearchInput( filter: ListFilterModel, @@ -230,70 +228,42 @@ export const PageSizeSelector: React.FC<{ ); }; -interface IListFilterProps { - onFilterUpdate: (newFilter: ListFilterModel) => void; - filter: ListFilterModel; - view?: View; - openFilterDialog: () => void; - withSidebar?: boolean; -} - -export const ListFilter: React.FC = ({ - onFilterUpdate, - filter, - openFilterDialog, - view, - withSidebar, +export const SortBySelect: React.FC<{ + className?: string; + sortBy: string | undefined; + sortDirection: SortDirectionEnum; + options: ISortByOption[]; + onChangeSortBy: (eventKey: string | null) => void; + onChangeSortDirection: () => void; + onReshuffleRandomSort: () => void; +}> = ({ + className, + sortBy, + sortDirection, + options, + onChangeSortBy, + onChangeSortDirection, + onReshuffleRandomSort, }) => { - const filterOptions = filter.options; - const intl = useIntl(); + const { configuration } = useConfigurationContext(); + const { sfwContentMode } = configuration.interface; - useEffect(() => { - Mousetrap.bind("r", () => onReshuffleRandomSort()); - - return () => { - Mousetrap.unbind("r"); - }; - }); - - function onChangePageSize(pp: number) { - const newFilter = cloneDeep(filter); - newFilter.itemsPerPage = pp; - newFilter.currentPage = 1; - onFilterUpdate(newFilter); - } - - function 
onChangeSortDirection() { - const newFilter = cloneDeep(filter); - if (filter.sortDirection === SortDirectionEnum.Asc) { - newFilter.sortDirection = SortDirectionEnum.Desc; - } else { - newFilter.sortDirection = SortDirectionEnum.Asc; - } - - onFilterUpdate(newFilter); - } - - function onChangeSortBy(eventKey: string | null) { - const newFilter = cloneDeep(filter); - newFilter.sortBy = eventKey ?? undefined; - newFilter.currentPage = 1; - onFilterUpdate(newFilter); - } - - function onReshuffleRandomSort() { - const newFilter = cloneDeep(filter); - newFilter.currentPage = 1; - newFilter.randomSeed = -1; - onFilterUpdate(newFilter); - } + const currentSortBy = options.find((o) => o.value === sortBy); + const currentSortByMessageID = currentSortBy + ? !sfwContentMode + ? currentSortBy.messageID + : currentSortBy.sfwMessageID ?? currentSortBy.messageID + : ""; function renderSortByOptions() { - return filterOptions.sortByOptions + return options .map((o) => { + const messageID = !sfwContentMode + ? o.messageID + : o.sfwMessageID ?? o.messageID; return { - message: intl.formatMessage({ id: o.messageID }), + message: intl.formatMessage({ id: messageID }), value: o.value, }; }) @@ -304,102 +274,55 @@ export const ListFilter: React.FC = ({ key={option.value} className="bg-secondary text-white" eventKey={option.value} + data-value={option.value} > {option.message} )); } - function render() { - const currentSortBy = filterOptions.sortByOptions.find( - (o) => o.value === filter.sortBy - ); - - return ( - <> - {!withSidebar && ( -
        - -
        - )} - - {!withSidebar && ( - - { - onFilterUpdate(f); - }} - view={view} - /> - - - - } - > - openFilterDialog()} - filter={filter} - /> - - - )} - - - - - {currentSortBy - ? intl.formatMessage({ id: currentSortBy.messageID }) - : ""} - - - - {renderSortByOptions()} - - - {filter.sortDirection === SortDirectionEnum.Asc - ? intl.formatMessage({ id: "ascending" }) - : intl.formatMessage({ id: "descending" })} - + return ( + + + + {currentSortBy + ? intl.formatMessage({ id: currentSortByMessageID }) + : ""} + + + + {renderSortByOptions()} + + + {sortDirection === SortDirectionEnum.Asc + ? intl.formatMessage({ id: "ascending" }) + : intl.formatMessage({ id: "descending" })} + + } + > + - - {filter.sortBy === "random" && ( - - {intl.formatMessage({ id: "actions.reshuffle" })} - - } - > - - - )} - - - - - ); - } - - return render(); + /> + + + {sortBy === "random" && ( + + {intl.formatMessage({ id: "actions.reshuffle" })} + + } + > + + + )} + + ); }; diff --git a/ui/v2.5/src/components/List/ListOperationButtons.tsx b/ui/v2.5/src/components/List/ListOperationButtons.tsx index 8ea21df98..2a4232fb3 100644 --- a/ui/v2.5/src/components/List/ListOperationButtons.tsx +++ b/ui/v2.5/src/components/List/ListOperationButtons.tsx @@ -1,49 +1,72 @@ -import React, { PropsWithChildren, useEffect } from "react"; -import { - Button, - ButtonGroup, - Dropdown, - OverlayTrigger, - Tooltip, -} from "react-bootstrap"; +import React, { PropsWithChildren, useEffect, useMemo } from "react"; +import { Button, ButtonGroup, Dropdown } from "react-bootstrap"; import Mousetrap from "mousetrap"; import { FormattedMessage, useIntl } from "react-intl"; import { IconDefinition } from "@fortawesome/fontawesome-svg-core"; import { Icon } from "../Shared/Icon"; import { faEllipsisH, + faPencil, faPencilAlt, + faPlay, faTrash, } from "@fortawesome/free-solid-svg-icons"; +import cx from "classnames"; +import { createPortal } from "react-dom"; -export const OperationDropdown: React.FC> = ({ - 
children, -}) => { +export const OperationDropdown: React.FC< + PropsWithChildren<{ + className?: string; + menuPortalTarget?: HTMLElement; + menuClassName?: string; + }> +> = ({ className, menuPortalTarget, menuClassName, children }) => { if (!children) return null; + const menu = ( + + {children} + + ); + return ( - + - - {children} - + {menuPortalTarget ? createPortal(menu, menuPortalTarget) : menu} ); }; +export const OperationDropdownItem: React.FC<{ + text: string; + onClick: () => void; + className?: string; +}> = ({ text, onClick, className }) => { + return ( + + {text} + + ); +}; + export interface IListFilterOperation { text: string; onClick: () => void; isDisplayed?: () => boolean; icon?: IconDefinition; buttonVariant?: string; + className?: string; } interface IListOperationButtonsProps { onSelectAll?: () => void; onSelectNone?: () => void; + onInvertSelection?: () => void; onEdit?: () => void; onDelete?: () => void; itemsSelected?: boolean; @@ -53,6 +76,7 @@ interface IListOperationButtonsProps { export const ListOperationButtons: React.FC = ({ onSelectAll, onSelectNone, + onInvertSelection, onEdit, onDelete, itemsSelected, @@ -63,6 +87,7 @@ export const ListOperationButtons: React.FC = ({ useEffect(() => { Mousetrap.bind("s a", () => onSelectAll?.()); Mousetrap.bind("s n", () => onSelectNone?.()); + Mousetrap.bind("s i", () => onInvertSelection?.()); Mousetrap.bind("e", () => { if (itemsSelected) { @@ -79,13 +104,21 @@ export const ListOperationButtons: React.FC = ({ return () => { Mousetrap.unbind("s a"); Mousetrap.unbind("s n"); + Mousetrap.unbind("s i"); Mousetrap.unbind("e"); Mousetrap.unbind("d d"); }; - }); + }, [ + onSelectAll, + onSelectNone, + onInvertSelection, + itemsSelected, + onEdit, + onDelete, + ]); - function maybeRenderButtons() { - const buttons = (otherOperations ?? []).filter((o) => { + const buttons = useMemo(() => { + const ret = (otherOperations ?? 
[]).filter((o) => { if (!o.icon) { return false; } @@ -96,16 +129,17 @@ export const ListOperationButtons: React.FC = ({ return o.isDisplayed(); }); + if (itemsSelected) { if (onEdit) { - buttons.push({ + ret.push({ icon: faPencilAlt, text: intl.formatMessage({ id: "actions.edit" }), onClick: onEdit, }); } if (onDelete) { - buttons.push({ + ret.push({ icon: faTrash, text: intl.formatMessage({ id: "actions.delete" }), onClick: onDelete, @@ -114,59 +148,76 @@ export const ListOperationButtons: React.FC = ({ } } - if (buttons.length > 0) { - return ( - - {buttons.map((button) => { - return ( - {button.text}} - key={button.text} - > - - - ); - })} - - ); - } - } + return ret; + }, [otherOperations, itemsSelected, onEdit, onDelete, intl]); - function renderSelectAll() { - if (onSelectAll) { - return ( - onSelectAll?.()} - > - - - ); - } - } + const operationButtons = useMemo(() => { + return ( + <> + {buttons.map((button) => { + return ( + + ); + })} + + ); + }, [buttons]); - function renderSelectNone() { - if (onSelectNone) { - return ( - onSelectNone?.()} - > - - - ); + const moreDropdown = useMemo(() => { + function renderSelectAll() { + if (onSelectAll) { + return ( + onSelectAll?.()} + > + + + ); + } } - } - function renderMore() { - const options = [renderSelectAll(), renderSelectNone()].filter((o) => o); + function renderSelectNone() { + if (onSelectNone) { + return ( + onSelectNone?.()} + > + + + ); + } + } + + function renderInvertSelection() { + if (onInvertSelection) { + return ( + onInvertSelection?.()} + > + + + ); + } + } + + const options = [ + renderSelectAll(), + renderSelectNone(), + renderInvertSelection(), + ].filter((o) => o); if (otherOperations) { otherOperations @@ -200,13 +251,164 @@ export const ListOperationButtons: React.FC = ({ {options.length > 0 ? 
options : undefined} ); + }, [otherOperations, onSelectAll, onSelectNone, onInvertSelection]); + + // don't render anything if there are no buttons or operations + if (buttons.length === 0 && !moreDropdown) { + return null; } return ( <> - {maybeRenderButtons()} - - {renderMore()} + + {operationButtons} + {moreDropdown} + ); }; + +export const ListOperations: React.FC<{ + items: number; + hasSelection?: boolean; + operations?: IListFilterOperation[]; + onEdit?: () => void; + onDelete?: () => void; + onPlay?: () => void; + operationsClassName?: string; + operationsMenuClassName?: string; +}> = ({ + items, + hasSelection = false, + operations = [], + onEdit, + onDelete, + onPlay, + operationsClassName = "list-operations", + operationsMenuClassName, +}) => { + const intl = useIntl(); + + const dropdownOperations = useMemo(() => { + return operations.filter((o) => { + if (o.icon) { + return false; + } + + if (!o.isDisplayed) { + return true; + } + + return o.isDisplayed(); + }); + }, [operations]); + + const buttons = useMemo(() => { + const otherButtons = (operations ?? []).filter((o) => { + if (!o.icon) { + return false; + } + + if (!o.isDisplayed) { + return true; + } + + return o.isDisplayed(); + }); + + const ret: React.ReactNode[] = []; + + function addButton(b: React.ReactNode | null) { + if (b) { + ret.push(b); + } + } + + const playButton = + !!items && onPlay ? ( + + ) : null; + + const editButton = + hasSelection && onEdit ? ( + + ) : null; + + const deleteButton = + hasSelection && onDelete ? ( + + ) : null; + + addButton(playButton); + addButton(editButton); + addButton(deleteButton); + + otherButtons.forEach((button) => { + addButton( + + ); + }); + + if (ret.length === 0) { + return null; + } + + return ret; + }, [operations, hasSelection, onDelete, onEdit, onPlay, items, intl]); + + if (dropdownOperations.length === 0 && !buttons) { + return null; + } + + return ( +
        + + {buttons} + + {dropdownOperations.length > 0 && ( + + {dropdownOperations.map((o) => ( + + ))} + + )} + +
        + ); +}; diff --git a/ui/v2.5/src/components/List/ListProvider.tsx b/ui/v2.5/src/components/List/ListProvider.tsx index 2e8854586..8b9ee7bfb 100644 --- a/ui/v2.5/src/components/List/ListProvider.tsx +++ b/ui/v2.5/src/components/List/ListProvider.tsx @@ -63,6 +63,7 @@ const emptyState: IListContextState = { onSelectChange: () => {}, onSelectAll: () => {}, onSelectNone: () => {}, + onInvertSelection: () => {}, items: [], hasSelection: false, selectedItems: [], @@ -80,21 +81,25 @@ export function useListContextOptional() { interface IQueryResultContextOptions< T extends QueryResult, - E extends IHasID = IHasID + E extends IHasID = IHasID, + M = unknown > { filterHook?: (filter: ListFilterModel) => ListFilterModel; useResult: (filter: ListFilterModel) => T; + useMetadataInfo?: (filter: ListFilterModel) => M; getCount: (data: T) => number; getItems: (data: T) => E[]; } export interface IQueryResultContextState< T extends QueryResult = QueryResult, - E extends IHasID = IHasID + E extends IHasID = IHasID, + M = unknown > { effectiveFilter: ListFilterModel; result: T; cachedResult: T; + metadataInfo?: M; items: E[]; totalCount: number; } @@ -104,15 +109,23 @@ export const QueryResultStateContext = export const QueryResultContext = < T extends QueryResult, - E extends IHasID = IHasID + E extends IHasID = IHasID, + M = unknown >( - props: IQueryResultContextOptions & { + props: IQueryResultContextOptions & { children?: - | ((props: IQueryResultContextState) => React.ReactNode) + | ((props: IQueryResultContextState) => React.ReactNode) | React.ReactNode; } ) => { - const { filterHook, useResult, getItems, getCount, children } = props; + const { + filterHook, + useResult, + useMetadataInfo, + getItems, + getCount, + children, + } = props; const { filter } = useFilter(); const effectiveFilter = useMemo(() => { @@ -122,9 +135,16 @@ export const QueryResultContext = < return filter; }, [filter, filterHook]); - const result = useResult(effectiveFilter); + // metadata 
filter is the effective filter with the sort, page size and page number removed + const metadataFilter = useMemo( + () => effectiveFilter.metadataInfo(), + [effectiveFilter] + ); - // use cached query result for pagination and metadata rendering + const result = useResult(effectiveFilter); + const metadataInfo = useMetadataInfo?.(metadataFilter); + + // use cached query result for pagination const cachedResult = useCachedQueryResult(effectiveFilter, result); const items = useMemo(() => getItems(result), [getItems, result]); @@ -133,12 +153,13 @@ export const QueryResultContext = < [getCount, cachedResult] ); - const state: IQueryResultContextState = { + const state: IQueryResultContextState = { effectiveFilter, result, cachedResult, items, totalCount, + metadataInfo, }; return ( @@ -154,7 +175,8 @@ export const QueryResultContext = < export function useQueryResultContext< T extends QueryResult, - E extends IHasID = IHasID + E extends IHasID = IHasID, + M = unknown >() { const context = React.useContext(QueryResultStateContext); @@ -164,5 +186,5 @@ export function useQueryResultContext< ); } - return context as IQueryResultContextState; + return context as IQueryResultContextState; } diff --git a/ui/v2.5/src/components/List/ListViewOptions.tsx b/ui/v2.5/src/components/List/ListViewOptions.tsx index e83ff9290..b681e086d 100644 --- a/ui/v2.5/src/components/List/ListViewOptions.tsx +++ b/ui/v2.5/src/components/List/ListViewOptions.tsx @@ -1,8 +1,16 @@ import React, { useEffect, useRef, useState } from "react"; import Mousetrap from "mousetrap"; -import { Button, Dropdown, Overlay, Popover } from "react-bootstrap"; +import { + Button, + ButtonGroup, + Dropdown, + Overlay, + OverlayTrigger, + Popover, + Tooltip, +} from "react-bootstrap"; import { DisplayMode } from "src/models/list-filter/types"; -import { useIntl } from "react-intl"; +import { IntlShape, useIntl } from "react-intl"; import { Icon } from "../Shared/Icon"; import { faChevronDown, @@ -53,6 +61,10 @@ 
function getLabelId(option: DisplayMode) { return `display_mode.${displayModeId}`; } +function getLabel(intl: IntlShape, option: DisplayMode) { + return intl.formatMessage({ id: getLabelId(option) }); +} + export const ListViewOptions: React.FC = ({ zoomIndex, onSetZoom, @@ -60,9 +72,6 @@ export const ListViewOptions: React.FC = ({ onSetDisplayMode, displayModeOptions, }) => { - const minZoom = 0; - const maxZoom = 3; - const intl = useIntl(); const overlayTarget = useRef(null); @@ -84,18 +93,20 @@ export const ListViewOptions: React.FC = ({ onSetDisplayMode(DisplayMode.Wall); } }); + Mousetrap.bind("v t", () => { + if (displayModeOptions.includes(DisplayMode.Tagger)) { + onSetDisplayMode(DisplayMode.Tagger); + } + }); return () => { Mousetrap.unbind("v g"); Mousetrap.unbind("v l"); Mousetrap.unbind("v w"); + Mousetrap.unbind("v t"); }; }); - function getLabel(option: DisplayMode) { - return intl.formatMessage({ id: getLabelId(option) }); - } - function onChangeZoom(v: number) { if (onSetZoom) { onSetZoom(v); @@ -110,7 +121,7 @@ export const ListViewOptions: React.FC = ({ variant="secondary" title={intl.formatMessage( { id: "display_mode.label_current" }, - { current: getLabel(displayMode) } + { current: getLabel(intl, displayMode) } )} onClick={() => setShowOptions(!showOptions)} > @@ -130,11 +141,10 @@ export const ListViewOptions: React.FC = ({
        {onSetZoom && zoomIndex !== undefined && - displayMode === DisplayMode.Grid ? ( + (displayMode === DisplayMode.Grid || + displayMode === DisplayMode.Wall) ? (
        @@ -149,7 +159,7 @@ export const ListViewOptions: React.FC = ({ onSetDisplayMode(option); }} > - {getLabel(option)} + {getLabel(intl, option)} ))}
        @@ -160,3 +170,48 @@ export const ListViewOptions: React.FC = ({ ); }; + +export const ListViewButtonGroup: React.FC = ({ + zoomIndex, + onSetZoom, + displayMode, + onSetDisplayMode, + displayModeOptions, +}) => { + const intl = useIntl(); + + return ( + <> + {displayModeOptions.length > 1 && ( + + {displayModeOptions.map((option) => ( + + {getLabel(intl, option)} + + } + > + + + ))} + + )} +
        + {onSetZoom && + zoomIndex !== undefined && + (displayMode === DisplayMode.Grid || + displayMode === DisplayMode.Wall) ? ( + + ) : null} +
        + + ); +}; diff --git a/ui/v2.5/src/components/List/Pagination.tsx b/ui/v2.5/src/components/List/Pagination.tsx index e5cc89b40..bfa6697ee 100644 --- a/ui/v2.5/src/components/List/Pagination.tsx +++ b/ui/v2.5/src/components/List/Pagination.tsx @@ -14,6 +14,7 @@ import { Icon } from "../Shared/Icon"; import { faCheck, faChevronDown } from "@fortawesome/free-solid-svg-icons"; import { useStopWheelScroll } from "src/utils/form"; import { Placement } from "react-bootstrap/esm/Overlay"; +import { PatchComponent } from "src/patch"; const PageCount: React.FC<{ totalPages: number; @@ -43,7 +44,7 @@ const PageCount: React.FC<{ useStopWheelScroll(pageInput); const pageOptions = useMemo(() => { - const maxPagesToShow = 10; + const maxPagesToShow = 1000; const min = Math.max(1, currentPage - maxPagesToShow / 2); const max = Math.min(min + maxPagesToShow, totalPages); const pages = []; @@ -158,114 +159,114 @@ interface IPaginationIndexProps { const minPagesForCompact = 4; -export const Pagination: React.FC = ({ - itemsPerPage, - currentPage, - totalItems, - onChangePage, - pagePopupPlacement, -}) => { - const intl = useIntl(); - const totalPages = useMemo( - () => Math.ceil(totalItems / itemsPerPage), - [totalItems, itemsPerPage] - ); +export const Pagination: React.FC = PatchComponent( + "Pagination", + ({ + itemsPerPage, + currentPage, + totalItems, + onChangePage, + pagePopupPlacement, + }) => { + const intl = useIntl(); + const totalPages = useMemo( + () => Math.ceil(totalItems / itemsPerPage), + [totalItems, itemsPerPage] + ); - const pageButtons = useMemo(() => { - if (totalPages >= minPagesForCompact) - return ( - - ); + const pageButtons = useMemo(() => { + if (totalPages >= minPagesForCompact) + return ( + + ); - const pages = [...Array(totalPages).keys()].map((i) => i + 1); + const pages = [...Array(totalPages).keys()].map((i) => i + 1); - return pages.map((page: number) => ( - - )); - }, [totalPages, currentPage, onChangePage, pagePopupPlacement]); + return 
pages.map((page: number) => ( + + )); + }, [totalPages, currentPage, onChangePage, pagePopupPlacement]); - if (totalPages <= 1) return
        ; + if (totalPages <= 1) return
        ; - return ( - - - - {pageButtons} - - - - ); -}; + return ( + + + + {pageButtons} + + + + ); + } +); -export const PaginationIndex: React.FC = ({ - loading, - itemsPerPage, - currentPage, - totalItems, - metadataByline, -}) => { - const intl = useIntl(); +export const PaginationIndex: React.FC = PatchComponent( + "PaginationIndex", + ({ loading, itemsPerPage, currentPage, totalItems, metadataByline }) => { + const intl = useIntl(); - if (loading) return null; + if (loading) return null; - // Build the pagination index string - const firstItemCount: number = Math.min( - (currentPage - 1) * itemsPerPage + 1, - totalItems - ); - const lastItemCount: number = Math.min( - firstItemCount + (itemsPerPage - 1), - totalItems - ); - const indexText: string = `${intl.formatNumber( - firstItemCount - )}-${intl.formatNumber(lastItemCount)} of ${intl.formatNumber(totalItems)}`; + // Build the pagination index string + const firstItemCount: number = Math.min( + (currentPage - 1) * itemsPerPage + 1, + totalItems + ); + const lastItemCount: number = Math.min( + firstItemCount + (itemsPerPage - 1), + totalItems + ); + const indexText: string = `${intl.formatNumber( + firstItemCount + )}-${intl.formatNumber(lastItemCount)} of ${intl.formatNumber(totalItems)}`; - return ( - - {indexText} -
        - {metadataByline} -
        - ); -}; + return ( + + {indexText} +
        + {metadataByline} +
        + ); + } +); diff --git a/ui/v2.5/src/components/List/SavedFilterList.tsx b/ui/v2.5/src/components/List/SavedFilterList.tsx index cbeeaa70a..df1d6136a 100644 --- a/ui/v2.5/src/components/List/SavedFilterList.tsx +++ b/ui/v2.5/src/components/List/SavedFilterList.tsx @@ -30,12 +30,15 @@ import { faBookmark, faSave, faTimes } from "@fortawesome/free-solid-svg-icons"; import { AlertModal } from "../Shared/Alert"; import cx from "classnames"; import { TruncatedInlineText } from "../Shared/TruncatedText"; +import { OperationButton } from "../Shared/OperationButton"; +import { createPortal } from "react-dom"; const ExistingSavedFilterList: React.FC<{ name: string; - setName: (name: string) => void; - existing: { name: string; id: string }[]; -}> = ({ name, setName, existing }) => { + onSelect: (value: SavedFilterDataFragment) => void; + savedFilters: SavedFilterDataFragment[]; + disabled?: boolean; +}> = ({ name, onSelect, savedFilters: existing, disabled = false }) => { const filtered = useMemo(() => { if (!name) return existing; @@ -51,7 +54,8 @@ const ExistingSavedFilterList: React.FC<{ @@ -64,7 +68,8 @@ const ExistingSavedFilterList: React.FC<{ export const SaveFilterDialog: React.FC<{ mode: FilterMode; onClose: (name?: string, id?: string) => void; -}> = ({ mode, onClose }) => { + isSaving?: boolean; +}> = ({ mode, onClose, isSaving = false }) => { const intl = useIntl(); const [filterName, setFilterName] = useState(""); @@ -79,6 +84,74 @@ export const SaveFilterDialog: React.FC<{ return ( + + + + + + + + + setFilterName(e.target.value)} + disabled={isSaving} + /> + + + setFilterName(f.name)} + savedFilters={data?.findSavedFilters ?? 
[]} + /> + + {!!overwritingFilter && ( + + + + )} + + + + onClose(filterName, overwritingFilter?.id)} + > + {intl.formatMessage({ id: "actions.save" })} + + + + ); +}; + +export const LoadFilterDialog: React.FC<{ + mode: FilterMode; + onClose: (filter?: SavedFilterDataFragment) => void; +}> = ({ mode, onClose }) => { + const intl = useIntl(); + const [filterName, setFilterName] = useState(""); + + const { data } = useFindSavedFilters(mode); + + return ( + + + + @@ -94,31 +167,14 @@ export const SaveFilterDialog: React.FC<{ onClose(f)} + savedFilters={data?.findSavedFilters ?? []} /> - - {!!overwritingFilter && ( - - - - )} - ); @@ -166,7 +222,7 @@ const OverwriteAlert: React.FC<{ void; view?: View; + menuPortalTarget?: Element | DocumentFragment; } export const SavedFilterList: React.FC = ({ @@ -786,8 +843,15 @@ export const SavedFilterDropdown: React.FC = (props) => { )); SavedFilterDropdownRef.displayName = "SavedFilterDropdown"; + const menu = ( + + ); + return ( - + = (props) => { - + {props.menuPortalTarget + ? 
createPortal(menu, props.menuPortalTarget) + : menu} ); }; diff --git a/ui/v2.5/src/components/List/ZoomSlider.tsx b/ui/v2.5/src/components/List/ZoomSlider.tsx index dff8e4f57..093b5ec7a 100644 --- a/ui/v2.5/src/components/List/ZoomSlider.tsx +++ b/ui/v2.5/src/components/List/ZoomSlider.tsx @@ -2,19 +2,14 @@ import React, { useEffect } from "react"; import Mousetrap from "mousetrap"; import { Form } from "react-bootstrap"; -export interface IZoomSelectProps { - minZoom: number; - maxZoom: number; - zoomIndex: number; - onChangeZoom: (v: number) => void; -} +const minZoom = 0; +const maxZoom = 3; -export const ZoomSelect: React.FC = ({ - minZoom, - maxZoom, - zoomIndex, - onChangeZoom, -}) => { +export function useZoomKeybinds(props: { + zoomIndex: number | undefined; + onChangeZoom: (v: number) => void; +}) { + const { zoomIndex, onChangeZoom } = props; useEffect(() => { Mousetrap.bind("+", () => { if (zoomIndex !== undefined && zoomIndex < maxZoom) { @@ -32,7 +27,17 @@ export const ZoomSelect: React.FC = ({ Mousetrap.unbind("-"); }; }); +} +export interface IZoomSelectProps { + zoomIndex: number; + onChangeZoom: (v: number) => void; +} + +export const ZoomSelect: React.FC = ({ + zoomIndex, + onChangeZoom, +}) => { return ( div > :not(:first-child) { + margin-left: 0.25rem; + } + } + + .search-term-row { + align-items: center; + display: flex; + gap: 0.5rem; + justify-content: space-between; + margin-bottom: 0.5rem; + margin-left: 1.5rem; + margin-right: 1rem; + + .search-term-input { + flex-basis: 75%; + } + + @include media-breakpoint-down(xs) { + flex-wrap: wrap; + + > span { + width: 100%; + } + + .search-term-input { + flex-basis: 100%; + } + } + } + .filter-tags { border-top: 1px solid rgb(16 22 26 / 40%); padding: 1rem 1rem 0 1rem; @@ -412,11 +454,26 @@ input[type="range"].zoom-slider { } } -.filter-tags .clear-all-button { - color: $text-color; - // to match filter pills - line-height: 16px; - padding: 0; +.filter-tags { + display: flex; + justify-content: 
center; + margin-bottom: 0.5rem; + + .more-tags { + background-color: transparent; + color: #fff; + } + + .clear-all-button { + color: $text-color; + // to match filter pills + line-height: 16px; + padding: 0; + } + + .tag-item.unsupported { + background-color: $warning; + } } .filter-button { @@ -450,7 +507,8 @@ input[type="range"].zoom-slider { } } -.selectable-filter ul { +.selectable-filter ul, +ul.selectable-list { list-style-type: none; margin-top: 0.5rem; max-height: 300px; @@ -476,14 +534,14 @@ input[type="range"].zoom-slider { .excluded-object, .unselected-object { cursor: pointer; - height: 2em; margin-bottom: 0.25rem; + min-height: 2em; a { align-items: center; display: flex; - height: 2em; justify-content: space-between; + min-height: 2em; outline: none; &:hover, @@ -556,7 +614,8 @@ input[type="range"].zoom-slider { margin-bottom: 0.5rem; } -.sidebar-list-filter ul { +.sidebar-list-filter ul, +.folder-filter ul { list-style-type: none; margin-bottom: 0.25rem; max-height: 300px; @@ -582,14 +641,14 @@ input[type="range"].zoom-slider { .excluded-object, .unselected-object { cursor: pointer; - height: 2em; margin-bottom: 0.25rem; + min-height: 2em; a { align-items: center; display: flex; - height: 2em; justify-content: space-between; + min-height: 2em; outline: none; &:hover, @@ -630,7 +689,7 @@ input[type="range"].zoom-slider { } &:hover { - background-color: inherit; + background-color: transparent; } &:hover .exclude-button-text, @@ -673,6 +732,47 @@ input[type="range"].zoom-slider { min-height: 2em; } +.duplicate-sub-options { + margin-left: 2rem; + padding-left: 0.5rem; + + .duplicate-sub-option { + align-items: center; + cursor: pointer; + display: flex; + height: 2em; + opacity: 0.8; + padding-left: 0.5rem; + + &:hover { + background-color: rgba(138, 155, 168, 0.15); + } + } +} + +.sidebar-folder-filter ul, +.folder-filter ul, +ul.selectable-list { + margin-top: 0.25rem; + + .btn.expand-collapse { + font-size: 0.8rem; + padding-left: 0; + 
padding-right: 0.25rem; + text-align: left; + } + + .empty .btn.expand-collapse { + visibility: hidden; + } + + .selected-object a .selected-object-label { + font-size: 0.8em; + overflow-wrap: break-word; + white-space: normal; + } +} + .tilted { transform: rotate(45deg); } @@ -695,7 +795,7 @@ input[type="range"].zoom-slider { background-color: #202b33; position: sticky; top: 0; - z-index: 100; + z-index: 1; } td:first-child { @@ -864,8 +964,15 @@ input[type="range"].zoom-slider { } .filtered-list-toolbar { + align-items: center; + background-color: $body-bg; + gap: 0.5rem; justify-content: center; - margin-bottom: 0.5rem; + + // offset the main padding + margin-top: -0.5rem; + padding-bottom: 0.5rem; + padding-top: 0.5rem; & > .btn-group { flex-wrap: wrap; @@ -881,23 +988,13 @@ input[type="range"].zoom-slider { } } - .btn.display-mode-select { - margin-left: 0.5rem; + // set the width of the zoom-slider-container to prevent buttons moving when + // the slider appears/disappears + .zoom-slider-container { + min-width: 60px; } } -.sidebar-pane .filtered-list-toolbar { - flex-wrap: nowrap; - - & > .btn-group { - align-items: baseline; - } -} - -.search-term-input { - margin-right: 0.5rem; -} - .custom-field-filter { align-items: center; display: flex; @@ -932,37 +1029,185 @@ input[type="range"].zoom-slider { .sidebar-search-container { display: flex; margin-bottom: 0.5rem; - margin-top: 0.25rem; } .search-term-input { flex-grow: 1; - margin-right: 0.25rem; + margin-right: 0; .clearable-text-field { height: 100%; } } -} -@include media-breakpoint-down(xs) { - .sidebar .search-term-input { - margin-right: 0.5rem; + .edit-filter-button { + width: 100%; + } + + .sidebar-footer { + background-color: $body-bg; + bottom: 0; + display: none; + padding: 0.5rem; + position: sticky; + + @include media-breakpoint-down(xs) { + display: flex; + justify-content: center; + } } } -.pagination-footer { - background-color: $body-bg; +@include media-breakpoint-down(xs) { + .sidebar 
.sidebar-search-container { + margin-top: 0.25rem; + } +} + +.pagination-footer-container { + background-color: transparent; bottom: $navbar-height; - padding: 0.5rem 1rem; position: sticky; z-index: 10; @include media-breakpoint-up(sm) { bottom: 0; } +} + +.pagination-footer { + margin: auto; + padding: 0.5rem 1rem 0.75rem; + + width: fit-content; + + .pagination.btn-group { + box-shadow: 0 8px 10px 2px rgb(0 0 0 / 30%); + } .pagination { margin-bottom: 0; + + .btn:disabled { + color: #888; + opacity: 1; + } + } +} + +// on very large screens, offset the margins to center the pagination controls +@media (min-width: 1800px) { + .sidebar-pane:not(.hide-sidebar) { + .filter-tags, + .pagination-index-container, + .pagination-footer-container { + margin-left: -$sidebar-width; + margin-right: 0; + } + } +} + +// hide sidebar Edit Filter button on larger screens +@include media-breakpoint-up(md) { + .sidebar .edit-filter-button { + display: none; + } +} + +// hide the search input field if the sidebar is open on smaller screens +@media (min-width: 576px) and (max-width: 1400px) { + .sidebar-pane:not(.hide-sidebar) .filtered-list-toolbar .search-term-input { + display: none; + } +} + +#more-criteria-popover { + box-shadow: 0 8px 10px 2px rgb(0 0 0 / 30%); + max-width: 400px; + padding: 0.25rem; +} + +// Duration slider styles +.duration-slider, +.age-slider-container { + padding: 0.5rem 0 1rem; + width: 100%; +} + +.duration-label-input, +.age-label-input { + background: transparent; + border: 1px solid transparent; + border-radius: 0.25rem; + color: $text-color; + font-size: 0.875rem; + font-weight: 500; + padding: 0.125rem 0.25rem; + width: 4rem; + + &:hover { + border-color: $secondary; + } + + &:focus { + border-color: $primary; + outline: none; + } +} + +.duration-preset { + cursor: pointer; +} + +.selected-items-info { + align-items: center; + border: 1px solid $secondary; + display: flex; + gap: 0.25rem; + justify-content: flex-end; +} + +.scene-list-toolbar 
.selected-items-info, +.gallery-list-toolbar .selected-items-info { + justify-content: flex-start; +} + +// modify margins for toolbar within sidebar pane to accommodate toggle button +.sidebar-pane .filtered-list-toolbar { + margin-left: 40px; + margin-right: 40px; +} + +// on very large screens, offset the margins to center the toolbar +@media (min-width: 1800px) { + .sidebar-pane:not(.hide-sidebar) { + .filtered-list-toolbar { + margin-left: -$sidebar-width; + margin-right: 0; + } + } +} + +.item-list-container .filtered-list-toolbar.has-selection { + border-radius: 0.5rem; + margin-left: auto; + margin-right: auto; + padding-left: 0.5rem; + padding-right: 0.5rem; + position: sticky; + top: $navbar-height; + width: fit-content; + z-index: 10; + + @include media-breakpoint-down(xs) { + top: 0; + } +} + +.detail-body .filtered-list-toolbar.has-selection { + top: calc($sticky-detail-header-height + $navbar-height); + + @include media-breakpoint-down(xs) { + top: 0; } } diff --git a/ui/v2.5/src/components/List/util.ts b/ui/v2.5/src/components/List/util.ts index bb85145e7..da52ea765 100644 --- a/ui/v2.5/src/components/List/util.ts +++ b/ui/v2.5/src/components/List/util.ts @@ -1,17 +1,24 @@ -import { useCallback, useContext, useEffect, useMemo, useState } from "react"; +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; import Mousetrap from "mousetrap"; import { ListFilterModel } from "src/models/list-filter/filter"; import { useHistory, useLocation } from "react-router-dom"; import { isEqual, isFunction } from "lodash-es"; import { QueryResult } from "@apollo/client"; import { IHasID } from "src/utils/data"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { View } from "./views"; import { usePrevious } from "src/hooks/state"; import * as GQL from "src/core/generated-graphql"; import { DisplayMode } from "src/models/list-filter/types"; import { Criterion } from 
"src/models/list-filter/criteria/criterion"; +function locationEquals( + loc1: ReturnType | undefined, + loc2: ReturnType +) { + return loc1 && loc1.pathname === loc2.pathname && loc1.search === loc2.search; +} + export function useFilterURL( filter: ListFilterModel, setFilter: React.Dispatch>, @@ -24,6 +31,7 @@ export function useFilterURL( const history = useHistory(); const location = useLocation(); + const prevLocation = usePrevious(location); // when the filter changes, update the URL const updateFilter = useCallback( @@ -47,7 +55,8 @@ export function useFilterURL( // and updates the filter accordingly. useEffect(() => { // don't apply if active is false - if (!active) return; + // also don't apply if location is unchanged + if (!active || locationEquals(prevLocation, location)) return; // re-init to load default filter on empty new query params if (!location.search) { @@ -73,7 +82,8 @@ export function useFilterURL( }); }, [ active, - location.search, + prevLocation, + location, defaultFilter, setFilter, updateFilter, @@ -84,7 +94,7 @@ export function useFilterURL( } export function useDefaultFilter(emptyFilter: ListFilterModel, view?: View) { - const { configuration: config, loading } = useContext(ConfigurationContext); + const { configuration: config } = useConfigurationContext(); const defaultFilter = useMemo(() => { if (view && config?.ui.defaultFilters?.[view]) { @@ -104,9 +114,9 @@ export function useDefaultFilter(emptyFilter: ListFilterModel, view?: View) { } }, [view, config?.ui.defaultFilters, emptyFilter]); - const retFilter = loading ? undefined : defaultFilter ?? emptyFilter; + const retFilter = defaultFilter ?? 
emptyFilter; - return { defaultFilter: retFilter, loading }; + return { defaultFilter: retFilter }; } function useEmptyFilter(props: { @@ -129,6 +139,7 @@ function useEmptyFilter(props: { export interface IFilterStateHook { filterMode: GQL.FilterMode; + defaultFilter?: ListFilterModel; defaultSort?: string; view?: View; useURL?: boolean; @@ -139,7 +150,14 @@ export function useFilterState( config?: GQL.ConfigDataFragment; } ) { - const { filterMode, defaultSort, config, view, useURL } = props; + const { + filterMode, + defaultSort, + config, + view, + useURL, + defaultFilter: propDefaultFilter, + } = props; const [filter, setFilterState] = useState( () => @@ -148,14 +166,17 @@ export function useFilterState( const emptyFilter = useEmptyFilter({ filterMode, defaultSort, config }); - const { defaultFilter, loading } = useDefaultFilter(emptyFilter, view); + const { defaultFilter: defaultFilterFromConfig } = useDefaultFilter( + emptyFilter, + view + ); const { setFilter } = useFilterURL(filter, setFilterState, { - defaultFilter, + defaultFilter: propDefaultFilter ?? 
defaultFilterFromConfig, active: useURL, }); - return { loading, filter, setFilter }; + return { filter, setFilter }; } export function useFilterOperations(props: { @@ -196,9 +217,12 @@ export function useFilterOperations(props: { [setFilter] ); - const clearAllCriteria = useCallback(() => { - setFilter((cv) => cv.clearCriteria()); - }, [setFilter]); + const clearAllCriteria = useCallback( + (includeSearchTerm = false) => { + setFilter((cv) => cv.clearCriteria(includeSearchTerm)); + }, + [setFilter] + ); return { setPage, @@ -216,6 +240,7 @@ export function useListKeyboardShortcuts(props: { pages?: number; onSelectAll?: () => void; onSelectNone?: () => void; + onInvertSelection?: () => void; }) { const { currentPage, @@ -224,6 +249,7 @@ export function useListKeyboardShortcuts(props: { pages = 0, onSelectAll, onSelectNone, + onInvertSelection, } = props; // set up hotkeys @@ -285,12 +311,14 @@ export function useListKeyboardShortcuts(props: { useEffect(() => { Mousetrap.bind("s a", () => onSelectAll?.()); Mousetrap.bind("s n", () => onSelectNone?.()); + Mousetrap.bind("s i", () => onInvertSelection?.()); return () => { Mousetrap.unbind("s a"); Mousetrap.unbind("s n"); + Mousetrap.unbind("s i"); }; - }, [onSelectAll, onSelectNone]); + }, [onSelectAll, onSelectNone, onInvertSelection]); } export function useListSelect(items: T[]) { @@ -407,6 +435,14 @@ export function useListSelect(items: T[]) { setLastClickedId(undefined); } + function onInvertSelection() { + setItemsSelected((prevSelected) => { + const selectedSet = new Set(prevSelected.map((item) => item.id)); + return items.filter((item) => !selectedSet.has(item.id)); + }); + setLastClickedId(undefined); + } + // TODO - this is for backwards compatibility const getSelected = useCallback(() => itemsSelected, [itemsSelected]); @@ -420,6 +456,7 @@ export function useListSelect(items: T[]) { onSelectChange, onSelectAll, onSelectNone, + onInvertSelection, hasSelection, }; } @@ -452,43 +489,47 @@ export function 
useCachedQueryResult( result: T ) { const [cachedResult, setCachedResult] = useState(result); - const [lastFilter, setLastFilter] = useState(filter); + const lastFilterRef = useRef(filter); // if we are only changing the page or sort, don't update the result count useEffect(() => { if (!result.loading) { setCachedResult(result); } else { - if (totalCountImpacted(lastFilter, filter)) { + if (totalCountImpacted(lastFilterRef.current, filter)) { setCachedResult(result); } } - setLastFilter(filter); - }, [filter, result, lastFilter]); + lastFilterRef.current = filter; + }, [filter, result]); return cachedResult; } export interface IQueryResultHook< T extends QueryResult, - E extends IHasID = IHasID + E extends IHasID = IHasID, + M = unknown > { filterHook?: (filter: ListFilterModel) => ListFilterModel; useResult: (filter: ListFilterModel) => T; + useMetadataInfo?: (filter: ListFilterModel) => M; getCount: (data: T) => number; getItems: (data: T) => E[]; } export function useQueryResult< T extends QueryResult, - E extends IHasID = IHasID + E extends IHasID = IHasID, + M = unknown >( - props: IQueryResultHook & { + props: IQueryResultHook & { filter: ListFilterModel; } ) { - const { filter, filterHook, useResult, getItems, getCount } = props; + const { filter, filterHook, useResult, useMetadataInfo, getItems, getCount } = + props; const effectiveFilter = useMemo(() => { if (filterHook) { @@ -497,7 +538,14 @@ export function useQueryResult< return filter; }, [filter, filterHook]); + // metadata filter is the effective filter with the sort, page size and page number removed + const metadataFilter = useMemo( + () => effectiveFilter.metadataInfo(), + [effectiveFilter] + ); + const result = useResult(effectiveFilter); + const metadataInfo = useMetadataInfo?.(metadataFilter); // use cached query result for pagination and metadata rendering const cachedResult = useCachedQueryResult(effectiveFilter, result); @@ -512,6 +560,7 @@ export function useQueryResult< return { 
effectiveFilter, + metadataInfo, result, cachedResult, items, diff --git a/ui/v2.5/src/components/List/views.ts b/ui/v2.5/src/components/List/views.ts index 5b9f9798f..4ea4e46d8 100644 --- a/ui/v2.5/src/components/List/views.ts +++ b/ui/v2.5/src/components/List/views.ts @@ -13,6 +13,7 @@ export enum View { TagScenes = "tag_scenes", TagImages = "tag_images", TagPerformers = "tag_performers", + TagGroups = "tag_groups", PerformerScenes = "performer_scenes", PerformerGalleries = "performer_galleries", diff --git a/ui/v2.5/src/components/MainNavbar.tsx b/ui/v2.5/src/components/MainNavbar.tsx index 98bbc26c6..c70994476 100644 --- a/ui/v2.5/src/components/MainNavbar.tsx +++ b/ui/v2.5/src/components/MainNavbar.tsx @@ -11,7 +11,7 @@ import { MessageDescriptor, useIntl, } from "react-intl"; -import { Nav, Navbar, Button, Fade } from "react-bootstrap"; +import { Nav, Navbar, Button } from "react-bootstrap"; import { IconDefinition } from "@fortawesome/fontawesome-svg-core"; import { LinkContainer } from "react-router-bootstrap"; import { Link, NavLink, useLocation, useHistory } from "react-router-dom"; @@ -19,7 +19,7 @@ import Mousetrap from "mousetrap"; import SessionUtils from "src/utils/session"; import { Icon } from "src/components/Shared/Icon"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { ManualStateContext } from "./Help/context"; import { SettingsButton } from "./SettingsButton"; import { @@ -182,7 +182,7 @@ const MainNavbarUtilityItems = PatchComponent( export const MainNavbar: React.FC = () => { const history = useHistory(); const location = useLocation(); - const { configuration, loading } = React.useContext(ConfigurationContext); + const { configuration } = useConfigurationContext(); const { openManual } = React.useContext(ManualStateContext); const [expanded, setExpanded] = useState(false); @@ -360,35 +360,31 @@ export const MainNavbar: React.FC = () => { ref={navbarRef} > - - <> - 
- {menuItems.map(({ href, icon, message }) => ( - - - - - - ))} - - - - + + {menuItems.map(({ href, icon, message }) => ( + + + + + + ))} + + diff --git a/ui/v2.5/src/components/Performers/EditPerformersDialog.tsx b/ui/v2.5/src/components/Performers/EditPerformersDialog.tsx index 71fcbedd9..06ae2834a 100644 --- a/ui/v2.5/src/components/Performers/EditPerformersDialog.tsx +++ b/ui/v2.5/src/components/Performers/EditPerformersDialog.tsx @@ -1,6 +1,6 @@ import React, { useEffect, useState } from "react"; -import { Col, Form, Row } from "react-bootstrap"; -import { FormattedMessage, useIntl } from "react-intl"; +import { Form } from "react-bootstrap"; +import { useIntl } from "react-intl"; import { useBulkPerformerUpdate } from "src/core/StashService"; import * as GQL from "src/core/generated-graphql"; import { ModalComponent } from "../Shared/Modal"; @@ -23,10 +23,13 @@ import { stringToCircumcised, } from "src/utils/circumcised"; import { IndeterminateCheckbox } from "../Shared/IndeterminateCheckbox"; -import { BulkUpdateTextInput } from "../Shared/BulkUpdateTextInput"; +import { BulkUpdateFormGroup, BulkUpdateTextInput } from "../Shared/BulkUpdate"; import { faPencilAlt } from "@fortawesome/free-solid-svg-icons"; -import * as FormUtils from "src/utils/form"; import { CountrySelect } from "../Shared/CountrySelect"; +import { useConfigurationContext } from "src/hooks/Config"; +import cx from "classnames"; +import { BulkUpdateDateInput } from "../Shared/DateInput"; +import { getDateError } from "src/utils/yup"; interface IListOperationProps { selected: GQL.SlimPerformerDataFragment[]; @@ -40,7 +43,8 @@ const performerFields = [ "gender", "birthdate", "death_date", - "career_length", + "career_start", + "career_end", "country", "ethnicity", "eye_color", @@ -61,6 +65,10 @@ export const EditPerformersDialog: React.FC = ( ) => { const intl = useIntl(); const Toast = useToast(); + + const { configuration } = useConfigurationContext(); + const { sfwContentMode } = 
configuration.interface; + const [tagIds, setTagIds] = useState({ mode: GQL.BulkUpdateIdMode.Add, }); @@ -68,17 +76,42 @@ export const EditPerformersDialog: React.FC = ( const [aggregateState, setAggregateState] = useState({}); // height and weight needs conversion to/from number - const [height, setHeight] = useState(); - const [weight, setWeight] = useState(); - const [penis_length, setPenisLength] = useState(); + const [height, setHeight] = useState(); + const [weight, setWeight] = useState(); + const [penis_length, setPenisLength] = useState(); const [updateInput, setUpdateInput] = useState( {} ); const genderOptions = [""].concat(genderStrings); const circumcisedOptions = [""].concat(circumcisedStrings); + const unsetDisabled = props.selected.length < 2; + const [updatePerformers] = useBulkPerformerUpdate(getPerformerInput()); + const [birthdateError, setBirthdateError] = useState(); + const [deathDateError, setDeathDateError] = useState(); + const [careerStartError, setCareerStartError] = useState< + string | undefined + >(); + const [careerEndError, setCareerEndError] = useState(); + + useEffect(() => { + setBirthdateError(getDateError(updateInput.birthdate ?? "", intl)); + }, [updateInput.birthdate, intl]); + + useEffect(() => { + setDeathDateError(getDateError(updateInput.death_date ?? "", intl)); + }, [updateInput.death_date, intl]); + + useEffect(() => { + setCareerStartError(getDateError(updateInput.career_start ?? "", intl)); + }, [updateInput.career_start, intl]); + + useEffect(() => { + setCareerEndError(getDateError(updateInput.career_end ?? "", intl)); + }, [updateInput.career_end, intl]); + // Network state const [isUpdating, setIsUpdating] = useState(false); @@ -114,14 +147,14 @@ export const EditPerformersDialog: React.FC = ( ); if (height !== undefined) { - performerInput.height_cm = parseFloat(height); + performerInput.height_cm = height === null ? 
null : parseFloat(height); } if (weight !== undefined) { - performerInput.weight = parseFloat(weight); + performerInput.weight = weight === null ? null : parseFloat(weight); } - if (penis_length !== undefined) { - performerInput.penis_length = parseFloat(penis_length); + performerInput.penis_length = + penis_length === null ? null : parseFloat(penis_length); } return performerInput; @@ -198,39 +231,29 @@ export const EditPerformersDialog: React.FC = ( setUpdateInput(updateState); }, [props.selected]); - function renderTextField( - name: string, - value: string | undefined | null, - setter: (newValue: string | undefined) => void - ) { - return ( - - - - - setter(newValue)} - unsetDisabled={props.selected.length < 2} - /> - - ); - } - function render() { + // sfw class needs to be set because it is outside body + return ( props.onClose(false), text: intl.formatMessage({ id: "actions.cancel" }), @@ -238,11 +261,8 @@ export const EditPerformersDialog: React.FC = ( }} isRunning={isUpdating} > - - {FormUtils.renderLabel({ - title: intl.formatMessage({ id: "rating" }), - })} - +
        + @@ -250,9 +270,8 @@ export const EditPerformersDialog: React.FC = ( } disabled={isUpdating} /> - - - + + setUpdateField({ favorite: checked })} @@ -261,10 +280,7 @@ export const EditPerformersDialog: React.FC = ( /> - - - - + = ( ))} - + - {renderTextField("disambiguation", updateInput.disambiguation, (v) => - setUpdateField({ disambiguation: v }) - )} - {renderTextField("birthdate", updateInput.birthdate, (v) => - setUpdateField({ birthdate: v }) - )} - {renderTextField("death_date", updateInput.death_date, (v) => - setUpdateField({ death_date: v }) - )} + + + setUpdateField({ disambiguation: newValue }) + } + unsetDisabled={unsetDisabled} + /> + - - - - + + + setUpdateField({ birthdate: newValue }) + } + unsetDisabled={unsetDisabled} + error={birthdateError} + /> + + + + setUpdateField({ death_date: newValue }) + } + unsetDisabled={unsetDisabled} + error={deathDateError} + /> + + setUpdateField({ country: v })} showFlag /> - + - {renderTextField("ethnicity", updateInput.ethnicity, (v) => - setUpdateField({ ethnicity: v }) - )} - {renderTextField("hair_color", updateInput.hair_color, (v) => - setUpdateField({ hair_color: v }) - )} - {renderTextField("eye_color", updateInput.eye_color, (v) => - setUpdateField({ eye_color: v }) - )} - {renderTextField("height", height, (v) => setHeight(v))} - {renderTextField("weight", weight, (v) => setWeight(v))} - {renderTextField("measurements", updateInput.measurements, (v) => - setUpdateField({ measurements: v }) - )} - {renderTextField("penis_length", penis_length, (v) => - setPenisLength(v) - )} + + + setUpdateField({ ethnicity: newValue }) + } + unsetDisabled={unsetDisabled} + /> + + + + setUpdateField({ hair_color: newValue }) + } + unsetDisabled={unsetDisabled} + /> + + + + setUpdateField({ eye_color: newValue }) + } + unsetDisabled={unsetDisabled} + /> + + + setHeight(newValue)} + unsetDisabled={unsetDisabled} + /> + + + setWeight(newValue)} + unsetDisabled={unsetDisabled} + /> + + + + setUpdateField({ 
measurements: newValue }) + } + unsetDisabled={unsetDisabled} + /> + + + setPenisLength(newValue)} + unsetDisabled={unsetDisabled} + /> + - - - - + = ( ))} - + - {renderTextField("fake_tits", updateInput.fake_tits, (v) => - setUpdateField({ fake_tits: v }) - )} - {renderTextField("tattoos", updateInput.tattoos, (v) => - setUpdateField({ tattoos: v }) - )} - {renderTextField("piercings", updateInput.piercings, (v) => - setUpdateField({ piercings: v }) - )} - {renderTextField("career_length", updateInput.career_length, (v) => - setUpdateField({ career_length: v }) - )} + + + setUpdateField({ fake_tits: newValue }) + } + unsetDisabled={unsetDisabled} + /> + + + setUpdateField({ tattoos: newValue })} + unsetDisabled={unsetDisabled} + /> + + + + setUpdateField({ piercings: newValue }) + } + unsetDisabled={unsetDisabled} + /> + + + + setUpdateField({ career_start: newValue }) + } + unsetDisabled={unsetDisabled} + error={careerStartError} + /> + + + + setUpdateField({ career_end: newValue }) + } + unsetDisabled={unsetDisabled} + error={careerEndError} + /> + - - - - + setTagIds({ ...tagIds, ids: itemIDs })} - onSetMode={(newMode) => setTagIds({ ...tagIds, mode: newMode })} - existingIds={existingTagIds ?? []} + onUpdate={(itemIDs) => { + setTagIds((c) => ({ ...c, ids: itemIDs })); + }} + onSetMode={(newMode) => { + setTagIds((c) => ({ ...c, mode: newMode })); + }} ids={tagIds.ids ?? []} + existingIds={existingTagIds} mode={tagIds.mode} menuPortalTarget={document.body} /> - + = ({ gender, className }) => { const intl = useIntl(); if (gender) { - const icon = - gender === GQL.GenderEnum.Male - ? faMars - : gender === GQL.GenderEnum.Female - ? 
faVenus - : faTransgenderAlt; + const icon = genderIcon(gender); + + // new version of fontawesome doesn't seem to support titles on icons, so adding it + // to a span instead return ( - + + + ); } return null; diff --git a/ui/v2.5/src/components/Performers/PerformerCard.tsx b/ui/v2.5/src/components/Performers/PerformerCard.tsx index 02e2a68fd..5f7a26d42 100644 --- a/ui/v2.5/src/components/Performers/PerformerCard.tsx +++ b/ui/v2.5/src/components/Performers/PerformerCard.tsx @@ -6,7 +6,6 @@ import NavUtils from "src/utils/navigation"; import TextUtils from "src/utils/text"; import { GridCard } from "../Shared/GridCard/GridCard"; import { CountryFlag } from "../Shared/CountryFlag"; -import { SweatDrops } from "../Shared/SweatDrops"; import { HoverPopover } from "../Shared/HoverPopover"; import { Icon } from "../Shared/Icon"; import { TagLink } from "../Shared/TagLink"; @@ -17,12 +16,16 @@ import { } from "src/models/list-filter/criteria/criterion"; import { PopoverCountButton } from "../Shared/PopoverCountButton"; import GenderIcon from "./GenderIcon"; -import { faTag } from "@fortawesome/free-solid-svg-icons"; +import { faLink, faTag } from "@fortawesome/free-solid-svg-icons"; +import { faInstagram, faTwitter } from "@fortawesome/free-brands-svg-icons"; import { RatingBanner } from "../Shared/RatingBanner"; import { usePerformerUpdate } from "src/core/StashService"; import { ILabeledId } from "src/models/list-filter/types"; import { FavoriteIcon } from "../Shared/FavoriteIcon"; import { PatchComponent } from "src/patch"; +import { ExternalLinksButton } from "../Shared/ExternalLinksButton"; +import { useConfigurationContext } from "src/hooks/Config"; +import { OCounterButton } from "../Shared/CountButton"; export interface IPerformerCardExtraCriteria { scenes?: ModifierCriterion[]; @@ -100,16 +103,7 @@ const PerformerCardPopovers: React.FC = PatchComponent( function maybeRenderOCounter() { if (!performer.o_counter) return; - return ( -
        - -
        - ); + return ; } function maybeRenderTagPopoverButton() { @@ -176,6 +170,8 @@ const PerformerCardPopovers: React.FC = PatchComponent( const PerformerCardOverlays: React.FC = PatchComponent( "PerformerCard.Overlays", ({ performer }) => { + const { configuration } = useConfigurationContext(); + const uiConfig = configuration?.ui; const [updatePerformer] = usePerformerUpdate(); function onToggleFavorite(v: boolean) { @@ -215,6 +211,63 @@ const PerformerCardOverlays: React.FC = PatchComponent( } } + function maybeRenderLinks() { + if (!uiConfig?.showLinksOnPerformerCard) { + return; + } + + if (performer.urls && performer.urls.length > 0) { + const twitter = performer.urls.filter((u) => + u.match(/https?:\/\/(?:www\.)?(?:twitter|x).com\//) + ); + const instagram = performer.urls.filter((u) => + u.match(/https?:\/\/(?:www\.)?instagram.com\//) + ); + const others = performer.urls.filter( + (u) => !twitter.includes(u) && !instagram.includes(u) + ); + + return ( +
        + {twitter.length > 0 && ( + + )} + {instagram.length > 0 && ( + + )} + {others.length > 0 && ( + + )} +
        + ); + } + } + return ( <> = PatchComponent( className="hide-not-favorite" /> {maybeRenderRatingBanner()} + {maybeRenderLinks()} {maybeRenderFlag()} ); diff --git a/ui/v2.5/src/components/Performers/PerformerCardGrid.tsx b/ui/v2.5/src/components/Performers/PerformerCardGrid.tsx index f8c02d5a7..9d10c0dd1 100644 --- a/ui/v2.5/src/components/Performers/PerformerCardGrid.tsx +++ b/ui/v2.5/src/components/Performers/PerformerCardGrid.tsx @@ -5,6 +5,7 @@ import { useCardWidth, useContainerDimensions, } from "../Shared/GridCard/GridCard"; +import { PatchComponent } from "src/patch"; interface IPerformerCardGrid { performers: GQL.PerformerDataFragment[]; @@ -16,32 +17,29 @@ interface IPerformerCardGrid { const zoomWidths = [240, 300, 375, 470]; -export const PerformerCardGrid: React.FC = ({ - performers, - selectedIds, - zoomIndex, - onSelectChange, - extraCriteria, -}) => { - const [componentRef, { width: containerWidth }] = useContainerDimensions(); - const cardWidth = useCardWidth(containerWidth, zoomIndex, zoomWidths); +export const PerformerCardGrid: React.FC = PatchComponent( + "PerformerCardGrid", + ({ performers, selectedIds, zoomIndex, onSelectChange, extraCriteria }) => { + const [componentRef, { width: containerWidth }] = useContainerDimensions(); + const cardWidth = useCardWidth(containerWidth, zoomIndex, zoomWidths); - return ( -
        - {performers.map((p) => ( - 0} - selected={selectedIds.has(p.id)} - onSelectedChanged={(selected: boolean, shiftKey: boolean) => - onSelectChange(p.id, selected, shiftKey) - } - extraCriteria={extraCriteria} - /> - ))} -
        - ); -}; + return ( +
        + {performers.map((p) => ( + 0} + selected={selectedIds.has(p.id)} + onSelectedChanged={(selected: boolean, shiftKey: boolean) => + onSelectChange(p.id, selected, shiftKey) + } + extraCriteria={extraCriteria} + /> + ))} +
        + ); + } +); diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx index 03530c52e..92a563a81 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx @@ -1,6 +1,6 @@ import React, { useEffect, useMemo, useState } from "react"; -import { Tabs, Tab, Col, Row } from "react-bootstrap"; -import { useIntl } from "react-intl"; +import { Button, Tabs, Tab, Col, Row } from "react-bootstrap"; +import { FormattedMessage, useIntl } from "react-intl"; import { useHistory, Redirect, RouteComponentProps } from "react-router-dom"; import { Helmet } from "react-helmet"; import cx from "classnames"; @@ -16,7 +16,7 @@ import { DetailsEditNavbar } from "src/components/Shared/DetailsEditNavbar"; import { ErrorMessage } from "src/components/Shared/ErrorMessage"; import { LoadingIndicator } from "src/components/Shared/LoadingIndicator"; import { useToast } from "src/hooks/Toast"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { RatingSystem } from "src/components/Shared/Rating/RatingSystem"; import { CompressedPerformerDetailsPanel, @@ -28,6 +28,7 @@ import { PerformerGroupsPanel } from "./PerformerGroupsPanel"; import { PerformerImagesPanel } from "./PerformerImagesPanel"; import { PerformerAppearsWithPanel } from "./performerAppearsWithPanel"; import { PerformerEditPanel } from "./PerformerEditPanel"; +import { PerformerMergeModal } from "../PerformerMergeDialog"; import { PerformerSubmitButton } from "./PerformerSubmitButton"; import { useRatingKeybinds } from "src/hooks/keybinds"; import { DetailImage } from "src/components/Shared/DetailImage"; @@ -47,6 +48,8 @@ import { HeaderImage } from "src/components/Shared/DetailsPage/HeaderImage"; import { LightboxLink } from "src/hooks/Lightbox/LightboxLink"; import { 
PatchComponent } from "src/patch"; import { ILightboxImage } from "src/hooks/Lightbox/types"; +import { goBackOrReplace } from "src/utils/history"; +import { OCounterButton } from "src/components/Shared/CountButton"; interface IProps { performer: GQL.PerformerDataFragment; @@ -238,7 +241,7 @@ const PerformerPage: React.FC = PatchComponent( const intl = useIntl(); // Configuration settings - const { configuration } = React.useContext(ConfigurationContext); + const { configuration } = useConfigurationContext(); const uiConfig = configuration?.ui; const abbreviateCounter = uiConfig?.abbreviateCounters ?? false; const enableBackgroundImage = @@ -248,6 +251,7 @@ const PerformerPage: React.FC = PatchComponent( const [collapsed, setCollapsed] = useState(!showAllDetails); const [isEditing, setIsEditing] = useState(false); + const [isMerging, setIsMerging] = useState(false); const [image, setImage] = useState(); const [encodingImage, setEncodingImage] = useState(false); const loadStickyHeader = useLoadStickyHeader(); @@ -283,6 +287,33 @@ const PerformerPage: React.FC = PatchComponent( } } + function renderMergeButton() { + return ( + + ); + } + + function renderMergeDialog() { + if (!performer.id) return; + return ( + { + setIsMerging(false); + if (mergedId !== undefined && mergedId !== performer.id) { + // By default, the merge destination is the current performer, but + // the user can change it, in which case we need to redirect. + history.replace(`/performers/${mergedId}`); + } + }} + performers={[performer]} + /> + ); + } + useRatingKeybinds( true, configuration?.ui.ratingSystemOptions?.type, @@ -330,7 +361,7 @@ const PerformerPage: React.FC = PatchComponent( return; } - history.goBack(); + goBackOrReplace(history, "/performers"); } function toggleEditing(value?: boolean) { @@ -422,12 +453,17 @@ const PerformerPage: React.FC = PatchComponent( - setRating(value)} - clickToRate - withoutContext - /> +
        + setRating(value)} + clickToRate + withoutContext + /> + {!!performer.o_counter && ( + + )} +
        {!isEditing && ( = PatchComponent( onImageChange={() => {}} classNames="mb-2" customButtons={ -
        - -
        + <> + {renderMergeButton()} +
        + +
        + } > @@ -492,6 +531,7 @@ const PerformerPage: React.FC = PatchComponent(
        + {renderMergeDialog()}
    ); } diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerCreate.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerCreate.tsx index 7726ed9bc..e6d77761e 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerCreate.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerCreate.tsx @@ -23,12 +23,14 @@ const PerformerCreate: React.FC = () => { const [createPerformer] = usePerformerCreate(); - async function onSave(input: GQL.PerformerCreateInput) { + async function onSave(input: GQL.PerformerCreateInput, andNew?: boolean) { const result = await createPerformer({ variables: { input }, }); if (result.data?.performerCreate) { - history.push(`/performers/${result.data.performerCreate.id}`); + if (!andNew) { + history.push(`/performers/${result.data.performerCreate.id}`); + } Toast.success( intl.formatMessage( { id: "toast.created_entity" }, diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerDetailsPanel.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerDetailsPanel.tsx index e96d464be..473bbbd47 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerDetailsPanel.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerDetailsPanel.tsx @@ -12,6 +12,7 @@ import { FormatHeight, FormatPenisLength, FormatWeight, + formatYearRange, } from "../PerformerList"; import { PatchComponent } from "src/patch"; import { CustomFields } from "src/components/Shared/CustomFields"; @@ -89,12 +90,19 @@ export const PerformerDetailsPanel: React.FC = } title={ !fullWidth - ? TextUtils.formatDate(intl, performer.birthdate ?? undefined) + ? TextUtils.formatFuzzyDate( + intl, + performer.birthdate ?? undefined + ) : "" } fullWidth={fullWidth} /> - + {performer.country ? 
( = /> @@ -214,7 +225,7 @@ export const CompressedPerformerDetailsPanel: React.FC = / ; isVisible: boolean; - onSubmit: (performer: GQL.PerformerCreateInput) => Promise; + onSubmit: ( + performer: GQL.PerformerCreateInput, + andNew?: boolean + ) => Promise; onCancel?: () => void; setImage: (image?: string | null) => void; setEncodingImage: (loading: boolean) => void; } -function customFieldInput(isNew: boolean, input: {}) { - if (isNew) { - return input; - } else { - return { - full: input, - }; - } -} - export const PerformerEditPanel: React.FC = ({ performer, isVisible, @@ -88,6 +85,8 @@ export const PerformerEditPanel: React.FC = ({ // Editing state const [scraper, setScraper] = useState(); const [isScraperModalOpen, setIsScraperModalOpen] = useState(false); + const [isStashIDSearchOpen, setIsStashIDSearchOpen] = + useState(false); // Network state const [isLoading, setIsLoading] = useState(false); @@ -97,14 +96,14 @@ export const PerformerEditPanel: React.FC = ({ const [scrapedPerformer, setScrapedPerformer] = useState(); - const { configuration: stashConfig } = React.useContext(ConfigurationContext); + const { configuration: stashConfig } = useConfigurationContext(); const intl = useIntl(); const schema = yup.object({ name: yup.string().required(), disambiguation: yup.string().ensure(), - alias_list: yupUniqueAliases(intl, "name"), + alias_list: yupRequiredStringArray(intl).defined(), gender: yupInputEnum(GQL.GenderEnum).nullable().defined(), birthdate: yupDateString(intl), death_date: yupDateString(intl), @@ -117,10 +116,11 @@ export const PerformerEditPanel: React.FC = ({ measurements: yup.string().ensure(), fake_tits: yup.string().ensure(), penis_length: yupInputNumber().positive().nullable().defined(), - circumcised: yupInputEnum(GQL.CircumisedEnum).nullable().defined(), + circumcised: yupInputEnum(GQL.CircumcisedEnum).nullable().defined(), tattoos: yup.string().ensure(), piercings: yup.string().ensure(), - career_length: yup.string().ensure(), + 
career_start: yupDateString(intl), + career_end: yupDateString(intl), urls: yupUniqueStringList(intl), details: yup.string().ensure(), tag_ids: yup.array(yup.string().required()).defined(), @@ -149,7 +149,8 @@ export const PerformerEditPanel: React.FC = ({ circumcised: performer.circumcised ?? null, tattoos: performer.tattoos ?? "", piercings: performer.piercings ?? "", - career_length: performer.career_length ?? "", + career_start: performer.career_start ?? "", + career_end: performer.career_end ?? "", urls: performer.urls ?? [], details: performer.details ?? "", tag_ids: (performer.tags ?? []).map((t) => t.id), @@ -165,7 +166,7 @@ export const PerformerEditPanel: React.FC = ({ function submit(values: InputValues) { const input = { ...schema.cast(values), - custom_fields: customFieldInput(isNew, values.custom_fields), + custom_fields: formatCustomFieldInput(isNew, values.custom_fields), }; onSave(input); } @@ -250,8 +251,11 @@ export const PerformerEditPanel: React.FC = ({ if (state.fake_tits) { formik.setFieldValue("fake_tits", state.fake_tits); } - if (state.career_length) { - formik.setFieldValue("career_length", state.career_length); + if (state.career_start) { + formik.setFieldValue("career_start", state.career_start); + } + if (state.career_end) { + formik.setFieldValue("career_end", state.career_end); } if (state.tattoos) { formik.setFieldValue("tattoos", state.tattoos); @@ -342,10 +346,10 @@ export const PerformerEditPanel: React.FC = ({ ImageUtils.onImageChange(event, onImageLoad); } - async function onSave(input: InputValues) { + async function onSave(input: InputValues, andNew?: boolean) { setIsLoading(true); try { - await onSubmit(input); + await onSubmit(input, andNew); formik.resetForm(); } catch (e) { Toast.error(e); @@ -353,6 +357,15 @@ export const PerformerEditPanel: React.FC = ({ setIsLoading(false); } + async function onSaveAndNewClick() { + const { values } = formik; + const input = { + ...schema.cast(values), + custom_fields: 
formatCustomFieldInput(isNew, values.custom_fields), + }; + onSave(input, true); + } + // set up hotkeys useEffect(() => { if (isVisible) { @@ -466,7 +479,6 @@ export const PerformerEditPanel: React.FC = ({ setScraper(undefined); } else { setScrapedPerformer(result); - updateStashIDs(performerResult.remote_site_id); } } @@ -570,6 +582,14 @@ export const PerformerEditPanel: React.FC = ({ setScraper(undefined); } + function onStashIDSelected(item?: GQL.StashIdInput) { + if (!item) return; + formik.setFieldValue( + "stash_ids", + addUpdateStashID(formik.values.stash_ids, item) + ); + } + function renderButtons(classNames: string) { return (
    @@ -593,17 +613,33 @@ export const PerformerEditPanel: React.FC = ({
    - + {isNew ? ( + formik.submitForm()} + > + onSaveAndNewClick()}> + + + + ) : ( + + )}
    ); } @@ -660,6 +696,20 @@ export const PerformerEditPanel: React.FC = ({ <> {renderScrapeModal()} {maybeRenderScrapeDialog()} + {isStashIDSearchOpen && ( + s.endpoint + )} + onSelectItem={(item) => { + onStashIDSelected(item); + setIsStashIDSearchOpen(false); + }} + initialQuery={performer.name ?? ""} + /> + )} = ({ {renderInputField("name")} {renderInputField("disambiguation")} - {renderStringListField("alias_list", "aliases")} + {renderStringListField("alias_list", "aliases", { orderable: false })} {renderSelectField("gender", stringGenderMap)} @@ -695,14 +745,29 @@ export const PerformerEditPanel: React.FC = ({ {renderInputField("tattoos", "textarea")} {renderInputField("piercings", "textarea")} - {renderInputField("career_length")} + {renderDateField("career_start")} + {renderDateField("career_end")} {renderURLListField("urls", onScrapePerformerURL, urlScrapable)} {renderInputField("details", "textarea")} {renderTagsField()} - {renderStashIDsField("stash_ids", "performers")} + {renderStashIDsField( + "stash_ids", + "performers", + "stash_ids", + undefined, + + )}
    diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerGalleriesPanel.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerGalleriesPanel.tsx index 5a9d0b81d..44b0401e9 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerGalleriesPanel.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerGalleriesPanel.tsx @@ -1,6 +1,6 @@ import React from "react"; import * as GQL from "src/core/generated-graphql"; -import { GalleryList } from "src/components/Galleries/GalleryList"; +import { FilteredGalleryList } from "src/components/Galleries/GalleryList"; import { usePerformerFilterHook } from "src/core/performers"; import { View } from "src/components/List/views"; import { PatchComponent } from "src/patch"; @@ -14,7 +14,7 @@ export const PerformerGalleriesPanel: React.FC = PatchComponent("PerformerGalleriesPanel", ({ active, performer }) => { const filterHook = usePerformerFilterHook(performer); return ( - = PatchComponent("PerformerGroupsPanel", ({ active, performer }) => { const filterHook = usePerformerFilterHook(performer); return ( - = PatchComponent("PerformerImagesPanel", ({ active, performer }) => { const filterHook = usePerformerFilterHook(performer); return ( - , onChange: (value: ScrapeResult) => void ) { return ( renderScrapedGender(result)} - renderNewField={() => - renderScrapedGender(result, true, (value) => - onChange(result.cloneWithValue(value)) - ) - } + originalField={renderScrapedGender(result)} + newField={renderScrapedGender(result, true, (value) => + onChange(result.cloneWithValue(value)) + )} onChange={onChange} /> ); @@ -105,7 +104,7 @@ function renderScrapedCircumcised( ); } -function renderScrapedCircumcisedRow( +export function renderScrapedCircumcisedRow( title: string, result: ScrapeResult, onChange: (value: ScrapeResult) => void @@ -113,13 +112,12 @@ function renderScrapedCircumcisedRow( return ( renderScrapedCircumcised(result)} - renderNewField={() => - 
renderScrapedCircumcised(result, true, (value) => - onChange(result.cloneWithValue(value)) - ) - } + originalField={renderScrapedCircumcised(result)} + newField={renderScrapedCircumcised(result, true, (value) => + onChange(result.cloneWithValue(value)) + )} onChange={onChange} /> ); @@ -146,6 +144,22 @@ export const PerformerScrapeDialog: React.FC = ( return; } + // #6257 - it is possible (though unsupported) to have multiple stash IDs for the same + // endpoint; in that case, we should prefer the one matching the scraped remote site ID + // if it exists + const stashIDs = (props.performer.stash_ids ?? []).filter( + (s) => s.endpoint === endpoint + ); + if (stashIDs.length > 1 && props.scraped.remote_site_id) { + const matchingID = stashIDs.find( + (s) => s.stash_id === props.scraped.remote_site_id + ); + if (matchingID) { + return matchingID.stash_id; + } + } + + // otherwise, return the first stash ID for the endpoint return props.performer.stash_ids?.find((s) => s.endpoint === endpoint) ?.stash_id; } @@ -176,7 +190,7 @@ export const PerformerScrapeDialog: React.FC = ( return; } - let retEnum: GQL.CircumisedEnum | undefined; + let retEnum: GQL.CircumcisedEnum | undefined; // try to translate from enum values first const upperCircumcised = scrapedCircumcised.toUpperCase(); @@ -258,10 +272,16 @@ export const PerformerScrapeDialog: React.FC = ( const [fakeTits, setFakeTits] = useState>( new ScrapeResult(props.performer.fake_tits, props.scraped.fake_tits) ); - const [careerLength, setCareerLength] = useState>( + const [careerStart, setCareerStart] = useState>( new ScrapeResult( - props.performer.career_length, - props.scraped.career_length + props.performer.career_start, + props.scraped.career_start + ) + ); + const [careerEnd, setCareerEnd] = useState>( + new ScrapeResult( + props.performer.career_end, + props.scraped.career_end ) ); const [tattoos, setTattoos] = useState>( @@ -300,9 +320,10 @@ export const PerformerScrapeDialog: React.FC = ( ) ); - const { tags, 
newTags, scrapedTagsRow } = useScrapedTags( + const { tags, newTags, scrapedTagsRow, linkDialog } = useScrapedTags( props.performerTags, - props.scraped.tags + props.scraped.tags, + endpoint ); const [image, setImage] = useState>( @@ -332,7 +353,8 @@ export const PerformerScrapeDialog: React.FC = ( fakeTits, penisLength, circumcised, - careerLength, + careerStart, + careerEnd, tattoos, piercings, urls, @@ -364,7 +386,8 @@ export const PerformerScrapeDialog: React.FC = ( height: height.getNewValue(), measurements: measurements.getNewValue(), fake_tits: fakeTits.getNewValue(), - career_length: careerLength.getNewValue(), + career_start: careerStart.getNewValue(), + career_end: careerEnd.getNewValue(), tattoos: tattoos.getNewValue(), piercings: piercings.getNewValue(), urls: urls.getNewValue(), @@ -385,16 +408,19 @@ export const PerformerScrapeDialog: React.FC = ( return ( <> setName(value)} /> setDisambiguation(value)} /> setAliases(value)} @@ -405,46 +431,55 @@ export const PerformerScrapeDialog: React.FC = ( (value) => setGender(value) )} setBirthdate(value)} /> setDeathDate(value)} /> setEthnicity(value)} /> setCountry(value)} /> setHairColor(value)} /> setEyeColor(value)} /> setWeight(value)} /> setHeight(value)} /> setPenisLength(value)} @@ -455,42 +490,56 @@ export const PerformerScrapeDialog: React.FC = ( (value) => setCircumcised(value) )} setMeasurements(value)} /> setFakeTits(value)} /> setCareerLength(value)} + field="career_start" + title={intl.formatMessage({ id: "career_start" })} + result={careerStart} + onChange={(value) => setCareerStart(value)} + /> + setCareerEnd(value)} /> setTattoos(value)} /> setPiercings(value)} /> setURLs(value)} /> setDetails(value)} /> {scrapedTagsRow} = ( onChange={(value) => setImage(value)} /> = ( ); } + if (linkDialog) { + return linkDialog; + } + return ( { props.onClose(apply ? 
makeNewScrapedItem() : undefined); }} - /> + > + {renderScrapeRows()} + ); }; diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/performerAppearsWithPanel.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/performerAppearsWithPanel.tsx index 913d16625..b6973bf83 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/performerAppearsWithPanel.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/performerAppearsWithPanel.tsx @@ -1,6 +1,6 @@ import React from "react"; import * as GQL from "src/core/generated-graphql"; -import { PerformerList } from "src/components/Performers/PerformerList"; +import { FilteredPerformerList } from "src/components/Performers/PerformerList"; import { usePerformerFilterHook } from "src/core/performers"; import { View } from "src/components/List/views"; import { PatchComponent } from "src/patch"; @@ -24,7 +24,7 @@ export const PerformerAppearsWithPanel: React.FC = const filterHook = usePerformerFilterHook(performer); return ( - { const intl = useIntl(); @@ -109,7 +137,16 @@ export const FormatWeight = (weight?: number | null) => { ); }; -export const FormatCircumcised = (circumcised?: GQL.CircumisedEnum | null) => { +export function formatYearRange( + start?: string | null, + end?: string | null +): string | undefined { + if (!start && !end) return undefined; + + return `${start ?? ""} - ${end ?? 
""}`; +} + +export const FormatCircumcised = (circumcised?: GQL.CircumcisedEnum | null) => { const intl = useIntl(); if (!circumcised) { return ""; @@ -159,184 +196,466 @@ interface IPerformerList { view?: View; alterQuery?: boolean; extraCriteria?: IPerformerCardExtraCriteria; + extraOperations?: IItemListOperation[]; } -export const PerformerList: React.FC = ({ +const PerformerList: React.FC<{ + performers: GQL.PerformerDataFragment[]; + filter: ListFilterModel; + selectedIds: Set; + onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; + extraCriteria?: IPerformerCardExtraCriteria; +}> = PatchComponent( + "PerformerList", + ({ performers, filter, selectedIds, onSelectChange, extraCriteria }) => { + if (performers.length === 0 && filter.displayMode !== DisplayMode.Tagger) { + return null; + } + + if (filter.displayMode === DisplayMode.Grid) { + return ( + + ); + } + if (filter.displayMode === DisplayMode.List) { + return ( + + ); + } + if (filter.displayMode === DisplayMode.Tagger) { + return ; + } + + return null; + } +); + +const PerformerFilterSidebarSections = PatchContainerComponent( + "FilteredPerformerList.SidebarSections" +); + +const SidebarContent: React.FC<{ + filter: ListFilterModel; + setFilter: (filter: ListFilterModel) => void; + filterHook?: (filter: ListFilterModel) => ListFilterModel; + view?: View; + sidebarOpen: boolean; + onClose?: () => void; + showEditFilter: (editingCriterion?: string) => void; + count?: number; + focus?: ReturnType; +}> = ({ + filter, + setFilter, filterHook, view, - alterQuery, - extraCriteria, + showEditFilter, + sidebarOpen, + onClose, + count, + focus, }) => { - const intl = useIntl(); + const showResultsId = + count !== undefined ? 
"actions.show_count_results" : "actions.show_results"; + + const AgeCriterionOption = PerformerListFilterOptions.criterionOptions.find( + (c) => c.type === "age" + ); + + return ( + <> + + + + + + } + data-type={FavoritePerformerCriterionOption.type} + option={FavoritePerformerCriterionOption} + filter={filter} + setFilter={setFilter} + sectionID="favourite" + /> + } + option={GenderCriterionOption} + filter={filter} + setFilter={setFilter} + sectionID="gender" + /> + } + option={AgeCriterionOption!} + filter={filter} + setFilter={setFilter} + sectionID="age" + /> + + +
    + +
    + + ); +}; + +function useViewRandom(filter: ListFilterModel, count: number) { const history = useHistory(); - const [isExportDialogOpen, setIsExportDialogOpen] = useState(false); - const [isExportAll, setIsExportAll] = useState(false); - const filterMode = GQL.FilterMode.Performers; + const viewRandom = useCallback(async () => { + // query for a random performer + if (count === 0) { + return; + } - const otherOperations = [ - { - text: intl.formatMessage({ id: "actions.open_random" }), - onClick: openRandom, - }, - { - text: intl.formatMessage({ id: "actions.export" }), - onClick: onExport, - isDisplayed: showWhenSelected, - }, - { - text: intl.formatMessage({ id: "actions.export_all" }), - onClick: onExportAll, - }, - ]; + const index = Math.floor(Math.random() * count); + const filterCopy = cloneDeep(filter); + filterCopy.itemsPerPage = 1; + filterCopy.currentPage = index + 1; + const singleResult = await queryFindPerformers(filterCopy); + if (singleResult.data.findPerformers.performers.length === 1) { + const { id } = singleResult.data.findPerformers.performers[0]; + // navigate to the image player page + history.push(`/performers/${id}`); + } + }, [history, filter, count]); - function addKeybinds( - result: GQL.FindPerformersQueryResult, - filter: ListFilterModel - ) { + return viewRandom; +} + +function useAddKeybinds(filter: ListFilterModel, count: number) { + const viewRandom = useViewRandom(filter, count); + + useEffect(() => { Mousetrap.bind("p r", () => { - openRandom(result, filter); + viewRandom(); }); return () => { Mousetrap.unbind("p r"); }; - } + }, [viewRandom]); +} - async function openRandom( - result: GQL.FindPerformersQueryResult, - filter: ListFilterModel - ) { - if (result.data?.findPerformers) { - const { count } = result.data.findPerformers; - const index = Math.floor(Math.random() * count); - const filterCopy = cloneDeep(filter); - filterCopy.itemsPerPage = 1; - filterCopy.currentPage = index + 1; - const singleResult = await 
queryFindPerformers(filterCopy); - if (singleResult.data.findPerformers.performers.length === 1) { - const { id } = singleResult.data.findPerformers.performers[0]!; - history.push(`/performers/${id}`); - } - } - } +export const FilteredPerformerList = PatchComponent( + "FilteredPerformerList", + (props: IPerformerList) => { + const intl = useIntl(); + const history = useHistory(); - async function onExport() { - setIsExportAll(false); - setIsExportDialogOpen(true); - } + const searchFocus = useFocus(); - async function onExportAll() { - setIsExportAll(true); - setIsExportDialogOpen(true); - } + const { + filterHook, + view, + alterQuery, + extraCriteria, + extraOperations = [], + } = props; - function renderContent( - result: GQL.FindPerformersQueryResult, - filter: ListFilterModel, - selectedIds: Set, - onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void - ) { - function maybeRenderPerformerExportDialog() { - if (isExportDialogOpen) { - return ( - <> - setIsExportDialogOpen(false)} - /> - - ); - } + // States + const { + showSidebar, + setShowSidebar, + sectionOpen, + setSectionOpen, + loading: sidebarStateLoading, + } = useSidebarState(view); + + const { filterState, queryResult, modalState, listSelect, showEditFilter } = + useFilteredItemList({ + filterStateProps: { + filterMode: GQL.FilterMode.Performers, + view, + useURL: alterQuery, + }, + queryResultProps: { + useResult: useFindPerformers, + getCount: (r) => r.data?.findPerformers.count ?? 0, + getItems: (r) => r.data?.findPerformers.performers ?? 
[], + filterHook, + }, + }); + + const { filter, setFilter } = filterState; + + const { effectiveFilter, result, cachedResult, items, totalCount } = + queryResult; + + const { + selectedIds, + selectedItems, + onSelectChange, + onSelectAll, + onSelectNone, + onInvertSelection, + hasSelection, + } = listSelect; + + const { modal, showModal, closeModal } = modalState; + + // Utility hooks + const { setPage, removeCriterion, clearAllCriteria } = useFilterOperations({ + filter, + setFilter, + }); + + useAddKeybinds(effectiveFilter, totalCount); + useFilteredSidebarKeybinds({ + showSidebar, + setShowSidebar, + }); + + useEffect(() => { + Mousetrap.bind("e", () => { + if (hasSelection) { + onEdit?.(); + } + }); + + Mousetrap.bind("d d", () => { + if (hasSelection) { + onDelete?.(); + } + }); + + return () => { + Mousetrap.unbind("e"); + Mousetrap.unbind("d d"); + }; + }); + + const onCloseEditDelete = useCloseEditDelete({ + closeModal, + onSelectNone, + result, + }); + + const viewRandom = useViewRandom(effectiveFilter, totalCount); + + function onExport(all: boolean) { + showModal( + closeModal()} + /> + ); } - function renderPerformers() { - if (!result.data?.findPerformers) return; - - if (filter.displayMode === DisplayMode.Grid) { - return ( - - ); - } - if (filter.displayMode === DisplayMode.List) { - return ( - - ); - } - if (filter.displayMode === DisplayMode.Tagger) { - return ( - - ); - } + function onEdit() { + showModal( + + ); } - return ( - <> - {maybeRenderPerformerExportDialog()} - {renderPerformers()} - - ); - } + function onDelete() { + showModal( + + ); + } - function renderEditDialog( - selectedPerformers: GQL.SlimPerformerDataFragment[], - onClose: (applied: boolean) => void - ) { - return ( - - ); - } + function onMerge() { + showModal( + { + closeModal(); + if (mergedId) { + history.push(`/performers/${mergedId}`); + } + }} + show + /> + ); + } - function renderDeleteDialog( - selectedPerformers: GQL.SlimPerformerDataFragment[], - onClose: 
(confirmed: boolean) => void - ) { - return ( - ({ + ...o, + isDisplayed: o.isDisplayed + ? () => o.isDisplayed!(result, filter, selectedIds) + : undefined, + onClick: () => { + o.onClick(result, filter, selectedIds); + }, + })); + + const otherOperations: IListFilterOperation[] = [ + ...convertedExtraOperations, + { + text: intl.formatMessage({ id: "actions.select_all" }), + onClick: () => onSelectAll(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.select_none" }), + onClick: () => onSelectNone(), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.invert_selection" }), + onClick: () => onInvertSelection(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.open_random" }), + onClick: viewRandom, + }, + { + text: `${intl.formatMessage({ id: "actions.merge" })}…`, + onClick: onMerge, + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.export" }), + onClick: () => onExport(false), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.export_all" }), + onClick: () => onExport(true), + }, + ]; + + // render + if (sidebarStateLoading) return null; + + const operations = ( + ); - } - return ( - - - - ); -}; + return ( +
    + {modal} + + + + setShowSidebar(false)}> + setShowSidebar(false)} + count={cachedResult.loading ? undefined : totalCount} + focus={searchFocus} + /> + + setShowSidebar(!showSidebar)} + > + + + showEditFilter(c.criterionOption.type)} + onRemoveCriterion={removeCriterion} + onRemoveAll={clearAllCriteria} + /> + +
    + setFilter(filter.changePage(page))} + /> + +
    + + + + + + {totalCount > filter.itemsPerPage && ( +
    +
    + +
    +
    + )} +
    +
    +
    +
    + ); + } +); diff --git a/ui/v2.5/src/components/Performers/PerformerListTable.tsx b/ui/v2.5/src/components/Performers/PerformerListTable.tsx index 6a3818824..c155d1298 100644 --- a/ui/v2.5/src/components/Performers/PerformerListTable.tsx +++ b/ui/v2.5/src/components/Performers/PerformerListTable.tsx @@ -17,6 +17,7 @@ import { FormatHeight, FormatPenisLength, FormatWeight, + formatYearRange, } from "./PerformerList"; import TextUtils from "src/utils/text"; import { getCountryByISO } from "src/utils/country"; @@ -116,7 +117,7 @@ export const PerformerListTable: React.FC = ( @@ -188,7 +189,7 @@ export const PerformerListTable: React.FC = ( ); const CareerLengthCell = (performer: GQL.PerformerDataFragment) => ( - {performer.career_length} + <>{formatYearRange(performer.career_start, performer.career_end) ?? ""} ); const SceneCountCell = (performer: GQL.PerformerDataFragment) => ( @@ -333,19 +334,19 @@ export const PerformerListTable: React.FC = ( }, { value: "scene_count", - label: intl.formatMessage({ id: "scene_count" }), + label: intl.formatMessage({ id: "scenes" }), defaultShow: true, render: SceneCountCell, }, { value: "gallery_count", - label: intl.formatMessage({ id: "gallery_count" }), + label: intl.formatMessage({ id: "galleries" }), defaultShow: true, render: GalleryCountCell, }, { value: "image_count", - label: intl.formatMessage({ id: "image_count" }), + label: intl.formatMessage({ id: "images" }), defaultShow: true, render: ImageCountCell, }, diff --git a/ui/v2.5/src/components/Performers/PerformerMergeDialog.tsx b/ui/v2.5/src/components/Performers/PerformerMergeDialog.tsx new file mode 100644 index 000000000..c44a65a8f --- /dev/null +++ b/ui/v2.5/src/components/Performers/PerformerMergeDialog.tsx @@ -0,0 +1,916 @@ +import { Form, Col, Row, Button } from "react-bootstrap"; +import React, { useEffect, useMemo, useState } from "react"; +import * as GQL from "src/core/generated-graphql"; +import { Icon } from "../Shared/Icon"; +import { LoadingIndicator 
} from "../Shared/LoadingIndicator"; +import { + circumcisedToString, + stringToCircumcised, +} from "src/utils/circumcised"; +import * as FormUtils from "src/utils/form"; +import { genderToString, stringToGender } from "src/utils/gender"; +import ImageUtils from "src/utils/image"; +import { + mutatePerformerMerge, + queryFindPerformersByID, +} from "src/core/StashService"; +import { FormattedMessage, useIntl } from "react-intl"; +import { useToast } from "src/hooks/Toast"; +import { faExchangeAlt, faSignInAlt } from "@fortawesome/free-solid-svg-icons"; +import { ScrapeDialog } from "../Shared/ScrapeDialog/ScrapeDialog"; +import { + ScrapedCustomFieldRows, + ScrapeDialogRow, + ScrapedImageRow, + ScrapedInputGroupRow, + ScrapedStringListRow, + ScrapedTextAreaRow, +} from "../Shared/ScrapeDialog/ScrapeDialogRow"; +import { ModalComponent } from "../Shared/Modal"; +import { sortStoredIdObjects, uniqIDStoredIDs } from "src/utils/data"; +import { + CustomFieldScrapeResults, + ObjectListScrapeResult, + ScrapeResult, + hasScrapedValues, +} from "../Shared/ScrapeDialog/scrapeResult"; +import { ScrapedTagsRow } from "../Shared/ScrapeDialog/ScrapedObjectsRow"; +import { + renderScrapedGenderRow, + renderScrapedCircumcisedRow, +} from "./PerformerDetails/PerformerScrapeDialog"; +import { PerformerSelect } from "./PerformerSelect"; +import { uniq } from "lodash-es"; +import { StashIDsField } from "../Shared/StashID"; + +type MergeOptions = { + values: GQL.PerformerUpdateInput; +}; + +interface IPerformerMergeDetailsProps { + sources: GQL.PerformerDataFragment[]; + dest: GQL.PerformerDataFragment; + onClose: (options?: MergeOptions) => void; +} + +const PerformerMergeDetails: React.FC = ({ + sources, + dest, + onClose, +}) => { + const intl = useIntl(); + + const [loading, setLoading] = useState(true); + + const [name, setName] = useState>( + new ScrapeResult(dest.name) + ); + const [disambiguation, setDisambiguation] = useState>( + new ScrapeResult(dest.disambiguation) + ); + 
const [aliases, setAliases] = useState>( + new ScrapeResult(dest.alias_list) + ); + const [birthdate, setBirthdate] = useState>( + new ScrapeResult(dest.birthdate) + ); + const [deathDate, setDeathDate] = useState>( + new ScrapeResult(dest.death_date) + ); + const [ethnicity, setEthnicity] = useState>( + new ScrapeResult(dest.ethnicity) + ); + const [country, setCountry] = useState>( + new ScrapeResult(dest.country) + ); + const [hairColor, setHairColor] = useState>( + new ScrapeResult(dest.hair_color) + ); + const [eyeColor, setEyeColor] = useState>( + new ScrapeResult(dest.eye_color) + ); + const [height, setHeight] = useState>( + new ScrapeResult(dest.height_cm?.toString()) + ); + const [weight, setWeight] = useState>( + new ScrapeResult(dest.weight?.toString()) + ); + const [penisLength, setPenisLength] = useState>( + new ScrapeResult(dest.penis_length?.toString()) + ); + const [measurements, setMeasurements] = useState>( + new ScrapeResult(dest.measurements) + ); + const [fakeTits, setFakeTits] = useState>( + new ScrapeResult(dest.fake_tits) + ); + const [careerStart, setCareerStart] = useState>( + new ScrapeResult(dest.career_start?.toString()) + ); + const [careerEnd, setCareerEnd] = useState>( + new ScrapeResult(dest.career_end?.toString()) + ); + const [tattoos, setTattoos] = useState>( + new ScrapeResult(dest.tattoos) + ); + const [piercings, setPiercings] = useState>( + new ScrapeResult(dest.piercings) + ); + const [urls, setURLs] = useState>( + new ScrapeResult(dest.urls) + ); + const [gender, setGender] = useState>( + new ScrapeResult(genderToString(dest.gender)) + ); + const [circumcised, setCircumcised] = useState>( + new ScrapeResult(circumcisedToString(dest.circumcised)) + ); + const [details, setDetails] = useState>( + new ScrapeResult(dest.details) + ); + const [tags, setTags] = useState>( + new ObjectListScrapeResult( + sortStoredIdObjects(dest.tags.map(idToStoredID)) + ) + ); + + const [stashIDs, setStashIDs] = useState(new ScrapeResult([])); + 
+ const [image, setImage] = useState>( + new ScrapeResult(dest.image_path) + ); + + const [customFields, setCustomFields] = useState( + new Map() + ); + + function idToStoredID(o: { id: string; name: string }) { + return { + stored_id: o.id, + name: o.name, + }; + } + + // calculate the values for everything + // uses the first set value for single value fields, and combines all + useEffect(() => { + async function loadImages() { + const src = sources.find((s) => s.image_path); + if (!dest.image_path || !src) return; + + setLoading(true); + + const destData = await ImageUtils.imageToDataURL(dest.image_path); + const srcData = await ImageUtils.imageToDataURL(src.image_path!); + + // keep destination image by default + const useNewValue = false; + setImage(new ScrapeResult(destData, srcData, useNewValue)); + + setLoading(false); + } + + // append dest to all so that if dest has stash_ids with the same + // endpoint, then it will be excluded first + const all = sources.concat(dest); + + setName( + new ScrapeResult(dest.name, sources.find((s) => s.name)?.name, !dest.name) + ); + setDisambiguation( + new ScrapeResult( + dest.disambiguation, + sources.find((s) => s.disambiguation)?.disambiguation, + !dest.disambiguation + ) + ); + + // default alias list should be the existing aliases, plus the names of all sources, + // plus all source aliases, deduplicated + const allAliases = uniq( + dest.alias_list.concat( + sources.map((s) => s.name), + sources.flatMap((s) => s.alias_list) + ) + ); + + setAliases( + new ScrapeResult(dest.alias_list, allAliases, !!allAliases.length) + ); + setBirthdate( + new ScrapeResult( + dest.birthdate, + sources.find((s) => s.birthdate)?.birthdate, + !dest.birthdate + ) + ); + setDeathDate( + new ScrapeResult( + dest.death_date, + sources.find((s) => s.death_date)?.death_date, + !dest.death_date + ) + ); + setEthnicity( + new ScrapeResult( + dest.ethnicity, + sources.find((s) => s.ethnicity)?.ethnicity, + !dest.ethnicity + ) + ); + setCountry( + 
new ScrapeResult( + dest.country, + sources.find((s) => s.country)?.country, + !dest.country + ) + ); + setHairColor( + new ScrapeResult( + dest.hair_color, + sources.find((s) => s.hair_color)?.hair_color, + !dest.hair_color + ) + ); + setEyeColor( + new ScrapeResult( + dest.eye_color, + sources.find((s) => s.eye_color)?.eye_color, + !dest.eye_color + ) + ); + setHeight( + new ScrapeResult( + dest.height_cm?.toString(), + sources.find((s) => s.height_cm)?.height_cm?.toString(), + !dest.height_cm + ) + ); + setWeight( + new ScrapeResult( + dest.weight?.toString(), + sources.find((s) => s.weight)?.weight?.toString(), + !dest.weight + ) + ); + + setPenisLength( + new ScrapeResult( + dest.penis_length?.toString(), + sources.find((s) => s.penis_length)?.penis_length?.toString(), + !dest.penis_length + ) + ); + setMeasurements( + new ScrapeResult( + dest.measurements, + sources.find((s) => s.measurements)?.measurements, + !dest.measurements + ) + ); + setFakeTits( + new ScrapeResult( + dest.fake_tits, + sources.find((s) => s.fake_tits)?.fake_tits, + !dest.fake_tits + ) + ); + setCareerStart( + new ScrapeResult( + dest.career_start?.toString(), + sources.find((s) => s.career_start)?.career_start?.toString(), + !dest.career_start + ) + ); + setCareerEnd( + new ScrapeResult( + dest.career_end?.toString(), + sources.find((s) => s.career_end)?.career_end?.toString(), + !dest.career_end + ) + ); + setTattoos( + new ScrapeResult( + dest.tattoos, + sources.find((s) => s.tattoos)?.tattoos, + !dest.tattoos + ) + ); + setPiercings( + new ScrapeResult( + dest.piercings, + sources.find((s) => s.piercings)?.piercings, + !dest.piercings + ) + ); + setURLs( + new ScrapeResult( + dest.urls ?? [], + uniq(all.map((s) => s.urls ?? []).flat()) + ) + ); + setGender( + new ScrapeResult( + genderToString(dest.gender), + sources.find((s) => s.gender)?.gender + ? 
genderToString(sources.find((s) => s.gender)?.gender) + : undefined, + !dest.gender + ) + ); + setCircumcised( + new ScrapeResult( + circumcisedToString(dest.circumcised), + sources.find((s) => s.circumcised)?.circumcised + ? circumcisedToString(sources.find((s) => s.circumcised)?.circumcised) + : undefined, + !dest.circumcised + ) + ); + setDetails( + new ScrapeResult( + dest.details, + sources.find((s) => s.details)?.details, + !dest.details + ) + ); + setTags( + new ObjectListScrapeResult( + sortStoredIdObjects(dest.tags.map(idToStoredID)), + uniqIDStoredIDs(all.map((s) => s.tags.map(idToStoredID)).flat()) + ) + ); + setStashIDs( + new ScrapeResult( + dest.stash_ids, + all + .map((s) => s.stash_ids) + .flat() + .filter((s, index, a) => { + // remove entries with duplicate endpoints + return index === a.findIndex((ss) => ss.endpoint === s.endpoint); + }) + ) + ); + + setImage( + new ScrapeResult( + dest.image_path, + sources.find((s) => s.image_path)?.image_path, + !dest.image_path + ) + ); + + const customFieldNames = new Set(Object.keys(dest.custom_fields)); + + for (const s of sources) { + for (const n of Object.keys(s.custom_fields)) { + customFieldNames.add(n); + } + } + + setCustomFields( + new Map( + Array.from(customFieldNames) + .sort() + .map((field) => { + return [ + field, + new ScrapeResult( + dest.custom_fields?.[field], + sources.find((s) => s.custom_fields?.[field])?.custom_fields?.[ + field + ], + dest.custom_fields?.[field] === undefined + ), + ]; + }) + ) + ); + + loadImages(); + }, [sources, dest]); + + const hasCustomFieldValues = useMemo(() => { + return hasScrapedValues(Array.from(customFields.values())); + }, [customFields]); + + // ensure this is updated if fields are changed + const hasValues = useMemo(() => { + return ( + hasCustomFieldValues || + hasScrapedValues([ + name, + disambiguation, + aliases, + birthdate, + deathDate, + ethnicity, + country, + hairColor, + eyeColor, + height, + weight, + penisLength, + measurements, + 
fakeTits, + careerStart, + careerEnd, + tattoos, + piercings, + urls, + gender, + circumcised, + details, + tags, + image, + ]) + ); + }, [ + name, + disambiguation, + aliases, + birthdate, + deathDate, + ethnicity, + country, + hairColor, + eyeColor, + height, + weight, + penisLength, + measurements, + fakeTits, + careerStart, + careerEnd, + tattoos, + piercings, + urls, + gender, + circumcised, + details, + tags, + image, + hasCustomFieldValues, + ]); + + function renderScrapeRows() { + if (loading) { + return ( +
    + +
    + ); + } + + if (!hasValues) { + return ( +
    + +
    + ); + } + + return ( + <> + setName(value)} + /> + setDisambiguation(value)} + /> + setAliases(value)} + /> + setBirthdate(value)} + /> + setDeathDate(value)} + /> + setEthnicity(value)} + /> + setCountry(value)} + /> + setHairColor(value)} + /> + setEyeColor(value)} + /> + setHeight(value)} + /> + setWeight(value)} + /> + setPenisLength(value)} + /> + setMeasurements(value)} + /> + setFakeTits(value)} + /> + setCareerStart(value)} + /> + setCareerEnd(value)} + /> + setTattoos(value)} + /> + setPiercings(value)} + /> + setURLs(value)} + /> + {renderScrapedGenderRow( + intl.formatMessage({ id: "gender" }), + gender, + (value) => setGender(value) + )} + {renderScrapedCircumcisedRow( + intl.formatMessage({ id: "circumcised" }), + circumcised, + (value) => setCircumcised(value) + )} + setTags(value)} + /> + setDetails(value)} + /> + + } + newField={} + onChange={(value) => setStashIDs(value)} + alwaysShow={ + !!stashIDs.originalValue?.length || !!stashIDs.newValue?.length + } + /> + setImage(value)} + /> + {hasCustomFieldValues && ( + setCustomFields(newCustomFields)} + /> + )} + + ); + } + + function createValues(): MergeOptions { + // only set the cover image if it's different from the existing cover image + const coverImage = image.useNewValue ? image.getNewValue() : undefined; + + return { + values: { + id: dest.id, + name: name.getNewValue(), + disambiguation: disambiguation.getNewValue(), + alias_list: aliases + .getNewValue() + ?.map((s) => s.trim()) + .filter((s) => s.length > 0), + birthdate: birthdate.getNewValue(), + death_date: deathDate.getNewValue(), + ethnicity: ethnicity.getNewValue(), + country: country.getNewValue(), + hair_color: hairColor.getNewValue(), + eye_color: eyeColor.getNewValue(), + height_cm: height.getNewValue() + ? parseFloat(height.getNewValue()!) + : undefined, + weight: weight.getNewValue() + ? parseFloat(weight.getNewValue()!) + : undefined, + penis_length: penisLength.getNewValue() + ? parseFloat(penisLength.getNewValue()!) 
+ : undefined, + measurements: measurements.getNewValue(), + fake_tits: fakeTits.getNewValue(), + career_start: careerStart.getNewValue(), + career_end: careerEnd.getNewValue(), + tattoos: tattoos.getNewValue(), + piercings: piercings.getNewValue(), + urls: urls.getNewValue(), + gender: stringToGender(gender.getNewValue()), + circumcised: stringToCircumcised(circumcised.getNewValue()), + tag_ids: tags.getNewValue()?.map((t) => t.stored_id!), + details: details.getNewValue(), + stash_ids: stashIDs.getNewValue(), + image: coverImage, + custom_fields: { + partial: Object.fromEntries( + Array.from(customFields.entries()).flatMap(([field, v]) => + v.useNewValue ? [[field, v.getNewValue()]] : [] + ) + ), + }, + }, + }; + } + + const dialogTitle = intl.formatMessage({ + id: "actions.merge", + }); + + const destinationLabel = !hasValues + ? "" + : intl.formatMessage({ id: "dialogs.merge.destination" }); + const sourceLabel = !hasValues + ? "" + : intl.formatMessage({ id: "dialogs.merge.combined" }); + + return ( + { + if (!apply) { + onClose(); + } else { + onClose(createValues()); + } + }} + > + {renderScrapeRows()} + + ); +}; + +interface IPerformerMergeModalProps { + show: boolean; + onClose: (mergedId?: string) => void; + performers: GQL.SelectPerformerDataFragment[]; +} + +export const PerformerMergeModal: React.FC = ({ + show, + onClose, + performers, +}) => { + const [sourcePerformers, setSourcePerformers] = useState< + GQL.SelectPerformerDataFragment[] + >([]); + const [destPerformer, setDestPerformer] = useState< + GQL.SelectPerformerDataFragment[] + >([]); + + const [loadedSources, setLoadedSources] = useState< + GQL.PerformerDataFragment[] + >([]); + const [loadedDest, setLoadedDest] = useState(); + + const [running, setRunning] = useState(false); + const [secondStep, setSecondStep] = useState(false); + + const intl = useIntl(); + const Toast = useToast(); + + const title = intl.formatMessage({ + id: "actions.merge", + }); + + const srcIDs = useMemo( + () => 
sourcePerformers.map((s) => s.id), + [sourcePerformers] + ); + const destID = useMemo( + () => (destPerformer[0] ? [destPerformer[0].id] : []), + [destPerformer] + ); + + useEffect(() => { + if (performers.length > 0) { + // set the first performer as the destination, others as source + setDestPerformer([performers[0]]); + + if (performers.length > 1) { + setSourcePerformers(performers.slice(1)); + } + } + }, [performers]); + + async function loadPerformers() { + const performerIDs = sourcePerformers.map((s) => parseInt(s.id)); + performerIDs.push(parseInt(destPerformer[0].id)); + const query = await queryFindPerformersByID(performerIDs); + const { performers: loadedPerformers } = query.data.findPerformers; + + setLoadedDest(loadedPerformers.find((s) => s.id === destPerformer[0].id)); + setLoadedSources( + loadedPerformers.filter((s) => s.id !== destPerformer[0].id) + ); + setSecondStep(true); + } + + async function onMerge(options: MergeOptions) { + const { values } = options; + try { + setRunning(true); + const result = await mutatePerformerMerge( + destPerformer[0].id, + sourcePerformers.map((s) => s.id), + values + ); + if (result.data?.performerMerge) { + Toast.success(intl.formatMessage({ id: "toast.merged_performers" })); + onClose(destPerformer[0].id); + } + onClose(); + } catch (e) { + Toast.error(e); + } finally { + setRunning(false); + } + } + + function canMerge() { + return sourcePerformers.length > 0 && destPerformer.length !== 0; + } + + function switchPerformers() { + if (sourcePerformers.length && destPerformer.length) { + const newDest = sourcePerformers[0]; + setSourcePerformers([...sourcePerformers.slice(1), destPerformer[0]]); + setDestPerformer([newDest]); + } + } + + if (secondStep && destPerformer.length > 0) { + return ( + { + setSecondStep(false); + if (values) { + onMerge(values); + } else { + onClose(); + } + }} + /> + ); + } + + return ( + loadPerformers(), + }} + disabled={!canMerge()} + cancel={{ + variant: "secondary", + onClick: () 
=> onClose(), + }} + isRunning={running} + > +
    +
    + + {FormUtils.renderLabel({ + title: intl.formatMessage({ id: "dialogs.merge.source" }), + labelProps: { + column: true, + sm: 3, + xl: 12, + }, + })} + + setSourcePerformers(items)} + values={sourcePerformers} + menuPortalTarget={document.body} + excludeIds={destID} + /> + + + + + + + {FormUtils.renderLabel({ + title: intl.formatMessage({ + id: "dialogs.merge.destination", + }), + labelProps: { + column: true, + sm: 3, + xl: 12, + }, + })} + + setDestPerformer(items)} + values={destPerformer} + menuPortalTarget={document.body} + excludeIds={srcIDs} + /> + + +
    +
    +
    + ); +}; diff --git a/ui/v2.5/src/components/Performers/PerformerPopover.tsx b/ui/v2.5/src/components/Performers/PerformerPopover.tsx index fa5b60e69..04cbf8e87 100644 --- a/ui/v2.5/src/components/Performers/PerformerPopover.tsx +++ b/ui/v2.5/src/components/Performers/PerformerPopover.tsx @@ -4,7 +4,7 @@ import { LoadingIndicator } from "../Shared/LoadingIndicator"; import { HoverPopover } from "../Shared/HoverPopover"; import { useFindPerformer } from "../../core/StashService"; import { PerformerCard } from "./PerformerCard"; -import { ConfigurationContext } from "../../hooks/Config"; +import { useConfigurationContext } from "../../hooks/Config"; import { Placement } from "react-bootstrap/esm/Overlay"; interface IPeromerPopoverCardProps { @@ -49,7 +49,7 @@ export const PerformerPopover: React.FC = ({ placement = "top", target, }) => { - const { configuration: config } = React.useContext(ConfigurationContext); + const { configuration: config } = useConfigurationContext(); const showPerformerCardOnHover = config?.ui.showTagCardOnHover ?? 
true; diff --git a/ui/v2.5/src/components/Performers/PerformerRecommendationRow.tsx b/ui/v2.5/src/components/Performers/PerformerRecommendationRow.tsx index 3c094f7ad..e07c44947 100644 --- a/ui/v2.5/src/components/Performers/PerformerRecommendationRow.tsx +++ b/ui/v2.5/src/components/Performers/PerformerRecommendationRow.tsx @@ -1,12 +1,9 @@ import React from "react"; -import { Link } from "react-router-dom"; import { useFindPerformers } from "src/core/StashService"; -import Slider from "@ant-design/react-slick"; import { PerformerCard } from "./PerformerCard"; import { ListFilterModel } from "src/models/list-filter/filter"; -import { getSlickSliderSettings } from "src/core/recommendations"; -import { RecommendationRow } from "../FrontPage/RecommendationRow"; -import { FormattedMessage } from "react-intl"; +import { PatchComponent } from "src/patch"; +import { FilteredRecommendationRow } from "../FrontPage/FilteredRecommendationRow"; interface IProps { isTouch: boolean; @@ -14,29 +11,21 @@ interface IProps { header: string; } -export const PerformerRecommendationRow: React.FC = (props) => { - const result = useFindPerformers(props.filter); - const cardCount = result.data?.findPerformers.count; +export const PerformerRecommendationRow: React.FC = PatchComponent( + "PerformerRecommendationRow", + (props) => { + const result = useFindPerformers(props.filter); + const count = result.data?.findPerformers.count ?? 0; - if (!result.loading && !cardCount) { - return null; - } - - return ( - - - - } - > - {result.loading ? 
[...Array(props.filter.itemsPerPage)].map((i) => ( @@ -48,7 +37,7 @@ export const PerformerRecommendationRow: React.FC = (props) => { : result.data?.findPerformers.performers.map((p) => ( ))} - - - ); -}; + + ); + } +); diff --git a/ui/v2.5/src/components/Performers/PerformerSelect.tsx b/ui/v2.5/src/components/Performers/PerformerSelect.tsx index d31dc3ec7..fbb6fe785 100644 --- a/ui/v2.5/src/components/Performers/PerformerSelect.tsx +++ b/ui/v2.5/src/components/Performers/PerformerSelect.tsx @@ -1,4 +1,4 @@ -import React, { useEffect, useState } from "react"; +import React, { useEffect, useMemo, useState } from "react"; import { OptionProps, components as reactSelectComponents, @@ -13,7 +13,7 @@ import { queryFindPerformersByIDForSelect, queryFindPerformersForSelect, } from "src/core/StashService"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { useIntl } from "react-intl"; import { defaultMaxOptionsShown } from "src/core/config"; import { ListFilterModel } from "src/models/list-filter/filter"; @@ -23,6 +23,7 @@ import { IFilterProps, IFilterValueProps, Option as SelectOption, + toOption, } from "../Shared/FilterSelect"; import { useCompare } from "src/hooks/state"; import { Link } from "react-router-dom"; @@ -32,6 +33,8 @@ import { TruncatedText } from "../Shared/TruncatedText"; import TextUtils from "src/utils/text"; import { PerformerPopover } from "./PerformerPopover"; import { Placement } from "react-bootstrap/esm/Overlay"; +import { isUUID } from "src/utils/stashIds"; +import { filterByStashID } from "src/models/list-filter/utils"; export type SelectObject = { id: string; @@ -78,32 +81,55 @@ const _PerformerSelect: React.FC< ageFromDate?: string | null; hoverPlacementLabel?: Placement; hoverPlacementOptions?: Placement; + excludeIds?: string[]; } > = (props) => { const [createPerformer] = usePerformerCreate(); - const { configuration } = React.useContext(ConfigurationContext); + 
const { configuration } = useConfigurationContext(); const intl = useIntl(); const maxOptionsShown = configuration?.ui.maxOptionsShown ?? defaultMaxOptionsShown; const defaultCreatable = - !configuration?.interface.disableDropdownCreate.performer ?? true; + !configuration?.interface.disableDropdownCreate.performer; + + const exclude = useMemo(() => props.excludeIds ?? [], [props.excludeIds]); + + function filterExcluded(performer: Performer) { + // HACK - we should probably exclude these in the backend query, but + // this will do in the short-term + return !exclude.includes(performer.id.toString()); + } async function loadPerformers(input: string): Promise { const filter = new ListFilterModel(GQL.FilterMode.Performers); - filter.searchTerm = input; filter.currentPage = 1; filter.itemsPerPage = maxOptionsShown; filter.sortBy = "name"; filter.sortDirection = GQL.SortDirectionEnum.Asc; + + // If the input looks like a GUID, search for stash_id first and return match immediately + if (isUUID(input)) { + filterByStashID(filter, input); + + const query = await queryFindPerformersForSelect(filter); + const matches = + query.data.findPerformers.performers.filter(filterExcluded); + if (matches.length > 0) { + // Matches found, return them immediately. + return matches.map(toOption); + } + // If no stash_id matches found, continue with standard name/alias search. + filter.criteria = []; // Clear stash_id criterion to search by name/alias below. 
+ } + + filter.searchTerm = input; + const query = await queryFindPerformersForSelect(filter); return performerSelectSort( input, - query.data.findPerformers.performers.slice() - ).map((performer) => ({ - value: performer.id, - object: performer, - })); + query.data.findPerformers.performers.filter(filterExcluded) + ).map(toOption); } const PerformerOption: React.FC> = ( diff --git a/ui/v2.5/src/components/Performers/Performers.tsx b/ui/v2.5/src/components/Performers/Performers.tsx index d240ce988..7b6e32b8f 100644 --- a/ui/v2.5/src/components/Performers/Performers.tsx +++ b/ui/v2.5/src/components/Performers/Performers.tsx @@ -4,11 +4,11 @@ import { Helmet } from "react-helmet"; import { useTitleProps } from "src/hooks/title"; import Performer from "./PerformerDetails/Performer"; import PerformerCreate from "./PerformerDetails/PerformerCreate"; -import { PerformerList } from "./PerformerList"; +import { FilteredPerformerList } from "./PerformerList"; import { View } from "../List/views"; const Performers: React.FC = () => { - return ; + return ; }; const PerformerRoutes: React.FC = () => { diff --git a/ui/v2.5/src/components/Performers/styles.scss b/ui/v2.5/src/components/Performers/styles.scss index f76816f05..49dc27550 100644 --- a/ui/v2.5/src/components/Performers/styles.scss +++ b/ui/v2.5/src/components/Performers/styles.scss @@ -35,19 +35,41 @@ .rating-number .form-control { width: inherit; } + + // The following min-width declarations prevent + // the performer's O-Count from moving around + // when hovering over rating stars + .rating-stars-precision-full .star-rating-number { + min-width: 0.75rem; + } + + .rating-stars-precision-half .star-rating-number, + .rating-stars-precision-tenth .star-rating-number { + min-width: 1.45rem; + } + + .rating-stars-precision-quarter .star-rating-number { + min-width: 2rem; + } } .alias { font-weight: bold; } + .quality-group { + display: inline-flex; + margin-top: 0.25rem; + } + // the detail element ids are the same as 
field type name // which don't follow the correct convention /* stylelint-disable selector-class-pattern */ .collapsed { .detail-item.tattoos, .detail-item.piercings, - .detail-item.career_length, + .detail-item.career_start, + .detail-item.career_end, .detail-item.details, .detail-item.tags, .detail-item.stash_ids { @@ -60,11 +82,6 @@ font-weight: 700; padding-left: 0; } - - .custom-fields .detail-item-title, - .custom-fields .detail-item-value { - font-family: "Courier New", Courier, monospace; - } /* stylelint-enable selector-class-pattern */ } @@ -86,6 +103,10 @@ .thumbnail-section { position: relative; + + .instagram { + color: pink; + } } &-image { @@ -168,17 +189,21 @@ display: flex; } -.fa-mars { - color: #89cff0; -} +.gender-icon { + &[data-gender="FEMALE"], + &[data-gender="TRANSGENDER_FEMALE"] { + color: #f38cac; + } -.fa-venus { - color: #f38cac; -} + &[data-gender="MALE"], + &[data-gender="TRANSGENDER_MALE"] { + color: #89cff0; + } -.fa-transgender, -.fa-transgender-alt { - color: #c8a2c8; + &[data-gender="NON_BINARY"], + &[data-gender="INTERSEX"] { + color: #c8a2c8; + } } .performer-height .height-imperial, @@ -277,3 +302,11 @@ overflow-y: auto; padding-right: 1.5rem; } + +.performer-merge-dialog .custom-field { + // ensure we don't catch the destination/source labels + & > .form-label, + .form-control { + font-family: "Courier New", Courier, monospace; + } +} diff --git a/ui/v2.5/src/components/SceneDuplicateChecker/SceneDuplicateChecker.tsx b/ui/v2.5/src/components/SceneDuplicateChecker/SceneDuplicateChecker.tsx index 2d8114935..d396a01f4 100644 --- a/ui/v2.5/src/components/SceneDuplicateChecker/SceneDuplicateChecker.tsx +++ b/ui/v2.5/src/components/SceneDuplicateChecker/SceneDuplicateChecker.tsx @@ -79,7 +79,24 @@ export const SceneDuplicateChecker: React.FC = () => { }, }); - const scenes = data?.findDuplicateScenes ?? 
[]; + const getGroupTotalSize = (group: GQL.SlimSceneDataFragment[]) => { + // Sum all file sizes across all scenes in the group + return group.reduce((groupTotal, scene) => { + const sceneTotal = scene.files.reduce( + (fileTotal, file) => fileTotal + file.size, + 0 + ); + return groupTotal + sceneTotal; + }, 0); + }; + + const scenes = useMemo(() => { + const groups = data?.findDuplicateScenes ?? []; + // Sort by total file size descending (largest groups first) + return [...groups].sort((a, b) => { + return getGroupTotalSize(b) - getGroupTotalSize(a); + }); + }, [data?.findDuplicateScenes]); const { data: missingPhash } = GQL.useFindScenesQuery({ variables: { diff --git a/ui/v2.5/src/components/ScenePlayer/ScenePlayer.tsx b/ui/v2.5/src/components/ScenePlayer/ScenePlayer.tsx index 5749f6331..36df653ba 100644 --- a/ui/v2.5/src/components/ScenePlayer/ScenePlayer.tsx +++ b/ui/v2.5/src/components/ScenePlayer/ScenePlayer.tsx @@ -1,7 +1,6 @@ import React, { KeyboardEvent, useCallback, - useContext, useEffect, useMemo, useRef, @@ -17,21 +16,25 @@ import "./live"; import "./PlaylistButtons"; import "./source-selector"; import "./persist-volume"; +import "./autostart-button"; import MarkersPlugin, { type IMarker } from "./markers"; void MarkersPlugin; import "./vtt-thumbnails"; import "./big-buttons"; import "./track-activity"; import "./vrmode"; +import "./media-session"; +import "./wake-sentinel"; import cx from "classnames"; import { useSceneSaveActivity, useSceneIncrementPlayCount, + useConfigureInterface, } from "src/core/StashService"; import * as GQL from "src/core/generated-graphql"; import { ScenePlayerScrubber } from "./ScenePlayerScrubber"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { ConnectionState, InteractiveContext, @@ -120,6 +123,22 @@ function handleHotkeys(player: VideoJsPlayer, event: videojs.KeyboardEvent) { return; } + const skipButtons = player.skipButtons(); + if 
(skipButtons) { + // handle multimedia keys + switch (event.key) { + case "MediaTrackNext": + if (!skipButtons.onNext) return; + skipButtons.onNext(); + break; + case "MediaTrackPrevious": + if (!skipButtons.onPrevious) return; + skipButtons.onPrevious(); + break; + // MediaPlayPause handled by videojs + } + } + switch (event.which) { case 32: // space case 13: // enter @@ -224,7 +243,7 @@ export const ScenePlayer: React.FC = PatchComponent( onNext, onPrevious, }) => { - const { configuration } = useContext(ConfigurationContext); + const { configuration } = useConfigurationContext(); const interfaceConfig = configuration?.interface; const uiConfig = configuration?.ui; const videoRef = useRef(null); @@ -232,6 +251,7 @@ export const ScenePlayer: React.FC = PatchComponent( const sceneId = useRef(); const [sceneSaveActivity] = useSceneSaveActivity(); const [sceneIncrementPlayCount] = useSceneIncrementPlayCount(); + const [updateInterfaceConfig] = useConfigureInterface(); const [time, setTime] = useState(0); const [ready, setReady] = useState(false); @@ -344,7 +364,7 @@ export const ScenePlayer: React.FC = PatchComponent( }, nativeControlsForTouch: false, playbackRates: [0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2], - inactivityTimeout: 2000, + inactivityTimeout: 700, preload: "none", playsinline: true, techOrder: ["chromecast", "html5"], @@ -354,7 +374,9 @@ export const ScenePlayer: React.FC = PatchComponent( }, }, plugins: { - airPlay: {}, + airPlay: { + addButtonToControlBar: uiConfig?.enableChromecast ?? false, + }, chromecast: {}, vttThumbnails: { showTimestamp: true, @@ -370,6 +392,9 @@ export const ScenePlayer: React.FC = PatchComponent( skipButtons: {}, trackActivity: {}, vrMenu: {}, + autostartButton: { + enabled: interfaceConfig?.autostartVideo ?? false, + }, abLoopPlugin: { start: 0, end: false, @@ -380,6 +405,8 @@ export const ScenePlayer: React.FC = PatchComponent( pauseBeforeLooping: false, createButtons: uiConfig?.showAbLoopControls ?? 
false, }, + mediaSession: {}, + wakeSentinel: {}, }, }; @@ -413,7 +440,10 @@ export const ScenePlayer: React.FC = PatchComponent( }; // empty deps - only init once // showAbLoopControls is necessary to re-init the player when the config changes - }, [uiConfig?.showAbLoopControls]); + // Note: interfaceConfig?.autostartVideo is intentionally excluded to prevent + // player re-initialization when toggling autostart (which would interrupt playback) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [uiConfig?.showAbLoopControls, uiConfig?.enableChromecast]); useEffect(() => { const player = getPlayer(); @@ -654,11 +684,6 @@ export const ScenePlayer: React.FC = PatchComponent( } } - auto.current = - autoplay || - (interfaceConfig?.autostartVideo ?? false) || - _initialTimestamp > 0; - const alwaysStartFromBeginning = uiConfig?.alwaysStartFromBeginning ?? false; const resumeTime = scene.resume_time ?? 0; @@ -677,6 +702,15 @@ export const ScenePlayer: React.FC = PatchComponent( player.load(); player.focus(); + // Check the autostart button plugin for user preference + const autostartButton = player.autostartButton(); + const buttonEnabled = autostartButton.getEnabled(); + auto.current = + autoplay || + buttonEnabled || + (interfaceConfig?.autostartVideo ?? false) || + _initialTimestamp > 0; + player.ready(() => { player.vttThumbnails().src(scene.paths.vtt ?? null); @@ -820,6 +854,30 @@ export const ScenePlayer: React.FC = PatchComponent( sceneSaveActivity, ]); + // Sync autostart button with config changes + useEffect(() => { + const player = getPlayer(); + if (!player) return; + + async function updateAutoStart(enabled: boolean) { + await updateInterfaceConfig({ + variables: { + input: { + autostartVideo: enabled, + }, + }, + }); + } + + const autostartButton = player.autostartButton(); + if (autostartButton) { + autostartButton.syncWithConfig( + interfaceConfig?.autostartVideo ?? 
false + ); + autostartButton.updateAutoStart = updateAutoStart; + } + }, [getPlayer, updateInterfaceConfig, interfaceConfig?.autostartVideo]); + useEffect(() => { const player = getPlayer(); if (!player) return; @@ -857,15 +915,40 @@ export const ScenePlayer: React.FC = PatchComponent( return () => player.off("ended"); }, [getPlayer, onComplete]); + // set up mediaSession plugin + useEffect(() => { + const player = getPlayer(); + if (!player) return; + + // set up mediasession plugin + // get performer names as array + const performers = scene?.performers.map((p) => p.name).join(", "); + player + .mediaSession() + .setMetadata( + scene?.title ?? "Stash", + scene?.studio?.name ?? performers ?? "Stash", + scene.paths.screenshot || "" + ); + }, [getPlayer, scene]); + + const pausedBeforeScrubber = useRef(true); + function onScrubberScroll() { - if (started.current) { - getPlayer()?.pause(); + const player = getPlayer(); + if (started.current && player) { + pausedBeforeScrubber.current = player.paused(); + player.pause(); } } function onScrubberSeek(seconds: number) { - if (started.current) { - getPlayer()?.currentTime(seconds); + const player = getPlayer(); + if (started.current && player) { + player.currentTime(seconds); + if (!pausedBeforeScrubber.current) { + player.play(); + } } else { setTime(seconds); } diff --git a/ui/v2.5/src/components/ScenePlayer/ScenePlayerScrubber.tsx b/ui/v2.5/src/components/ScenePlayer/ScenePlayerScrubber.tsx index 93e45a7e7..8fb3fed67 100644 --- a/ui/v2.5/src/components/ScenePlayer/ScenePlayerScrubber.tsx +++ b/ui/v2.5/src/components/ScenePlayer/ScenePlayerScrubber.tsx @@ -28,6 +28,10 @@ interface ISceneSpriteItem { time: string; } +const scrubberViewportHeight = 120; +const scrubberTagsHeight = 30; +const scrubberSpriteHeight = scrubberViewportHeight - scrubberTagsHeight; + export const ScenePlayerScrubber: React.FC = ({ file, scene, @@ -86,16 +90,36 @@ export const ScenePlayerScrubber: React.FC = ({ const [spriteItems, setSpriteItems] 
= useState(); useEffect(() => { - if (!spriteInfo) return; + if (!spriteInfo || spriteInfo.length === 0) return; let totalWidth = 0; + + // calculate total width/height of scrubber image so we can scale it + const maxX = Math.max(...spriteInfo.map((sprite) => sprite.x + sprite.w)); + const maxY = Math.max(...spriteInfo.map((sprite) => sprite.y + sprite.h)); + const spriteWidth = spriteInfo[0].w; + const spriteHeight = spriteInfo[0].h; + const scale = scrubberSpriteHeight / spriteHeight; + + const w = spriteWidth * scale; + const h = scrubberSpriteHeight; + + const sizeX = maxX * scale; + const sizeY = maxY * scale; + + // scale sprite dimensions to fit scrubber height, and calculate background position for each sprite const newSprites = spriteInfo?.map((sprite, index) => { - totalWidth += sprite.w; - const left = sprite.w * index; + totalWidth += w; + const left = w * index; + + const spriteX = sprite.x * scale; + const spriteY = sprite.y * scale; + const style = { - width: `${sprite.w}px`, - height: `${sprite.h}px`, - backgroundPosition: `${-sprite.x}px ${-sprite.y}px`, + width: `${w}px`, + height: `${h}px`, + backgroundPosition: `${-spriteX}px ${-spriteY}px`, backgroundImage: `url(${sprite.url})`, + backgroundSize: `${sizeX}px ${sizeY}px`, left: `${left}px`, }; const start = TextUtils.secondsToTimestamp(sprite.start); @@ -325,9 +349,10 @@ export const ScenePlayerScrubber: React.FC = ({
    diff --git a/ui/v2.5/src/components/ScenePlayer/autostart-button.ts b/ui/v2.5/src/components/ScenePlayer/autostart-button.ts new file mode 100644 index 000000000..f5a35a63f --- /dev/null +++ b/ui/v2.5/src/components/ScenePlayer/autostart-button.ts @@ -0,0 +1,126 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import videojs, { VideoJsPlayer } from "video.js"; + +interface IAutostartButtonOptions { + enabled?: boolean; +} + +interface AutostartButtonOptions extends videojs.ComponentOptions { + autostartEnabled: boolean; +} + +class AutostartButton extends videojs.getComponent("Button") { + private autostartEnabled: boolean; + + constructor(player: VideoJsPlayer, options: AutostartButtonOptions) { + super(player, options); + this.autostartEnabled = options.autostartEnabled; + this.updateIcon(); + } + + buildCSSClass() { + return `vjs-autostart-button ${super.buildCSSClass()}`; + } + + private updateIcon() { + this.removeClass("vjs-icon-play-circle"); + this.removeClass("vjs-icon-cancel"); + + if (this.autostartEnabled) { + this.addClass("vjs-icon-play-circle"); + this.controlText(this.localize("Auto-start enabled (click to disable)")); + } else { + this.addClass("vjs-icon-cancel"); + this.controlText(this.localize("Auto-start disabled (click to enable)")); + } + } + + handleClick(event: Event) { + // Prevent the click from bubbling up and affecting the video player + event.stopPropagation(); + + this.autostartEnabled = !this.autostartEnabled; + this.updateIcon(); + this.trigger("autostartchanged", { enabled: this.autostartEnabled }); + } + + public setEnabled(enabled: boolean) { + this.autostartEnabled = enabled; + this.updateIcon(); + } +} + +class AutostartButtonPlugin extends videojs.getPlugin("plugin") { + private button: AutostartButton; + private autostartEnabled: boolean; + updateAutoStart: (enabled: boolean) => Promise = () => { + return Promise.resolve(); + }; + + constructor(player: VideoJsPlayer, options?: IAutostartButtonOptions) { + 
super(player, options); + + this.autostartEnabled = options?.enabled ?? false; + + this.button = new AutostartButton(player, { + autostartEnabled: this.autostartEnabled, + }); + + player.ready(() => { + this.ready(); + }); + } + + private ready() { + // Add button to control bar, before the fullscreen button + const { controlBar } = this.player; + const fullscreenToggle = controlBar.getChild("fullscreenToggle"); + if (fullscreenToggle) { + controlBar.addChild(this.button); + controlBar.el().insertBefore(this.button.el(), fullscreenToggle.el()); + } else { + controlBar.addChild(this.button); + } + + // Listen for changes + this.button.on("autostartchanged", (_, data: { enabled: boolean }) => { + this.autostartEnabled = data.enabled; + this.updateAutoStart(this.autostartEnabled); + }); + } + + public isEnabled(): boolean { + return this.autostartEnabled; + } + + public getEnabled(): boolean { + return this.autostartEnabled; + } + + public setEnabled(enabled: boolean) { + this.autostartEnabled = enabled; + this.button.setEnabled(enabled); + } + + public syncWithConfig(configEnabled: boolean) { + // Sync button state with external config changes + if (this.autostartEnabled !== configEnabled) { + this.setEnabled(configEnabled); + } + } +} + +// Register the plugin with video.js. 
+videojs.registerComponent("AutostartButton", AutostartButton); +videojs.registerPlugin("autostartButton", AutostartButtonPlugin); + +declare module "video.js" { + interface VideoJsPlayer { + autostartButton: () => AutostartButtonPlugin; + } + interface VideoJsPlayerPluginOptions { + autostartButton?: IAutostartButtonOptions; + } +} + +export default AutostartButtonPlugin; diff --git a/ui/v2.5/src/components/ScenePlayer/markers.ts b/ui/v2.5/src/components/ScenePlayer/markers.ts index 83a695c1b..55e0c7cd2 100644 --- a/ui/v2.5/src/components/ScenePlayer/markers.ts +++ b/ui/v2.5/src/components/ScenePlayer/markers.ts @@ -5,6 +5,7 @@ export interface IMarker { title: string; seconds: number; end_seconds?: number | null; + primaryTag: { name: string }; } interface IMarkersOptions { @@ -85,8 +86,13 @@ class MarkersPlugin extends videojs.getPlugin("plugin") { markerSet.dot.toggleAttribute("marker-tooltip-shown", true); // Set background color based on tag (if available) - if (marker.title && this.tagColors[marker.title]) { - markerSet.dot.style.backgroundColor = this.tagColors[marker.title]; + if ( + marker.primaryTag && + marker.primaryTag.name && + this.tagColors[marker.primaryTag.name] + ) { + markerSet.dot.style.backgroundColor = + this.tagColors[marker.primaryTag.name]; } markerSet.dot.addEventListener("mouseenter", () => { this.showMarkerTooltip(marker.title); @@ -136,24 +142,33 @@ class MarkersPlugin extends videojs.getPlugin("plugin") { const rangeDiv = videojs.dom.createEl("div") as HTMLDivElement; rangeDiv.className = "vjs-marker-range"; - // start/end percent is relative to the parent element, which is the vjs-progress-control - // vjs-progress-control has 15px margins on each side - const left = seekBar.clientWidth * (marker.seconds / duration) + 15; + // Use percentage-based positioning for proper scaling in fullscreen mode + // The range marker is inside vjs-progress-control, but needs to align with + // vjs-progress-holder which has 15px margins on each 
side. + // We use calc() to combine percentage positioning with the fixed margin offset. + const startPercent = (marker.seconds / duration) * 100; + const widthPercent = + ((marker.end_seconds - marker.seconds) / duration) * 100; - // minimum width of 8px - const width = Math.max( - seekBar.clientWidth * ((marker.end_seconds - marker.seconds) / duration), - 8 - ); + // left: 15px margin + percentage of the progress holder width + // Since progress-holder has margin: 0 15px, we need calc(15px + X% of remaining width) + // The progress-holder width is (100% - 30px), so the actual left position is: + // 15px + startPercent% * (100% - 30px) = 15px + startPercent% * 100% - startPercent% * 30px + rangeDiv.style.left = `calc(15px + ${startPercent}% - ${ + startPercent * 0.3 + }px)`; - rangeDiv.style.left = `${left}px`; - rangeDiv.style.width = `${width}px`; + rangeDiv.style.width = `calc(${widthPercent}% - ${widthPercent * 0.3}px)`; rangeDiv.style.bottom = `${layer * this.layerHeight}px`; // Adjust height based on layer rangeDiv.style.display = "none"; // Initially hidden // Set background color based on tag (if available) - if (marker.title && this.tagColors[marker.title]) { - rangeDiv.style.backgroundColor = this.tagColors[marker.title]; + if ( + marker.primaryTag && + marker.primaryTag.name && + this.tagColors[marker.primaryTag.name] + ) { + rangeDiv.style.backgroundColor = this.tagColors[marker.primaryTag.name]; } markerSet.range = rangeDiv; diff --git a/ui/v2.5/src/components/ScenePlayer/media-session.ts b/ui/v2.5/src/components/ScenePlayer/media-session.ts new file mode 100644 index 000000000..b3ce2d0ea --- /dev/null +++ b/ui/v2.5/src/components/ScenePlayer/media-session.ts @@ -0,0 +1,71 @@ +import videojs, { VideoJsPlayer } from "video.js"; + +class MediaSessionPlugin extends videojs.getPlugin("plugin") { + constructor(player: VideoJsPlayer) { + super(player); + + player.ready(() => { + player.addClass("vjs-media-session"); + this.setActionHandlers(); + }); + + 
player.on("play", () => { + this.updatePlaybackState(); + }); + + player.on("pause", () => { + this.updatePlaybackState(); + }); + this.updatePlaybackState(); + } + + // manually set poster since it's only set on useEffect + public setMetadata(title: string, artist: string, poster: string): void { + if ("mediaSession" in navigator) { + navigator.mediaSession.metadata = new MediaMetadata({ + title, + artist, + artwork: [ + { + src: poster || this.player.poster() || "", + type: "image/jpeg", + }, + ], + }); + } + } + + private updatePlaybackState(): void { + if ("mediaSession" in navigator) { + const playbackState = this.player.paused() ? "paused" : "playing"; + navigator.mediaSession.playbackState = playbackState; + } + } + + private setActionHandlers(): void { + // method initialization + navigator.mediaSession.setActionHandler("play", () => { + this.player.play(); + }); + navigator.mediaSession.setActionHandler("pause", () => { + this.player.pause(); + }); + navigator.mediaSession.setActionHandler("nexttrack", () => { + this.player.skipButtons()?.handleForward(); + }); + navigator.mediaSession.setActionHandler("previoustrack", () => { + this.player.skipButtons()?.handleBackward(); + }); + } +} + +videojs.registerPlugin("mediaSession", MediaSessionPlugin); + +/* eslint-disable @typescript-eslint/naming-convention */ +declare module "video.js" { + interface VideoJsPlayer { + mediaSession: () => MediaSessionPlugin; + } +} + +export default MediaSessionPlugin; diff --git a/ui/v2.5/src/components/ScenePlayer/styles.scss b/ui/v2.5/src/components/ScenePlayer/styles.scss index 95b1df8c7..fc143a873 100644 --- a/ui/v2.5/src/components/ScenePlayer/styles.scss +++ b/ui/v2.5/src/components/ScenePlayer/styles.scss @@ -100,6 +100,57 @@ $sceneTabWidth: 450px; width: 1.6em; } + .vjs-autostart-button { + cursor: pointer; + + &.vjs-icon-play-circle::before { + align-items: center; + background-color: rgba(255, 255, 255, 0.9); + border-radius: 50%; + color: rgba(80, 80, 80, 0.9); + 
content: "\f101"; + font-size: 1em; + line-height: 1; + margin-left: 1rem; + padding: 0.3em; + position: relative; + z-index: 2; + } + + &.vjs-icon-cancel::before { + align-items: center; + background-color: rgba(80, 80, 80, 0.9); + border-radius: 50%; + color: #fff; + content: "\f103"; + font-size: 1em; + line-height: 1; + margin-right: 1rem; + padding: 0.3em; + position: relative; + z-index: 2; + } + + &.vjs-icon-play-circle::after, + &.vjs-icon-cancel::after { + background-color: rgb(255 255 255 / 70%); + border-radius: 8px; + content: ""; + height: 2.5rem; + left: 50%; + opacity: 0.7; + position: absolute; + top: 50%; + transform: translate(-50%, -50%) rotate(90deg); + width: 1rem; + z-index: 1; + } + + &:hover { + text-shadow: 0 0 1em rgba(255, 255, 255, 0.75); + } + } + .vjs-touch-overlay .vjs-play-control { z-index: 1; } @@ -271,6 +322,7 @@ $sceneTabWidth: 450px; border-radius: 2px; box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1); height: 8px; + min-width: 8px; position: absolute; transform: translateY(-28px); transition: none; @@ -343,9 +395,16 @@ $sceneTabWidth: 450px; } } } + @media (max-width: 576px) { + .vjs-control-bar { + .vjs-autostart-button { + display: none; + } + } + } // make controls a little more compact on smaller screens - @media (max-width: 576px) { + @media (max-width: 768px) { .vjs-control-bar { .vjs-control { width: 2.5em; diff --git a/ui/v2.5/src/components/ScenePlayer/track-activity.ts b/ui/v2.5/src/components/ScenePlayer/track-activity.ts index a3846dc87..4e7c1ce76 100644 --- a/ui/v2.5/src/components/ScenePlayer/track-activity.ts +++ b/ui/v2.5/src/components/ScenePlayer/track-activity.ts @@ -44,6 +44,10 @@ class TrackActivityPlugin extends videojs.getPlugin("plugin") { player.on("dispose", () => { this.stop(); }); + + player.on("ended", () => { + this.stop(); + }); } private start() { diff --git a/ui/v2.5/src/components/ScenePlayer/util.ts b/ui/v2.5/src/components/ScenePlayer/util.ts index a63ab6a2e..21ed99b62 100644 --- 
a/ui/v2.5/src/components/ScenePlayer/util.ts +++ b/ui/v2.5/src/components/ScenePlayer/util.ts @@ -1,6 +1,27 @@ -import videojs from "video.js"; +import videojs, { VideoJsPlayer } from "video.js"; export const VIDEO_PLAYER_ID = "VideoJsPlayer"; -export const getPlayerPosition = () => - videojs.getPlayer(VIDEO_PLAYER_ID)?.currentTime(); +export const getPlayer = () => videojs.getPlayer(VIDEO_PLAYER_ID); + +export const getPlayerPosition = () => getPlayer()?.currentTime(); + +export type AbLoopOptions = { + start: number; + end: number | false; + enabled?: boolean; +}; + +export type AbLoopPluginApi = { + getOptions: () => AbLoopOptions; + setOptions: (options: AbLoopOptions) => void; +}; + +export const getAbLoopPlugin = () => { + const player = getPlayer(); + if (!player) return null; + const { abLoopPlugin } = player as VideoJsPlayer & { + abLoopPlugin?: AbLoopPluginApi; + }; + return abLoopPlugin ?? null; +}; diff --git a/ui/v2.5/src/components/ScenePlayer/wake-sentinel.ts b/ui/v2.5/src/components/ScenePlayer/wake-sentinel.ts new file mode 100644 index 000000000..a51c050f5 --- /dev/null +++ b/ui/v2.5/src/components/ScenePlayer/wake-sentinel.ts @@ -0,0 +1,65 @@ +import videojs, { VideoJsPlayer } from "video.js"; + +class WakeSentinelPlugin extends videojs.getPlugin("plugin") { + public wakeLock: WakeLockSentinel | null = null; + public wakeLockFail: boolean = false; + constructor(player: VideoJsPlayer) { + super(player); + + // listen for visibility change events + document.addEventListener("visibilitychange", async () => { + if (document.visibilityState === "visible") { + // reacquire the wake lock when the page becomes visible + await this.acquireWakeLock(); + } + }); + + // acquire wake lock on ready and play + player.ready(async () => { + player.addClass("vjs-wake-sentinel"); + await this.acquireWakeLock(true); + }); + player.on("play", () => this.acquireWakeLock()); + + // release wake lock on pause, dispose and end + player.on("pause", () => 
this.releaseWakeLock()); + player.on("dispose", () => this.releaseWakeLock()); + player.on("ended", () => this.releaseWakeLock()); + } + + private async releaseWakeLock(): Promise { + this.wakeLock?.release().then(() => (this.wakeLock = null)); + } + + private async acquireWakeLock(log = false): Promise { + // if wake lock failed, don't even try + if (this.wakeLockFail) return; + // check for wake lock on startup + if ("wakeLock" in navigator) { + try { + this.wakeLock = await navigator.wakeLock.request("screen"); + } catch (err) { + if (log) console.error("Failed to obtain Screen Wake Lock:", err); + this.wakeLockFail = true; + } + } else { + if (log) { + console.warn( + "Screen Wake Lock API not supported. Secure context (https or localhost) and modern browser required." + ); + } + this.wakeLockFail = true; + } + } +} + +videojs.registerPlugin("wakeSentinel", WakeSentinelPlugin); + +/* eslint-disable @typescript-eslint/naming-convention */ +declare module "video.js" { + interface VideoJsPlayer { + wakeSentinel: () => WakeSentinelPlugin; + } +} + +export default WakeSentinelPlugin; diff --git a/ui/v2.5/src/components/Scenes/DeleteScenesDialog.tsx b/ui/v2.5/src/components/Scenes/DeleteScenesDialog.tsx index 88f133a80..56cbd69b0 100644 --- a/ui/v2.5/src/components/Scenes/DeleteScenesDialog.tsx +++ b/ui/v2.5/src/components/Scenes/DeleteScenesDialog.tsx @@ -4,7 +4,7 @@ import { useScenesDestroy } from "src/core/StashService"; import * as GQL from "src/core/generated-graphql"; import { ModalComponent } from "src/components/Shared/Modal"; import { useToast } from "src/hooks/Toast"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { FormattedMessage, useIntl } from "react-intl"; import { faTrashAlt } from "@fortawesome/free-solid-svg-icons"; import { objectPath } from "src/core/files"; @@ -34,7 +34,7 @@ export const DeleteScenesDialog: React.FC = ( { count: props.selected.length, singularEntity, 
pluralEntity } ); - const { configuration: config } = React.useContext(ConfigurationContext); + const { configuration: config } = useConfigurationContext(); const [deleteFile, setDeleteFile] = useState( config?.defaults.deleteFile ?? false @@ -94,6 +94,11 @@ export const DeleteScenesDialog: React.FC = ( } }); + const deleteTrashPath = config?.general.deleteTrashPath; + const deleteAlertId = deleteTrashPath + ? "dialogs.delete_alert_to_trash" + : "dialogs.delete_alert"; + return (

    @@ -103,7 +108,7 @@ export const DeleteScenesDialog: React.FC = ( singularEntity: intl.formatMessage({ id: "file" }), pluralEntity: intl.formatMessage({ id: "files" }), }} - id="dialogs.delete_alert" + id={deleteAlertId} />

      diff --git a/ui/v2.5/src/components/Scenes/EditSceneMarkersDialog.tsx b/ui/v2.5/src/components/Scenes/EditSceneMarkersDialog.tsx new file mode 100644 index 000000000..1856543f9 --- /dev/null +++ b/ui/v2.5/src/components/Scenes/EditSceneMarkersDialog.tsx @@ -0,0 +1,185 @@ +import React, { useEffect, useMemo, useState } from "react"; +import { Form } from "react-bootstrap"; +import { useIntl } from "react-intl"; +import { useBulkSceneMarkerUpdate } from "src/core/StashService"; +import * as GQL from "src/core/generated-graphql"; +import { ModalComponent } from "../Shared/Modal"; +import { useToast } from "src/hooks/Toast"; +import { MultiSet } from "../Shared/MultiSet"; +import { + getAggregateState, + getAggregateStateObject, +} from "src/utils/bulkUpdate"; +import { BulkUpdateFormGroup, BulkUpdateTextInput } from "../Shared/BulkUpdate"; +import { faPencilAlt } from "@fortawesome/free-solid-svg-icons"; +import { TagSelect } from "../Shared/Select"; + +interface IListOperationProps { + selected: GQL.SceneMarkerDataFragment[]; + onClose: (applied: boolean) => void; +} + +const scenemarkerFields = ["title"]; + +export const EditSceneMarkersDialog: React.FC = ( + props: IListOperationProps +) => { + const intl = useIntl(); + const Toast = useToast(); + + const [updateInput, setUpdateInput] = + useState({ + ids: props.selected.map((scenemarker) => { + return scenemarker.id; + }), + }); + + const [tagIds, setTagIds] = useState({ + mode: GQL.BulkUpdateIdMode.Add, + }); + + const unsetDisabled = props.selected.length < 2; + + const [updateSceneMarkers] = useBulkSceneMarkerUpdate(); + + // Network state + const [isUpdating, setIsUpdating] = useState(false); + + const aggregateState = useMemo(() => { + const updateState: Partial = {}; + const state = props.selected; + let updateTagIds: string[] = []; + let first = true; + + state.forEach((scenemarker: GQL.SceneMarkerDataFragment) => { + getAggregateStateObject( + updateState, + scenemarker, + scenemarkerFields, + first 
+ ); + + // sceneMarker data fragment doesn't have primary_tag_id, so handle separately + updateState.primary_tag_id = getAggregateState( + updateState.primary_tag_id, + scenemarker.primary_tag.id, + first + ); + + const thisTagIDs = (scenemarker.tags ?? []).map((p) => p.id).sort(); + + updateTagIds = getAggregateState(updateTagIds, thisTagIDs, first) ?? []; + + first = false; + }); + + return { state: updateState, tagIds: updateTagIds }; + }, [props.selected]); + + // update initial state from aggregate + useEffect(() => { + setUpdateInput((current) => ({ ...current, ...aggregateState.state })); + }, [aggregateState]); + + function setUpdateField(input: Partial) { + setUpdateInput((current) => ({ ...current, ...input })); + } + + function getSceneMarkerInput(): GQL.BulkSceneMarkerUpdateInput { + const sceneMarkerInput: GQL.BulkSceneMarkerUpdateInput = { + ...updateInput, + tag_ids: tagIds, + }; + + return sceneMarkerInput; + } + + async function onSave() { + setIsUpdating(true); + try { + await updateSceneMarkers({ + variables: { + input: getSceneMarkerInput(), + }, + }); + Toast.success( + intl.formatMessage( + { id: "toast.updated_entity" }, + { + entity: intl.formatMessage({ id: "markers" }).toLocaleLowerCase(), + } + ) + ); + props.onClose(true); + } catch (e) { + Toast.error(e); + } + setIsUpdating(false); + } + + function render() { + return ( + props.onClose(false), + text: intl.formatMessage({ id: "actions.cancel" }), + variant: "secondary", + }} + isRunning={isUpdating} + > + + + setUpdateField({ title: newValue })} + unsetDisabled={unsetDisabled} + /> + + + + setUpdateField({ primary_tag_id: t[0]?.id })} + ids={ + updateInput.primary_tag_id ? [updateInput.primary_tag_id] : [] + } + /> + + + + { + setTagIds((c) => ({ ...c, ids: itemIDs })); + }} + onSetMode={(newMode) => { + setTagIds((c) => ({ ...c, mode: newMode })); + }} + ids={tagIds.ids ?? []} + existingIds={aggregateState.tagIds ?? 
[]} + mode={tagIds.mode} + menuPortalTarget={document.body} + /> + + + + ); + } + + return render(); +}; diff --git a/ui/v2.5/src/components/Scenes/EditScenesDialog.tsx b/ui/v2.5/src/components/Scenes/EditScenesDialog.tsx index 7b69cf655..17466bfc9 100644 --- a/ui/v2.5/src/components/Scenes/EditScenesDialog.tsx +++ b/ui/v2.5/src/components/Scenes/EditScenesDialog.tsx @@ -1,93 +1,121 @@ -import React, { useEffect, useState } from "react"; -import { Form, Col, Row } from "react-bootstrap"; -import { FormattedMessage, useIntl } from "react-intl"; -import isEqual from "lodash-es/isEqual"; +import React, { useEffect, useMemo, useState } from "react"; +import { Form } from "react-bootstrap"; +import { useIntl } from "react-intl"; import { useBulkSceneUpdate } from "src/core/StashService"; import * as GQL from "src/core/generated-graphql"; import { StudioSelect } from "../Shared/Select"; import { ModalComponent } from "../Shared/Modal"; import { MultiSet } from "../Shared/MultiSet"; import { useToast } from "src/hooks/Toast"; -import * as FormUtils from "src/utils/form"; import { RatingSystem } from "../Shared/Rating/RatingSystem"; import { - getAggregateInputIDs, getAggregateInputValue, getAggregateGroupIds, getAggregatePerformerIds, - getAggregateRating, - getAggregateStudioId, + getAggregateStateObject, getAggregateTagIds, + getAggregateStudioId, } from "src/utils/bulkUpdate"; import { faPencilAlt } from "@fortawesome/free-solid-svg-icons"; +import { IndeterminateCheckbox } from "../Shared/IndeterminateCheckbox"; +import { BulkUpdateFormGroup, BulkUpdateTextInput } from "../Shared/BulkUpdate"; +import { BulkUpdateDateInput } from "../Shared/DateInput"; +import { getDateError } from "src/utils/yup"; interface IListOperationProps { selected: GQL.SlimSceneDataFragment[]; onClose: (applied: boolean) => void; } +const sceneFields = [ + "code", + "rating100", + "details", + "organized", + "director", + "date", +]; + export const EditScenesDialog: React.FC = ( props: 
IListOperationProps ) => { const intl = useIntl(); const Toast = useToast(); - const [rating100, setRating] = useState(); - const [studioId, setStudioId] = useState(); - const [performerMode, setPerformerMode] = - React.useState(GQL.BulkUpdateIdMode.Add); - const [performerIds, setPerformerIds] = useState(); - const [existingPerformerIds, setExistingPerformerIds] = useState(); - const [tagMode, setTagMode] = React.useState( - GQL.BulkUpdateIdMode.Add - ); - const [tagIds, setTagIds] = useState(); - const [existingTagIds, setExistingTagIds] = useState(); - const [groupMode, setGroupMode] = React.useState( - GQL.BulkUpdateIdMode.Add - ); - const [groupIds, setGroupIds] = useState(); - const [existingGroupIds, setExistingGroupIds] = useState(); - const [organized, setOrganized] = useState(); - const [updateScenes] = useBulkSceneUpdate(getSceneInput()); + const [updateInput, setUpdateInput] = useState({ + ids: props.selected.map((scene) => { + return scene.id; + }), + }); + + const [dateError, setDateError] = useState(); + + const [performerIds, setPerformerIds] = useState({ + mode: GQL.BulkUpdateIdMode.Add, + }); + const [tagIds, setTagIds] = useState({ + mode: GQL.BulkUpdateIdMode.Add, + }); + const [groupIds, setGroupIds] = useState({ + mode: GQL.BulkUpdateIdMode.Add, + }); + + const unsetDisabled = props.selected.length < 2; + + const [updateScenes] = useBulkSceneUpdate(); // Network state const [isUpdating, setIsUpdating] = useState(false); - const checkboxRef = React.createRef(); + const aggregateState = useMemo(() => { + const updateState: Partial = {}; + const state = props.selected; + updateState.studio_id = getAggregateStudioId(props.selected); + const updateTagIds = getAggregateTagIds(props.selected); + const updatePerformerIds = getAggregatePerformerIds(props.selected); + const updateGroupIds = getAggregateGroupIds(props.selected); + let first = true; + + state.forEach((scene: GQL.SlimSceneDataFragment) => { + getAggregateStateObject(updateState, scene, 
sceneFields, first); + first = false; + }); + + return { + state: updateState, + tagIds: updateTagIds, + performerIds: updatePerformerIds, + groupIds: updateGroupIds, + }; + }, [props.selected]); + + // update initial state from aggregate + useEffect(() => { + setUpdateInput((current) => ({ ...current, ...aggregateState.state })); + }, [aggregateState]); + + useEffect(() => { + setDateError(getDateError(updateInput.date ?? "", intl)); + }, [updateInput.date, intl]); + + function setUpdateField(input: Partial) { + setUpdateInput((current) => ({ ...current, ...input })); + } function getSceneInput(): GQL.BulkSceneUpdateInput { - // need to determine what we are actually setting on each scene - const aggregateRating = getAggregateRating(props.selected); - const aggregateStudioId = getAggregateStudioId(props.selected); - const aggregatePerformerIds = getAggregatePerformerIds(props.selected); - const aggregateTagIds = getAggregateTagIds(props.selected); - const aggregateGroupIds = getAggregateGroupIds(props.selected); - const sceneInput: GQL.BulkSceneUpdateInput = { - ids: props.selected.map((scene) => { - return scene.id; - }), + ...updateInput, + tag_ids: tagIds, + performer_ids: performerIds, + group_ids: groupIds, }; - sceneInput.rating100 = getAggregateInputValue(rating100, aggregateRating); - sceneInput.studio_id = getAggregateInputValue(studioId, aggregateStudioId); - - sceneInput.performer_ids = getAggregateInputIDs( - performerMode, - performerIds, - aggregatePerformerIds + // we don't have unset functionality for the rating star control + // so need to determine if we are setting a rating or not + sceneInput.rating100 = getAggregateInputValue( + updateInput.rating100, + aggregateState.state.rating100 ); - sceneInput.tag_ids = getAggregateInputIDs(tagMode, tagIds, aggregateTagIds); - sceneInput.group_ids = getAggregateInputIDs( - groupMode, - groupIds, - aggregateGroupIds - ); - - if (organized !== undefined) { - sceneInput.organized = organized; - } return 
sceneInput; } @@ -95,7 +123,7 @@ export const EditScenesDialog: React.FC = ( async function onSave() { setIsUpdating(true); try { - await updateScenes(); + await updateScenes({ variables: { input: getSceneInput() } }); Toast.success( intl.formatMessage( { id: "toast.updated_entity" }, @@ -109,145 +137,13 @@ export const EditScenesDialog: React.FC = ( setIsUpdating(false); } - useEffect(() => { - const state = props.selected; - let updateRating: number | undefined; - let updateStudioID: string | undefined; - let updatePerformerIds: string[] = []; - let updateTagIds: string[] = []; - let updateGroupIds: string[] = []; - let updateOrganized: boolean | undefined; - let first = true; - - state.forEach((scene: GQL.SlimSceneDataFragment) => { - const sceneRating = scene.rating100; - const sceneStudioID = scene?.studio?.id; - const scenePerformerIDs = (scene.performers ?? []) - .map((p) => p.id) - .sort(); - const sceneTagIDs = (scene.tags ?? []).map((p) => p.id).sort(); - const sceneGroupIDs = (scene.groups ?? []).map((m) => m.group.id).sort(); - - if (first) { - updateRating = sceneRating ?? 
undefined; - updateStudioID = sceneStudioID; - updatePerformerIds = scenePerformerIDs; - updateTagIds = sceneTagIDs; - updateGroupIds = sceneGroupIDs; - first = false; - updateOrganized = scene.organized; - } else { - if (sceneRating !== updateRating) { - updateRating = undefined; - } - if (sceneStudioID !== updateStudioID) { - updateStudioID = undefined; - } - if (!isEqual(scenePerformerIDs, updatePerformerIds)) { - updatePerformerIds = []; - } - if (!isEqual(sceneTagIDs, updateTagIds)) { - updateTagIds = []; - } - if (!isEqual(sceneGroupIDs, updateGroupIds)) { - updateGroupIds = []; - } - if (scene.organized !== updateOrganized) { - updateOrganized = undefined; - } - } - }); - - setRating(updateRating); - setStudioId(updateStudioID); - setExistingPerformerIds(updatePerformerIds); - setExistingTagIds(updateTagIds); - setExistingGroupIds(updateGroupIds); - setOrganized(updateOrganized); - }, [props.selected]); - - useEffect(() => { - if (checkboxRef.current) { - checkboxRef.current.indeterminate = organized === undefined; - } - }, [organized, checkboxRef]); - - function renderMultiSelect( - type: "performers" | "tags" | "groups", - ids: string[] | undefined - ) { - let mode = GQL.BulkUpdateIdMode.Add; - let existingIds: string[] | undefined = []; - switch (type) { - case "performers": - mode = performerMode; - existingIds = existingPerformerIds; - break; - case "tags": - mode = tagMode; - existingIds = existingTagIds; - break; - case "groups": - mode = groupMode; - existingIds = existingGroupIds; - break; - } - - return ( - { - switch (type) { - case "performers": - setPerformerIds(itemIDs); - break; - case "tags": - setTagIds(itemIDs); - break; - case "groups": - setGroupIds(itemIDs); - break; - } - }} - onSetMode={(newMode) => { - switch (type) { - case "performers": - setPerformerMode(newMode); - break; - case "tags": - setTagMode(newMode); - break; - case "groups": - setGroupMode(newMode); - break; - } - }} - ids={ids ?? []} - existingIds={existingIds ?? 
[]} - mode={mode} - menuPortalTarget={document.body} - /> - ); - } - - function cycleOrganized() { - if (organized) { - setOrganized(undefined); - } else if (organized === undefined) { - setOrganized(false); - } else { - setOrganized(true); - } - } - function render() { return ( = ( onClick: onSave, text: intl.formatMessage({ id: "actions.apply" }), }} + disabled={isUpdating || !!dateError} cancel={{ onClick: () => props.onClose(false), text: intl.formatMessage({ id: "actions.cancel" }), @@ -266,62 +163,121 @@ export const EditScenesDialog: React.FC = ( isRunning={isUpdating} >
      - - {FormUtils.renderLabel({ - title: intl.formatMessage({ id: "rating" }), - })} - - setRating(value ?? undefined)} - disabled={isUpdating} - /> - - - - {FormUtils.renderLabel({ - title: intl.formatMessage({ id: "studio" }), - })} - - - setStudioId(items.length > 0 ? items[0]?.id : undefined) - } - ids={studioId ? [studioId] : []} - isDisabled={isUpdating} - menuPortalTarget={document.body} - /> - - + + + setUpdateField({ rating100: value ?? undefined }) + } + disabled={isUpdating} + /> + - - - - - {renderMultiSelect("performers", performerIds)} - + + setUpdateField({ code: newValue })} + unsetDisabled={unsetDisabled} + /> + - - - - - {renderMultiSelect("tags", tagIds)} - + + setUpdateField({ date: newValue })} + unsetDisabled={unsetDisabled} + error={dateError} + /> + - - - - - {renderMultiSelect("groups", groupIds)} - + + + setUpdateField({ director: newValue }) + } + unsetDisabled={unsetDisabled} + /> + + + + + setUpdateField({ + studio_id: items.length > 0 ? items[0]?.id : undefined, + }) + } + ids={updateInput.studio_id ? [updateInput.studio_id] : []} + isDisabled={isUpdating} + menuPortalTarget={document.body} + /> + + + + { + setPerformerIds((c) => ({ ...c, ids: itemIDs })); + }} + onSetMode={(newMode) => { + setPerformerIds((c) => ({ ...c, mode: newMode })); + }} + ids={performerIds.ids ?? []} + existingIds={aggregateState.performerIds} + mode={performerIds.mode} + menuPortalTarget={document.body} + /> + + + + { + setGroupIds((c) => ({ ...c, ids: itemIDs })); + }} + onSetMode={(newMode) => { + setGroupIds((c) => ({ ...c, mode: newMode })); + }} + ids={groupIds.ids ?? []} + existingIds={aggregateState.groupIds} + mode={groupIds.mode} + menuPortalTarget={document.body} + /> + + + + { + setTagIds((c) => ({ ...c, ids: itemIDs })); + }} + onSetMode={(newMode) => { + setTagIds((c) => ({ ...c, mode: newMode })); + }} + ids={tagIds.ids ?? 
[]} + existingIds={aggregateState.tagIds} + mode={tagIds.mode} + menuPortalTarget={document.body} + /> + + + + setUpdateField({ details: newValue })} + unsetDisabled={unsetDisabled} + as="textarea" + /> + - cycleOrganized()} + setChecked={(checked) => setUpdateField({ organized: checked })} + checked={updateInput.organized ?? undefined} />
      diff --git a/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx b/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx index 143daca4f..e60c638d7 100644 --- a/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx +++ b/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx @@ -13,6 +13,7 @@ import { HoverScrubber } from "../Shared/HoverScrubber"; interface IScenePreviewProps { vttPath: string | undefined; onClick?: (timestamp: number) => void; + disabled?: boolean; } function scaleToFit(dimensions: { w: number; h: number }, bounds: DOMRect) { @@ -32,6 +33,7 @@ const defaultSprites = 81; // 9x9 grid by default export const PreviewScrubber: React.FC = ({ vttPath, onClick, + disabled, }) => { const imageParentRef = useRef(null); const [style, setStyle] = useState({}); @@ -44,6 +46,18 @@ export const PreviewScrubber: React.FC = ({ const [hasLoaded, setHasLoaded] = useState(false); const spriteInfo = useSpriteInfo(hasLoaded ? vttPath : undefined); + const spriteSheetSize = useMemo(() => { + if (!spriteInfo) { + return { x: 0, y: 0 }; + } + + // calculate total width/height of scrubber image so we can scale it + const maxX = Math.max(...spriteInfo.map((sprite) => sprite.x + sprite.w)); + const maxY = Math.max(...spriteInfo.map((sprite) => sprite.y + sprite.h)); + + return { x: maxX, y: maxY }; + }, [spriteInfo]); + const sprite = useMemo(() => { if (!spriteInfo || activeIndex === undefined) { return undefined; @@ -69,13 +83,15 @@ export const PreviewScrubber: React.FC = ({ const scale = scaleToFit(sprite, clientRect); setStyle({ - backgroundPosition: `${-sprite.x}px ${-sprite.y}px`, + backgroundPosition: `${-sprite.x * scale}px ${-sprite.y * scale}px`, backgroundImage: `url(${sprite.url})`, - width: `${sprite.w}px`, - height: `${sprite.h}px`, - transform: `scale(${scale})`, + backgroundSize: `${spriteSheetSize.x * scale}px ${ + spriteSheetSize.y * scale + }px`, + width: `${sprite.w * scale}px`, + height: `${sprite.h * scale}px`, }); - }, [sprite]); + }, [sprite, 
spriteSheetSize]); const currentTime = useMemo(() => { if (!sprite) return undefined; @@ -111,6 +127,7 @@ export const PreviewScrubber: React.FC = ({ activeIndex={activeIndex} setActiveIndex={(i) => debounceSetActiveIndex(i)} onClick={onScrubberClick} + disabled={disabled} />
    ); diff --git a/ui/v2.5/src/components/Scenes/SceneCard.tsx b/ui/v2.5/src/components/Scenes/SceneCard.tsx index 99b910f67..55124e9b0 100644 --- a/ui/v2.5/src/components/Scenes/SceneCard.tsx +++ b/ui/v2.5/src/components/Scenes/SceneCard.tsx @@ -6,12 +6,11 @@ import * as GQL from "src/core/generated-graphql"; import { Icon } from "../Shared/Icon"; import { GalleryLink, TagLink, SceneMarkerLink } from "../Shared/TagLink"; import { HoverPopover } from "../Shared/HoverPopover"; -import { SweatDrops } from "../Shared/SweatDrops"; import { TruncatedText } from "../Shared/TruncatedText"; import NavUtils from "src/utils/navigation"; import TextUtils from "src/utils/text"; import { SceneQueue } from "src/models/sceneQueue"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { PerformerPopoverButton } from "../Shared/PerformerPopoverButton"; import { GridCard } from "../Shared/GridCard/GridCard"; import { RatingBanner } from "../Shared/RatingBanner"; @@ -30,14 +29,18 @@ import { PatchComponent } from "src/patch"; import { StudioOverlay } from "../Shared/GridCard/StudioOverlay"; import { GroupTag } from "../Groups/GroupTag"; import { FileSize } from "../Shared/FileSize"; +import { OCounterButton } from "../Shared/CountButton"; +import { defaultPreviewVolume } from "src/core/config"; interface IScenePreviewProps { isPortrait: boolean; image?: string; video?: string; soundActive: boolean; + volume?: number; vttPath?: string; onScrubberClick?: (timestamp: number) => void; + disabled?: boolean; } export const ScenePreview: React.FC = ({ @@ -47,6 +50,8 @@ export const ScenePreview: React.FC = ({ soundActive, vttPath, onScrubberClick, + disabled, + volume, }) => { const videoEl = useRef(null); @@ -65,8 +70,8 @@ export const ScenePreview: React.FC = ({ useEffect(() => { if (videoEl?.current?.volume) - videoEl.current.volume = soundActive ? 
0.05 : 0; - }, [soundActive]); + videoEl.current.volume = soundActive ? (volume ?? 0) / 100 : 0; + }, [volume, soundActive]); return (
    @@ -86,7 +91,11 @@ export const ScenePreview: React.FC = ({ ref={videoEl} src={video} /> - +
    ); }; @@ -218,16 +227,7 @@ const SceneCardPopovers = PatchComponent( function maybeRenderOCounter() { if (props.scene.o_counter) { - return ( -
    - -
    - ); + return ; } } @@ -345,7 +345,46 @@ const SceneCardDetails = PatchComponent( const SceneCardOverlays = PatchComponent( "SceneCard.Overlays", (props: ISceneCardProps) => { - return ; + const ret = useMemo(() => { + return ( + + ); + }, [props.scene.studio, props.selecting]); + + return ret; + } +); + +interface ISceneSpecsOverlay { + scene: GQL.SlimSceneDataFragment; +} + +export const SceneSpecsOverlay: React.FC = PatchComponent( + "SceneCard.SceneSpecs", + ({ scene }) => { + const file = scene.files?.[0]; + if (!file) return null; + return ( +
    + + + + {file.width && file.height ? ( + + {TextUtils.resolution(file.width, file.height)} + + ) : ( + "" + )} + {file.duration > 0 ? ( + + {TextUtils.secondsToTimestamp(file.duration)} + + ) : ( + "" + )} +
    + ); } ); @@ -353,7 +392,7 @@ const SceneCardImage = PatchComponent( "SceneCard.Image", (props: ISceneCardProps) => { const history = useHistory(); - const { configuration } = React.useContext(ConfigurationContext); + const { configuration } = useConfigurationContext(); const cont = configuration?.interface.continuePlaylistDefault ?? false; const file = useMemo( @@ -361,35 +400,6 @@ const SceneCardImage = PatchComponent( [props.scene] ); - function maybeRenderSceneSpecsOverlay() { - return ( -
    - {file?.size !== undefined ? ( - - - - ) : ( - "" - )} - {file?.width && file?.height ? ( - - {" "} - {TextUtils.resolution(file?.width, file?.height)} - - ) : ( - "" - )} - {(file?.duration ?? 0) >= 1 ? ( - - {TextUtils.secondsToTimestamp(file?.duration ?? 0)} - - ) : ( - "" - )} -
    - ); - } - function maybeRenderInteractiveSpeedOverlay() { return (
    @@ -399,6 +409,7 @@ const SceneCardImage = PatchComponent( } function onScrubberClick(timestamp: number) { + if (props.selecting) return; const link = props.queue ? props.queue.makeLink(props.scene.id, { sceneIndex: props.index, @@ -423,11 +434,13 @@ const SceneCardImage = PatchComponent( video={props.scene.paths.preview ?? undefined} isPortrait={isPortrait()} soundActive={configuration?.interface?.soundOnPreview ?? false} + volume={configuration?.ui.previewVolume ?? defaultPreviewVolume} vttPath={props.scene.paths.vtt ?? undefined} onScrubberClick={onScrubberClick} + disabled={props.selecting} /> - {maybeRenderSceneSpecsOverlay()} + {maybeRenderInteractiveSpeedOverlay()} ); @@ -437,7 +450,7 @@ const SceneCardImage = PatchComponent( export const SceneCard = PatchComponent( "SceneCard", (props: ISceneCardProps) => { - const { configuration } = React.useContext(ConfigurationContext); + const { configuration } = useConfigurationContext(); const file = useMemo( () => (props.scene.files.length > 0 ? 
props.scene.files[0] : undefined), diff --git a/ui/v2.5/src/components/Scenes/SceneCardGrid.tsx b/ui/v2.5/src/components/Scenes/SceneCardGrid.tsx new file mode 100644 index 000000000..f60b412d3 --- /dev/null +++ b/ui/v2.5/src/components/Scenes/SceneCardGrid.tsx @@ -0,0 +1,50 @@ +import React from "react"; +import * as GQL from "src/core/generated-graphql"; +import { SceneQueue } from "src/models/sceneQueue"; +import { SceneCard } from "./SceneCard"; +import { + useCardWidth, + useContainerDimensions, +} from "../Shared/GridCard/GridCard"; +import { PatchComponent } from "src/patch"; + +interface ISceneCardGrid { + scenes: GQL.SlimSceneDataFragment[]; + queue?: SceneQueue; + selectedIds: Set; + zoomIndex: number; + onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; + fromGroupId?: string; +} + +const zoomWidths = [280, 340, 480, 640]; + +export const SceneCardGrid: React.FC = PatchComponent( + "SceneCardGrid", + ({ scenes, queue, selectedIds, zoomIndex, onSelectChange, fromGroupId }) => { + const [componentRef, { width: containerWidth }] = useContainerDimensions(); + + const cardWidth = useCardWidth(containerWidth, zoomIndex, zoomWidths); + + return ( +
    + {scenes.map((scene, index) => ( + 0} + selected={selectedIds.has(scene.id)} + onSelectedChanged={(selected: boolean, shiftKey: boolean) => + onSelectChange(scene.id, selected, shiftKey) + } + fromGroupId={fromGroupId} + /> + ))} +
    + ); + } +); diff --git a/ui/v2.5/src/components/Scenes/SceneCardsGrid.tsx b/ui/v2.5/src/components/Scenes/SceneCardsGrid.tsx deleted file mode 100644 index 03b907938..000000000 --- a/ui/v2.5/src/components/Scenes/SceneCardsGrid.tsx +++ /dev/null @@ -1,53 +0,0 @@ -import React from "react"; -import * as GQL from "src/core/generated-graphql"; -import { SceneQueue } from "src/models/sceneQueue"; -import { SceneCard } from "./SceneCard"; -import { - useCardWidth, - useContainerDimensions, -} from "../Shared/GridCard/GridCard"; - -interface ISceneCardsGrid { - scenes: GQL.SlimSceneDataFragment[]; - queue?: SceneQueue; - selectedIds: Set; - zoomIndex: number; - onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; - fromGroupId?: string; -} - -const zoomWidths = [280, 340, 480, 640]; - -export const SceneCardsGrid: React.FC = ({ - scenes, - queue, - selectedIds, - zoomIndex, - onSelectChange, - fromGroupId, -}) => { - const [componentRef, { width: containerWidth }] = useContainerDimensions(); - - const cardWidth = useCardWidth(containerWidth, zoomIndex, zoomWidths); - - return ( -
    - {scenes.map((scene, index) => ( - 0} - selected={selectedIds.has(scene.id)} - onSelectedChanged={(selected: boolean, shiftKey: boolean) => - onSelectChange(scene.id, selected, shiftKey) - } - fromGroupId={fromGroupId} - /> - ))} -
    - ); -}; diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/ExternalPlayerButton.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/ExternalPlayerButton.tsx index b17dfb6bb..3701f4138 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/ExternalPlayerButton.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/ExternalPlayerButton.tsx @@ -29,7 +29,7 @@ export const ExternalPlayerButton: React.FC = ({ const streamURL = new URL(stream); if (isAndroid) { const scheme = streamURL.protocol.slice(0, -1); - streamURL.hash = `Intent;action=android.intent.action.VIEW;scheme=${scheme};type=video/mp4;S.title=${encodeURI( + streamURL.hash = `Intent;action=android.intent.action.VIEW;scheme=${scheme};type=video/mp4;S.title=${encodeURIComponent( title )};end`; diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/OCounterButton.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/OCounterButton.tsx index 8fdb7dfd7..d8963df4d 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/OCounterButton.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/OCounterButton.tsx @@ -1,10 +1,11 @@ -import { faBan, faMinus } from "@fortawesome/free-solid-svg-icons"; +import { faBan, faMinus, faThumbsUp } from "@fortawesome/free-solid-svg-icons"; import React, { useState } from "react"; import { Button, ButtonGroup, Dropdown, DropdownButton } from "react-bootstrap"; import { useIntl } from "react-intl"; import { Icon } from "src/components/Shared/Icon"; import { LoadingIndicator } from "src/components/Shared/LoadingIndicator"; import { SweatDrops } from "src/components/Shared/SweatDrops"; +import { useConfigurationContext } from "src/hooks/Config"; export interface IOCounterButtonProps { value: number; @@ -17,6 +18,12 @@ export const OCounterButton: React.FC = ( props: IOCounterButtonProps ) => { const intl = useIntl(); + const { configuration } = useConfigurationContext(); + const { sfwContentMode } = configuration.interface; + + const icon = !sfwContentMode ? 
: ; + const messageID = !sfwContentMode ? "o_count" : "o_count_sfw"; + const [loading, setLoading] = useState(false); async function increment() { @@ -44,9 +51,9 @@ export const OCounterButton: React.FC = ( className="minimal pr-1" onClick={increment} variant="secondary" - title={intl.formatMessage({ id: "o_counter" })} + title={intl.formatMessage({ id: messageID })} > - + {icon} {props.value} ); diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/PrimaryTags.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/PrimaryTags.tsx index 11c805ec6..d5a32fc31 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/PrimaryTags.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/PrimaryTags.tsx @@ -4,18 +4,24 @@ import * as GQL from "src/core/generated-graphql"; import { Button, Badge, Card } from "react-bootstrap"; import TextUtils from "src/utils/text"; import { markerTitle } from "src/core/markers"; +import { useConfigurationContext } from "src/hooks/Config"; interface IPrimaryTags { sceneMarkers: GQL.SceneMarkerDataFragment[]; onClickMarker: (marker: GQL.SceneMarkerDataFragment) => void; + onLoopMarker: (marker: GQL.SceneMarkerDataFragment) => void; onEdit: (marker: GQL.SceneMarkerDataFragment) => void; } export const PrimaryTags: React.FC = ({ sceneMarkers, onClickMarker, + onLoopMarker, onEdit, }) => { + const { configuration } = useConfigurationContext(); + const showAbLoopControls = configuration?.ui?.showAbLoopControls; + if (!sceneMarkers?.length) return
    ; const primaryTagNames: Record = {}; @@ -52,10 +58,21 @@ export const PrimaryTags: React.FC = ({
    -
    - {TextUtils.formatTimestampRange( - marker.seconds, - marker.end_seconds ?? undefined +
    +
    + {TextUtils.formatTimestampRange( + marker.seconds, + marker.end_seconds ?? undefined + )} +
    + {showAbLoopControls && marker.end_seconds != null && ( + )}
    {tags}
    diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/Scene.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/Scene.tsx index c4088654a..7d1b245fc 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/Scene.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/Scene.tsx @@ -3,12 +3,11 @@ import React, { useEffect, useState, useMemo, - useContext, useRef, useLayoutEffect, } from "react"; -import { FormattedDate, FormattedMessage, useIntl } from "react-intl"; -import { Link, RouteComponentProps } from "react-router-dom"; +import { FormattedMessage, useIntl } from "react-intl"; +import { useHistory, RouteComponentProps } from "react-router-dom"; import { Helmet } from "react-helmet"; import * as GQL from "src/core/generated-graphql"; import { @@ -32,8 +31,11 @@ import SceneQueue, { QueuedScene } from "src/models/sceneQueue"; import { ListFilterModel } from "src/models/list-filter/filter"; import Mousetrap from "mousetrap"; import { OrganizedButton } from "./OrganizedButton"; -import { ConfigurationContext } from "src/hooks/Config"; -import { getPlayerPosition } from "src/components/ScenePlayer/util"; +import { useConfigurationContext } from "src/hooks/Config"; +import { + getAbLoopPlugin, + getPlayerPosition, +} from "src/components/ScenePlayer/util"; import { faEllipsisV, faChevronRight, @@ -51,6 +53,10 @@ import { lazyComponent } from "src/utils/lazyComponent"; import cx from "classnames"; import { TruncatedText } from "src/components/Shared/TruncatedText"; import { PatchComponent, PatchContainerComponent } from "src/patch"; +import { SceneMergeModal } from "../SceneMergeDialog"; +import { goBackOrReplace } from "src/utils/history"; +import { FormattedDate } from "src/components/Shared/Date"; +import { StudioLogo } from "src/components/Shared/StudioLogo"; const SubmitStashBoxDraft = lazyComponent( () => import("src/components/Dialogs/SubmitDraft") @@ -181,9 +187,11 @@ const ScenePage: React.FC = PatchComponent("ScenePage", (props) => { const Toast = 
useToast(); const intl = useIntl(); + const history = useHistory(); const [updateScene] = useSceneUpdate(); const [generateScreenshot] = useSceneGenerateScreenshot(); - const { configuration } = useContext(ConfigurationContext); + const { configuration } = useConfigurationContext(); + const { showStudioText } = configuration?.ui ?? {}; const [showDraftModal, setShowDraftModal] = useState(false); const boxes = configuration?.general?.stashBoxes ?? []; @@ -204,6 +212,7 @@ const ScenePage: React.FC = PatchComponent("ScenePage", (props) => { const [activeTabKey, setActiveTabKey] = useState("scene-details-panel"); + const [isMerging, setIsMerging] = useState(false); const [isDeleteAlertOpen, setIsDeleteAlertOpen] = useState(false); const [isGenerateDialogOpen, setIsGenerateDialogOpen] = useState(false); @@ -247,6 +256,13 @@ const ScenePage: React.FC = PatchComponent("ScenePage", (props) => { Mousetrap.bind("p p", () => onQueuePrevious()); Mousetrap.bind("p r", () => onQueueRandom()); Mousetrap.bind(",", () => setCollapsed(!collapsed)); + Mousetrap.bind("d d", () => setIsDeleteAlertOpen(true)); + Mousetrap.bind("c c", () => { + onGenerateScreenshot(getPlayerPosition()); + }); + Mousetrap.bind("c d", () => { + onGenerateScreenshot(); + }); return () => { Mousetrap.unbind("a"); @@ -256,10 +272,13 @@ const ScenePage: React.FC = PatchComponent("ScenePage", (props) => { Mousetrap.unbind("i"); Mousetrap.unbind("h"); Mousetrap.unbind("o"); + Mousetrap.unbind("d d"); Mousetrap.unbind("p n"); Mousetrap.unbind("p p"); Mousetrap.unbind("p r"); Mousetrap.unbind(","); + Mousetrap.unbind("c c"); + Mousetrap.unbind("c d"); }; }); @@ -299,9 +318,53 @@ const ScenePage: React.FC = PatchComponent("ScenePage", (props) => { }; function onClickMarker(marker: GQL.SceneMarkerDataFragment) { + const abLoopPlugin = getAbLoopPlugin(); + const opts = abLoopPlugin?.getOptions(); + const start = opts?.start; + const end = opts?.end; + + const hasLoopRange = + opts?.enabled && + typeof start === 
"number" && + typeof end === "number" && + Number.isFinite(start) && + Number.isFinite(end); + + if ( + abLoopPlugin && + opts && + hasLoopRange && + (marker.seconds < Math.min(start as number, end as number) || + marker.seconds > Math.max(start as number, end as number)) + ) { + abLoopPlugin.setOptions({ + ...opts, + enabled: false, + }); + } + setTimestamp(marker.seconds); } + function onLoopMarker(marker: GQL.SceneMarkerDataFragment) { + if (marker.end_seconds == null) return; + + setTimestamp(marker.seconds); + const start = Math.min(marker.seconds, marker.end_seconds); + const end = Math.max(marker.seconds, marker.end_seconds); + const abLoopPlugin = getAbLoopPlugin(); + const opts = abLoopPlugin?.getOptions(); + + if (opts && abLoopPlugin) { + abLoopPlugin.setOptions({ + ...opts, + start, + end, + enabled: true, + }); + } + } + async function onRescan() { await mutateMetadataScan({ paths: [objectPath(scene)], @@ -338,6 +401,24 @@ const ScenePage: React.FC = PatchComponent("ScenePage", (props) => { } } + function maybeRenderMergeDialog() { + if (!scene.id) return; + return ( + { + setIsMerging(false); + if (mergedId !== undefined && mergedId !== scene.id) { + // By default, the merge destination is the current scene, but + // the user can change it, in which case we need to redirect. + history.replace(`/scenes/${mergedId}`); + } + }} + scenes={[{ id: scene.id, title: objectTitle(scene) }]} + /> + ); + } + function maybeRenderDeleteDialog() { if (isDeleteAlertOpen) { return ( @@ -385,7 +466,7 @@ const ScenePage: React.FC = PatchComponent("ScenePage", (props) => { className="bg-secondary text-white" onClick={() => setIsGenerateDialogOpen(true)} > - + = PatchComponent("ScenePage", (props) => { )} + setIsMerging(true)} + > + + ... + = PatchComponent("ScenePage", (props) => { @@ -579,6 +669,7 @@ const ScenePage: React.FC = PatchComponent("ScenePage", (props) => { {title} {maybeRenderSceneGenerateDialog()} + {maybeRenderMergeDialog()} {maybeRenderDeleteDialog()}
    = PatchComponent("ScenePage", (props) => { >
    - {scene.studio && ( -

    - - {`${scene.studio.name} - -

    - )} +

    @@ -605,13 +686,7 @@ const ScenePage: React.FC = PatchComponent("ScenePage", (props) => {
    - {!!scene.date && ( - - )} + {!!scene.date && } > = ({ match, }) => { const { id } = match.params; - const { configuration } = useContext(ConfigurationContext); + const { configuration } = useConfigurationContext(); const { data, loading, error } = useFindScene(id); const [scene, setScene] = useState(); @@ -909,7 +984,7 @@ const SceneLoader: React.FC> = ({ ) { loadScene(queueScenes[currentQueueIndex + 1].id); } else { - history.goBack(); + goBackOrReplace(history, "/scenes"); } } diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneCreate.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneCreate.tsx index 707740605..8e3807c83 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneCreate.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneCreate.tsx @@ -57,14 +57,16 @@ const SceneCreate: React.FC = () => { return ; } - async function onSave(input: GQL.SceneCreateInput) { + async function onSave(input: GQL.SceneCreateInput, andNew?: boolean) { const fileID = query.get("file_id") ?? undefined; const result = await mutateCreateScene({ ...input, file_ids: fileID ? 
[fileID] : undefined, }); if (result.data?.sceneCreate?.id) { - history.push(`/scenes/${result.data.sceneCreate.id}`); + if (!andNew) { + history.push(`/scenes/${result.data.sceneCreate.id}`); + } Toast.success( intl.formatMessage( { id: "toast.created_entity" }, diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneDetailPanel.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneDetailPanel.tsx index ad7663e9d..b109016b1 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneDetailPanel.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneDetailPanel.tsx @@ -6,6 +6,7 @@ import { TagLink } from "src/components/Shared/TagLink"; import { PerformerCard } from "src/components/Performers/PerformerCard"; import { sortPerformers } from "src/core/performers"; import { DirectorLink } from "src/components/Shared/Link"; +import { CustomFields } from "src/components/Shared/CustomFields"; interface ISceneDetailProps { scene: GQL.SceneDataFragment; @@ -103,6 +104,7 @@ export const SceneDetailPanel: React.FC = (props) => { {renderDetails()} {renderTags()} {renderPerformers()} +
    diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneEditPanel.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneEditPanel.tsx index 69b378787..41293ff78 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneEditPanel.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneEditPanel.tsx @@ -1,6 +1,14 @@ import React, { useEffect, useState, useMemo } from "react"; import { FormattedMessage, useIntl } from "react-intl"; -import { Button, Form, Col, Row, ButtonGroup } from "react-bootstrap"; +import { + Button, + Dropdown, + Form, + Col, + Row, + ButtonGroup, + SplitButton, +} from "react-bootstrap"; import Mousetrap from "mousetrap"; import * as GQL from "src/core/generated-graphql"; import * as yup from "yup"; @@ -16,12 +24,12 @@ import { LoadingIndicator } from "src/components/Shared/LoadingIndicator"; import { ImageInput } from "src/components/Shared/ImageInput"; import { useToast } from "src/hooks/Toast"; import ImageUtils from "src/utils/image"; -import { getStashIDs } from "src/utils/stashIds"; +import { addUpdateStashID, getStashIDs } from "src/utils/stashIds"; import { useFormik } from "formik"; import { Prompt } from "react-router-dom"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { IGroupEntry, SceneGroupTable } from "./SceneGroupTable"; -import { faSearch } from "@fortawesome/free-solid-svg-icons"; +import { faSearch, faPlus } from "@fortawesome/free-solid-svg-icons"; import { objectTitle } from "src/core/files"; import { galleryTitle } from "src/core/galleries"; import { lazyComponent } from "src/utils/lazyComponent"; @@ -41,6 +49,12 @@ import { Gallery, GallerySelect } from "src/components/Galleries/GallerySelect"; import { Group } from "src/components/Groups/GroupSelect"; import { useTagsEdit } from "src/hooks/tagsEdit"; import { ScraperMenu } from "src/components/Shared/ScraperMenu"; +import StashBoxIDSearchModal from 
"src/components/Shared/StashBoxIDSearchModal"; +import { + CustomFieldsInput, + formatCustomFieldInput, +} from "src/components/Shared/CustomFields"; +import { cloneDeep } from "@apollo/client/utilities"; const SceneScrapeDialog = lazyComponent(() => import("./SceneScrapeDialog")); const SceneQueryModal = lazyComponent(() => import("./SceneQueryModal")); @@ -50,7 +64,7 @@ interface IProps { initialCoverImage?: string; isNew?: boolean; isVisible: boolean; - onSubmit: (input: GQL.SceneCreateInput) => Promise; + onSubmit: (input: GQL.SceneCreateInput, andNew?: boolean) => Promise; onDelete?: () => void; } @@ -77,6 +91,8 @@ export const SceneEditPanel: React.FC = ({ const [scraper, setScraper] = useState(); const [isScraperQueryModalOpen, setIsScraperQueryModalOpen] = useState(false); + const [isStashIDSearchOpen, setIsStashIDSearchOpen] = + useState(false); const [scrapedScene, setScrapedScene] = useState(); const [endpoint, setEndpoint] = useState(); @@ -103,7 +119,7 @@ export const SceneEditPanel: React.FC = ({ setStudio(scene.studio ?? null); }, [scene.studio]); - const { configuration: stashConfig } = React.useContext(ConfigurationContext); + const { configuration: stashConfig } = useConfigurationContext(); // Network state const [isLoading, setIsLoading] = useState(false); @@ -129,6 +145,7 @@ export const SceneEditPanel: React.FC = ({ stash_ids: yup.mixed().defined(), details: yup.string().ensure(), cover_image: yup.string().nullable().optional(), + custom_fields: yup.object().required().defined(), }); const initialValues = useMemo( @@ -148,17 +165,28 @@ export const SceneEditPanel: React.FC = ({ stash_ids: getStashIDs(scene.stash_ids), details: scene.details ?? "", cover_image: initialCoverImage, + custom_fields: cloneDeep(scene.custom_fields ?? 
{}), }), [scene, initialCoverImage] ); type InputValues = yup.InferType; + const [customFieldsError, setCustomFieldsError] = useState(); + + function submit(values: InputValues) { + const input = { + ...schema.cast(values), + custom_fields: formatCustomFieldInput(isNew, values.custom_fields), + }; + onSave(input); + } + const formik = useFormik({ initialValues, enableReinitialize: true, validate: yupFormikValidate(schema), - onSubmit: (values) => onSave(schema.cast(values)), + onSubmit: submit, }); const { tags, updateTagsStateFromScraper, tagsControl } = useTagsEdit( @@ -265,10 +293,10 @@ export const SceneEditPanel: React.FC = ({ formik.setFieldValue("groups", newGroups); } - async function onSave(input: InputValues) { + async function onSave(input: InputValues, andNew?: boolean) { setIsLoading(true); try { - await onSubmit(input); + await onSubmit(input, andNew); formik.resetForm(); } catch (e) { Toast.error(e); @@ -276,6 +304,14 @@ export const SceneEditPanel: React.FC = ({ setIsLoading(false); } + async function onSaveAndNewClick() { + const input = { + ...schema.cast(formik.values), + custom_fields: formatCustomFieldInput(isNew, formik.values.custom_fields), + }; + onSave(input, true); + } + const encodingImage = ImageUtils.usePasteImage(onImageLoad); function onImageLoad(imageData: string) { @@ -286,6 +322,10 @@ export const SceneEditPanel: React.FC = ({ ImageUtils.onImageChange(event, onImageLoad); } + function onResetCover() { + formik.setFieldValue("cover_image", null); + } + async function onScrapeClicked(s: GQL.ScraperSourceInput) { setIsLoading(true); try { @@ -547,6 +587,14 @@ export const SceneEditPanel: React.FC = ({ } } + function onStashIDSelected(item?: GQL.StashIdInput) { + if (!item) return; + formik.setFieldValue( + "stash_ids", + addUpdateStashID(formik.values.stash_ids, item) + ); + } + const image = useMemo(() => { if (encodingImage) { return ( @@ -591,6 +639,19 @@ export const SceneEditPanel: React.FC = ({ xl: 12, }, }; + const urlProps = 
isNew + ? splitProps + : { + labelProps: { + column: true, + md: 3, + lg: 12, + }, + fieldProps: { + md: 9, + lg: 12, + }, + }; const { renderField, renderInputField, @@ -696,19 +757,52 @@ export const SceneEditPanel: React.FC = ({ {renderScrapeQueryModal()} {maybeRenderScrapeDialog()} + {isStashIDSearchOpen && ( + s.endpoint + )} + onSelectItem={(item) => { + onStashIDSelected(item); + setIsStashIDSearchOpen(false); + }} + initialQuery={scene.title ?? ""} + /> + )}
    - + {isNew ? ( + formik.submitForm()} + > + onSaveAndNewClick()}> + + + + ) : ( + + )} {onDelete && ( )} @@ -775,8 +884,16 @@ export const SceneEditPanel: React.FC = ({ isEditing onImageChange={onCoverImageChange} onImageURL={onImageLoad} + onReset={scene.id ? onResetCover : undefined} /> + + formik.setFieldValue("custom_fields", v)} + error={customFieldsError} + setError={(e) => setCustomFieldsError(e)} + /> diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneFileInfoPanel.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneFileInfoPanel.tsx index 63490a2ee..cd11a2c8a 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneFileInfoPanel.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneFileInfoPanel.tsx @@ -9,6 +9,7 @@ import { import { useHistory } from "react-router-dom"; import { TruncatedText } from "src/components/Shared/TruncatedText"; import { DeleteFilesDialog } from "src/components/Shared/DeleteFilesDialog"; +import { RevealInFilesystemButton } from "src/components/Shared/RevealInFilesystemButton"; import { ReassignFilesDialog } from "src/components/Shared/ReassignFilesDialog"; import * as GQL from "src/core/generated-graphql"; import { mutateSceneSetPrimaryFile } from "src/core/StashService"; @@ -59,23 +60,28 @@ const FileInfoPanel: React.FC = ( )} - - + + - + + + + + + diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneHistoryPanel.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneHistoryPanel.tsx index 1ac9dd5a2..2ba587a2b 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneHistoryPanel.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneHistoryPanel.tsx @@ -21,6 +21,7 @@ import { useSceneResetActivity, } from "src/core/StashService"; import * as GQL from "src/core/generated-graphql"; +import { useConfigurationContext } from "src/hooks/Config"; import { useToast } from "src/hooks/Toast"; import { TextField } from "src/utils/field"; import TextUtils from "src/utils/text"; @@ -172,6 +173,9 @@ 
export const SceneHistoryPanel: React.FC = ({ scene }) => { const intl = useIntl(); const Toast = useToast(); + const { configuration } = useConfigurationContext(); + const { sfwContentMode } = configuration.interface; + const [dialogs, setDialogs] = React.useState({ playHistory: false, oHistory: false, @@ -299,6 +303,9 @@ export const SceneHistoryPanel: React.FC = ({ scene }) => { } function maybeRenderDialogs() { + const clearHistoryMessageID = sfwContentMode + ? "dialogs.clear_o_history_confirm_sfw" + : "dialogs.clear_play_history_confirm"; return ( <> = ({ scene }) => { /> handleClearODates()} onCancel={() => setDialogPartial({ oHistory: false })} @@ -351,6 +358,11 @@ export const SceneHistoryPanel: React.FC = ({ scene }) => { ) as string[]; const oHistory = (scene.o_history ?? []).filter((h) => h != null) as string[]; + const oHistoryMessageID = sfwContentMode ? "o_history_sfw" : "o_history"; + const noneMessageID = sfwContentMode + ? "odate_recorded_no_sfw" + : "odate_recorded_no"; + return (
    {maybeRenderDialogs()} @@ -401,7 +413,7 @@ export const SceneHistoryPanel: React.FC = ({ scene }) => {
    - + @@ -427,7 +439,7 @@ export const SceneHistoryPanel: React.FC = ({ scene }) => {
    handleDeleteODate(t)} /> diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkerForm.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkerForm.tsx index 1670bcc7b..a2bad2f8e 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkerForm.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkerForm.tsx @@ -11,7 +11,10 @@ import { } from "src/core/StashService"; import { DurationInput } from "src/components/Shared/DurationInput"; import { MarkerTitleSuggest } from "src/components/Shared/Select"; -import { getPlayerPosition } from "src/components/ScenePlayer/util"; +import { + getAbLoopPlugin, + getPlayerPosition, +} from "src/components/ScenePlayer/util"; import { useToast } from "src/hooks/Toast"; import isEqual from "lodash-es/isEqual"; import { formikUtils } from "src/utils/form"; @@ -61,16 +64,39 @@ export const SceneMarkerForm: React.FC = ({ }); // useMemo to only run getPlayerPosition when the input marker actually changes - const initialValues = useMemo( - () => ({ + const initialValues = useMemo(() => { + if (!marker) { + const abLoopPlugin = getAbLoopPlugin(); + const opts = abLoopPlugin?.getOptions(); + const start = opts?.start; + const end = opts?.end; + const hasAbLoop = Number.isFinite(start); + + if (opts?.enabled && hasAbLoop) { + const current = Math.round(getPlayerPosition() ?? 0); + const rawEnd = + Number.isFinite(end) && (end as number) > 0 ? (end as number) : null; + const endSeconds = + rawEnd !== null ? rawEnd : Math.max(start as number, current); + + return { + title: "", + seconds: start as number, + end_seconds: endSeconds, + primary_tag_id: "", + tag_ids: [], + }; + } + } + + return { title: marker?.title ?? "", seconds: marker?.seconds ?? Math.round(getPlayerPosition() ?? 0), end_seconds: marker?.end_seconds ?? null, primary_tag_id: marker?.primary_tag.id ?? "", tag_ids: marker?.tags.map((tag) => tag.id) ?? 
[], - }), - [marker] - ); + }; + }, [marker]); type InputValues = yup.InferType; @@ -96,7 +122,9 @@ export const SceneMarkerForm: React.FC = ({ useEffect(() => { setPrimaryTag( - marker?.primary_tag ? { ...marker.primary_tag, aliases: [] } : undefined + marker?.primary_tag + ? { ...marker.primary_tag, aliases: [], stash_ids: [] } + : undefined ); }, [marker?.primary_tag]); @@ -105,6 +133,7 @@ export const SceneMarkerForm: React.FC = ({ marker?.tags.map((t) => ({ ...t, aliases: [], + stash_ids: [], })) ?? [] ); }, [marker?.tags]); diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkersPanel.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkersPanel.tsx index 331c58c78..28a6e4d98 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkersPanel.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkersPanel.tsx @@ -11,12 +11,14 @@ interface ISceneMarkersPanelProps { sceneId: string; isVisible: boolean; onClickMarker: (marker: GQL.SceneMarkerDataFragment) => void; + onLoopMarker: (marker: GQL.SceneMarkerDataFragment) => void; } export const SceneMarkersPanel: React.FC = ({ sceneId, isVisible, onClickMarker, + onLoopMarker, }) => { const { data, loading } = GQL.useFindSceneMarkerTagsQuery({ variables: { id: sceneId }, @@ -70,6 +72,7 @@ export const SceneMarkersPanel: React.FC = ({
    diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneScrapeDialog.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneScrapeDialog.tsx index d6acfabfb..9b9a6bc40 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneScrapeDialog.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneScrapeDialog.tsx @@ -1,12 +1,12 @@ import React, { useState } from "react"; import * as GQL from "src/core/generated-graphql"; import { - ScrapeDialog, ScrapedInputGroupRow, ScrapedTextAreaRow, ScrapedImageRow, ScrapedStringListRow, -} from "src/components/Shared/ScrapeDialog/ScrapeDialog"; +} from "src/components/Shared/ScrapeDialog/ScrapeDialogRow"; +import { ScrapeDialog } from "src/components/Shared/ScrapeDialog/ScrapeDialog"; import { useIntl } from "react-intl"; import { uniq } from "lodash-es"; import { Performer } from "src/components/Performers/PerformerSelect"; @@ -131,9 +131,10 @@ export const SceneScrapeDialog: React.FC = ({ scraped.groups?.filter((t) => !t.stored_id) ?? [] ); - const { tags, newTags, scrapedTagsRow } = useScrapedTags( + const { tags, newTags, scrapedTagsRow, linkDialog } = useScrapedTags( sceneTags, - scraped.tags + scraped.tags, + endpoint ); const [details, setDetails] = useState>( @@ -148,6 +149,7 @@ export const SceneScrapeDialog: React.FC = ({ scrapeResult: studio, setScrapeResult: setStudio, setNewObject: setNewStudio, + endpoint, }); const createNewPerformer = useCreateScrapedPerformer({ @@ -155,6 +157,7 @@ export const SceneScrapeDialog: React.FC = ({ setScrapeResult: setPerformers, newObjects: newPerformers, setNewObjects: setNewPerformers, + endpoint, }); const createNewGroup = useCreateScrapedGroup({ @@ -162,6 +165,7 @@ export const SceneScrapeDialog: React.FC = ({ setScrapeResult: setGroups, newObjects: newGroups, setNewObjects: setNewGroups, + endpoint, }); const intl = useIntl(); @@ -214,32 +218,38 @@ export const SceneScrapeDialog: React.FC = ({ return ( <> setTitle(value)} /> setCode(value)} /> setURLs(value)} /> 
setDate(value)} /> setDirector(value)} /> setStudio(value)} @@ -247,6 +257,7 @@ export const SceneScrapeDialog: React.FC = ({ onCreateNew={createNewStudio} /> setPerformers(value)} @@ -255,6 +266,7 @@ export const SceneScrapeDialog: React.FC = ({ ageFromDate={date.useNewValue ? date.newValue : date.originalValue} /> setGroups(value)} @@ -263,17 +275,20 @@ export const SceneScrapeDialog: React.FC = ({ /> {scrapedTagsRow} setDetails(value)} /> setStashID(value)} /> = ({ ); } + if (linkDialog) { + return linkDialog; + } + return ( { onClose(apply ? makeNewScrapedItem() : undefined); }} - /> + > + {renderScrapeRows()} + ); }; diff --git a/ui/v2.5/src/components/Scenes/SceneList.tsx b/ui/v2.5/src/components/Scenes/SceneList.tsx index 3f9bf6315..156258045 100644 --- a/ui/v2.5/src/components/Scenes/SceneList.tsx +++ b/ui/v2.5/src/components/Scenes/SceneList.tsx @@ -1,7 +1,7 @@ -import React, { useCallback, useContext, useEffect, useMemo } from "react"; +import React, { useCallback, useEffect, useMemo } from "react"; import cloneDeep from "lodash-es/cloneDeep"; import { FormattedMessage, useIntl } from "react-intl"; -import { useHistory } from "react-router-dom"; +import { useHistory, useLocation } from "react-router-dom"; import Mousetrap from "mousetrap"; import * as GQL from "src/core/generated-graphql"; import { queryFindScenes, useFindScenes } from "src/core/StashService"; @@ -15,11 +15,10 @@ import { EditScenesDialog } from "./EditScenesDialog"; import { DeleteScenesDialog } from "./DeleteScenesDialog"; import { GenerateDialog } from "../Dialogs/GenerateDialog"; import { ExportDialog } from "../Shared/ExportDialog"; -import { SceneCardsGrid } from "./SceneCardsGrid"; +import { SceneCardGrid } from "./SceneCardGrid"; import { TaggerContext } from "../Tagger/context"; import { IdentifyDialog } from "../Dialogs/IdentifyDialog/IdentifyDialog"; -import { ConfigurationContext } from "src/hooks/Config"; -import { faPlay } from "@fortawesome/free-solid-svg-icons"; +import { 
useConfigurationContext } from "src/hooks/Config"; import { SceneMergeModal } from "./SceneMergeDialog"; import { objectTitle } from "src/core/files"; import TextUtils from "src/utils/text"; @@ -27,28 +26,39 @@ import { View } from "../List/views"; import { FileSize } from "../Shared/FileSize"; import { LoadedContent } from "../List/PagedList"; import { useCloseEditDelete, useFilterOperations } from "../List/util"; -import { IListFilterOperation } from "../List/ListOperationButtons"; -import { FilteredListToolbar } from "../List/FilteredListToolbar"; +import { ListOperations } from "../List/ListOperationButtons"; import { useFilteredItemList } from "../List/ItemList"; -import { FilterTags } from "../List/FilterTags"; -import { Sidebar, SidebarPane, useSidebarState } from "../Shared/Sidebar"; +import { + Sidebar, + SidebarPane, + SidebarPaneContent, + SidebarStateContext, + useSidebarState, +} from "../Shared/Sidebar"; import { SidebarPerformersFilter } from "../List/Filters/PerformersFilter"; import { SidebarStudiosFilter } from "../List/Filters/StudiosFilter"; -import { PerformersCriterionOption } from "src/models/list-filter/criteria/performers"; -import { StudiosCriterionOption } from "src/models/list-filter/criteria/studios"; -import { TagsCriterionOption } from "src/models/list-filter/criteria/tags"; import { SidebarTagsFilter } from "../List/Filters/TagsFilter"; import cx from "classnames"; -import { RatingCriterionOption } from "src/models/list-filter/criteria/rating"; import { SidebarRatingFilter } from "../List/Filters/RatingFilter"; import { OrganizedCriterionOption } from "src/models/list-filter/criteria/organized"; +import { HasMarkersCriterionOption } from "src/models/list-filter/criteria/has-markers"; import { SidebarBooleanFilter } from "../List/Filters/BooleanFilter"; +import { PerformerAgeCriterionOption } from "src/models/list-filter/scenes"; +import { SidebarDuplicateFilter } from "../List/Filters/DuplicateFilter"; +import { SidebarAgeFilter } 
from "../List/Filters/SidebarAgeFilter"; +import { SidebarDurationFilter } from "../List/Filters/SidebarDurationFilter"; import { FilteredSidebarHeader, useFilteredSidebarKeybinds, } from "../List/Filters/FilterSidebar"; -import { PatchContainerComponent } from "src/patch"; +import { PatchComponent, PatchContainerComponent } from "src/patch"; import { Pagination, PaginationIndex } from "../List/Pagination"; +import { Button } from "react-bootstrap"; +import useFocus from "src/utils/focus"; +import { useZoomKeybinds } from "../List/ZoomSlider"; +import { FilteredListToolbar } from "../List/FilteredListToolbar"; +import { FilterTags } from "../List/FilterTags"; +import { SidebarFolderFilter } from "../List/Filters/FolderFilter"; function renderMetadataByline(result: GQL.FindScenesQueryResult) { const duration = result?.data?.findScenes?.duration; @@ -82,33 +92,51 @@ function renderMetadataByline(result: GQL.FindScenesQueryResult) { function usePlayScene() { const history = useHistory(); + const { configuration: config } = useConfigurationContext(); + const cont = config?.interface.continuePlaylistDefault ?? false; + const autoPlay = config?.interface.autostartVideoOnPlaySelected ?? false; + const playScene = useCallback( - (queue: SceneQueue, sceneID: string, options: IPlaySceneOptions) => { - history.push(queue.makeLink(sceneID, options)); + (queue: SceneQueue, sceneID: string, options?: IPlaySceneOptions) => { + history.push( + queue.makeLink(sceneID, { autoPlay, continue: cont, ...options }) + ); }, - [history] + [history, cont, autoPlay] ); return playScene; } function usePlaySelected(selectedIds: Set) { - const { configuration: config } = useContext(ConfigurationContext); const playScene = usePlayScene(); const playSelected = useCallback(() => { // populate queue and go to first scene const sceneIDs = Array.from(selectedIds.values()); const queue = SceneQueue.fromSceneIDList(sceneIDs); - const autoPlay = config?.interface.autostartVideoOnPlaySelected ?? 
false; - playScene(queue, sceneIDs[0], { autoPlay }); - }, [selectedIds, config?.interface.autostartVideoOnPlaySelected, playScene]); + + playScene(queue, sceneIDs[0]); + }, [selectedIds, playScene]); return playSelected; } +function usePlayFirst() { + const playScene = usePlayScene(); + + const playFirst = useCallback( + (queue: SceneQueue, sceneID: string, index: number) => { + // populate queue and go to first scene + playScene(queue, sceneID, { sceneIndex: index }); + }, + [playScene] + ); + + return playFirst; +} + function usePlayRandom(filter: ListFilterModel, count: number) { - const { configuration: config } = useContext(ConfigurationContext); const playScene = usePlayScene(); const playRandom = useCallback(async () => { @@ -130,15 +158,9 @@ function usePlayRandom(filter: ListFilterModel, count: number) { if (scene) { // navigate to the image player page const queue = SceneQueue.fromListFilterModel(filterCopy); - const autoPlay = config?.interface.autostartVideoOnPlaySelected ?? 
false; - playScene(queue, scene.id, { sceneIndex: index, autoPlay }); + playScene(queue, scene.id, { sceneIndex: index }); } - }, [ - filter, - count, - config?.interface.autostartVideoOnPlaySelected, - playScene, - ]); + }, [filter, count, playScene]); return playRandom; } @@ -163,44 +185,65 @@ const SceneList: React.FC<{ selectedIds: Set; onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; fromGroupId?: string; -}> = ({ scenes, filter, selectedIds, onSelectChange, fromGroupId }) => { - const queue = useMemo(() => SceneQueue.fromListFilterModel(filter), [filter]); +}> = PatchComponent( + "SceneList", + ({ scenes, filter, selectedIds, onSelectChange, fromGroupId }) => { + const queue = useMemo( + () => SceneQueue.fromListFilterModel(filter), + [filter] + ); + + if (scenes.length === 0 && filter.displayMode !== DisplayMode.Tagger) { + return null; + } + + if (filter.displayMode === DisplayMode.Grid) { + return ( + + ); + } + if (filter.displayMode === DisplayMode.List) { + return ( + + ); + } + if (filter.displayMode === DisplayMode.Wall) { + return ( + + ); + } + if (filter.displayMode === DisplayMode.Tagger) { + return ( + + ); + } - if (scenes.length === 0) { return null; } - - if (filter.displayMode === DisplayMode.Grid) { - return ( - - ); - } - if (filter.displayMode === DisplayMode.List) { - return ( - - ); - } - if (filter.displayMode === DisplayMode.Wall) { - return ; - } - if (filter.displayMode === DisplayMode.Tagger) { - return ; - } - - return null; -}; +); const ScenesFilterSidebarSections = PatchContainerComponent( "FilteredSceneList.SidebarSections" @@ -209,50 +252,73 @@ const ScenesFilterSidebarSections = PatchContainerComponent( const SidebarContent: React.FC<{ filter: ListFilterModel; setFilter: (filter: ListFilterModel) => void; + filterHook?: (filter: ListFilterModel) => ListFilterModel; view?: View; sidebarOpen: boolean; onClose?: () => void; showEditFilter: (editingCriterion?: string) => void; -}> = ({ filter, 
setFilter, view, showEditFilter, sidebarOpen, onClose }) => { + count?: number; + focus?: ReturnType; +}> = ({ + filter, + setFilter, + filterHook, + view, + showEditFilter, + sidebarOpen, + onClose, + count, + focus, +}) => { + const showResultsId = + count !== undefined ? "actions.show_count_results" : "actions.show_results"; + + const hideStudios = view === View.StudioScenes; + return ( <> - } - data-type={StudiosCriterionOption.type} - option={StudiosCriterionOption} - filter={filter} - setFilter={setFilter} - /> + {!hideStudios && ( + + )} } - data-type={PerformersCriterionOption.type} - option={PerformersCriterionOption} filter={filter} setFilter={setFilter} + filterHook={filterHook} /> } - data-type={TagsCriterionOption.type} - option={TagsCriterionOption} filter={filter} setFilter={setFilter} + filterHook={filterHook} /> - } - data-type={RatingCriterionOption.type} - option={RatingCriterionOption} + + + } filter={filter} setFilter={setFilter} + sectionID="folder" + /> + } + data-type={HasMarkersCriterionOption.type} + option={HasMarkersCriterionOption} + filter={filter} + setFilter={setFilter} + sectionID="hasMarkers" /> } @@ -260,8 +326,28 @@ const SidebarContent: React.FC<{ option={OrganizedCriterionOption} filter={filter} setFilter={setFilter} + sectionID="organized" + /> + } + filter={filter} + setFilter={setFilter} + sectionID="duplicated" + /> + } + option={PerformerAgeCriterionOption} + filter={filter} + setFilter={setFilter} + sectionID="performer_age" /> + +
    + +
    ); }; @@ -274,256 +360,379 @@ interface IFilteredScenes { fromGroupId?: string; } -export const FilteredSceneList = (props: IFilteredScenes) => { - const intl = useIntl(); - const history = useHistory(); +export const FilteredSceneList = PatchComponent( + "FilteredSceneList", + (props: IFilteredScenes) => { + const intl = useIntl(); + const history = useHistory(); + const location = useLocation(); - const { filterHook, defaultSort, view, alterQuery, fromGroupId } = props; + const searchFocus = useFocus(); - // States - const { - showSidebar, - setShowSidebar, - loading: sidebarStateLoading, - } = useSidebarState(view); + const { filterHook, defaultSort, view, alterQuery, fromGroupId } = props; - const { filterState, queryResult, modalState, listSelect, showEditFilter } = - useFilteredItemList({ - filterStateProps: { - filterMode: GQL.FilterMode.Scenes, - defaultSort, - view, - useURL: alterQuery, - }, - queryResultProps: { - useResult: useFindScenes, - getCount: (r) => r.data?.findScenes.count ?? 0, - getItems: (r) => r.data?.findScenes.scenes ?? [], - filterHook, - }, + // States + const { + showSidebar, + setShowSidebar, + loading: sidebarStateLoading, + sectionOpen, + setSectionOpen, + } = useSidebarState(view); + + const { filterState, queryResult, modalState, listSelect, showEditFilter } = + useFilteredItemList({ + filterStateProps: { + filterMode: GQL.FilterMode.Scenes, + defaultSort, + view, + useURL: alterQuery, + }, + queryResultProps: { + useResult: useFindScenes, + getCount: (r) => r.data?.findScenes.count ?? 0, + getItems: (r) => r.data?.findScenes.scenes ?? 
[], + filterHook, + }, + }); + + const { filter, setFilter } = filterState; + + const { effectiveFilter, result, cachedResult, items, totalCount } = + queryResult; + + const { + selectedIds, + selectedItems, + onSelectChange, + onSelectAll, + onSelectNone, + onInvertSelection, + hasSelection, + } = listSelect; + + const { modal, showModal, closeModal } = modalState; + + // Utility hooks + const { setPage, removeCriterion, clearAllCriteria } = useFilterOperations({ + filter, + setFilter, }); - const { filter, setFilter, loading: filterLoading } = filterState; + useAddKeybinds(effectiveFilter, totalCount); + useFilteredSidebarKeybinds({ + showSidebar, + setShowSidebar, + }); - const { effectiveFilter, result, cachedResult, items, totalCount } = - queryResult; + const onCloseEditDelete = useCloseEditDelete({ + closeModal, + onSelectNone, + result, + }); - const { - selectedIds, - selectedItems, - onSelectChange, - onSelectNone, - hasSelection, - } = listSelect; + const onEdit = useCallback(() => { + showModal( + + ); + }, [showModal, selectedItems, onCloseEditDelete]); - const { modal, showModal, closeModal } = modalState; + const onDelete = useCallback(() => { + showModal( + + ); + }, [showModal, selectedItems, onCloseEditDelete]); - // Utility hooks - const { setPage, removeCriterion, clearAllCriteria } = useFilterOperations({ - filter, - setFilter, - }); + useEffect(() => { + Mousetrap.bind("e", () => { + if (hasSelection) { + onEdit?.(); + } + }); - useAddKeybinds(filter, totalCount); - useFilteredSidebarKeybinds({ - showSidebar, - setShowSidebar, - }); + Mousetrap.bind("d d", () => { + if (hasSelection) { + onDelete?.(); + } + }); - const onCloseEditDelete = useCloseEditDelete({ - closeModal, - onSelectNone, - result, - }); + return () => { + Mousetrap.unbind("e"); + Mousetrap.unbind("d d"); + }; + }, [onSelectAll, onSelectNone, hasSelection, onEdit, onDelete]); + useZoomKeybinds({ + zoomIndex: filter.zoomIndex, + onChangeZoom: (zoom) => 
setFilter(filter.setZoom(zoom)), + }); - const metadataByline = useMemo(() => { - if (cachedResult.loading) return ""; + const metadataByline = useMemo(() => { + if (cachedResult.loading) return null; - return renderMetadataByline(cachedResult) ?? ""; - }, [cachedResult]); + return renderMetadataByline(cachedResult) ?? null; + }, [cachedResult]); - const playSelected = usePlaySelected(selectedIds); - const playRandom = usePlayRandom(filter, totalCount); + const queue = useMemo( + () => SceneQueue.fromListFilterModel(filter), + [filter] + ); - function onExport(all: boolean) { - showModal( - closeModal()} + const playRandom = usePlayRandom(effectiveFilter, totalCount); + const playSelected = usePlaySelected(selectedIds); + const playFirst = usePlayFirst(); + + function onCreateNew() { + let queryParam = new URLSearchParams(location.search).get("q"); + let newPath = "/scenes/new"; + if (queryParam) { + newPath += "?q=" + encodeURIComponent(queryParam); + } + history.push(newPath); + } + + function onPlay() { + if (items.length === 0) { + return; + } + + // if there are selected items, play those + if (hasSelection) { + playSelected(); + return; + } + + // otherwise, play the first item in the list + const sceneID = items[0].id; + playFirst(queue, sceneID, 0); + } + + function onExport(all: boolean) { + showModal( + closeModal()} + /> + ); + } + + function onMerge() { + const selected = + selectedItems.map((s) => { + return { + id: s.id, + title: objectTitle(s), + }; + }) ?? 
[]; + showModal( + { + closeModal(); + if (mergedID) { + history.push(`/scenes/${mergedID}`); + } + }} + show + /> + ); + } + + const otherOperations = [ + { + text: intl.formatMessage({ id: "actions.play" }), + onClick: () => onPlay(), + isDisplayed: () => items.length > 0, + className: "play-item", + }, + { + text: intl.formatMessage( + { id: "actions.create_entity" }, + { entityType: intl.formatMessage({ id: "scene" }) } + ), + onClick: () => onCreateNew(), + isDisplayed: () => !hasSelection, + className: "create-new-item", + }, + { + text: intl.formatMessage({ id: "actions.select_all" }), + onClick: () => onSelectAll(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.select_none" }), + onClick: () => onSelectNone(), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.invert_selection" }), + onClick: () => onInvertSelection(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.play_random" }), + onClick: playRandom, + isDisplayed: () => totalCount > 1, + }, + { + text: `${intl.formatMessage({ id: "actions.generate" })}…`, + onClick: () => + showModal( + closeModal()} + /> + ), + isDisplayed: () => hasSelection, + }, + { + text: `${intl.formatMessage({ id: "actions.identify" })}…`, + onClick: () => + showModal( + closeModal()} + /> + ), + isDisplayed: () => hasSelection, + }, + { + text: `${intl.formatMessage({ id: "actions.merge" })}…`, + onClick: () => onMerge(), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.export" }), + onClick: () => onExport(false), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.export_all" }), + onClick: () => onExport(true), + }, + ]; + + // render + if (sidebarStateLoading) return null; + + const operations = ( + ); - } - function onMerge() { - const selected = - selectedItems.map((s) => { - return { - id: s.id, - title: objectTitle(s), - }; - }) ?? 
[]; - showModal( - { - closeModal(); - if (mergedID) { - history.push(`/scenes/${mergedID}`); - } - }} - show - /> - ); - } + return ( + +
    + {modal} - const otherOperations: IListFilterOperation[] = [ - { - text: intl.formatMessage({ id: "actions.play_selected" }), - onClick: playSelected, - isDisplayed: () => hasSelection, - icon: faPlay, - }, - { - text: intl.formatMessage({ id: "actions.play_random" }), - onClick: playRandom, - }, - { - text: `${intl.formatMessage({ id: "actions.generate" })}…`, - onClick: () => - showModal( - closeModal()} - /> - ), - isDisplayed: () => hasSelection, - }, - { - text: `${intl.formatMessage({ id: "actions.identify" })}…`, - onClick: () => - showModal( - closeModal()} - /> - ), - isDisplayed: () => hasSelection, - }, - { - text: `${intl.formatMessage({ id: "actions.merge" })}…`, - onClick: () => onMerge(), - isDisplayed: () => hasSelection, - }, - { - text: intl.formatMessage({ id: "actions.export" }), - onClick: () => onExport(false), - isDisplayed: () => hasSelection, - }, - { - text: intl.formatMessage({ id: "actions.export_all" }), - onClick: () => onExport(true), - }, - ]; - - // render - if (filterLoading || sidebarStateLoading) return null; - - return ( - -
    - {modal} - - - setShowSidebar(false)}> - setShowSidebar(false)} - /> - -
    - - showModal( - - ) - } - onDelete={() => { - showModal( - - ); - }} - operations={otherOperations} - onToggleSidebar={() => setShowSidebar((v) => !v)} - zoomable - /> - - showEditFilter(c.criterionOption.type)} - onRemoveCriterion={removeCriterion} - onRemoveAll={() => clearAllCriteria()} - /> - - - - - - - - {totalCount > filter.itemsPerPage && ( -
    - + + setShowSidebar(false)}> + setShowSidebar(false)} + count={cachedResult.loading ? undefined : totalCount} + focus={searchFocus} /> -
    - )} -
    -
    -
    -
    - ); -}; + + setShowSidebar(!showSidebar)} + > + + + + showEditFilter(c.criterionOption.type) + } + onRemoveCriterion={removeCriterion} + onRemoveAll={clearAllCriteria} + /> + +
    + setFilter(filter.changePage(page))} + /> + +
    + + + + + + {totalCount > filter.itemsPerPage && ( +
    +
    + +
    +
    + )} +
    + + +
    +
    + ); + } +); export default FilteredSceneList; diff --git a/ui/v2.5/src/components/Scenes/SceneMarkerCard.tsx b/ui/v2.5/src/components/Scenes/SceneMarkerCard.tsx index 644decf42..96961d68b 100644 --- a/ui/v2.5/src/components/Scenes/SceneMarkerCard.tsx +++ b/ui/v2.5/src/components/Scenes/SceneMarkerCard.tsx @@ -1,4 +1,4 @@ -import React, { useMemo } from "react"; +import { useMemo } from "react"; import { Button, ButtonGroup } from "react-bootstrap"; import * as GQL from "src/core/generated-graphql"; import { Icon } from "../Shared/Icon"; @@ -6,12 +6,13 @@ import { TagLink } from "../Shared/TagLink"; import { HoverPopover } from "../Shared/HoverPopover"; import NavUtils from "src/utils/navigation"; import TextUtils from "src/utils/text"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { GridCard } from "../Shared/GridCard/GridCard"; import { faTag } from "@fortawesome/free-solid-svg-icons"; import { markerTitle } from "src/core/markers"; import { Link } from "react-router-dom"; import { objectTitle } from "src/core/files"; +import { PatchComponent } from "src/patch"; import { PerformerPopoverButton } from "../Shared/PerformerPopoverButton"; import { ScenePreview } from "./SceneCard"; import { TruncatedText } from "../Shared/TruncatedText"; @@ -28,154 +29,166 @@ interface ISceneMarkerCardProps { onSelectedChanged?: (selected: boolean, shiftKey: boolean) => void; } -const SceneMarkerCardPopovers = (props: ISceneMarkerCardProps) => { - function maybeRenderPerformerPopoverButton() { - if (props.marker.scene.performers.length <= 0) return; +const SceneMarkerCardPopovers = PatchComponent( + "SceneMarkerCard.Popovers", + (props: ISceneMarkerCardProps) => { + function maybeRenderPerformerPopoverButton() { + if (props.marker.scene.performers.length <= 0) return; - return ( - - ); - } - - function renderTagPopoverButton() { - const popoverContent = [ - , - ]; - - props.marker.tags.map((tag) => - 
popoverContent.push( - - ) - ); - - return ( - - - - ); - } - - function renderPopoverButtonGroup() { - if (!props.compact) { return ( - <> -
    - - {maybeRenderPerformerPopoverButton()} - {renderTagPopoverButton()} - - + ); } + + function renderTagPopoverButton() { + const popoverContent = [ + , + ]; + + props.marker.tags.map((tag) => + popoverContent.push( + + ) + ); + + return ( + + + + ); + } + + function renderPopoverButtonGroup() { + if (!props.compact) { + return ( + <> +
    + + {maybeRenderPerformerPopoverButton()} + {renderTagPopoverButton()} + + + ); + } + } + + return <>{renderPopoverButtonGroup()}; } +); - return <>{renderPopoverButtonGroup()}; -}; - -const SceneMarkerCardDetails = (props: ISceneMarkerCardProps) => { - return ( -
    - - {TextUtils.formatTimestampRange( - props.marker.seconds, - props.marker.end_seconds ?? undefined - )} - - - {objectTitle(props.marker.scene)} - - } - /> -
    - ); -}; - -const SceneMarkerCardImage = (props: ISceneMarkerCardProps) => { - const { configuration } = React.useContext(ConfigurationContext); - - const file = useMemo( - () => - props.marker.scene.files.length > 0 - ? props.marker.scene.files[0] - : undefined, - [props.marker.scene] - ); - - function isPortrait() { - const width = file?.width ? file.width : 0; - const height = file?.height ? file.height : 0; - return height > width; - } - - function maybeRenderSceneSpecsOverlay() { +const SceneMarkerCardDetails = PatchComponent( + "SceneMarkerCard.Details", + (props: ISceneMarkerCardProps) => { return ( -
    - {props.marker.end_seconds && ( - - {TextUtils.secondsToTimestamp( - props.marker.end_seconds - props.marker.seconds - )} - - )} +
    + + {TextUtils.formatTimestampRange( + props.marker.seconds, + props.marker.end_seconds ?? undefined + )} + + + {objectTitle(props.marker.scene)} + + } + />
    ); } +); - return ( - <> - - {maybeRenderSceneSpecsOverlay()} - - ); -}; +const SceneMarkerCardImage = PatchComponent( + "SceneMarkerCard.Image", + (props: ISceneMarkerCardProps) => { + const { configuration } = useConfigurationContext(); -export const SceneMarkerCard = (props: ISceneMarkerCardProps) => { - function zoomIndex() { - if (!props.compact && props.zoomIndex !== undefined) { - return `zoom-${props.zoomIndex}`; + const file = useMemo( + () => + props.marker.scene.files.length > 0 + ? props.marker.scene.files[0] + : undefined, + [props.marker.scene] + ); + + function isPortrait() { + const width = file?.width ? file.width : 0; + const height = file?.height ? file.height : 0; + return height > width; } - return ""; - } + function maybeRenderSceneSpecsOverlay() { + return ( +
    + {props.marker.end_seconds && ( + + {TextUtils.secondsToTimestamp( + props.marker.end_seconds - props.marker.seconds + )} + + )} +
    + ); + } - return ( - } - details={} - popovers={} - selected={props.selected} - selecting={props.selecting} - onSelectedChanged={props.onSelectedChanged} - /> - ); -}; + return ( + <> + + {maybeRenderSceneSpecsOverlay()} + + ); + } +); + +export const SceneMarkerCard = PatchComponent( + "SceneMarkerCard", + (props: ISceneMarkerCardProps) => { + function zoomIndex() { + if (!props.compact && props.zoomIndex !== undefined) { + return `zoom-${props.zoomIndex}`; + } + + return ""; + } + + return ( + } + details={} + popovers={} + selected={props.selected} + selecting={props.selecting} + onSelectedChanged={props.onSelectedChanged} + /> + ); + } +); diff --git a/ui/v2.5/src/components/Scenes/SceneMarkerCardGrid.tsx b/ui/v2.5/src/components/Scenes/SceneMarkerCardGrid.tsx new file mode 100644 index 000000000..ad869918b --- /dev/null +++ b/ui/v2.5/src/components/Scenes/SceneMarkerCardGrid.tsx @@ -0,0 +1,46 @@ +import React from "react"; +import * as GQL from "src/core/generated-graphql"; +import { SceneMarkerCard } from "./SceneMarkerCard"; +import { + useCardWidth, + useContainerDimensions, +} from "../Shared/GridCard/GridCard"; +import { PatchComponent } from "src/patch"; + +interface ISceneMarkerCardGrid { + markers: GQL.SceneMarkerDataFragment[]; + selectedIds: Set; + zoomIndex: number; + onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; +} + +const zoomWidths = [240, 340, 480, 640]; + +export const SceneMarkerCardGrid: React.FC = + PatchComponent( + "SceneMarkerCardGrid", + ({ markers, selectedIds, zoomIndex, onSelectChange }) => { + const [componentRef, { width: containerWidth }] = + useContainerDimensions(); + const cardWidth = useCardWidth(containerWidth, zoomIndex, zoomWidths); + + return ( +
    + {markers.map((marker, index) => ( + 0} + selected={selectedIds.has(marker.id)} + onSelectedChanged={(selected: boolean, shiftKey: boolean) => + onSelectChange(marker.id, selected, shiftKey) + } + /> + ))} +
    + ); + } + ); diff --git a/ui/v2.5/src/components/Scenes/SceneMarkerCardsGrid.tsx b/ui/v2.5/src/components/Scenes/SceneMarkerCardsGrid.tsx deleted file mode 100644 index 9f01fe6da..000000000 --- a/ui/v2.5/src/components/Scenes/SceneMarkerCardsGrid.tsx +++ /dev/null @@ -1,45 +0,0 @@ -import React from "react"; -import * as GQL from "src/core/generated-graphql"; -import { SceneMarkerCard } from "./SceneMarkerCard"; -import { - useCardWidth, - useContainerDimensions, -} from "../Shared/GridCard/GridCard"; - -interface ISceneMarkerCardsGrid { - markers: GQL.SceneMarkerDataFragment[]; - selectedIds: Set; - zoomIndex: number; - onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; -} - -const zoomWidths = [240, 340, 480, 640]; - -export const SceneMarkerCardsGrid: React.FC = ({ - markers, - selectedIds, - zoomIndex, - onSelectChange, -}) => { - const [componentRef, { width: containerWidth }] = useContainerDimensions(); - const cardWidth = useCardWidth(containerWidth, zoomIndex, zoomWidths); - - return ( -
    - {markers.map((marker, index) => ( - 0} - selected={selectedIds.has(marker.id)} - onSelectedChanged={(selected: boolean, shiftKey: boolean) => - onSelectChange(marker.id, selected, shiftKey) - } - /> - ))} -
    - ); -}; diff --git a/ui/v2.5/src/components/Scenes/SceneMarkerList.tsx b/ui/v2.5/src/components/Scenes/SceneMarkerList.tsx index f28ac718b..6287b25ae 100644 --- a/ui/v2.5/src/components/Scenes/SceneMarkerList.tsx +++ b/ui/v2.5/src/components/Scenes/SceneMarkerList.tsx @@ -1,7 +1,7 @@ import cloneDeep from "lodash-es/cloneDeep"; -import React from "react"; +import React, { useCallback, useEffect } from "react"; import { useHistory } from "react-router-dom"; -import { useIntl } from "react-intl"; +import { FormattedMessage, useIntl } from "react-intl"; import Mousetrap from "mousetrap"; import * as GQL from "src/core/generated-graphql"; import { @@ -9,141 +9,463 @@ import { useFindSceneMarkers, } from "src/core/StashService"; import NavUtils from "src/utils/navigation"; -import { ItemList, ItemListContext } from "../List/ItemList"; +import { useFilteredItemList } from "../List/ItemList"; import { ListFilterModel } from "src/models/list-filter/filter"; import { DisplayMode } from "src/models/list-filter/types"; import { MarkerWallPanel } from "./SceneMarkerWallPanel"; import { View } from "../List/views"; -import { SceneMarkerCardsGrid } from "./SceneMarkerCardsGrid"; +import { SceneMarkerCardGrid } from "./SceneMarkerCardGrid"; import { DeleteSceneMarkersDialog } from "./DeleteSceneMarkersDialog"; +import { EditSceneMarkersDialog } from "./EditSceneMarkersDialog"; +import { PatchComponent, PatchContainerComponent } from "src/patch"; +import { + FilteredListToolbar, + IItemListOperation, +} from "../List/FilteredListToolbar"; +import { + Sidebar, + SidebarPane, + SidebarPaneContent, + SidebarStateContext, + useSidebarState, +} from "../Shared/Sidebar"; +import { useCloseEditDelete, useFilterOperations } from "../List/util"; +import { + FilteredSidebarHeader, + useFilteredSidebarKeybinds, +} from "../List/Filters/FilterSidebar"; +import { useZoomKeybinds } from "../List/ZoomSlider"; +import { + IListFilterOperation, + ListOperations, +} from 
"../List/ListOperationButtons"; +import cx from "classnames"; +import { FilterTags } from "../List/FilterTags"; +import { Pagination, PaginationIndex } from "../List/Pagination"; +import { LoadedContent } from "../List/PagedList"; +import useFocus from "src/utils/focus"; +import { SidebarPerformersFilter } from "../List/Filters/PerformersFilter"; +import { SidebarTagsFilter } from "../List/Filters/TagsFilter"; +import { Button } from "react-bootstrap"; -function getItems(result: GQL.FindSceneMarkersQueryResult) { - return result?.data?.findSceneMarkers?.scene_markers ?? []; -} - -function getCount(result: GQL.FindSceneMarkersQueryResult) { - return result?.data?.findSceneMarkers?.count ?? 0; -} - -interface ISceneMarkerList { - filterHook?: (filter: ListFilterModel) => ListFilterModel; - view?: View; - alterQuery?: boolean; - defaultSort?: string; -} - -export const SceneMarkerList: React.FC = ({ - filterHook, - view, - alterQuery, -}) => { - const intl = useIntl(); - const history = useHistory(); - - const filterMode = GQL.FilterMode.SceneMarkers; - - const otherOperations = [ - { - text: intl.formatMessage({ id: "actions.play_random" }), - onClick: playRandom, - }, - ]; - - function addKeybinds( - result: GQL.FindSceneMarkersQueryResult, - filter: ListFilterModel - ) { - Mousetrap.bind("p r", () => { - playRandom(result, filter); - }); - - return () => { - Mousetrap.unbind("p r"); - }; - } - - async function playRandom( - result: GQL.FindSceneMarkersQueryResult, - filter: ListFilterModel - ) { - // query for a random scene - if (result.data?.findSceneMarkers) { - const { count } = result.data.findSceneMarkers; - - const index = Math.floor(Math.random() * count); - const filterCopy = cloneDeep(filter); - filterCopy.itemsPerPage = 1; - filterCopy.currentPage = index + 1; - const singleResult = await queryFindSceneMarkers(filterCopy); - if (singleResult.data.findSceneMarkers.scene_markers.length === 1) { - // navigate to the scene player page - const url = 
NavUtils.makeSceneMarkerUrl( - singleResult.data.findSceneMarkers.scene_markers[0] - ); - history.push(url); - } +const SceneMarkerList: React.FC<{ + markers: GQL.SceneMarkerDataFragment[]; + filter: ListFilterModel; + selectedIds: Set; + onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; +}> = PatchComponent( + "SceneMarkerList", + ({ markers, filter, selectedIds, onSelectChange }) => { + if (markers.length === 0) { + return null; } - } - - function renderContent( - result: GQL.FindSceneMarkersQueryResult, - filter: ListFilterModel, - selectedIds: Set, - onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void - ) { - if (!result.data?.findSceneMarkers) return; if (filter.displayMode === DisplayMode.Wall) { return ( - - ); - } - - if (filter.displayMode === DisplayMode.Grid) { - return ( - ); } - } - function renderDeleteDialog( - selectedSceneMarkers: GQL.SceneMarkerDataFragment[], - onClose: (confirmed: boolean) => void - ) { - return ( - - ); + if (filter.displayMode === DisplayMode.Grid) { + return ( + + ); + } + + return null; } +); + +function usePlayRandom(filter: ListFilterModel, count: number) { + const history = useHistory(); + + const playRandom = useCallback(async () => { + // query for a random scene + if (count === 0) { + return; + } + + const pages = Math.ceil(count / filter.itemsPerPage); + const page = Math.floor(Math.random() * pages) + 1; + + const indexMax = Math.min(filter.itemsPerPage, count); + const index = Math.floor(Math.random() * indexMax); + const filterCopy = cloneDeep(filter); + filterCopy.currentPage = page; + filterCopy.sortBy = "random"; + const queryResults = await queryFindSceneMarkers(filterCopy); + const marker = queryResults.data.findSceneMarkers.scene_markers[index]; + if (marker) { + // navigate to the scene player page + const url = NavUtils.makeSceneMarkerUrl(marker); + history.push(url); + } + }, [filter, count, history]); + + return playRandom; +} + +function 
useAddKeybinds(filter: ListFilterModel, count: number) { + const playRandom = usePlayRandom(filter, count); + + useEffect(() => { + Mousetrap.bind("p r", () => { + playRandom(); + }); + + return () => { + Mousetrap.unbind("p r"); + }; + }, [playRandom]); +} + +const ScenesFilterSidebarSections = PatchContainerComponent( + "FilteredSceneMarkerList.SidebarSections" +); + +const SidebarContent: React.FC<{ + filter: ListFilterModel; + setFilter: (filter: ListFilterModel) => void; + filterHook?: (filter: ListFilterModel) => ListFilterModel; + view?: View; + sidebarOpen: boolean; + onClose?: () => void; + showEditFilter: (editingCriterion?: string) => void; + count?: number; + focus?: ReturnType; +}> = ({ + filter, + setFilter, + filterHook, + view, + showEditFilter, + sidebarOpen, + onClose, + count, + focus, +}) => { + const showResultsId = + count !== undefined ? "actions.show_count_results" : "actions.show_results"; return ( - - + - + + + + + + +
    + +
    + ); }; -export default SceneMarkerList; +interface ISceneMarkerList { + filterHook?: (filter: ListFilterModel) => ListFilterModel; + view?: View; + alterQuery?: boolean; + defaultSort?: string; + extraOperations?: IItemListOperation[]; +} + +export const FilteredSceneMarkerList = PatchComponent( + "FilteredSceneMarkerList", + (props: ISceneMarkerList) => { + const intl = useIntl(); + + const searchFocus = useFocus(); + + const { + filterHook, + defaultSort, + view, + alterQuery, + extraOperations = [], + } = props; + + // States + const { + showSidebar, + setShowSidebar, + loading: sidebarStateLoading, + sectionOpen, + setSectionOpen, + } = useSidebarState(view); + + const { filterState, queryResult, modalState, listSelect, showEditFilter } = + useFilteredItemList({ + filterStateProps: { + filterMode: GQL.FilterMode.SceneMarkers, + defaultSort, + view, + useURL: alterQuery, + }, + queryResultProps: { + useResult: useFindSceneMarkers, + getCount: (r) => r.data?.findSceneMarkers.count ?? 0, + getItems: (r) => r.data?.findSceneMarkers.scene_markers ?? 
[], + filterHook, + }, + }); + + const { filter, setFilter } = filterState; + + const { effectiveFilter, result, cachedResult, items, totalCount } = + queryResult; + + const { + selectedIds, + selectedItems, + onSelectChange, + onSelectAll, + onSelectNone, + onInvertSelection, + hasSelection, + } = listSelect; + + const { modal, showModal, closeModal } = modalState; + + // Utility hooks + const { setPage, removeCriterion, clearAllCriteria } = useFilterOperations({ + filter, + setFilter, + }); + + useAddKeybinds(filter, totalCount); + useFilteredSidebarKeybinds({ + showSidebar, + setShowSidebar, + }); + + const onCloseEditDelete = useCloseEditDelete({ + closeModal, + onSelectNone, + result, + }); + + const onEdit = useCallback(() => { + showModal( + + ); + }, [showModal, selectedItems, onCloseEditDelete]); + + const onDelete = useCallback(() => { + showModal( + + ); + }, [showModal, selectedItems, onCloseEditDelete]); + + useEffect(() => { + Mousetrap.bind("e", () => { + if (hasSelection) { + onEdit?.(); + } + }); + + Mousetrap.bind("d d", () => { + if (hasSelection) { + onDelete?.(); + } + }); + + return () => { + Mousetrap.unbind("e"); + Mousetrap.unbind("d d"); + }; + }, [onSelectAll, onSelectNone, hasSelection, onEdit, onDelete]); + + useZoomKeybinds({ + zoomIndex: filter.zoomIndex, + onChangeZoom: (zoom) => setFilter(filter.setZoom(zoom)), + }); + + const playRandom = usePlayRandom(effectiveFilter, totalCount); + + const convertedExtraOperations: IListFilterOperation[] = + extraOperations.map((o) => ({ + ...o, + isDisplayed: o.isDisplayed + ? 
() => o.isDisplayed!(result, filter, selectedIds) + : undefined, + onClick: () => { + o.onClick(result, filter, selectedIds); + }, + })); + + const otherOperations = [ + ...convertedExtraOperations, + { + text: intl.formatMessage({ id: "actions.select_all" }), + onClick: () => onSelectAll(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.select_none" }), + onClick: () => onSelectNone(), + isDisplayed: () => hasSelection, + }, + { + text: intl.formatMessage({ id: "actions.invert_selection" }), + onClick: () => onInvertSelection(), + isDisplayed: () => totalCount > 0, + }, + { + text: intl.formatMessage({ id: "actions.play_random" }), + onClick: playRandom, + isDisplayed: () => totalCount > 1, + }, + // { + // text: `${intl.formatMessage({ id: "actions.generate" })}…`, + // onClick: () => + // showModal( + // closeModal()} + // /> + // ), + // isDisplayed: () => hasSelection, + // }, + ]; + + // render + if (sidebarStateLoading) return null; + + const operations = ( + + ); + + return ( +
    + {modal} + + + + setShowSidebar(false)}> + setShowSidebar(false)} + count={cachedResult.loading ? undefined : totalCount} + focus={searchFocus} + /> + + setShowSidebar(!showSidebar)} + > + + + showEditFilter(c.criterionOption.type)} + onRemoveCriterion={removeCriterion} + onRemoveAll={clearAllCriteria} + /> + +
    + setFilter(filter.changePage(page))} + /> + +
    + + + + + + {totalCount > filter.itemsPerPage && ( +
    +
    + +
    +
    + )} +
    +
    +
    +
    + ); + } +); + +export default FilteredSceneMarkerList; diff --git a/ui/v2.5/src/components/Scenes/SceneMarkerRecommendationRow.tsx b/ui/v2.5/src/components/Scenes/SceneMarkerRecommendationRow.tsx new file mode 100644 index 000000000..891801f2d --- /dev/null +++ b/ui/v2.5/src/components/Scenes/SceneMarkerRecommendationRow.tsx @@ -0,0 +1,48 @@ +import React from "react"; +import { useFindSceneMarkers } from "src/core/StashService"; +import { ListFilterModel } from "src/models/list-filter/filter"; +import { SceneMarkerCard } from "./SceneMarkerCard"; +import { PatchComponent } from "src/patch"; +import { FilteredRecommendationRow } from "../FrontPage/FilteredRecommendationRow"; + +interface IProps { + isTouch: boolean; + filter: ListFilterModel; + header: string; +} + +export const SceneMarkerRecommendationRow: React.FC = PatchComponent( + "SceneMarkerRecommendationRow", + (props) => { + const result = useFindSceneMarkers(props.filter); + const count = result.data?.findSceneMarkers.count ?? 0; + + return ( + + {result.loading + ? [...Array(props.filter.itemsPerPage)].map((i) => ( +
    + )) + : result.data?.findSceneMarkers.scene_markers.map((marker, index) => ( + + ))} +
    + ); + } +); diff --git a/ui/v2.5/src/components/Scenes/SceneMarkerWallPanel.tsx b/ui/v2.5/src/components/Scenes/SceneMarkerWallPanel.tsx index f240b36e6..863078c4e 100644 --- a/ui/v2.5/src/components/Scenes/SceneMarkerWallPanel.tsx +++ b/ui/v2.5/src/components/Scenes/SceneMarkerWallPanel.tsx @@ -1,21 +1,17 @@ -import React, { - useCallback, - useContext, - useEffect, - useMemo, - useState, -} from "react"; +import React, { useCallback, useEffect, useMemo, useState } from "react"; +import { Form } from "react-bootstrap"; import * as GQL from "src/core/generated-graphql"; import Gallery, { GalleryI, PhotoProps, RenderImageProps, } from "react-photo-gallery"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { objectTitle } from "src/core/files"; import { Link, useHistory } from "react-router-dom"; import { TruncatedText } from "../Shared/TruncatedText"; import TextUtils from "src/utils/text"; +import { useDragMoveSelect } from "../Shared/GridCard/dragMoveSelect"; import cx from "classnames"; import NavUtils from "src/utils/navigation"; import { markerTitle } from "src/core/markers"; @@ -39,15 +35,32 @@ interface IMarkerPhoto { onError?: (photo: PhotoProps) => void; } -export const MarkerWallItem: React.FC> = ( - props: RenderImageProps -) => { - const { configuration } = useContext(ConfigurationContext); +interface IExtraProps { + maxHeight: number; + selected?: boolean; + onSelectedChanged?: (selected: boolean, shiftKey: boolean) => void; + selecting?: boolean; +} + +export const MarkerWallItem: React.FC< + RenderImageProps & IExtraProps +> = (props: RenderImageProps & IExtraProps) => { + const { dragProps } = useDragMoveSelect({ + selecting: props.selecting || false, + selected: props.selected || false, + onSelectedChanged: props.onSelectedChanged, + }); + + const { configuration } = useConfigurationContext(); const playSound = configuration?.interface.soundOnPreview ?? 
false; const showTitle = configuration?.interface.wallShowTitle ?? false; const [active, setActive] = useState(false); + const height = Math.min(props.maxHeight, props.photo.height); + const zoomFactor = height / props.photo.height; + const width = props.photo.width * zoomFactor; + type style = Record; var divStyle: style = { margin: props.margin, @@ -61,6 +74,12 @@ export const MarkerWallItem: React.FC> = ( } var handleClick = function handleClick(event: React.MouseEvent) { + if (props.selecting && props.onSelectedChanged) { + props.onSelectedChanged(!props.selected, event.shiftKey); + event.preventDefault(); + event.stopPropagation(); + return; + } if (props.onClick) { props.onClick(event, { index: props.index }); } @@ -73,25 +92,42 @@ export const MarkerWallItem: React.FC> = ( const title = wallItemTitle(marker); const tagNames = marker.tags.map((p) => p.name); + let shiftKey = false; + return (
    + {props.onSelectedChanged && ( + props.onSelectedChanged!(!props.selected, shiftKey)} + onClick={(event: React.MouseEvent) => { + shiftKey = event.shiftKey; + event.stopPropagation(); + }} + /> + )} setActive(true)} onMouseLeave={() => setActive(false)} @@ -120,6 +156,10 @@ export const MarkerWallItem: React.FC> = ( interface IMarkerWallProps { markers: GQL.SceneMarkerDataFragment[]; + zoomIndex: number; + selectedIds?: Set; + onSelectChange?: (id: string, selected: boolean, shiftKey: boolean) => void; + selecting?: boolean; } // HACK: typescript doesn't allow Gallery to accept a parameter for some reason @@ -152,11 +192,24 @@ function getDimensions(file?: IFile) { }; } -const defaultTargetRowHeight = 250; +const breakpointZoomHeights = [ + { minWidth: 576, heights: [100, 120, 240, 360] }, + { minWidth: 768, heights: [120, 160, 240, 480] }, + { minWidth: 1200, heights: [120, 160, 240, 300] }, + { minWidth: 1400, heights: [160, 240, 300, 480] }, +]; -const MarkerWall: React.FC = ({ markers }) => { +const MarkerWall: React.FC = ({ + markers, + zoomIndex, + selectedIds, + onSelectChange, + selecting, +}) => { const history = useHistory(); + const containerRef = React.useRef(null); + const margin = 3; const direction = "row"; @@ -202,12 +255,50 @@ const MarkerWall: React.FC = ({ markers }) => { return Math.round(columnCount); } - const renderImage = useCallback((props: RenderImageProps) => { - return ; - }, []); + const targetRowHeight = useCallback( + (containerWidth: number) => { + let zoomHeight = 280; + breakpointZoomHeights.forEach((e) => { + if (containerWidth >= e.minWidth) { + zoomHeight = e.heights[zoomIndex]; + } + }); + return zoomHeight; + }, + [zoomIndex] + ); + + // set the max height as a factor of the targetRowHeight + // this allows some images to be taller than the target row height + // but prevents images from becoming too tall when there is a small number of items + const maxHeightFactor = 1.3; + + const renderImage = useCallback( + (props: 
RenderImageProps) => { + const markerId = props.photo.marker.id; + return ( + + onSelectChange(markerId, selected, shiftKey) + : undefined + } + selecting={selecting} + /> + ); + }, + [targetRowHeight, selectedIds, onSelectChange, selecting] + ); return ( -
    +
    {photos.length ? ( = ({ markers }) => { margin={margin} direction={direction} columns={columns} - targetRowHeight={defaultTargetRowHeight} + targetRowHeight={targetRowHeight} /> ) : null}
    @@ -225,10 +316,25 @@ const MarkerWall: React.FC = ({ markers }) => { interface IMarkerWallPanelProps { markers: GQL.SceneMarkerDataFragment[]; + zoomIndex: number; + selectedIds?: Set; + onSelectChange?: (id: string, selected: boolean, shiftKey: boolean) => void; } export const MarkerWallPanel: React.FC = ({ markers, + zoomIndex, + selectedIds, + onSelectChange, }) => { - return ; + const selecting = !!selectedIds && selectedIds.size > 0; + return ( + + ); }; diff --git a/ui/v2.5/src/components/Scenes/SceneMergeDialog.tsx b/ui/v2.5/src/components/Scenes/SceneMergeDialog.tsx index 52b3ea67c..273de1f09 100644 --- a/ui/v2.5/src/components/Scenes/SceneMergeDialog.tsx +++ b/ui/v2.5/src/components/Scenes/SceneMergeDialog.tsx @@ -3,27 +3,32 @@ import React, { useEffect, useMemo, useState } from "react"; import * as GQL from "src/core/generated-graphql"; import { Icon } from "../Shared/Icon"; import { LoadingIndicator } from "../Shared/LoadingIndicator"; -import { StringListSelect, GallerySelect } from "../Shared/Select"; +import { GallerySelect } from "../Shared/Select"; import * as FormUtils from "src/utils/form"; import ImageUtils from "src/utils/image"; import TextUtils from "src/utils/text"; -import { mutateSceneMerge, queryFindScenesByID } from "src/core/StashService"; +import { + mutateSceneMerge, + queryFindFullScenesByID, +} from "src/core/StashService"; import { FormattedMessage, useIntl } from "react-intl"; import { useToast } from "src/hooks/Toast"; import { faExchangeAlt, faSignInAlt } from "@fortawesome/free-solid-svg-icons"; import { - ScrapeDialog, ScrapeDialogRow, + ScrapedCustomFieldRows, ScrapedImageRow, ScrapedInputGroupRow, ScrapedStringListRow, ScrapedTextAreaRow, -} from "../Shared/ScrapeDialog/ScrapeDialog"; +} from "../Shared/ScrapeDialog/ScrapeDialogRow"; +import { ScrapeDialog } from "../Shared/ScrapeDialog/ScrapeDialog"; import { clone, uniq } from "lodash-es"; import { RatingSystem } from "src/components/Shared/Rating/RatingSystem"; import 
{ ModalComponent } from "../Shared/Modal"; -import { IHasStoredID, sortStoredIdObjects } from "src/utils/data"; +import { sortStoredIdObjects, uniqIDStoredIDs } from "src/utils/data"; import { + CustomFieldScrapeResults, ObjectListScrapeResult, ScrapeResult, ZeroableScrapeResult, @@ -36,14 +41,7 @@ import { ScrapedTagsRow, } from "../Shared/ScrapeDialog/ScrapedObjectsRow"; import { Scene, SceneSelect } from "src/components/Scenes/SceneSelect"; - -interface IStashIDsField { - values: GQL.StashId[]; -} - -const StashIDsField: React.FC = ({ values }) => { - return v.stash_id)} />; -}; +import { StashIDsField } from "../Shared/StashID"; type MergeOptions = { values: GQL.SceneUpdateInput; @@ -52,8 +50,8 @@ type MergeOptions = { }; interface ISceneMergeDetailsProps { - sources: GQL.SlimSceneDataFragment[]; - dest: GQL.SlimSceneDataFragment; + sources: GQL.SceneDataFragment[]; + dest: GQL.SceneDataFragment; onClose: (options?: MergeOptions) => void; } @@ -127,12 +125,6 @@ const SceneMergeDetails: React.FC = ({ return ret; } - function uniqIDStoredIDs(objs: T[]) { - return objs.filter((o, i) => { - return objs.findIndex((oo) => oo.stored_id === o.stored_id) === i; - }); - } - const [performers, setPerformers] = useState< ObjectListScrapeResult >( @@ -173,6 +165,10 @@ const SceneMergeDetails: React.FC = ({ new ScrapeResult(dest.paths.screenshot) ); + const [customFields, setCustomFields] = useState( + new Map() + ); + // calculate the values for everything // uses the first set value for single value fields, and combines all useEffect(() => { @@ -206,13 +202,7 @@ const SceneMergeDetails: React.FC = ({ setCode( new ScrapeResult(dest.code, sources.find((s) => s.code)?.code, !dest.code) ); - setURL( - new ScrapeResult( - dest.urls, - sources.find((s) => s.urls)?.urls, - !dest.urls?.length - ) - ); + setURL(new ScrapeResult(dest.urls, uniq(all.map((s) => s.urls).flat()))); setDate( new ScrapeResult(dest.date, sources.find((s) => s.date)?.date, !dest.date) ); @@ -311,33 +301,68 
@@ const SceneMergeDetails: React.FC = ({ .filter((s, index, a) => { // remove entries with duplicate endpoints return index === a.findIndex((ss) => ss.endpoint === s.endpoint); - }), - !dest.stash_ids.length + }) + ) + ); + + const customFieldNames = new Set( + Object.keys(dest.custom_fields ?? {}) + ); + + for (const s of sources) { + for (const n of Object.keys(s.custom_fields ?? {})) { + customFieldNames.add(n); + } + } + + setCustomFields( + new Map( + Array.from(customFieldNames) + .sort() + .map((field) => { + return [ + field, + new ScrapeResult( + dest.custom_fields?.[field], + sources.find((s) => s.custom_fields?.[field])?.custom_fields?.[ + field + ], + dest.custom_fields?.[field] === undefined + ), + ]; + }) ) ); loadImages(); }, [sources, dest]); + const hasCustomFieldValues = useMemo(() => { + return hasScrapedValues(Array.from(customFields.values())); + }, [customFields]); + // ensure this is updated if fields are changed const hasValues = useMemo(() => { - return hasScrapedValues([ - title, - code, - url, - date, - rating, - oCounter, - galleries, - studio, - performers, - groups, - tags, - details, - organized, - stashIDs, - image, - ]); + return ( + hasCustomFieldValues || + hasScrapedValues([ + title, + code, + url, + date, + rating, + oCounter, + galleries, + studio, + performers, + groups, + tags, + details, + organized, + stashIDs, + image, + ]) + ); }, [ title, code, @@ -354,6 +379,7 @@ const SceneMergeDetails: React.FC = ({ organized, stashIDs, image, + hasCustomFieldValues, ]); function renderScrapeRows() { @@ -379,83 +405,87 @@ const SceneMergeDetails: React.FC = ({ return ( <> setTitle(value)} /> setCode(value)} /> setURL(value)} /> setDate(value)} /> ( - - )} - renderNewField={() => ( - - )} + originalField={} + newField={} onChange={(value) => setRating(value)} /> ( + originalField={ {}} className="bg-secondary text-white border-secondary" /> - )} - renderNewField={() => ( + } + newField={ {}} className="bg-secondary text-white 
border-secondary" /> - )} + } onChange={(value) => setOCounter(value)} /> ( + originalField={ {}} className="bg-secondary text-white border-secondary" /> - )} - renderNewField={() => ( + } + newField={ {}} className="bg-secondary text-white border-secondary" /> - )} + } onChange={(value) => setPlayCount(value)} /> ( + originalField={ = ({ onChange={() => {}} className="bg-secondary text-white border-secondary" /> - )} - renderNewField={() => ( + } + newField={ {}} className="bg-secondary text-white border-secondary" /> - )} + } onChange={(value) => setPlayDuration(value)} /> ( + originalField={ = ({ isMulti isDisabled /> - )} - renderNewField={() => ( + } + newField={ = ({ isMulti isDisabled /> - )} + } onChange={(value) => setGalleries(value)} /> setStudio(value)} /> setPerformers(value)} ageFromDate={date.useNewValue ? date.newValue : date.originalValue} /> setGroups(value)} /> setTags(value)} /> setDetails(value)} /> ( + originalField={ {}} className="bg-secondary text-white border-secondary" /> - )} - renderNewField={() => ( + } + newField={ {}} className="bg-secondary text-white border-secondary" /> - )} + } onChange={(value) => setOrganized(value)} /> ( + originalField={ - )} - renderNewField={() => ( - - )} + } + newField={} onChange={(value) => setStashIDs(value)} + alwaysShow={ + !!stashIDs.originalValue?.length || !!stashIDs.newValue?.length + } /> setImage(value)} /> + {hasCustomFieldValues && ( + setCustomFields(newCustomFields)} + /> + )} ); } @@ -602,6 +648,13 @@ const SceneMergeDetails: React.FC = ({ organized: organized.getNewValue(), stash_ids: stashIDs.getNewValue(), cover_image: coverImage, + custom_fields: { + partial: Object.fromEntries( + Array.from(customFields.entries()).flatMap(([field, v]) => + v.useNewValue ? 
[[field, v.getNewValue()]] : [] + ) + ), + }, }, includeViewHistory: playCount.getNewValue() !== undefined, includeOHistory: oCounter.getNewValue() !== undefined, @@ -617,14 +670,13 @@ const SceneMergeDetails: React.FC = ({ : intl.formatMessage({ id: "dialogs.merge.destination" }); const sourceLabel = !hasValues ? "" - : intl.formatMessage({ id: "dialogs.merge.source" }); + : intl.formatMessage({ id: "dialogs.merge.combined" }); return ( { if (!apply) { onClose(); @@ -632,7 +684,9 @@ const SceneMergeDetails: React.FC = ({ onClose(createValues()); } }} - /> + > + {renderScrapeRows()} + ); }; @@ -650,10 +704,10 @@ export const SceneMergeModal: React.FC = ({ const [sourceScenes, setSourceScenes] = useState([]); const [destScene, setDestScene] = useState([]); - const [loadedSources, setLoadedSources] = useState< - GQL.SlimSceneDataFragment[] - >([]); - const [loadedDest, setLoadedDest] = useState(); + const [loadedSources, setLoadedSources] = useState( + [] + ); + const [loadedDest, setLoadedDest] = useState(); const [running, setRunning] = useState(false); const [secondStep, setSecondStep] = useState(false); @@ -665,6 +719,12 @@ export const SceneMergeModal: React.FC = ({ id: "actions.merge", }); + const srcIDs = useMemo(() => sourceScenes.map((s) => s.id), [sourceScenes]); + const destID = useMemo( + () => (destScene[0] ? 
[destScene[0].id] : []), + [destScene] + ); + useEffect(() => { if (scenes.length > 0) { // set the first scene as the destination, others as source @@ -679,7 +739,7 @@ export const SceneMergeModal: React.FC = ({ async function loadScenes() { const sceneIDs = sourceScenes.map((s) => parseInt(s.id)); sceneIDs.push(parseInt(destScene[0].id)); - const query = await queryFindScenesByID(sceneIDs); + const query = await queryFindFullScenesByID(sceneIDs); const { scenes: loadedScenes } = query.data.findScenes; setLoadedDest(loadedScenes.find((s) => s.id === destScene[0].id)); @@ -700,8 +760,6 @@ export const SceneMergeModal: React.FC = ({ ); if (result.data?.sceneMerge) { Toast.success(intl.formatMessage({ id: "toast.merged_scenes" })); - // refetch the scene - await queryFindScenesByID([parseInt(destScene[0].id)]); onClose(destScene[0].id); } onClose(); @@ -730,6 +788,7 @@ export const SceneMergeModal: React.FC = ({ sources={loadedSources} dest={loadedDest!} onClose={(values) => { + setSecondStep(false); if (values) { onMerge(values); } else { @@ -773,6 +832,7 @@ export const SceneMergeModal: React.FC = ({ onSelect={(items) => setSourceScenes(items)} values={sourceScenes} menuPortalTarget={document.body} + excludeIds={destID} /> @@ -806,6 +866,7 @@ export const SceneMergeModal: React.FC = ({ onSelect={(items) => setDestScene(items)} values={destScene} menuPortalTarget={document.body} + excludeIds={srcIDs} /> diff --git a/ui/v2.5/src/components/Scenes/SceneRecommendationRow.tsx b/ui/v2.5/src/components/Scenes/SceneRecommendationRow.tsx index d33762761..f5aafe846 100644 --- a/ui/v2.5/src/components/Scenes/SceneRecommendationRow.tsx +++ b/ui/v2.5/src/components/Scenes/SceneRecommendationRow.tsx @@ -1,13 +1,10 @@ import React, { useMemo } from "react"; -import { Link } from "react-router-dom"; import { useFindScenes } from "src/core/StashService"; -import Slider from "@ant-design/react-slick"; import { SceneCard } from "./SceneCard"; import { SceneQueue } from 
"src/models/sceneQueue"; import { ListFilterModel } from "src/models/list-filter/filter"; -import { getSlickSliderSettings } from "src/core/recommendations"; -import { RecommendationRow } from "../FrontPage/RecommendationRow"; -import { FormattedMessage } from "react-intl"; +import { PatchComponent } from "src/patch"; +import { FilteredRecommendationRow } from "../FrontPage/FilteredRecommendationRow"; interface IProps { isTouch: boolean; @@ -15,33 +12,25 @@ interface IProps { header: string; } -export const SceneRecommendationRow: React.FC = (props) => { - const result = useFindScenes(props.filter); - const cardCount = result.data?.findScenes.count; +export const SceneRecommendationRow: React.FC = PatchComponent( + "SceneRecommendationRow", + (props) => { + const result = useFindScenes(props.filter); + const count = result.data?.findScenes.count ?? 0; - const queue = useMemo(() => { - return SceneQueue.fromListFilterModel(props.filter); - }, [props.filter]); + const queue = useMemo(() => { + return SceneQueue.fromListFilterModel(props.filter); + }, [props.filter]); - if (!result.loading && !cardCount) { - return null; - } - - return ( - - - - } - > - {result.loading ? 
[...Array(props.filter.itemsPerPage)].map((i) => ( @@ -56,7 +45,7 @@ export const SceneRecommendationRow: React.FC = (props) => { zoomIndex={1} /> ))} - - - ); -}; + + ); + } +); diff --git a/ui/v2.5/src/components/Scenes/SceneSelect.tsx b/ui/v2.5/src/components/Scenes/SceneSelect.tsx index 7871bc43e..fed72dd53 100644 --- a/ui/v2.5/src/components/Scenes/SceneSelect.tsx +++ b/ui/v2.5/src/components/Scenes/SceneSelect.tsx @@ -12,7 +12,7 @@ import { queryFindScenesForSelect, queryFindScenesByIDForSelect, } from "src/core/StashService"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { useIntl } from "react-intl"; import { defaultMaxOptionsShown } from "src/core/config"; import { ListFilterModel } from "src/models/list-filter/filter"; @@ -22,6 +22,7 @@ import { IFilterProps, IFilterValueProps, Option as SelectOption, + toOption, } from "../Shared/FilterSelect"; import { useCompare } from "src/hooks/state"; import { Placement } from "react-bootstrap/esm/Overlay"; @@ -33,6 +34,8 @@ import { CriterionValue, } from "src/models/list-filter/criteria/criterion"; import { TruncatedText } from "../Shared/TruncatedText"; +import { isUUID } from "src/utils/stashIds"; +import { filterByStashID } from "src/models/list-filter/utils"; export type Scene = Pick & { studio?: Pick | null; @@ -66,36 +69,51 @@ const sceneSelectSort = PatchFunction( const _SceneSelect: React.FC< IFilterProps & IFilterValueProps & ExtraSceneProps > = (props) => { - const { configuration } = React.useContext(ConfigurationContext); + const { configuration } = useConfigurationContext(); const intl = useIntl(); const maxOptionsShown = configuration?.ui.maxOptionsShown ?? defaultMaxOptionsShown; const exclude = useMemo(() => props.excludeIds ?? 
[], [props.excludeIds]); + function filterExcluded(scene: Scene) { + // HACK - we should probably exclude these in the backend query, but + // this will do in the short-term + return !exclude.includes(scene.id.toString()); + } + async function loadScenes(input: string): Promise { const filter = new ListFilterModel(GQL.FilterMode.Scenes); - filter.searchTerm = input; filter.currentPage = 1; filter.itemsPerPage = maxOptionsShown; filter.sortBy = "title"; filter.sortDirection = GQL.SortDirectionEnum.Asc; - if (props.extraCriteria) { - filter.criteria = [...props.extraCriteria]; + filter.criteria = [...(props.extraCriteria ?? [])]; + + if (isUUID(input)) { + const oldCriteria = filter.criteria; + + filterByStashID(filter, input); + + const query = await queryFindScenesForSelect(filter); + const matches = query.data.findScenes.scenes.filter(filterExcluded); + + if (matches.length > 0) { + // Matches found, return them immediately. + return matches.map(toOption); + } + + // If no stash_id matches found, continue with standard name/alias search. + filter.criteria = oldCriteria; // Clear stash_id criterion to search by name/alias below. 
} - const query = await queryFindScenesForSelect(filter); - let ret = query.data.findScenes.scenes.filter((scene) => { - // HACK - we should probably exclude these in the backend query, but - // this will do in the short-term - return !exclude.includes(scene.id.toString()); - }); + filter.searchTerm = input; - return sceneSelectSort(input, ret).map((scene) => ({ - value: scene.id, - object: scene, - })); + const query = await queryFindScenesForSelect(filter); + const ret = query.data.findScenes.scenes.filter(filterExcluded); + + return sceneSelectSort(input, ret).map(toOption); } const SceneOption: React.FC> = (optionProps) => { diff --git a/ui/v2.5/src/components/Scenes/SceneWallPanel.tsx b/ui/v2.5/src/components/Scenes/SceneWallPanel.tsx index 546a1488a..d49d9b73e 100644 --- a/ui/v2.5/src/components/Scenes/SceneWallPanel.tsx +++ b/ui/v2.5/src/components/Scenes/SceneWallPanel.tsx @@ -1,10 +1,11 @@ import React, { useCallback, - useContext, useEffect, useMemo, + useRef, useState, } from "react"; +import { Form } from "react-bootstrap"; import * as GQL from "src/core/generated-graphql"; import { SceneQueue } from "src/models/sceneQueue"; import Gallery, { @@ -12,13 +13,15 @@ import Gallery, { PhotoProps, RenderImageProps, } from "react-photo-gallery"; -import { ConfigurationContext } from "src/hooks/Config"; +import { useConfigurationContext } from "src/hooks/Config"; import { objectTitle } from "src/core/files"; import { Link, useHistory } from "react-router-dom"; import { TruncatedText } from "../Shared/TruncatedText"; import TextUtils from "src/utils/text"; import { useIntl } from "react-intl"; +import { useDragMoveSelect } from "../Shared/GridCard/dragMoveSelect"; import cx from "classnames"; +import { defaultPreviewVolume } from "src/core/config"; interface IScenePhoto { scene: GQL.SlimSceneDataFragment; @@ -26,15 +29,33 @@ interface IScenePhoto { onError?: (photo: PhotoProps) => void; } -export const SceneWallItem: React.FC> = ( - props: RenderImageProps -) => 
{ +interface IExtraProps { + maxHeight: number; + selected?: boolean; + onSelectedChanged?: (selected: boolean, shiftKey: boolean) => void; + selecting?: boolean; +} + +export const SceneWallItem: React.FC< + RenderImageProps & IExtraProps +> = (props: RenderImageProps & IExtraProps) => { const intl = useIntl(); - const { configuration } = useContext(ConfigurationContext); + const { dragProps } = useDragMoveSelect({ + selecting: props.selecting || false, + selected: props.selected || false, + onSelectedChanged: props.onSelectedChanged, + }); + + const { configuration } = useConfigurationContext(); const playSound = configuration?.interface.soundOnPreview ?? false; + const volume = configuration?.ui.previewVolume ?? defaultPreviewVolume; const showTitle = configuration?.interface.wallShowTitle ?? false; + const height = Math.min(props.maxHeight, props.photo.height); + const zoomFactor = height / props.photo.height; + const width = props.photo.width * zoomFactor; + const [active, setActive] = useState(false); type style = Record; @@ -50,13 +71,43 @@ export const SceneWallItem: React.FC> = ( } var handleClick = function handleClick(event: React.MouseEvent) { + if (props.selecting && props.onSelectedChanged) { + props.onSelectedChanged(!props.selected, event.shiftKey); + event.preventDefault(); + event.stopPropagation(); + return; + } if (props.onClick) { props.onClick(event, { index: props.index }); } }; const video = props.photo.src.includes("preview"); - const ImagePreview = video ? 
"video" : "img"; + const previewProps = { + loading: "lazy", + loop: video, + muted: !video || !playSound || !active, + autoPlay: video, + playsInline: video, + key: props.photo.key, + src: props.photo.src, + width, + height, + alt: props.photo.alt, + onMouseEnter: () => setActive(true), + onMouseLeave: () => setActive(false), + onClick: handleClick, + onError: () => { + props.photo.onError?.(props.photo); + }, + }; + + const videoEl = useRef(null); + + useEffect(() => { + if (video && videoEl?.current?.volume) + videoEl.current.volume = playSound ? volume / 100 : 0; + }, [video, playSound, volume]); const { scene } = props.photo; const title = objectTitle(scene); @@ -66,36 +117,49 @@ export const SceneWallItem: React.FC> = ( ? [...performerNames.slice(0, -2), performerNames.slice(-2).join(" & ")] : performerNames; + let shiftKey = false; + return (
    - setActive(true)} - onMouseLeave={() => setActive(false)} - onClick={handleClick} - onError={() => { - props.photo.onError?.(props.photo); - }} - /> + {props.onSelectedChanged && ( + props.onSelectedChanged!(!props.selected, shiftKey)} + onClick={(event: React.MouseEvent) => { + shiftKey = event.shiftKey; + event.stopPropagation(); + }} + /> + )} + {video ? ( +