diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index 0dc6d10a8..5ca1021a0 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -6,6 +6,15 @@ body:
attributes:
value: |
Thanks for taking the time to fill out this bug report!
+ - type: checkboxes
+ id: confirm-troubleshooting
+ attributes:
+ label: Have you enabled troubleshooting mode?
+ description: |
+        To ensure the bug is not caused by custom modifications or plugins, make sure to enable troubleshooting mode before filing the report. In Stash go to Settings and click **Troubleshooting mode** and then retest to see if the bug still occurs.
+ options:
+ - label: I confirm that the troubleshooting mode is enabled.
+ required: true
- type: textarea
id: description
attributes:
@@ -61,4 +70,4 @@ body:
attributes:
label: Relevant log output
description: Please copy and paste any relevant log output from Settings > Logs. This will be automatically formatted into code, so no need for backticks.
- render: shell
\ No newline at end of file
+ render: shell
diff --git a/.github/workflows/build-compiler.yml b/.github/workflows/build-compiler.yml
new file mode 100644
index 000000000..42562c95c
--- /dev/null
+++ b/.github/workflows/build-compiler.yml
@@ -0,0 +1,28 @@
+name: Compiler Build
+
+on:
+ workflow_dispatch:
+
+env:
+ COMPILER_IMAGE: ghcr.io/stashapp/compiler:14
+
+jobs:
+ build-compiler:
+ runs-on: ubuntu-24.04
+ steps:
+ - uses: actions/checkout@v6
+ - uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ${{ github.repository_owner }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ - uses: docker/setup-buildx-action@v3
+ - uses: docker/build-push-action@v6
+ with:
+ push: true
+ context: "{{defaultContext}}:docker/compiler"
+ tags: |
+ ${{ env.COMPILER_IMAGE }}
+ ghcr.io/stashapp/compiler:latest
+ cache-from: type=gha,scope=all,mode=max
+ cache-to: type=gha,scope=all,mode=max
\ No newline at end of file
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 1e46ecd69..7f6f5696d 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -2,7 +2,7 @@ name: Build
on:
push:
- branches:
+ branches:
- develop
- master
- 'releases/**'
@@ -15,50 +15,165 @@ concurrency:
cancel-in-progress: true
env:
- COMPILER_IMAGE: stashapp/compiler:12
+ COMPILER_IMAGE: ghcr.io/stashapp/compiler:14
jobs:
- build:
- runs-on: ubuntu-22.04
+ # Job 1: Generate code and build UI
+ # Runs natively (no Docker) — go generate/gqlgen and node don't need cross-compilers.
+ # Produces artifacts (generated Go files + UI build) consumed by test and build jobs.
+ generate:
+ runs-on: ubuntu-24.04
steps:
- - uses: actions/checkout@v2
-
- - name: Checkout
- run: git fetch --prune --unshallow --tags
-
+ - uses: actions/checkout@v6
+ with:
+ fetch-depth: 0
+ fetch-tags: true
- name: Setup Go
- uses: actions/setup-go@v5
+ uses: actions/setup-go@v6
with:
go-version-file: 'go.mod'
- - name: Pull compiler image
- run: docker pull $COMPILER_IMAGE
-
- - name: Cache node modules
- uses: actions/cache@v3
- env:
- cache-name: cache-node_modules
+ # pnpm version is read from the packageManager field in package.json
+      # pinned to a commit SHA because action-setup releases 4.3 and 4.4 are broken
+ - name: Install pnpm
+ uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
with:
- path: ui/v2.5/node_modules
- key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/pnpm-lock.yaml') }}
+ package_json_file: ui/v2.5/package.json
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v6
+ with:
+ node-version: '20'
+ cache: 'pnpm'
+ cache-dependency-path: ui/v2.5/pnpm-lock.yaml
+
+ - name: Install UI dependencies
+ run: cd ui/v2.5 && pnpm install --frozen-lockfile
+
+ - name: Generate
+ run: make generate
- name: Cache UI build
- uses: actions/cache@v3
+ uses: actions/cache@v5
id: cache-ui
- env:
- cache-name: cache-ui
with:
path: ui/v2.5/build
- key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/pnpm-lock.yaml', 'ui/v2.5/public/**', 'ui/v2.5/src/**', 'graphql/**/*.graphql') }}
+ key: ${{ runner.os }}-ui-build-${{ hashFiles('ui/v2.5/pnpm-lock.yaml', 'ui/v2.5/public/**', 'ui/v2.5/src/**', 'graphql/**/*.graphql') }}
- - name: Cache go build
- uses: actions/cache@v3
- env:
- # increment the number suffix to bump the cache
- cache-name: cache-go-cache-1
+ - name: Validate UI
+ # skip UI validation for pull requests if UI is unchanged
+ if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }}
+ run: make validate-ui
+
+ - name: Build UI
+ # skip UI build for pull requests if UI is unchanged (UI was cached)
+ if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }}
+ run: make ui
+
+ # Bundle generated Go files + UI build for downstream jobs (test + build)
+ - name: Upload generated artifacts
+ uses: actions/upload-artifact@v7
+ with:
+ name: generated
+ retention-days: 1
+ path: |
+ internal/api/generated_exec.go
+ internal/api/generated_models.go
+ ui/v2.5/build/
+ ui/login/locales/
+
+ # Job 2: Integration tests
+  # Runs natively (no Docker) — only needs Go + GCC (for CGO/SQLite), both on ubuntu-24.04.
+ # Runs in parallel with the build matrix jobs.
+ test:
+ needs: generate
+ runs-on: ubuntu-24.04
+ steps:
+ - uses: actions/checkout@v6
+
+ - name: Setup Go
+ uses: actions/setup-go@v6
+ with:
+ go-version-file: 'go.mod'
+
+ # Places generated Go files + UI build into the working tree so the build compiles
+ - name: Download generated artifacts
+ uses: actions/download-artifact@v8
+ with:
+ name: generated
+
+ - name: Test Backend
+ run: make it
+
+ # Job 3: Cross-compile for all platforms
+  # Each platform gets its own runner and Docker container (ghcr.io/stashapp/compiler:14).
+ # Each build-cc-* make target is self-contained (sets its own GOOS/GOARCH/CC),
+ # so running them in separate containers is functionally identical to one container.
+ # Runs in parallel with the test job.
+ build:
+ needs: generate
+ runs-on: ubuntu-24.04
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - platform: windows
+ make-target: build-cc-windows
+ artifact-paths: |
+ dist/stash-win.exe
+ tag: win
+ - platform: macos
+ make-target: build-cc-macos
+ artifact-paths: |
+ dist/stash-macos
+ dist/Stash.app.zip
+ tag: osx
+ - platform: linux
+ make-target: build-cc-linux
+ artifact-paths: |
+ dist/stash-linux
+ tag: linux
+ - platform: linux-arm64v8
+ make-target: build-cc-linux-arm64v8
+ artifact-paths: |
+ dist/stash-linux-arm64v8
+ tag: arm
+ - platform: linux-arm32v7
+ make-target: build-cc-linux-arm32v7
+ artifact-paths: |
+ dist/stash-linux-arm32v7
+ tag: arm
+ - platform: linux-arm32v6
+ make-target: build-cc-linux-arm32v6
+ artifact-paths: |
+ dist/stash-linux-arm32v6
+ tag: arm
+ - platform: freebsd
+ make-target: build-cc-freebsd
+ artifact-paths: |
+ dist/stash-freebsd
+ tag: freebsd
+
+ steps:
+ - uses: actions/checkout@v6
+ with:
+ fetch-depth: 0
+ fetch-tags: true
+
+ - name: Download generated artifacts
+ uses: actions/download-artifact@v8
+ with:
+ name: generated
+
+ - name: Cache Go build
+ uses: actions/cache@v5
with:
path: .go-cache
- key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('go.mod', '**/go.sum') }}
+ key: ${{ runner.os }}-go-cache-${{ matrix.platform }}-${{ hashFiles('go.mod', '**/go.sum') }}
+
+      # kept separate to test timings
+ - name: pull compiler image
+ run: docker pull $COMPILER_IMAGE
- name: Start build container
env:
@@ -67,45 +182,50 @@ jobs:
mkdir -p .go-cache
docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated --env OFFICIAL_BUILD=${{ env.official-build }} -w /stash $COMPILER_IMAGE tail -f /dev/null
- - name: Pre-install
- run: docker exec -t build /bin/bash -c "make CI=1 pre-ui"
-
- - name: Generate
- run: docker exec -t build /bin/bash -c "make generate"
-
- - name: Validate UI
- # skip UI validation for pull requests if UI is unchanged
- if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }}
- run: docker exec -t build /bin/bash -c "make validate-ui"
-
- # Static validation happens in the linter workflow in parallel to this workflow
- # Run Dynamic validation here, to make sure we pass all the projects integration tests
- - name: Test Backend
- run: docker exec -t build /bin/bash -c "make it"
-
- - name: Build UI
- # skip UI build for pull requests if UI is unchanged (UI was cached)
- # this means that the build version/time may be incorrect if the UI is
- # not changed in a pull request
- if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }}
- run: docker exec -t build /bin/bash -c "make ui"
-
- - name: Compile for all supported platforms
- run: |
- docker exec -t build /bin/bash -c "make build-cc-windows"
- docker exec -t build /bin/bash -c "make build-cc-macos"
- docker exec -t build /bin/bash -c "make build-cc-linux"
- docker exec -t build /bin/bash -c "make build-cc-linux-arm64v8"
- docker exec -t build /bin/bash -c "make build-cc-linux-arm32v7"
- docker exec -t build /bin/bash -c "make build-cc-linux-arm32v6"
- docker exec -t build /bin/bash -c "make build-cc-freebsd"
-
- - name: Zip UI
- run: docker exec -t build /bin/bash -c "make zip-ui"
+ - name: Build (${{ matrix.platform }})
+ run: docker exec -t build /bin/bash -c "make ${{ matrix.make-target }}"
- name: Cleanup build container
run: docker rm -f -v build
+ - name: Upload build artifact
+ uses: actions/upload-artifact@v7
+ with:
+ name: build-${{ matrix.platform }}
+ retention-days: 1
+ path: ${{ matrix.artifact-paths }}
+
+ # Job 4: Release
+ # Waits for both test and build to pass, then collects all platform artifacts
+ # into dist/ for checksums, GitHub releases, and multi-arch Docker push.
+ release:
+ needs: [test, build]
+ runs-on: ubuntu-24.04
+ steps:
+ - uses: actions/checkout@v6
+ with:
+ fetch-depth: 0
+ fetch-tags: true
+
+ # Downloads all artifacts (generated + 7 platform builds) into artifacts/ subdirectories
+ - name: Download all build artifacts
+ uses: actions/download-artifact@v8
+ with:
+ path: artifacts
+
+ # Reassemble platform binaries from matrix job artifacts into a single dist/ directory
+ # make sure that artifacts have executable bit set
+      # upload-artifact (v4+) strips the common path prefix (dist/), so files are at the artifact root
+ - name: Collect binaries
+ run: |
+ mkdir -p dist
+ cp artifacts/build-*/* dist/
+ chmod +x dist/*
+
+ - name: Zip UI
+ run: |
+ cd artifacts/generated/ui/v2.5/build && zip -r ../../../../../dist/stash-ui.zip .
+
- name: Generate checksums
run: |
git describe --tags --exclude latest_develop | tee CHECKSUMS_SHA1
@@ -116,7 +236,7 @@ jobs:
- name: Upload Windows binary
# only upload binaries for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v7
with:
name: stash-win.exe
path: dist/stash-win.exe
@@ -124,15 +244,23 @@ jobs:
- name: Upload macOS binary
# only upload binaries for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v7
with:
name: stash-macos
path: dist/stash-macos
+ - name: Upload macOS bundle
+ # only upload binaries for pull requests
+ if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
+ uses: actions/upload-artifact@v7
+ with:
+ name: Stash.app.zip
+ path: dist/Stash.app.zip
+
- name: Upload Linux binary
# only upload binaries for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v7
with:
name: stash-linux
path: dist/stash-linux
@@ -140,14 +268,14 @@ jobs:
- name: Upload UI
# only upload for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v7
with:
name: stash-ui.zip
path: dist/stash-ui.zip
- name: Update latest_develop tag
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
- run : git tag -f latest_develop; git push -f --tags
+ run: git tag -f latest_develop; git push -f --tags
- name: Development Release
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
@@ -197,7 +325,7 @@ jobs:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
run: |
- docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64
+ docker run --rm --privileged tonistiigi/binfmt
docker info
docker buildx create --name builder --use
docker buildx inspect --bootstrap
@@ -213,7 +341,7 @@ jobs:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
run: |
- docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64
+ docker run --rm --privileged tonistiigi/binfmt
docker info
docker buildx create --name builder --use
docker buildx inspect --bootstrap
diff --git a/.github/workflows/golangci-lint.yml b/.github/workflows/golangci-lint.yml
index 71c743ced..d2d54b207 100644
--- a/.github/workflows/golangci-lint.yml
+++ b/.github/workflows/golangci-lint.yml
@@ -9,65 +9,24 @@ on:
- 'releases/**'
pull_request:
-env:
- COMPILER_IMAGE: stashapp/compiler:12
-
jobs:
golangci:
name: lint
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
-
- - name: Checkout
- run: git fetch --prune --unshallow --tags
-
- - name: Setup Go
- uses: actions/setup-go@v5
+ # no tags or depth needed for lint
+ - uses: actions/checkout@v6
+ - uses: actions/setup-go@v6
with:
go-version-file: 'go.mod'
- - name: Pull compiler image
- run: docker pull $COMPILER_IMAGE
-
- - name: Start build container
- run: |
- mkdir -p .go-cache
- docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated -w /stash $COMPILER_IMAGE tail -f /dev/null
-
+ # generate-backend runs natively (just go generate + touch-ui) — no Docker needed
- name: Generate Backend
- run: docker exec -t build /bin/bash -c "make generate-backend"
+ run: make generate-backend
+      ## NOTE: golangci-lint is pinned to v2.11.4 below;
+      ## keep in sync with the v2 config in .golangci.yml
- name: Run golangci-lint
- uses: golangci/golangci-lint-action@v6
+ uses: golangci/golangci-lint-action@v8
with:
- # Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
- version: latest
-
- # Optional: working directory, useful for monorepos
- # working-directory: somedir
-
- # Optional: golangci-lint command line arguments.
- #
- # Note: By default, the `.golangci.yml` file should be at the root of the repository.
- # The location of the configuration file can be changed by using `--config=`
- args: --timeout=5m
-
- # Optional: show only new issues if it's a pull request. The default value is `false`.
- # only-new-issues: true
-
- # Optional: if set to true, then all caching functionality will be completely disabled,
- # takes precedence over all other caching options.
- # skip-cache: true
-
- # Optional: if set to true, then the action won't cache or restore ~/go/pkg.
- # skip-pkg-cache: true
-
- # Optional: if set to true, then the action won't cache or restore ~/.cache/go-build.
- # skip-build-cache: true
-
- # Optional: The mode to install golangci-lint. It can be 'binary' or 'goinstall'.
- # install-mode: "goinstall"
-
- - name: Cleanup build container
- run: docker rm -f -v build
+ version: v2.11.4
\ No newline at end of file
diff --git a/.golangci.yml b/.golangci.yml
index 5ed4d715c..2521ebfc2 100644
--- a/.golangci.yml
+++ b/.golangci.yml
@@ -1,87 +1,100 @@
-# options for analysis running
-run:
- timeout: 5m
-
+version: "2"
linters:
- disable-all: true
+ default: none
enable:
- # Default set of linters from golangci-lint
- - errcheck
- - gosimple
- - govet
- - ineffassign
- - staticcheck
- - typecheck
- - unused
- # Linters added by the stash project.
- # - contextcheck
- copyloopvar
- dogsled
+ - errcheck
- errchkjson
- errorlint
- # - exhaustive
- gocritic
- # - goerr113
- - gofmt
- # - gomnd
- # - ifshort
+ - govet
+ - ineffassign
- misspell
- # - nakedret
- - noctx
+
+ # TODO - fix these in a later PR
+ # - noctx
+
- revive
- rowserrcheck
- sqlclosecheck
-
-# Project-specific linter overrides
-linters-settings:
- gofmt:
- simplify: false
-
- errorlint:
- # Disable errorf because there are false positives, where you don't want to wrap
- # an error.
- errorf: false
- asserts: true
- comparison: true
-
- revive:
- ignore-generated-header: true
- severity: error
- confidence: 0.8
- rules:
- - name: blank-imports
- disabled: true
- - name: context-as-argument
- - name: context-keys-type
- - name: dot-imports
- - name: error-return
- - name: error-strings
- - name: error-naming
- - name: exported
- disabled: true
- - name: if-return
- disabled: true
- - name: increment-decrement
- - name: var-naming
- disabled: true
- - name: var-declaration
- - name: package-comments
- - name: range
- - name: receiver-naming
- - name: time-naming
- - name: unexported-return
- disabled: true
- - name: indent-error-flow
- disabled: true
- - name: errorf
- - name: empty-block
- disabled: true
- - name: superfluous-else
- - name: unused-parameter
- disabled: true
- - name: unreachable-code
- - name: redefines-builtin-id
-
- rowserrcheck:
- packages:
- - github.com/jmoiron/sqlx
+ - staticcheck
+ - unused
+
+ settings:
+ staticcheck:
+ checks:
+ - all
+
+ # we specify (unnecessary) embedded fields for clarity in many places
+ - -QF1008
+
+ # there's lots of misnamed (eg intId instead of intID) fields in the code.
+ # it's not exactly world-ending, so I'm deferring fixing these for now
+ - -ST1003
+ errorlint:
+ errorf: false
+ asserts: true
+ comparison: true
+ revive:
+ confidence: 0.8
+ severity: error
+ rules:
+ - name: blank-imports
+ disabled: true
+ - name: context-as-argument
+ - name: context-keys-type
+ - name: dot-imports
+ - name: error-return
+ - name: error-strings
+ - name: error-naming
+ - name: exported
+ disabled: true
+ - name: if-return
+ disabled: true
+ - name: increment-decrement
+ - name: var-naming
+ disabled: true
+ - name: var-declaration
+ - name: package-comments
+ - name: range
+ - name: receiver-naming
+ - name: time-naming
+ - name: unexported-return
+ disabled: true
+ - name: indent-error-flow
+ disabled: true
+ - name: errorf
+ - name: empty-block
+ disabled: true
+ - name: superfluous-else
+ - name: unused-parameter
+ disabled: true
+ - name: unreachable-code
+ - name: redefines-builtin-id
+ rowserrcheck:
+ packages:
+ - github.com/jmoiron/sqlx
+ exclusions:
+ generated: lax
+ presets:
+ - comments
+ - common-false-positives
+ - legacy
+ - std-error-handling
+ paths:
+ - third_party$
+ - builtin$
+ - examples$
+formatters:
+ enable:
+ - gofmt
+ settings:
+ gofmt:
+ simplify: false
+ exclusions:
+ generated: lax
+ paths:
+ - third_party$
+ - builtin$
+ - examples$
diff --git a/Makefile b/Makefile
index 7e19063a3..d9caf0ee5 100644
--- a/Makefile
+++ b/Makefile
@@ -50,7 +50,7 @@ export CGO_ENABLED := 1
# define COMPILER_IMAGE for cross-compilation docker container
ifndef COMPILER_IMAGE
- COMPILER_IMAGE := stashapp/compiler:latest
+ COMPILER_IMAGE := ghcr.io/stashapp/compiler:latest
endif
.PHONY: release
@@ -129,7 +129,7 @@ phasher: build-flags
# builds dynamically-linked debug binaries
.PHONY: build
-build: stash phasher
+build: stash
# builds dynamically-linked PIE release binaries
.PHONY: build-release
@@ -187,8 +187,6 @@ build-cc-macos:
# Combine into universal binaries
lipo -create -output dist/stash-macos dist/stash-macos-intel dist/stash-macos-arm
rm dist/stash-macos-intel dist/stash-macos-arm
- lipo -create -output dist/phasher-macos dist/phasher-macos-intel dist/phasher-macos-arm
- rm dist/phasher-macos-intel dist/phasher-macos-arm
# Place into bundle and zip up
rm -rf dist/Stash.app
@@ -198,6 +196,16 @@ build-cc-macos:
cd dist && rm -f Stash.app.zip && zip -r Stash.app.zip Stash.app
rm -rf dist/Stash.app
+.PHONY: build-cc-macos-phasher
+build-cc-macos-phasher:
+ make build-cc-macos-arm
+ make build-cc-macos-intel
+
+ # Combine into universal binaries
+ lipo -create -output dist/phasher-macos dist/phasher-macos-intel dist/phasher-macos-arm
+ rm dist/phasher-macos-intel dist/phasher-macos-arm
+ # do not bundle phasher
+
.PHONY: build-cc-freebsd
build-cc-freebsd: export GOOS := freebsd
build-cc-freebsd: export GOARCH := amd64
diff --git a/README.md b/README.md
index 5ccefe4bc..781eb5fcb 100644
--- a/README.md
+++ b/README.md
@@ -9,14 +9,14 @@
[](https://github.com/stashapp/stash/releases/latest)
[](https://github.com/stashapp/stash/labels/bounty)
-### **Stash is a self-hosted webapp written in Go which organizes and serves your diverse content collection, catering to both your SFW and NSFW needs.**
+
Stash is a self-hosted webapp written in Go which organizes and serves your diverse content collection, catering to both your SFW and NSFW needs.

-* Stash gathers information about videos in your collection from the internet, and is extensible through the use of community-built plugins for a large number of content producers and sites.
-* Stash supports a wide variety of both video and image formats.
-* You can tag videos and find them later.
-* Stash provides statistics about performers, tags, studios and more.
+- Stash gathers information about videos in your collection from the internet, and is extensible through the use of community-built plugins for a large number of content producers and sites.
+- Stash supports a wide variety of both video and image formats.
+- You can tag videos and find them later.
+- Stash provides statistics about performers, tags, studios and more.
You can [watch a SFW demo video](https://vimeo.com/545323354) to see it in action.
@@ -24,17 +24,20 @@ For further information you can consult the [documentation](https://docs.stashap
# Installing Stash
+> [!tip]
Step-by-step instructions are available at [docs.stashapp.cc/installation](https://docs.stashapp.cc/installation/).
-#### Windows Users:
-
-As of version 0.27.0, Stash no longer supports _Windows 7, 8, Server 2008 and Server 2012._
-At least Windows 10 or Server 2016 is required.
-
-#### Mac Users:
-
-As of version 0.29.0, Stash requires _macOS 11 Big Sur_ or later.
-Stash can still be run through docker on older versions of macOS.
+> [!important]
+> **Windows Users**
+>
+> As of version 0.27.0, Stash no longer supports _Windows 7, 8, Server 2008 and Server 2012._
+> At least Windows 10 or Server 2016 is required.
+>
+> **macOS Users**
+>
+> As of version 0.29.0, Stash requires _macOS 11 Big Sur_ or later.
+> As of version 0.32.0, Stash requires _macOS 12 Monterey_ or later.
+> Stash can still be run through Docker on older versions of macOS.
Windows | macOS | Linux | Docker
:---:|:---:|:---:|:---:
@@ -85,23 +88,36 @@ The badge below shows the current translation status of Stash across all support
Need help or want to get involved? Start with the documentation, then reach out to the community if you need further assistance.
-- Documentation
- - Official docs: https://docs.stashapp.cc - official guides guides and troubleshooting.
- - In-app manual: press Shift + ? in the app or view the manual online: https://docs.stashapp.cc/in-app-manual.
- - FAQ: https://discourse.stashapp.cc/c/support/faq/28 - common questions and answers.
- - Community wiki: https://discourse.stashapp.cc/tags/c/community-wiki/22/stash - guides, how-to’s and tips.
+### Documentation
+- [Official documentation](https://docs.stashapp.cc) - official guides and troubleshooting.
+- [In-app manual](https://docs.stashapp.cc/in-app-manual) - press Shift + ? in the app or view the manual online.
+- [FAQ](https://discourse.stashapp.cc/c/support/faq/28) - common questions and answers.
+- [Community wiki](https://discourse.stashapp.cc/tags/c/community-wiki/22/stash) - guides, how-to’s and tips.
-- Community & discussion
- - Community forum: https://discourse.stashapp.cc - community support, feature requests and discussions.
- - Discord: https://discord.gg/2TsNFKt - real-time chat and community support.
- - GitHub discussions: https://github.com/stashapp/stash/discussions - community support and feature discussions.
- - Lemmy community: https://discuss.online/c/stashapp - Reddit-style community space.
+### Community & discussion
+- [Community forum](https://discourse.stashapp.cc) - community support, feature requests and discussions.
+- [Discord](https://discord.gg/2TsNFKt) - real-time chat and community support.
+- [GitHub discussions](https://github.com/stashapp/stash/discussions) - community support and feature discussions.
+- [Lemmy community](https://discuss.online/c/stashapp) - board-style community space.
-- Community scrapers & plugins
- - Metadata sources: https://docs.stashapp.cc/metadata-sources/
- - Plugins: https://docs.stashapp.cc/plugins/
- - Themes: https://docs.stashapp.cc/themes/
- - Other projects: https://docs.stashapp.cc/other-projects/
+### Community scrapers & plugins
+- [Metadata sources](https://docs.stashapp.cc/metadata-sources/)
+- [Plugins](https://docs.stashapp.cc/plugins/)
+- [Themes](https://docs.stashapp.cc/themes/)
+- [Other projects](https://docs.stashapp.cc/other-projects/)
+
+# Architecture
+
+## Backend
+
+- Go
+- GraphQL API
+- SQLite
+
+## Frontend
+
+- React
+- TypeScript
# For Developers
diff --git a/cmd/stash/main.go b/cmd/stash/main.go
index 57fedd0e2..def4f3368 100644
--- a/cmd/stash/main.go
+++ b/cmd/stash/main.go
@@ -148,7 +148,7 @@ func recoverPanic() {
exitCode = 1
logger.Errorf("panic: %v\n%s", err, debug.Stack())
if desktop.IsDesktop() {
- desktop.FatalError(fmt.Errorf("Panic: %v", err))
+ desktop.FatalError(fmt.Errorf("panic: %v", err))
}
}
}
diff --git a/docker/ci/x86_64/Dockerfile b/docker/ci/x86_64/Dockerfile
index 6a9c6b76d..2161cb6af 100644
--- a/docker/ci/x86_64/Dockerfile
+++ b/docker/ci/x86_64/Dockerfile
@@ -12,7 +12,7 @@ RUN if [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then BIN=stash-linux-arm32v6; \
FROM --platform=$TARGETPLATFORM alpine:latest AS app
COPY --from=binary /stash /usr/bin/
-RUN apk add --no-cache ca-certificates python3 py3-requests py3-requests-toolbelt py3-lxml py3-pip ffmpeg tzdata vips vips-tools \
+RUN apk add --no-cache ca-certificates python3 py3-requests py3-requests-toolbelt py3-lxml py3-pip ffmpeg tzdata vips vips-tools vips-heif \
&& pip install --break-system-packages mechanicalsoup cloudscraper stashapp-tools
ENV STASH_CONFIG_FILE=/root/.stash/config.yml
diff --git a/docker/compiler/.gitignore b/docker/compiler/.gitignore
deleted file mode 100644
index 7012bfd63..000000000
--- a/docker/compiler/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-*.sdk.tar.*
\ No newline at end of file
diff --git a/docker/compiler/Dockerfile b/docker/compiler/Dockerfile
index 0154d7e61..d41be11a3 100644
--- a/docker/compiler/Dockerfile
+++ b/docker/compiler/Dockerfile
@@ -1,82 +1,86 @@
-FROM golang:1.24.3
+### OSXCROSS
+FROM debian:bookworm AS osxcross
+# add osxcross
+WORKDIR /tmp/osxcross
+ARG OSXCROSS_REVISION=5e1b71fcceb23952f3229995edca1b6231525b5b
+ADD --checksum=sha256:d3f771bbc20612fea577b18a71be3af2eb5ad2dd44624196cf55de866d008647 https://codeload.github.com/tpoechtrager/osxcross/tar.gz/${OSXCROSS_REVISION} /tmp/osxcross.tar.gz
-LABEL maintainer="https://discord.gg/2TsNFKt"
+ARG OSX_SDK_VERSION=12.3
+ARG OSX_SDK_DOWNLOAD_FILE=MacOSX${OSX_SDK_VERSION}.sdk.tar.xz
+ARG OSX_SDK_DOWNLOAD_URL=https://github.com/joseluisq/macosx-sdks/releases/download/${OSX_SDK_VERSION}/${OSX_SDK_DOWNLOAD_FILE}
+ADD --checksum=sha256:3abd261ceb483c44295a6623fdffe5d44fc4ac2c872526576ec5ab5ad0f6e26c ${OSX_SDK_DOWNLOAD_URL} /tmp/osxcross/tarballs/${OSX_SDK_DOWNLOAD_FILE}
-RUN apt-get update && apt-get install -y apt-transport-https ca-certificates gnupg
+ENV UNATTENDED=yes \
+ SDK_VERSION=${OSX_SDK_VERSION} \
+ OSX_VERSION_MIN=12.0
+RUN apt update && \
+ apt install -y --no-install-recommends \
+ bash ca-certificates clang cmake git patch libssl-dev bzip2 cpio libbz2-dev libxml2-dev make python3 xz-utils zlib1g-dev
+# lzma-dev libxml2-dev xz
+RUN tar --strip=1 -C /tmp/osxcross -xf /tmp/osxcross.tar.gz
+RUN ./build.sh
-RUN mkdir -p /etc/apt/keyrings
+### FREEBSD cross-compilation stage
+# use alpine for cacheable image since apt is notorious for not caching
+FROM alpine:3 AS freebsd
+# match golang latest
+# https://go.dev/wiki/FreeBSD
+ARG FREEBSD_VERSION=12.4
+ADD --checksum=sha256:581c7edacfd2fca2bdf5791f667402d22fccd8a5e184635e0cac075564d57aa8 \
+ http://ftp-archive.freebsd.org/mirror/FreeBSD-Archive/old-releases/amd64/${FREEBSD_VERSION}-RELEASE/base.txz \
+ /tmp/base.txz
-ADD https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key nodesource.gpg.key
-RUN cat nodesource.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && rm nodesource.gpg.key
-RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_24.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
+WORKDIR /opt/cross-freebsd
+RUN apk add --no-cache tar xz
+RUN tar -xf /tmp/base.txz --strip-components=1 ./usr/lib ./usr/include ./lib
+RUN cd /opt/cross-freebsd/usr/lib && \
+ find . -type l -exec sh -c ' \
+ for link; do \
+ target=$(readlink "$link"); \
+ case "$target" in \
+ /lib/*) ln -sf "/opt/cross-freebsd$target" "$link";; \
+ esac; \
+ done \
+ ' sh {} + && \
+ ln -s libc++.a libstdc++.a && \
+ ln -s libc++.so libstdc++.so
-RUN apt-get update && \
- apt-get install -y --no-install-recommends \
- git make tar bash nodejs zip \
- clang llvm-dev cmake patch libxml2-dev uuid-dev libssl-dev xz-utils \
- bzip2 gzip sed cpio libbz2-dev zlib1g-dev \
- gcc-mingw-w64 \
- gcc-arm-linux-gnueabi libc-dev-armel-cross linux-libc-dev-armel-cross \
- gcc-aarch64-linux-gnu libc-dev-arm64-cross && \
- rm -rf /var/lib/apt/lists/*;
+### BUILDER
+FROM golang:1.25.9 AS builder
+ENV PATH=/opt/osx-ndk-x86/bin:$PATH
+
+# copy in nodejs instead of using nodesource :thumbsup:
+COPY --from=docker.io/library/node:24-bookworm /usr/local /usr/local
+# copy in osxcross
+COPY --from=osxcross /tmp/osxcross/target/lib /usr/lib
+COPY --from=osxcross /tmp/osxcross/target /opt/osx-ndk-x86
+# copy in cross-freebsd
+COPY --from=freebsd /opt/cross-freebsd /opt/cross-freebsd
# pnpm install with npm
RUN npm install -g pnpm
-# FreeBSD cross-compilation setup
-# https://github.com/smartmontools/docker-build/blob/6b8c92560d17d325310ba02d9f5a4b250cb0764a/Dockerfile#L66
-ENV FREEBSD_VERSION 13.4
-ENV FREEBSD_DOWNLOAD_URL http://ftp.plusline.de/FreeBSD/releases/amd64/${FREEBSD_VERSION}-RELEASE/base.txz
-ENV FREEBSD_SHA 8e13b0a93daba349b8d28ad246d7beb327659b2ef4fe44d89f447392daec5a7c
+# git for getting hash
+# make and bash for building
-RUN cd /tmp && \
- curl -o base.txz $FREEBSD_DOWNLOAD_URL && \
- echo "$FREEBSD_SHA base.txz" | sha256sum -c - && \
- mkdir -p /opt/cross-freebsd && \
- cd /opt/cross-freebsd && \
- tar -xf /tmp/base.txz ./lib/ ./usr/lib/ ./usr/include/ && \
- rm -f /tmp/base.txz && \
- cd /opt/cross-freebsd/usr/lib && \
- find . -xtype l | xargs ls -l | grep ' /lib/' | awk '{print "ln -sf /opt/cross-freebsd"$11 " " $9}' | /bin/sh && \
- ln -s libc++.a libstdc++.a && \
- ln -s libc++.so libstdc++.so
-
-# macOS cross-compilation setup
-ENV OSX_SDK_VERSION 11.3
-ENV OSX_SDK_DOWNLOAD_FILE MacOSX${OSX_SDK_VERSION}.sdk.tar.xz
-ENV OSX_SDK_DOWNLOAD_URL https://github.com/phracker/MacOSX-SDKs/releases/download/${OSX_SDK_VERSION}/${OSX_SDK_DOWNLOAD_FILE}
-ENV OSX_SDK_SHA cd4f08a75577145b8f05245a2975f7c81401d75e9535dcffbb879ee1deefcbf4
-ENV OSXCROSS_REVISION 5e1b71fcceb23952f3229995edca1b6231525b5b
-ENV OSXCROSS_DOWNLOAD_URL https://codeload.github.com/tpoechtrager/osxcross/tar.gz/${OSXCROSS_REVISION}
-ENV OSXCROSS_SHA d3f771bbc20612fea577b18a71be3af2eb5ad2dd44624196cf55de866d008647
-
-RUN cd /tmp && \
- curl -o osxcross.tar.gz $OSXCROSS_DOWNLOAD_URL && \
- echo "$OSXCROSS_SHA osxcross.tar.gz" | sha256sum -c - && \
- mkdir osxcross && \
- tar --strip=1 -C osxcross -xf osxcross.tar.gz && \
- rm -f osxcross.tar.gz && \
- curl -Lo $OSX_SDK_DOWNLOAD_FILE $OSX_SDK_DOWNLOAD_URL && \
- echo "$OSX_SDK_SHA $OSX_SDK_DOWNLOAD_FILE" | sha256sum -c - && \
- mv $OSX_SDK_DOWNLOAD_FILE osxcross/tarballs/ && \
- UNATTENDED=yes SDK_VERSION=$OSX_SDK_VERSION OSX_VERSION_MIN=10.10 osxcross/build.sh && \
- cp osxcross/target/lib/* /usr/lib/ && \
- mv osxcross/target /opt/osx-ndk-x86 && \
- rm -rf /tmp/osxcross
-
-ENV PATH /opt/osx-ndk-x86/bin:$PATH
-
-RUN mkdir -p /root/.ssh && \
- chmod 0700 /root/.ssh && \
- ssh-keyscan github.com > /root/.ssh/known_hosts
-
-# ignore "dubious ownership" errors
+# clang for macos
+# zip for stashapp.zip
+# gcc-extensions for cross-arch build
+# we still target arm soft float?
+RUN apt-get update && \
+ apt-get install -y --no-install-recommends \
+ git make bash \
+ clang zip \
+ gcc-mingw-w64 \
+ gcc-arm-linux-gnueabi \
+ libc-dev-armel-cross linux-libc-dev-armel-cross \
+ gcc-aarch64-linux-gnu libc-dev-arm64-cross && \
+ rm -rf /var/lib/apt/lists/*;
RUN git config --global safe.directory '*'
-
# To test locally:
# make generate
# make ui
# cd docker/compiler
-# make build
-# docker run --rm -v /PATH_TO_STASH:/stash -w /stash -i -t stashapp/compiler:latest make build-cc-all
-# # binaries will show up in /dist
+# docker build . -t ghcr.io/stashapp/compiler:latest
+# docker run --rm -v /PATH_TO_STASH:/stash -w /stash -i -t ghcr.io/stashapp/compiler:latest make build-cc-all
+# # binaries will show up in /dist
\ No newline at end of file
diff --git a/docker/compiler/Makefile b/docker/compiler/Makefile
index ed6a9a285..2a81222a0 100644
--- a/docker/compiler/Makefile
+++ b/docker/compiler/Makefile
@@ -1,16 +1,22 @@
+host=ghcr.io
user=stashapp
repo=compiler
-version=12
+version=14
+
+VERSION_IMAGE = ${host}/${user}/${repo}:${version}
+LATEST_IMAGE = ${host}/${user}/${repo}:latest
latest:
- docker build -t ${user}/${repo}:latest .
+ docker build -t ${LATEST_IMAGE} .
build:
- docker build -t ${user}/${repo}:${version} -t ${user}/${repo}:latest .
+ docker build -t ${VERSION_IMAGE} -t ${LATEST_IMAGE} .
build-no-cache:
- docker build --no-cache -t ${user}/${repo}:${version} -t ${user}/${repo}:latest .
+ docker build --no-cache -t ${VERSION_IMAGE} -t ${LATEST_IMAGE} .
-install: build
- docker push ${user}/${repo}:${version}
- docker push ${user}/${repo}:latest
+# requires docker login ghcr.io
+# echo $CR_PAT | docker login ghcr.io -u USERNAME --password-stdin
+push:
+ docker push ${VERSION_IMAGE}
+ docker push ${LATEST_IMAGE}
\ No newline at end of file
diff --git a/docker/compiler/README.md b/docker/compiler/README.md
index 6bb7d8d99..c7b4840f9 100644
--- a/docker/compiler/README.md
+++ b/docker/compiler/README.md
@@ -1,3 +1,3 @@
Modified from https://github.com/bep/dockerfiles/tree/master/ci-goreleaser
-When the Dockerfile is changed, the version number should be incremented in the Makefile and the new version tag should be pushed to Docker Hub. The GitHub workflow files also need to be updated to pull the correct image tag.
+When the Dockerfile is changed, the version number should be incremented in [.github/workflows/build-compiler.yml](../../.github/workflows/build-compiler.yml) and the workflow [manually run](https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow). `env: COMPILER_IMAGE` in [.github/workflows/build.yml](../../.github/workflows/build.yml) also needs to be updated to pull the correct image tag.
\ No newline at end of file
diff --git a/docs/DEVELOPMENT.md b/docs/DEVELOPMENT.md
index 85c2f6f23..a26ce6817 100644
--- a/docs/DEVELOPMENT.md
+++ b/docs/DEVELOPMENT.md
@@ -118,8 +118,8 @@ This project uses a modification of the [CI-GoReleaser](https://github.com/bep/d
To cross-compile the app yourself:
1. Run `make pre-ui`, `make generate` and `make ui` outside the container, to generate files and build the UI.
-2. Pull the latest compiler image from Docker Hub: `docker pull stashapp/compiler`
-3. Run `docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -it stashapp/compiler /bin/bash` to open a shell inside the container.
+2. Pull the latest compiler image from GHCR: `docker pull ghcr.io/stashapp/compiler`
+3. Run `docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -it ghcr.io/stashapp/compiler /bin/bash` to open a shell inside the container.
4. From inside the container, run `make build-cc-all` to build for all platforms, or run `make build-cc-{platform}` to build for a specific platform (have a look at the `Makefile` for the list of targets).
5. You will find the compiled binaries in `dist/`.
diff --git a/go.mod b/go.mod
index db0d6fe34..48495d738 100644
--- a/go.mod
+++ b/go.mod
@@ -1,6 +1,6 @@
module github.com/stashapp/stash
-go 1.24.3
+go 1.25.0
require (
github.com/99designs/gqlgen v0.17.73
@@ -15,6 +15,7 @@ require (
github.com/disintegration/imaging v1.6.2
github.com/dop251/goja v0.0.0-20231027120936-b396bb4c349d
github.com/doug-martin/goqu/v9 v9.18.0
+ github.com/feederbox826/gosx-notifier v0.2.2
github.com/go-chi/chi/v5 v5.2.2
github.com/go-chi/cors v1.2.1
github.com/go-chi/httplog v0.3.1
@@ -30,7 +31,6 @@ require (
github.com/jinzhu/copier v0.4.0
github.com/jmoiron/sqlx v1.4.0
github.com/json-iterator/go v1.1.12
- github.com/kermieisinthehouse/gosx-notifier v0.1.2
github.com/kermieisinthehouse/systray v1.2.4
github.com/knadh/koanf/parsers/yaml v1.1.0
github.com/knadh/koanf/providers/env v1.1.0
@@ -44,6 +44,7 @@ require (
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8
github.com/remeh/sizedwaitgroup v1.0.0
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd
+ github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06
github.com/sirupsen/logrus v1.9.3
github.com/spf13/cast v1.6.0
github.com/spf13/pflag v1.0.6
@@ -55,12 +56,12 @@ require (
github.com/vektra/mockery/v2 v2.10.0
github.com/xWTF/chardet v0.0.0-20230208095535-c780f2ac244e
github.com/zencoder/go-dash/v3 v3.0.2
- golang.org/x/crypto v0.45.0
- golang.org/x/image v0.18.0
- golang.org/x/net v0.47.0
- golang.org/x/sys v0.38.0
- golang.org/x/term v0.37.0
- golang.org/x/text v0.31.0
+ golang.org/x/crypto v0.48.0
+ golang.org/x/image v0.38.0
+ golang.org/x/net v0.50.0
+ golang.org/x/sys v0.41.0
+ golang.org/x/term v0.40.0
+ golang.org/x/text v0.35.0
golang.org/x/time v0.10.0
gopkg.in/guregu/null.v4 v4.0.0
gopkg.in/natefinch/lumberjack.v2 v2.2.1
@@ -69,7 +70,7 @@ require (
require (
github.com/agnivade/levenshtein v1.2.1 // indirect
- github.com/antchfx/xpath v1.3.5 // indirect
+ github.com/antchfx/xpath v1.3.6 // indirect
github.com/asticode/go-astikit v0.20.0 // indirect
github.com/asticode/go-astits v1.8.0 // indirect
github.com/chromedp/sysutil v1.1.0 // indirect
@@ -120,9 +121,9 @@ require (
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
go.uber.org/atomic v1.11.0 // indirect
go.yaml.in/yaml/v3 v3.0.3 // indirect
- golang.org/x/mod v0.29.0 // indirect
- golang.org/x/sync v0.18.0 // indirect
- golang.org/x/tools v0.38.0 // indirect
+ golang.org/x/mod v0.33.0 // indirect
+ golang.org/x/sync v0.20.0 // indirect
+ golang.org/x/tools v0.42.0 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
diff --git a/go.sum b/go.sum
index dbe82cf99..25a5fd02a 100644
--- a/go.sum
+++ b/go.sum
@@ -87,8 +87,9 @@ github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kk
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
github.com/antchfx/htmlquery v1.3.5 h1:aYthDDClnG2a2xePf6tys/UyyM/kRcsFRm+ifhFKoU0=
github.com/antchfx/htmlquery v1.3.5/go.mod h1:5oyIPIa3ovYGtLqMPNjBF2Uf25NPCKsMjCnQ8lvjaoA=
-github.com/antchfx/xpath v1.3.5 h1:PqbXLC3TkfeZyakF5eeh3NTWEbYl4VHNVeufANzDbKQ=
github.com/antchfx/xpath v1.3.5/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
+github.com/antchfx/xpath v1.3.6 h1:s0y+ElRRtTQdfHP609qFu0+c6bglDv20pqOViQjjdPI=
+github.com/antchfx/xpath v1.3.6/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
@@ -187,6 +188,8 @@ github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
+github.com/feederbox826/gosx-notifier v0.2.2 h1:26NkaJZ8Wzptx82R46c9pkVAcFwGSU7kxWrOKmRWlC0=
+github.com/feederbox826/gosx-notifier v0.2.2/go.mod h1:R6rqw7VuwuiCuvsr7EOONmWq++CRA5Ijmkmx75/C3Fs=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU=
@@ -389,8 +392,6 @@ github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
-github.com/kermieisinthehouse/gosx-notifier v0.1.2 h1:KV0KBeKK2B24kIHY7iK0jgS64Q05f4oB+hUZmsPodxQ=
-github.com/kermieisinthehouse/gosx-notifier v0.1.2/go.mod h1:xyWT07azFtUOcHl96qMVvKhvKzsMcS7rKTHQyv8WTho=
github.com/kermieisinthehouse/systray v1.2.4 h1:pdH5vnl+KKjRrVCRU4g/2W1/0HVzuuJ6WXHlPPHYY6s=
github.com/kermieisinthehouse/systray v1.2.4/go.mod h1:axh6C/jNuSyC0QGtidZJURc9h+h41HNoMySoLVrhVR4=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
@@ -537,6 +538,8 @@ github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd h1:CmH9+J6ZSsIjUK
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8=
+github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 h1:OkMGxebDjyw0ULyrTYWeN0UNCCkmCWfjPnIA2W6oviI=
+github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06/go.mod h1:+ePHsJ1keEjQtpvf9HHw0f4ZeJ0TLRsxhunSI2hYJSs=
github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig=
github.com/sagikazarmark/crypt v0.4.0/go.mod h1:ALv2SRj7GxYV4HO9elxH9nS6M9gW+xDNxqmyJ6RfDFM=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
@@ -665,8 +668,8 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
-golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
-golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
+golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=
+golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -680,8 +683,8 @@ golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMk
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
-golang.org/x/image v0.18.0 h1:jGzIakQa/ZXI1I0Fxvaa9W7yP25TqT6cHIHn+6CqvSQ=
-golang.org/x/image v0.18.0/go.mod h1:4yyo5vMFQjVjUcVk4jEQcU9MGy/rulF5WvUILseCM2E=
+golang.org/x/image v0.38.0 h1:5l+q+Y9JDC7mBOMjo4/aPhMDcxEptsX+Tt3GgRQRPuE=
+golang.org/x/image v0.38.0/go.mod h1:/3f6vaXC+6CEanU4KJxbcUZyEePbyKbaLoDOe4ehFYY=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@@ -712,8 +715,8 @@ golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
-golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
-golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
+golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8=
+golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -768,8 +771,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
-golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
-golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
+golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
+golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -804,8 +807,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
-golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
-golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
+golang.org/x/sync v0.20.0 h1:e0PTpb7pjO8GAtTs2dQ6jYa5BWYlMuX047Dco/pItO4=
+golang.org/x/sync v0.20.0/go.mod h1:9xrNwdLfx4jkKbNva9FpL6vEN7evnE43NNNJQ2LF3+0=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -892,8 +895,8 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
-golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
+golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
+golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@@ -903,8 +906,8 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
-golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
-golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254=
+golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg=
+golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -921,8 +924,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
-golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
-golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
+golang.org/x/text v0.35.0 h1:JOVx6vVDFokkpaq1AEptVzLTpDe9KGpj5tR4/X+ybL8=
+golang.org/x/text v0.35.0/go.mod h1:khi/HExzZJ2pGnjenulevKNX1W67CUy0AsXcNubPGCA=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -990,8 +993,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
-golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=
-golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
+golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k=
+golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql
index 7fda85b24..7f07e4579 100644
--- a/graphql/schema/schema.graphql
+++ b/graphql/schema/schema.graphql
@@ -426,6 +426,10 @@ type Mutation {
destroyFiles(ids: [ID!]!): Boolean!
fileSetFingerprints(input: FileSetFingerprintsInput!): Boolean!
+ "Reveal the file in the system file manager"
+ revealFileInFileManager(id: ID!): Boolean!
+ "Reveal the folder in the system file manager"
+ revealFolderInFileManager(id: ID!): Boolean!
# Saved filters
saveFilter(input: SaveFilterInput!): SavedFilter!
@@ -579,6 +583,8 @@ type Mutation {
stashBoxBatchPerformerTag(input: StashBoxBatchTagInput!): String!
"Run batch studio tag task. Returns the job ID."
stashBoxBatchStudioTag(input: StashBoxBatchTagInput!): String!
+ "Run batch tag tag task. Returns the job ID."
+ stashBoxBatchTagTag(input: StashBoxBatchTagInput!): String!
"Enables DLNA for an optional duration. Has no effect if DLNA is enabled by default"
enableDLNA(input: EnableDLNAInput!): Boolean!
diff --git a/graphql/schema/types/file.graphql b/graphql/schema/types/file.graphql
index 835479fad..fcc2a58c8 100644
--- a/graphql/schema/types/file.graphql
+++ b/graphql/schema/types/file.graphql
@@ -6,13 +6,19 @@ type Fingerprint {
type Folder {
id: ID!
path: String!
+ basename: String!
parent_folder_id: ID @deprecated(reason: "Use parent_folder instead")
zip_file_id: ID @deprecated(reason: "Use zip_file instead")
parent_folder: Folder
+ "Returns all parent folders in order from immediate parent to top-level"
+ parent_folders: [Folder!]!
zip_file: BasicFile
+ "Returns direct sub-folders"
+ sub_folders: [Folder!]!
+
mod_time: Time!
created_at: Time!
@@ -153,7 +159,7 @@ input MoveFilesInput {
input SetFingerprintsInput {
type: String!
- "an null value will remove the fingerprint"
+ "a null value will remove the fingerprint"
value: String
}
diff --git a/graphql/schema/types/filters.graphql b/graphql/schema/types/filters.graphql
index 075e40372..c7d880266 100644
--- a/graphql/schema/types/filters.graphql
+++ b/graphql/schema/types/filters.graphql
@@ -152,15 +152,15 @@ input PerformerFilterType {
fake_tits: StringCriterionInput
"Filter by penis length value"
penis_length: FloatCriterionInput
- "Filter by ciricumcision"
+ "Filter by circumcision"
circumcised: CircumcisionCriterionInput
"Deprecated: use career_start and career_end. This filter is non-functional."
career_length: StringCriterionInput
@deprecated(reason: "Use career_start and career_end")
- "Filter by career start year"
- career_start: IntCriterionInput
- "Filter by career end year"
- career_end: IntCriterionInput
+ "Filter by career start"
+ career_start: DateCriterionInput
+ "Filter by career end"
+ career_end: DateCriterionInput
"Filter by tattoos"
tattoos: StringCriterionInput
"Filter by piercings"
@@ -177,6 +177,8 @@ input PerformerFilterType {
tag_count: IntCriterionInput
"Filter by scene count"
scene_count: IntCriterionInput
+ "Filter by marker count (via scene)"
+ marker_count: IntCriterionInput
"Filter by image count"
image_count: IntCriterionInput
"Filter by gallery count"
@@ -220,6 +222,8 @@ input PerformerFilterType {
galleries_filter: GalleryFilterType
"Filter by related tags that meet this criteria"
tags_filter: TagFilterType
+ "Filter by related scene markers (via scene) that meet this criteria"
+ markers_filter: SceneMarkerFilterType
"Filter by creation time"
created_at: TimestampCriterionInput
"Filter by last update time"
@@ -245,9 +249,9 @@ input SceneMarkerFilterType {
updated_at: TimestampCriterionInput
"Filter by scene date"
scene_date: DateCriterionInput
- "Filter by cscene reation time"
+ "Filter by scene creation time"
scene_created_at: TimestampCriterionInput
- "Filter by lscene ast update time"
+ "Filter by scene last update time"
scene_updated_at: TimestampCriterionInput
"Filter by related scenes that meet this criteria"
scene_filter: SceneFilterType
@@ -462,6 +466,9 @@ input GroupFilterType {
scenes_filter: SceneFilterType
"Filter by related studios that meet this criteria"
studios_filter: StudioFilterType
+
+ "Filter by custom fields"
+ custom_fields: [CustomFieldCriterionInput!]
}
input StudioFilterType {
@@ -596,6 +603,10 @@ input GalleryFilterType {
files_filter: FileFilterType
"Filter by related folders that meet this criteria"
folders_filter: FolderFilterType
+ "Filter by parent folder of the zip or folder the gallery is in"
+ parent_folder: HierarchicalMultiCriterionInput
+
+ custom_fields: [CustomFieldCriterionInput!]
}
input TagFilterType {
@@ -654,7 +665,7 @@ input TagFilterType {
"Filter by number of parent tags the tag has"
parent_count: IntCriterionInput
- "Filter by number f child tags the tag has"
+ "Filter by number of child tags the tag has"
child_count: IntCriterionInput
"Filter by autotag ignore value"
@@ -679,6 +690,8 @@ input TagFilterType {
performers_filter: PerformerFilterType
"Filter by related studios that meet this criteria"
studios_filter: StudioFilterType
+ "Filter by related scene markers that meet this criteria"
+ markers_filter: SceneMarkerFilterType
"Filter by creation time"
created_at: TimestampCriterionInput
@@ -760,6 +773,8 @@ input ImageFilterType {
tags_filter: TagFilterType
"Filter by related files that meet this criteria"
files_filter: FileFilterType
+ "Filter by custom fields"
+ custom_fields: [CustomFieldCriterionInput!]
}
input FileFilterType {
@@ -809,6 +824,7 @@ input FolderFilterType {
NOT: FolderFilterType
path: StringCriterionInput
+ basename: StringCriterionInput
parent_folder: HierarchicalMultiCriterionInput
zip_file: MultiCriterionInput
@@ -917,7 +933,7 @@ input GenderCriterionInput {
}
input CircumcisionCriterionInput {
- value: [CircumisedEnum!]
+ value: [CircumcisedEnum!]
modifier: CriterionModifier!
}
diff --git a/graphql/schema/types/gallery.graphql b/graphql/schema/types/gallery.graphql
index f456157a7..e28c3802b 100644
--- a/graphql/schema/types/gallery.graphql
+++ b/graphql/schema/types/gallery.graphql
@@ -32,6 +32,7 @@ type Gallery {
cover: Image
paths: GalleryPathsType! # Resolver
+ custom_fields: Map!
image(index: Int!): Image!
}
@@ -50,6 +51,8 @@ input GalleryCreateInput {
studio_id: ID
tag_ids: [ID!]
performer_ids: [ID!]
+
+ custom_fields: Map
}
input GalleryUpdateInput {
@@ -71,6 +74,8 @@ input GalleryUpdateInput {
performer_ids: [ID!]
primary_file_id: ID
+
+ custom_fields: CustomFieldsInput
}
input BulkGalleryUpdateInput {
@@ -89,6 +94,8 @@ input BulkGalleryUpdateInput {
studio_id: ID
tag_ids: BulkUpdateIds
performer_ids: BulkUpdateIds
+
+ custom_fields: CustomFieldsInput
}
input GalleryDestroyInput {
diff --git a/graphql/schema/types/group.graphql b/graphql/schema/types/group.graphql
index a46932054..8610f39dc 100644
--- a/graphql/schema/types/group.graphql
+++ b/graphql/schema/types/group.graphql
@@ -31,6 +31,7 @@ type Group {
sub_group_count(depth: Int): Int! # Resolver
scenes: [Scene!]!
o_counter: Int # Resolver
+ custom_fields: Map!
}
input GroupDescriptionInput {
@@ -59,6 +60,8 @@ input GroupCreateInput {
front_image: String
"This should be a URL or a base64 encoded data URL"
back_image: String
+
+ custom_fields: Map
}
input GroupUpdateInput {
@@ -82,6 +85,8 @@ input GroupUpdateInput {
front_image: String
"This should be a URL or a base64 encoded data URL"
back_image: String
+
+ custom_fields: CustomFieldsInput
}
input BulkUpdateGroupDescriptionsInput {
@@ -94,6 +99,8 @@ input BulkGroupUpdateInput {
ids: [ID!]
# rating expressed as 1-100
rating100: Int
+ date: String
+ synopsis: String
studio_id: ID
director: String
urls: BulkUpdateStrings
@@ -101,6 +108,8 @@ input BulkGroupUpdateInput {
containing_groups: BulkUpdateGroupDescriptionsInput
sub_groups: BulkUpdateGroupDescriptionsInput
+
+ custom_fields: CustomFieldsInput
}
input GroupDestroyInput {
diff --git a/graphql/schema/types/image.graphql b/graphql/schema/types/image.graphql
index b7ec1a9f5..ccc414542 100644
--- a/graphql/schema/types/image.graphql
+++ b/graphql/schema/types/image.graphql
@@ -21,6 +21,7 @@ type Image {
studio: Studio
tags: [Tag!]!
performers: [Performer!]!
+ custom_fields: Map!
}
type ImageFileType {
@@ -56,6 +57,7 @@ input ImageUpdateInput {
gallery_ids: [ID!]
primary_file_id: ID
+ custom_fields: CustomFieldsInput
}
input BulkImageUpdateInput {
@@ -76,6 +78,7 @@ input BulkImageUpdateInput {
performer_ids: BulkUpdateIds
tag_ids: BulkUpdateIds
gallery_ids: BulkUpdateIds
+ custom_fields: CustomFieldsInput
}
input ImageDestroyInput {
diff --git a/graphql/schema/types/metadata.graphql b/graphql/schema/types/metadata.graphql
index 27cbb86fb..6ad620dbe 100644
--- a/graphql/schema/types/metadata.graphql
+++ b/graphql/schema/types/metadata.graphql
@@ -26,6 +26,8 @@ input GenerateMetadataInput {
imageIDs: [ID!]
"gallery ids to generate for"
galleryIDs: [ID!]
+ "paths to run generate on, in addition to the other ID lists"
+ paths: [String!]
"overwrite existing media"
overwrite: Boolean
@@ -129,6 +131,14 @@ type ScanMetadataOptions {
input CleanMetadataInput {
paths: [String!]
+ """
+ Don't check zip file contents when determining whether to clean a file.
+ This can significantly speed up the clean process, but will potentially miss removed files within zip files.
+ Where users do not modify zip files contents directly, this should be safe to use.
+ Defaults to false.
+ """
+ ignoreZipFileContents: Boolean
+
"Do a dry run. Don't delete any files"
dryRun: Boolean!
}
diff --git a/graphql/schema/types/performer.graphql b/graphql/schema/types/performer.graphql
index 97a80b94f..bf17298da 100644
--- a/graphql/schema/types/performer.graphql
+++ b/graphql/schema/types/performer.graphql
@@ -7,7 +7,7 @@ enum GenderEnum {
NON_BINARY
}
-enum CircumisedEnum {
+enum CircumcisedEnum {
CUT
UNCUT
}
@@ -29,10 +29,10 @@ type Performer {
measurements: String
fake_tits: String
penis_length: Float
- circumcised: CircumisedEnum
+ circumcised: CircumcisedEnum
career_length: String @deprecated(reason: "Use career_start and career_end")
- career_start: Int
- career_end: Int
+ career_start: String
+ career_end: String
tattoos: String
piercings: String
alias_list: [String!]!
@@ -78,10 +78,10 @@ input PerformerCreateInput {
measurements: String
fake_tits: String
penis_length: Float
- circumcised: CircumisedEnum
+ circumcised: CircumcisedEnum
career_length: String @deprecated(reason: "Use career_start and career_end")
- career_start: Int
- career_end: Int
+ career_start: String
+ career_end: String
tattoos: String
piercings: String
"Duplicate aliases and those equal to name will be ignored (case-insensitive)"
@@ -119,10 +119,10 @@ input PerformerUpdateInput {
measurements: String
fake_tits: String
penis_length: Float
- circumcised: CircumisedEnum
+ circumcised: CircumcisedEnum
career_length: String @deprecated(reason: "Use career_start and career_end")
- career_start: Int
- career_end: Int
+ career_start: String
+ career_end: String
tattoos: String
piercings: String
"Duplicate aliases and those equal to name will be ignored (case-insensitive)"
@@ -165,10 +165,10 @@ input BulkPerformerUpdateInput {
measurements: String
fake_tits: String
penis_length: Float
- circumcised: CircumisedEnum
+ circumcised: CircumcisedEnum
career_length: String @deprecated(reason: "Use career_start and career_end")
- career_start: Int
- career_end: Int
+ career_start: String
+ career_end: String
tattoos: String
piercings: String
"Duplicate aliases and those equal to name will result in an error (case-insensitive)"
diff --git a/graphql/schema/types/scraped-performer.graphql b/graphql/schema/types/scraped-performer.graphql
index 0818e61c2..799b5cd6e 100644
--- a/graphql/schema/types/scraped-performer.graphql
+++ b/graphql/schema/types/scraped-performer.graphql
@@ -19,8 +19,8 @@ type ScrapedPerformer {
penis_length: String
circumcised: String
career_length: String @deprecated(reason: "Use career_start and career_end")
- career_start: Int
- career_end: Int
+ career_start: String
+ career_end: String
tattoos: String
piercings: String
# aliases must be comma-delimited to be parsed correctly
@@ -57,8 +57,8 @@ input ScrapedPerformerInput {
penis_length: String
circumcised: String
career_length: String @deprecated(reason: "Use career_start and career_end")
- career_start: Int
- career_end: Int
+ career_start: String
+ career_end: String
tattoos: String
piercings: String
aliases: String
diff --git a/graphql/schema/types/scraper.graphql b/graphql/schema/types/scraper.graphql
index 9c0e33fdf..fafd928f7 100644
--- a/graphql/schema/types/scraper.graphql
+++ b/graphql/schema/types/scraper.graphql
@@ -71,6 +71,9 @@ type ScrapedTag {
"Set if tag matched"
stored_id: ID
name: String!
+ description: String
+ alias_list: [String!]
+ parent: ScrapedTag
"Remote site ID, if applicable"
remote_site_id: String
}
diff --git a/graphql/stash-box/query.graphql b/graphql/stash-box/query.graphql
index e2686ac4d..ebaf05648 100644
--- a/graphql/stash-box/query.graphql
+++ b/graphql/stash-box/query.graphql
@@ -29,6 +29,13 @@ fragment StudioFragment on Studio {
fragment TagFragment on Tag {
name
id
+ description
+ aliases
+ category {
+ id
+ name
+ description
+ }
}
fragment MeasurementsFragment on Measurements {
diff --git a/internal/api/authentication.go b/internal/api/authentication.go
index 6ad7117a1..be399d222 100644
--- a/internal/api/authentication.go
+++ b/internal/api/authentication.go
@@ -40,6 +40,8 @@ func authenticateHandler() func(http.Handler) http.Handler {
return
}
+ r = session.SetLocalRequest(r)
+
userID, err := manager.GetInstance().SessionStore.Authenticate(w, r)
if err != nil {
if !errors.Is(err, session.ErrUnauthorized) {
diff --git a/internal/api/check_version.go b/internal/api/check_version.go
index f4c2950f1..10cb2b47a 100644
--- a/internal/api/check_version.go
+++ b/internal/api/check_version.go
@@ -148,12 +148,12 @@ func makeGithubRequest(ctx context.Context, url string, output interface{}) erro
response, err := client.Do(req)
if err != nil {
- //lint:ignore ST1005 Github is a proper capitalized noun
+ //nolint:staticcheck // ST1005 Github is a proper capitalized noun
return fmt.Errorf("Github API request failed: %w", err)
}
if response.StatusCode != http.StatusOK {
- //lint:ignore ST1005 Github is a proper capitalized noun
+ //nolint:staticcheck // ST1005 Github is a proper capitalized noun
return fmt.Errorf("Github API request failed: %s", response.Status)
}
@@ -161,7 +161,7 @@ func makeGithubRequest(ctx context.Context, url string, output interface{}) erro
data, err := io.ReadAll(response.Body)
if err != nil {
- //lint:ignore ST1005 Github is a proper capitalized noun
+ //nolint:staticcheck // ST1005 Github is a proper capitalized noun
return fmt.Errorf("Github API read response failed: %w", err)
}
@@ -295,10 +295,10 @@ func printLatestVersion(ctx context.Context) {
logger.Errorf("Couldn't retrieve latest version: %v", err)
} else {
_, githash, _ := build.Version()
- switch {
- case githash == "":
+ switch githash {
+ case "":
logger.Infof("Latest version: %s (%s)", latestRelease.Version, latestRelease.ShortHash)
- case githash == latestRelease.ShortHash:
+ case latestRelease.ShortHash:
logger.Infof("Version %s (%s) is already the latest released", latestRelease.Version, latestRelease.ShortHash)
default:
logger.Infof("New version available: %s (%s)", latestRelease.Version, latestRelease.ShortHash)
diff --git a/internal/api/loaders/dataloaders.go b/internal/api/loaders/dataloaders.go
index 520714432..c1faf61ed 100644
--- a/internal/api/loaders/dataloaders.go
+++ b/internal/api/loaders/dataloaders.go
@@ -11,6 +11,7 @@
//go:generate go run github.com/vektah/dataloaden GroupLoader int *github.com/stashapp/stash/pkg/models.Group
//go:generate go run github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/models.FileID github.com/stashapp/stash/pkg/models.File
//go:generate go run github.com/vektah/dataloaden FolderLoader github.com/stashapp/stash/pkg/models.FolderID *github.com/stashapp/stash/pkg/models.Folder
+//go:generate go run github.com/vektah/dataloaden FolderRelatedFolderIDsLoader github.com/stashapp/stash/pkg/models.FolderID []github.com/stashapp/stash/pkg/models.FolderID
//go:generate go run github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
@@ -54,8 +55,10 @@ type Loaders struct {
ImageFiles *ImageFileIDsLoader
GalleryFiles *GalleryFileIDsLoader
- GalleryByID *GalleryLoader
- ImageByID *ImageLoader
+ GalleryByID *GalleryLoader
+ GalleryCustomFields *CustomFieldsLoader
+ ImageByID *ImageLoader
+ ImageCustomFields *CustomFieldsLoader
PerformerByID *PerformerLoader
PerformerCustomFields *CustomFieldsLoader
@@ -65,9 +68,15 @@ type Loaders struct {
TagByID *TagLoader
TagCustomFields *CustomFieldsLoader
- GroupByID *GroupLoader
- FileByID *FileLoader
- FolderByID *FolderLoader
+
+ GroupByID *GroupLoader
+ GroupCustomFields *CustomFieldsLoader
+
+ FileByID *FileLoader
+
+ FolderByID *FolderLoader
+ FolderParentFolderIDs *FolderRelatedFolderIDsLoader
+ FolderSubFolderIDs *FolderRelatedFolderIDsLoader
}
type Middleware struct {
@@ -88,11 +97,21 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch,
fetch: m.fetchGalleries(ctx),
},
+ GalleryCustomFields: &CustomFieldsLoader{
+ wait: wait,
+ maxBatch: maxBatch,
+ fetch: m.fetchGalleryCustomFields(ctx),
+ },
ImageByID: &ImageLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchImages(ctx),
},
+ ImageCustomFields: &CustomFieldsLoader{
+ wait: wait,
+ maxBatch: maxBatch,
+ fetch: m.fetchImageCustomFields(ctx),
+ },
PerformerByID: &PerformerLoader{
wait: wait,
maxBatch: maxBatch,
@@ -133,6 +152,11 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch,
fetch: m.fetchGroups(ctx),
},
+ GroupCustomFields: &CustomFieldsLoader{
+ wait: wait,
+ maxBatch: maxBatch,
+ fetch: m.fetchGroupCustomFields(ctx),
+ },
FileByID: &FileLoader{
wait: wait,
maxBatch: maxBatch,
@@ -143,6 +167,16 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch,
fetch: m.fetchFolders(ctx),
},
+ FolderParentFolderIDs: &FolderRelatedFolderIDsLoader{
+ wait: wait,
+ maxBatch: maxBatch,
+ fetch: m.fetchFoldersParentFolderIDs(ctx),
+ },
+ FolderSubFolderIDs: &FolderRelatedFolderIDsLoader{
+ wait: wait,
+ maxBatch: maxBatch,
+ fetch: m.fetchFoldersSubFolderIDs(ctx),
+ },
SceneFiles: &SceneFileIDsLoader{
wait: wait,
maxBatch: maxBatch,
@@ -237,6 +271,18 @@ func (m Middleware) fetchImages(ctx context.Context) func(keys []int) ([]*models
}
}
+func (m Middleware) fetchImageCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) {
+ return func(keys []int) (ret []models.CustomFieldMap, errs []error) {
+ err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
+ var err error
+ ret, err = m.Repository.Image.GetCustomFieldsBulk(ctx, keys)
+ return err
+ })
+
+ return ret, toErrorSlice(err)
+ }
+}
+
func (m Middleware) fetchGalleries(ctx context.Context) func(keys []int) ([]*models.Gallery, []error) {
return func(keys []int) (ret []*models.Gallery, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
@@ -319,6 +365,30 @@ func (m Middleware) fetchTagCustomFields(ctx context.Context) func(keys []int) (
}
}
+func (m Middleware) fetchGroupCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) {
+ return func(keys []int) (ret []models.CustomFieldMap, errs []error) {
+ err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
+ var err error
+ ret, err = m.Repository.Group.GetCustomFieldsBulk(ctx, keys)
+ return err
+ })
+
+ return ret, toErrorSlice(err)
+ }
+}
+
+func (m Middleware) fetchGalleryCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) {
+ return func(keys []int) (ret []models.CustomFieldMap, errs []error) {
+ err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
+ var err error
+ ret, err = m.Repository.Gallery.GetCustomFieldsBulk(ctx, keys)
+ return err
+ })
+
+ return ret, toErrorSlice(err)
+ }
+}
+
func (m Middleware) fetchGroups(ctx context.Context) func(keys []int) ([]*models.Group, []error) {
return func(keys []int) (ret []*models.Group, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
@@ -352,6 +422,28 @@ func (m Middleware) fetchFolders(ctx context.Context) func(keys []models.FolderI
}
}
+func (m Middleware) fetchFoldersParentFolderIDs(ctx context.Context) func(keys []models.FolderID) ([][]models.FolderID, []error) {
+ return func(keys []models.FolderID) (ret [][]models.FolderID, errs []error) {
+ err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
+ var err error
+ ret, err = m.Repository.Folder.GetManyParentFolderIDs(ctx, keys)
+ return err
+ })
+ return ret, toErrorSlice(err)
+ }
+}
+
+func (m Middleware) fetchFoldersSubFolderIDs(ctx context.Context) func(keys []models.FolderID) ([][]models.FolderID, []error) {
+ return func(keys []models.FolderID) (ret [][]models.FolderID, errs []error) {
+ err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
+ var err error
+ ret, err = m.Repository.Folder.GetManySubFolderIDs(ctx, keys)
+ return err
+ })
+ return ret, toErrorSlice(err)
+ }
+}
+
func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([][]models.FileID, []error) {
return func(keys []int) (ret [][]models.FileID, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
diff --git a/internal/api/loaders/folderrelatedfolderidsloader_gen.go b/internal/api/loaders/folderrelatedfolderidsloader_gen.go
new file mode 100644
index 000000000..d0edb92f4
--- /dev/null
+++ b/internal/api/loaders/folderrelatedfolderidsloader_gen.go
@@ -0,0 +1,225 @@
+// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
+
+package loaders
+
+import (
+ "sync"
+ "time"
+
+ "github.com/stashapp/stash/pkg/models"
+)
+
+// FolderParentFolderIDsLoaderConfig captures the config to create a new FolderParentFolderIDsLoader
+type FolderParentFolderIDsLoaderConfig struct {
+ // Fetch is a method that provides the data for the loader
+ Fetch func(keys []models.FolderID) ([][]models.FolderID, []error)
+
+ // Wait is how long wait before sending a batch
+ Wait time.Duration
+
+ // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit
+ MaxBatch int
+}
+
+// NewFolderParentFolderIDsLoader creates a new FolderParentFolderIDsLoader given a fetch, wait, and maxBatch
+func NewFolderParentFolderIDsLoader(config FolderParentFolderIDsLoaderConfig) *FolderRelatedFolderIDsLoader {
+ return &FolderRelatedFolderIDsLoader{
+ fetch: config.Fetch,
+ wait: config.Wait,
+ maxBatch: config.MaxBatch,
+ }
+}
+
+// FolderRelatedFolderIDsLoader batches and caches requests
+type FolderRelatedFolderIDsLoader struct {
+ // this method provides the data for the loader
+ fetch func(keys []models.FolderID) ([][]models.FolderID, []error)
+
+ // how long to done before sending a batch
+ wait time.Duration
+
+ // this will limit the maximum number of keys to send in one batch, 0 = no limit
+ maxBatch int
+
+ // INTERNAL
+
+ // lazily created cache
+ cache map[models.FolderID][]models.FolderID
+
+ // the current batch. keys will continue to be collected until timeout is hit,
+ // then everything will be sent to the fetch method and out to the listeners
+ batch *folderParentFolderIDsLoaderBatch
+
+ // mutex to prevent races
+ mu sync.Mutex
+}
+
+type folderParentFolderIDsLoaderBatch struct {
+ keys []models.FolderID
+ data [][]models.FolderID
+ error []error
+ closing bool
+ done chan struct{}
+}
+
+// Load a FolderID by key, batching and caching will be applied automatically
+func (l *FolderRelatedFolderIDsLoader) Load(key models.FolderID) ([]models.FolderID, error) {
+ return l.LoadThunk(key)()
+}
+
+// LoadThunk returns a function that when called will block waiting for a FolderID.
+// This method should be used if you want one goroutine to make requests to many
+// different data loaders without blocking until the thunk is called.
+func (l *FolderRelatedFolderIDsLoader) LoadThunk(key models.FolderID) func() ([]models.FolderID, error) {
+ l.mu.Lock()
+ if it, ok := l.cache[key]; ok {
+ l.mu.Unlock()
+ return func() ([]models.FolderID, error) {
+ return it, nil
+ }
+ }
+ if l.batch == nil {
+ l.batch = &folderParentFolderIDsLoaderBatch{done: make(chan struct{})}
+ }
+ batch := l.batch
+ pos := batch.keyIndex(l, key)
+ l.mu.Unlock()
+
+ return func() ([]models.FolderID, error) {
+ <-batch.done
+
+ var data []models.FolderID
+ if pos < len(batch.data) {
+ data = batch.data[pos]
+ }
+
+ var err error
+ // its convenient to be able to return a single error for everything
+ if len(batch.error) == 1 {
+ err = batch.error[0]
+ } else if batch.error != nil {
+ err = batch.error[pos]
+ }
+
+ if err == nil {
+ l.mu.Lock()
+ l.unsafeSet(key, data)
+ l.mu.Unlock()
+ }
+
+ return data, err
+ }
+}
+
+// LoadAll fetches many keys at once. It will be broken into appropriate sized
+// sub batches depending on how the loader is configured
+func (l *FolderRelatedFolderIDsLoader) LoadAll(keys []models.FolderID) ([][]models.FolderID, []error) {
+ results := make([]func() ([]models.FolderID, error), len(keys))
+
+ for i, key := range keys {
+ results[i] = l.LoadThunk(key)
+ }
+
+ folderIDs := make([][]models.FolderID, len(keys))
+ errors := make([]error, len(keys))
+ for i, thunk := range results {
+ folderIDs[i], errors[i] = thunk()
+ }
+ return folderIDs, errors
+}
+
+// LoadAllThunk returns a function that when called will block waiting for a FolderIDs.
+// This method should be used if you want one goroutine to make requests to many
+// different data loaders without blocking until the thunk is called.
+func (l *FolderRelatedFolderIDsLoader) LoadAllThunk(keys []models.FolderID) func() ([][]models.FolderID, []error) {
+ results := make([]func() ([]models.FolderID, error), len(keys))
+ for i, key := range keys {
+ results[i] = l.LoadThunk(key)
+ }
+ return func() ([][]models.FolderID, []error) {
+ folderIDs := make([][]models.FolderID, len(keys))
+ errors := make([]error, len(keys))
+ for i, thunk := range results {
+ folderIDs[i], errors[i] = thunk()
+ }
+ return folderIDs, errors
+ }
+}
+
+// Prime the cache with the provided key and value. If the key already exists, no change is made
+// and false is returned.
+// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
+func (l *FolderRelatedFolderIDsLoader) Prime(key models.FolderID, value []models.FolderID) bool {
+ l.mu.Lock()
+ var found bool
+ if _, found = l.cache[key]; !found {
+ // make a copy when writing to the cache, its easy to pass a pointer in from a loop var
+ // and end up with the whole cache pointing to the same value.
+ cpy := make([]models.FolderID, len(value))
+ copy(cpy, value)
+ l.unsafeSet(key, cpy)
+ }
+ l.mu.Unlock()
+ return !found
+}
+
+// Clear the value at key from the cache, if it exists
+func (l *FolderRelatedFolderIDsLoader) Clear(key models.FolderID) {
+ l.mu.Lock()
+ delete(l.cache, key)
+ l.mu.Unlock()
+}
+
+func (l *FolderRelatedFolderIDsLoader) unsafeSet(key models.FolderID, value []models.FolderID) {
+ if l.cache == nil {
+ l.cache = map[models.FolderID][]models.FolderID{}
+ }
+ l.cache[key] = value
+}
+
+// keyIndex will return the location of the key in the batch, if its not found
+// it will add the key to the batch
+func (b *folderParentFolderIDsLoaderBatch) keyIndex(l *FolderRelatedFolderIDsLoader, key models.FolderID) int {
+ for i, existingKey := range b.keys {
+ if key == existingKey {
+ return i
+ }
+ }
+
+ pos := len(b.keys)
+ b.keys = append(b.keys, key)
+ if pos == 0 {
+ go b.startTimer(l)
+ }
+
+ if l.maxBatch != 0 && pos >= l.maxBatch-1 {
+ if !b.closing {
+ b.closing = true
+ l.batch = nil
+ go b.end(l)
+ }
+ }
+
+ return pos
+}
+
+func (b *folderParentFolderIDsLoaderBatch) startTimer(l *FolderRelatedFolderIDsLoader) {
+ time.Sleep(l.wait)
+ l.mu.Lock()
+
+ // we must have hit a batch limit and are already finalizing this batch
+ if b.closing {
+ l.mu.Unlock()
+ return
+ }
+
+ l.batch = nil
+ l.mu.Unlock()
+
+ b.end(l)
+}
+
+func (b *folderParentFolderIDsLoaderBatch) end(l *FolderRelatedFolderIDsLoader) {
+ b.data, b.error = l.fetch(b.keys)
+ close(b.done)
+}
diff --git a/internal/api/resolver.go b/internal/api/resolver.go
index 061d0e1a9..b1cec1c9d 100644
--- a/internal/api/resolver.go
+++ b/internal/api/resolver.go
@@ -7,6 +7,7 @@ import (
"sort"
"strconv"
+ "github.com/99designs/gqlgen/graphql"
"github.com/stashapp/stash/internal/build"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/logger"
@@ -145,6 +146,13 @@ func (r *Resolver) withReadTxn(ctx context.Context, fn func(ctx context.Context)
return r.repository.WithReadTxn(ctx, fn)
}
+// idOnly returns true if the query is only asking for the id field.
+// This can be used to optimize certain queries where we don't need to load the full object if we're only getting the id.
+func (r *Resolver) idOnly(ctx context.Context) bool {
+ fields := graphql.CollectAllFields(ctx)
+ return len(fields) == 1 && fields[0] == "id"
+}
+
func (r *queryResolver) MarkerWall(ctx context.Context, q *string) (ret []*models.SceneMarker, err error) {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.SceneMarker.Wall(ctx, q)
diff --git a/internal/api/resolver_model_folder.go b/internal/api/resolver_model_folder.go
index ee6bbfd05..725ca34f8 100644
--- a/internal/api/resolver_model_folder.go
+++ b/internal/api/resolver_model_folder.go
@@ -2,19 +2,77 @@ package api
import (
"context"
+ "path/filepath"
"github.com/stashapp/stash/internal/api/loaders"
"github.com/stashapp/stash/pkg/models"
)
+func (r *folderResolver) Basename(ctx context.Context, obj *models.Folder) (string, error) {
+ return filepath.Base(obj.Path), nil
+}
+
func (r *folderResolver) ParentFolder(ctx context.Context, obj *models.Folder) (*models.Folder, error) {
if obj.ParentFolderID == nil {
return nil, nil
}
+ if r.idOnly(ctx) {
+ return &models.Folder{ID: *obj.ParentFolderID}, nil
+ }
+
return loaders.From(ctx).FolderByID.Load(*obj.ParentFolderID)
}
+func foldersFromIDs(ids []models.FolderID) []*models.Folder {
+ ret := make([]*models.Folder, len(ids))
+ for i, id := range ids {
+ ret[i] = &models.Folder{ID: id}
+ }
+ return ret
+}
+
+func (r *folderResolver) ParentFolders(ctx context.Context, obj *models.Folder) ([]*models.Folder, error) {
+ ids, err := loaders.From(ctx).FolderParentFolderIDs.Load(obj.ID)
+ if err != nil {
+ return nil, err
+ }
+
+ if r.idOnly(ctx) {
+ return foldersFromIDs(ids), nil
+ }
+
+ var errs []error
+ ret, errs := loaders.From(ctx).FolderByID.LoadAll(ids)
+ return ret, firstError(errs)
+}
+
+func (r *folderResolver) SubFolders(ctx context.Context, obj *models.Folder) ([]*models.Folder, error) {
+ ids, err := loaders.From(ctx).FolderSubFolderIDs.Load(obj.ID)
+ if err != nil {
+ return nil, err
+ }
+
+ if r.idOnly(ctx) {
+ return foldersFromIDs(ids), nil
+ }
+
+ var errs []error
+ ret, errs := loaders.From(ctx).FolderByID.LoadAll(ids)
+ return ret, firstError(errs)
+}
+
func (r *folderResolver) ZipFile(ctx context.Context, obj *models.Folder) (*BasicFile, error) {
+ // shortcut for id only queries
+ if r.idOnly(ctx) {
+ if obj.ZipFileID == nil {
+ return nil, nil
+ }
+
+ return &BasicFile{
+ BaseFile: &models.BaseFile{ID: *obj.ZipFileID},
+ }, nil
+ }
+
return zipFileResolver(ctx, obj.ZipFileID)
}
diff --git a/internal/api/resolver_model_gallery.go b/internal/api/resolver_model_gallery.go
index 9dc68b4c4..773a831d8 100644
--- a/internal/api/resolver_model_gallery.go
+++ b/internal/api/resolver_model_gallery.go
@@ -216,3 +216,16 @@ func (r *galleryResolver) Image(ctx context.Context, obj *models.Gallery, index
return
}
+
+func (r *galleryResolver) CustomFields(ctx context.Context, obj *models.Gallery) (map[string]interface{}, error) {
+ m, err := loaders.From(ctx).GalleryCustomFields.Load(obj.ID)
+ if err != nil {
+ return nil, err
+ }
+
+ if m == nil {
+ return make(map[string]interface{}), nil
+ }
+
+ return m, nil
+}
diff --git a/internal/api/resolver_model_image.go b/internal/api/resolver_model_image.go
index 0886bea40..4a95ae1f4 100644
--- a/internal/api/resolver_model_image.go
+++ b/internal/api/resolver_model_image.go
@@ -161,3 +161,12 @@ func (r *imageResolver) Urls(ctx context.Context, obj *models.Image) ([]string,
return obj.URLs.List(), nil
}
+
+func (r *imageResolver) CustomFields(ctx context.Context, obj *models.Image) (map[string]interface{}, error) {
+ customFields, err := loaders.From(ctx).ImageCustomFields.Load(obj.ID)
+ if err != nil {
+ return nil, err
+ }
+
+ return customFields, nil
+}
diff --git a/internal/api/resolver_model_movie.go b/internal/api/resolver_model_movie.go
index 317123c6e..287d5d51a 100644
--- a/internal/api/resolver_model_movie.go
+++ b/internal/api/resolver_model_movie.go
@@ -215,3 +215,16 @@ func (r *groupResolver) OCounter(ctx context.Context, obj *models.Group) (ret *i
}
return &count, nil
}
+
+func (r *groupResolver) CustomFields(ctx context.Context, obj *models.Group) (map[string]interface{}, error) {
+ m, err := loaders.From(ctx).GroupCustomFields.Load(obj.ID)
+ if err != nil {
+ return nil, err
+ }
+
+ if m == nil {
+ return make(map[string]interface{}), nil
+ }
+
+ return m, nil
+}
diff --git a/internal/api/resolver_model_performer.go b/internal/api/resolver_model_performer.go
index b770f5801..261a98ff3 100644
--- a/internal/api/resolver_model_performer.go
+++ b/internal/api/resolver_model_performer.go
@@ -10,7 +10,6 @@ import (
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/performer"
- "github.com/stashapp/stash/pkg/utils"
)
func (r *performerResolver) AliasList(ctx context.Context, obj *models.Performer) ([]string, error) {
@@ -110,12 +109,28 @@ func (r *performerResolver) HeightCm(ctx context.Context, obj *models.Performer)
return obj.Height, nil
}
+func (r *performerResolver) CareerStart(ctx context.Context, obj *models.Performer) (*string, error) {
+ if obj.CareerStart != nil {
+ ret := obj.CareerStart.String()
+ return &ret, nil
+ }
+ return nil, nil
+}
+
+func (r *performerResolver) CareerEnd(ctx context.Context, obj *models.Performer) (*string, error) {
+ if obj.CareerEnd != nil {
+ ret := obj.CareerEnd.String()
+ return &ret, nil
+ }
+ return nil, nil
+}
+
func (r *performerResolver) CareerLength(ctx context.Context, obj *models.Performer) (*string, error) {
if obj.CareerStart == nil && obj.CareerEnd == nil {
return nil, nil
}
- ret := utils.FormatYearRange(obj.CareerStart, obj.CareerEnd)
+ ret := models.FormatYearRange(obj.CareerStart, obj.CareerEnd)
return &ret, nil
}
diff --git a/internal/api/resolver_model_scene.go b/internal/api/resolver_model_scene.go
index 81113d858..ecb163765 100644
--- a/internal/api/resolver_model_scene.go
+++ b/internal/api/resolver_model_scene.go
@@ -114,7 +114,7 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*ScenePat
objHash := obj.GetHash(config.GetVideoFileNamingAlgorithm())
vttPath := builder.GetSpriteVTTURL(objHash)
spritePath := builder.GetSpriteURL(objHash)
- funscriptPath := builder.GetFunscriptURL()
+ funscriptPath := builder.GetFunscriptURL(config.GetAPIKey()).String()
captionBasePath := builder.GetCaptionURL()
interactiveHeatmap := builder.GetInteractiveHeatmapURL()
diff --git a/internal/api/resolver_mutation_configure.go b/internal/api/resolver_mutation_configure.go
index 718d24998..3df1c9114 100644
--- a/internal/api/resolver_mutation_configure.go
+++ b/internal/api/resolver_mutation_configure.go
@@ -5,6 +5,7 @@ import (
"encoding/json"
"errors"
"fmt"
+ "io/fs"
"path/filepath"
"regexp"
"strconv"
@@ -85,6 +86,8 @@ func (r *mutationResolver) setConfigFloat(key string, value *float64) {
func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGeneralInput) (*ConfigGeneralResult, error) {
c := config.GetInstance()
+ // #4709 - allow stash paths even if they do not exist, so that users may configure stash
+ // for disconnected drives or network storage.
existingPaths := c.GetStashPaths()
if input.Stashes != nil {
for _, s := range input.Stashes {
@@ -97,8 +100,12 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen
}
}
if isNew {
+ s.Path = filepath.Clean(s.Path)
+
+ // if it exists, it must be directory
exists, err := fsutil.DirExists(s.Path)
- if !exists {
+ // allow it to not exist but if it does exist it must be a directory
+ if !exists && !errors.Is(err, fs.ErrNotExist) {
return makeConfigGeneralResult(), err
}
}
diff --git a/internal/api/resolver_mutation_file.go b/internal/api/resolver_mutation_file.go
index afbefe554..b9e36aa76 100644
--- a/internal/api/resolver_mutation_file.go
+++ b/internal/api/resolver_mutation_file.go
@@ -5,10 +5,14 @@ import (
"fmt"
"strconv"
+ "github.com/stashapp/stash/internal/desktop"
"github.com/stashapp/stash/internal/manager"
+ "github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/fsutil"
+ "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
+ "github.com/stashapp/stash/pkg/session"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
@@ -16,7 +20,7 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput)
if err := r.withTxn(ctx, func(ctx context.Context) error {
fileStore := r.repository.File
folderStore := r.repository.Folder
- mover := file.NewMover(fileStore, folderStore)
+ mover := file.NewMover(fileStore, folderStore, manager.GetInstance().Config.GetStashPaths().Paths())
mover.RegisterHooks(ctx)
var (
@@ -54,13 +58,14 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput)
folderPath := *input.DestinationFolder
// ensure folder path is within the library
- if err := r.validateFolderPath(folderPath); err != nil {
+ stashPaths := manager.GetInstance().Config.GetStashPaths()
+ if err := r.validateFolderPath(stashPaths, folderPath); err != nil {
return err
}
// get or create folder hierarchy
var err error
- folder, err = file.GetOrCreateFolderHierarchy(ctx, folderStore, folderPath)
+ folder, err = file.GetOrCreateFolderHierarchy(ctx, folderStore, folderPath, stashPaths.Paths())
if err != nil {
return fmt.Errorf("getting or creating folder hierarchy: %w", err)
}
@@ -109,8 +114,7 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput)
return true, nil
}
-func (r *mutationResolver) validateFolderPath(folderPath string) error {
- paths := manager.GetInstance().Config.GetStashPaths()
+func (r *mutationResolver) validateFolderPath(paths config.StashConfigs, folderPath string) error {
if l := paths.GetStashFromDirPath(folderPath); l == nil {
return fmt.Errorf("folder path %s must be within a stash library path", folderPath)
}
@@ -326,3 +330,71 @@ func (r *mutationResolver) FileSetFingerprints(ctx context.Context, input FileSe
return true, nil
}
+
+func (r *mutationResolver) RevealFileInFileManager(ctx context.Context, id string) (bool, error) {
+ // disallow if request did not come from localhost
+ if !session.IsLocalRequest(ctx) {
+ logger.Warnf("Attempt to reveal file in file manager from non-local request")
+ return false, fmt.Errorf("access denied")
+ }
+
+ fileIDInt, err := strconv.Atoi(id)
+ if err != nil {
+ return false, fmt.Errorf("converting id: %w", err)
+ }
+
+ var filePath string
+ if err := r.withReadTxn(ctx, func(ctx context.Context) error {
+ files, err := r.repository.File.Find(ctx, models.FileID(fileIDInt))
+ if err != nil {
+ return fmt.Errorf("finding file: %w", err)
+ }
+ if len(files) == 0 {
+ return fmt.Errorf("file with id %d not found", fileIDInt)
+ }
+ filePath = files[0].Base().Path
+ return nil
+ }); err != nil {
+ return false, err
+ }
+
+ if err := desktop.RevealInFileManager(filePath); err != nil {
+ return false, err
+ }
+
+ return true, nil
+}
+
+func (r *mutationResolver) RevealFolderInFileManager(ctx context.Context, id string) (bool, error) {
+ // disallow if request did not come from localhost
+ if !session.IsLocalRequest(ctx) {
+ logger.Warnf("Attempt to reveal folder in file manager from non-local request")
+ return false, fmt.Errorf("access denied")
+ }
+
+ folderIDInt, err := strconv.Atoi(id)
+ if err != nil {
+ return false, fmt.Errorf("converting id: %w", err)
+ }
+
+ var folderPath string
+ if err := r.withReadTxn(ctx, func(ctx context.Context) error {
+ folder, err := r.repository.Folder.Find(ctx, models.FolderID(folderIDInt))
+ if err != nil {
+ return fmt.Errorf("finding folder: %w", err)
+ }
+ if folder == nil {
+ return fmt.Errorf("folder with id %d not found", folderIDInt)
+ }
+ folderPath = folder.Path
+ return nil
+ }); err != nil {
+ return false, err
+ }
+
+ if err := desktop.RevealInFileManager(folderPath); err != nil {
+ return false, err
+ }
+
+ return true, nil
+}
diff --git a/internal/api/resolver_mutation_gallery.go b/internal/api/resolver_mutation_gallery.go
index e7f853922..2cd80b1ff 100644
--- a/internal/api/resolver_mutation_gallery.go
+++ b/internal/api/resolver_mutation_gallery.go
@@ -42,7 +42,10 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat
}
// Populate a new gallery from the input
- newGallery := models.NewGallery()
+ newGallery := models.CreateGalleryInput{
+ Gallery: &models.Gallery{},
+ }
+ *newGallery.Gallery = models.NewGallery()
newGallery.Title = strings.TrimSpace(input.Title)
newGallery.Code = translator.string(input.Code)
@@ -81,10 +84,12 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat
newGallery.URLs = models.NewRelatedStrings([]string{strings.TrimSpace(*input.URL)})
}
+ newGallery.CustomFields = convertMapJSONNumbers(input.CustomFields)
+
// Start the transaction and save the gallery
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Gallery
- if err := qb.Create(ctx, &newGallery, nil); err != nil {
+ if err := qb.Create(ctx, &newGallery); err != nil {
return err
}
@@ -241,6 +246,10 @@ func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.Galle
return nil, fmt.Errorf("converting scene ids: %w", err)
}
+ if input.CustomFields != nil {
+ updatedGallery.CustomFields = handleUpdateCustomFields(*input.CustomFields)
+ }
+
// gallery scene is set from the scene only
gallery, err := qb.UpdatePartial(ctx, galleryID, updatedGallery)
@@ -293,6 +302,10 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGall
return nil, fmt.Errorf("converting scene ids: %w", err)
}
+ if input.CustomFields != nil {
+ updatedGallery.CustomFields = handleUpdateCustomFields(*input.CustomFields)
+ }
+
ret := []*models.Gallery{}
// Start the transaction and save the galleries
diff --git a/internal/api/resolver_mutation_group.go b/internal/api/resolver_mutation_group.go
index 14dc817b9..6c986c4da 100644
--- a/internal/api/resolver_mutation_group.go
+++ b/internal/api/resolver_mutation_group.go
@@ -14,13 +14,17 @@ import (
"github.com/stashapp/stash/pkg/utils"
)
-func groupFromGroupCreateInput(ctx context.Context, input GroupCreateInput) (*models.Group, error) {
+func groupFromGroupCreateInput(ctx context.Context, input GroupCreateInput) (*models.CreateGroupInput, error) {
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
// Populate a new group from the input
- newGroup := models.NewGroup()
+ newGroupInput := &models.CreateGroupInput{
+ Group: &models.Group{},
+ }
+ *newGroupInput.Group = models.NewGroup()
+ newGroup := newGroupInput.Group
newGroup.Name = strings.TrimSpace(input.Name)
newGroup.Aliases = translator.string(input.Aliases)
@@ -59,28 +63,19 @@ func groupFromGroupCreateInput(ctx context.Context, input GroupCreateInput) (*mo
newGroup.URLs = models.NewRelatedStrings(stringslice.TrimSpace(input.Urls))
}
- return &newGroup, nil
-}
-
-func (r *mutationResolver) GroupCreate(ctx context.Context, input GroupCreateInput) (*models.Group, error) {
- newGroup, err := groupFromGroupCreateInput(ctx, input)
- if err != nil {
- return nil, err
- }
+ newGroupInput.CustomFields = convertMapJSONNumbers(input.CustomFields)
// Process the base 64 encoded image string
- var frontimageData []byte
if input.FrontImage != nil {
- frontimageData, err = utils.ProcessImageInput(ctx, *input.FrontImage)
+ newGroupInput.FrontImageData, err = utils.ProcessImageInput(ctx, *input.FrontImage)
if err != nil {
return nil, fmt.Errorf("processing front image: %w", err)
}
}
// Process the base 64 encoded image string
- var backimageData []byte
if input.BackImage != nil {
- backimageData, err = utils.ProcessImageInput(ctx, *input.BackImage)
+ newGroupInput.BackImageData, err = utils.ProcessImageInput(ctx, *input.BackImage)
if err != nil {
return nil, fmt.Errorf("processing back image: %w", err)
}
@@ -88,13 +83,22 @@ func (r *mutationResolver) GroupCreate(ctx context.Context, input GroupCreateInp
// HACK: if back image is being set, set the front image to the default.
// This is because we can't have a null front image with a non-null back image.
- if len(frontimageData) == 0 && len(backimageData) != 0 {
- frontimageData = static.ReadAll(static.DefaultGroupImage)
+ if len(newGroupInput.FrontImageData) == 0 && len(newGroupInput.BackImageData) != 0 {
+ newGroupInput.FrontImageData = static.ReadAll(static.DefaultGroupImage)
+ }
+
+ return newGroupInput, nil
+}
+
+func (r *mutationResolver) GroupCreate(ctx context.Context, input GroupCreateInput) (*models.Group, error) {
+ createGroupInput, err := groupFromGroupCreateInput(ctx, input)
+ if err != nil {
+ return nil, err
}
// Start the transaction and save the group
if err := r.withTxn(ctx, func(ctx context.Context) error {
- if err = r.groupService.Create(ctx, newGroup, frontimageData, backimageData); err != nil {
+ if err = r.groupService.Create(ctx, createGroupInput); err != nil {
return err
}
@@ -104,9 +108,9 @@ func (r *mutationResolver) GroupCreate(ctx context.Context, input GroupCreateInp
}
// for backwards compatibility - run both movie and group hooks
- r.hookExecutor.ExecutePostHooks(ctx, newGroup.ID, hook.GroupCreatePost, input, nil)
- r.hookExecutor.ExecutePostHooks(ctx, newGroup.ID, hook.MovieCreatePost, input, nil)
- return r.getGroup(ctx, newGroup.ID)
+ r.hookExecutor.ExecutePostHooks(ctx, createGroupInput.Group.ID, hook.GroupCreatePost, input, nil)
+ r.hookExecutor.ExecutePostHooks(ctx, createGroupInput.Group.ID, hook.MovieCreatePost, input, nil)
+ return r.getGroup(ctx, createGroupInput.Group.ID)
}
func groupPartialFromGroupUpdateInput(translator changesetTranslator, input GroupUpdateInput) (ret models.GroupPartial, err error) {
@@ -150,6 +154,12 @@ func groupPartialFromGroupUpdateInput(translator changesetTranslator, input Grou
}
updatedGroup.URLs = translator.updateStrings(input.Urls, "urls")
+ if input.CustomFields != nil {
+ updatedGroup.CustomFields = *input.CustomFields
+ // convert json.Numbers to int/float
+ updatedGroup.CustomFields.Full = convertMapJSONNumbers(updatedGroup.CustomFields.Full)
+ updatedGroup.CustomFields.Partial = convertMapJSONNumbers(updatedGroup.CustomFields.Partial)
+ }
return updatedGroup, nil
}
@@ -217,6 +227,12 @@ func (r *mutationResolver) GroupUpdate(ctx context.Context, input GroupUpdateInp
func groupPartialFromBulkGroupUpdateInput(translator changesetTranslator, input BulkGroupUpdateInput) (ret models.GroupPartial, err error) {
updatedGroup := models.NewGroupPartial()
+ updatedGroup.Date, err = translator.optionalDate(input.Date, "date")
+ if err != nil {
+ err = fmt.Errorf("converting date: %w", err)
+ return
+ }
+ updatedGroup.Synopsis = translator.optionalString(input.Synopsis, "synopsis")
updatedGroup.Rating = translator.optionalInt(input.Rating100, "rating100")
updatedGroup.Director = translator.optionalString(input.Director, "director")
@@ -246,6 +262,13 @@ func groupPartialFromBulkGroupUpdateInput(translator changesetTranslator, input
updatedGroup.URLs = translator.optionalURLsBulk(input.Urls, nil)
+ if input.CustomFields != nil {
+ updatedGroup.CustomFields = *input.CustomFields
+ // convert json.Numbers to int/float
+ updatedGroup.CustomFields.Full = convertMapJSONNumbers(updatedGroup.CustomFields.Full)
+ updatedGroup.CustomFields.Partial = convertMapJSONNumbers(updatedGroup.CustomFields.Partial)
+ }
+
return updatedGroup, nil
}
diff --git a/internal/api/resolver_mutation_image.go b/internal/api/resolver_mutation_image.go
index 230d48358..cc03c5286 100644
--- a/internal/api/resolver_mutation_image.go
+++ b/internal/api/resolver_mutation_image.go
@@ -177,6 +177,13 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input models.ImageUp
return nil, fmt.Errorf("converting tag ids: %w", err)
}
+ if input.CustomFields != nil {
+ updatedImage.CustomFields = *input.CustomFields
+ // convert json.Numbers to int/float
+ updatedImage.CustomFields.Full = convertMapJSONNumbers(updatedImage.CustomFields.Full)
+ updatedImage.CustomFields.Partial = convertMapJSONNumbers(updatedImage.CustomFields.Partial)
+ }
+
qb := r.repository.Image
image, err := qb.UpdatePartial(ctx, imageID, updatedImage)
if err != nil {
@@ -237,6 +244,13 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU
return nil, fmt.Errorf("converting tag ids: %w", err)
}
+ if input.CustomFields != nil {
+ updatedImage.CustomFields = *input.CustomFields
+ // convert json.Numbers to int/float
+ updatedImage.CustomFields.Full = convertMapJSONNumbers(updatedImage.CustomFields.Full)
+ updatedImage.CustomFields.Partial = convertMapJSONNumbers(updatedImage.CustomFields.Partial)
+ }
+
// Start the transaction and save the images
if err := r.withTxn(ctx, func(ctx context.Context) error {
var updatedGalleryIDs []int
diff --git a/internal/api/resolver_mutation_migrate.go b/internal/api/resolver_mutation_migrate.go
index 083d307e9..b739be1e0 100644
--- a/internal/api/resolver_mutation_migrate.go
+++ b/internal/api/resolver_mutation_migrate.go
@@ -47,6 +47,10 @@ func (r *mutationResolver) Migrate(ctx context.Context, input manager.MigrateInp
Database: mgr.Database,
}
+ if err := t.PreExecute(); err != nil {
+ return "", err
+ }
+
jobID := mgr.JobManager.Add(ctx, "Migrating database...", t)
return strconv.Itoa(jobID), nil
diff --git a/internal/api/resolver_mutation_package.go b/internal/api/resolver_mutation_package.go
index 8e36e6719..e4a24ba37 100644
--- a/internal/api/resolver_mutation_package.go
+++ b/internal/api/resolver_mutation_package.go
@@ -12,9 +12,10 @@ import (
func refreshPackageType(typeArg PackageType) {
mgr := manager.GetInstance()
- if typeArg == PackageTypePlugin {
+ switch typeArg {
+ case PackageTypePlugin:
mgr.RefreshPluginCache()
- } else if typeArg == PackageTypeScraper {
+ case PackageTypeScraper:
mgr.RefreshScraperCache()
}
}
diff --git a/internal/api/resolver_mutation_performer.go b/internal/api/resolver_mutation_performer.go
index 653348304..59e518675 100644
--- a/internal/api/resolver_mutation_performer.go
+++ b/internal/api/resolver_mutation_performer.go
@@ -52,17 +52,6 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
newPerformer.FakeTits = translator.string(input.FakeTits)
newPerformer.PenisLength = input.PenisLength
newPerformer.Circumcised = input.Circumcised
- newPerformer.CareerStart = input.CareerStart
- newPerformer.CareerEnd = input.CareerEnd
- // if career_start/career_end not provided, parse deprecated career_length
- if newPerformer.CareerStart == nil && newPerformer.CareerEnd == nil && input.CareerLength != nil {
- start, end, err := utils.ParseYearRangeString(*input.CareerLength)
- if err != nil {
- return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err)
- }
- newPerformer.CareerStart = start
- newPerformer.CareerEnd = end
- }
newPerformer.Tattoos = translator.string(input.Tattoos)
newPerformer.Piercings = translator.string(input.Piercings)
newPerformer.Favorite = translator.bool(input.Favorite)
@@ -100,6 +89,25 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
return nil, fmt.Errorf("converting death date: %w", err)
}
+ newPerformer.CareerStart, err = translator.datePtr(input.CareerStart)
+ if err != nil {
+ return nil, fmt.Errorf("converting career start: %w", err)
+ }
+ newPerformer.CareerEnd, err = translator.datePtr(input.CareerEnd)
+ if err != nil {
+ return nil, fmt.Errorf("converting career end: %w", err)
+ }
+
+ // if career_start/career_end not provided, parse deprecated career_length
+ if newPerformer.CareerStart == nil && newPerformer.CareerEnd == nil && input.CareerLength != nil {
+ start, end, err := models.ParseYearRangeString(*input.CareerLength)
+ if err != nil {
+ return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err)
+ }
+ newPerformer.CareerStart = start
+ newPerformer.CareerEnd = end
+ }
+
newPerformer.TagIDs, err = translator.relatedIds(input.TagIds)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
@@ -273,18 +281,25 @@ func performerPartialFromInput(input models.PerformerUpdateInput, translator cha
updatedPerformer.Circumcised = translator.optionalString((*string)(input.Circumcised), "circumcised")
// prefer career_start/career_end over deprecated career_length
if translator.hasField("career_start") || translator.hasField("career_end") {
- updatedPerformer.CareerStart = translator.optionalInt(input.CareerStart, "career_start")
- updatedPerformer.CareerEnd = translator.optionalInt(input.CareerEnd, "career_end")
+ var err error
+ updatedPerformer.CareerStart, err = translator.optionalDate(input.CareerStart, "career_start")
+ if err != nil {
+ return nil, fmt.Errorf("converting career start: %w", err)
+ }
+ updatedPerformer.CareerEnd, err = translator.optionalDate(input.CareerEnd, "career_end")
+ if err != nil {
+ return nil, fmt.Errorf("converting career end: %w", err)
+ }
} else if translator.hasField("career_length") && input.CareerLength != nil {
- start, end, err := utils.ParseYearRangeString(*input.CareerLength)
+ start, end, err := models.ParseYearRangeString(*input.CareerLength)
if err != nil {
return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err)
}
if start != nil {
- updatedPerformer.CareerStart = models.NewOptionalInt(*start)
+ updatedPerformer.CareerStart = models.NewOptionalDate(*start)
}
if end != nil {
- updatedPerformer.CareerEnd = models.NewOptionalInt(*end)
+ updatedPerformer.CareerEnd = models.NewOptionalDate(*end)
}
}
updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos")
@@ -444,18 +459,24 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
updatedPerformer.Circumcised = translator.optionalString((*string)(input.Circumcised), "circumcised")
// prefer career_start/career_end over deprecated career_length
if translator.hasField("career_start") || translator.hasField("career_end") {
- updatedPerformer.CareerStart = translator.optionalInt(input.CareerStart, "career_start")
- updatedPerformer.CareerEnd = translator.optionalInt(input.CareerEnd, "career_end")
+ updatedPerformer.CareerStart, err = translator.optionalDate(input.CareerStart, "career_start")
+ if err != nil {
+ return nil, fmt.Errorf("converting career start: %w", err)
+ }
+ updatedPerformer.CareerEnd, err = translator.optionalDate(input.CareerEnd, "career_end")
+ if err != nil {
+ return nil, fmt.Errorf("converting career end: %w", err)
+ }
} else if translator.hasField("career_length") && input.CareerLength != nil {
- start, end, err := utils.ParseYearRangeString(*input.CareerLength)
+ start, end, err := models.ParseYearRangeString(*input.CareerLength)
if err != nil {
return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err)
}
if start != nil {
- updatedPerformer.CareerStart = models.NewOptionalInt(*start)
+ updatedPerformer.CareerStart = models.NewOptionalDate(*start)
}
if end != nil {
- updatedPerformer.CareerEnd = models.NewOptionalInt(*end)
+ updatedPerformer.CareerEnd = models.NewOptionalDate(*end)
}
}
updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos")
@@ -633,7 +654,7 @@ func (r *mutationResolver) PerformerMerge(ctx context.Context, input PerformerMe
}
legacyURLs := legacyPerformerURLsFromInput(*input.Values, translator)
if legacyURLs.AnySet() {
- return nil, errors.New("Merging legacy performer URLs is not supported")
+ return nil, errors.New("merging legacy performer URLs is not supported")
}
if input.Values.Image != nil {
diff --git a/internal/api/resolver_mutation_stash_box.go b/internal/api/resolver_mutation_stash_box.go
index 436937511..6d2ab84fd 100644
--- a/internal/api/resolver_mutation_stash_box.go
+++ b/internal/api/resolver_mutation_stash_box.go
@@ -58,6 +58,16 @@ func (r *mutationResolver) StashBoxBatchStudioTag(ctx context.Context, input man
return strconv.Itoa(jobID), nil
}
+func (r *mutationResolver) StashBoxBatchTagTag(ctx context.Context, input manager.StashBoxBatchTagInput) (string, error) {
+ b, err := resolveStashBoxBatchTagInput(input.Endpoint, input.StashBoxEndpoint) //nolint:staticcheck
+ if err != nil {
+ return "", err
+ }
+
+ jobID := manager.GetInstance().StashBoxBatchTagTag(ctx, b, input)
+ return strconv.Itoa(jobID), nil
+}
+
func (r *mutationResolver) SubmitStashBoxSceneDraft(ctx context.Context, input StashBoxDraftSubmissionInput) (*string, error) {
b, err := resolveStashBox(input.StashBoxIndex, input.StashBoxEndpoint)
if err != nil {
diff --git a/internal/api/resolver_query_job.go b/internal/api/resolver_query_job.go
index 0e1222445..44b6b15c4 100644
--- a/internal/api/resolver_query_job.go
+++ b/internal/api/resolver_query_job.go
@@ -33,15 +33,26 @@ func (r *queryResolver) FindJob(ctx context.Context, input FindJobInput) (*Job,
}
func jobToJobModel(j job.Job) *Job {
+ subTasks := make([]string, len(j.Details))
+ for i, t := range j.Details {
+ subTasks[i] = sanitiseWebsocketString(t)
+ }
+
+ var jobError *string
+ if j.Error != nil {
+ s := sanitiseWebsocketString(*j.Error)
+ jobError = &s
+ }
+
ret := &Job{
ID: strconv.Itoa(j.ID),
Status: JobStatus(j.Status),
- Description: j.Description,
- SubTasks: j.Details,
+ Description: sanitiseWebsocketString(j.Description),
+ SubTasks: subTasks,
StartTime: j.StartTime,
EndTime: j.EndTime,
AddTime: j.AddTime,
- Error: j.Error,
+ Error: jobError,
}
if j.Progress != -1 {
diff --git a/internal/api/resolver_query_scraper.go b/internal/api/resolver_query_scraper.go
index 86d449921..353bb1a32 100644
--- a/internal/api/resolver_query_scraper.go
+++ b/internal/api/resolver_query_scraper.go
@@ -6,6 +6,7 @@ import (
"fmt"
"slices"
"strconv"
+ "strings"
"github.com/stashapp/stash/pkg/match"
"github.com/stashapp/stash/pkg/models"
@@ -363,7 +364,8 @@ func (r *queryResolver) ScrapeSingleTag(ctx context.Context, source scraper.Sour
client := r.newStashBoxClient(*b)
var ret []*models.ScrapedTag
- out, err := client.QueryTag(ctx, *input.Query)
+ query := *input.Query
+ out, err := client.QueryTag(ctx, query)
if err != nil {
return nil, err
@@ -383,6 +385,22 @@ func (r *queryResolver) ScrapeSingleTag(ctx context.Context, source scraper.Sour
}); err != nil {
return nil, err
}
+
+ // tag name query returns results that may not match the query exactly.
+ // if there is an exact match, it should be first
+ if query != "" {
+ for i, result := range ret {
+ if strings.EqualFold(result.Name, query) {
+ // prepend exact match to the front of the slice
+ if i != 0 {
+ ret = append([]*models.ScrapedTag{result}, append(ret[:i], ret[i+1:]...)...)
+ }
+
+ break
+ }
+ }
+ }
+
return ret, nil
}
diff --git a/internal/api/resolver_subscription_logging.go b/internal/api/resolver_subscription_logging.go
index 423fa88af..b4acb534c 100644
--- a/internal/api/resolver_subscription_logging.go
+++ b/internal/api/resolver_subscription_logging.go
@@ -2,11 +2,19 @@ package api
import (
"context"
+ "strings"
"github.com/stashapp/stash/internal/log"
"github.com/stashapp/stash/internal/manager"
)
+// sanitiseWebsocketString is used to ensure that any strings sent over the websocket are valid UTF-8.
+// Any invalid UTF-8 sequences will be replaced with the Unicode replacement character (U+FFFD).
+// Invalid UTF-8 sequences can cause the websocket connection to be closed.
+func sanitiseWebsocketString(s string) string {
+ return strings.ToValidUTF8(s, "\uFFFD")
+}
+
func getLogLevel(logType string) LogLevel {
switch logType {
case "progress":
@@ -33,7 +41,7 @@ func logEntriesFromLogItems(logItems []log.LogItem) []*LogEntry {
ret[i] = &LogEntry{
Time: entry.Time,
Level: getLogLevel(entry.Type),
- Message: entry.Message,
+ Message: sanitiseWebsocketString(entry.Message),
}
}
diff --git a/internal/api/urlbuilders/scene.go b/internal/api/urlbuilders/scene.go
index 10c4f347c..72a461519 100644
--- a/internal/api/urlbuilders/scene.go
+++ b/internal/api/urlbuilders/scene.go
@@ -57,8 +57,20 @@ func (b SceneURLBuilder) GetScreenshotURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/screenshot?t=" + b.UpdatedAt
}
-func (b SceneURLBuilder) GetFunscriptURL() string {
- return b.BaseURL + "/scene/" + b.SceneID + "/funscript"
+func (b SceneURLBuilder) GetFunscriptURL(apiKey string) *url.URL {
+ u, err := url.Parse(fmt.Sprintf("%s/scene/%s/funscript", b.BaseURL, b.SceneID))
+ if err != nil {
+ // shouldn't happen
+ panic(err)
+ }
+
+ if apiKey != "" {
+ v := u.Query()
+ v.Set("apikey", apiKey)
+ u.RawQuery = v.Encode()
+ }
+
+ return u
}
func (b SceneURLBuilder) GetCaptionURL() string {
diff --git a/internal/autotag/integration_test.go b/internal/autotag/integration_test.go
index 27cce014e..f537ecfe7 100644
--- a/internal/autotag/integration_test.go
+++ b/internal/autotag/integration_test.go
@@ -365,7 +365,10 @@ func makeImage(expectedResult bool) *models.Image {
}
func createImage(ctx context.Context, w models.ImageWriter, o *models.Image, f *models.ImageFile) error {
- err := w.Create(ctx, o, []models.FileID{f.ID})
+ err := w.Create(ctx, &models.CreateImageInput{
+ Image: o,
+ FileIDs: []models.FileID{f.ID},
+ })
if err != nil {
return fmt.Errorf("Failed to create image with path '%s': %s", f.Path, err.Error())
@@ -468,7 +471,10 @@ func makeGallery(expectedResult bool) *models.Gallery {
}
func createGallery(ctx context.Context, w models.GalleryWriter, o *models.Gallery, f *models.BaseFile) error {
- err := w.Create(ctx, o, []models.FileID{f.ID})
+ err := w.Create(ctx, &models.CreateGalleryInput{
+ Gallery: o,
+ FileIDs: []models.FileID{f.ID},
+ })
if err != nil {
return fmt.Errorf("Failed to create gallery with path '%s': %s", f.Path, err.Error())
}
diff --git a/internal/desktop/desktop.go b/internal/desktop/desktop.go
index 06d400793..f1ca9bc92 100644
--- a/internal/desktop/desktop.go
+++ b/internal/desktop/desktop.go
@@ -2,6 +2,7 @@
package desktop
import (
+ "fmt"
"os"
"path"
"path/filepath"
@@ -155,15 +156,17 @@ func getIconPath() string {
return path.Join(config.GetInstance().GetConfigPath(), "icon.png")
}
-func RevealInFileManager(path string) {
- exists, err := fsutil.FileExists(path)
+func RevealInFileManager(path string) error {
+ info, err := os.Stat(path)
if err != nil {
- logger.Errorf("Error checking file: %s", err)
- return
+ return fmt.Errorf("error checking path: %w", err)
}
- if exists && IsDesktop() {
- revealInFileManager(path)
+
+ absPath, err := filepath.Abs(path)
+ if err != nil {
+ return fmt.Errorf("error getting absolute path: %w", err)
}
+ return revealInFileManager(absPath, info)
}
func getServerURL(path string) string {
diff --git a/internal/desktop/desktop_platform_darwin.go b/internal/desktop/desktop_platform_darwin.go
index 593e9516f..560cc1893 100644
--- a/internal/desktop/desktop_platform_darwin.go
+++ b/internal/desktop/desktop_platform_darwin.go
@@ -4,9 +4,11 @@
package desktop
import (
+ "fmt"
+ "os"
"os/exec"
- "github.com/kermieisinthehouse/gosx-notifier"
+ gosxnotifier "github.com/feederbox826/gosx-notifier"
"github.com/stashapp/stash/pkg/logger"
)
@@ -32,8 +34,11 @@ func sendNotification(notificationTitle string, notificationText string) {
}
}
-func revealInFileManager(path string) {
- exec.Command(`open`, `-R`, path)
+func revealInFileManager(path string, _ os.FileInfo) error {
+ if err := exec.Command(`open`, `-R`, path).Run(); err != nil {
+ return fmt.Errorf("error revealing path in Finder: %w", err)
+ }
+ return nil
}
func isDoubleClickLaunched() bool {
diff --git a/internal/desktop/desktop_platform_nixes.go b/internal/desktop/desktop_platform_nixes.go
index 69c780d3c..f5ab13384 100644
--- a/internal/desktop/desktop_platform_nixes.go
+++ b/internal/desktop/desktop_platform_nixes.go
@@ -4,8 +4,10 @@
package desktop
import (
+ "fmt"
"os"
"os/exec"
+ "path/filepath"
"strings"
"github.com/stashapp/stash/pkg/logger"
@@ -33,8 +35,15 @@ func sendNotification(notificationTitle string, notificationText string) {
}
}
-func revealInFileManager(path string) {
-
+func revealInFileManager(path string, info os.FileInfo) error {
+ dir := path
+ if !info.IsDir() {
+ dir = filepath.Dir(path)
+ }
+ if err := exec.Command("xdg-open", dir).Run(); err != nil {
+ return fmt.Errorf("error opening directory in file manager: %w", err)
+ }
+ return nil
}
func isDoubleClickLaunched() bool {
diff --git a/internal/desktop/desktop_platform_windows.go b/internal/desktop/desktop_platform_windows.go
index ecb4060e6..48feabed5 100644
--- a/internal/desktop/desktop_platform_windows.go
+++ b/internal/desktop/desktop_platform_windows.go
@@ -4,6 +4,7 @@
package desktop
import (
+ "os"
"os/exec"
"syscall"
"unsafe"
@@ -83,6 +84,10 @@ func sendNotification(notificationTitle string, notificationText string) {
}
}
-func revealInFileManager(path string) {
- exec.Command(`explorer`, `\select`, path)
+func revealInFileManager(path string, _ os.FileInfo) error {
+ c := exec.Command(`explorer`, `/select,`, path)
+ logger.Debugf("Running: %s", c.String())
+ // explorer seems to return an error code even when it works, so ignore the error
+ _ = c.Run()
+ return nil
}
diff --git a/internal/identify/options.go b/internal/identify/options.go
index 9e27a3e39..181bf4612 100644
--- a/internal/identify/options.go
+++ b/internal/identify/options.go
@@ -33,8 +33,10 @@ type MetadataOptions struct {
SetCoverImage *bool `json:"setCoverImage"`
SetOrganized *bool `json:"setOrganized"`
// defaults to true if not provided
+
// Deprecated: use PerformerGenders instead
IncludeMalePerformers *bool `json:"includeMalePerformers"`
+
// Filter to only include performers with these genders. If not provided, all genders are included.
PerformerGenders []models.GenderEnum `json:"performerGenders"`
// defaults to true if not provided
diff --git a/internal/manager/checksum.go b/internal/manager/checksum.go
index cbe9d85d8..86f1b8708 100644
--- a/internal/manager/checksum.go
+++ b/internal/manager/checksum.go
@@ -22,7 +22,8 @@ type SceneMissingHashCounter interface {
// will ensure that all oshash values are set on all scenes.
func ValidateVideoFileNamingAlgorithm(ctx context.Context, qb SceneMissingHashCounter, newValue models.HashAlgorithm) error {
// if algorithm is being set to MD5, then all checksums must be present
- if newValue == models.HashAlgorithmMd5 {
+ switch newValue {
+ case models.HashAlgorithmMd5:
missingMD5, err := qb.CountMissingChecksum(ctx)
if err != nil {
return err
@@ -31,7 +32,7 @@ func ValidateVideoFileNamingAlgorithm(ctx context.Context, qb SceneMissingHashCo
if missingMD5 > 0 {
return errors.New("some checksums are missing on scenes. Run Scan with calculateMD5 set to true")
}
- } else if newValue == models.HashAlgorithmOshash {
+ case models.HashAlgorithmOshash:
missingOSHash, err := qb.CountMissingOSHash(ctx)
if err != nil {
return err
diff --git a/internal/manager/config/stash_config.go b/internal/manager/config/stash_config.go
index 4a2cc7d60..7a103631c 100644
--- a/internal/manager/config/stash_config.go
+++ b/internal/manager/config/stash_config.go
@@ -38,3 +38,12 @@ func (s StashConfigs) GetStashFromDirPath(dirPath string) *StashConfig {
}
return nil
}
+
+func (s StashConfigs) Paths() []string {
+ paths := make([]string, len(s))
+ for i, c := range s {
+ // #6618 - clean the path to ensure comparison works correctly
+ paths[i] = filepath.Clean(c.Path)
+ }
+ return paths
+}
diff --git a/internal/manager/generator_interactive_heatmap_speed.go b/internal/manager/generator_interactive_heatmap_speed.go
index d10ce5b19..aa0ee0e38 100644
--- a/internal/manager/generator_interactive_heatmap_speed.go
+++ b/internal/manager/generator_interactive_heatmap_speed.go
@@ -408,7 +408,7 @@ func ConvertFunscriptToCSV(funscriptPath string) ([]byte, error) {
}
// I don't know whether the csv format requires int or float, so for now we'll use int
- buffer.WriteString(fmt.Sprintf("%d,%d\r\n", int(math.Round(action.At)), pos))
+ fmt.Fprintf(&buffer, "%d,%d\r\n", int(math.Round(action.At)), pos)
}
return buffer.Bytes(), nil
}
diff --git a/internal/manager/import.go b/internal/manager/import.go
index f9fb57c8f..5168ad99c 100644
--- a/internal/manager/import.go
+++ b/internal/manager/import.go
@@ -76,9 +76,10 @@ func performImport(ctx context.Context, i importer, duplicateBehaviour ImportDup
var id int
if existing != nil {
- if duplicateBehaviour == ImportDuplicateEnumFail {
+ switch duplicateBehaviour {
+ case ImportDuplicateEnumFail:
return fmt.Errorf("existing object with name '%s'", name)
- } else if duplicateBehaviour == ImportDuplicateEnumIgnore {
+ case ImportDuplicateEnumIgnore:
logger.Infof("Skipping existing object %q", name)
return nil
}
diff --git a/internal/manager/manager_tasks.go b/internal/manager/manager_tasks.go
index bac726c1b..76938e9ff 100644
--- a/internal/manager/manager_tasks.go
+++ b/internal/manager/manager_tasks.go
@@ -74,6 +74,28 @@ func getScanPaths(inputPaths []string) []*config.StashConfig {
return ret
}
+// Filters the input array for paths that are within the paths managed by stash
+func filterStashPaths(inputPaths []string) []string {
+ if len(inputPaths) == 0 {
+ return inputPaths
+ }
+
+ stashPaths := config.GetInstance().GetStashPaths()
+
+ var ret []string
+ for _, p := range inputPaths {
+ s := stashPaths.GetStashFromDirPath(p)
+ if s == nil {
+ logger.Warnf("%s is not in the configured stash paths", p)
+ continue
+ }
+
+ ret = append(ret, p)
+ }
+
+ return ret
+}
+
// ScanSubscribe subscribes to a notification that is triggered when a
// scan or clean is complete.
func (s *Manager) ScanSubscribe(ctx context.Context) <-chan bool {
@@ -123,7 +145,8 @@ func (s *Manager) Scan(ctx context.Context, input ScanMetadataInput) (int, error
ZipFileExtensions: cfg.GetGalleryExtensions(),
// ScanFilters is set in ScanJob.Execute
// HandlerRequiredFilters is set in ScanJob.Execute
- Rescan: input.Rescan,
+ RootPaths: cfg.GetStashPaths().Paths(),
+ Rescan: input.Rescan,
}
scanJob := ScanJob{
@@ -291,6 +314,8 @@ type CleanMetadataInput struct {
Paths []string `json:"paths"`
// Do a dry run. Don't delete any files
DryRun bool `json:"dryRun"`
+
+ IgnoreZipFileContents bool `json:"ignoreZipFileContents"`
}
func (s *Manager) Clean(ctx context.Context, input CleanMetadataInput) int {
@@ -408,7 +433,7 @@ type StashBoxBatchTagInput struct {
ExcludeFields []string `json:"exclude_fields"`
// Refresh items already tagged by StashBox if true. Only tag items with no StashBox tagging if false
Refresh bool `json:"refresh"`
- // If batch adding studios, should their parent studios also be created?
+ // If batch adding studios or tags, should their parent entities also be created?
CreateParent bool `json:"createParent"`
// IDs in stash of the items to update.
// If set, names and stash_ids fields will be ignored.
@@ -704,3 +729,137 @@ func (s *Manager) StashBoxBatchStudioTag(ctx context.Context, box *models.StashB
return s.JobManager.Add(ctx, "Batch stash-box studio tag...", j)
}
+
+func (s *Manager) batchTagTagsByIds(ctx context.Context, input StashBoxBatchTagInput, box *models.StashBox) ([]Task, error) {
+ var tasks []Task
+
+ err := s.Repository.WithTxn(ctx, func(ctx context.Context) error {
+ tagQuery := s.Repository.Tag
+
+ for _, tagID := range input.Ids {
+ if id, err := strconv.Atoi(tagID); err == nil {
+ t, err := tagQuery.Find(ctx, id)
+ if err != nil {
+ return err
+ }
+
+ if err := t.LoadStashIDs(ctx, tagQuery); err != nil {
+ return fmt.Errorf("loading tag stash ids: %w", err)
+ }
+
+ hasStashID := t.StashIDs.ForEndpoint(box.Endpoint) != nil
+ if (input.Refresh && hasStashID) || (!input.Refresh && !hasStashID) {
+ tasks = append(tasks, &stashBoxBatchTagTagTask{
+ tag: t,
+ createParent: input.CreateParent,
+ box: box,
+ excludedFields: input.ExcludeFields,
+ })
+ }
+ }
+ }
+ return nil
+ })
+
+ return tasks, err
+}
+
+func (s *Manager) batchTagTagsByNamesOrStashIds(input StashBoxBatchTagInput, box *models.StashBox) []Task {
+ var tasks []Task
+
+ for i := range input.StashIDs {
+ stashID := input.StashIDs[i]
+ if len(stashID) > 0 {
+ tasks = append(tasks, &stashBoxBatchTagTagTask{
+ stashID: &stashID,
+ createParent: input.CreateParent,
+ box: box,
+ excludedFields: input.ExcludeFields,
+ })
+ }
+ }
+
+ for i := range input.Names {
+ name := input.Names[i]
+ if len(name) > 0 {
+ tasks = append(tasks, &stashBoxBatchTagTagTask{
+ name: &name,
+ createParent: input.CreateParent,
+ box: box,
+ excludedFields: input.ExcludeFields,
+ })
+ }
+ }
+
+ return tasks
+}
+
+func (s *Manager) batchTagAllTags(ctx context.Context, input StashBoxBatchTagInput, box *models.StashBox) ([]Task, error) {
+ var tasks []Task
+
+ err := s.Repository.WithTxn(ctx, func(ctx context.Context) error {
+ tagQuery := s.Repository.Tag
+ var tags []*models.Tag
+ var err error
+
+ tags, err = tagQuery.FindByStashIDStatus(ctx, input.Refresh, box.Endpoint)
+
+ if err != nil {
+ return fmt.Errorf("error querying tags: %v", err)
+ }
+
+ for _, t := range tags {
+ tasks = append(tasks, &stashBoxBatchTagTagTask{
+ tag: t,
+ createParent: input.CreateParent,
+ box: box,
+ excludedFields: input.ExcludeFields,
+ })
+ }
+ return nil
+ })
+
+ return tasks, err
+}
+
+func (s *Manager) StashBoxBatchTagTag(ctx context.Context, box *models.StashBox, input StashBoxBatchTagInput) int {
+ j := job.MakeJobExec(func(ctx context.Context, progress *job.Progress) error {
+ logger.Infof("Initiating stash-box batch tag tag")
+
+ var tasks []Task
+ var err error
+
+ switch input.getBatchTagType(false) {
+ case batchTagByIds:
+ tasks, err = s.batchTagTagsByIds(ctx, input, box)
+ case batchTagByNamesOrStashIds:
+ tasks = s.batchTagTagsByNamesOrStashIds(input, box)
+ case batchTagAll:
+ tasks, err = s.batchTagAllTags(ctx, input, box)
+ }
+
+ if err != nil {
+ return err
+ }
+
+ if len(tasks) == 0 {
+ return nil
+ }
+
+ progress.SetTotal(len(tasks))
+
+ logger.Infof("Starting stash-box batch operation for %d tags", len(tasks))
+
+ for _, task := range tasks {
+ progress.ExecuteTask(task.GetDescription(), func() {
+ task.Start(ctx)
+ })
+
+ progress.Increment()
+ }
+
+ return nil
+ })
+
+ return s.JobManager.Add(ctx, "Batch stash-box tag tag...", j)
+}
diff --git a/internal/manager/repository.go b/internal/manager/repository.go
index afbf0b963..65514ed1d 100644
--- a/internal/manager/repository.go
+++ b/internal/manager/repository.go
@@ -39,7 +39,7 @@ type GalleryService interface {
}
type GroupService interface {
- Create(ctx context.Context, group *models.Group, frontimageData []byte, backimageData []byte) error
+ Create(ctx context.Context, input *models.CreateGroupInput) error
UpdatePartial(ctx context.Context, id int, updatedGroup models.GroupPartial, frontImage group.ImageInput, backImage group.ImageInput) (*models.Group, error)
AddSubGroups(ctx context.Context, groupID int, subGroups []models.GroupIDDescription, insertIndex *int) error
diff --git a/internal/manager/scan_stashignore_test.go b/internal/manager/scan_stashignore_test.go
new file mode 100644
index 000000000..2745ff970
--- /dev/null
+++ b/internal/manager/scan_stashignore_test.go
@@ -0,0 +1,268 @@
+//go:build integration
+// +build integration
+
+package manager
+
+import (
+ "context"
+ "io/fs"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/stashapp/stash/pkg/file"
+
+ // Necessary to register custom migrations.
+ _ "github.com/stashapp/stash/pkg/sqlite/migrations"
+)
+
+// stashIgnorePathFilter wraps StashIgnoreFilter to implement PathFilter for testing.
+// It provides a fixed library root for the filter.
+type stashIgnorePathFilter struct {
+ filter *file.StashIgnoreFilter
+ libraryRoot string
+}
+
+func (f *stashIgnorePathFilter) Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool {
+ return f.filter.Accept(ctx, path, info, f.libraryRoot, zipFilePath)
+}
+
+// createTestFileOnDisk creates a file with some content.
+func createTestFileOnDisk(t *testing.T, dir, name string) string {
+ t.Helper()
+ path := filepath.Join(dir, name)
+ if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil {
+ t.Fatalf("failed to create directory for %s: %v", path, err)
+ }
+ // Write some content so the file has a non-zero size.
+ if err := os.WriteFile(path, []byte("test content for "+name), 0644); err != nil {
+ t.Fatalf("failed to create file %s: %v", path, err)
+ }
+ return path
+}
+
+// createStashIgnoreFile creates a .stashignore file with the given content.
+func createStashIgnoreFile(t *testing.T, dir, content string) {
+ t.Helper()
+ path := filepath.Join(dir, ".stashignore")
+ if err := os.WriteFile(path, []byte(content), 0644); err != nil {
+ t.Fatalf("failed to create .stashignore: %v", err)
+ }
+}
+
+func TestScannerWithStashIgnore(t *testing.T) {
+ // Create temp directory structure.
+ tmpDir := t.TempDir()
+
+ // Create test files.
+ createTestFileOnDisk(t, tmpDir, "video1.mp4")
+ createTestFileOnDisk(t, tmpDir, "video2.mp4")
+ createTestFileOnDisk(t, tmpDir, "ignore_me.mp4")
+ createTestFileOnDisk(t, tmpDir, "subdir/video3.mp4")
+ createTestFileOnDisk(t, tmpDir, "subdir/skip_this.mp4")
+ createTestFileOnDisk(t, tmpDir, "excluded_dir/video4.mp4")
+ createTestFileOnDisk(t, tmpDir, "temp/processing.mp4")
+
+ // Create .stashignore file.
+ stashignore := `# Ignore specific files
+ignore_me.mp4
+subdir/skip_this.mp4
+
+# Ignore directories
+excluded_dir/
+temp/
+`
+ createStashIgnoreFile(t, tmpDir, stashignore)
+
+ // Create stashignore filter with library root.
+ stashIgnoreFilter := &stashIgnorePathFilter{
+ filter: file.NewStashIgnoreFilter(),
+ libraryRoot: tmpDir,
+ }
+
+ // Create scanner.
+ scanner := &file.Scanner{
+ ScanFilters: []file.PathFilter{stashIgnoreFilter},
+ }
+
+ testScenarios := []struct {
+ path string
+ accepted bool
+ }{
+ {filepath.Join(tmpDir, "video1.mp4"), true},
+ {filepath.Join(tmpDir, "video2.mp4"), true},
+ {filepath.Join(tmpDir, "ignore_me.mp4"), false},
+ {filepath.Join(tmpDir, "subdir/video3.mp4"), true},
+ {filepath.Join(tmpDir, "subdir/skip_this.mp4"), false},
+ {filepath.Join(tmpDir, "excluded_dir/video4.mp4"), false},
+ {filepath.Join(tmpDir, "temp/processing.mp4"), false},
+ }
+
+ ctx := context.Background()
+
+ for _, scenario := range testScenarios {
+ info, err := os.Stat(scenario.path)
+ if err != nil {
+ t.Fatalf("failed to stat file %s: %v", scenario.path, err)
+ }
+ accepted := scanner.AcceptEntry(ctx, scenario.path, info, "")
+
+ if accepted != scenario.accepted {
+ t.Errorf("unexpected accept result for %s: expected %v, got %v",
+ scenario.path, scenario.accepted, accepted)
+ }
+ }
+}
+
+func TestScannerWithNestedStashIgnore(t *testing.T) {
+ // Create temp directory structure.
+ tmpDir := t.TempDir()
+
+ // Create test files.
+ createTestFileOnDisk(t, tmpDir, "root.mp4")
+ createTestFileOnDisk(t, tmpDir, "root.tmp")
+ createTestFileOnDisk(t, tmpDir, "subdir/sub.mp4")
+ createTestFileOnDisk(t, tmpDir, "subdir/sub.log")
+ createTestFileOnDisk(t, tmpDir, "subdir/sub.tmp")
+
+ // Root .stashignore excludes *.tmp.
+ createStashIgnoreFile(t, tmpDir, "*.tmp\n")
+
+ // Subdir .stashignore excludes *.log.
+ createStashIgnoreFile(t, filepath.Join(tmpDir, "subdir"), "*.log\n")
+
+ // Create stashignore filter with library root.
+ stashIgnoreFilter := &stashIgnorePathFilter{
+ filter: file.NewStashIgnoreFilter(),
+ libraryRoot: tmpDir,
+ }
+
+ // Create scanner.
+ scanner := &file.Scanner{
+ ScanFilters: []file.PathFilter{stashIgnoreFilter},
+ }
+
+ testScenarios := []struct {
+ path string
+ accepted bool
+ }{
+ {filepath.Join(tmpDir, "root.mp4"), true},
+ {filepath.Join(tmpDir, "root.tmp"), false},
+ {filepath.Join(tmpDir, "subdir/sub.mp4"), true},
+ {filepath.Join(tmpDir, "subdir/sub.log"), false},
+ {filepath.Join(tmpDir, "subdir/sub.tmp"), false},
+ }
+
+ ctx := context.Background()
+
+ for _, scenario := range testScenarios {
+ info, err := os.Stat(scenario.path)
+ if err != nil {
+ t.Fatalf("failed to stat file %s: %v", scenario.path, err)
+ }
+ accepted := scanner.AcceptEntry(ctx, scenario.path, info, "")
+
+ if accepted != scenario.accepted {
+ t.Errorf("unexpected accept result for %s: expected %v, got %v",
+ scenario.path, scenario.accepted, accepted)
+ }
+ }
+}
+
+func TestScannerWithoutStashIgnore(t *testing.T) {
+ // Create temp directory structure (no .stashignore).
+ tmpDir := t.TempDir()
+
+ // Create test files.
+ createTestFileOnDisk(t, tmpDir, "video1.mp4")
+ createTestFileOnDisk(t, tmpDir, "video2.mp4")
+ createTestFileOnDisk(t, tmpDir, "subdir/video3.mp4")
+
+ // Create stashignore filter with library root (but no .stashignore file exists).
+ stashIgnoreFilter := &stashIgnorePathFilter{
+ filter: file.NewStashIgnoreFilter(),
+ libraryRoot: tmpDir,
+ }
+
+ // Create scanner.
+ scanner := &file.Scanner{
+ ScanFilters: []file.PathFilter{stashIgnoreFilter},
+ }
+
+ testScenarios := []struct {
+ path string
+ accepted bool
+ }{
+ {filepath.Join(tmpDir, "video1.mp4"), true},
+ {filepath.Join(tmpDir, "video2.mp4"), true},
+ {filepath.Join(tmpDir, "subdir/video3.mp4"), true},
+ }
+
+ ctx := context.Background()
+
+ for _, scenario := range testScenarios {
+ info, err := os.Stat(scenario.path)
+ if err != nil {
+ t.Fatalf("failed to stat file %s: %v", scenario.path, err)
+ }
+ accepted := scanner.AcceptEntry(ctx, scenario.path, info, "")
+
+ if accepted != scenario.accepted {
+ t.Errorf("unexpected accept result for %s: expected %v, got %v",
+ scenario.path, scenario.accepted, accepted)
+ }
+ }
+}
+
+func TestScannerWithNegationPattern(t *testing.T) {
+ // Create temp directory structure.
+ tmpDir := t.TempDir()
+
+ // Create test files.
+ createTestFileOnDisk(t, tmpDir, "file1.tmp")
+ createTestFileOnDisk(t, tmpDir, "file2.tmp")
+ createTestFileOnDisk(t, tmpDir, "keep_this.tmp")
+ createTestFileOnDisk(t, tmpDir, "video.mp4")
+
+ // Create .stashignore with negation.
+ stashignore := `*.tmp
+!keep_this.tmp
+`
+ createStashIgnoreFile(t, tmpDir, stashignore)
+
+ // Create stashignore filter with library root.
+ stashIgnoreFilter := &stashIgnorePathFilter{
+ filter: file.NewStashIgnoreFilter(),
+ libraryRoot: tmpDir,
+ }
+
+ // Create scanner.
+ scanner := &file.Scanner{
+ ScanFilters: []file.PathFilter{stashIgnoreFilter},
+ }
+
+ testScenarios := []struct {
+ path string
+ accepted bool
+ }{
+ {filepath.Join(tmpDir, "file1.tmp"), false},
+ {filepath.Join(tmpDir, "file2.tmp"), false},
+ {filepath.Join(tmpDir, "keep_this.tmp"), true},
+ {filepath.Join(tmpDir, "video.mp4"), true},
+ }
+
+ ctx := context.Background()
+
+ for _, scenario := range testScenarios {
+ info, err := os.Stat(scenario.path)
+ if err != nil {
+ t.Fatalf("failed to stat file %s: %v", scenario.path, err)
+ }
+ accepted := scanner.AcceptEntry(ctx, scenario.path, info, "")
+
+ if accepted != scenario.accepted {
+ t.Errorf("unexpected accept result for %s: expected %v, got %v",
+ scenario.path, scenario.accepted, accepted)
+ }
+ }
+}
diff --git a/internal/manager/task/clean_generated.go b/internal/manager/task/clean_generated.go
index a59bda6d1..378268a17 100644
--- a/internal/manager/task/clean_generated.go
+++ b/internal/manager/task/clean_generated.go
@@ -313,9 +313,36 @@ func (j *CleanGeneratedJob) cleanBlobFiles(ctx context.Context, progress *job.Pr
return err
}
+ // remove empty hash prefix subdirectories
+ j.removeEmptyDirs(j.Paths.Blobs)
+
return nil
}
+func (j *CleanGeneratedJob) removeEmptyDirs(root string) {
+ entries, err := os.ReadDir(root)
+ if err != nil {
+ return
+ }
+
+ for _, entry := range entries {
+ if !entry.IsDir() {
+ continue
+ }
+
+ dirPath := filepath.Join(root, entry.Name())
+ subEntries, err := os.ReadDir(dirPath)
+ if err != nil {
+ continue
+ }
+
+ if len(subEntries) == 0 {
+ j.logDelete("removing empty directory: %s", entry.Name())
+ j.deleteDir(dirPath)
+ }
+ }
+}
+
func (j *CleanGeneratedJob) getScenesWithHash(ctx context.Context, hash string) ([]*models.Scene, error) {
fp := models.Fingerprint{
Fingerprint: hash,
@@ -637,6 +664,8 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job.
return err
}
+ j.removeEmptyDirs(j.Paths.Generated.Markers)
+
return nil
}
@@ -730,5 +759,7 @@ func (j *CleanGeneratedJob) cleanThumbnailFiles(ctx context.Context, progress *j
return err
}
+ j.removeEmptyDirs(j.Paths.Generated.Thumbnails)
+
return nil
}
diff --git a/internal/manager/task/migrate.go b/internal/manager/task/migrate.go
index 95798d301..dd320a83b 100644
--- a/internal/manager/task/migrate.go
+++ b/internal/manager/task/migrate.go
@@ -7,6 +7,8 @@ import (
"os"
"path/filepath"
+ "github.com/stashapp/stash/internal/manager/config"
+ "github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/sqlite"
@@ -29,6 +31,21 @@ type databaseSchemaInfo struct {
StepsRequired uint
}
+// PreExecute validates the environment before executing the migration.
+// It returns an error if the migration cannot be performed.
+func (s *MigrateJob) PreExecute() error {
+ // ensure backup directory exists and is writable
+ backupDir := s.Config.GetBackupDirectoryPathOrDefault()
+ if backupDir != "" {
+ if err := fsutil.EnsureDir(backupDir); err != nil {
+ logger.Errorf("error ensuring backup directory exists: %s", err)
+ logger.Warnf("Backup directory (%s) must be modified to a valid directory or removed from the config file", config.BackupDirectoryPath)
+ return fmt.Errorf("error creating backup directory: %w", err)
+ }
+ }
+ return nil
+}
+
func (s *MigrateJob) Execute(ctx context.Context, progress *job.Progress) error {
schemaInfo, err := s.required()
if err != nil {
diff --git a/internal/manager/task_clean.go b/internal/manager/task_clean.go
index ddd86e2f2..67b7038b6 100644
--- a/internal/manager/task_clean.go
+++ b/internal/manager/task_clean.go
@@ -40,9 +40,10 @@ func (j *cleanJob) Execute(ctx context.Context, progress *job.Progress) error {
}
j.cleaner.Clean(ctx, file.CleanOptions{
- Paths: j.input.Paths,
- DryRun: j.input.DryRun,
- PathFilter: newCleanFilter(instance.Config),
+ Paths: j.input.Paths,
+ DryRun: j.input.DryRun,
+ IgnoreZipFileContents: j.input.IgnoreZipFileContents,
+ PathFilter: newCleanFilter(instance.Config),
}, progress)
if job.IsCancelled(ctx) {
@@ -154,11 +155,12 @@ func newCleanFilter(c *config.Config) *cleanFilter {
generatedPath: c.GetGeneratedPath(),
videoExcludeRegex: generateRegexps(c.GetExcludes()),
imageExcludeRegex: generateRegexps(c.GetImageExcludes()),
+ stashIgnoreFilter: file.NewStashIgnoreFilter(),
},
}
}
-func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool {
+func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool {
// #1102 - clean anything in generated path
generatedPath := f.generatedPath
@@ -173,12 +175,18 @@ func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo)
}
if stash == nil {
- logger.Infof("%s not in any stash library directories. Marking to clean: \"%s\"", fileOrFolder, path)
+ logger.Infof("%s not in any stash library directories. Marking to clean: %q", fileOrFolder, path)
return false
}
if fsutil.IsPathInDir(generatedPath, path) {
- logger.Infof("%s is in generated path. Marking to clean: \"%s\"", fileOrFolder, path)
+ logger.Infof("%s is in generated path. Marking to clean: %q", fileOrFolder, path)
+ return false
+ }
+
+ // Check .stashignore files, bounded to the library root.
+ if !f.stashIgnoreFilter.Accept(ctx, path, info, stash.Path, zipFilePath) {
+ logger.Infof("%s is excluded due to .stashignore. Marking to clean: %q", fileOrFolder, path)
return false
}
diff --git a/internal/manager/task_export.go b/internal/manager/task_export.go
index 5f2897670..01bab9430 100644
--- a/internal/manager/task_export.go
+++ b/internal/manager/task_export.go
@@ -651,6 +651,7 @@ func (t *ExportTask) exportImage(ctx context.Context, wg *sync.WaitGroup, jobCha
galleryReader := r.Gallery
performerReader := r.Performer
tagReader := r.Tag
+ imageReader := r.Image
for s := range jobChan {
imageHash := s.Checksum
@@ -665,14 +666,17 @@ func (t *ExportTask) exportImage(ctx context.Context, wg *sync.WaitGroup, jobCha
continue
}
- newImageJSON := image.ToBasicJSON(s)
+ newImageJSON, err := image.ToBasicJSON(ctx, imageReader, s)
+ if err != nil {
+ logger.Errorf("[images] <%s> error converting image to JSON: %v", imageHash, err)
+ continue
+ }
// export files
for _, f := range s.Files.List() {
t.exportFile(f)
}
- var err error
newImageJSON.Studio, err = image.GetStudioName(ctx, studioReader, s)
if err != nil {
logger.Errorf("[images] <%s> error getting image studio name: %v", imageHash, err)
@@ -779,6 +783,7 @@ func (t *ExportTask) exportGallery(ctx context.Context, wg *sync.WaitGroup, jobC
studioReader := r.Studio
performerReader := r.Performer
tagReader := r.Tag
+ galleryReader := r.Gallery
galleryChapterReader := r.GalleryChapter
for g := range jobChan {
@@ -847,6 +852,12 @@ func (t *ExportTask) exportGallery(ctx context.Context, wg *sync.WaitGroup, jobC
newGalleryJSON.Tags = tag.GetNames(tags)
+ newGalleryJSON.CustomFields, err = galleryReader.GetCustomFields(ctx, g.ID)
+ if err != nil {
+ logger.Errorf("[galleries] <%s> error getting gallery custom fields: %v", g.DisplayName(), err)
+ continue
+ }
+
if t.includeDependencies {
if g.StudioID != nil {
t.studios.IDs = sliceutil.AppendUnique(t.studios.IDs, *g.StudioID)
diff --git a/internal/manager/task_generate.go b/internal/manager/task_generate.go
index cc991d5d6..6f8ac8178 100644
--- a/internal/manager/task_generate.go
+++ b/internal/manager/task_generate.go
@@ -43,6 +43,8 @@ type GenerateMetadataInput struct {
GalleryIDs []string `json:"galleryIDs"`
// overwrite existing media
Overwrite bool `json:"overwrite"`
+ // paths to run generate on, in addition to the other ID lists
+ Paths []string `json:"paths"`
}
type GeneratePreviewOptionsInput struct {
@@ -133,8 +135,13 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error
r := j.repository
if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
qb := r.Scene
- if len(j.input.SceneIDs) == 0 && len(j.input.MarkerIDs) == 0 && len(j.input.ImageIDs) == 0 && len(j.input.GalleryIDs) == 0 {
- j.queueTasks(ctx, g, queue)
+ if len(j.input.SceneIDs) == 0 &&
+ len(j.input.MarkerIDs) == 0 &&
+ len(j.input.ImageIDs) == 0 &&
+ len(j.input.GalleryIDs) == 0 &&
+ len(j.input.Paths) == 0 {
+
+ j.queueTasks(ctx, g, nil, queue)
} else {
if len(j.input.SceneIDs) > 0 {
scenes, err = qb.FindMany(ctx, sceneIDs)
@@ -183,6 +190,11 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error
}
}
}
+
+ if len(j.input.Paths) > 0 {
+ paths := filterStashPaths(j.input.Paths)
+ j.queueTasks(ctx, g, paths, queue)
+ }
}
return nil
@@ -250,7 +262,9 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error
for f := range queue {
if job.IsCancelled(ctx) {
- break
+ // keep draining the queue so the producer goroutine can finish
+ // and release its read transaction, otherwise the DB stays locked
+ continue
}
wg.Add()
@@ -276,17 +290,18 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error
return nil
}
-func (j *GenerateJob) queueTasks(ctx context.Context, g *generate.Generator, queue chan<- Task) {
+func (j *GenerateJob) queueTasks(ctx context.Context, g *generate.Generator, paths []string, queue chan<- Task) {
j.totals = totalsGenerate{}
- j.queueScenesTasks(ctx, g, queue)
- j.queueImagesTasks(ctx, g, queue)
+ j.queueScenesTasks(ctx, g, paths, queue)
+ j.queueImagesTasks(ctx, g, paths, queue)
}
-func (j *GenerateJob) queueScenesTasks(ctx context.Context, g *generate.Generator, queue chan<- Task) {
+func (j *GenerateJob) queueScenesTasks(ctx context.Context, g *generate.Generator, paths []string, queue chan<- Task) {
const batchSize = 1000
findFilter := models.BatchFindFilter(batchSize)
+ sceneFilter := scene.FilterFromPaths(paths)
r := j.repository
@@ -295,7 +310,7 @@ func (j *GenerateJob) queueScenesTasks(ctx context.Context, g *generate.Generato
return
}
- scenes, err := scene.Query(ctx, r.Scene, nil, findFilter)
+ scenes, err := scene.Query(ctx, r.Scene, sceneFilter, findFilter)
if err != nil {
logger.Errorf("Error encountered queuing files to scan: %s", err.Error())
return
@@ -322,10 +337,11 @@ func (j *GenerateJob) queueScenesTasks(ctx context.Context, g *generate.Generato
}
}
-func (j *GenerateJob) queueImagesTasks(ctx context.Context, g *generate.Generator, queue chan<- Task) {
+func (j *GenerateJob) queueImagesTasks(ctx context.Context, g *generate.Generator, paths []string, queue chan<- Task) {
const batchSize = 1000
findFilter := models.BatchFindFilter(batchSize)
+ imageFilter := image.FilterFromPaths(paths)
r := j.repository
@@ -334,7 +350,7 @@ func (j *GenerateJob) queueImagesTasks(ctx context.Context, g *generate.Generato
return
}
- images, err := image.Query(ctx, r.Image, nil, findFilter)
+ images, err := image.Query(ctx, r.Image, imageFilter, findFilter)
if err != nil {
logger.Errorf("Error encountered queuing files to scan: %s", err.Error())
return
diff --git a/internal/manager/task_optimise.go b/internal/manager/task_optimise.go
index 9f85e961c..7b14acebf 100644
--- a/internal/manager/task_optimise.go
+++ b/internal/manager/task_optimise.go
@@ -35,7 +35,7 @@ func (j *OptimiseDatabaseJob) Execute(ctx context.Context, progress *job.Progres
return nil
}
if err != nil {
- return fmt.Errorf("Error analyzing database: %w", err)
+ return fmt.Errorf("error analyzing database: %w", err)
}
progress.ExecuteTask("Vacuuming database", func() {
diff --git a/internal/manager/task_plugin.go b/internal/manager/task_plugin.go
index 80f38598c..fb8cea0cb 100644
--- a/internal/manager/task_plugin.go
+++ b/internal/manager/task_plugin.go
@@ -20,12 +20,12 @@ func (s *Manager) RunPluginTask(
pluginProgress := make(chan float64)
task, err := s.PluginCache.CreateTask(ctx, pluginID, taskName, args, pluginProgress)
if err != nil {
- return fmt.Errorf("Error creating plugin task: %w", err)
+ return fmt.Errorf("error creating plugin task: %w", err)
}
err = task.Start()
if err != nil {
- return fmt.Errorf("Error running plugin task: %w", err)
+ return fmt.Errorf("error running plugin task: %w", err)
}
done := make(chan bool)
diff --git a/internal/manager/task_scan.go b/internal/manager/task_scan.go
index d09765577..155090cd2 100644
--- a/internal/manager/task_scan.go
+++ b/internal/manager/task_scan.go
@@ -26,6 +26,7 @@ import (
"github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/scene/generate"
"github.com/stashapp/stash/pkg/txn"
+ "github.com/stashapp/stash/pkg/utils"
)
type ScanJob struct {
@@ -35,6 +36,8 @@ type ScanJob struct {
fileQueue chan file.ScannedFile
count int
+
+ unmatchedCaptionFiles utils.MutexField[[]string]
}
func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error {
@@ -73,6 +76,8 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error {
j.scanner.ScanFilters = []file.PathFilter{newScanFilter(c, repo, minModTime)}
j.scanner.HandlerRequiredFilters = []file.Filter{newHandlerRequiredFilter(cfg, repo)}
+ logger.Infof("Starting scan of %d paths with %d parallel tasks", len(paths), nTasks)
+
j.runJob(ctx, paths, nTasks, progress)
taskQueue.Close()
@@ -83,7 +88,7 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error {
}
elapsed := time.Since(start)
- logger.Info(fmt.Sprintf("Scan finished (%s)", elapsed))
+ logger.Infof("Scan finished (%s)", elapsed)
j.subscriptions.notify()
return nil
@@ -166,12 +171,33 @@ func (j *ScanJob) queueFileFunc(ctx context.Context, f models.FS, zipFile *file.
return nil
}
- if !j.scanner.AcceptEntry(ctx, path, info) {
+ zipFilePath := ""
+ if zipFile != nil {
+ zipFilePath = zipFile.Path
+ }
+
+ if !j.scanner.AcceptEntry(ctx, path, info, zipFilePath) {
if info.IsDir() {
logger.Debugf("Skipping directory %s", path)
return fs.SkipDir
}
+ // we don't include caption files in the file scan, but we do need
+ // to handle them
+ if fsutil.MatchExtension(path, video.CaptionExts) {
+ fileRepo := j.scanner.Repository.File
+ matched := video.AssociateCaptions(ctx, path, j.scanner.Repository.TxnManager, fileRepo, fileRepo)
+
+ if !matched {
+ logger.Debugf("No matching video file found for caption file %s", path)
+ j.unmatchedCaptionFiles.SetFunc(func(files []string) []string {
+ return append(files, path)
+ })
+ }
+
+ return nil
+ }
+
logger.Debugf("Skipping file %s", path)
return nil
}
@@ -257,8 +283,10 @@ func (j *ScanJob) processQueue(ctx context.Context, parallelTasks int, progress
for f := range j.fileQueue {
logger.Tracef("Processing queued file %s", f.Path)
- if err := ctx.Err(); err != nil {
- return
+ if ctx.Err() != nil {
+ // Keep receiving until queueFiles closes the channel; otherwise
+ // the walker can block on send (full buffer) and never finish.
+ continue
}
wg.Add()
@@ -309,10 +337,53 @@ func (j *ScanJob) handleFile(ctx context.Context, f file.ScannedFile, progress *
return err
}
- // handle rename should have already handled the contents of the zip file
- // so shouldn't need to scan it again
+ // if this is a new video file, match it with any unmatched caption files
+ if r.New && len(j.unmatchedCaptionFiles.Get()) > 0 {
+ videoFile, _ := r.File.(*models.VideoFile)
- if (r.New || r.Updated) && j.scanner.IsZipFile(f.Info.Name()) {
+ if videoFile != nil {
+ // try to match any unmatched caption files to this video file
+ for _, captionPath := range j.unmatchedCaptionFiles.Get() {
+ if video.MatchesCaption(videoFile.Path, captionPath) {
+ video.AssociateCaptions(ctx, captionPath, j.scanner.Repository.TxnManager, j.scanner.Repository.File, j.scanner.Repository.File)
+
+ // remove from the unmatched list
+ j.unmatchedCaptionFiles.SetFunc(func(files []string) []string {
+							newFiles := make([]string, 0, len(files))
+ for _, f := range files {
+ if f != captionPath {
+ newFiles = append(newFiles, f)
+ }
+ }
+ return newFiles
+ })
+ }
+ }
+ }
+ }
+
+ // clean captions - scene handler handles this as well, but
+ // unchanged files aren't processed by the scene handler
+ if r.IsUnchanged() {
+ videoFile, _ := r.File.(*models.VideoFile)
+
+ if videoFile != nil {
+ txnMgr := j.scanner.Repository.TxnManager
+ fileRepo := j.scanner.Repository.File
+ if err := txn.WithDatabase(ctx, txnMgr, func(ctx context.Context) error {
+ return video.CleanCaptions(ctx, videoFile, txnMgr, fileRepo)
+ }); err != nil {
+ logger.Errorf("Error cleaning captions: %v", err)
+ }
+ }
+ }
+
+ // handle rename should have already handled the contents of the zip file
+ // so shouldn't need to scan it again.
+ // Only scan zip contents if the file is new, the fingerprint changed,
+ // or if a force rescan was requested.
+
+ if j.scanner.IsZipFile(f.Info.Name()) && (r.New || r.FingerprintChanged || j.scanner.Rescan) {
ff := r.File
f.BaseFile = ff.Base()
@@ -324,6 +395,8 @@ func (j *ScanJob) handleFile(ctx context.Context, f file.ScannedFile, progress *
if err := j.scanZipFile(zipCtx, f, progress); err != nil {
logger.Errorf("Error scanning zip file %q: %v", f.Path, err)
}
+ } else if r.Updated && j.scanner.IsZipFile(f.Info.Name()) {
+ logger.Debugf("Skipping zip file scan for %q: fingerprint unchanged", f.Path)
}
return nil
@@ -378,11 +451,10 @@ type sceneFinder interface {
// handlerRequiredFilter returns true if a File's handler needs to be executed despite the file not being updated.
type handlerRequiredFilter struct {
extensionConfig
- txnManager txn.Manager
- SceneFinder sceneFinder
- ImageFinder fileCounter
- GalleryFinder galleryFinder
- CaptionUpdater video.CaptionUpdater
+ txnManager txn.Manager
+ SceneFinder sceneFinder
+ ImageFinder fileCounter
+ GalleryFinder galleryFinder
FolderCache *lru.LRU[bool]
@@ -398,7 +470,6 @@ func newHandlerRequiredFilter(c *config.Config, repo models.Repository) *handler
SceneFinder: repo.Scene,
ImageFinder: repo.Image,
GalleryFinder: repo.Gallery,
- CaptionUpdater: repo.File,
FolderCache: lru.New[bool](processes * 2),
videoFileNamingAlgorithm: c.GetVideoFileNamingAlgorithm(),
}
@@ -473,65 +544,35 @@ func (f *handlerRequiredFilter) Accept(ctx context.Context, ff models.File) bool
}
}
- if isVideoFile {
- // TODO - check if the cover exists
- // hash := scene.GetHash(ff, f.videoFileNamingAlgorithm)
- // ssPath := instance.Paths.Scene.GetScreenshotPath(hash)
- // if exists, _ := fsutil.FileExists(ssPath); !exists {
- // // if not, check if the file is a primary file for a scene
- // scenes, err := f.SceneFinder.FindByPrimaryFileID(ctx, ff.Base().ID)
- // if err != nil {
- // // just ignore
- // return false
- // }
-
- // if len(scenes) > 0 {
- // // if it is, then it needs to be re-generated
- // return true
- // }
- // }
-
- // clean captions - scene handler handles this as well, but
- // unchanged files aren't processed by the scene handler
- videoFile, _ := ff.(*models.VideoFile)
- if videoFile != nil {
- if err := video.CleanCaptions(ctx, videoFile, f.txnManager, f.CaptionUpdater); err != nil {
- logger.Errorf("Error cleaning captions: %v", err)
- }
- }
- }
-
return false
}
type scanFilter struct {
extensionConfig
- txnManager txn.Manager
- FileFinder models.FileFinder
- CaptionUpdater video.CaptionUpdater
+ txnManager txn.Manager
stashPaths config.StashConfigs
generatedPath string
videoExcludeRegex []*regexp.Regexp
imageExcludeRegex []*regexp.Regexp
minModTime time.Time
+ stashIgnoreFilter *file.StashIgnoreFilter
}
func newScanFilter(c *config.Config, repo models.Repository, minModTime time.Time) *scanFilter {
return &scanFilter{
extensionConfig: newExtensionConfig(c),
txnManager: repo.TxnManager,
- FileFinder: repo.File,
- CaptionUpdater: repo.File,
stashPaths: c.GetStashPaths(),
generatedPath: c.GetGeneratedPath(),
videoExcludeRegex: generateRegexps(c.GetExcludes()),
imageExcludeRegex: generateRegexps(c.GetImageExcludes()),
minModTime: minModTime,
+ stashIgnoreFilter: file.NewStashIgnoreFilter(),
}
}
-func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool {
+func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool {
if fsutil.IsPathInDir(f.generatedPath, path) {
logger.Warnf("Skipping %q as it overlaps with the generated folder", path)
return false
@@ -548,19 +589,16 @@ func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo)
return false
}
+ // Check .stashignore files, bounded to the library root.
+ if !f.stashIgnoreFilter.Accept(ctx, path, info, s.Path, zipFilePath) {
+ logger.Debugf("Skipping %s due to .stashignore", path)
+ return false
+ }
+
isVideoFile := useAsVideo(path)
isImageFile := useAsImage(path)
isZipFile := fsutil.MatchExtension(path, f.zipExt)
- // handle caption files
- if fsutil.MatchExtension(path, video.CaptionExts) {
- // we don't include caption files in the file scan, but we do need
- // to handle them
- video.AssociateCaptions(ctx, path, f.txnManager, f.FileFinder, f.CaptionUpdater)
-
- return false
- }
-
if !info.IsDir() && !isVideoFile && !isImageFile && !isZipFile {
logger.Debugf("Skipping %s as it does not match any known file extensions", path)
return false
@@ -624,8 +662,9 @@ func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progre
&file.FilteredHandler{
Filter: file.FilterFunc(imageFileFilter),
Handler: &image.ScanHandler{
- CreatorUpdater: r.Image,
- GalleryFinder: r.Gallery,
+ CreatorUpdater: r.Image,
+ GalleryFinder: r.Gallery,
+ SceneFinderUpdater: r.Scene,
ScanGenerator: &imageGenerators{
input: options,
taskQueue: taskQueue,
@@ -654,9 +693,10 @@ func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progre
&file.FilteredHandler{
Filter: file.FilterFunc(videoFileFilter),
Handler: &scene.ScanHandler{
- CreatorUpdater: r.Scene,
- CaptionUpdater: r.File,
- PluginCache: pluginCache,
+ CreatorUpdater: r.Scene,
+ GalleryFinderUpdater: r.Gallery,
+ CaptionUpdater: r.File,
+ PluginCache: pluginCache,
ScanGenerator: &sceneGenerators{
input: options,
taskQueue: taskQueue,
diff --git a/internal/manager/task_stash_box_tag.go b/internal/manager/task_stash_box_tag.go
index 4848b46ad..264e7e96c 100644
--- a/internal/manager/task_stash_box_tag.go
+++ b/internal/manager/task_stash_box_tag.go
@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"strconv"
+ "strings"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/match"
@@ -12,6 +13,7 @@ import (
"github.com/stashapp/stash/pkg/sliceutil"
"github.com/stashapp/stash/pkg/stashbox"
"github.com/stashapp/stash/pkg/studio"
+ "github.com/stashapp/stash/pkg/tag"
)
// stashBoxBatchPerformerTagTask is used to tag or create performers from stash-box.
@@ -529,3 +531,235 @@ func (t *stashBoxBatchStudioTagTask) processParentStudio(ctx context.Context, pa
return err
}
}
+
+// stashBoxBatchTagTagTask is used to tag or create tags from stash-box.
+//
+// Two modes of operation:
+// - Update existing tag: set tag to update from stash-box data
+// - Create new tag: set name or stashID to search stash-box and create locally
+type stashBoxBatchTagTagTask struct {
+ box *models.StashBox
+ name *string
+ stashID *string
+ tag *models.Tag
+ createParent bool
+ excludedFields []string
+}
+
+func (t *stashBoxBatchTagTagTask) getName() string {
+ switch {
+ case t.name != nil:
+ return *t.name
+ case t.stashID != nil:
+ return *t.stashID
+ case t.tag != nil:
+ return t.tag.Name
+ default:
+ return ""
+ }
+}
+
+func (t *stashBoxBatchTagTagTask) Start(ctx context.Context) {
+ scrapedTag, err := t.findStashBoxTag(ctx)
+ if err != nil {
+ logger.Errorf("Error fetching tag data from stash-box: %v", err)
+ return
+ }
+
+ excluded := map[string]bool{}
+ for _, field := range t.excludedFields {
+ excluded[field] = true
+ }
+
+ if scrapedTag != nil {
+ t.processMatchedTag(ctx, scrapedTag, excluded)
+ } else {
+ logger.Infof("No match found for %s", t.getName())
+ }
+}
+
+func (t *stashBoxBatchTagTagTask) GetDescription() string {
+ return fmt.Sprintf("Tagging tag %s from stash-box", t.getName())
+}
+
+func (t *stashBoxBatchTagTagTask) findStashBoxTag(ctx context.Context) (*models.ScrapedTag, error) {
+ var results []*models.ScrapedTag
+ var err error
+
+ r := instance.Repository
+
+ client := stashbox.NewClient(*t.box, stashbox.ExcludeTagPatterns(instance.Config.GetScraperExcludeTagPatterns()))
+
+ nameQuery := ""
+
+ switch {
+ case t.name != nil:
+ nameQuery = *t.name
+ results, err = client.QueryTag(ctx, *t.name)
+ case t.stashID != nil:
+ results, err = client.QueryTag(ctx, *t.stashID)
+ case t.tag != nil:
+ var remoteID string
+ if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
+ if !t.tag.StashIDs.Loaded() {
+ err = t.tag.LoadStashIDs(ctx, r.Tag)
+ if err != nil {
+ return err
+ }
+ }
+ for _, id := range t.tag.StashIDs.List() {
+ if id.Endpoint == t.box.Endpoint {
+ remoteID = id.StashID
+ }
+ }
+ return nil
+ }); err != nil {
+ return nil, err
+ }
+
+ if remoteID != "" {
+ results, err = client.QueryTag(ctx, remoteID)
+ } else {
+ nameQuery = t.tag.Name
+ results, err = client.QueryTag(ctx, t.tag.Name)
+ }
+ }
+
+ if err != nil {
+ return nil, err
+ }
+
+ if len(results) == 0 {
+ return nil, nil
+ }
+
+ var result *models.ScrapedTag
+
+ // QueryTag returns tags that partially match the name, so find the exact match if searching by name
+ if nameQuery != "" {
+ for _, r := range results {
+ if strings.EqualFold(r.Name, nameQuery) {
+ result = r
+ break
+ }
+ }
+ } else {
+ result = results[0]
+ }
+
+ if result == nil {
+ return nil, nil
+ }
+
+ if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
+ return match.ScrapedTagHierarchy(ctx, r.Tag, result, t.box.Endpoint)
+ }); err != nil {
+ return nil, err
+ }
+
+ return result, nil
+}
+
+func (t *stashBoxBatchTagTagTask) processParentTag(ctx context.Context, parent *models.ScrapedTag, excluded map[string]bool) error {
+ if parent.StoredID == nil {
+ // Create new parent tag
+ newParentTag := parent.ToTag(t.box.Endpoint, excluded)
+
+ r := instance.Repository
+ err := r.WithTxn(ctx, func(ctx context.Context) error {
+ qb := r.Tag
+
+ if err := tag.ValidateCreate(ctx, *newParentTag, qb); err != nil {
+ return err
+ }
+
+ if err := qb.Create(ctx, &models.CreateTagInput{Tag: newParentTag}); err != nil {
+ return err
+ }
+
+ storedID := strconv.Itoa(newParentTag.ID)
+ parent.StoredID = &storedID
+ return nil
+ })
+ if err != nil {
+ logger.Errorf("Failed to create parent tag %s: %v", parent.Name, err)
+ } else {
+ logger.Infof("Created parent tag %s", parent.Name)
+ }
+ return err
+ }
+
+	// Parent tag already exists — nothing further to update
+ return nil
+}
+
+func (t *stashBoxBatchTagTagTask) processMatchedTag(ctx context.Context, s *models.ScrapedTag, excluded map[string]bool) {
+	// Determine the tag ID to update — either from the task's tag or from the
+	// StoredID set by match.ScrapedTagHierarchy (when batch adding by name and
+	// the tag already exists locally).
+ tagID := 0
+ if t.tag != nil {
+ tagID = t.tag.ID
+ } else if s.StoredID != nil {
+ tagID, _ = strconv.Atoi(*s.StoredID)
+ }
+
+ if s.Parent != nil && t.createParent {
+ if err := t.processParentTag(ctx, s.Parent, excluded); err != nil {
+ return
+ }
+ }
+
+ if tagID > 0 {
+ r := instance.Repository
+ err := r.WithTxn(ctx, func(ctx context.Context) error {
+ qb := r.Tag
+
+ existingStashIDs, err := qb.GetStashIDs(ctx, tagID)
+ if err != nil {
+ return err
+ }
+
+ storedID := strconv.Itoa(tagID)
+ partial := s.ToPartial(storedID, t.box.Endpoint, excluded, existingStashIDs)
+
+ if err := tag.ValidateUpdate(ctx, tagID, partial, qb); err != nil {
+ return err
+ }
+
+ if _, err := qb.UpdatePartial(ctx, tagID, partial); err != nil {
+ return err
+ }
+
+ return nil
+ })
+ if err != nil {
+ logger.Errorf("Failed to update tag %s: %v", s.Name, err)
+ } else {
+ logger.Infof("Updated tag %s", s.Name)
+ }
+ } else if s.Name != "" {
+ // no existing tag, create a new one
+ newTag := s.ToTag(t.box.Endpoint, excluded)
+
+ r := instance.Repository
+ err := r.WithTxn(ctx, func(ctx context.Context) error {
+ qb := r.Tag
+
+ if err := tag.ValidateCreate(ctx, *newTag, qb); err != nil {
+ return err
+ }
+
+ if err := qb.Create(ctx, &models.CreateTagInput{Tag: newTag}); err != nil {
+ return err
+ }
+
+ return nil
+ })
+ if err != nil {
+ logger.Errorf("Failed to create tag %s: %v", s.Name, err)
+ } else {
+ logger.Infof("Created tag %s", s.Name)
+ }
+ }
+}
diff --git a/pkg/ffmpeg/codec_hardware.go b/pkg/ffmpeg/codec_hardware.go
index aa8c75dcc..a83830c52 100644
--- a/pkg/ffmpeg/codec_hardware.go
+++ b/pkg/ffmpeg/codec_hardware.go
@@ -45,13 +45,13 @@ func (f *FFMpeg) InitHWSupport(ctx context.Context) {
// log if the initialization takes too long
const hwInitLogTimeoutSecondsDefault = 5
- hwInitLogTimeoutSeconds := hwInitLogTimeoutSecondsDefault * time.Second
- timer := time.NewTimer(hwInitLogTimeoutSeconds)
+ hwInitLogTimeout := hwInitLogTimeoutSecondsDefault * time.Second
+ timer := time.NewTimer(hwInitLogTimeout)
go func() {
select {
case <-timer.C:
- logger.Warnf("[InitHWSupport] Hardware codec initialization is taking longer than %s...", hwInitLogTimeoutSeconds)
+ logger.Warnf("[InitHWSupport] Hardware codec initialization is taking longer than %s...", hwInitLogTimeout)
logger.Info("[InitHWSupport] Hardware encoding will not be available until initialization is complete.")
case <-done:
if !timer.Stop() {
@@ -96,16 +96,16 @@ func (f *FFMpeg) initHWSupport(ctx context.Context) {
// #6064 - add timeout to context to prevent hangs
const hwTestTimeoutSecondsDefault = 10
- hwTestTimeoutSeconds := hwTestTimeoutSecondsDefault * time.Second
+ hwTestTimeout := hwTestTimeoutSecondsDefault * time.Second
// allow timeout to be overridden with environment variable
if timeout := os.Getenv("STASH_HW_TEST_TIMEOUT"); timeout != "" {
if seconds, err := strconv.Atoi(timeout); err == nil {
- hwTestTimeoutSeconds = time.Duration(seconds) * time.Second
+ hwTestTimeout = time.Duration(seconds) * time.Second
}
}
- testCtx, cancel := context.WithTimeout(ctx, hwTestTimeoutSeconds)
+ testCtx, cancel := context.WithTimeout(ctx, hwTestTimeout)
defer cancel()
cmd := f.Command(testCtx, args)
@@ -117,7 +117,7 @@ func (f *FFMpeg) initHWSupport(ctx context.Context) {
if err := cmd.Run(); err != nil {
if testCtx.Err() != nil {
- logger.Debugf("[InitHWSupport] Codec %s test timed out after %s", codec, hwTestTimeoutSeconds)
+ logger.Debugf("[InitHWSupport] Codec %s test timed out after %s", codec, hwTestTimeout)
continue
}
@@ -185,6 +185,12 @@ func (f *FFMpeg) hwCanFullHWTranscode(ctx context.Context, codec VideoCodec, vf
// Prepend input for hardware encoding only
func (f *FFMpeg) hwDeviceInit(args Args, toCodec VideoCodec, fullhw bool) Args {
+ // check for custom /dev/dri device #6435
+ driDevice := os.Getenv("STASH_HW_DRI_DEVICE")
+ if driDevice == "" {
+ driDevice = "/dev/dri/renderD128"
+ }
+
switch toCodec {
case VideoCodecN264,
VideoCodecN264H:
@@ -201,7 +207,7 @@ func (f *FFMpeg) hwDeviceInit(args Args, toCodec VideoCodec, fullhw bool) Args {
case VideoCodecV264,
VideoCodecVVP9:
args = append(args, "-vaapi_device")
- args = append(args, "/dev/dri/renderD128")
+ args = append(args, driDevice)
if fullhw {
args = append(args, "-hwaccel")
args = append(args, "vaapi")
diff --git a/pkg/file/clean.go b/pkg/file/clean.go
index 53b2e0612..369600f4c 100644
--- a/pkg/file/clean.go
+++ b/pkg/file/clean.go
@@ -33,6 +33,11 @@ type cleanJob struct {
type CleanOptions struct {
Paths []string
+	// IgnoreZipFileContents will skip checking the contents of zip files when determining whether to clean a file.
+ // This can significantly speed up the clean process, but will potentially miss removed files within zip files.
+	// Where users do not modify zip file contents directly, this should be safe to use.
+ IgnoreZipFileContents bool
+
// Do a dry run. Don't delete any files
DryRun bool
@@ -174,13 +179,16 @@ func (j *cleanJob) assessFiles(ctx context.Context, toDelete *deleteSet) error {
more := true
r := j.Repository
+
+ includeZipContents := !j.options.IgnoreZipFileContents
+
if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
for more {
if job.IsCancelled(ctx) {
return nil
}
- files, err := r.File.FindAllInPaths(ctx, j.options.Paths, batchSize, offset)
+ files, err := r.File.FindAllInPaths(ctx, j.options.Paths, includeZipContents, batchSize, offset)
if err != nil {
return fmt.Errorf("error querying for files: %w", err)
}
@@ -258,6 +266,8 @@ func (j *cleanJob) assessFolders(ctx context.Context, toDelete *deleteSet) error
offset := 0
progress := j.progress
+ includeZipContents := !j.options.IgnoreZipFileContents
+
more := true
r := j.Repository
if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
@@ -266,7 +276,7 @@ func (j *cleanJob) assessFolders(ctx context.Context, toDelete *deleteSet) error
return nil
}
- folders, err := r.Folder.FindAllInPaths(ctx, j.options.Paths, batchSize, offset)
+ folders, err := r.Folder.FindAllInPaths(ctx, j.options.Paths, includeZipContents, batchSize, offset)
if err != nil {
return fmt.Errorf("error querying for folders: %w", err)
}
@@ -348,8 +358,14 @@ func (j *cleanJob) shouldClean(ctx context.Context, f models.File) bool {
// run through path filter, if returns false then the file should be cleaned
filter := j.options.PathFilter
+ // need to get the zip file path if present
+ zipFilePath := ""
+ if f.Base().ZipFile != nil {
+ zipFilePath = f.Base().ZipFile.Base().Path
+ }
+
// don't log anything - assume filter will have logged the reason
- return !filter.Accept(ctx, path, info)
+ return !filter.Accept(ctx, path, info, zipFilePath)
}
func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *models.Folder) bool {
@@ -387,8 +403,14 @@ func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *models.Folder) bool
// run through path filter, if returns false then the file should be cleaned
filter := j.options.PathFilter
+ // need to get the zip file path if present
+ zipFilePath := ""
+ if f.ZipFile != nil {
+ zipFilePath = f.ZipFile.Base().Path
+ }
+
// don't log anything - assume filter will have logged the reason
- return !filter.Accept(ctx, path, info)
+ return !filter.Accept(ctx, path, info, zipFilePath)
}
func (j *cleanJob) deleteFile(ctx context.Context, fileID models.FileID, fn string) {
diff --git a/pkg/file/folder.go b/pkg/file/folder.go
index fe260c155..249f73a7a 100644
--- a/pkg/file/folder.go
+++ b/pkg/file/folder.go
@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"path/filepath"
+ "slices"
"strings"
"time"
@@ -12,8 +13,9 @@ import (
)
// GetOrCreateFolderHierarchy gets the folder for the given path, or creates a folder hierarchy for the given path if one if no existing folder is found.
-// Does not create any folders in the file system
-func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreator, path string) (*models.Folder, error) {
+// Creates folder entries for each level of the hierarchy that doesn't already exist, up to the provided root paths.
+// Does not create any folders in the file system.
+func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreator, path string, rootPaths []string) (*models.Folder, error) {
// get or create folder hierarchy
// assume case sensitive when searching for the folder
const caseSensitive = true
@@ -23,17 +25,33 @@ func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreat
}
if folder == nil {
- parentPath := filepath.Dir(path)
- parent, err := GetOrCreateFolderHierarchy(ctx, fc, parentPath)
- if err != nil {
- return nil, err
+ var parentID *models.FolderID
+
+ if !slices.Contains(rootPaths, path) {
+ parentPath := filepath.Dir(path)
+
+ // safety check - don't allow parent path to be the same as the current path,
+ // otherwise we could end up in an infinite loop
+ if parentPath == path {
+			// #6618 - log a warning and return a nil folder with no error;
+			// NOTE(review): callers receive a nil *Folder here — confirm they guard against it
+ logger.Warnf("parent path is the same as the current path: %s", path)
+ return nil, nil
+ }
+
+ parent, err := GetOrCreateFolderHierarchy(ctx, fc, parentPath, rootPaths)
+ if err != nil {
+ return nil, err
+ }
+
+ parentID = &parent.ID
}
now := time.Now()
folder = &models.Folder{
Path: path,
- ParentFolderID: &parent.ID,
+ ParentFolderID: parentID,
DirEntry: models.DirEntry{
// leave mod time empty for now - it will be updated when the folder is scanned
},
@@ -41,6 +59,8 @@ func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreat
UpdatedAt: now,
}
+ logger.Infof("%s doesn't exist. Creating new folder entry...", path)
+
if err = fc.Create(ctx, folder); err != nil {
return nil, fmt.Errorf("creating folder %s: %w", path, err)
}
@@ -49,12 +69,18 @@ func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreat
return folder, nil
}
-func transferZipHierarchy(ctx context.Context, folderStore models.FolderReaderWriter, files models.FileFinderUpdater, zipFileID models.FileID, oldPath string, newPath string) error {
- if err := transferZipFolderHierarchy(ctx, folderStore, zipFileID, oldPath, newPath); err != nil {
+type zipHierarchyMover struct {
+ folderStore models.FolderReaderWriter
+ files models.FileFinderUpdater
+ rootPaths []string
+}
+
+func (m zipHierarchyMover) transferZipHierarchy(ctx context.Context, zipFileID models.FileID, oldPath string, newPath string) error {
+ if err := m.transferZipFolderHierarchy(ctx, zipFileID, oldPath, newPath); err != nil {
return fmt.Errorf("moving folder hierarchy for file %s: %w", oldPath, err)
}
- if err := transferZipFileEntries(ctx, folderStore, files, zipFileID, oldPath, newPath); err != nil {
+ if err := m.transferZipFileEntries(ctx, zipFileID, oldPath, newPath); err != nil {
return fmt.Errorf("moving zip file contents for file %s: %w", oldPath, err)
}
@@ -63,8 +89,8 @@ func transferZipHierarchy(ctx context.Context, folderStore models.FolderReaderWr
// transferZipFolderHierarchy creates the folder hierarchy for zipFileID under newPath, and removes
// ZipFileID from folders under oldPath.
-func transferZipFolderHierarchy(ctx context.Context, folderStore models.FolderReaderWriter, zipFileID models.FileID, oldPath string, newPath string) error {
- zipFolders, err := folderStore.FindByZipFileID(ctx, zipFileID)
+func (m zipHierarchyMover) transferZipFolderHierarchy(ctx context.Context, zipFileID models.FileID, oldPath string, newPath string) error {
+ zipFolders, err := m.folderStore.FindByZipFileID(ctx, zipFileID)
if err != nil {
return err
}
@@ -83,7 +109,7 @@ func transferZipFolderHierarchy(ctx context.Context, folderStore models.FolderRe
}
newZfPath := filepath.Join(newPath, relZfPath)
- newFolder, err := GetOrCreateFolderHierarchy(ctx, folderStore, newZfPath)
+ newFolder, err := GetOrCreateFolderHierarchy(ctx, m.folderStore, newZfPath, m.rootPaths)
if err != nil {
return err
}
@@ -91,14 +117,14 @@ func transferZipFolderHierarchy(ctx context.Context, folderStore models.FolderRe
// add ZipFileID to new folder
logger.Debugf("adding zip file %s to folder %s", zipFileID, newFolder.Path)
newFolder.ZipFileID = &zipFileID
- if err = folderStore.Update(ctx, newFolder); err != nil {
+ if err = m.folderStore.Update(ctx, newFolder); err != nil {
return err
}
// remove ZipFileID from old folder
logger.Debugf("removing zip file %s from folder %s", zipFileID, oldFolder.Path)
oldFolder.ZipFileID = nil
- if err = folderStore.Update(ctx, oldFolder); err != nil {
+ if err = m.folderStore.Update(ctx, oldFolder); err != nil {
return err
}
}
@@ -106,9 +132,9 @@ func transferZipFolderHierarchy(ctx context.Context, folderStore models.FolderRe
return nil
}
-func transferZipFileEntries(ctx context.Context, folders models.FolderFinderCreator, files models.FileFinderUpdater, zipFileID models.FileID, oldPath, newPath string) error {
+func (m zipHierarchyMover) transferZipFileEntries(ctx context.Context, zipFileID models.FileID, oldPath, newPath string) error {
// move contained files if file is a zip file
- zipFiles, err := files.FindByZipFileID(ctx, zipFileID)
+ zipFiles, err := m.files.FindByZipFileID(ctx, zipFileID)
if err != nil {
return fmt.Errorf("finding contained files in file %s: %w", oldPath, err)
}
@@ -129,7 +155,7 @@ func transferZipFileEntries(ctx context.Context, folders models.FolderFinderCrea
newZfDir := filepath.Join(newPath, relZfDir)
// folder should have been created by transferZipFolderHierarchy
- newZfFolder, err := GetOrCreateFolderHierarchy(ctx, folders, newZfDir)
+ newZfFolder, err := GetOrCreateFolderHierarchy(ctx, m.folderStore, newZfDir, m.rootPaths)
if err != nil {
return fmt.Errorf("getting or creating folder hierarchy: %w", err)
}
@@ -137,7 +163,7 @@ func transferZipFileEntries(ctx context.Context, folders models.FolderFinderCrea
// update file parent folder
zfBase.ParentFolderID = newZfFolder.ID
logger.Debugf("moving %s to folder %s", zfBase.Path, newZfFolder.Path)
- if err := files.Update(ctx, zf); err != nil {
+ if err := m.files.Update(ctx, zf); err != nil {
return fmt.Errorf("updating file %s: %w", oldZfPath, err)
}
}
diff --git a/pkg/file/folder_rename_detect.go b/pkg/file/folder_rename_detect.go
index cfae7e4fb..d45593b28 100644
--- a/pkg/file/folder_rename_detect.go
+++ b/pkg/file/folder_rename_detect.go
@@ -2,7 +2,6 @@ package file
import (
"context"
- "errors"
"fmt"
"io/fs"
@@ -88,6 +87,11 @@ func (s *Scanner) detectFolderMove(ctx context.Context, file ScannedFile) (*mode
r := s.Repository
+ zipFilePath := ""
+ if file.ZipFile != nil {
+ zipFilePath = file.ZipFile.Base().Path
+ }
+
if err := SymWalk(file.FS, file.Path, func(path string, d fs.DirEntry, err error) error {
if err != nil {
// don't let errors prevent scanning
@@ -111,7 +115,7 @@ func (s *Scanner) detectFolderMove(ctx context.Context, file ScannedFile) (*mode
return nil
}
- if !s.AcceptEntry(ctx, path, info) {
+ if !s.AcceptEntry(ctx, path, info, zipFilePath) {
return nil
}
@@ -161,9 +165,7 @@ func (s *Scanner) detectFolderMove(ctx context.Context, file ScannedFile) (*mode
continue
}
- if !errors.Is(err, fs.ErrNotExist) {
- return fmt.Errorf("checking for parent folder %q: %w", pf.Path, err)
- }
+ // treat any error as missing folder
// parent folder is missing, possible candidate
// count the total number of files in the existing folder
diff --git a/pkg/file/handler.go b/pkg/file/handler.go
index 10616eefa..b4056f195 100644
--- a/pkg/file/handler.go
+++ b/pkg/file/handler.go
@@ -9,7 +9,7 @@ import (
// PathFilter provides a filter function for paths.
type PathFilter interface {
- Accept(ctx context.Context, path string, info fs.FileInfo) bool
+ Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool
}
type PathFilterFunc func(path string) bool
diff --git a/pkg/file/move.go b/pkg/file/move.go
index ba2a496bb..1f0a5012c 100644
--- a/pkg/file/move.go
+++ b/pkg/file/move.go
@@ -45,9 +45,12 @@ type Mover struct {
moved map[string]string
foldersCreated []string
+
+ // needed for creating folder hierarchy when moving zip file entries
+ rootPaths []string
}
-func NewMover(fileStore models.FileFinderUpdater, folderStore models.FolderReaderWriter) *Mover {
+func NewMover(fileStore models.FileFinderUpdater, folderStore models.FolderReaderWriter, rootPaths []string) *Mover {
return &Mover{
Files: fileStore,
Folders: folderStore,
@@ -55,6 +58,7 @@ func NewMover(fileStore models.FileFinderUpdater, folderStore models.FolderReade
renamerRemoverImpl: newRenamerRemoverImpl(),
mkDirFn: os.Mkdir,
},
+ rootPaths: rootPaths,
}
}
@@ -87,7 +91,13 @@ func (m *Mover) Move(ctx context.Context, f models.File, folder *models.Folder,
return fmt.Errorf("file %s already exists", newPath)
}
- if err := transferZipHierarchy(ctx, m.Folders, m.Files, fBase.ID, oldPath, newPath); err != nil {
+ zipMover := zipHierarchyMover{
+ folderStore: m.Folders,
+ files: m.Files,
+ rootPaths: m.rootPaths,
+ }
+
+ if err := zipMover.transferZipHierarchy(ctx, fBase.ID, oldPath, newPath); err != nil {
return fmt.Errorf("moving folder hierarchy for file %s: %w", fBase.Path, err)
}
@@ -195,6 +205,25 @@ func correctSubFolderHierarchy(ctx context.Context, rw models.FolderReaderWriter
logger.Debugf("updating folder %s to %s", oldPath, correctPath)
+ // #6427 - ensure folder entry with new path doesn't already exist
+ const caseSensitive = true
+ existing, err := rw.FindByPath(ctx, correctPath, caseSensitive)
+ if err != nil {
+ return fmt.Errorf("finding folder by path %s: %w", correctPath, err)
+ }
+
+ if existing != nil {
+ // this should no longer be possible, but if it does happen, log a warning
+ // and skip updating this folder and its subfolders
+ logger.Warnf("folder with path %s already exists, setting parent_folder_id of %s to NULL and skipping", correctPath, oldPath)
+ f.ParentFolderID = nil
+ if err := rw.Update(ctx, f); err != nil {
+ return fmt.Errorf("updating folder parent id to NULL for folder %s: %w", oldPath, err)
+ }
+
+ continue
+ }
+
f.Path = correctPath
if err := rw.Update(ctx, f); err != nil {
return fmt.Errorf("updating folder path %s -> %s: %w", oldPath, f.Path, err)
diff --git a/pkg/file/scan.go b/pkg/file/scan.go
index d9a58ad44..4cfcaf7ae 100644
--- a/pkg/file/scan.go
+++ b/pkg/file/scan.go
@@ -5,6 +5,7 @@ import (
"fmt"
"io/fs"
"path/filepath"
+ "slices"
"strings"
"sync"
"time"
@@ -60,6 +61,10 @@ type Scanner struct {
// handlers are called after a file has been scanned.
FileHandlers []Handler
+ // RootPaths form the top-level paths for the library.
+ // Used to determine the root of the folder hierarchy when creating folders.
+ RootPaths []string
+
// Rescan indicates whether files should be rescanned even if they haven't changed.
Rescan bool
@@ -106,12 +111,12 @@ type ScannedFile struct {
}
// AcceptEntry determines if the file entry should be accepted for scanning
-func (s *Scanner) AcceptEntry(ctx context.Context, path string, info fs.FileInfo) bool {
+func (s *Scanner) AcceptEntry(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool {
// always accept if there's no filters
accept := len(s.ScanFilters) == 0
for _, filter := range s.ScanFilters {
// accept if any filter accepts the file
- if filter.Accept(ctx, path, info) {
+ if filter.Accept(ctx, path, info, zipFilePath) {
accept = true
break
}
@@ -193,6 +198,10 @@ func (s *Scanner) ScanFolder(ctx context.Context, file ScannedFile) (*models.Fol
return f, err
}
+func (s *Scanner) isRootPath(path string) bool {
+ return path == "." || slices.Contains(s.RootPaths, path)
+}
+
func (s *Scanner) onNewFolder(ctx context.Context, file ScannedFile) (*models.Folder, error) {
renamed, err := s.handleFolderRename(ctx, file)
if err != nil {
@@ -212,18 +221,16 @@ func (s *Scanner) onNewFolder(ctx context.Context, file ScannedFile) (*models.Fo
UpdatedAt: now,
}
- dir := filepath.Dir(file.Path)
- if dir != "." {
- parentFolderID, err := s.getFolderID(ctx, dir)
+ if !s.isRootPath(file.Path) {
+ dir := filepath.Dir(file.Path)
+
+ // create full folder hierarchy if parent folder doesn't exist, and set parent folder ID
+ parentFolder, err := GetOrCreateFolderHierarchy(ctx, s.Repository.Folder, dir, s.RootPaths)
if err != nil {
return nil, fmt.Errorf("getting parent folder %q: %w", dir, err)
}
- // if parent folder doesn't exist, assume it's a top-level folder
- // this may not be true if we're using multiple goroutines
- if parentFolderID != nil {
- toCreate.ParentFolderID = parentFolderID
- }
+ toCreate.ParentFolderID = &parentFolder.ID
}
txn.AddPostCommitHook(ctx, func(ctx context.Context) {
@@ -312,6 +319,19 @@ func (s *Scanner) onExistingFolder(ctx context.Context, f ScannedFile, existing
}
}
+ // handle case where parent folder was not previously set
+ if existing.ParentFolderID == nil && !s.isRootPath(existing.Path) {
+ logger.Infof("Existing folder entry %q has no parent folder. Creating folder hierarchy and setting parent ID...", existing.Path)
+
+ // create full folder hierarchy if parent folder doesn't exist, and set parent folder ID
+ parentFolder, err := GetOrCreateFolderHierarchy(ctx, s.Repository.Folder, filepath.Dir(f.Path), s.RootPaths)
+ if err != nil {
+ return nil, fmt.Errorf("getting parent folder for %q: %w", f.Path, err)
+ }
+ existing.ParentFolderID = &parentFolder.ID
+ update = true
+ }
+
if update {
var err error
if err = s.Repository.Folder.Update(ctx, existing); err != nil {
@@ -323,10 +343,15 @@ func (s *Scanner) onExistingFolder(ctx context.Context, f ScannedFile, existing
}
type ScanFileResult struct {
- File models.File
- New bool
- Renamed bool
- Updated bool
+ File models.File
+ New bool
+ Renamed bool
+ Updated bool
+ FingerprintChanged bool
+}
+
+func (r ScanFileResult) IsUnchanged() bool {
+ return !r.New && !r.Renamed && !r.Updated
}
// ScanFile scans the provided file into the database, returning the scan result.
@@ -393,13 +418,31 @@ func (s *Scanner) onNewFile(ctx context.Context, f ScannedFile) (*ScanFileResult
baseFile.UpdatedAt = now
// find the parent folder
- parentFolderID, err := s.getFolderID(ctx, filepath.Dir(path))
+ folderPath := filepath.Dir(path)
+ parentFolderID, err := s.getFolderID(ctx, folderPath)
if err != nil {
return nil, fmt.Errorf("getting parent folder for %q: %w", path, err)
}
if parentFolderID == nil {
- return nil, fmt.Errorf("parent folder for %q doesn't exist", path)
+ // parent folders should have been created before scanning this file in a recursive scan
+ // assume that we are scanning specifically and only this file,
+ // so we should create the parent folder hierarchy if it doesn't exist
+ if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error {
+ parentFolder, err := GetOrCreateFolderHierarchy(ctx, s.Repository.Folder, folderPath, s.RootPaths)
+ if err != nil {
+ return fmt.Errorf("getting parent folder for %q: %w", f.Path, err)
+ }
+
+ parentFolderID = &parentFolder.ID
+ return nil
+ }); err != nil {
+ return nil, err
+ }
+ }
+ if parentFolderID == nil {
+ // shouldn't happen
+ return nil, fmt.Errorf("parent folder ID is nil for %q", path)
}
baseFile.ParentFolderID = *parentFolderID
@@ -419,7 +462,11 @@ func (s *Scanner) onNewFile(ctx context.Context, f ScannedFile) (*ScanFileResult
// determine if the file is renamed from an existing file in the store
// do this after decoration so that missing fields can be populated
- renamed, err := s.handleRename(ctx, file, fp)
+ zipFilePath := ""
+ if f.ZipFile != nil {
+ zipFilePath = f.ZipFile.Base().Path
+ }
+ renamed, err := s.handleRename(ctx, file, fp, zipFilePath)
if err != nil {
return nil, err
}
@@ -529,7 +576,7 @@ func (s *Scanner) getFileFS(f *models.BaseFile) (models.FS, error) {
return fs.OpenZip(zipPath, zipSize)
}
-func (s *Scanner) handleRename(ctx context.Context, f models.File, fp []models.Fingerprint) (models.File, error) {
+func (s *Scanner) handleRename(ctx context.Context, f models.File, fp []models.Fingerprint, zipFilePath string) (models.File, error) {
var others []models.File
for _, tfp := range fp {
@@ -571,7 +618,7 @@ func (s *Scanner) handleRename(ctx context.Context, f models.File, fp []models.F
// treat as a move
missing = append(missing, other)
}
- case !s.AcceptEntry(ctx, other.Base().Path, info):
+ case !s.AcceptEntry(ctx, other.Base().Path, info, zipFilePath):
// #4393 - if the file is no longer in the configured library paths, treat it as a move
logger.Debugf("File %q no longer in library paths. Treating as a move.", other.Base().Path)
missing = append(missing, other)
@@ -604,13 +651,19 @@ func (s *Scanner) handleRename(ctx context.Context, f models.File, fp []models.F
fBaseCopy.Fingerprints = updatedBase.Fingerprints
*updatedBase = fBaseCopy
+ zipMover := zipHierarchyMover{
+ folderStore: s.Repository.Folder,
+ files: s.Repository.File,
+ rootPaths: s.RootPaths,
+ }
+
if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error {
if err := s.Repository.File.Update(ctx, updated); err != nil {
return fmt.Errorf("updating file for rename %q: %w", newPath, err)
}
if s.IsZipFile(updatedBase.Basename) {
- if err := transferZipHierarchy(ctx, s.Repository.Folder, s.Repository.File, updatedBase.ID, oldPath, newPath); err != nil {
+ if err := zipMover.transferZipHierarchy(ctx, updatedBase.ID, oldPath, newPath); err != nil {
return fmt.Errorf("moving zip hierarchy for renamed zip file %q: %w", newPath, err)
}
}
@@ -743,6 +796,9 @@ func (s *Scanner) onExistingFile(ctx context.Context, f ScannedFile, existing mo
return nil, err
}
+ oldFingerprints := existing.Base().Fingerprints
+ fingerprintChanged := fp.ContentsChanged(oldFingerprints)
+
s.removeOutdatedFingerprints(existing, fp)
existing.SetFingerprints(fp)
@@ -766,8 +822,9 @@ func (s *Scanner) onExistingFile(ctx context.Context, f ScannedFile, existing mo
return nil, err
}
return &ScanFileResult{
- File: existing,
- Updated: true,
+ File: existing,
+ Updated: true,
+ FingerprintChanged: fingerprintChanged,
}, nil
}
diff --git a/pkg/file/stashignore.go b/pkg/file/stashignore.go
new file mode 100644
index 000000000..a6de050c6
--- /dev/null
+++ b/pkg/file/stashignore.go
@@ -0,0 +1,264 @@
+package file
+
+import (
+ "context"
+ "io/fs"
+ "os"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ lru "github.com/hashicorp/golang-lru/v2"
+ ignore "github.com/sabhiram/go-gitignore"
+ "github.com/stashapp/stash/pkg/logger"
+)
+
+const stashIgnoreFilename = ".stashignore"
+
+// entriesCacheSize is the size of the LRU cache for collected ignore entries.
+// This cache stores the computed list of ignore entries per directory, avoiding
+// repeated directory tree walks for files in the same directory.
+const entriesCacheSize = 500
+
+// StashIgnoreFilter implements PathFilter to exclude files/directories
+// based on .stashignore files with gitignore-style patterns.
+type StashIgnoreFilter struct {
+ // cache stores compiled ignore patterns per directory.
+ cache sync.Map // map[string]*ignoreEntry
+ // entriesCache stores collected ignore entries per (dir, libraryRoot) pair.
+ // This avoids recomputing the entry list for every file in the same directory.
+ entriesCache *lru.Cache[string, []*ignoreEntry]
+}
+
+// ignoreEntry holds the compiled ignore patterns for a directory.
+type ignoreEntry struct {
+ // patterns is the compiled gitignore matcher for this directory.
+ patterns *ignore.GitIgnore
+ // dir is the directory this entry applies to.
+ dir string
+}
+
+// NewStashIgnoreFilter constructs a StashIgnoreFilter with an initialized
+// LRU cache for per-directory ignore-entry lists.
+func NewStashIgnoreFilter() *StashIgnoreFilter {
+	// lru.New only fails for a non-positive size, and entriesCacheSize is a
+	// positive constant, so the error can safely be discarded.
+	cache, _ := lru.New[string, []*ignoreEntry](entriesCacheSize)
+	return &StashIgnoreFilter{entriesCache: cache}
+}
+
+// Accept returns true if the path should be included in the scan.
+// It checks for .stashignore files in the directory hierarchy and
+// applies gitignore-style pattern matching.
+// The libraryRoot parameter bounds the search for .stashignore files -
+// only directories within the library root are checked.
+// zipFilePath is the path of the zip file if the file is inside a zip.
+// .stashignore files will not be read within zip files.
+func (f *StashIgnoreFilter) Accept(ctx context.Context, path string, info fs.FileInfo, libraryRoot string, zipFilePath string) bool {
+	// If no library root provided, accept the file (safety fallback).
+	if libraryRoot == "" {
+		return true
+	}
+
+	// Get the directory containing this path.
+	dir := filepath.Dir(path)
+
+	// If the file is inside a zip, use the zip file's directory as the base for
+	// .stashignore lookup; .stashignore files inside zips are never read.
+	if zipFilePath != "" {
+		dir = filepath.Dir(zipFilePath)
+	}
+
+	// Collect all applicable ignore entries from library root to this directory.
+	entries := f.collectIgnoreEntries(dir, libraryRoot)
+
+	// If no .stashignore files found, accept the file.
+	if len(entries) == 0 {
+		return true
+	}
+
+	// Check each ignore entry in order (from root to most specific).
+	// Negation patterns are honoured within a single .stashignore file by
+	// MatchesPath itself. NOTE(review): as written, a negation pattern in a
+	// deeper .stashignore cannot re-include a path ignored by a shallower
+	// file, because a negated-only match reports false here and `ignored` is
+	// never reset - confirm whether cross-file negation is intended.
+	ignored := false
+	for _, entry := range entries {
+		// Get path relative to the ignore file's directory; entries whose
+		// directory cannot be related to the path are skipped.
+		entryRelPath, err := filepath.Rel(entry.dir, path)
+		if err != nil {
+			continue
+		}
+		// Ignore-file patterns are slash-separated regardless of OS.
+		entryRelPath = filepath.ToSlash(entryRelPath)
+		// Append a trailing slash for directories so directory-only patterns
+		// (e.g. "temp/") can match.
+		if info.IsDir() {
+			entryRelPath += "/"
+		}
+
+		if entry.patterns.MatchesPath(entryRelPath) {
+			ignored = true
+		}
+	}
+
+	return !ignored
+}
+
+// collectIgnoreEntries gathers all ignore entries from library root to the given directory.
+// It walks up the directory tree from dir to libraryRoot and returns entries in order
+// from root to most specific. Results are cached to avoid repeated computation for
+// files in the same directory.
+func (f *StashIgnoreFilter) collectIgnoreEntries(dir string, libraryRoot string) []*ignoreEntry {
+	// Clean paths for consistent comparison and cache key generation.
+	dir = filepath.Clean(dir)
+	libraryRoot = filepath.Clean(libraryRoot)
+
+	// Build cache key from dir and libraryRoot. NUL is used as the separator
+	// since it cannot occur inside a file path on supported platforms.
+	cacheKey := dir + "\x00" + libraryRoot
+
+	// Check the entries cache first. The returned slice is shared with the
+	// cache and must be treated as read-only by callers.
+	if cached, ok := f.entriesCache.Get(cacheKey); ok {
+		return cached
+	}
+
+	// Try subdirectory shortcut: if parent's entries are cached, extend them.
+	if dir != libraryRoot {
+		parent := filepath.Dir(dir)
+		if isPathInOrEqual(libraryRoot, parent) {
+			parentKey := parent + "\x00" + libraryRoot
+			if parentEntries, ok := f.entriesCache.Get(parentKey); ok {
+				// Parent is cached - just check if current dir has a .stashignore.
+				entries := parentEntries
+				if entry := f.getOrLoadIgnoreEntry(dir); entry != nil {
+					// Copy parent slice and append to avoid mutating cached slice.
+					entries = make([]*ignoreEntry, len(parentEntries), len(parentEntries)+1)
+					copy(entries, parentEntries)
+					entries = append(entries, entry)
+				}
+				f.entriesCache.Add(cacheKey, entries)
+				return entries
+			}
+		}
+	}
+
+	// No cache hit - compute from scratch.
+	// Walk up from dir to library root, collecting directories.
+	var dirs []string
+	current := dir
+	for {
+		// Check if we're still within the library root.
+		// nolint:staticcheck // QF1006 - we could make this the for condition
+		// but I don't think it improves readability
+		if !isPathInOrEqual(libraryRoot, current) {
+			break
+		}
+
+		dirs = append(dirs, current)
+
+		// Stop if we've reached the library root.
+		if current == libraryRoot {
+			break
+		}
+
+		parent := filepath.Dir(current)
+		if parent == current {
+			// Reached filesystem root without finding library root.
+			break
+		}
+		current = parent
+	}
+
+	// Reverse to get root-to-leaf order, so that deeper .stashignore files
+	// are evaluated after shallower ones in Accept.
+	for i, j := 0, len(dirs)-1; i < j; i, j = i+1, j-1 {
+		dirs[i], dirs[j] = dirs[j], dirs[i]
+	}
+
+	// Check each directory for .stashignore files.
+	var entries []*ignoreEntry
+	for _, d := range dirs {
+		if entry := f.getOrLoadIgnoreEntry(d); entry != nil {
+			entries = append(entries, entry)
+		}
+	}
+
+	// Cache the result (a nil slice is also cached, marking "no ignore files").
+	f.entriesCache.Add(cacheKey, entries)
+
+	return entries
+}
+
+// isPathInOrEqual reports whether path is equal to root or located inside it.
+// Both arguments are expected to be filepath.Clean-ed (callers clean them).
+func isPathInOrEqual(root, path string) bool {
+	if path == root {
+		return true
+	}
+	// Compare on a directory boundary so that e.g. "/a/bc" is not treated as
+	// inside "/a/b". Only append the separator when root does not already end
+	// with one: filepath.Clean leaves "/" (Unix) and `C:\` (Windows) with a
+	// trailing separator, and blindly appending another would make the prefix
+	// "//" and wrongly reject every child of the filesystem root.
+	prefix := root
+	if !strings.HasSuffix(prefix, string(filepath.Separator)) {
+		prefix += string(filepath.Separator)
+	}
+	return strings.HasPrefix(path, prefix)
+}
+
+// getOrLoadIgnoreEntry returns the cached ignore entry for a directory,
+// loading and compiling the directory's .stashignore file on first use.
+// It returns nil when the directory has no usable .stashignore (missing,
+// unreadable, or containing no patterns); that negative result is cached as
+// an entry with nil patterns so the filesystem is not re-checked.
+// Racing callers may both load the same file; the entries produced are
+// equivalent and the last Store wins, so this is benign.
+func (f *StashIgnoreFilter) getOrLoadIgnoreEntry(dir string) *ignoreEntry {
+	// Check cache first.
+	if cached, ok := f.cache.Load(dir); ok {
+		entry := cached.(*ignoreEntry)
+		if entry.patterns == nil {
+			return nil // Cached negative result.
+		}
+		return entry
+	}
+
+	// Try to load .stashignore from this directory.
+	stashIgnorePath := filepath.Join(dir, stashIgnoreFilename)
+	patterns, err := f.loadIgnoreFile(stashIgnorePath)
+	if err != nil {
+		// A missing file is the common case and not worth logging.
+		if !os.IsNotExist(err) {
+			logger.Warnf("Failed to load .stashignore from %s: %v", dir, err)
+		}
+		f.cache.Store(dir, &ignoreEntry{patterns: nil, dir: dir})
+		return nil
+	}
+	if patterns == nil {
+		// File exists but has no patterns (empty or only comments).
+		f.cache.Store(dir, &ignoreEntry{patterns: nil, dir: dir})
+		return nil
+	}
+
+	logger.Debugf("Loaded .stashignore from %s", dir)
+
+	entry := &ignoreEntry{
+		patterns: patterns,
+		dir:      dir,
+	}
+	f.cache.Store(dir, entry)
+	return entry
+}
+
+// loadIgnoreFile reads a .stashignore file and compiles its patterns.
+// It returns (nil, nil) when the file holds no usable patterns, and a
+// non-nil error (including "not exist" errors) when it cannot be read.
+func (f *StashIgnoreFilter) loadIgnoreFile(path string) (*ignore.GitIgnore, error) {
+	data, err := os.ReadFile(path)
+	if err != nil {
+		return nil, err
+	}
+
+	var patterns []string
+	for _, raw := range strings.Split(string(data), "\n") {
+		// Strip trailing whitespace and CR only; leading whitespace can be
+		// significant in a pattern, so it is preserved.
+		line := strings.TrimRight(raw, " \t\r")
+
+		// Skip blank lines.
+		if line == "" {
+			continue
+		}
+
+		// Skip comment lines. An escaped hash ("\#") begins with a backslash,
+		// not "#", so it falls through and is kept as a pattern.
+		if line[0] == '#' {
+			continue
+		}
+
+		patterns = append(patterns, line)
+	}
+
+	if len(patterns) == 0 {
+		// File exists but contains only blanks and/or comments.
+		return nil, nil
+	}
+
+	return ignore.CompileIgnoreLines(patterns...), nil
+}
diff --git a/pkg/file/stashignore_test.go b/pkg/file/stashignore_test.go
new file mode 100644
index 000000000..41668b51b
--- /dev/null
+++ b/pkg/file/stashignore_test.go
@@ -0,0 +1,523 @@
+package file
+
+import (
+ "context"
+ "io/fs"
+ "os"
+ "path/filepath"
+ "sort"
+ "testing"
+)
+
+// Helper to create an empty file, creating parent directories as needed.
+func createTestFile(t *testing.T, dir, name string) {
+	t.Helper()
+	// Delegate to createTestFileWithContent to avoid duplicating the
+	// mkdir+write logic; an empty content string yields an empty file.
+	createTestFileWithContent(t, dir, name, "")
+}
+
+// Helper to create a file with the given content, creating parent
+// directories as needed. Fails the test on any error.
+func createTestFileWithContent(t *testing.T, dir, name, content string) {
+	t.Helper()
+	target := filepath.Join(dir, name)
+	if mkErr := os.MkdirAll(filepath.Dir(target), 0755); mkErr != nil {
+		t.Fatalf("failed to create directory for %s: %v", target, mkErr)
+	}
+	if wrErr := os.WriteFile(target, []byte(content), 0644); wrErr != nil {
+		t.Fatalf("failed to create file %s: %v", target, wrErr)
+	}
+}
+
+// Helper to create a directory (including any missing parents).
+// Fails the test on error.
+func createTestDir(t *testing.T, dir, name string) {
+	t.Helper()
+	target := filepath.Join(dir, name)
+	if err := os.MkdirAll(target, 0755); err != nil {
+		t.Fatalf("failed to create directory %s: %v", target, err)
+	}
+}
+
+// walkAndFilter walks the directory tree and returns paths accepted by the filter.
+// Returns paths relative to root for easier assertion, sorted lexicographically.
+// It mirrors scanner behaviour by pruning rejected directories so that their
+// contents are never visited.
+func walkAndFilter(t *testing.T, root string, filter *StashIgnoreFilter) []string {
+	t.Helper()
+	var accepted []string
+	ctx := context.Background()
+
+	err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
+		if err != nil {
+			return err
+		}
+
+		// Skip the root directory itself.
+		if path == root {
+			return nil
+		}
+
+		info, err := d.Info()
+		if err != nil {
+			return err
+		}
+
+		// root doubles as the library root; zipFilePath is empty because the
+		// walk only visits real filesystem entries.
+		if filter.Accept(ctx, path, info, root, "") {
+			relPath, _ := filepath.Rel(root, path)
+			accepted = append(accepted, relPath)
+		} else if info.IsDir() {
+			// If directory is rejected, skip it.
+			return filepath.SkipDir
+		}
+
+		return nil
+	})
+
+	if err != nil {
+		t.Fatalf("walk failed: %v", err)
+	}
+
+	sort.Strings(accepted)
+	return accepted
+}
+
+// assertPathsEqual checks that the accepted paths match expected.
+// NOTE: expected is sorted in place (mutating the caller's slice); actual is
+// assumed to be pre-sorted, as returned by walkAndFilter.
+func assertPathsEqual(t *testing.T, expected, actual []string) {
+	t.Helper()
+	sort.Strings(expected)
+
+	if len(expected) != len(actual) {
+		t.Errorf("path count mismatch:\nexpected %d: %v\nactual %d: %v", len(expected), expected, len(actual), actual)
+		return
+	}
+
+	// Lengths match; compare element-wise and report every mismatch.
+	for i := range expected {
+		if expected[i] != actual[i] {
+			t.Errorf("path mismatch at index %d:\nexpected: %s\nactual: %s", i, expected[i], actual[i])
+		}
+	}
+}
+
+func TestStashIgnore_ExactFilename(t *testing.T) {
+ tmpDir := t.TempDir()
+
+ // Create test files.
+ createTestFile(t, tmpDir, "video1.mp4")
+ createTestFile(t, tmpDir, "video2.mp4")
+ createTestFile(t, tmpDir, "ignore_me.mp4")
+
+ // Create .stashignore that excludes exact filename.
+ createTestFileWithContent(t, tmpDir, ".stashignore", "ignore_me.mp4\n")
+
+ filter := NewStashIgnoreFilter()
+ accepted := walkAndFilter(t, tmpDir, filter)
+
+ expected := []string{
+ ".stashignore",
+ "video1.mp4",
+ "video2.mp4",
+ }
+
+ assertPathsEqual(t, expected, accepted)
+}
+
+func TestStashIgnore_WildcardPattern(t *testing.T) {
+ tmpDir := t.TempDir()
+
+ // Create test files.
+ createTestFile(t, tmpDir, "video1.mp4")
+ createTestFile(t, tmpDir, "video2.mp4")
+ createTestFile(t, tmpDir, "temp1.tmp")
+ createTestFile(t, tmpDir, "temp2.tmp")
+ createTestFile(t, tmpDir, "notes.log")
+
+ // Create .stashignore that excludes by extension.
+ createTestFileWithContent(t, tmpDir, ".stashignore", "*.tmp\n*.log\n")
+
+ filter := NewStashIgnoreFilter()
+ accepted := walkAndFilter(t, tmpDir, filter)
+
+ expected := []string{
+ ".stashignore",
+ "video1.mp4",
+ "video2.mp4",
+ }
+
+ assertPathsEqual(t, expected, accepted)
+}
+
+func TestStashIgnore_DirectoryExclusion(t *testing.T) {
+ tmpDir := t.TempDir()
+
+ // Create test files.
+ createTestFile(t, tmpDir, "video1.mp4")
+ createTestDir(t, tmpDir, "excluded_dir")
+ createTestFile(t, tmpDir, "excluded_dir/video2.mp4")
+ createTestFile(t, tmpDir, "excluded_dir/video3.mp4")
+ createTestDir(t, tmpDir, "included_dir")
+ createTestFile(t, tmpDir, "included_dir/video4.mp4")
+
+ // Create .stashignore that excludes a directory.
+ createTestFileWithContent(t, tmpDir, ".stashignore", "excluded_dir/\n")
+
+ filter := NewStashIgnoreFilter()
+ accepted := walkAndFilter(t, tmpDir, filter)
+
+ expected := []string{
+ ".stashignore",
+ "included_dir",
+ "included_dir/video4.mp4",
+ "video1.mp4",
+ }
+
+ assertPathsEqual(t, expected, accepted)
+}
+
+func TestStashIgnore_NegationPattern(t *testing.T) {
+ tmpDir := t.TempDir()
+
+ // Create test files.
+ createTestFile(t, tmpDir, "file1.tmp")
+ createTestFile(t, tmpDir, "file2.tmp")
+ createTestFile(t, tmpDir, "keep_this.tmp")
+
+ // Create .stashignore that excludes *.tmp but keeps one.
+ createTestFileWithContent(t, tmpDir, ".stashignore", "*.tmp\n!keep_this.tmp\n")
+
+ filter := NewStashIgnoreFilter()
+ accepted := walkAndFilter(t, tmpDir, filter)
+
+ expected := []string{
+ ".stashignore",
+ "keep_this.tmp",
+ }
+
+ assertPathsEqual(t, expected, accepted)
+}
+
+func TestStashIgnore_CommentsAndEmptyLines(t *testing.T) {
+ tmpDir := t.TempDir()
+
+ // Create test files.
+ createTestFile(t, tmpDir, "video1.mp4")
+ createTestFile(t, tmpDir, "ignore_me.mp4")
+
+ // Create .stashignore with comments and empty lines.
+ stashignore := `# This is a comment
+ignore_me.mp4
+
+# Another comment
+
+`
+ createTestFileWithContent(t, tmpDir, ".stashignore", stashignore)
+
+ filter := NewStashIgnoreFilter()
+ accepted := walkAndFilter(t, tmpDir, filter)
+
+ expected := []string{
+ ".stashignore",
+ "video1.mp4",
+ }
+
+ assertPathsEqual(t, expected, accepted)
+}
+
+func TestStashIgnore_NestedStashIgnoreFiles(t *testing.T) {
+ tmpDir := t.TempDir()
+
+ // Create test files.
+ createTestFile(t, tmpDir, "root_video.mp4")
+ createTestFile(t, tmpDir, "root_ignore.tmp")
+ createTestDir(t, tmpDir, "subdir")
+ createTestFile(t, tmpDir, "subdir/sub_video.mp4")
+ createTestFile(t, tmpDir, "subdir/sub_ignore.log")
+ createTestFile(t, tmpDir, "subdir/also_tmp.tmp")
+
+ // Root .stashignore excludes *.tmp.
+ createTestFileWithContent(t, tmpDir, ".stashignore", "*.tmp\n")
+
+ // Subdir .stashignore excludes *.log.
+ createTestFileWithContent(t, tmpDir, "subdir/.stashignore", "*.log\n")
+
+ filter := NewStashIgnoreFilter()
+ accepted := walkAndFilter(t, tmpDir, filter)
+
+ // *.tmp from root should apply everywhere.
+ // *.log from subdir should only apply in subdir.
+ expected := []string{
+ ".stashignore",
+ "root_video.mp4",
+ "subdir",
+ "subdir/.stashignore",
+ "subdir/sub_video.mp4",
+ }
+
+ assertPathsEqual(t, expected, accepted)
+}
+
+func TestStashIgnore_PathPattern(t *testing.T) {
+ tmpDir := t.TempDir()
+
+ // Create test files.
+ createTestFile(t, tmpDir, "video1.mp4")
+ createTestDir(t, tmpDir, "subdir")
+ createTestFile(t, tmpDir, "subdir/video2.mp4")
+ createTestFile(t, tmpDir, "subdir/skip_this.mp4")
+
+ // Create .stashignore that excludes a specific path.
+ createTestFileWithContent(t, tmpDir, ".stashignore", "subdir/skip_this.mp4\n")
+
+ filter := NewStashIgnoreFilter()
+ accepted := walkAndFilter(t, tmpDir, filter)
+
+ expected := []string{
+ ".stashignore",
+ "subdir",
+ "subdir/video2.mp4",
+ "video1.mp4",
+ }
+
+ assertPathsEqual(t, expected, accepted)
+}
+
+func TestStashIgnore_DoubleStarPattern(t *testing.T) {
+ tmpDir := t.TempDir()
+
+ // Create test files.
+ createTestFile(t, tmpDir, "video1.mp4")
+ createTestDir(t, tmpDir, "a")
+ createTestFile(t, tmpDir, "a/video2.mp4")
+ createTestDir(t, tmpDir, "a/temp")
+ createTestFile(t, tmpDir, "a/temp/video3.mp4")
+ createTestDir(t, tmpDir, "a/b")
+ createTestDir(t, tmpDir, "a/b/temp")
+ createTestFile(t, tmpDir, "a/b/temp/video4.mp4")
+
+ // Create .stashignore that excludes temp directories at any level.
+ createTestFileWithContent(t, tmpDir, ".stashignore", "**/temp/\n")
+
+ filter := NewStashIgnoreFilter()
+ accepted := walkAndFilter(t, tmpDir, filter)
+
+ expected := []string{
+ ".stashignore",
+ "a",
+ "a/b",
+ "a/video2.mp4",
+ "video1.mp4",
+ }
+
+ assertPathsEqual(t, expected, accepted)
+}
+
+func TestStashIgnore_LeadingSlashPattern(t *testing.T) {
+ tmpDir := t.TempDir()
+
+ // Create test files.
+ createTestFile(t, tmpDir, "ignore.mp4")
+ createTestDir(t, tmpDir, "subdir")
+ createTestFile(t, tmpDir, "subdir/ignore.mp4")
+
+ // Create .stashignore that excludes only at root level.
+ createTestFileWithContent(t, tmpDir, ".stashignore", "/ignore.mp4\n")
+
+ filter := NewStashIgnoreFilter()
+ accepted := walkAndFilter(t, tmpDir, filter)
+
+ // Only root ignore.mp4 should be excluded.
+ expected := []string{
+ ".stashignore",
+ "subdir",
+ "subdir/ignore.mp4",
+ }
+
+ assertPathsEqual(t, expected, accepted)
+}
+
+func TestStashIgnore_NoStashIgnoreFile(t *testing.T) {
+ tmpDir := t.TempDir()
+
+ // Create test files without any .stashignore.
+ createTestFile(t, tmpDir, "video1.mp4")
+ createTestFile(t, tmpDir, "video2.mp4")
+ createTestDir(t, tmpDir, "subdir")
+ createTestFile(t, tmpDir, "subdir/video3.mp4")
+
+ filter := NewStashIgnoreFilter()
+ accepted := walkAndFilter(t, tmpDir, filter)
+
+ // All files should be accepted.
+ expected := []string{
+ "subdir",
+ "subdir/video3.mp4",
+ "video1.mp4",
+ "video2.mp4",
+ }
+
+ assertPathsEqual(t, expected, accepted)
+}
+
+func TestStashIgnore_HiddenDirectories(t *testing.T) {
+ tmpDir := t.TempDir()
+
+ // Create test files including hidden directory.
+ createTestFile(t, tmpDir, "video1.mp4")
+ createTestDir(t, tmpDir, ".hidden")
+ createTestFile(t, tmpDir, ".hidden/video2.mp4")
+
+ // Create .stashignore that excludes hidden directories.
+ createTestFileWithContent(t, tmpDir, ".stashignore", ".*\n!.stashignore\n")
+
+ filter := NewStashIgnoreFilter()
+ accepted := walkAndFilter(t, tmpDir, filter)
+
+ expected := []string{
+ ".stashignore",
+ "video1.mp4",
+ }
+
+ assertPathsEqual(t, expected, accepted)
+}
+
+func TestStashIgnore_MultiplePatternsSameLine(t *testing.T) {
+ tmpDir := t.TempDir()
+
+ // Create test files.
+ createTestFile(t, tmpDir, "video1.mp4")
+ createTestFile(t, tmpDir, "file.tmp")
+ createTestFile(t, tmpDir, "file.log")
+ createTestFile(t, tmpDir, "file.bak")
+
+ // Each pattern should be on its own line.
+ createTestFileWithContent(t, tmpDir, ".stashignore", "*.tmp\n*.log\n*.bak\n")
+
+ filter := NewStashIgnoreFilter()
+ accepted := walkAndFilter(t, tmpDir, filter)
+
+ expected := []string{
+ ".stashignore",
+ "video1.mp4",
+ }
+
+ assertPathsEqual(t, expected, accepted)
+}
+
+func TestStashIgnore_TrailingSpaces(t *testing.T) {
+ tmpDir := t.TempDir()
+
+ // Create test files.
+ createTestFile(t, tmpDir, "video1.mp4")
+ createTestFile(t, tmpDir, "ignore_me.mp4")
+
+ // Pattern with trailing spaces (should be trimmed).
+ createTestFileWithContent(t, tmpDir, ".stashignore", "ignore_me.mp4 \n")
+
+ filter := NewStashIgnoreFilter()
+ accepted := walkAndFilter(t, tmpDir, filter)
+
+ expected := []string{
+ ".stashignore",
+ "video1.mp4",
+ }
+
+ assertPathsEqual(t, expected, accepted)
+}
+
+func TestStashIgnore_EscapedHash(t *testing.T) {
+ tmpDir := t.TempDir()
+
+ // Create test files.
+ createTestFile(t, tmpDir, "video1.mp4")
+ createTestFile(t, tmpDir, "#filename.mp4")
+
+ // Escaped hash should match literal # character.
+ createTestFileWithContent(t, tmpDir, ".stashignore", "\\#filename.mp4\n")
+
+ filter := NewStashIgnoreFilter()
+ accepted := walkAndFilter(t, tmpDir, filter)
+
+ expected := []string{
+ ".stashignore",
+ "video1.mp4",
+ }
+
+ assertPathsEqual(t, expected, accepted)
+}
+
+func TestStashIgnore_CaseSensitiveMatching(t *testing.T) {
+ tmpDir := t.TempDir()
+
+ // Create test files - use distinct names that work on all filesystems.
+ createTestFile(t, tmpDir, "video_lower.mp4")
+ createTestFile(t, tmpDir, "VIDEO_UPPER.mp4")
+ createTestFile(t, tmpDir, "other.avi")
+
+ // Pattern should match exactly (case-sensitive).
+ createTestFileWithContent(t, tmpDir, ".stashignore", "video_lower.mp4\n")
+
+ filter := NewStashIgnoreFilter()
+ accepted := walkAndFilter(t, tmpDir, filter)
+
+ // Only exact match is excluded.
+ expected := []string{
+ ".stashignore",
+ "VIDEO_UPPER.mp4",
+ "other.avi",
+ }
+
+ assertPathsEqual(t, expected, accepted)
+}
+
+func TestStashIgnore_ComplexScenario(t *testing.T) {
+ tmpDir := t.TempDir()
+
+ // Create a complex directory structure.
+ createTestFile(t, tmpDir, "video1.mp4")
+ createTestFile(t, tmpDir, "video2.avi")
+ createTestFile(t, tmpDir, "thumbnail.jpg")
+ createTestFile(t, tmpDir, "metadata.nfo")
+ createTestDir(t, tmpDir, "movies")
+ createTestFile(t, tmpDir, "movies/movie1.mp4")
+ createTestFile(t, tmpDir, "movies/movie1.nfo")
+ createTestDir(t, tmpDir, "movies/.thumbnails")
+ createTestFile(t, tmpDir, "movies/.thumbnails/thumb1.jpg")
+ createTestDir(t, tmpDir, "temp")
+ createTestFile(t, tmpDir, "temp/processing.mp4")
+ createTestDir(t, tmpDir, "backup")
+ createTestFile(t, tmpDir, "backup/video1.mp4.bak")
+
+ // Complex .stashignore.
+ stashignore := `# Ignore metadata files
+*.nfo
+
+# Ignore hidden directories
+.*
+!.stashignore
+
+# Ignore temp and backup directories
+temp/
+backup/
+
+# But keep thumbnails in specific location
+!movies/.thumbnails/
+`
+ createTestFileWithContent(t, tmpDir, ".stashignore", stashignore)
+
+ filter := NewStashIgnoreFilter()
+ accepted := walkAndFilter(t, tmpDir, filter)
+
+ expected := []string{
+ ".stashignore",
+ "movies",
+ "movies/.thumbnails",
+ "movies/.thumbnails/thumb1.jpg",
+ "movies/movie1.mp4",
+ "thumbnail.jpg",
+ "video1.mp4",
+ "video2.avi",
+ }
+
+ assertPathsEqual(t, expected, accepted)
+}
diff --git a/pkg/file/video/caption.go b/pkg/file/video/caption.go
index 43723864f..46317d90c 100644
--- a/pkg/file/video/caption.go
+++ b/pkg/file/video/caption.go
@@ -90,11 +90,20 @@ type CaptionUpdater interface {
UpdateCaptions(ctx context.Context, fileID models.FileID, captions []*models.VideoCaption) error
}
+// MatchesCaption returns true if the caption file matches the video file based on the filename
+func MatchesCaption(videoPath, captionPath string) bool {
+	// The video path with its extension replaced by a bare "." must equal the
+	// caption's prefix (its path up to and including the ".").
+	videoPrefix := strings.TrimSuffix(videoPath, filepath.Ext(videoPath)) + "."
+	return getCaptionPrefix(captionPath) == videoPrefix
+}
+
// associates captions to scene/s with the same basename
-func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manager, fqb models.FileFinder, w CaptionUpdater) {
+// returns true if the caption file was matched to a video file and processed, false otherwise
+func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manager, fqb models.FileFinder, w CaptionUpdater) bool {
captionLang := getCaptionsLangFromPath(captionPath)
captionPrefix := getCaptionPrefix(captionPath)
+ matched := false
if err := txn.WithTxn(ctx, txnMgr, func(ctx context.Context) error {
var err error
files, er := fqb.FindAllByPath(ctx, captionPrefix+"*", true)
@@ -117,28 +126,36 @@ func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manag
path := f.Base().Path
logger.Debugf("Matched captions to file %s", path)
+ matched = true
+
captions, er := w.GetCaptions(ctx, fileID)
- if er == nil {
- fileExt := filepath.Ext(captionPath)
- ext := fileExt[1:]
- if !IsLangInCaptions(captionLang, ext, captions) { // only update captions if language code is not present
- newCaption := &models.VideoCaption{
- LanguageCode: captionLang,
- Filename: filepath.Base(captionPath),
- CaptionType: ext,
- }
- captions = append(captions, newCaption)
- er = w.UpdateCaptions(ctx, fileID, captions)
- if er == nil {
- logger.Debugf("Updated captions for file %s. Added %s", path, captionLang)
- }
+ if er != nil {
+ return fmt.Errorf("getting captions for file %s: %w", path, er)
+ }
+
+ fileExt := filepath.Ext(captionPath)
+ ext := fileExt[1:]
+ if !IsLangInCaptions(captionLang, ext, captions) { // only update captions if language code is not present
+ newCaption := &models.VideoCaption{
+ LanguageCode: captionLang,
+ Filename: filepath.Base(captionPath),
+ CaptionType: ext,
}
+ captions = append(captions, newCaption)
+ er = w.UpdateCaptions(ctx, fileID, captions)
+ if er != nil {
+ return fmt.Errorf("updating captions for file %s: %w", path, er)
+ }
+
+ logger.Debugf("Updated captions for file %s. Added %s", path, captionLang)
}
}
return err
}); err != nil {
logger.Error(err.Error())
}
+
+ return matched
}
// CleanCaptions removes non existent/accessible language codes from captions
diff --git a/pkg/file/zip.go b/pkg/file/zip.go
index 5afcd5329..6d00c7e35 100644
--- a/pkg/file/zip.go
+++ b/pkg/file/zip.go
@@ -99,7 +99,9 @@ func (f *zipFS) rel(name string) (string, error) {
relName, err := filepath.Rel(f.zipPath, name)
if err != nil {
- return "", fmt.Errorf("internal error getting relative path: %w", err)
+ // if the path is not relative to the zip path, then it's not found in the zip file,
+ // so treat this as a file not found
+ return "", fs.ErrNotExist
}
// convert relName to use slash, since zip files do so regardless
diff --git a/pkg/fsutil/file.go b/pkg/fsutil/file.go
index 1d0c0c473..05a127129 100644
--- a/pkg/fsutil/file.go
+++ b/pkg/fsutil/file.go
@@ -148,7 +148,7 @@ func Touch(path string) error {
var (
replaceCharsRE = regexp.MustCompile(`[&=\\/:*"?_ ]`)
- removeCharsRE = regexp.MustCompile(`[^[:alnum:]-.]`)
+ removeCharsRE = regexp.MustCompile(`[^\p{L}\p{N}\-.]`)
multiHyphenRE = regexp.MustCompile(`\-+`)
)
diff --git a/pkg/fsutil/file_test.go b/pkg/fsutil/file_test.go
index 4d84f8a47..df1077df2 100644
--- a/pkg/fsutil/file_test.go
+++ b/pkg/fsutil/file_test.go
@@ -15,6 +15,9 @@ func TestSanitiseBasename(t *testing.T) {
{"multi-hyphen", `hyphened--name`, "hyphened-name-2da2a58f"},
{"replaced characters", `a&b=c\d/:e*"f?_ g`, "a-b-c-d-e-f-g-ffca6fb0"},
{"removed characters", `foo!!bar@@and, more`, "foobarand-more-7cee02ab"},
+ {"unicode cjk", `テスト`, "テスト-63b560db"},
+ {"unicode korean", `시험`, "시험-3fcc7beb"},
+ {"mixed unicode", `Test テスト`, "Test-テスト-366aff1e"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
diff --git a/pkg/fsutil/fs.go b/pkg/fsutil/fs.go
index 10666bb63..032bec53c 100644
--- a/pkg/fsutil/fs.go
+++ b/pkg/fsutil/fs.go
@@ -5,7 +5,6 @@ import (
"fmt"
"os"
"path/filepath"
- "strings"
"unicode"
)
@@ -27,18 +26,10 @@ func IsFsPathCaseSensitive(path string) (bool, error) {
if err != nil { // cannot be case flipped
return false, err
}
- i := strings.LastIndex(path, base)
- if i < 0 { // shouldn't happen
- return false, fmt.Errorf("could not case flip path %s", path)
- }
- flipped := []rune(path)
- for _, c := range fBase { // replace base of path with the flipped one ( we need to flip the base or last dir part )
- flipped[i] = c
- i++
- }
+ flippedPath := filepath.Join(filepath.Dir(path), fBase)
- fiCase, err := os.Stat(string(flipped))
+ fiCase, err := os.Stat(flippedPath)
if err != nil { // cannot stat the case flipped path
return true, nil // fs of path should be case sensitive
}
diff --git a/pkg/fsutil/fs_test.go b/pkg/fsutil/fs_test.go
index 522e95fa6..155e76ba5 100644
--- a/pkg/fsutil/fs_test.go
+++ b/pkg/fsutil/fs_test.go
@@ -41,4 +41,15 @@ func TestIsFsPathCaseSensitive_UnicodeByteLength(t *testing.T) {
}
// assert.True(t, r, "expected fs to be case sensitive")
+
+	// Ensure that subfolders of a folder with multi-byte chars do not cause a panic
+ path3 := filepath.Join(dir, "NoPanic ❤️")
+ makeDir(path3)
+ path4 := filepath.Join(path3, "Test")
+ makeDir(path4)
+
+ _, err = IsFsPathCaseSensitive(path4)
+ if err != nil {
+ t.Fatal(err)
+ }
}
diff --git a/pkg/gallery/import.go b/pkg/gallery/import.go
index 22f3e6c44..e33297bdb 100644
--- a/pkg/gallery/import.go
+++ b/pkg/gallery/import.go
@@ -28,8 +28,9 @@ type Importer struct {
Input jsonschema.Gallery
MissingRefBehaviour models.ImportMissingRefEnum
- ID int
- gallery models.Gallery
+ ID int
+ gallery models.Gallery
+ customFields map[string]interface{}
}
func (i *Importer) PreImport(ctx context.Context) error {
@@ -51,6 +52,8 @@ func (i *Importer) PreImport(ctx context.Context) error {
return err
}
+ i.customFields = i.Input.CustomFields
+
return nil
}
@@ -356,7 +359,11 @@ func (i *Importer) Create(ctx context.Context) (*int, error) {
for _, f := range i.gallery.Files.List() {
fileIDs = append(fileIDs, f.Base().ID)
}
- err := i.ReaderWriter.Create(ctx, &i.gallery, fileIDs)
+ err := i.ReaderWriter.Create(ctx, &models.CreateGalleryInput{
+ Gallery: &i.gallery,
+ FileIDs: fileIDs,
+ CustomFields: i.customFields,
+ })
if err != nil {
return nil, fmt.Errorf("error creating gallery: %v", err)
}
@@ -368,7 +375,12 @@ func (i *Importer) Create(ctx context.Context) (*int, error) {
func (i *Importer) Update(ctx context.Context, id int) error {
gallery := i.gallery
gallery.ID = id
- err := i.ReaderWriter.Update(ctx, &gallery)
+ err := i.ReaderWriter.Update(ctx, &models.UpdateGalleryInput{
+ Gallery: &gallery,
+ CustomFields: models.CustomFieldsInput{
+ Full: i.customFields,
+ },
+ })
if err != nil {
return fmt.Errorf("error updating existing gallery: %v", err)
}
diff --git a/pkg/gallery/scan.go b/pkg/gallery/scan.go
index 9d0313b17..7689bb9b6 100644
--- a/pkg/gallery/scan.go
+++ b/pkg/gallery/scan.go
@@ -17,14 +17,13 @@ type ScanCreatorUpdater interface {
FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Gallery, error)
GetFiles(ctx context.Context, relatedID int) ([]models.File, error)
- Create(ctx context.Context, newGallery *models.Gallery, fileIDs []models.FileID) error
+ models.GalleryCreator
UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error)
AddFileID(ctx context.Context, id int, fileID models.FileID) error
}
type ScanSceneFinderUpdater interface {
FindByPath(ctx context.Context, p string) ([]*models.Scene, error)
- Update(ctx context.Context, updatedScene *models.Scene) error
AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error
}
@@ -80,7 +79,10 @@ func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models.
logger.Infof("%s doesn't exist. Creating new gallery...", f.Base().Path)
- if err := h.CreatorUpdater.Create(ctx, &newGallery, []models.FileID{baseFile.ID}); err != nil {
+ if err := h.CreatorUpdater.Create(ctx, &models.CreateGalleryInput{
+ Gallery: &newGallery,
+ FileIDs: []models.FileID{baseFile.ID},
+ }); err != nil {
return fmt.Errorf("creating new gallery: %w", err)
}
@@ -132,13 +134,14 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.
if err := h.CreatorUpdater.AddFileID(ctx, i.ID, f.Base().ID); err != nil {
return fmt.Errorf("adding file to gallery: %w", err)
}
- // update updated_at time
- if _, err := h.CreatorUpdater.UpdatePartial(ctx, i.ID, models.NewGalleryPartial()); err != nil {
- return fmt.Errorf("updating gallery: %w", err)
- }
}
if !found || updateExisting {
+ // update updated_at time when file association or content changes
+ if _, err := h.CreatorUpdater.UpdatePartial(ctx, i.ID, models.NewGalleryPartial()); err != nil {
+ return fmt.Errorf("updating gallery: %w", err)
+ }
+
h.PluginCache.RegisterPostHooks(ctx, i.ID, hook.GalleryUpdatePost, nil, nil)
}
}
diff --git a/pkg/gallery/scan_test.go b/pkg/gallery/scan_test.go
new file mode 100644
index 000000000..4a89206e3
--- /dev/null
+++ b/pkg/gallery/scan_test.go
@@ -0,0 +1,108 @@
+package gallery
+
+import (
+ "context"
+ "testing"
+
+ "github.com/stashapp/stash/pkg/models"
+ "github.com/stashapp/stash/pkg/models/mocks"
+ "github.com/stashapp/stash/pkg/plugin"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/mock"
+)
+
+func TestAssociateExisting_UpdatePartialOnContentChange(t *testing.T) {
+ const (
+ testGalleryID = 1
+ testFileID = 100
+ )
+
+ existingFile := &models.BaseFile{ID: models.FileID(testFileID), Path: "test.zip"}
+
+ makeGallery := func() *models.Gallery {
+ return &models.Gallery{
+ ID: testGalleryID,
+ Files: models.NewRelatedFiles([]models.File{existingFile}),
+ }
+ }
+
+ tests := []struct {
+ name string
+ updateExisting bool
+ expectUpdate bool
+ }{
+ {
+ name: "calls UpdatePartial when file content changed",
+ updateExisting: true,
+ expectUpdate: true,
+ },
+ {
+ name: "skips UpdatePartial when file unchanged and already associated",
+ updateExisting: false,
+ expectUpdate: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ db := mocks.NewDatabase()
+ db.Gallery.On("GetFiles", mock.Anything, testGalleryID).Return([]models.File{existingFile}, nil)
+
+ if tt.expectUpdate {
+ db.Gallery.On("UpdatePartial", mock.Anything, testGalleryID, mock.Anything).
+ Return(&models.Gallery{ID: testGalleryID}, nil)
+ }
+
+ h := &ScanHandler{
+ CreatorUpdater: db.Gallery,
+ PluginCache: &plugin.Cache{},
+ }
+
+ db.WithTxnCtx(func(ctx context.Context) {
+ err := h.associateExisting(ctx, []*models.Gallery{makeGallery()}, existingFile, tt.updateExisting)
+ assert.NoError(t, err)
+ })
+
+ if tt.expectUpdate {
+ db.Gallery.AssertCalled(t, "UpdatePartial", mock.Anything, testGalleryID, mock.Anything)
+ } else {
+ db.Gallery.AssertNotCalled(t, "UpdatePartial", mock.Anything, mock.Anything, mock.Anything)
+ }
+ })
+ }
+}
+
+func TestAssociateExisting_UpdatePartialOnNewFile(t *testing.T) {
+ const (
+ testGalleryID = 1
+ existFileID = 100
+ newFileID = 200
+ )
+
+ existingFile := &models.BaseFile{ID: models.FileID(existFileID), Path: "existing.zip"}
+ newFile := &models.BaseFile{ID: models.FileID(newFileID), Path: "new.zip"}
+
+ gallery := &models.Gallery{
+ ID: testGalleryID,
+ Files: models.NewRelatedFiles([]models.File{existingFile}),
+ }
+
+ db := mocks.NewDatabase()
+ db.Gallery.On("GetFiles", mock.Anything, testGalleryID).Return([]models.File{existingFile}, nil)
+ db.Gallery.On("AddFileID", mock.Anything, testGalleryID, models.FileID(newFileID)).Return(nil)
+ db.Gallery.On("UpdatePartial", mock.Anything, testGalleryID, mock.Anything).
+ Return(&models.Gallery{ID: testGalleryID}, nil)
+
+ h := &ScanHandler{
+ CreatorUpdater: db.Gallery,
+ PluginCache: &plugin.Cache{},
+ }
+
+ db.WithTxnCtx(func(ctx context.Context) {
+ err := h.associateExisting(ctx, []*models.Gallery{gallery}, newFile, false)
+ assert.NoError(t, err)
+ })
+
+ db.Gallery.AssertCalled(t, "AddFileID", mock.Anything, testGalleryID, models.FileID(newFileID))
+ db.Gallery.AssertCalled(t, "UpdatePartial", mock.Anything, testGalleryID, mock.Anything)
+}
diff --git a/pkg/group/create.go b/pkg/group/create.go
index 56d6b7a4e..9cc578b23 100644
--- a/pkg/group/create.go
+++ b/pkg/group/create.go
@@ -12,27 +12,37 @@ var (
ErrHierarchyLoop = errors.New("a group cannot be contained by one of its subgroups")
)
-func (s *Service) Create(ctx context.Context, group *models.Group, frontimageData []byte, backimageData []byte) error {
+func (s *Service) Create(ctx context.Context, input *models.CreateGroupInput) error {
r := s.Repository
+ group := input.Group
if err := s.validateCreate(ctx, group); err != nil {
return err
}
- err := r.Create(ctx, group)
+ err := r.Create(ctx, input.Group)
if err != nil {
return err
}
- // update image table
- if len(frontimageData) > 0 {
- if err := r.UpdateFrontImage(ctx, group.ID, frontimageData); err != nil {
+ // set custom fields
+ if len(input.CustomFields) > 0 {
+ if err := r.SetCustomFields(ctx, group.ID, models.CustomFieldsInput{
+ Full: input.CustomFields,
+ }); err != nil {
return err
}
}
- if len(backimageData) > 0 {
- if err := r.UpdateBackImage(ctx, group.ID, backimageData); err != nil {
+ // update image table
+ if len(input.FrontImageData) > 0 {
+ if err := r.UpdateFrontImage(ctx, group.ID, input.FrontImageData); err != nil {
+ return err
+ }
+ }
+
+ if len(input.BackImageData) > 0 {
+ if err := r.UpdateBackImage(ctx, group.ID, input.BackImageData); err != nil {
return err
}
}
diff --git a/pkg/group/export.go b/pkg/group/export.go
index 418ce7bed..0a56fbdbb 100644
--- a/pkg/group/export.go
+++ b/pkg/group/export.go
@@ -11,61 +11,67 @@ import (
"github.com/stashapp/stash/pkg/utils"
)
-type ImageGetter interface {
- GetFrontImage(ctx context.Context, movieID int) ([]byte, error)
- GetBackImage(ctx context.Context, movieID int) ([]byte, error)
+type GroupExportReader interface {
+ GetFrontImage(ctx context.Context, groupID int) ([]byte, error)
+ GetBackImage(ctx context.Context, groupID int) ([]byte, error)
+ GetCustomFields(ctx context.Context, groupID int) (map[string]interface{}, error)
}
-// ToJSON converts a Movie into its JSON equivalent.
-func ToJSON(ctx context.Context, reader ImageGetter, studioReader models.StudioGetter, movie *models.Group) (*jsonschema.Group, error) {
- newMovieJSON := jsonschema.Group{
- Name: movie.Name,
- Aliases: movie.Aliases,
- Director: movie.Director,
- Synopsis: movie.Synopsis,
- URLs: movie.URLs.List(),
- CreatedAt: json.JSONTime{Time: movie.CreatedAt},
- UpdatedAt: json.JSONTime{Time: movie.UpdatedAt},
+// ToJSON converts a Group into its JSON equivalent.
+func ToJSON(ctx context.Context, reader GroupExportReader, studioReader models.StudioGetter, group *models.Group) (*jsonschema.Group, error) {
+ newGroupJSON := jsonschema.Group{
+ Name: group.Name,
+ Aliases: group.Aliases,
+ Director: group.Director,
+ Synopsis: group.Synopsis,
+ URLs: group.URLs.List(),
+ CreatedAt: json.JSONTime{Time: group.CreatedAt},
+ UpdatedAt: json.JSONTime{Time: group.UpdatedAt},
}
- if movie.Date != nil {
- newMovieJSON.Date = movie.Date.String()
+ if group.Date != nil {
+ newGroupJSON.Date = group.Date.String()
}
- if movie.Rating != nil {
- newMovieJSON.Rating = *movie.Rating
+ if group.Rating != nil {
+ newGroupJSON.Rating = *group.Rating
}
- if movie.Duration != nil {
- newMovieJSON.Duration = *movie.Duration
+ if group.Duration != nil {
+ newGroupJSON.Duration = *group.Duration
}
- if movie.StudioID != nil {
- studio, err := studioReader.Find(ctx, *movie.StudioID)
+ if group.StudioID != nil {
+ studio, err := studioReader.Find(ctx, *group.StudioID)
if err != nil {
return nil, fmt.Errorf("error getting movie studio: %v", err)
}
if studio != nil {
- newMovieJSON.Studio = studio.Name
+ newGroupJSON.Studio = studio.Name
}
}
- frontImage, err := reader.GetFrontImage(ctx, movie.ID)
+ frontImage, err := reader.GetFrontImage(ctx, group.ID)
if err != nil {
logger.Errorf("Error getting movie front image: %v", err)
}
if len(frontImage) > 0 {
- newMovieJSON.FrontImage = utils.GetBase64StringFromData(frontImage)
+ newGroupJSON.FrontImage = utils.GetBase64StringFromData(frontImage)
}
- backImage, err := reader.GetBackImage(ctx, movie.ID)
+ backImage, err := reader.GetBackImage(ctx, group.ID)
if err != nil {
logger.Errorf("Error getting movie back image: %v", err)
}
if len(backImage) > 0 {
- newMovieJSON.BackImage = utils.GetBase64StringFromData(backImage)
+ newGroupJSON.BackImage = utils.GetBase64StringFromData(backImage)
}
- return &newMovieJSON, nil
+ newGroupJSON.CustomFields, err = reader.GetCustomFields(ctx, group.ID)
+ if err != nil {
+ return nil, fmt.Errorf("getting group custom fields: %v", err)
+ }
+
+ return &newGroupJSON, nil
}
diff --git a/pkg/group/export_test.go b/pkg/group/export_test.go
index 5f8d9f7dc..bff50de5e 100644
--- a/pkg/group/export_test.go
+++ b/pkg/group/export_test.go
@@ -8,24 +8,26 @@ import (
"github.com/stashapp/stash/pkg/models/jsonschema"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/mock"
"testing"
"time"
)
const (
- movieID = 1
- emptyID = 2
- errFrontImageID = 3
- errBackImageID = 4
- errStudioMovieID = 5
- missingStudioMovieID = 6
+ movieID = iota + 1
+ emptyID
+ errFrontImageID
+ errBackImageID
+ errStudioMovieID
+ missingStudioMovieID
+ errCustomFieldsID
)
const (
- studioID = 1
- missingStudioID = 2
- errStudioID = 3
+ studioID = iota + 1
+ missingStudioID
+ errStudioID
)
const movieName = "testMovie"
@@ -51,6 +53,11 @@ const (
var (
frontImageBytes = []byte("frontImageBytes")
backImageBytes = []byte("backImageBytes")
+
+ emptyCustomFields = make(map[string]interface{})
+ customFields = map[string]interface{}{
+ "customField1": "customValue1",
+ }
)
var movieStudio models.Studio = models.Studio{
@@ -88,7 +95,7 @@ func createEmptyMovie(id int) models.Group {
}
}
-func createFullJSONMovie(studio, frontImage, backImage string) *jsonschema.Group {
+func createFullJSONMovie(studio, frontImage, backImage string, customFields map[string]interface{}) *jsonschema.Group {
return &jsonschema.Group{
Name: movieName,
Aliases: movieAliases,
@@ -107,6 +114,7 @@ func createFullJSONMovie(studio, frontImage, backImage string) *jsonschema.Group
UpdatedAt: json.JSONTime{
Time: updateTime,
},
+ CustomFields: customFields,
}
}
@@ -119,13 +127,15 @@ func createEmptyJSONMovie() *jsonschema.Group {
UpdatedAt: json.JSONTime{
Time: updateTime,
},
+ CustomFields: emptyCustomFields,
}
}
type testScenario struct {
- movie models.Group
- expected *jsonschema.Group
- err bool
+ movie models.Group
+ customFields map[string]interface{}
+ expected *jsonschema.Group
+ err bool
}
var scenarios []testScenario
@@ -134,36 +144,48 @@ func initTestTable() {
scenarios = []testScenario{
{
createFullMovie(movieID, studioID),
- createFullJSONMovie(studioName, frontImage, backImage),
+ customFields,
+ createFullJSONMovie(studioName, frontImage, backImage, customFields),
false,
},
{
createEmptyMovie(emptyID),
+ emptyCustomFields,
createEmptyJSONMovie(),
false,
},
{
createFullMovie(errFrontImageID, studioID),
- createFullJSONMovie(studioName, "", backImage),
+ emptyCustomFields,
+ createFullJSONMovie(studioName, "", backImage, emptyCustomFields),
// failure to get front image should not cause error
false,
},
{
createFullMovie(errBackImageID, studioID),
- createFullJSONMovie(studioName, frontImage, ""),
+ emptyCustomFields,
+ createFullJSONMovie(studioName, frontImage, "", emptyCustomFields),
// failure to get back image should not cause error
false,
},
{
createFullMovie(errStudioMovieID, errStudioID),
+ emptyCustomFields,
nil,
true,
},
{
createFullMovie(missingStudioMovieID, missingStudioID),
- createFullJSONMovie("", frontImage, backImage),
+ emptyCustomFields,
+ createFullJSONMovie("", frontImage, backImage, emptyCustomFields),
false,
},
+ {
+ createFullMovie(errCustomFieldsID, studioID),
+ customFields,
+ nil,
+ true,
+ },
}
}
@@ -179,6 +201,7 @@ func TestToJSON(t *testing.T) {
db.Group.On("GetFrontImage", testCtx, emptyID).Return(nil, nil).Once().Maybe()
db.Group.On("GetFrontImage", testCtx, errFrontImageID).Return(nil, imageErr).Once()
db.Group.On("GetFrontImage", testCtx, errBackImageID).Return(frontImageBytes, nil).Once()
+ db.Group.On("GetFrontImage", testCtx, errCustomFieldsID).Return(nil, nil).Once()
db.Group.On("GetBackImage", testCtx, movieID).Return(backImageBytes, nil).Once()
db.Group.On("GetBackImage", testCtx, missingStudioMovieID).Return(backImageBytes, nil).Once()
@@ -186,6 +209,11 @@ func TestToJSON(t *testing.T) {
db.Group.On("GetBackImage", testCtx, errBackImageID).Return(nil, imageErr).Once()
db.Group.On("GetBackImage", testCtx, errFrontImageID).Return(backImageBytes, nil).Maybe()
db.Group.On("GetBackImage", testCtx, errStudioMovieID).Return(backImageBytes, nil).Maybe()
+ db.Group.On("GetBackImage", testCtx, errCustomFieldsID).Return(nil, nil).Once()
+
+ db.Group.On("GetCustomFields", testCtx, movieID).Return(customFields, nil).Once()
+ db.Group.On("GetCustomFields", testCtx, errCustomFieldsID).Return(nil, errors.New("error getting custom fields")).Once()
+ db.Group.On("GetCustomFields", testCtx, mock.Anything).Return(emptyCustomFields, nil).Times(4)
studioErr := errors.New("error getting studio")
diff --git a/pkg/group/import.go b/pkg/group/import.go
index d7acad47c..1a332bac2 100644
--- a/pkg/group/import.go
+++ b/pkg/group/import.go
@@ -14,6 +14,7 @@ import (
type ImporterReaderWriter interface {
models.GroupCreatorUpdater
+ models.CustomFieldsWriter
FindByName(ctx context.Context, name string, nocase bool) (*models.Group, error)
}
@@ -233,6 +234,14 @@ func (i *Importer) PostImport(ctx context.Context, id int) error {
}
}
+ if len(i.Input.CustomFields) > 0 {
+ if err := i.ReaderWriter.SetCustomFields(ctx, id, models.CustomFieldsInput{
+ Full: i.Input.CustomFields,
+ }); err != nil {
+ return fmt.Errorf("error setting custom fields: %v", err)
+ }
+ }
+
if len(i.frontImageData) > 0 {
if err := i.ReaderWriter.UpdateFrontImage(ctx, id, i.frontImageData); err != nil {
return fmt.Errorf("error setting group front image: %v", err)
diff --git a/pkg/group/import_test.go b/pkg/group/import_test.go
index 387ceb87e..006c91327 100644
--- a/pkg/group/import_test.go
+++ b/pkg/group/import_test.go
@@ -259,17 +259,29 @@ func TestImporterPostImport(t *testing.T) {
db := mocks.NewDatabase()
i := Importer{
- ReaderWriter: db.Group,
- StudioWriter: db.Studio,
+ ReaderWriter: db.Group,
+ StudioWriter: db.Studio,
+ Input: jsonschema.Group{
+ CustomFields: customFields,
+ },
frontImageData: frontImageBytes,
backImageData: backImageBytes,
}
updateMovieImageErr := errors.New("UpdateImages error")
+ customFieldsErr := errors.New("SetCustomFields error")
+
+ customFieldsInput := models.CustomFieldsInput{
+ Full: customFields,
+ }
db.Group.On("UpdateFrontImage", testCtx, movieID, frontImageBytes).Return(nil).Once()
- db.Group.On("UpdateBackImage", testCtx, movieID, backImageBytes).Return(nil).Once()
db.Group.On("UpdateFrontImage", testCtx, errImageID, frontImageBytes).Return(updateMovieImageErr).Once()
+ db.Group.On("UpdateBackImage", testCtx, movieID, backImageBytes).Return(nil).Once()
+
+ db.Group.On("SetCustomFields", testCtx, movieID, customFieldsInput).Return(nil).Once()
+ db.Group.On("SetCustomFields", testCtx, errImageID, customFieldsInput).Return(nil).Once()
+ db.Group.On("SetCustomFields", testCtx, errCustomFieldsID, customFieldsInput).Return(customFieldsErr).Once()
err := i.PostImport(testCtx, movieID)
assert.Nil(t, err)
@@ -277,6 +289,9 @@ func TestImporterPostImport(t *testing.T) {
err = i.PostImport(testCtx, errImageID)
assert.NotNil(t, err)
+ err = i.PostImport(testCtx, errCustomFieldsID)
+ assert.NotNil(t, err)
+
db.AssertExpectations(t)
}
diff --git a/pkg/group/service.go b/pkg/group/service.go
index ff6e03541..37094665a 100644
--- a/pkg/group/service.go
+++ b/pkg/group/service.go
@@ -10,6 +10,7 @@ type CreatorUpdater interface {
models.GroupGetter
models.GroupCreator
models.GroupUpdater
+ models.CustomFieldsWriter
models.ContainingGroupLoader
models.SubGroupLoader
diff --git a/pkg/hash/imagephash/phash.go b/pkg/hash/imagephash/phash.go
index 73e8e3667..0af5adec9 100644
--- a/pkg/hash/imagephash/phash.go
+++ b/pkg/hash/imagephash/phash.go
@@ -3,10 +3,9 @@ package imagephash
import (
"bytes"
"context"
+ "errors"
"fmt"
"image"
- "path/filepath"
- "strings"
"github.com/corona10/goimagehash"
"github.com/stashapp/stash/pkg/ffmpeg"
@@ -32,17 +31,9 @@ func Generate(encoder *ffmpeg.FFMpeg, imageFile *models.ImageFile) (*uint64, err
}
// loadImage loads an image from disk and decodes it.
-// For AVIF files, ffmpeg is used to convert to BMP first since Go has no built-in AVIF decoder.
+// Where Go has no built-in decoder for a specific format, ffmpeg is used to convert to BMP first.
func loadImage(encoder *ffmpeg.FFMpeg, imageFile *models.ImageFile) (image.Image, error) {
- ext := strings.ToLower(filepath.Ext(imageFile.Path))
- if ext == ".avif" {
- // AVIF in zip files is not supported - ffmpeg cannot read files inside zips
- if imageFile.Base().ZipFileID != nil {
- return nil, fmt.Errorf("AVIF images in zip files are not supported for phash generation")
- }
- return loadImageFFmpeg(encoder, imageFile.Path)
- }
-
+ // try to load with Go's built-in decoders first for better performance
reader, err := imageFile.Open(&file.OsFS{})
if err != nil {
return nil, err
@@ -55,6 +46,15 @@ func loadImage(encoder *ffmpeg.FFMpeg, imageFile *models.ImageFile) (image.Image
}
img, _, err := image.Decode(buf)
+ if errors.Is(err, image.ErrFormat) {
+ // try ffmpeg as a fallback for unsupported formats
+ // ffmpeg cannot read files inside zips
+ if imageFile.Base().ZipFileID != nil {
+ return nil, fmt.Errorf("ffmpeg fallback unsupported for images in zip files")
+ }
+ return loadImageFFmpeg(encoder, imageFile.Path)
+ }
+
if err != nil {
return nil, fmt.Errorf("decoding image: %w", err)
}
diff --git a/pkg/image/export.go b/pkg/image/export.go
index fdba6165c..eb5d5da27 100644
--- a/pkg/image/export.go
+++ b/pkg/image/export.go
@@ -2,16 +2,21 @@ package image
import (
"context"
+ "fmt"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/json"
"github.com/stashapp/stash/pkg/models/jsonschema"
)
+type ExportReader interface {
+ models.CustomFieldsReader
+}
+
// ToBasicJSON converts a image object into its JSON object equivalent. It
// does not convert the relationships to other objects, with the exception
// of cover image.
-func ToBasicJSON(image *models.Image) *jsonschema.Image {
+func ToBasicJSON(ctx context.Context, reader ExportReader, image *models.Image) (*jsonschema.Image, error) {
newImageJSON := jsonschema.Image{
Title: image.Title,
Code: image.Code,
@@ -33,11 +38,17 @@ func ToBasicJSON(image *models.Image) *jsonschema.Image {
newImageJSON.Organized = image.Organized
newImageJSON.OCounter = image.OCounter
+ var err error
+ newImageJSON.CustomFields, err = reader.GetCustomFields(ctx, image.ID)
+ if err != nil {
+ return nil, fmt.Errorf("getting image custom fields: %v", err)
+ }
+
for _, f := range image.Files.List() {
newImageJSON.Files = append(newImageJSON.Files, f.Base().Path)
}
- return &newImageJSON
+ return &newImageJSON, nil
}
// GetStudioName returns the name of the provided image's studio. It returns an
diff --git a/pkg/image/export_test.go b/pkg/image/export_test.go
index 6adaf1d33..d0d36afbb 100644
--- a/pkg/image/export_test.go
+++ b/pkg/image/export_test.go
@@ -29,6 +29,10 @@ var (
dateObj, _ = models.ParseDate(date)
organized = true
ocounter = 2
+
+ customFields = map[string]interface{}{
+ "customField1": "customValue1",
+ }
)
const (
@@ -60,7 +64,7 @@ func createFullImage(id int) models.Image {
}
}
-func createFullJSONImage() *jsonschema.Image {
+func createFullJSONImage(customFields map[string]interface{}) *jsonschema.Image {
return &jsonschema.Image{
Title: title,
OCounter: ocounter,
@@ -75,28 +79,40 @@ func createFullJSONImage() *jsonschema.Image {
UpdatedAt: json.JSONTime{
Time: updateTime,
},
+ CustomFields: customFields,
}
}
type basicTestScenario struct {
- input models.Image
- expected *jsonschema.Image
+ input models.Image
+ customFields map[string]interface{}
+ expected *jsonschema.Image
}
var scenarios = []basicTestScenario{
{
createFullImage(imageID),
- createFullJSONImage(),
+ customFields,
+ createFullJSONImage(customFields),
},
}
func TestToJSON(t *testing.T) {
+ db := mocks.NewDatabase()
+ db.Image.On("GetCustomFields", testCtx, imageID).Return(customFields, nil).Once()
+
for i, s := range scenarios {
image := s.input
- json := ToBasicJSON(&image)
+ json, err := ToBasicJSON(testCtx, db.Image, &image)
+ if err != nil {
+ t.Errorf("[%d] unexpected error: %s", i, err.Error())
+ continue
+ }
assert.Equal(t, s.expected, json, "[%d]", i)
}
+
+ db.AssertExpectations(t)
}
func createStudioImage(studioID int) models.Image {
diff --git a/pkg/image/import.go b/pkg/image/import.go
index c7ef7f00c..d8dfa987f 100644
--- a/pkg/image/import.go
+++ b/pkg/image/import.go
@@ -31,8 +31,9 @@ type Importer struct {
Input jsonschema.Image
MissingRefBehaviour models.ImportMissingRefEnum
- ID int
- image models.Image
+ ID int
+ image models.Image
+ customFields map[string]interface{}
}
func (i *Importer) PreImport(ctx context.Context) error {
@@ -58,6 +59,8 @@ func (i *Importer) PreImport(ctx context.Context) error {
return err
}
+ i.customFields = i.Input.CustomFields
+
return nil
}
@@ -344,7 +347,11 @@ func (i *Importer) Create(ctx context.Context) (*int, error) {
fileIDs = append(fileIDs, f.Base().ID)
}
- err := i.ReaderWriter.Create(ctx, &i.image, fileIDs)
+ err := i.ReaderWriter.Create(ctx, &models.CreateImageInput{
+ Image: &i.image,
+ FileIDs: fileIDs,
+ CustomFields: i.customFields,
+ })
if err != nil {
return nil, fmt.Errorf("error creating image: %v", err)
}
diff --git a/pkg/image/import_test.go b/pkg/image/import_test.go
index 5d01d4b97..a693c4568 100644
--- a/pkg/image/import_test.go
+++ b/pkg/image/import_test.go
@@ -45,7 +45,8 @@ func TestImporterPreImportWithStudio(t *testing.T) {
i := Importer{
StudioWriter: db.Studio,
Input: jsonschema.Image{
- Studio: existingStudioName,
+ Studio: existingStudioName,
+ CustomFields: customFields,
},
}
@@ -57,6 +58,7 @@ func TestImporterPreImportWithStudio(t *testing.T) {
err := i.PreImport(testCtx)
assert.Nil(t, err)
assert.Equal(t, existingStudioID, *i.image.StudioID)
+ assert.Equal(t, customFields, i.customFields)
i.Input.Studio = existingStudioErr
err = i.PreImport(testCtx)
diff --git a/pkg/image/query.go b/pkg/image/query.go
index b9b9e6628..958c9de9b 100644
--- a/pkg/image/query.go
+++ b/pkg/image/query.go
@@ -2,7 +2,9 @@ package image
import (
"context"
+ "path/filepath"
"strconv"
+ "strings"
"github.com/stashapp/stash/pkg/models"
)
@@ -46,6 +48,35 @@ func Query(ctx context.Context, qb Queryer, imageFilter *models.ImageFilterType,
return images, nil
}
+// FilterFromPaths creates a ImageFilterType that filters using the provided
+// paths.
+func FilterFromPaths(paths []string) *models.ImageFilterType {
+ ret := &models.ImageFilterType{}
+ or := ret
+ sep := string(filepath.Separator)
+
+ for _, p := range paths {
+ if !strings.HasSuffix(p, sep) {
+ p += sep
+ }
+
+ if ret.Path == nil {
+ or = ret
+ } else {
+ newOr := &models.ImageFilterType{}
+ or.Or = newOr
+ or = newOr
+ }
+
+ or.Path = &models.StringCriterionInput{
+ Modifier: models.CriterionModifierEquals,
+ Value: p + "%",
+ }
+ }
+
+ return ret
+}
+
func CountByPerformerID(ctx context.Context, r QueryCounter, id int) (int, error) {
filter := &models.ImageFilterType{
Performers: &models.MultiCriterionInput{
diff --git a/pkg/image/scan.go b/pkg/image/scan.go
index a6002057f..a1844bd38 100644
--- a/pkg/image/scan.go
+++ b/pkg/image/scan.go
@@ -7,6 +7,7 @@ import (
"os"
"path/filepath"
"slices"
+ "strings"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
@@ -27,7 +28,7 @@ type ScanCreatorUpdater interface {
GetFiles(ctx context.Context, relatedID int) ([]models.File, error)
GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error)
- Create(ctx context.Context, newImage *models.Image, fileIDs []models.FileID) error
+ Create(ctx context.Context, newImage *models.CreateImageInput) error
UpdatePartial(ctx context.Context, id int, updatedImage models.ImagePartial) (*models.Image, error)
AddFileID(ctx context.Context, id int, fileID models.FileID) error
}
@@ -35,10 +36,15 @@ type ScanCreatorUpdater interface {
type GalleryFinderCreator interface {
FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Gallery, error)
FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Gallery, error)
- Create(ctx context.Context, newObject *models.Gallery, fileIDs []models.FileID) error
+ models.GalleryCreator
UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error)
}
+type ScanSceneFinderUpdater interface {
+ FindByPath(ctx context.Context, p string) ([]*models.Scene, error)
+ AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error
+}
+
type ScanConfig interface {
GetCreateGalleriesFromFolders() bool
}
@@ -48,8 +54,9 @@ type ScanGenerator interface {
}
type ScanHandler struct {
- CreatorUpdater ScanCreatorUpdater
- GalleryFinder GalleryFinderCreator
+ CreatorUpdater ScanCreatorUpdater
+ GalleryFinder GalleryFinderCreator
+ SceneFinderUpdater ScanSceneFinderUpdater
ScanGenerator ScanGenerator
@@ -62,19 +69,19 @@ type ScanHandler struct {
func (h *ScanHandler) validate() error {
if h.CreatorUpdater == nil {
- return errors.New("CreatorUpdater is required")
+ return errors.New("internal error: CreatorUpdater is required")
}
if h.ScanGenerator == nil {
- return errors.New("ScanGenerator is required")
+ return errors.New("internal error: ScanGenerator is required")
}
if h.GalleryFinder == nil {
- return errors.New("GalleryFinder is required")
+ return errors.New("internal error: GalleryFinder is required")
}
if h.ScanConfig == nil {
- return errors.New("ScanConfig is required")
+ return errors.New("internal error: ScanConfig is required")
}
if h.Paths == nil {
- return errors.New("Paths is required")
+ return errors.New("internal error: Paths is required")
}
return nil
@@ -124,7 +131,10 @@ func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models.
logger.Infof("Adding %s to gallery %s", f.Base().Path, g.Path)
}
- if err := h.CreatorUpdater.Create(ctx, &newImage, []models.FileID{imageFile.ID}); err != nil {
+ if err := h.CreatorUpdater.Create(ctx, &models.CreateImageInput{
+ Image: &newImage,
+ FileIDs: []models.FileID{imageFile.ID},
+ }); err != nil {
return fmt.Errorf("creating new image: %w", err)
}
@@ -207,8 +217,8 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.
changed = true
}
- if changed {
- // always update updated_at time
+ if changed || updateExisting {
+ // update updated_at time when file association or content changes
imagePartial := models.NewImagePartial()
imagePartial.GalleryIDs = galleryIDs
@@ -226,9 +236,7 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.
return fmt.Errorf("updating gallery updated at timestamp: %w", err)
}
}
- }
- if changed || updateExisting {
h.PluginCache.RegisterPostHooks(ctx, i.ID, hook.ImageUpdatePost, nil, nil)
}
}
@@ -252,9 +260,13 @@ func (h *ScanHandler) getOrCreateFolderBasedGallery(ctx context.Context, f model
newGallery := models.NewGallery()
newGallery.FolderID = &folderID
+ input := models.CreateGalleryInput{
+ Gallery: &newGallery,
+ }
+
logger.Infof("Creating folder-based gallery for %s", filepath.Dir(f.Base().Path))
- if err := h.GalleryFinder.Create(ctx, &newGallery, nil); err != nil {
+ if err := h.GalleryFinder.Create(ctx, &input); err != nil {
return nil, fmt.Errorf("creating folder based gallery: %w", err)
}
@@ -308,15 +320,48 @@ func (h *ScanHandler) getOrCreateZipBasedGallery(ctx context.Context, zipFile mo
logger.Infof("%s doesn't exist. Creating new gallery...", zipFile.Base().Path)
- if err := h.GalleryFinder.Create(ctx, &newGallery, []models.FileID{zipFile.Base().ID}); err != nil {
+ input := models.CreateGalleryInput{
+ Gallery: &newGallery,
+ FileIDs: []models.FileID{zipFile.Base().ID},
+ }
+
+ if err := h.GalleryFinder.Create(ctx, &input); err != nil {
return nil, fmt.Errorf("creating zip-based gallery: %w", err)
}
+ // try to associate with scene
+ if err := h.associateScene(ctx, &newGallery, zipFile); err != nil {
+ return nil, fmt.Errorf("associating scene: %w", err)
+ }
+
h.PluginCache.RegisterPostHooks(ctx, newGallery.ID, hook.GalleryCreatePost, nil, nil)
return &newGallery, nil
}
+func (h *ScanHandler) associateScene(ctx context.Context, existing *models.Gallery, zipFile models.File) error {
+ galleryIDs := []int{existing.ID}
+
+ path := zipFile.Base().Path
+ withoutExt := strings.TrimSuffix(path, filepath.Ext(path)) + ".*"
+
+	// find scenes whose file path matches the zip path with any extension
+ scenes, err := h.SceneFinderUpdater.FindByPath(ctx, withoutExt)
+ if err != nil {
+ return err
+ }
+
+ for _, scene := range scenes {
+		// found a related scene; associate the new gallery with it
+ logger.Infof("associate: Gallery %s is related to scene: %d", path, scene.ID)
+ if err := h.SceneFinderUpdater.AddGalleryIDs(ctx, scene.ID, galleryIDs); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
func (h *ScanHandler) getOrCreateGallery(ctx context.Context, f models.File) (*models.Gallery, error) {
// don't create folder-based galleries for files in zip file
if f.Base().ZipFile != nil {
@@ -330,13 +375,13 @@ func (h *ScanHandler) getOrCreateGallery(ctx context.Context, f models.File) (*m
if _, err := os.Stat(filepath.Join(folderPath, ".forcegallery")); err == nil {
forceGallery = true
} else if !errors.Is(err, os.ErrNotExist) {
- return nil, fmt.Errorf("Could not test Path %s: %w", folderPath, err)
+ return nil, fmt.Errorf("could not test Path %s: %w", folderPath, err)
}
exemptGallery := false
if _, err := os.Stat(filepath.Join(folderPath, ".nogallery")); err == nil {
exemptGallery = true
} else if !errors.Is(err, os.ErrNotExist) {
- return nil, fmt.Errorf("Could not test Path %s: %w", folderPath, err)
+ return nil, fmt.Errorf("could not test Path %s: %w", folderPath, err)
}
if forceGallery || (h.ScanConfig.GetCreateGalleriesFromFolders() && !exemptGallery) {
diff --git a/pkg/image/scan_test.go b/pkg/image/scan_test.go
new file mode 100644
index 000000000..f48c188ee
--- /dev/null
+++ b/pkg/image/scan_test.go
@@ -0,0 +1,120 @@
+package image
+
+import (
+ "context"
+ "testing"
+
+ "github.com/stashapp/stash/pkg/models"
+ "github.com/stashapp/stash/pkg/models/mocks"
+ "github.com/stashapp/stash/pkg/plugin"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/mock"
+)
+
+type mockScanConfig struct{}
+
+func (m *mockScanConfig) GetCreateGalleriesFromFolders() bool { return false }
+
+func TestAssociateExisting_UpdatePartialOnContentChange(t *testing.T) {
+ const (
+ testImageID = 1
+ testFileID = 100
+ )
+
+ existingFile := &models.BaseFile{ID: models.FileID(testFileID), Path: "/images/test.jpg"}
+
+ makeImage := func() *models.Image {
+ return &models.Image{
+ ID: testImageID,
+ Files: models.NewRelatedFiles([]models.File{existingFile}),
+ GalleryIDs: models.NewRelatedIDs([]int{}),
+ }
+ }
+
+ tests := []struct {
+ name string
+ updateExisting bool
+ expectUpdate bool
+ }{
+ {
+ name: "calls UpdatePartial when file content changed",
+ updateExisting: true,
+ expectUpdate: true,
+ },
+ {
+ name: "skips UpdatePartial when file unchanged and already associated",
+ updateExisting: false,
+ expectUpdate: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ db := mocks.NewDatabase()
+ db.Image.On("GetFiles", mock.Anything, testImageID).Return([]models.File{existingFile}, nil)
+ db.Image.On("GetGalleryIDs", mock.Anything, testImageID).Return([]int{}, nil)
+
+ if tt.expectUpdate {
+ db.Image.On("UpdatePartial", mock.Anything, testImageID, mock.Anything).
+ Return(&models.Image{ID: testImageID}, nil)
+ }
+
+ h := &ScanHandler{
+ CreatorUpdater: db.Image,
+ GalleryFinder: db.Gallery,
+ ScanConfig: &mockScanConfig{},
+ PluginCache: &plugin.Cache{},
+ }
+
+ db.WithTxnCtx(func(ctx context.Context) {
+ err := h.associateExisting(ctx, []*models.Image{makeImage()}, existingFile, tt.updateExisting)
+ assert.NoError(t, err)
+ })
+
+ if tt.expectUpdate {
+ db.Image.AssertCalled(t, "UpdatePartial", mock.Anything, testImageID, mock.Anything)
+ } else {
+ db.Image.AssertNotCalled(t, "UpdatePartial", mock.Anything, mock.Anything, mock.Anything)
+ }
+ })
+ }
+}
+
+func TestAssociateExisting_UpdatePartialOnNewFile(t *testing.T) {
+ const (
+ testImageID = 1
+ existFileID = 100
+ newFileID = 200
+ )
+
+ existingFile := &models.BaseFile{ID: models.FileID(existFileID), Path: "/images/existing.jpg"}
+ newFile := &models.BaseFile{ID: models.FileID(newFileID), Path: "/images/new.jpg"}
+
+ image := &models.Image{
+ ID: testImageID,
+ Files: models.NewRelatedFiles([]models.File{existingFile}),
+ GalleryIDs: models.NewRelatedIDs([]int{}),
+ }
+
+ db := mocks.NewDatabase()
+ db.Image.On("GetFiles", mock.Anything, testImageID).Return([]models.File{existingFile}, nil)
+ db.Image.On("GetGalleryIDs", mock.Anything, testImageID).Return([]int{}, nil)
+ db.Image.On("AddFileID", mock.Anything, testImageID, models.FileID(newFileID)).Return(nil)
+ db.Image.On("UpdatePartial", mock.Anything, testImageID, mock.Anything).
+ Return(&models.Image{ID: testImageID}, nil)
+
+ h := &ScanHandler{
+ CreatorUpdater: db.Image,
+ GalleryFinder: db.Gallery,
+ ScanConfig: &mockScanConfig{},
+ PluginCache: &plugin.Cache{},
+ }
+
+ db.WithTxnCtx(func(ctx context.Context) {
+ err := h.associateExisting(ctx, []*models.Image{image}, newFile, false)
+ assert.NoError(t, err)
+ })
+
+ db.Image.AssertCalled(t, "AddFileID", mock.Anything, testImageID, models.FileID(newFileID))
+ db.Image.AssertCalled(t, "UpdatePartial", mock.Anything, testImageID, mock.Anything)
+}
diff --git a/pkg/job/job.go b/pkg/job/job.go
index 48b5e7b13..94d5fe2f5 100644
--- a/pkg/job/job.go
+++ b/pkg/job/job.go
@@ -66,6 +66,23 @@ type Job struct {
cancelFunc context.CancelFunc
}
+// statusCopy returns a copy of the Job with only the fields needed for
+// status reporting. Internal fields (exec, cancelFunc, outerCtx) are
+// excluded so that subscription channels don't retain heavy resources.
+func (j *Job) statusCopy() Job {
+ return Job{
+ ID: j.ID,
+ Status: j.Status,
+ Details: j.Details,
+ Description: j.Description,
+ Progress: j.Progress,
+ StartTime: j.StartTime,
+ EndTime: j.EndTime,
+ AddTime: j.AddTime,
+ Error: j.Error,
+ }
+}
+
// TimeElapsed returns the total time elapsed for the job.
// If the EndTime is set, then it uses this to calculate the elapsed time, otherwise it uses time.Now.
func (j *Job) TimeElapsed() time.Duration {
@@ -80,9 +97,10 @@ func (j *Job) TimeElapsed() time.Duration {
}
func (j *Job) cancel() {
- if j.Status == StatusReady {
+ switch j.Status {
+ case StatusReady:
j.Status = StatusCancelled
- } else if j.Status == StatusRunning {
+ case StatusRunning:
j.Status = StatusStopping
}
diff --git a/pkg/job/manager.go b/pkg/job/manager.go
index 3e47d842b..ba62d102c 100644
--- a/pkg/job/manager.go
+++ b/pkg/job/manager.go
@@ -105,7 +105,7 @@ func (m *Manager) notifyNewJob(j *Job) {
for _, s := range m.subscriptions {
// don't block if channel is full
select {
- case s.newJob <- *j:
+ case s.newJob <- j.statusCopy():
default:
}
}
@@ -232,7 +232,9 @@ func (m *Manager) removeJob(job *Job) {
return
}
- // clear any subtasks
+ // release the executor and subtask details so they can be GC'd
+ // while the job remains in the graveyard for status reporting
+ job.exec = nil
job.Details = nil
m.queue = append(m.queue[:index], m.queue[index+1:]...)
@@ -246,7 +248,7 @@ func (m *Manager) removeJob(job *Job) {
for _, s := range m.subscriptions {
// don't block if channel is full
select {
- case s.removedJob <- *job:
+ case s.removedJob <- job.statusCopy():
default:
}
}
@@ -310,8 +312,7 @@ func (m *Manager) GetJob(id int) *Job {
// get from the queue or graveyard
_, j := m.getJob(append(m.queue, m.graveyard...), id)
if j != nil {
- // make a copy of the job and return the pointer
- jCopy := *j
+ jCopy := j.statusCopy()
return &jCopy
}
@@ -326,8 +327,7 @@ func (m *Manager) GetQueue() []Job {
var ret []Job
for _, j := range m.queue {
- jCopy := *j
- ret = append(ret, jCopy)
+ ret = append(ret, j.statusCopy())
}
return ret
@@ -372,7 +372,7 @@ func (m *Manager) notifyJobUpdate(j *Job) {
for _, s := range m.subscriptions {
// don't block if channel is full
select {
- case s.updatedJob <- *j:
+ case s.updatedJob <- j.statusCopy():
default:
}
}
diff --git a/pkg/job/task.go b/pkg/job/task.go
index fa0891e6f..6dd2cf02b 100644
--- a/pkg/job/task.go
+++ b/pkg/job/task.go
@@ -51,7 +51,7 @@ func (tq *TaskQueue) executer(ctx context.Context) {
defer tq.wg.Wait()
for task := range tq.tasks {
if IsCancelled(ctx) {
- return
+ continue // allow channel to continue draining until Close()
}
tt := task
diff --git a/pkg/match/scraped.go b/pkg/match/scraped.go
index d3039f4c6..32759a2a4 100644
--- a/pkg/match/scraped.go
+++ b/pkg/match/scraped.go
@@ -22,7 +22,7 @@ type GroupNamesFinder interface {
type SceneRelationships struct {
PerformerFinder PerformerFinder
- TagFinder models.TagQueryer
+ TagFinder models.TagNameFinder
StudioFinder StudioFinder
}
@@ -188,9 +188,23 @@ func ScrapedGroup(ctx context.Context, qb GroupNamesFinder, storedID *string, na
return
}
+// ScrapedTagHierarchy executes ScrapedTag for the provided tag and its parent.
+func ScrapedTagHierarchy(ctx context.Context, qb models.TagNameFinder, s *models.ScrapedTag, stashBoxEndpoint string) error {
+ if err := ScrapedTag(ctx, qb, s, stashBoxEndpoint); err != nil {
+ return err
+ }
+
+ if s.Parent == nil {
+ return nil
+ }
+
+ // Match parent by name only (categories don't have StashDB tag IDs)
+ return ScrapedTag(ctx, qb, s.Parent, "")
+}
+
// ScrapedTag matches the provided tag with the tags
// in the database and sets the ID field if one is found.
-func ScrapedTag(ctx context.Context, qb models.TagQueryer, s *models.ScrapedTag, stashBoxEndpoint string) error {
+func ScrapedTag(ctx context.Context, qb models.TagNameFinder, s *models.ScrapedTag, stashBoxEndpoint string) error {
if s.StoredID != nil {
return nil
}
diff --git a/pkg/models/date.go b/pkg/models/date.go
index dbd5c4ec6..912361507 100644
--- a/pkg/models/date.go
+++ b/pkg/models/date.go
@@ -2,6 +2,7 @@ package models
import (
"fmt"
+ "strings"
"time"
"github.com/stashapp/stash/pkg/utils"
@@ -61,3 +62,114 @@ func ParseDate(s string) (Date, error) {
return Date{}, fmt.Errorf("failed to parse date %q: %v", s, errs)
}
+
+func DateFromYear(year int) Date {
+ return Date{
+ Time: time.Date(year, 1, 1, 0, 0, 0, 0, time.UTC),
+ Precision: DatePrecisionYear,
+ }
+}
+
+func FormatYearRange(start *Date, end *Date) string {
+ var (
+ startStr, endStr string
+ )
+
+ if start != nil {
+ startStr = start.Format(dateFormatPrecision[DatePrecisionYear])
+ }
+
+ if end != nil {
+ endStr = end.Format(dateFormatPrecision[DatePrecisionYear])
+ }
+
+ switch {
+ case startStr == "" && endStr == "":
+ return ""
+ case endStr == "":
+ return fmt.Sprintf("%s -", startStr)
+ case startStr == "":
+ return fmt.Sprintf("- %s", endStr)
+ default:
+ return fmt.Sprintf("%s - %s", startStr, endStr)
+ }
+}
+
+func FormatYearRangeString(start *string, end *string) string {
+ switch {
+ case start == nil && end == nil:
+ return ""
+ case end == nil:
+ return fmt.Sprintf("%s -", *start)
+ case start == nil:
+ return fmt.Sprintf("- %s", *end)
+ default:
+ return fmt.Sprintf("%s - %s", *start, *end)
+ }
+}
+
+// ParseYearRangeString parses a year range string into start and end year-precision Dates.
+// Supported formats: "YYYY", "YYYY - YYYY", "YYYY-YYYY", "YYYY -", "- YYYY", "YYYY-present".
+// Returns nil for start/end if not present in the string.
+func ParseYearRangeString(s string) (start *Date, end *Date, err error) {
+ s = strings.TrimSpace(s)
+ if s == "" {
+ return nil, nil, fmt.Errorf("empty year range string")
+ }
+
+ // normalize "present" to empty end
+ lower := strings.ToLower(s)
+ lower = strings.ReplaceAll(lower, "present", "")
+
+ // split on "-" if it contains one
+ var parts []string
+ if strings.Contains(lower, "-") {
+ parts = strings.SplitN(lower, "-", 2)
+ } else {
+ // single value, treat as start year
+ year, err := parseYear(lower)
+ if err != nil {
+ return nil, nil, fmt.Errorf("invalid year range %q: %w", s, err)
+ }
+ return year, nil, nil
+ }
+
+ startStr := strings.TrimSpace(parts[0])
+ endStr := strings.TrimSpace(parts[1])
+
+ if startStr != "" {
+ y, err := parseYear(startStr)
+ if err != nil {
+ return nil, nil, fmt.Errorf("invalid start year in %q: %w", s, err)
+ }
+ start = y
+ }
+
+ if endStr != "" {
+ y, err := parseYear(endStr)
+ if err != nil {
+ return nil, nil, fmt.Errorf("invalid end year in %q: %w", s, err)
+ }
+ end = y
+ }
+
+ if start == nil && end == nil {
+ return nil, nil, fmt.Errorf("could not parse year range %q", s)
+ }
+
+ return start, end, nil
+}
+
+func parseYear(s string) (*Date, error) {
+ ret, err := ParseDate(s)
+ if err != nil {
+ return nil, fmt.Errorf("parsing year %q: %w", s, err)
+ }
+
+ year := ret.Time.Year()
+ if year < 1900 || year > 2200 {
+ return nil, fmt.Errorf("year %d out of reasonable range", year)
+ }
+
+ return &ret, nil
+}
diff --git a/pkg/models/date_test.go b/pkg/models/date_test.go
index b6cca9ee1..3b2962e28 100644
--- a/pkg/models/date_test.go
+++ b/pkg/models/date_test.go
@@ -3,6 +3,8 @@ package models
import (
"testing"
"time"
+
+ "github.com/stretchr/testify/assert"
)
func TestParseDateStringAsTime(t *testing.T) {
@@ -48,3 +50,102 @@ func TestParseDateStringAsTime(t *testing.T) {
})
}
}
+
+func TestFormatYearRange(t *testing.T) {
+ datePtr := func(v int) *Date {
+ date := DateFromYear(v)
+ return &date
+ }
+
+ tests := []struct {
+ name string
+ start *Date
+ end *Date
+ want string
+ }{
+ {"both nil", nil, nil, ""},
+ {"only start", datePtr(2005), nil, "2005 -"},
+ {"only end", nil, datePtr(2010), "- 2010"},
+ {"start and end", datePtr(2005), datePtr(2010), "2005 - 2010"},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got := FormatYearRange(tt.start, tt.end)
+ assert.Equal(t, tt.want, got)
+ })
+ }
+}
+
+func TestFormatYearRangeString(t *testing.T) {
+ stringPtr := func(v string) *string { return &v }
+
+ tests := []struct {
+ name string
+ start *string
+ end *string
+ want string
+ }{
+ {"both nil", nil, nil, ""},
+ {"only start", stringPtr("2005"), nil, "2005 -"},
+ {"only end", nil, stringPtr("2010"), "- 2010"},
+ {"start and end", stringPtr("2005"), stringPtr("2010"), "2005 - 2010"},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got := FormatYearRangeString(tt.start, tt.end)
+ assert.Equal(t, tt.want, got)
+ })
+ }
+}
+
+func TestParseYearRangeString(t *testing.T) {
+ intPtr := func(v int) *int { return &v }
+
+ tests := []struct {
+ name string
+ input string
+ wantStart *int
+ wantEnd *int
+ wantErr bool
+ }{
+ {"single year", "2005", intPtr(2005), nil, false},
+ {"year range with spaces", "2005 - 2010", intPtr(2005), intPtr(2010), false},
+ {"year range no spaces", "2005-2010", intPtr(2005), intPtr(2010), false},
+ {"year dash open", "2005 -", intPtr(2005), nil, false},
+ {"year dash open no space", "2005-", intPtr(2005), nil, false},
+ {"dash year", "- 2010", nil, intPtr(2010), false},
+ {"year present", "2005-present", intPtr(2005), nil, false},
+ {"year Present caps", "2005 - Present", intPtr(2005), nil, false},
+ {"whitespace padding", " 2005 - 2010 ", intPtr(2005), intPtr(2010), false},
+ {"empty string", "", nil, nil, true},
+ {"garbage", "not a year", nil, nil, true},
+ {"partial garbage start", "abc - 2010", nil, nil, true},
+ {"partial garbage end", "2005 - abc", nil, nil, true},
+ {"year out of range", "1800", nil, nil, true},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ start, end, err := ParseYearRangeString(tt.input)
+ if tt.wantErr {
+ assert.Error(t, err)
+ return
+ }
+ assert.NoError(t, err)
+ if tt.wantStart != nil {
+ assert.NotNil(t, start)
+ assert.Equal(t, *tt.wantStart, start.Time.Year())
+ } else {
+ assert.Nil(t, start)
+ }
+ if tt.wantEnd != nil {
+ assert.NotNil(t, end)
+ assert.Equal(t, *tt.wantEnd, end.Time.Year())
+ } else {
+ assert.Nil(t, end)
+ }
+ })
+ }
+}
diff --git a/pkg/models/folder.go b/pkg/models/folder.go
index ada9e17b7..e9e9a3971 100644
--- a/pkg/models/folder.go
+++ b/pkg/models/folder.go
@@ -18,10 +18,8 @@ type FolderQueryOptions struct {
type FolderFilterType struct {
OperatorFilter[FolderFilterType]
- Path *StringCriterionInput `json:"path,omitempty"`
- Basename *StringCriterionInput `json:"basename,omitempty"`
- // Filter by parent directory path
- Dir *StringCriterionInput `json:"dir,omitempty"`
+ Path *StringCriterionInput `json:"path,omitempty"`
+ Basename *StringCriterionInput `json:"basename,omitempty"`
ParentFolder *HierarchicalMultiCriterionInput `json:"parent_folder,omitempty"`
ZipFile *MultiCriterionInput `json:"zip_file,omitempty"`
// Filter by modification time
diff --git a/pkg/models/gallery.go b/pkg/models/gallery.go
index dfc776afe..3bf70b754 100644
--- a/pkg/models/gallery.go
+++ b/pkg/models/gallery.go
@@ -11,6 +11,8 @@ type GalleryFilterType struct {
Checksum *StringCriterionInput `json:"checksum"`
// Filter by path
Path *StringCriterionInput `json:"path"`
+ // Filter by parent folder
+ ParentFolder *HierarchicalMultiCriterionInput `json:"parent_folder,omitempty"`
// Filter by zip file count
FileCount *IntCriterionInput `json:"file_count"`
// Filter to only include galleries missing this property
@@ -67,6 +69,9 @@ type GalleryFilterType struct {
CreatedAt *TimestampCriterionInput `json:"created_at"`
// Filter by updated at
UpdatedAt *TimestampCriterionInput `json:"updated_at"`
+
+ // Filter by custom fields
+ CustomFields []CustomFieldCriterionInput `json:"custom_fields"`
}
type GalleryUpdateInput struct {
@@ -86,6 +91,8 @@ type GalleryUpdateInput struct {
PerformerIds []string `json:"performer_ids"`
PrimaryFileID *string `json:"primary_file_id"`
+ CustomFields *CustomFieldsInput `json:"custom_fields"`
+
// deprecated
URL *string `json:"url"`
}
diff --git a/pkg/models/group.go b/pkg/models/group.go
index ec550eea8..396384b51 100644
--- a/pkg/models/group.go
+++ b/pkg/models/group.go
@@ -43,4 +43,6 @@ type GroupFilterType struct {
CreatedAt *TimestampCriterionInput `json:"created_at"`
// Filter by updated at
UpdatedAt *TimestampCriterionInput `json:"updated_at"`
+ // Filter by custom fields
+ CustomFields []CustomFieldCriterionInput `json:"custom_fields"`
}
diff --git a/pkg/models/image.go b/pkg/models/image.go
index 84be79360..b99267e8c 100644
--- a/pkg/models/image.go
+++ b/pkg/models/image.go
@@ -1,6 +1,8 @@
package models
-import "context"
+import (
+ "context"
+)
type ImageFilterType struct {
OperatorFilter[ImageFilterType]
@@ -65,25 +67,28 @@ type ImageFilterType struct {
CreatedAt *TimestampCriterionInput `json:"created_at"`
// Filter by updated at
UpdatedAt *TimestampCriterionInput `json:"updated_at"`
+ // Filter by custom fields
+ CustomFields []CustomFieldCriterionInput `json:"custom_fields"`
}
type ImageUpdateInput struct {
- ClientMutationID *string `json:"clientMutationId"`
- ID string `json:"id"`
- Title *string `json:"title"`
- Code *string `json:"code"`
- Urls []string `json:"urls"`
- Date *string `json:"date"`
- Details *string `json:"details"`
- Photographer *string `json:"photographer"`
- Rating100 *int `json:"rating100"`
- Organized *bool `json:"organized"`
- SceneIds []string `json:"scene_ids"`
- StudioID *string `json:"studio_id"`
- TagIds []string `json:"tag_ids"`
- PerformerIds []string `json:"performer_ids"`
- GalleryIds []string `json:"gallery_ids"`
- PrimaryFileID *string `json:"primary_file_id"`
+ ClientMutationID *string `json:"clientMutationId"`
+ ID string `json:"id"`
+ Title *string `json:"title"`
+ Code *string `json:"code"`
+ Urls []string `json:"urls"`
+ Date *string `json:"date"`
+ Details *string `json:"details"`
+ Photographer *string `json:"photographer"`
+ Rating100 *int `json:"rating100"`
+ Organized *bool `json:"organized"`
+ SceneIds []string `json:"scene_ids"`
+ StudioID *string `json:"studio_id"`
+ TagIds []string `json:"tag_ids"`
+ PerformerIds []string `json:"performer_ids"`
+ GalleryIds []string `json:"gallery_ids"`
+ PrimaryFileID *string `json:"primary_file_id"`
+ CustomFields *CustomFieldsInput `json:"custom_fields"`
// deprecated
URL *string `json:"url"`
diff --git a/pkg/models/jsonschema/gallery.go b/pkg/models/jsonschema/gallery.go
index 7323e37ba..5fb6e16ab 100644
--- a/pkg/models/jsonschema/gallery.go
+++ b/pkg/models/jsonschema/gallery.go
@@ -18,22 +18,23 @@ type GalleryChapter struct {
}
type Gallery struct {
- ZipFiles []string `json:"zip_files,omitempty"`
- FolderPath string `json:"folder_path,omitempty"`
- Title string `json:"title,omitempty"`
- Code string `json:"code,omitempty"`
- URLs []string `json:"urls,omitempty"`
- Date string `json:"date,omitempty"`
- Details string `json:"details,omitempty"`
- Photographer string `json:"photographer,omitempty"`
- Rating int `json:"rating,omitempty"`
- Organized bool `json:"organized,omitempty"`
- Chapters []GalleryChapter `json:"chapters,omitempty"`
- Studio string `json:"studio,omitempty"`
- Performers []string `json:"performers,omitempty"`
- Tags []string `json:"tags,omitempty"`
- CreatedAt json.JSONTime `json:"created_at,omitempty"`
- UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
+ ZipFiles []string `json:"zip_files,omitempty"`
+ FolderPath string `json:"folder_path,omitempty"`
+ Title string `json:"title,omitempty"`
+ Code string `json:"code,omitempty"`
+ URLs []string `json:"urls,omitempty"`
+ Date string `json:"date,omitempty"`
+ Details string `json:"details,omitempty"`
+ Photographer string `json:"photographer,omitempty"`
+ Rating int `json:"rating,omitempty"`
+ Organized bool `json:"organized,omitempty"`
+ Chapters []GalleryChapter `json:"chapters,omitempty"`
+ Studio string `json:"studio,omitempty"`
+ Performers []string `json:"performers,omitempty"`
+ Tags []string `json:"tags,omitempty"`
+ CreatedAt json.JSONTime `json:"created_at,omitempty"`
+ UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
+ CustomFields map[string]interface{} `json:"custom_fields,omitempty"`
// deprecated - for import only
URL string `json:"url,omitempty"`
diff --git a/pkg/models/jsonschema/group.go b/pkg/models/jsonschema/group.go
index b284dab6e..357ac70bc 100644
--- a/pkg/models/jsonschema/group.go
+++ b/pkg/models/jsonschema/group.go
@@ -33,6 +33,8 @@ type Group struct {
CreatedAt json.JSONTime `json:"created_at,omitempty"`
UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
+ CustomFields map[string]interface{} `json:"custom_fields,omitempty"`
+
// deprecated - for import only
URL string `json:"url,omitempty"`
}
diff --git a/pkg/models/jsonschema/image.go b/pkg/models/jsonschema/image.go
index 1bdac8770..168ea9eec 100644
--- a/pkg/models/jsonschema/image.go
+++ b/pkg/models/jsonschema/image.go
@@ -18,18 +18,19 @@ type Image struct {
// deprecated - for import only
URL string `json:"url,omitempty"`
- URLs []string `json:"urls,omitempty"`
- Date string `json:"date,omitempty"`
- Details string `json:"details,omitempty"`
- Photographer string `json:"photographer,omitempty"`
- Organized bool `json:"organized,omitempty"`
- OCounter int `json:"o_counter,omitempty"`
- Galleries []GalleryRef `json:"galleries,omitempty"`
- Performers []string `json:"performers,omitempty"`
- Tags []string `json:"tags,omitempty"`
- Files []string `json:"files,omitempty"`
- CreatedAt json.JSONTime `json:"created_at,omitempty"`
- UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
+ URLs []string `json:"urls,omitempty"`
+ Date string `json:"date,omitempty"`
+ Details string `json:"details,omitempty"`
+ Photographer string `json:"photographer,omitempty"`
+ Organized bool `json:"organized,omitempty"`
+ OCounter int `json:"o_counter,omitempty"`
+ Galleries []GalleryRef `json:"galleries,omitempty"`
+ Performers []string `json:"performers,omitempty"`
+ Tags []string `json:"tags,omitempty"`
+ Files []string `json:"files,omitempty"`
+ CreatedAt json.JSONTime `json:"created_at,omitempty"`
+ UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
+ CustomFields map[string]interface{} `json:"custom_fields,omitempty"`
}
func (s Image) Filename(basename string, hash string) string {
diff --git a/pkg/models/jsonschema/performer.go b/pkg/models/jsonschema/performer.go
index b738fbfac..1a8acd5f3 100644
--- a/pkg/models/jsonschema/performer.go
+++ b/pkg/models/jsonschema/performer.go
@@ -49,8 +49,8 @@ type Performer struct {
PenisLength float64 `json:"penis_length,omitempty"`
Circumcised string `json:"circumcised,omitempty"`
CareerLength string `json:"career_length,omitempty"` // deprecated - for import only
- CareerStart *int `json:"career_start,omitempty"`
- CareerEnd *int `json:"career_end,omitempty"`
+ CareerStart string `json:"career_start,omitempty"`
+ CareerEnd string `json:"career_end,omitempty"`
Tattoos string `json:"tattoos,omitempty"`
Piercings string `json:"piercings,omitempty"`
Aliases StringOrStringList `json:"aliases,omitempty"`
diff --git a/pkg/models/mocks/FileReaderWriter.go b/pkg/models/mocks/FileReaderWriter.go
index 97a0136e6..4b370459e 100644
--- a/pkg/models/mocks/FileReaderWriter.go
+++ b/pkg/models/mocks/FileReaderWriter.go
@@ -153,13 +153,13 @@ func (_m *FileReaderWriter) FindAllByPath(ctx context.Context, path string, case
return r0, r1
}
-// FindAllInPaths provides a mock function with given fields: ctx, p, limit, offset
-func (_m *FileReaderWriter) FindAllInPaths(ctx context.Context, p []string, limit int, offset int) ([]models.File, error) {
- ret := _m.Called(ctx, p, limit, offset)
+// FindAllInPaths provides a mock function with given fields: ctx, p, includeZipContents, limit, offset
+func (_m *FileReaderWriter) FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit int, offset int) ([]models.File, error) {
+ ret := _m.Called(ctx, p, includeZipContents, limit, offset)
var r0 []models.File
- if rf, ok := ret.Get(0).(func(context.Context, []string, int, int) []models.File); ok {
- r0 = rf(ctx, p, limit, offset)
+ if rf, ok := ret.Get(0).(func(context.Context, []string, bool, int, int) []models.File); ok {
+ r0 = rf(ctx, p, includeZipContents, limit, offset)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]models.File)
@@ -167,8 +167,8 @@ func (_m *FileReaderWriter) FindAllInPaths(ctx context.Context, p []string, limi
}
var r1 error
- if rf, ok := ret.Get(1).(func(context.Context, []string, int, int) error); ok {
- r1 = rf(ctx, p, limit, offset)
+ if rf, ok := ret.Get(1).(func(context.Context, []string, bool, int, int) error); ok {
+ r1 = rf(ctx, p, includeZipContents, limit, offset)
} else {
r1 = ret.Error(1)
}
diff --git a/pkg/models/mocks/FolderReaderWriter.go b/pkg/models/mocks/FolderReaderWriter.go
index 7bca013fe..d2230c645 100644
--- a/pkg/models/mocks/FolderReaderWriter.go
+++ b/pkg/models/mocks/FolderReaderWriter.go
@@ -86,13 +86,13 @@ func (_m *FolderReaderWriter) Find(ctx context.Context, id models.FolderID) (*mo
return r0, r1
}
-// FindAllInPaths provides a mock function with given fields: ctx, p, limit, offset
-func (_m *FolderReaderWriter) FindAllInPaths(ctx context.Context, p []string, limit int, offset int) ([]*models.Folder, error) {
- ret := _m.Called(ctx, p, limit, offset)
+// FindAllInPaths provides a mock function with given fields: ctx, p, includeZipContents, limit, offset
+func (_m *FolderReaderWriter) FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit int, offset int) ([]*models.Folder, error) {
+ ret := _m.Called(ctx, p, includeZipContents, limit, offset)
var r0 []*models.Folder
- if rf, ok := ret.Get(0).(func(context.Context, []string, int, int) []*models.Folder); ok {
- r0 = rf(ctx, p, limit, offset)
+ if rf, ok := ret.Get(0).(func(context.Context, []string, bool, int, int) []*models.Folder); ok {
+ r0 = rf(ctx, p, includeZipContents, limit, offset)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Folder)
@@ -100,8 +100,8 @@ func (_m *FolderReaderWriter) FindAllInPaths(ctx context.Context, p []string, li
}
var r1 error
- if rf, ok := ret.Get(1).(func(context.Context, []string, int, int) error); ok {
- r1 = rf(ctx, p, limit, offset)
+ if rf, ok := ret.Get(1).(func(context.Context, []string, bool, int, int) error); ok {
+ r1 = rf(ctx, p, includeZipContents, limit, offset)
} else {
r1 = ret.Error(1)
}
@@ -201,6 +201,52 @@ func (_m *FolderReaderWriter) FindMany(ctx context.Context, id []models.FolderID
return r0, r1
}
+// GetManyParentFolderIDs provides a mock function with given fields: ctx, folderIDs
+func (_m *FolderReaderWriter) GetManyParentFolderIDs(ctx context.Context, folderIDs []models.FolderID) ([][]models.FolderID, error) {
+ ret := _m.Called(ctx, folderIDs)
+
+ var r0 [][]models.FolderID
+ if rf, ok := ret.Get(0).(func(context.Context, []models.FolderID) [][]models.FolderID); ok {
+ r0 = rf(ctx, folderIDs)
+ } else {
+ if ret.Get(0) != nil {
+ r0 = ret.Get(0).([][]models.FolderID)
+ }
+ }
+
+ var r1 error
+ if rf, ok := ret.Get(1).(func(context.Context, []models.FolderID) error); ok {
+ r1 = rf(ctx, folderIDs)
+ } else {
+ r1 = ret.Error(1)
+ }
+
+ return r0, r1
+}
+
+// GetManySubFolderIDs provides a mock function with given fields: ctx, folderIDs
+func (_m *FolderReaderWriter) GetManySubFolderIDs(ctx context.Context, folderIDs []models.FolderID) ([][]models.FolderID, error) {
+ ret := _m.Called(ctx, folderIDs)
+
+ var r0 [][]models.FolderID
+ if rf, ok := ret.Get(0).(func(context.Context, []models.FolderID) [][]models.FolderID); ok {
+ r0 = rf(ctx, folderIDs)
+ } else {
+ if ret.Get(0) != nil {
+ r0 = ret.Get(0).([][]models.FolderID)
+ }
+ }
+
+ var r1 error
+ if rf, ok := ret.Get(1).(func(context.Context, []models.FolderID) error); ok {
+ r1 = rf(ctx, folderIDs)
+ } else {
+ r1 = ret.Error(1)
+ }
+
+ return r0, r1
+}
+
// Query provides a mock function with given fields: ctx, options
func (_m *FolderReaderWriter) Query(ctx context.Context, options models.FolderQueryOptions) (*models.FolderQueryResult, error) {
ret := _m.Called(ctx, options)
diff --git a/pkg/models/mocks/GalleryReaderWriter.go b/pkg/models/mocks/GalleryReaderWriter.go
index f07f8a7d9..e835ea2bc 100644
--- a/pkg/models/mocks/GalleryReaderWriter.go
+++ b/pkg/models/mocks/GalleryReaderWriter.go
@@ -49,6 +49,20 @@ func (_m *GalleryReaderWriter) AddImages(ctx context.Context, galleryID int, ima
return r0
}
+// AddSceneIDs provides a mock function with given fields: ctx, galleryID, sceneIDs
+func (_m *GalleryReaderWriter) AddSceneIDs(ctx context.Context, galleryID int, sceneIDs []int) error {
+ ret := _m.Called(ctx, galleryID, sceneIDs)
+
+ var r0 error
+ if rf, ok := ret.Get(0).(func(context.Context, int, []int) error); ok {
+ r0 = rf(ctx, galleryID, sceneIDs)
+ } else {
+ r0 = ret.Error(0)
+ }
+
+ return r0
+}
+
// All provides a mock function with given fields: ctx
func (_m *GalleryReaderWriter) All(ctx context.Context) ([]*models.Gallery, error) {
ret := _m.Called(ctx)
@@ -114,13 +128,13 @@ func (_m *GalleryReaderWriter) CountByFileID(ctx context.Context, fileID models.
return r0, r1
}
-// Create provides a mock function with given fields: ctx, newGallery, fileIDs
-func (_m *GalleryReaderWriter) Create(ctx context.Context, newGallery *models.Gallery, fileIDs []models.FileID) error {
- ret := _m.Called(ctx, newGallery, fileIDs)
+// Create provides a mock function with given fields: ctx, newGallery
+func (_m *GalleryReaderWriter) Create(ctx context.Context, newGallery *models.CreateGalleryInput) error {
+ ret := _m.Called(ctx, newGallery)
var r0 error
- if rf, ok := ret.Get(0).(func(context.Context, *models.Gallery, []models.FileID) error); ok {
- r0 = rf(ctx, newGallery, fileIDs)
+ if rf, ok := ret.Get(0).(func(context.Context, *models.CreateGalleryInput) error); ok {
+ r0 = rf(ctx, newGallery)
} else {
r0 = ret.Error(0)
}
@@ -395,6 +409,52 @@ func (_m *GalleryReaderWriter) FindUserGalleryByTitle(ctx context.Context, title
return r0, r1
}
+// GetCustomFields provides a mock function with given fields: ctx, id
+func (_m *GalleryReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) {
+ ret := _m.Called(ctx, id)
+
+ var r0 map[string]interface{}
+ if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok {
+ r0 = rf(ctx, id)
+ } else {
+ if ret.Get(0) != nil {
+ r0 = ret.Get(0).(map[string]interface{})
+ }
+ }
+
+ var r1 error
+ if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+ r1 = rf(ctx, id)
+ } else {
+ r1 = ret.Error(1)
+ }
+
+ return r0, r1
+}
+
+// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids
+func (_m *GalleryReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) {
+ ret := _m.Called(ctx, ids)
+
+ var r0 []models.CustomFieldMap
+ if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok {
+ r0 = rf(ctx, ids)
+ } else {
+ if ret.Get(0) != nil {
+ r0 = ret.Get(0).([]models.CustomFieldMap)
+ }
+ }
+
+ var r1 error
+ if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok {
+ r1 = rf(ctx, ids)
+ } else {
+ r1 = ret.Error(1)
+ }
+
+ return r0, r1
+}
+
// GetFiles provides a mock function with given fields: ctx, relatedID
func (_m *GalleryReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]models.File, error) {
ret := _m.Called(ctx, relatedID)
@@ -656,12 +716,26 @@ func (_m *GalleryReaderWriter) SetCover(ctx context.Context, galleryID int, cove
return r0
}
+// SetCustomFields provides a mock function with given fields: ctx, id, fields
+func (_m *GalleryReaderWriter) SetCustomFields(ctx context.Context, id int, fields models.CustomFieldsInput) error {
+ ret := _m.Called(ctx, id, fields)
+
+ var r0 error
+ if rf, ok := ret.Get(0).(func(context.Context, int, models.CustomFieldsInput) error); ok {
+ r0 = rf(ctx, id, fields)
+ } else {
+ r0 = ret.Error(0)
+ }
+
+ return r0
+}
+
// Update provides a mock function with given fields: ctx, updatedGallery
-func (_m *GalleryReaderWriter) Update(ctx context.Context, updatedGallery *models.Gallery) error {
+func (_m *GalleryReaderWriter) Update(ctx context.Context, updatedGallery *models.UpdateGalleryInput) error {
ret := _m.Called(ctx, updatedGallery)
var r0 error
- if rf, ok := ret.Get(0).(func(context.Context, *models.Gallery) error); ok {
+ if rf, ok := ret.Get(0).(func(context.Context, *models.UpdateGalleryInput) error); ok {
r0 = rf(ctx, updatedGallery)
} else {
r0 = ret.Error(0)
diff --git a/pkg/models/mocks/GroupReaderWriter.go b/pkg/models/mocks/GroupReaderWriter.go
index dc745d094..ac9e513f4 100644
--- a/pkg/models/mocks/GroupReaderWriter.go
+++ b/pkg/models/mocks/GroupReaderWriter.go
@@ -312,6 +312,52 @@ func (_m *GroupReaderWriter) GetContainingGroupDescriptions(ctx context.Context,
return r0, r1
}
+// GetCustomFields provides a mock function with given fields: ctx, id
+func (_m *GroupReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) {
+ ret := _m.Called(ctx, id)
+
+ var r0 map[string]interface{}
+ if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok {
+ r0 = rf(ctx, id)
+ } else {
+ if ret.Get(0) != nil {
+ r0 = ret.Get(0).(map[string]interface{})
+ }
+ }
+
+ var r1 error
+ if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+ r1 = rf(ctx, id)
+ } else {
+ r1 = ret.Error(1)
+ }
+
+ return r0, r1
+}
+
+// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids
+func (_m *GroupReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) {
+ ret := _m.Called(ctx, ids)
+
+ var r0 []models.CustomFieldMap
+ if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok {
+ r0 = rf(ctx, ids)
+ } else {
+ if ret.Get(0) != nil {
+ r0 = ret.Get(0).([]models.CustomFieldMap)
+ }
+ }
+
+ var r1 error
+ if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok {
+ r1 = rf(ctx, ids)
+ } else {
+ r1 = ret.Error(1)
+ }
+
+ return r0, r1
+}
+
// GetFrontImage provides a mock function with given fields: ctx, groupID
func (_m *GroupReaderWriter) GetFrontImage(ctx context.Context, groupID int) ([]byte, error) {
ret := _m.Called(ctx, groupID)
@@ -497,6 +543,20 @@ func (_m *GroupReaderWriter) QueryCount(ctx context.Context, groupFilter *models
return r0, r1
}
+// SetCustomFields provides a mock function with given fields: ctx, id, fields
+func (_m *GroupReaderWriter) SetCustomFields(ctx context.Context, id int, fields models.CustomFieldsInput) error {
+ ret := _m.Called(ctx, id, fields)
+
+ var r0 error
+ if rf, ok := ret.Get(0).(func(context.Context, int, models.CustomFieldsInput) error); ok {
+ r0 = rf(ctx, id, fields)
+ } else {
+ r0 = ret.Error(0)
+ }
+
+ return r0
+}
+
// Update provides a mock function with given fields: ctx, updatedGroup
func (_m *GroupReaderWriter) Update(ctx context.Context, updatedGroup *models.Group) error {
ret := _m.Called(ctx, updatedGroup)
diff --git a/pkg/models/mocks/ImageReaderWriter.go b/pkg/models/mocks/ImageReaderWriter.go
index afc5efdb7..f2c9934be 100644
--- a/pkg/models/mocks/ImageReaderWriter.go
+++ b/pkg/models/mocks/ImageReaderWriter.go
@@ -137,13 +137,13 @@ func (_m *ImageReaderWriter) CoverByGalleryID(ctx context.Context, galleryId int
return r0, r1
}
-// Create provides a mock function with given fields: ctx, newImage, fileIDs
-func (_m *ImageReaderWriter) Create(ctx context.Context, newImage *models.Image, fileIDs []models.FileID) error {
- ret := _m.Called(ctx, newImage, fileIDs)
+// Create provides a mock function with given fields: ctx, newImage
+func (_m *ImageReaderWriter) Create(ctx context.Context, newImage *models.CreateImageInput) error {
+ ret := _m.Called(ctx, newImage)
var r0 error
- if rf, ok := ret.Get(0).(func(context.Context, *models.Image, []models.FileID) error); ok {
- r0 = rf(ctx, newImage, fileIDs)
+ if rf, ok := ret.Get(0).(func(context.Context, *models.CreateImageInput) error); ok {
+ r0 = rf(ctx, newImage)
} else {
r0 = ret.Error(0)
}
@@ -393,6 +393,52 @@ func (_m *ImageReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models
return r0, r1
}
+// GetCustomFields provides a mock function with given fields: ctx, id
+func (_m *ImageReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) {
+ ret := _m.Called(ctx, id)
+
+ var r0 map[string]interface{}
+ if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok {
+ r0 = rf(ctx, id)
+ } else {
+ if ret.Get(0) != nil {
+ r0 = ret.Get(0).(map[string]interface{})
+ }
+ }
+
+ var r1 error
+ if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+ r1 = rf(ctx, id)
+ } else {
+ r1 = ret.Error(1)
+ }
+
+ return r0, r1
+}
+
+// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids
+func (_m *ImageReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) {
+ ret := _m.Called(ctx, ids)
+
+ var r0 []models.CustomFieldMap
+ if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok {
+ r0 = rf(ctx, ids)
+ } else {
+ if ret.Get(0) != nil {
+ r0 = ret.Get(0).([]models.CustomFieldMap)
+ }
+ }
+
+ var r1 error
+ if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok {
+ r1 = rf(ctx, ids)
+ } else {
+ r1 = ret.Error(1)
+ }
+
+ return r0, r1
+}
+
// GetFiles provides a mock function with given fields: ctx, relatedID
func (_m *ImageReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]models.File, error) {
ret := _m.Called(ctx, relatedID)
@@ -694,6 +740,20 @@ func (_m *ImageReaderWriter) ResetOCounter(ctx context.Context, id int) (int, er
return r0, r1
}
+// SetCustomFields provides a mock function with given fields: ctx, id, fields
+func (_m *ImageReaderWriter) SetCustomFields(ctx context.Context, id int, fields models.CustomFieldsInput) error {
+ ret := _m.Called(ctx, id, fields)
+
+ var r0 error
+ if rf, ok := ret.Get(0).(func(context.Context, int, models.CustomFieldsInput) error); ok {
+ r0 = rf(ctx, id, fields)
+ } else {
+ r0 = ret.Error(0)
+ }
+
+ return r0
+}
+
// Size provides a mock function with given fields: ctx
func (_m *ImageReaderWriter) Size(ctx context.Context) (float64, error) {
ret := _m.Called(ctx)
diff --git a/pkg/models/mocks/TagReaderWriter.go b/pkg/models/mocks/TagReaderWriter.go
index 95a3b7a87..194f475c8 100644
--- a/pkg/models/mocks/TagReaderWriter.go
+++ b/pkg/models/mocks/TagReaderWriter.go
@@ -197,6 +197,29 @@ func (_m *TagReaderWriter) FindAllDescendants(ctx context.Context, tagID int, ex
return r0, r1
}
+// FindByAlias provides a mock function with given fields: ctx, alias, nocase
+func (_m *TagReaderWriter) FindByAlias(ctx context.Context, alias string, nocase bool) (*models.Tag, error) {
+ ret := _m.Called(ctx, alias, nocase)
+
+ var r0 *models.Tag
+ if rf, ok := ret.Get(0).(func(context.Context, string, bool) *models.Tag); ok {
+ r0 = rf(ctx, alias, nocase)
+ } else {
+ if ret.Get(0) != nil {
+ r0 = ret.Get(0).(*models.Tag)
+ }
+ }
+
+ var r1 error
+ if rf, ok := ret.Get(1).(func(context.Context, string, bool) error); ok {
+ r1 = rf(ctx, alias, nocase)
+ } else {
+ r1 = ret.Error(1)
+ }
+
+ return r0, r1
+}
+
// FindByChildTagID provides a mock function with given fields: ctx, childID
func (_m *TagReaderWriter) FindByChildTagID(ctx context.Context, childID int) ([]*models.Tag, error) {
ret := _m.Called(ctx, childID)
@@ -450,6 +473,29 @@ func (_m *TagReaderWriter) FindByStashID(ctx context.Context, stashID models.Sta
return r0, r1
}
+// FindByStashIDStatus provides a mock function with given fields: ctx, hasStashID, stashboxEndpoint
+func (_m *TagReaderWriter) FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*models.Tag, error) {
+ ret := _m.Called(ctx, hasStashID, stashboxEndpoint)
+
+ var r0 []*models.Tag
+ if rf, ok := ret.Get(0).(func(context.Context, bool, string) []*models.Tag); ok {
+ r0 = rf(ctx, hasStashID, stashboxEndpoint)
+ } else {
+ if ret.Get(0) != nil {
+ r0 = ret.Get(0).([]*models.Tag)
+ }
+ }
+
+ var r1 error
+ if rf, ok := ret.Get(1).(func(context.Context, bool, string) error); ok {
+ r1 = rf(ctx, hasStashID, stashboxEndpoint)
+ } else {
+ r1 = ret.Error(1)
+ }
+
+ return r0, r1
+}
+
// FindByStudioID provides a mock function with given fields: ctx, studioID
func (_m *TagReaderWriter) FindByStudioID(ctx context.Context, studioID int) ([]*models.Tag, error) {
ret := _m.Called(ctx, studioID)
diff --git a/pkg/models/mocks/database.go b/pkg/models/mocks/database.go
index ec4177b30..88f106e19 100644
--- a/pkg/models/mocks/database.go
+++ b/pkg/models/mocks/database.go
@@ -3,6 +3,7 @@ package mocks
import (
"context"
+ "errors"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/txn"
@@ -89,6 +90,16 @@ func (db *Database) AssertExpectations(t mock.TestingT) {
db.SavedFilter.AssertExpectations(t)
}
+// WithTxnCtx runs fn with a context that has a transaction hook manager registered,
+// so code that calls txn.AddPostCommitHook (e.g. plugin cache) won't nil-panic.
+// Always rolls back to avoid executing the registered hooks.
+func (db *Database) WithTxnCtx(fn func(ctx context.Context)) {
+ _ = txn.WithTxn(context.Background(), db, func(ctx context.Context) error {
+ fn(ctx)
+ return errors.New("rollback")
+ })
+}
+
func (db *Database) Repository() models.Repository {
return models.Repository{
TxnManager: db,
diff --git a/pkg/models/model_gallery.go b/pkg/models/model_gallery.go
index 4b6a3183d..bbdba46a6 100644
--- a/pkg/models/model_gallery.go
+++ b/pkg/models/model_gallery.go
@@ -46,6 +46,20 @@ func NewGallery() Gallery {
}
}
+type CreateGalleryInput struct {
+ *Gallery
+
+ FileIDs []FileID
+ CustomFields map[string]interface{} `json:"custom_fields"`
+}
+
+type UpdateGalleryInput struct {
+ *Gallery
+
+ FileIDs []FileID
+ CustomFields CustomFieldsInput `json:"custom_fields"`
+}
+
// GalleryPartial represents part of a Gallery object. It is used to update
// the database entry. Only non-nil fields will be updated.
type GalleryPartial struct {
@@ -70,6 +84,8 @@ type GalleryPartial struct {
TagIDs *UpdateIDs
PerformerIDs *UpdateIDs
PrimaryFileID *FileID
+
+ CustomFields CustomFieldsInput
}
func NewGalleryPartial() GalleryPartial {
diff --git a/pkg/models/model_group.go b/pkg/models/model_group.go
index 82c71996a..5bfb42c44 100644
--- a/pkg/models/model_group.go
+++ b/pkg/models/model_group.go
@@ -34,6 +34,14 @@ func NewGroup() Group {
}
}
+type CreateGroupInput struct {
+ *Group
+
+ CustomFields map[string]interface{} `json:"custom_fields"`
+ FrontImageData []byte
+ BackImageData []byte
+}
+
func (m *Group) LoadURLs(ctx context.Context, l URLLoader) error {
return m.URLs.load(func() ([]string, error) {
return l.GetURLs(ctx, m.ID)
@@ -74,6 +82,8 @@ type GroupPartial struct {
SubGroups *UpdateGroupDescriptions
CreatedAt OptionalTime
UpdatedAt OptionalTime
+
+ CustomFields CustomFieldsInput
}
func NewGroupPartial() GroupPartial {
diff --git a/pkg/models/model_image.go b/pkg/models/model_image.go
index 1d0993536..72ca61826 100644
--- a/pkg/models/model_image.go
+++ b/pkg/models/model_image.go
@@ -47,6 +47,13 @@ func NewImage() Image {
}
}
+type CreateImageInput struct {
+ *Image
+
+ FileIDs []FileID
+ CustomFields map[string]interface{} `json:"custom_fields"`
+}
+
type ImagePartial struct {
Title OptionalString
Code OptionalString
@@ -66,6 +73,7 @@ type ImagePartial struct {
TagIDs *UpdateIDs
PerformerIDs *UpdateIDs
PrimaryFileID *FileID
+ CustomFields CustomFieldsInput
}
func NewImagePartial() ImagePartial {
diff --git a/pkg/models/model_performer.go b/pkg/models/model_performer.go
index a30eafa0a..7bc3b3174 100644
--- a/pkg/models/model_performer.go
+++ b/pkg/models/model_performer.go
@@ -6,26 +6,26 @@ import (
)
type Performer struct {
- ID int `json:"id"`
- Name string `json:"name"`
- Disambiguation string `json:"disambiguation"`
- Gender *GenderEnum `json:"gender"`
- Birthdate *Date `json:"birthdate"`
- Ethnicity string `json:"ethnicity"`
- Country string `json:"country"`
- EyeColor string `json:"eye_color"`
- Height *int `json:"height"`
- Measurements string `json:"measurements"`
- FakeTits string `json:"fake_tits"`
- PenisLength *float64 `json:"penis_length"`
- Circumcised *CircumisedEnum `json:"circumcised"`
- CareerStart *int `json:"career_start"`
- CareerEnd *int `json:"career_end"`
- Tattoos string `json:"tattoos"`
- Piercings string `json:"piercings"`
- Favorite bool `json:"favorite"`
- CreatedAt time.Time `json:"created_at"`
- UpdatedAt time.Time `json:"updated_at"`
+ ID int `json:"id"`
+ Name string `json:"name"`
+ Disambiguation string `json:"disambiguation"`
+ Gender *GenderEnum `json:"gender"`
+ Birthdate *Date `json:"birthdate"`
+ Ethnicity string `json:"ethnicity"`
+ Country string `json:"country"`
+ EyeColor string `json:"eye_color"`
+ Height *int `json:"height"`
+ Measurements string `json:"measurements"`
+ FakeTits string `json:"fake_tits"`
+ PenisLength *float64 `json:"penis_length"`
+ Circumcised *CircumcisedEnum `json:"circumcised"`
+ CareerStart *Date `json:"career_start"`
+ CareerEnd *Date `json:"career_end"`
+ Tattoos string `json:"tattoos"`
+ Piercings string `json:"piercings"`
+ Favorite bool `json:"favorite"`
+ CreatedAt time.Time `json:"created_at"`
+ UpdatedAt time.Time `json:"updated_at"`
// Rating expressed in 1-100 scale
Rating *int `json:"rating"`
Details string `json:"details"`
@@ -76,8 +76,8 @@ type PerformerPartial struct {
FakeTits OptionalString
PenisLength OptionalFloat64
Circumcised OptionalString
- CareerStart OptionalInt
- CareerEnd OptionalInt
+ CareerStart OptionalDate
+ CareerEnd OptionalDate
Tattoos OptionalString
Piercings OptionalString
Favorite OptionalBool
diff --git a/pkg/models/model_scraped_item.go b/pkg/models/model_scraped_item.go
index 3c0e083c1..d20fbd589 100644
--- a/pkg/models/model_scraped_item.go
+++ b/pkg/models/model_scraped_item.go
@@ -177,8 +177,8 @@ type ScrapedPerformer struct {
PenisLength *string `json:"penis_length"`
Circumcised *string `json:"circumcised"`
CareerLength *string `json:"career_length"` // deprecated: use CareerStart/CareerEnd
- CareerStart *int `json:"career_start"`
- CareerEnd *int `json:"career_end"`
+ CareerStart *string `json:"career_start"`
+ CareerEnd *string `json:"career_end"`
Tattoos *string `json:"tattoos"`
Piercings *string `json:"piercings"`
Aliases *string `json:"aliases"`
@@ -225,12 +225,16 @@ func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool
// assume that career length is _not_ populated in favour of start/end
if p.CareerStart != nil && !excluded["career_start"] {
- cs := *p.CareerStart
- ret.CareerStart = &cs
+ date, err := ParseDate(*p.CareerStart)
+ if err == nil {
+ ret.CareerStart = &date
+ }
}
if p.CareerEnd != nil && !excluded["career_end"] {
- ce := *p.CareerEnd
- ret.CareerEnd = &ce
+ date, err := ParseDate(*p.CareerEnd)
+ if err == nil {
+ ret.CareerEnd = &date
+ }
}
if p.Country != nil && !excluded["country"] {
ret.Country = *p.Country
@@ -288,7 +292,7 @@ func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool
}
}
if p.Circumcised != nil && !excluded["circumcised"] {
- v := CircumisedEnum(*p.Circumcised)
+ v := CircumcisedEnum(*p.Circumcised)
if v.IsValid() {
ret.Circumcised = &v
}
@@ -367,13 +371,13 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool,
}
if p.CareerLength != nil && !excluded["career_length"] {
// parse career_length into career_start/career_end
- start, end, err := utils.ParseYearRangeString(*p.CareerLength)
+ start, end, err := ParseYearRangeString(*p.CareerLength)
if err == nil {
if start != nil {
- ret.CareerStart = NewOptionalInt(*start)
+ ret.CareerStart = NewOptionalDate(*start)
}
if end != nil {
- ret.CareerEnd = NewOptionalInt(*end)
+ ret.CareerEnd = NewOptionalDate(*end)
}
}
}
@@ -471,9 +475,12 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool,
type ScrapedTag struct {
// Set if tag matched
- StoredID *string `json:"stored_id"`
- Name string `json:"name"`
- RemoteSiteID *string `json:"remote_site_id"`
+ StoredID *string `json:"stored_id"`
+ Name string `json:"name"`
+ Description *string `json:"description"`
+ AliasList []string `json:"alias_list"`
+ RemoteSiteID *string `json:"remote_site_id"`
+ Parent *ScrapedTag `json:"parent"`
}
func (ScrapedTag) IsScrapedContent() {}
@@ -482,6 +489,24 @@ func (t *ScrapedTag) ToTag(endpoint string, excluded map[string]bool) *Tag {
currentTime := time.Now()
ret := NewTag()
ret.Name = t.Name
+ ret.ParentIDs = NewRelatedIDs([]int{})
+ ret.ChildIDs = NewRelatedIDs([]int{})
+ ret.Aliases = NewRelatedStrings([]string{})
+
+ if t.Description != nil && !excluded["description"] {
+ ret.Description = *t.Description
+ }
+
+ if len(t.AliasList) > 0 && !excluded["aliases"] {
+ ret.Aliases = NewRelatedStrings(t.AliasList)
+ }
+
+ if t.Parent != nil && t.Parent.StoredID != nil {
+ parentID, err := strconv.Atoi(*t.Parent.StoredID)
+ if err == nil && parentID > 0 {
+ ret.ParentIDs = NewRelatedIDs([]int{parentID})
+ }
+ }
if t.RemoteSiteID != nil && endpoint != "" && *t.RemoteSiteID != "" {
ret.StashIDs = NewRelatedStashIDs([]StashID{
@@ -496,6 +521,49 @@ func (t *ScrapedTag) ToTag(endpoint string, excluded map[string]bool) *Tag {
return &ret
}
+func (t *ScrapedTag) ToPartial(storedID string, endpoint string, excluded map[string]bool, existingStashIDs []StashID) TagPartial {
+ ret := NewTagPartial()
+
+ if t.Name != "" && !excluded["name"] {
+ ret.Name = NewOptionalString(t.Name)
+ }
+
+ if t.Description != nil && !excluded["description"] {
+ ret.Description = NewOptionalString(*t.Description)
+ }
+
+ if len(t.AliasList) > 0 && !excluded["aliases"] {
+ ret.Aliases = &UpdateStrings{
+ Values: t.AliasList,
+ Mode: RelationshipUpdateModeSet,
+ }
+ }
+
+ if t.Parent != nil && t.Parent.StoredID != nil {
+ parentID, err := strconv.Atoi(*t.Parent.StoredID)
+ if err == nil && parentID > 0 {
+ ret.ParentIDs = &UpdateIDs{
+ IDs: []int{parentID},
+ Mode: RelationshipUpdateModeAdd,
+ }
+ }
+ }
+
+ if t.RemoteSiteID != nil && endpoint != "" && *t.RemoteSiteID != "" {
+ ret.StashIDs = &UpdateStashIDs{
+ StashIDs: existingStashIDs,
+ Mode: RelationshipUpdateModeSet,
+ }
+ ret.StashIDs.Set(StashID{
+ Endpoint: endpoint,
+ StashID: *t.RemoteSiteID,
+ UpdatedAt: time.Now(),
+ })
+ }
+
+ return ret
+}
+
func ScrapedTagSortFunction(a, b *ScrapedTag) int {
return strings.Compare(strings.ToLower(a.Name), strings.ToLower(b.Name))
}
diff --git a/pkg/models/model_scraped_item_test.go b/pkg/models/model_scraped_item_test.go
index 09d8fbb32..1956d8a0b 100644
--- a/pkg/models/model_scraped_item_test.go
+++ b/pkg/models/model_scraped_item_test.go
@@ -8,8 +8,6 @@ import (
"github.com/stretchr/testify/assert"
)
-func intPtr(i int) *int { return &i }
-
func Test_scrapedToStudioInput(t *testing.T) {
const name = "name"
url := "url"
@@ -186,8 +184,8 @@ func Test_scrapedToPerformerInput(t *testing.T) {
Weight: nextVal(),
Measurements: nextVal(),
FakeTits: nextVal(),
- CareerStart: intPtr(2005),
- CareerEnd: intPtr(2015),
+ CareerStart: dateStrFromInt(2005),
+ CareerEnd: dateStrFromInt(2015),
Tattoos: nextVal(),
Piercings: nextVal(),
Aliases: nextVal(),
@@ -212,8 +210,8 @@ func Test_scrapedToPerformerInput(t *testing.T) {
Weight: nextIntVal(),
Measurements: *nextVal(),
FakeTits: *nextVal(),
- CareerStart: intPtr(2005),
- CareerEnd: intPtr(2015),
+ CareerStart: dateFromInt(2005),
+ CareerEnd: dateFromInt(2015),
Tattoos: *nextVal(), // skip CareerLength counter slot
Piercings: *nextVal(),
Aliases: NewRelatedStrings([]string{*nextVal()}),
diff --git a/pkg/models/performer.go b/pkg/models/performer.go
index e4fb8dd98..606b87f9f 100644
--- a/pkg/models/performer.go
+++ b/pkg/models/performer.go
@@ -61,49 +61,49 @@ type GenderCriterionInput struct {
Modifier CriterionModifier `json:"modifier"`
}
-type CircumisedEnum string
+type CircumcisedEnum string
const (
- CircumisedEnumCut CircumisedEnum = "CUT"
- CircumisedEnumUncut CircumisedEnum = "UNCUT"
+ CircumcisedEnumCut CircumcisedEnum = "CUT"
+ CircumcisedEnumUncut CircumcisedEnum = "UNCUT"
)
-var AllCircumcisionEnum = []CircumisedEnum{
- CircumisedEnumCut,
- CircumisedEnumUncut,
+var AllCircumcisionEnum = []CircumcisedEnum{
+ CircumcisedEnumCut,
+ CircumcisedEnumUncut,
}
-func (e CircumisedEnum) IsValid() bool {
+func (e CircumcisedEnum) IsValid() bool {
switch e {
- case CircumisedEnumCut, CircumisedEnumUncut:
+ case CircumcisedEnumCut, CircumcisedEnumUncut:
return true
}
return false
}
-func (e CircumisedEnum) String() string {
+func (e CircumcisedEnum) String() string {
return string(e)
}
-func (e *CircumisedEnum) UnmarshalGQL(v interface{}) error {
+func (e *CircumcisedEnum) UnmarshalGQL(v interface{}) error {
str, ok := v.(string)
if !ok {
return fmt.Errorf("enums must be strings")
}
- *e = CircumisedEnum(str)
+ *e = CircumcisedEnum(str)
if !e.IsValid() {
- return fmt.Errorf("%s is not a valid CircumisedEnum", str)
+ return fmt.Errorf("%s is not a valid CircumcisedEnum", str)
}
return nil
}
-func (e CircumisedEnum) MarshalGQL(w io.Writer) {
+func (e CircumcisedEnum) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String()))
}
type CircumcisionCriterionInput struct {
- Value []CircumisedEnum `json:"value"`
+ Value []CircumcisedEnum `json:"value"`
Modifier CriterionModifier `json:"modifier"`
}
@@ -139,9 +139,9 @@ type PerformerFilterType struct {
// Filter by career length
CareerLength *StringCriterionInput `json:"career_length"` // deprecated
// Filter by career start year
- CareerStart *IntCriterionInput `json:"career_start"`
+ CareerStart *DateCriterionInput `json:"career_start"`
// Filter by career end year
- CareerEnd *IntCriterionInput `json:"career_end"`
+ CareerEnd *DateCriterionInput `json:"career_end"`
// Filter by tattoos
Tattoos *StringCriterionInput `json:"tattoos"`
// Filter by piercings
@@ -158,6 +158,8 @@ type PerformerFilterType struct {
TagCount *IntCriterionInput `json:"tag_count"`
// Filter by scene count
SceneCount *IntCriterionInput `json:"scene_count"`
+ // Filter by scene marker count (via scene)
+ MarkerCount *IntCriterionInput `json:"marker_count"`
// Filter by image count
ImageCount *IntCriterionInput `json:"image_count"`
// Filter by gallery count
@@ -202,6 +204,8 @@ type PerformerFilterType struct {
GalleriesFilter *GalleryFilterType `json:"galleries_filter"`
// Filter by related tags that meet this criteria
TagsFilter *TagFilterType `json:"tags_filter"`
+ // Filter by related scene markers (via scene) that meet this criteria
+ MarkersFilter *SceneMarkerFilterType `json:"markers_filter"`
// Filter by created at
CreatedAt *TimestampCriterionInput `json:"created_at"`
// Filter by updated at
@@ -212,32 +216,32 @@ type PerformerFilterType struct {
}
type PerformerCreateInput struct {
- Name string `json:"name"`
- Disambiguation *string `json:"disambiguation"`
- URL *string `json:"url"` // deprecated
- Urls []string `json:"urls"`
- Gender *GenderEnum `json:"gender"`
- Birthdate *string `json:"birthdate"`
- Ethnicity *string `json:"ethnicity"`
- Country *string `json:"country"`
- EyeColor *string `json:"eye_color"`
- Height *string `json:"height"`
- HeightCm *int `json:"height_cm"`
- Measurements *string `json:"measurements"`
- FakeTits *string `json:"fake_tits"`
- PenisLength *float64 `json:"penis_length"`
- Circumcised *CircumisedEnum `json:"circumcised"`
- CareerLength *string `json:"career_length"`
- CareerStart *int `json:"career_start"`
- CareerEnd *int `json:"career_end"`
- Tattoos *string `json:"tattoos"`
- Piercings *string `json:"piercings"`
- Aliases *string `json:"aliases"`
- AliasList []string `json:"alias_list"`
- Twitter *string `json:"twitter"` // deprecated
- Instagram *string `json:"instagram"` // deprecated
- Favorite *bool `json:"favorite"`
- TagIds []string `json:"tag_ids"`
+ Name string `json:"name"`
+ Disambiguation *string `json:"disambiguation"`
+ URL *string `json:"url"` // deprecated
+ Urls []string `json:"urls"`
+ Gender *GenderEnum `json:"gender"`
+ Birthdate *string `json:"birthdate"`
+ Ethnicity *string `json:"ethnicity"`
+ Country *string `json:"country"`
+ EyeColor *string `json:"eye_color"`
+ Height *string `json:"height"`
+ HeightCm *int `json:"height_cm"`
+ Measurements *string `json:"measurements"`
+ FakeTits *string `json:"fake_tits"`
+ PenisLength *float64 `json:"penis_length"`
+ Circumcised *CircumcisedEnum `json:"circumcised"`
+ CareerLength *string `json:"career_length"`
+ CareerStart *string `json:"career_start"`
+ CareerEnd *string `json:"career_end"`
+ Tattoos *string `json:"tattoos"`
+ Piercings *string `json:"piercings"`
+ Aliases *string `json:"aliases"`
+ AliasList []string `json:"alias_list"`
+ Twitter *string `json:"twitter"` // deprecated
+ Instagram *string `json:"instagram"` // deprecated
+ Favorite *bool `json:"favorite"`
+ TagIds []string `json:"tag_ids"`
// This should be a URL or a base64 encoded data URL
Image *string `json:"image"`
StashIds []StashIDInput `json:"stash_ids"`
@@ -252,33 +256,33 @@ type PerformerCreateInput struct {
}
type PerformerUpdateInput struct {
- ID string `json:"id"`
- Name *string `json:"name"`
- Disambiguation *string `json:"disambiguation"`
- URL *string `json:"url"` // deprecated
- Urls []string `json:"urls"`
- Gender *GenderEnum `json:"gender"`
- Birthdate *string `json:"birthdate"`
- Ethnicity *string `json:"ethnicity"`
- Country *string `json:"country"`
- EyeColor *string `json:"eye_color"`
- Height *string `json:"height"`
- HeightCm *int `json:"height_cm"`
- Measurements *string `json:"measurements"`
- FakeTits *string `json:"fake_tits"`
- PenisLength *float64 `json:"penis_length"`
- Circumcised *CircumisedEnum `json:"circumcised"`
- CareerLength *string `json:"career_length"`
- CareerStart *int `json:"career_start"`
- CareerEnd *int `json:"career_end"`
- Tattoos *string `json:"tattoos"`
- Piercings *string `json:"piercings"`
- Aliases *string `json:"aliases"`
- AliasList []string `json:"alias_list"`
- Twitter *string `json:"twitter"` // deprecated
- Instagram *string `json:"instagram"` // deprecated
- Favorite *bool `json:"favorite"`
- TagIds []string `json:"tag_ids"`
+ ID string `json:"id"`
+ Name *string `json:"name"`
+ Disambiguation *string `json:"disambiguation"`
+ URL *string `json:"url"` // deprecated
+ Urls []string `json:"urls"`
+ Gender *GenderEnum `json:"gender"`
+ Birthdate *string `json:"birthdate"`
+ Ethnicity *string `json:"ethnicity"`
+ Country *string `json:"country"`
+ EyeColor *string `json:"eye_color"`
+ Height *string `json:"height"`
+ HeightCm *int `json:"height_cm"`
+ Measurements *string `json:"measurements"`
+ FakeTits *string `json:"fake_tits"`
+ PenisLength *float64 `json:"penis_length"`
+ Circumcised *CircumcisedEnum `json:"circumcised"`
+ CareerLength *string `json:"career_length"`
+ CareerStart *string `json:"career_start"`
+ CareerEnd *string `json:"career_end"`
+ Tattoos *string `json:"tattoos"`
+ Piercings *string `json:"piercings"`
+ Aliases *string `json:"aliases"`
+ AliasList []string `json:"alias_list"`
+ Twitter *string `json:"twitter"` // deprecated
+ Instagram *string `json:"instagram"` // deprecated
+ Favorite *bool `json:"favorite"`
+ TagIds []string `json:"tag_ids"`
// This should be a URL or a base64 encoded data URL
Image *string `json:"image"`
StashIds []StashIDInput `json:"stash_ids"`
diff --git a/pkg/models/repository_file.go b/pkg/models/repository_file.go
index c851ce08c..e1ac0b213 100644
--- a/pkg/models/repository_file.go
+++ b/pkg/models/repository_file.go
@@ -14,7 +14,7 @@ type FileGetter interface {
type FileFinder interface {
FileGetter
FindAllByPath(ctx context.Context, path string, caseSensitive bool) ([]File, error)
- FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]File, error)
+ FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit, offset int) ([]File, error)
FindByPath(ctx context.Context, path string, caseSensitive bool) (File, error)
FindByFingerprint(ctx context.Context, fp Fingerprint) ([]File, error)
FindByZipFileID(ctx context.Context, zipFileID FileID) ([]File, error)
diff --git a/pkg/models/repository_folder.go b/pkg/models/repository_folder.go
index 3d0fdb822..1169e53ac 100644
--- a/pkg/models/repository_folder.go
+++ b/pkg/models/repository_folder.go
@@ -11,10 +11,12 @@ type FolderGetter interface {
// FolderFinder provides methods to find folders.
type FolderFinder interface {
FolderGetter
- FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*Folder, error)
+ FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit, offset int) ([]*Folder, error)
FindByPath(ctx context.Context, path string, caseSensitive bool) (*Folder, error)
FindByZipFileID(ctx context.Context, zipFileID FileID) ([]*Folder, error)
FindByParentFolderID(ctx context.Context, parentFolderID FolderID) ([]*Folder, error)
+ GetManyParentFolderIDs(ctx context.Context, folderIDs []FolderID) ([][]FolderID, error)
+ GetManySubFolderIDs(ctx context.Context, folderIDs []FolderID) ([][]FolderID, error)
}
type FolderQueryer interface {
diff --git a/pkg/models/repository_gallery.go b/pkg/models/repository_gallery.go
index 0cfb9964f..8fc3b29d5 100644
--- a/pkg/models/repository_gallery.go
+++ b/pkg/models/repository_gallery.go
@@ -37,12 +37,12 @@ type GalleryCounter interface {
// GalleryCreator provides methods to create galleries.
type GalleryCreator interface {
- Create(ctx context.Context, newGallery *Gallery, fileIDs []FileID) error
+ Create(ctx context.Context, newGallery *CreateGalleryInput) error
}
// GalleryUpdater provides methods to update galleries.
type GalleryUpdater interface {
- Update(ctx context.Context, updatedGallery *Gallery) error
+ Update(ctx context.Context, updatedGallery *UpdateGalleryInput) error
UpdatePartial(ctx context.Context, id int, updatedGallery GalleryPartial) (*Gallery, error)
UpdateImages(ctx context.Context, galleryID int, imageIDs []int) error
}
@@ -70,6 +70,7 @@ type GalleryReader interface {
PerformerIDLoader
TagIDLoader
FileLoader
+ CustomFieldsReader
All(ctx context.Context) ([]*Gallery, error)
}
@@ -80,6 +81,9 @@ type GalleryWriter interface {
GalleryUpdater
GalleryDestroyer
+ CustomFieldsWriter
+
+ AddSceneIDs(ctx context.Context, galleryID int, sceneIDs []int) error
AddFileID(ctx context.Context, id int, fileID FileID) error
AddImages(ctx context.Context, galleryID int, imageIDs ...int) error
RemoveImages(ctx context.Context, galleryID int, imageIDs ...int) error
diff --git a/pkg/models/repository_group.go b/pkg/models/repository_group.go
index 704390d77..d7f74de64 100644
--- a/pkg/models/repository_group.go
+++ b/pkg/models/repository_group.go
@@ -68,6 +68,7 @@ type GroupReader interface {
TagIDLoader
ContainingGroupLoader
SubGroupLoader
+ CustomFieldsReader
All(ctx context.Context) ([]*Group, error)
GetFrontImage(ctx context.Context, groupID int) ([]byte, error)
@@ -81,6 +82,7 @@ type GroupWriter interface {
GroupCreator
GroupUpdater
GroupDestroyer
+ CustomFieldsWriter
}
// GroupReaderWriter provides all group methods.
diff --git a/pkg/models/repository_image.go b/pkg/models/repository_image.go
index 672ecd063..99dab3479 100644
--- a/pkg/models/repository_image.go
+++ b/pkg/models/repository_image.go
@@ -43,7 +43,7 @@ type ImageCounter interface {
// ImageCreator provides methods to create images.
type ImageCreator interface {
- Create(ctx context.Context, newImage *Image, fileIDs []FileID) error
+ Create(ctx context.Context, newImage *CreateImageInput) error
}
// ImageUpdater provides methods to update images.
@@ -78,6 +78,7 @@ type ImageReader interface {
FileLoader
GalleryCoverFinder
+ CustomFieldsReader
All(ctx context.Context) ([]*Image, error)
Size(ctx context.Context) (float64, error)
@@ -88,6 +89,7 @@ type ImageWriter interface {
ImageCreator
ImageUpdater
ImageDestroyer
+ CustomFieldsWriter
AddFileID(ctx context.Context, id int, fileID FileID) error
RemoveFileID(ctx context.Context, id int, fileID FileID) error
diff --git a/pkg/models/repository_tag.go b/pkg/models/repository_tag.go
index ba403cf2d..bd2ab2592 100644
--- a/pkg/models/repository_tag.go
+++ b/pkg/models/repository_tag.go
@@ -9,9 +9,16 @@ type TagGetter interface {
Find(ctx context.Context, id int) (*Tag, error)
}
+type TagNameFinder interface {
+ FindByName(ctx context.Context, name string, nocase bool) (*Tag, error)
+ FindByNames(ctx context.Context, names []string, nocase bool) ([]*Tag, error)
+ FindByAlias(ctx context.Context, alias string, nocase bool) (*Tag, error)
+}
+
// TagFinder provides methods to find tags.
type TagFinder interface {
TagGetter
+ TagNameFinder
FindAllAncestors(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error)
FindAllDescendants(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error)
FindByParentTagID(ctx context.Context, parentID int) ([]*Tag, error)
@@ -23,9 +30,8 @@ type TagFinder interface {
FindByGroupID(ctx context.Context, groupID int) ([]*Tag, error)
FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*Tag, error)
FindByStudioID(ctx context.Context, studioID int) ([]*Tag, error)
- FindByName(ctx context.Context, name string, nocase bool) (*Tag, error)
- FindByNames(ctx context.Context, names []string, nocase bool) ([]*Tag, error)
FindByStashID(ctx context.Context, stashID StashID) ([]*Tag, error)
+ FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*Tag, error)
}
// TagQueryer provides methods to query tags.
diff --git a/pkg/models/tag.go b/pkg/models/tag.go
index 3a133dcad..b166e5a69 100644
--- a/pkg/models/tag.go
+++ b/pkg/models/tag.go
@@ -56,6 +56,8 @@ type TagFilterType struct {
PerformersFilter *PerformerFilterType `json:"performers_filter"`
// Filter by related studios that meet this criteria
StudiosFilter *StudioFilterType `json:"studios_filter"`
+ // Filter by related scene markers that meet this criteria
+ MarkersFilter *SceneMarkerFilterType `json:"markers_filter"`
// Filter by created at
CreatedAt *TimestampCriterionInput `json:"created_at"`
// Filter by updated at
diff --git a/pkg/performer/export.go b/pkg/performer/export.go
index 691175b1f..d7807f651 100644
--- a/pkg/performer/export.go
+++ b/pkg/performer/export.go
@@ -71,10 +71,10 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode
}
if performer.CareerStart != nil {
- newPerformerJSON.CareerStart = performer.CareerStart
+ newPerformerJSON.CareerStart = performer.CareerStart.String()
}
if performer.CareerEnd != nil {
- newPerformerJSON.CareerEnd = performer.CareerEnd
+ newPerformerJSON.CareerEnd = performer.CareerEnd.String()
}
if err := performer.LoadAliases(ctx, reader); err != nil {
diff --git a/pkg/performer/export_test.go b/pkg/performer/export_test.go
index 1a87bc2b1..2cf476321 100644
--- a/pkg/performer/export_test.go
+++ b/pkg/performer/export_test.go
@@ -48,10 +48,10 @@ var (
rating = 5
height = 123
weight = 60
- careerStart = 2005
- careerEnd = 2015
+ careerStart, _ = models.ParseDate("2005")
+ careerEnd, _ = models.ParseDate("2015")
penisLength = 1.23
- circumcisedEnum = models.CircumisedEnumCut
+ circumcisedEnum = models.CircumcisedEnumCut
circumcised = circumcisedEnum.String()
emptyCustomFields = make(map[string]interface{})
@@ -134,8 +134,8 @@ func createFullJSONPerformer(name string, image string, withCustomFields bool) *
URLs: []string{url, twitter, instagram},
Aliases: aliases,
Birthdate: birthDate.String(),
- CareerStart: &careerStart,
- CareerEnd: &careerEnd,
+ CareerStart: careerStart.String(),
+ CareerEnd: careerEnd.String(),
Country: country,
Ethnicity: ethnicity,
EyeColor: eyeColor,
diff --git a/pkg/performer/import.go b/pkg/performer/import.go
index 1df69521a..62b4d87d0 100644
--- a/pkg/performer/import.go
+++ b/pkg/performer/import.go
@@ -247,7 +247,7 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) (models.Perfor
}
if performerJSON.Circumcised != "" {
- v := models.CircumisedEnum(performerJSON.Circumcised)
+ v := models.CircumcisedEnum(performerJSON.Circumcised)
newPerformer.Circumcised = &v
}
@@ -285,11 +285,17 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) (models.Perfor
}
// prefer explicit career_start/career_end, fall back to parsing legacy career_length
- if performerJSON.CareerStart != nil || performerJSON.CareerEnd != nil {
- newPerformer.CareerStart = performerJSON.CareerStart
- newPerformer.CareerEnd = performerJSON.CareerEnd
+ if performerJSON.CareerStart != "" || performerJSON.CareerEnd != "" {
+ careerStart, err := models.ParseDate(performerJSON.CareerStart)
+ if err == nil {
+ newPerformer.CareerStart = &careerStart
+ }
+ careerEnd, err := models.ParseDate(performerJSON.CareerEnd)
+ if err == nil {
+ newPerformer.CareerEnd = &careerEnd
+ }
} else if performerJSON.CareerLength != "" {
- start, end, err := utils.ParseYearRangeString(performerJSON.CareerLength)
+ start, end, err := models.ParseYearRangeString(performerJSON.CareerLength)
if err != nil {
return models.Performer{}, fmt.Errorf("invalid career_length %q: %w", performerJSON.CareerLength, err)
}
diff --git a/pkg/performer/import_test.go b/pkg/performer/import_test.go
index ca28c1990..0d5f80d01 100644
--- a/pkg/performer/import_test.go
+++ b/pkg/performer/import_test.go
@@ -317,15 +317,15 @@ func TestUpdate(t *testing.T) {
}
func TestImportCareerFields(t *testing.T) {
- startYear := 2005
- endYear := 2015
+ startYear, _ := models.ParseDate("2005")
+ endYear, _ := models.ParseDate("2015")
// explicit career_start/career_end should be used directly
t.Run("explicit fields", func(t *testing.T) {
input := jsonschema.Performer{
Name: "test",
- CareerStart: &startYear,
- CareerEnd: &endYear,
+ CareerStart: startYear.String(),
+ CareerEnd: endYear.String(),
}
p, err := performerJSONToPerformer(input)
@@ -338,8 +338,8 @@ func TestImportCareerFields(t *testing.T) {
t.Run("explicit fields override legacy", func(t *testing.T) {
input := jsonschema.Performer{
Name: "test",
- CareerStart: &startYear,
- CareerEnd: &endYear,
+ CareerStart: startYear.String(),
+ CareerEnd: endYear.String(),
CareerLength: "1990 - 1995",
}
diff --git a/pkg/pkg/cache.go b/pkg/pkg/cache.go
index 9d36bdd1d..e94b2cb41 100644
--- a/pkg/pkg/cache.go
+++ b/pkg/pkg/cache.go
@@ -1,6 +1,7 @@
package pkg
import (
+ "sync"
"time"
)
@@ -10,22 +11,23 @@ type cacheEntry struct {
}
type repositoryCache struct {
+ mu sync.RWMutex
// cache maps the URL to the last modified time and the data
cache map[string]cacheEntry
}
-func (c *repositoryCache) ensureCache() {
- if c.cache == nil {
- c.cache = make(map[string]cacheEntry)
- }
-}
-
func (c *repositoryCache) lastModified(url string) *time.Time {
if c == nil {
return nil
}
- c.ensureCache()
+ c.mu.RLock()
+ defer c.mu.RUnlock()
+
+ if c.cache == nil {
+ return nil
+ }
+
e, found := c.cache[url]
if !found {
@@ -36,7 +38,13 @@ func (c *repositoryCache) lastModified(url string) *time.Time {
}
func (c *repositoryCache) getPackageList(url string) []RemotePackage {
- c.ensureCache()
+ c.mu.RLock()
+ defer c.mu.RUnlock()
+
+ if c.cache == nil {
+ return nil
+ }
+
e, found := c.cache[url]
if !found {
@@ -51,7 +59,13 @@ func (c *repositoryCache) cacheList(url string, lastModified time.Time, data []R
return
}
- c.ensureCache()
+ c.mu.Lock()
+ defer c.mu.Unlock()
+
+ if c.cache == nil {
+ c.cache = make(map[string]cacheEntry)
+ }
+
c.cache[url] = cacheEntry{
lastModified: lastModified,
data: data,
diff --git a/pkg/pkg/manager.go b/pkg/pkg/manager.go
index 18fa4e0d1..4024191ad 100644
--- a/pkg/pkg/manager.go
+++ b/pkg/pkg/manager.go
@@ -10,6 +10,7 @@ import (
"net/http"
"net/url"
"path/filepath"
+ "sync"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
@@ -31,13 +32,14 @@ type Manager struct {
Client *http.Client
- cache *repositoryCache
+ cacheOnce sync.Once
+ cache *repositoryCache
}
func (m *Manager) getCache() *repositoryCache {
- if m.cache == nil {
+ m.cacheOnce.Do(func() {
m.cache = &repositoryCache{}
- }
+ })
return m.cache
}
diff --git a/pkg/scene/filename_parser.go b/pkg/scene/filename_parser.go
index b8dff89d7..1ce6e7b4a 100644
--- a/pkg/scene/filename_parser.go
+++ b/pkg/scene/filename_parser.go
@@ -456,7 +456,7 @@ type FilenameParserRepository struct {
Performer PerformerNamesFinder
Studio models.StudioQueryer
Group GroupNameFinder
- Tag models.TagQueryer
+ Tag models.TagNameFinder
}
func NewFilenameParserRepository(repo models.Repository) FilenameParserRepository {
@@ -599,7 +599,7 @@ func (p *FilenameParser) queryGroup(ctx context.Context, qb GroupNameFinder, gro
return ret
}
-func (p *FilenameParser) queryTag(ctx context.Context, qb models.TagQueryer, tagName string) *models.Tag {
+func (p *FilenameParser) queryTag(ctx context.Context, qb models.TagNameFinder, tagName string) *models.Tag {
// massage the tag name
tagName = delimiterRE.ReplaceAllString(tagName, " ")
@@ -638,7 +638,7 @@ func (p *FilenameParser) setPerformers(ctx context.Context, qb PerformerNamesFin
}
}
-func (p *FilenameParser) setTags(ctx context.Context, qb models.TagQueryer, h sceneHolder, result *models.SceneParserResult) {
+func (p *FilenameParser) setTags(ctx context.Context, qb models.TagNameFinder, h sceneHolder, result *models.SceneParserResult) {
// query for each performer
tagsSet := make(map[int]bool)
for _, tagName := range h.tags {
diff --git a/pkg/scene/generate/preview.go b/pkg/scene/generate/preview.go
index ceefd617c..a0fea4994 100644
--- a/pkg/scene/generate/preview.go
+++ b/pkg/scene/generate/preview.go
@@ -232,7 +232,7 @@ func (g Generator) generateConcatFile(chunkFiles []string) (fn string, err error
for _, f := range chunkFiles {
// files in concat file should be relative to concat
relFile := filepath.Base(f)
- if _, err := w.WriteString(fmt.Sprintf("file '%s'\n", relFile)); err != nil {
+ if _, err := fmt.Fprintf(w, "file '%s'\n", relFile); err != nil {
return concatFile.Name(), fmt.Errorf("writing concat file: %w", err)
}
}
diff --git a/pkg/scene/scan.go b/pkg/scene/scan.go
index e1038fbc3..8d2944a36 100644
--- a/pkg/scene/scan.go
+++ b/pkg/scene/scan.go
@@ -4,6 +4,8 @@ import (
"context"
"errors"
"fmt"
+ "path/filepath"
+ "strings"
"github.com/stashapp/stash/pkg/file/video"
"github.com/stashapp/stash/pkg/logger"
@@ -32,12 +34,18 @@ type ScanCreatorUpdater interface {
AddFileID(ctx context.Context, id int, fileID models.FileID) error
}
+type ScanGalleryFinderUpdater interface {
+ FindByPath(ctx context.Context, p string) ([]*models.Gallery, error)
+ AddSceneIDs(ctx context.Context, galleryID int, sceneIDs []int) error
+}
+
type ScanGenerator interface {
Generate(ctx context.Context, s *models.Scene, f *models.VideoFile) error
}
type ScanHandler struct {
- CreatorUpdater ScanCreatorUpdater
+ CreatorUpdater ScanCreatorUpdater
+ GalleryFinderUpdater ScanGalleryFinderUpdater
ScanGenerator ScanGenerator
CaptionUpdater video.CaptionUpdater
@@ -49,19 +57,19 @@ type ScanHandler struct {
func (h *ScanHandler) validate() error {
if h.CreatorUpdater == nil {
- return errors.New("CreatorUpdater is required")
+ return errors.New("internal error: CreatorUpdater is required")
}
if h.ScanGenerator == nil {
- return errors.New("ScanGenerator is required")
+ return errors.New("internal error: ScanGenerator is required")
}
if h.CaptionUpdater == nil {
- return errors.New("CaptionUpdater is required")
+ return errors.New("internal error: CaptionUpdater is required")
}
if !h.FileNamingAlgorithm.IsValid() {
- return errors.New("FileNamingAlgorithm is required")
+ return errors.New("internal error: FileNamingAlgorithm is required")
}
if h.Paths == nil {
- return errors.New("Paths is required")
+ return errors.New("internal error: Paths is required")
}
return nil
@@ -127,6 +135,10 @@ func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models.
}
}
+ if err := h.associateGallery(ctx, existing, f); err != nil {
+ return err
+ }
+
// do this after the commit so that cover generation doesn't hold up the transaction
txn.AddPostCommitHook(ctx, func(ctx context.Context) {
for _, s := range existing {
@@ -160,18 +172,44 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.
if err := h.CreatorUpdater.AddFileID(ctx, s.ID, f.ID); err != nil {
return fmt.Errorf("adding file to scene: %w", err)
}
+ }
- // update updated_at time
+ if !found || updateExisting {
+ // update updated_at time when file association or content changes
scenePartial := models.NewScenePartial()
if _, err := h.CreatorUpdater.UpdatePartial(ctx, s.ID, scenePartial); err != nil {
return fmt.Errorf("updating scene: %w", err)
}
- }
- if !found || updateExisting {
h.PluginCache.RegisterPostHooks(ctx, s.ID, hook.SceneUpdatePost, nil, nil)
}
}
return nil
}
+
+func (h *ScanHandler) associateGallery(ctx context.Context, existing []*models.Scene, f models.File) error {
+ sceneIDs := make([]int, len(existing))
+ for i, s := range existing {
+ sceneIDs[i] = s.ID
+ }
+
+ path := f.Base().Path
+ zipPath := strings.TrimSuffix(path, filepath.Ext(path)) + ".zip"
+
+ // find galleries with a file that matches
+ galleries, err := h.GalleryFinderUpdater.FindByPath(ctx, zipPath)
+ if err != nil {
+ return err
+ }
+
+ for _, gallery := range galleries {
+ // found related Scene
+ logger.Infof("associate: Scene %s is related to gallery: %d", path, gallery.ID)
+ if err := h.GalleryFinderUpdater.AddSceneIDs(ctx, gallery.ID, sceneIDs); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
diff --git a/pkg/scene/scan_test.go b/pkg/scene/scan_test.go
new file mode 100644
index 000000000..71729bb57
--- /dev/null
+++ b/pkg/scene/scan_test.go
@@ -0,0 +1,114 @@
+package scene
+
+import (
+ "context"
+ "testing"
+
+ "github.com/stashapp/stash/pkg/models"
+ "github.com/stashapp/stash/pkg/models/mocks"
+ "github.com/stashapp/stash/pkg/plugin"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/mock"
+)
+
+func TestAssociateExisting_UpdatePartialOnContentChange(t *testing.T) {
+ const (
+ testSceneID = 1
+ testFileID = 100
+ )
+
+ existingFile := &models.VideoFile{
+ BaseFile: &models.BaseFile{ID: models.FileID(testFileID), Path: "test.mp4"},
+ }
+
+ makeScene := func() *models.Scene {
+ return &models.Scene{
+ ID: testSceneID,
+ Files: models.NewRelatedVideoFiles([]*models.VideoFile{existingFile}),
+ }
+ }
+
+ tests := []struct {
+ name string
+ updateExisting bool
+ expectUpdate bool
+ }{
+ {
+ name: "calls UpdatePartial when file content changed",
+ updateExisting: true,
+ expectUpdate: true,
+ },
+ {
+ name: "skips UpdatePartial when file unchanged and already associated",
+ updateExisting: false,
+ expectUpdate: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ db := mocks.NewDatabase()
+ db.Scene.On("GetFiles", mock.Anything, testSceneID).Return([]*models.VideoFile{existingFile}, nil)
+
+ if tt.expectUpdate {
+ db.Scene.On("UpdatePartial", mock.Anything, testSceneID, mock.Anything).
+ Return(&models.Scene{ID: testSceneID}, nil)
+ }
+
+ h := &ScanHandler{
+ CreatorUpdater: db.Scene,
+ PluginCache: &plugin.Cache{},
+ }
+
+ db.WithTxnCtx(func(ctx context.Context) {
+ err := h.associateExisting(ctx, []*models.Scene{makeScene()}, existingFile, tt.updateExisting)
+ assert.NoError(t, err)
+ })
+
+ if tt.expectUpdate {
+ db.Scene.AssertCalled(t, "UpdatePartial", mock.Anything, testSceneID, mock.Anything)
+ } else {
+ db.Scene.AssertNotCalled(t, "UpdatePartial", mock.Anything, mock.Anything, mock.Anything)
+ }
+ })
+ }
+}
+
+func TestAssociateExisting_UpdatePartialOnNewFile(t *testing.T) {
+ const (
+ testSceneID = 1
+ existFileID = 100
+ newFileID = 200
+ )
+
+ existingFile := &models.VideoFile{
+ BaseFile: &models.BaseFile{ID: models.FileID(existFileID), Path: "existing.mp4"},
+ }
+ newFile := &models.VideoFile{
+ BaseFile: &models.BaseFile{ID: models.FileID(newFileID), Path: "new.mp4"},
+ }
+
+ scene := &models.Scene{
+ ID: testSceneID,
+ Files: models.NewRelatedVideoFiles([]*models.VideoFile{existingFile}),
+ }
+
+ db := mocks.NewDatabase()
+ db.Scene.On("GetFiles", mock.Anything, testSceneID).Return([]*models.VideoFile{existingFile}, nil)
+ db.Scene.On("AddFileID", mock.Anything, testSceneID, models.FileID(newFileID)).Return(nil)
+ db.Scene.On("UpdatePartial", mock.Anything, testSceneID, mock.Anything).
+ Return(&models.Scene{ID: testSceneID}, nil)
+
+ h := &ScanHandler{
+ CreatorUpdater: db.Scene,
+ PluginCache: &plugin.Cache{},
+ }
+
+ db.WithTxnCtx(func(ctx context.Context) {
+ err := h.associateExisting(ctx, []*models.Scene{scene}, newFile, false)
+ assert.NoError(t, err)
+ })
+
+ db.Scene.AssertCalled(t, "AddFileID", mock.Anything, testSceneID, models.FileID(newFileID))
+ db.Scene.AssertCalled(t, "UpdatePartial", mock.Anything, testSceneID, mock.Anything)
+}
diff --git a/pkg/scraper/cache.go b/pkg/scraper/cache.go
index 6aeb95fcf..83a590b3e 100644
--- a/pkg/scraper/cache.go
+++ b/pkg/scraper/cache.go
@@ -70,6 +70,7 @@ type StudioFinder interface {
type TagFinder interface {
models.TagGetter
+ models.TagNameFinder
models.TagAutoTagQueryer
}
diff --git a/pkg/scraper/mapped_result.go b/pkg/scraper/mapped_result.go
index 1260f3082..64cc97ec7 100644
--- a/pkg/scraper/mapped_result.go
+++ b/pkg/scraper/mapped_result.go
@@ -140,8 +140,8 @@ func (r mappedResult) scrapedPerformer() *models.ScrapedPerformer {
PenisLength: r.stringPtr("PenisLength"),
Circumcised: r.stringPtr("Circumcised"),
CareerLength: r.stringPtr("CareerLength"),
- CareerStart: r.IntPtr("CareerStart"),
- CareerEnd: r.IntPtr("CareerEnd"),
+ CareerStart: r.stringPtr("CareerStart"),
+ CareerEnd: r.stringPtr("CareerEnd"),
Tattoos: r.stringPtr("Tattoos"),
Piercings: r.stringPtr("Piercings"),
Aliases: r.stringPtr("Aliases"),
diff --git a/pkg/scraper/performer.go b/pkg/scraper/performer.go
index 4684a6683..e05240453 100644
--- a/pkg/scraper/performer.go
+++ b/pkg/scraper/performer.go
@@ -20,8 +20,8 @@ type ScrapedPerformerInput struct {
PenisLength *string `json:"penis_length"`
Circumcised *string `json:"circumcised"`
CareerLength *string `json:"career_length"`
- CareerStart *int `json:"career_start"`
- CareerEnd *int `json:"career_end"`
+ CareerStart *string `json:"career_start"`
+ CareerEnd *string `json:"career_end"`
Tattoos *string `json:"tattoos"`
Piercings *string `json:"piercings"`
Aliases *string `json:"aliases"`
diff --git a/pkg/scraper/post_processing_test.go b/pkg/scraper/post_processing_test.go
new file mode 100644
index 000000000..2eb9385e1
--- /dev/null
+++ b/pkg/scraper/post_processing_test.go
@@ -0,0 +1,144 @@
+package scraper
+
+import (
+ "context"
+ "testing"
+
+ "github.com/stashapp/stash/pkg/models"
+)
+
+func TestPostScrapePerformerCareerLength(t *testing.T) {
+ ctx := context.Background()
+ const related = false
+
+ strPtr := func(s string) *string {
+ return &s
+ }
+
+ tests := []struct {
+ name string
+ input models.ScrapedPerformer
+ want models.ScrapedPerformer
+ }{
+ {
+ "start = 2000",
+ models.ScrapedPerformer{
+ CareerStart: strPtr("2000"),
+ },
+ models.ScrapedPerformer{
+ CareerStart: strPtr("2000"),
+ CareerLength: strPtr("2000 -"),
+ },
+ },
+ {
+ "end = 2000",
+ models.ScrapedPerformer{
+ CareerEnd: strPtr("2000"),
+ },
+ models.ScrapedPerformer{
+ CareerEnd: strPtr("2000"),
+ CareerLength: strPtr("- 2000"),
+ },
+ },
+ {
+ "start = 2000, end = 2020",
+ models.ScrapedPerformer{
+ CareerStart: strPtr("2000"),
+ CareerEnd: strPtr("2020"),
+ },
+ models.ScrapedPerformer{
+ CareerStart: strPtr("2000"),
+ CareerEnd: strPtr("2020"),
+ CareerLength: strPtr("2000 - 2020"),
+ },
+ },
+ {
+ "length = 2000 -",
+ models.ScrapedPerformer{
+ CareerLength: strPtr("2000 -"),
+ },
+ models.ScrapedPerformer{
+ CareerStart: strPtr("2000"),
+ CareerLength: strPtr("2000 -"),
+ },
+ },
+ {
+ "length = - 2010",
+ models.ScrapedPerformer{
+ CareerLength: strPtr("- 2010"),
+ },
+ models.ScrapedPerformer{
+ CareerEnd: strPtr("2010"),
+ CareerLength: strPtr("- 2010"),
+ },
+ },
+ {
+ "length = 2000 - 2010",
+ models.ScrapedPerformer{
+ CareerLength: strPtr("2000 - 2010"),
+ },
+ models.ScrapedPerformer{
+ CareerStart: strPtr("2000"),
+ CareerEnd: strPtr("2010"),
+ CareerLength: strPtr("2000 - 2010"),
+ },
+ },
+ {
+ "invalid start",
+ models.ScrapedPerformer{
+ CareerStart: strPtr("two thousand"),
+ },
+ models.ScrapedPerformer{
+ CareerStart: strPtr("two thousand"),
+ },
+ },
+ {
+ "invalid end",
+ models.ScrapedPerformer{
+ CareerEnd: strPtr("two thousand"),
+ },
+ models.ScrapedPerformer{
+ CareerEnd: strPtr("two thousand"),
+ },
+ },
+ {
+ "invalid career length",
+ models.ScrapedPerformer{
+ CareerLength: strPtr("1234 - 4567 - 9224"),
+ },
+ models.ScrapedPerformer{
+ CareerLength: strPtr("1234 - 4567 - 9224"),
+ },
+ },
+ }
+
+ compareStrPtr := func(a, b *string) bool {
+ if a == b {
+ return true
+ }
+ if a == nil || b == nil {
+ return false
+ }
+ return *a == *b
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ c := &postScraper{}
+ got, err := c.postScrapePerformer(ctx, tt.input, related)
+ if err != nil {
+ t.Fatalf("postScrapePerformer returned error: %v", err)
+ }
+ postScraped := got.(models.ScrapedPerformer)
+ if !compareStrPtr(postScraped.CareerStart, tt.want.CareerStart) {
+ t.Errorf("CareerStart = %v, want %v", postScraped.CareerStart, tt.want.CareerStart)
+ }
+ if !compareStrPtr(postScraped.CareerEnd, tt.want.CareerEnd) {
+ t.Errorf("CareerEnd = %v, want %v", postScraped.CareerEnd, tt.want.CareerEnd)
+ }
+ if !compareStrPtr(postScraped.CareerLength, tt.want.CareerLength) {
+ t.Errorf("CareerLength = %v, want %v", postScraped.CareerLength, tt.want.CareerLength)
+ }
+ })
+ }
+}
diff --git a/pkg/scraper/postprocessing.go b/pkg/scraper/postprocessing.go
index 8a4d4de7d..4b8f7e022 100644
--- a/pkg/scraper/postprocessing.go
+++ b/pkg/scraper/postprocessing.go
@@ -125,23 +125,64 @@ func (c *postScraper) postScrapePerformer(ctx context.Context, p models.ScrapedP
}
}
- isEmptyStr := func(s *string) bool { return s == nil || *s == "" }
- isEmptyInt := func(s *int) bool { return s == nil || *s == 0 }
-
- // populate career start/end from career length and vice versa
- if !isEmptyStr(p.CareerLength) && isEmptyInt(p.CareerStart) && isEmptyInt(p.CareerEnd) {
- p.CareerStart, p.CareerEnd, err = utils.ParseYearRangeString(*p.CareerLength)
- if err != nil {
- logger.Warnf("Could not parse career length %s: %v", *p.CareerLength, err)
- }
- } else if isEmptyStr(p.CareerLength) && (!isEmptyInt(p.CareerStart) || !isEmptyInt(p.CareerEnd)) {
- v := utils.FormatYearRange(p.CareerStart, p.CareerEnd)
- p.CareerLength = &v
- }
+ c.postProcessCareerLength(&p)
return p, nil
}
+func (c *postScraper) postProcessCareerLength(p *models.ScrapedPerformer) {
+ isEmptyStr := func(s *string) bool { return s == nil || *s == "" }
+
+ // populate career start/end from career length and vice versa
+ if !isEmptyStr(p.CareerLength) && isEmptyStr(p.CareerStart) && isEmptyStr(p.CareerEnd) {
+ start, end, err := models.ParseYearRangeString(*p.CareerLength)
+ if err != nil {
+ logger.Warnf("Could not parse career length %s: %v", *p.CareerLength, err)
+ return
+ }
+
+ if start != nil {
+ startStr := start.String()
+ p.CareerStart = &startStr
+ }
+ if end != nil {
+ endStr := end.String()
+ p.CareerEnd = &endStr
+ }
+
+ return
+ }
+
+ // populate career length from career start/end if career length is missing
+ if isEmptyStr(p.CareerLength) {
+ var (
+ start *models.Date
+ end *models.Date
+ )
+
+ if !isEmptyStr(p.CareerStart) {
+ date, err := models.ParseDate(*p.CareerStart)
+ if err != nil {
+ logger.Warnf("Could not parse career start %s: %v", *p.CareerStart, err)
+ return
+ }
+ start = &date
+ }
+
+ if !isEmptyStr(p.CareerEnd) {
+ date, err := models.ParseDate(*p.CareerEnd)
+ if err != nil {
+ logger.Warnf("Could not parse career end %s: %v", *p.CareerEnd, err)
+ return
+ }
+ end = &date
+ }
+
+ v := models.FormatYearRange(start, end)
+ p.CareerLength = &v
+ }
+}
+
func (c *postScraper) postScrapeMovie(ctx context.Context, m models.ScrapedMovie, related bool) (_ ScrapedContent, err error) {
r := c.repository
tqb := r.TagFinder
diff --git a/pkg/scraper/query_url.go b/pkg/scraper/query_url.go
index 2cd9f683e..7fe874947 100644
--- a/pkg/scraper/query_url.go
+++ b/pkg/scraper/query_url.go
@@ -17,6 +17,12 @@ func queryURLParametersFromScene(scene *models.Scene) queryURLParameters {
ret["oshash"] = scene.OSHash
ret["filename"] = filepath.Base(scene.Path)
+ // pull phash from primary file
+ phashFingerprints := scene.Files.Primary().Base().Fingerprints.Filter(models.FingerprintTypePhash)
+ if len(phashFingerprints) > 0 {
+ ret["phash"] = phashFingerprints[0].Value()
+ }
+
if scene.Title != "" {
ret["title"] = scene.Title
}
diff --git a/pkg/scraper/tag.go b/pkg/scraper/tag.go
index 14f02e397..c9c2530de 100644
--- a/pkg/scraper/tag.go
+++ b/pkg/scraper/tag.go
@@ -11,7 +11,7 @@ import (
"github.com/stashapp/stash/pkg/sliceutil"
)
-func postProcessTags(ctx context.Context, tqb models.TagQueryer, scrapedTags []*models.ScrapedTag) (ret []*models.ScrapedTag, err error) {
+func postProcessTags(ctx context.Context, tqb models.TagNameFinder, scrapedTags []*models.ScrapedTag) (ret []*models.ScrapedTag, err error) {
ret = make([]*models.ScrapedTag, 0, len(scrapedTags))
for _, t := range scrapedTags {
diff --git a/pkg/session/local.go b/pkg/session/local.go
new file mode 100644
index 000000000..519328496
--- /dev/null
+++ b/pkg/session/local.go
@@ -0,0 +1,44 @@
+package session
+
+import (
+ "context"
+ "net"
+ "net/http"
+
+ "github.com/stashapp/stash/pkg/logger"
+)
+
+// SetLocalRequest checks if the request is from localhost and sets the context value accordingly.
+// It returns the modified request with the updated context, or the original request if it did
+// not come from localhost or if there was an error parsing the remote address.
+func SetLocalRequest(r *http.Request) *http.Request {
+ // determine if request is from localhost
+ host, _, err := net.SplitHostPort(r.RemoteAddr)
+ if err != nil {
+ logger.Errorf("Error parsing remote address: %v", err)
+ return r
+ }
+
+ ip := net.ParseIP(host)
+ if ip == nil {
+ logger.Errorf("Error parsing IP address: %s", host)
+ return r
+ }
+
+ if ip.IsLoopback() {
+ ctx := context.WithValue(r.Context(), contextLocalRequest, true)
+ r = r.WithContext(ctx)
+ }
+
+ return r
+}
+
+// IsLocalRequest returns true if the request is from localhost, as determined by the context value set by SetLocalRequest.
+// If the context value is not set, it returns false.
+func IsLocalRequest(ctx context.Context) bool {
+ val := ctx.Value(contextLocalRequest)
+ if val == nil {
+ return false
+ }
+ return val.(bool)
+}
diff --git a/pkg/session/session.go b/pkg/session/session.go
index 66cb39e09..3e4c2eea1 100644
--- a/pkg/session/session.go
+++ b/pkg/session/session.go
@@ -15,6 +15,7 @@ type key int
const (
contextUser key = iota
contextVisitedPlugins
+ contextLocalRequest
)
const (
diff --git a/pkg/sqlite/anonymise.go b/pkg/sqlite/anonymise.go
index e0a354980..ace306169 100644
--- a/pkg/sqlite/anonymise.go
+++ b/pkg/sqlite/anonymise.go
@@ -522,6 +522,10 @@ func (db *Anonymiser) anonymiseGalleries(ctx context.Context) error {
return err
}
+ if err := db.anonymiseCustomFields(ctx, goqu.T(galleriesCustomFieldsTable.GetTable()), "gallery_id"); err != nil {
+ return err
+ }
+
return nil
}
@@ -960,6 +964,10 @@ func (db *Anonymiser) anonymiseGroups(ctx context.Context) error {
return err
}
+ if err := db.anonymiseCustomFields(ctx, goqu.T(groupsCustomFieldsTable.GetTable()), "group_id"); err != nil {
+ return err
+ }
+
return nil
}
diff --git a/pkg/sqlite/criterion_handlers.go b/pkg/sqlite/criterion_handlers.go
index 1496df71d..c703a85e3 100644
--- a/pkg/sqlite/criterion_handlers.go
+++ b/pkg/sqlite/criterion_handlers.go
@@ -70,11 +70,52 @@ func stringCriterionHandler(c *models.StringCriterionInput, column string) crite
}
}
-func joinedStringCriterionHandler(c *models.StringCriterionInput, column string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc {
+func stringNoTrimCriterionHandler(c *models.StringCriterionInput, column string) criterionHandlerFunc {
+ return func(ctx context.Context, f *filterBuilder) {
+ if c != nil {
+ if modifier := c.Modifier; c.Modifier.IsValid() {
+ switch modifier {
+ case models.CriterionModifierIncludes:
+ f.whereClauses = append(f.whereClauses, getStringSearchClause([]string{column}, c.Value, false))
+ case models.CriterionModifierExcludes:
+ f.whereClauses = append(f.whereClauses, getStringSearchClause([]string{column}, c.Value, true))
+ case models.CriterionModifierEquals:
+ f.addWhere(column+" LIKE ?", c.Value)
+ case models.CriterionModifierNotEquals:
+ f.addWhere(column+" NOT LIKE ?", c.Value)
+ case models.CriterionModifierMatchesRegex:
+ if _, err := regexp.Compile(c.Value); err != nil {
+ f.setError(err)
+ return
+ }
+ f.addWhere(fmt.Sprintf("(%s IS NOT NULL AND %[1]s regexp ?)", column), c.Value)
+ case models.CriterionModifierNotMatchesRegex:
+ if _, err := regexp.Compile(c.Value); err != nil {
+ f.setError(err)
+ return
+ }
+ f.addWhere(fmt.Sprintf("(%s IS NULL OR %[1]s NOT regexp ?)", column), c.Value)
+ case models.CriterionModifierIsNull:
+ f.addWhere("(" + column + " IS NULL)")
+ case models.CriterionModifierNotNull:
+ f.addWhere("(" + column + " IS NOT NULL)")
+ default:
+ panic("unsupported string filter modifier")
+ }
+ }
+ }
+ }
+}
+
+func joinedStringCriterionHandler(c *models.StringCriterionInput, column string, addJoinFn func(f *filterBuilder, joinType joinType)) criterionHandlerFunc {
return func(ctx context.Context, f *filterBuilder) {
if c != nil {
if addJoinFn != nil {
- addJoinFn(f)
+ joinType := joinTypeInner
+ if c.Modifier == models.CriterionModifierIsNull || c.Modifier == models.CriterionModifierNotMatchesRegex {
+ joinType = joinTypeLeft
+ }
+ addJoinFn(f, joinType)
}
stringCriterionHandler(c, column)(ctx, f)
}
@@ -104,16 +145,20 @@ func enumCriterionHandler(modifier models.CriterionModifier, values []string, co
}
}
-func pathCriterionHandler(c *models.StringCriterionInput, pathColumn string, basenameColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc {
+func pathCriterionHandler(c *models.StringCriterionInput, pathColumn string, basenameColumn string, addJoinFn func(f *filterBuilder, joinType joinType)) criterionHandlerFunc {
return func(ctx context.Context, f *filterBuilder) {
if c != nil {
- if addJoinFn != nil {
- addJoinFn(f)
- }
- addWildcards := true
- not := false
-
if modifier := c.Modifier; c.Modifier.IsValid() {
+ if addJoinFn != nil {
+ joinType := joinTypeInner
+ if modifier == models.CriterionModifierIsNull || modifier == models.CriterionModifierNotMatchesRegex {
+ joinType = joinTypeLeft
+ }
+ addJoinFn(f, joinType)
+ }
+ addWildcards := true
+ not := false
+
switch modifier {
case models.CriterionModifierIncludes:
f.whereClauses = append(f.whereClauses, getPathSearchClauseMany(pathColumn, basenameColumn, c.Value, addWildcards, not))
@@ -194,11 +239,15 @@ func getPathSearchClauseMany(pathColumn, basenameColumn, p string, addWildcards,
return getPathSearchClause(pathColumn, basenameColumn, trimmedQuery, addWildcards, not)
}
-func intCriterionHandler(c *models.IntCriterionInput, column string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc {
+func intCriterionHandler(c *models.IntCriterionInput, column string, addJoinFn func(f *filterBuilder, joinType joinType)) criterionHandlerFunc {
return func(ctx context.Context, f *filterBuilder) {
if c != nil {
if addJoinFn != nil {
- addJoinFn(f)
+ joinType := joinTypeInner
+ if c.Modifier == models.CriterionModifierIsNull {
+ joinType = joinTypeLeft
+ }
+ addJoinFn(f, joinType)
}
clause, args := getIntCriterionWhereClause(column, *c)
f.addWhere(clause, args...)
@@ -206,11 +255,15 @@ func intCriterionHandler(c *models.IntCriterionInput, column string, addJoinFn f
}
}
-func floatCriterionHandler(c *models.FloatCriterionInput, column string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc {
+func floatCriterionHandler(c *models.FloatCriterionInput, column string, addJoinFn func(f *filterBuilder, joinType joinType)) criterionHandlerFunc {
return func(ctx context.Context, f *filterBuilder) {
if c != nil {
if addJoinFn != nil {
- addJoinFn(f)
+ joinType := joinTypeInner
+ if c.Modifier == models.CriterionModifierIsNull {
+ joinType = joinTypeLeft
+ }
+ addJoinFn(f, joinType)
}
clause, args := getFloatCriterionWhereClause(column, *c)
f.addWhere(clause, args...)
@@ -218,11 +271,15 @@ func floatCriterionHandler(c *models.FloatCriterionInput, column string, addJoin
}
}
-func floatIntCriterionHandler(durationFilter *models.IntCriterionInput, column string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc {
+func floatIntCriterionHandler(durationFilter *models.IntCriterionInput, column string, addJoinFn func(f *filterBuilder, joinType joinType)) criterionHandlerFunc {
return func(ctx context.Context, f *filterBuilder) {
if durationFilter != nil {
if addJoinFn != nil {
- addJoinFn(f)
+ joinType := joinTypeInner
+ if durationFilter.Modifier == models.CriterionModifierIsNull {
+ joinType = joinTypeLeft
+ }
+ addJoinFn(f, joinType)
}
clause, args := getIntCriterionWhereClause("cast("+column+" as int)", *durationFilter)
f.addWhere(clause, args...)
@@ -230,11 +287,11 @@ func floatIntCriterionHandler(durationFilter *models.IntCriterionInput, column s
}
}
-func boolCriterionHandler(c *bool, column string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc {
+func boolCriterionHandler(c *bool, column string, addJoinFn func(f *filterBuilder, joinType joinType)) criterionHandlerFunc {
return func(ctx context.Context, f *filterBuilder) {
if c != nil {
if addJoinFn != nil {
- addJoinFn(f)
+ addJoinFn(f, joinTypeInner)
}
var v string
if *c {
@@ -289,11 +346,11 @@ func yearFilterCriterionHandler(year *models.IntCriterionInput, col string) crit
}
}
-func resolutionCriterionHandler(resolution *models.ResolutionCriterionInput, heightColumn string, widthColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc {
+func resolutionCriterionHandler(resolution *models.ResolutionCriterionInput, heightColumn string, widthColumn string, addJoinFn func(f *filterBuilder, joinType joinType)) criterionHandlerFunc {
return func(ctx context.Context, f *filterBuilder) {
if resolution != nil && resolution.Value.IsValid() {
if addJoinFn != nil {
- addJoinFn(f)
+ addJoinFn(f, joinTypeInner)
}
mn := resolution.Value.GetMinResolution()
@@ -315,11 +372,11 @@ func resolutionCriterionHandler(resolution *models.ResolutionCriterionInput, hei
}
}
-func orientationCriterionHandler(orientation *models.OrientationCriterionInput, heightColumn string, widthColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc {
+func orientationCriterionHandler(orientation *models.OrientationCriterionInput, heightColumn string, widthColumn string, addJoinFn func(f *filterBuilder, joinType joinType)) criterionHandlerFunc {
return func(ctx context.Context, f *filterBuilder) {
if orientation != nil {
if addJoinFn != nil {
- addJoinFn(f)
+ addJoinFn(f, joinTypeInner)
}
var clauses []sqlClause
@@ -362,7 +419,7 @@ type joinedMultiCriterionHandlerBuilder struct {
// foreign key of the foreign object on the join table
foreignFK string
- addJoinTable func(f *filterBuilder)
+ addJoinTable func(f *filterBuilder, joinType joinType)
}
func (m *joinedMultiCriterionHandlerBuilder) handler(c *models.MultiCriterionInput) criterionHandlerFunc {
@@ -378,11 +435,13 @@ func (m *joinedMultiCriterionHandlerBuilder) handler(c *models.MultiCriterionInp
if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull {
var notClause string
+ joinType := joinTypeLeft
if criterion.Modifier == models.CriterionModifierNotNull {
notClause = "NOT"
+ joinType = joinTypeInner
}
- m.addJoinTable(f)
+ m.addJoinTable(f, joinType)
f.addWhere(utils.StrFormat("{table}.{column} IS {not} NULL", utils.StrFormatMap{
"table": joinAlias,
@@ -415,11 +474,11 @@ func (m *joinedMultiCriterionHandlerBuilder) handler(c *models.MultiCriterionInp
switch criterion.Modifier {
case models.CriterionModifierIncludes:
// includes any of the provided ids
- m.addJoinTable(f)
+ m.addJoinTable(f, joinTypeInner)
whereClause = fmt.Sprintf("%s.%s IN %s", joinAlias, m.foreignFK, getInBinding(len(criterion.Value)))
case models.CriterionModifierEquals:
// includes only the provided ids
- m.addJoinTable(f)
+ m.addJoinTable(f, joinTypeInner)
whereClause = utils.StrFormat("{joinAlias}.{foreignFK} IN {inBinding} AND (SELECT COUNT(*) FROM {joinTable} s WHERE s.{primaryFK} = {primaryTable}.id) = ?", utils.StrFormatMap{
"joinAlias": joinAlias,
"foreignFK": m.foreignFK,
@@ -434,7 +493,7 @@ func (m *joinedMultiCriterionHandlerBuilder) handler(c *models.MultiCriterionInp
f.setError(fmt.Errorf("not equals modifier is not supported for multi criterion input"))
case models.CriterionModifierIncludesAll:
// includes all of the provided ids
- m.addJoinTable(f)
+ m.addJoinTable(f, joinTypeInner)
whereClause = fmt.Sprintf("%s.%s IN %s", joinAlias, m.foreignFK, getInBinding(len(criterion.Value)))
havingClause = fmt.Sprintf("count(distinct %s.%s) IS %d", joinAlias, m.foreignFK, len(criterion.Value))
}
@@ -468,7 +527,7 @@ type multiCriterionHandlerBuilder struct {
foreignFK string
// function that will be called to perform any necessary joins
- addJoinsFunc func(f *filterBuilder)
+ addJoinsFunc func(f *filterBuilder, joinType joinType)
}
func (m *multiCriterionHandlerBuilder) handler(criterion *models.MultiCriterionInput) criterionHandlerFunc {
@@ -500,7 +559,7 @@ func (m *multiCriterionHandlerBuilder) handler(criterion *models.MultiCriterionI
}
if m.addJoinsFunc != nil {
- m.addJoinsFunc(f)
+ m.addJoinsFunc(f, joinTypeInner)
}
whereClause, havingClause := getMultiCriterionClause(m.primaryTable, m.foreignTable, m.joinTable, m.primaryFK, m.foreignFK, criterion)
@@ -536,7 +595,7 @@ type stringListCriterionHandlerBuilder struct {
// string field on the join table
stringColumn string
- addJoinTable func(f *filterBuilder)
+ addJoinTable func(f *filterBuilder, joinType joinType)
excludeHandler func(f *filterBuilder, criterion *models.StringCriterionInput)
}
@@ -570,7 +629,11 @@ func (m *stringListCriterionHandlerBuilder) handler(criterion *models.StringCrit
// Modifier: models.CriterionModifierNotNull,
// }, m.joinTable+"."+m.stringColumn)(ctx, f)
} else {
- m.addJoinTable(f)
+ joinType := joinTypeInner
+ if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotMatchesRegex {
+ joinType = joinTypeLeft
+ }
+ m.addJoinTable(f, joinType)
stringCriterionHandler(criterion, m.joinTable+"."+m.stringColumn)(ctx, f)
}
}
@@ -1028,14 +1091,18 @@ func (h *stashIDCriterionHandler) handle(ctx context.Context, f *filterBuilder)
joinClause += fmt.Sprintf(" AND %s.endpoint = '%s'", t, *h.c.Endpoint)
}
- f.addLeftJoin(stashIDRepo.tableName, h.stashIDTableAs, joinClause)
+ joinType := joinTypeInner
+ if h.c.Modifier == models.CriterionModifierIsNull || h.c.Modifier == models.CriterionModifierNotMatchesRegex {
+ joinType = joinTypeLeft
+ }
+ f.addJoin(joinType, stashIDRepo.tableName, h.stashIDTableAs, joinClause)
v := ""
if h.c.StashID != nil {
v = *h.c.StashID
}
- stringCriterionHandler(&models.StringCriterionInput{
+ stringNoTrimCriterionHandler(&models.StringCriterionInput{
Value: v,
Modifier: h.c.Modifier,
}, t+".stash_id")(ctx, f)
@@ -1064,7 +1131,12 @@ func (h *stashIDsCriterionHandler) handle(ctx context.Context, f *filterBuilder)
joinClause += fmt.Sprintf(" AND %s.endpoint = '%s'", t, *h.c.Endpoint)
}
- f.addLeftJoin(stashIDRepo.tableName, h.stashIDTableAs, joinClause)
+ joinType := joinTypeInner
+ if h.c.Modifier == models.CriterionModifierIsNull {
+ joinType = joinTypeLeft
+ }
+
+ f.addJoin(joinType, stashIDRepo.tableName, h.stashIDTableAs, joinClause)
switch h.c.Modifier {
case models.CriterionModifierIsNull:
@@ -1089,11 +1161,16 @@ func (h *stashIDsCriterionHandler) handle(ctx context.Context, f *filterBuilder)
}
type relatedFilterHandler struct {
- relatedIDCol string
- relatedRepo repository
+ // column on the primary table that relates to the related table (eg scene_id)
+ relatedIDCol string
+ // repository for the related table (eg sceneRepository)
+ relatedRepo repository
+ // handler for the filter on the related table
relatedHandler criterionHandler
- joinFn func(f *filterBuilder)
- directJoin bool
+ // optional function to perform the necessary join(s) to the related table
+ joinFn func(f *filterBuilder)
+ // if true, related filter handler will be run using the existing filterBuilder instead of a subquery.
+ directJoin bool
}
func (h *relatedFilterHandler) handle(ctx context.Context, f *filterBuilder) {
@@ -1124,7 +1201,7 @@ func (h *relatedFilterHandler) handle(ctx context.Context, f *filterBuilder) {
return
}
- f.addWhere(fmt.Sprintf("%s IN ("+subQuery.toSQL(false)+")", h.relatedIDCol), subQuery.args...)
+ f.addWhere(fmt.Sprintf("%s IN ("+subQuery.toSQL(false)+")", h.relatedIDCol), subQuery.allArgs()...)
}
type phashDistanceCriterionHandler struct {
diff --git a/pkg/sqlite/custom_fields.go b/pkg/sqlite/custom_fields.go
index 63f85b250..22dbbfeb2 100644
--- a/pkg/sqlite/custom_fields.go
+++ b/pkg/sqlite/custom_fields.go
@@ -192,6 +192,10 @@ func (s *customFieldsStore) GetCustomFieldsBulk(ctx context.Context, ids []int)
const single = false
ret := make([]models.CustomFieldMap, len(ids))
+ // initialise ret with empty maps for each id
+ for i := range ret {
+ ret[i] = make(map[string]interface{})
+ }
idi := make(map[int]int, len(ids))
for i, id := range ids {
@@ -257,8 +261,8 @@ func (h *customFieldsFilterHandler) handleCriterion(f *filterBuilder, joinAs str
h.innerJoin(f, joinAs, cc.Field)
f.addWhere(fmt.Sprintf("%[1]s.value IN %s", joinAs, getInBinding(len(cv))), cv...)
case models.CriterionModifierNotEquals:
- h.innerJoin(f, joinAs, cc.Field)
- f.addWhere(fmt.Sprintf("%[1]s.value NOT IN %s", joinAs, getInBinding(len(cv))), cv...)
+ h.leftJoin(f, joinAs, cc.Field)
+ f.addWhere(fmt.Sprintf("(%[1]s.value NOT IN %s OR %[1]s.value IS NULL)", joinAs, getInBinding(len(cv))), cv...)
case models.CriterionModifierIncludes:
clauses := make([]sqlClause, len(cv))
for i, v := range cv {
@@ -268,7 +272,7 @@ func (h *customFieldsFilterHandler) handleCriterion(f *filterBuilder, joinAs str
f.whereClauses = append(f.whereClauses, clauses...)
case models.CriterionModifierExcludes:
for _, v := range cv {
- f.addWhere(fmt.Sprintf("%[1]s.value NOT LIKE ?", joinAs), fmt.Sprintf("%%%v%%", v))
+ f.addWhere(fmt.Sprintf("(%[1]s.value NOT LIKE ? OR %[1]s.value IS NULL)", joinAs), fmt.Sprintf("%%%v%%", v))
}
h.leftJoin(f, joinAs, cc.Field)
case models.CriterionModifierMatchesRegex:
@@ -311,8 +315,8 @@ func (h *customFieldsFilterHandler) handleCriterion(f *filterBuilder, joinAs str
h.innerJoin(f, joinAs, cc.Field)
f.addWhere(fmt.Sprintf("%s.value BETWEEN ? AND ?", joinAs), cv[0], cv[1])
case models.CriterionModifierNotBetween:
- h.innerJoin(f, joinAs, cc.Field)
- f.addWhere(fmt.Sprintf("%s.value NOT BETWEEN ? AND ?", joinAs), cv[0], cv[1])
+ h.leftJoin(f, joinAs, cc.Field)
+ f.addWhere(fmt.Sprintf("(%s.value NOT BETWEEN ? AND ? OR %[1]s.value IS NULL)", joinAs), cv[0], cv[1])
case models.CriterionModifierLessThan:
if len(cv) != 1 {
f.setError(fmt.Errorf("expected 1 value for custom field criterion modifier LESS_THAN, got %d", len(cv)))
diff --git a/pkg/sqlite/custom_fields_test.go b/pkg/sqlite/custom_fields_test.go
index a2c045851..5d5545210 100644
--- a/pkg/sqlite/custom_fields_test.go
+++ b/pkg/sqlite/custom_fields_test.go
@@ -240,3 +240,21 @@ func TestSceneSetCustomFields(t *testing.T) {
testSetCustomFields(t, "Scene", db.Scene, sceneIDs[sceneIdx], getSceneCustomFields(sceneIdx))
}
+
+func TestGallerySetCustomFields(t *testing.T) {
+ galleryIdx := galleryIdxWithChapters
+
+ testSetCustomFields(t, "Gallery", db.Gallery, galleryIDs[galleryIdx], getGalleryCustomFields(galleryIdx))
+}
+
+func TestImageSetCustomFields(t *testing.T) {
+ imageIdx := imageIdx2WithGallery
+
+ testSetCustomFields(t, "Image", db.Image, imageIDs[imageIdx], getImageCustomFields(imageIdx))
+}
+
+func TestGroupSetCustomFields(t *testing.T) {
+ groupIdx := groupIdxWithScene
+
+ testSetCustomFields(t, "Group", db.Group, groupIDs[groupIdx], getGroupCustomFields(groupIdx))
+}
diff --git a/pkg/sqlite/database.go b/pkg/sqlite/database.go
index 5b67e5602..7c383dc4c 100644
--- a/pkg/sqlite/database.go
+++ b/pkg/sqlite/database.go
@@ -34,7 +34,7 @@ const (
cacheSizeEnv = "STASH_SQLITE_CACHE_SIZE"
)
-var appSchemaVersion uint = 80
+var appSchemaVersion uint = 85
//go:embed migrations/*.sql
var migrationsBox embed.FS
diff --git a/pkg/sqlite/file.go b/pkg/sqlite/file.go
index 1be5648b4..b8e807e37 100644
--- a/pkg/sqlite/file.go
+++ b/pkg/sqlite/file.go
@@ -695,7 +695,7 @@ func (qb *FileStore) allInPaths(q *goqu.SelectDataset, p []string) *goqu.SelectD
// FindAllByPaths returns the all files that are within any of the given paths.
// Returns all if limit is < 0.
// Returns all files if p is empty.
-func (qb *FileStore) FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]models.File, error) {
+func (qb *FileStore) FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit, offset int) ([]models.File, error) {
table := qb.table()
folderTable := folderTableMgr.table
@@ -706,6 +706,10 @@ func (qb *FileStore) FindAllInPaths(ctx context.Context, p []string, limit, offs
q = qb.allInPaths(q, p)
+ if !includeZipContents {
+ q = q.Where(table.Col("zip_file_id").IsNull())
+ }
+
if limit > -1 {
q = q.Limit(uint(limit))
}
@@ -975,7 +979,7 @@ func (qb *FileStore) queryGroupedFields(ctx context.Context, options models.File
Megapixels float64
Size int64
}{}
- if err := qb.repository.queryStruct(ctx, aggregateQuery.toSQL(includeSortPagination), query.args, &out); err != nil {
+ if err := qb.repository.queryStruct(ctx, aggregateQuery.toSQL(includeSortPagination), query.allArgs(), &out); err != nil {
return nil, err
}
diff --git a/pkg/sqlite/file_filter.go b/pkg/sqlite/file_filter.go
index 157efb1d8..b8e9253a0 100644
--- a/pkg/sqlite/file_filter.go
+++ b/pkg/sqlite/file_filter.go
@@ -238,22 +238,32 @@ func (qb *fileFilterHandler) hashesCriterionHandler(hashes []*models.Fingerprint
t := fmt.Sprintf("file_fingerprints_%d", i)
f.addLeftJoin(fingerprintTable, t, fmt.Sprintf("files.id = %s.file_id AND %s.type = ?", t, t), hash.Type)
- value, _ := utils.StringToPhash(hash.Value)
distance := 0
if hash.Distance != nil {
distance = *hash.Distance
}
- if distance > 0 {
- // needed to avoid a type mismatch
- f.addWhere(fmt.Sprintf("typeof(%s.fingerprint) = 'integer'", t))
- f.addWhere(fmt.Sprintf("phash_distance(%s.fingerprint, ?) < ?", t), value, distance)
+ // Only phash supports distance matching and is stored as integer
+ if hash.Type == models.FingerprintTypePhash {
+ value, err := utils.StringToPhash(hash.Value)
+ if err != nil {
+ f.setError(fmt.Errorf("invalid phash value: %w", err))
+ return
+ }
+ if distance > 0 {
+ // needed to avoid a type mismatch
+ f.addWhere(fmt.Sprintf("typeof(%s.fingerprint) = 'integer'", t))
+ f.addWhere(fmt.Sprintf("phash_distance(%s.fingerprint, ?) < ?", t), value, distance)
+ } else {
+ intCriterionHandler(&models.IntCriterionInput{
+ Value: int(value),
+ Modifier: models.CriterionModifierEquals,
+ }, t+".fingerprint", nil)(ctx, f)
+ }
} else {
- // use the default handler
- intCriterionHandler(&models.IntCriterionInput{
- Value: int(value),
- Modifier: models.CriterionModifierEquals,
- }, t+".fingerprint", nil)(ctx, f)
+ // All other fingerprint types (md5, oshash, sha1, etc.) are stored as strings
+ // Use exact match for string-based fingerprints
+ f.addWhere(fmt.Sprintf("%s.fingerprint = ?", t), hash.Value)
}
}
}
@@ -290,15 +300,19 @@ func (qb *videoFileFilterHandler) criterionHandler() criterionHandler {
}
}
-func (qb *videoFileFilterHandler) addVideoFilesTable(f *filterBuilder) {
- f.addLeftJoin(videoFileTable, "", "video_files.file_id = files.id")
+func (qb *videoFileFilterHandler) addVideoFilesTable(f *filterBuilder, joinType joinType) {
+ f.addJoin(joinType, videoFileTable, "", "video_files.file_id = files.id")
}
-func (qb *videoFileFilterHandler) codecCriterionHandler(codec *models.StringCriterionInput, codecColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc {
+func (qb *videoFileFilterHandler) codecCriterionHandler(codec *models.StringCriterionInput, codecColumn string, addJoinFn func(f *filterBuilder, joinType joinType)) criterionHandlerFunc {
return func(ctx context.Context, f *filterBuilder) {
if codec != nil {
if addJoinFn != nil {
- addJoinFn(f)
+ joinType := joinTypeInner
+ if codec.Modifier == models.CriterionModifierIsNull || codec.Modifier == models.CriterionModifierNotMatchesRegex {
+ joinType = joinTypeLeft
+ }
+ addJoinFn(f, joinType)
}
stringCriterionHandler(codec, codecColumn)(ctx, f)
@@ -312,8 +326,8 @@ func (qb *videoFileFilterHandler) captionCriterionHandler(captions *models.Strin
primaryFK: sceneIDColumn,
joinTable: videoCaptionsTable,
stringColumn: captionCodeColumn,
- addJoinTable: func(f *filterBuilder) {
- f.addLeftJoin(videoCaptionsTable, "", "video_captions.file_id = files.id")
+ addJoinTable: func(f *filterBuilder, joinType joinType) {
+ f.addJoin(joinType, videoCaptionsTable, "", "video_captions.file_id = files.id")
},
excludeHandler: func(f *filterBuilder, criterion *models.StringCriterionInput) {
excludeClause := `files.id NOT IN (
@@ -351,6 +365,6 @@ func (qb *imageFileFilterHandler) criterionHandler() criterionHandler {
}
}
-func (qb *imageFileFilterHandler) addImageFilesTable(f *filterBuilder) {
- f.addLeftJoin(imageFileTable, "", "image_files.file_id = files.id")
+func (qb *imageFileFilterHandler) addImageFilesTable(f *filterBuilder, joinType joinType) {
+ f.addJoin(joinType, imageFileTable, "", "image_files.file_id = files.id")
}
diff --git a/pkg/sqlite/file_filter_test.go b/pkg/sqlite/file_filter_test.go
index 50eed0129..648e502f7 100644
--- a/pkg/sqlite/file_filter_test.go
+++ b/pkg/sqlite/file_filter_test.go
@@ -9,6 +9,7 @@ import (
"testing"
"github.com/stashapp/stash/pkg/models"
+ "github.com/stashapp/stash/pkg/utils"
"github.com/stretchr/testify/assert"
)
@@ -81,7 +82,45 @@ func TestFileQuery(t *testing.T) {
includeIDs: []models.FileID{fileIDs[fileIdxInZip]},
excludeIdxs: []int{fileIdxStartImageFiles},
},
- // TODO - add more tests for other file filters
+ {
+ name: "hashes md5",
+ filter: &models.FileFilterType{
+ Hashes: []*models.FingerprintFilterInput{
+ {
+ Type: models.FingerprintTypeMD5,
+ Value: getPrefixedStringValue("file", fileIdxStartVideoFiles, "md5"),
+ },
+ },
+ },
+ includeIdxs: []int{fileIdxStartVideoFiles},
+ excludeIdxs: []int{fileIdxStartImageFiles},
+ },
+ {
+ name: "hashes oshash",
+ filter: &models.FileFilterType{
+ Hashes: []*models.FingerprintFilterInput{
+ {
+ Type: models.FingerprintTypeOshash,
+ Value: getPrefixedStringValue("file", fileIdxStartVideoFiles, "oshash"),
+ },
+ },
+ },
+ includeIdxs: []int{fileIdxStartVideoFiles},
+ excludeIdxs: []int{fileIdxStartImageFiles},
+ },
+ {
+ name: "hashes phash",
+ filter: &models.FileFilterType{
+ Hashes: []*models.FingerprintFilterInput{
+ {
+ Type: models.FingerprintTypePhash,
+ Value: utils.PhashToString(getFilePhash(fileIdxStartImageFiles)),
+ },
+ },
+ },
+ includeIdxs: []int{fileIdxStartImageFiles},
+ excludeIdxs: []int{fileIdxStartVideoFiles},
+ },
}
for _, tt := range tests {
diff --git a/pkg/sqlite/file_test.go b/pkg/sqlite/file_test.go
index 8422390c0..55c41f4f7 100644
--- a/pkg/sqlite/file_test.go
+++ b/pkg/sqlite/file_test.go
@@ -572,7 +572,7 @@ func TestFileStore_FindByFingerprint(t *testing.T) {
{
"by MD5",
models.Fingerprint{
- Type: "MD5",
+ Type: models.FingerprintTypeMD5,
Fingerprint: getPrefixedStringValue("file", fileIdxZip, "md5"),
},
[]models.File{makeFileWithID(fileIdxZip)},
@@ -581,7 +581,7 @@ func TestFileStore_FindByFingerprint(t *testing.T) {
{
"by OSHASH",
models.Fingerprint{
- Type: "OSHASH",
+ Type: models.FingerprintTypeOshash,
Fingerprint: getPrefixedStringValue("file", fileIdxZip, "oshash"),
},
[]models.File{makeFileWithID(fileIdxZip)},
@@ -590,7 +590,7 @@ func TestFileStore_FindByFingerprint(t *testing.T) {
{
"non-existing",
models.Fingerprint{
- Type: "OSHASH",
+ Type: models.FingerprintTypeOshash,
Fingerprint: "foo",
},
nil,
diff --git a/pkg/sqlite/filter.go b/pkg/sqlite/filter.go
index fa6759ae6..c5e78c1d3 100644
--- a/pkg/sqlite/filter.go
+++ b/pkg/sqlite/filter.go
@@ -90,11 +90,18 @@ func andClauses(clauses ...sqlClause) sqlClause {
return joinClauses("AND", clauses...)
}
+type joinType string
+
+const (
+ joinTypeLeft joinType = "LEFT"
+ joinTypeInner joinType = "INNER"
+)
+
type join struct {
table string
as string
onClause string
- joinType string
+ joinType joinType
args []interface{}
// if true, indicates this is required for sorting only
@@ -115,15 +122,19 @@ func (j join) alias() string {
return j.as
}
+func (j join) getJoinType() joinType {
+ if j.joinType == "" {
+ return joinTypeLeft
+ }
+ return j.joinType
+}
+
func (j join) toSQL() string {
asStr := ""
- joinStr := j.joinType
+ joinStr := j.getJoinType()
if j.as != "" && j.as != j.table {
asStr = " AS " + j.as
}
- if j.joinType == "" {
- joinStr = "LEFT"
- }
return fmt.Sprintf("%s JOIN %s%s ON %s", joinStr, j.table, asStr, j.onClause)
}
@@ -141,6 +152,12 @@ func (j *joins) addUnique(newJoin join) bool {
if !newJoin.sort && jj.sort {
(*j)[i].sort = false
}
+
+ // if the new join is inner, override existing left join
+ if newJoin.getJoinType() == joinTypeInner && jj.getJoinType() == joinTypeLeft {
+ (*j)[i].joinType = joinTypeInner
+ }
+
break
}
}
@@ -243,6 +260,23 @@ func (f *filterBuilder) not(n *filterBuilder) {
f.subFilterOp = notOp
}
+// addJoin adds a join to the filter. The join is expressed in SQL as:
+// JOIN
[AS ] ON
+// The AS is omitted if as is empty.
+// This method does not add a join if it its alias/table name is already
+// present in another existing join.
+func (f *filterBuilder) addJoin(joinType joinType, table, as, onClause string, args ...interface{}) {
+ newJoin := join{
+ table: table,
+ as: as,
+ onClause: onClause,
+ joinType: joinType,
+ args: args,
+ }
+
+ f.joins.add(newJoin)
+}
+
// addLeftJoin adds a left join to the filter. The join is expressed in SQL as:
// LEFT JOIN