Merge pull request #6789 from stashapp/releases/0.31.0

This commit is contained in:
DogmaDragon 2026-04-02 09:24:03 +03:00 committed by GitHub
commit 7c8590eb7b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
705 changed files with 38619 additions and 12857 deletions

28
.github/workflows/build-compiler.yml vendored Normal file
View file

@ -0,0 +1,28 @@
name: Compiler Build
on:
workflow_dispatch:
env:
COMPILER_IMAGE: ghcr.io/stashapp/compiler:13
jobs:
build-compiler:
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v6
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: docker/setup-buildx-action@v3
- uses: docker/build-push-action@v6
with:
push: true
context: "{{defaultContext}}:docker/compiler"
tags: |
${{ env.COMPILER_IMAGE }}
ghcr.io/stashapp/compiler:latest
cache-from: type=gha,scope=all,mode=max
cache-to: type=gha,scope=all,mode=max

View file

@ -2,7 +2,7 @@ name: Build
on:
push:
branches:
branches:
- develop
- master
- 'releases/**'
@ -15,50 +15,163 @@ concurrency:
cancel-in-progress: true
env:
COMPILER_IMAGE: stashapp/compiler:12
COMPILER_IMAGE: ghcr.io/stashapp/compiler:13
jobs:
build:
runs-on: ubuntu-22.04
# Job 1: Generate code and build UI
# Runs natively (no Docker) — go generate/gqlgen and node don't need cross-compilers.
# Produces artifacts (generated Go files + UI build) consumed by test and build jobs.
generate:
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v6
with:
fetch-depth: 0
fetch-tags: true
- name: Setup Go
uses: actions/setup-go@v6
- name: Checkout
run: git fetch --prune --unshallow --tags
# pnpm version is read from the packageManager field in package.json
# very broken (4.3, 4.4)
- name: Install pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
with:
package_json_file: ui/v2.5/package.json
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: '20'
cache: 'pnpm'
cache-dependency-path: ui/v2.5/pnpm-lock.yaml
- name: Install UI dependencies
run: cd ui/v2.5 && pnpm install --frozen-lockfile
- name: Generate
run: make generate
- name: Cache UI build
uses: actions/cache@v5
id: cache-ui
with:
path: ui/v2.5/build
key: ${{ runner.os }}-ui-build-${{ hashFiles('ui/v2.5/pnpm-lock.yaml', 'ui/v2.5/public/**', 'ui/v2.5/src/**', 'graphql/**/*.graphql') }}
- name: Validate UI
# skip UI validation for pull requests if UI is unchanged
if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }}
run: make validate-ui
- name: Build UI
# skip UI build for pull requests if UI is unchanged (UI was cached)
if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }}
run: make ui
# Bundle generated Go files + UI build for downstream jobs (test + build)
- name: Upload generated artifacts
uses: actions/upload-artifact@v7
with:
name: generated
retention-days: 1
path: |
internal/api/generated_exec.go
internal/api/generated_models.go
ui/v2.5/build/
ui/login/locales/
# Job 2: Integration tests
# Runs natively (no Docker) — only needs Go + GCC (for CGO/SQLite), both on ubuntu-22.04.
# Runs in parallel with the build matrix jobs.
test:
needs: generate
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v6
- name: Setup Go
uses: actions/setup-go@v5
uses: actions/setup-go@v6
with:
go-version-file: 'go.mod'
- name: Pull compiler image
run: docker pull $COMPILER_IMAGE
- name: Cache node modules
uses: actions/cache@v3
env:
cache-name: cache-node_modules
# Places generated Go files + UI build into the working tree so the build compiles
- name: Download generated artifacts
uses: actions/download-artifact@v8
with:
path: ui/v2.5/node_modules
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/pnpm-lock.yaml') }}
name: generated
- name: Cache UI build
uses: actions/cache@v3
id: cache-ui
env:
cache-name: cache-ui
- name: Test Backend
run: make it
# Job 3: Cross-compile for all platforms
# Each platform gets its own runner and Docker container (ghcr.io/stashapp/compiler:13).
# Each build-cc-* make target is self-contained (sets its own GOOS/GOARCH/CC),
# so running them in separate containers is functionally identical to one container.
# Runs in parallel with the test job.
build:
needs: generate
runs-on: ubuntu-24.04
strategy:
fail-fast: false
matrix:
include:
- platform: windows
make-target: build-cc-windows
artifact-paths: |
dist/stash-win.exe
tag: win
- platform: macos
make-target: build-cc-macos
artifact-paths: |
dist/stash-macos
dist/Stash.app.zip
tag: osx
- platform: linux
make-target: build-cc-linux
artifact-paths: |
dist/stash-linux
tag: linux
- platform: linux-arm64v8
make-target: build-cc-linux-arm64v8
artifact-paths: |
dist/stash-linux-arm64v8
tag: arm
- platform: linux-arm32v7
make-target: build-cc-linux-arm32v7
artifact-paths: |
dist/stash-linux-arm32v7
tag: arm
- platform: linux-arm32v6
make-target: build-cc-linux-arm32v6
artifact-paths: |
dist/stash-linux-arm32v6
tag: arm
- platform: freebsd
make-target: build-cc-freebsd
artifact-paths: |
dist/stash-freebsd
tag: freebsd
steps:
- uses: actions/checkout@v6
with:
path: ui/v2.5/build
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/pnpm-lock.yaml', 'ui/v2.5/public/**', 'ui/v2.5/src/**', 'graphql/**/*.graphql') }}
fetch-depth: 0
fetch-tags: true
- name: Cache go build
uses: actions/cache@v3
env:
# increment the number suffix to bump the cache
cache-name: cache-go-cache-1
- name: Download generated artifacts
uses: actions/download-artifact@v8
with:
name: generated
- name: Cache Go build
uses: actions/cache@v5
with:
path: .go-cache
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('go.mod', '**/go.sum') }}
key: ${{ runner.os }}-go-cache-${{ matrix.platform }}-${{ hashFiles('go.mod', '**/go.sum') }}
# kept seperate to test timings
- name: pull compiler image
run: docker pull $COMPILER_IMAGE
- name: Start build container
env:
@ -67,45 +180,50 @@ jobs:
mkdir -p .go-cache
docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated --env OFFICIAL_BUILD=${{ env.official-build }} -w /stash $COMPILER_IMAGE tail -f /dev/null
- name: Pre-install
run: docker exec -t build /bin/bash -c "make CI=1 pre-ui"
- name: Generate
run: docker exec -t build /bin/bash -c "make generate"
- name: Validate UI
# skip UI validation for pull requests if UI is unchanged
if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }}
run: docker exec -t build /bin/bash -c "make validate-ui"
# Static validation happens in the linter workflow in parallel to this workflow
# Run Dynamic validation here, to make sure we pass all the projects integration tests
- name: Test Backend
run: docker exec -t build /bin/bash -c "make it"
- name: Build UI
# skip UI build for pull requests if UI is unchanged (UI was cached)
# this means that the build version/time may be incorrect if the UI is
# not changed in a pull request
if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }}
run: docker exec -t build /bin/bash -c "make ui"
- name: Compile for all supported platforms
run: |
docker exec -t build /bin/bash -c "make build-cc-windows"
docker exec -t build /bin/bash -c "make build-cc-macos"
docker exec -t build /bin/bash -c "make build-cc-linux"
docker exec -t build /bin/bash -c "make build-cc-linux-arm64v8"
docker exec -t build /bin/bash -c "make build-cc-linux-arm32v7"
docker exec -t build /bin/bash -c "make build-cc-linux-arm32v6"
docker exec -t build /bin/bash -c "make build-cc-freebsd"
- name: Zip UI
run: docker exec -t build /bin/bash -c "make zip-ui"
- name: Build (${{ matrix.platform }})
run: docker exec -t build /bin/bash -c "make ${{ matrix.make-target }}"
- name: Cleanup build container
run: docker rm -f -v build
- name: Upload build artifact
uses: actions/upload-artifact@v7
with:
name: build-${{ matrix.platform }}
retention-days: 1
path: ${{ matrix.artifact-paths }}
# Job 4: Release
# Waits for both test and build to pass, then collects all platform artifacts
# into dist/ for checksums, GitHub releases, and multi-arch Docker push.
release:
needs: [test, build]
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v6
with:
fetch-depth: 0
fetch-tags: true
# Downloads all artifacts (generated + 7 platform builds) into artifacts/ subdirectories
- name: Download all build artifacts
uses: actions/download-artifact@v8
with:
path: artifacts
# Reassemble platform binaries from matrix job artifacts into a single dist/ directory
# make sure that artifacts have executable bit set
# upload-artifact@v4 strips the common path prefix (dist/), so files are at the artifact root
- name: Collect binaries
run: |
mkdir -p dist
cp artifacts/build-*/* dist/
chmod +x dist/*
- name: Zip UI
run: |
cd artifacts/generated/ui/v2.5/build && zip -r ../../../../../dist/stash-ui.zip .
- name: Generate checksums
run: |
git describe --tags --exclude latest_develop | tee CHECKSUMS_SHA1
@ -116,7 +234,7 @@ jobs:
- name: Upload Windows binary
# only upload binaries for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v7
with:
name: stash-win.exe
path: dist/stash-win.exe
@ -124,15 +242,23 @@ jobs:
- name: Upload macOS binary
# only upload binaries for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v7
with:
name: stash-macos
path: dist/stash-macos
- name: Upload macOS bundle
# only upload binaries for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
uses: actions/upload-artifact@v7
with:
name: Stash.app.zip
path: dist/Stash.app.zip
- name: Upload Linux binary
# only upload binaries for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v7
with:
name: stash-linux
path: dist/stash-linux
@ -140,14 +266,14 @@ jobs:
- name: Upload UI
# only upload for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v7
with:
name: stash-ui.zip
path: dist/stash-ui.zip
- name: Update latest_develop tag
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
run : git tag -f latest_develop; git push -f --tags
run: git tag -f latest_develop; git push -f --tags
- name: Development Release
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
@ -197,7 +323,7 @@ jobs:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
run: |
docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64
docker run --rm --privileged tonistiigi/binfmt
docker info
docker buildx create --name builder --use
docker buildx inspect --bootstrap
@ -213,7 +339,7 @@ jobs:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
run: |
docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64
docker run --rm --privileged tonistiigi/binfmt
docker info
docker buildx create --name builder --use
docker buildx inspect --bootstrap

View file

@ -9,65 +9,20 @@ on:
- 'releases/**'
pull_request:
env:
COMPILER_IMAGE: stashapp/compiler:12
jobs:
golangci:
name: lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Checkout
run: git fetch --prune --unshallow --tags
- name: Setup Go
uses: actions/setup-go@v5
with:
go-version-file: 'go.mod'
- name: Pull compiler image
run: docker pull $COMPILER_IMAGE
- name: Start build container
run: |
mkdir -p .go-cache
docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated -w /stash $COMPILER_IMAGE tail -f /dev/null
# no tags or depth needed for lint
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
# generate-backend runs natively (just go generate + touch-ui) — no Docker needed
- name: Generate Backend
run: docker exec -t build /bin/bash -c "make generate-backend"
run: make generate-backend
## WARN
## using v1, update in a later PR
- name: Run golangci-lint
uses: golangci/golangci-lint-action@v6
with:
# Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
version: latest
# Optional: working directory, useful for monorepos
# working-directory: somedir
# Optional: golangci-lint command line arguments.
#
# Note: By default, the `.golangci.yml` file should be at the root of the repository.
# The location of the configuration file can be changed by using `--config=`
args: --timeout=5m
# Optional: show only new issues if it's a pull request. The default value is `false`.
# only-new-issues: true
# Optional: if set to true, then all caching functionality will be completely disabled,
# takes precedence over all other caching options.
# skip-cache: true
# Optional: if set to true, then the action won't cache or restore ~/go/pkg.
# skip-pkg-cache: true
# Optional: if set to true, then the action won't cache or restore ~/.cache/go-build.
# skip-build-cache: true
# Optional: The mode to install golangci-lint. It can be 'binary' or 'goinstall'.
# install-mode: "goinstall"
- name: Cleanup build container
run: docker rm -f -v build
uses: golangci/golangci-lint-action@v6

View file

@ -50,7 +50,7 @@ export CGO_ENABLED := 1
# define COMPILER_IMAGE for cross-compilation docker container
ifndef COMPILER_IMAGE
COMPILER_IMAGE := stashapp/compiler:latest
COMPILER_IMAGE := ghcr.io/stashapp/compiler:latest
endif
.PHONY: release
@ -129,7 +129,7 @@ phasher: build-flags
# builds dynamically-linked debug binaries
.PHONY: build
build: stash phasher
build: stash
# builds dynamically-linked PIE release binaries
.PHONY: build-release
@ -187,8 +187,6 @@ build-cc-macos:
# Combine into universal binaries
lipo -create -output dist/stash-macos dist/stash-macos-intel dist/stash-macos-arm
rm dist/stash-macos-intel dist/stash-macos-arm
lipo -create -output dist/phasher-macos dist/phasher-macos-intel dist/phasher-macos-arm
rm dist/phasher-macos-intel dist/phasher-macos-arm
# Place into bundle and zip up
rm -rf dist/Stash.app
@ -198,6 +196,16 @@ build-cc-macos:
cd dist && rm -f Stash.app.zip && zip -r Stash.app.zip Stash.app
rm -rf dist/Stash.app
.PHONY: build-cc-macos-phasher
build-cc-macos-phasher:
make build-cc-macos-arm
make build-cc-macos-intel
# Combine into universal binaries
lipo -create -output dist/phasher-macos dist/phasher-macos-intel dist/phasher-macos-arm
rm dist/phasher-macos-intel dist/phasher-macos-arm
# do not bundle phasher
.PHONY: build-cc-freebsd
build-cc-freebsd: export GOOS := freebsd
build-cc-freebsd: export GOARCH := amd64

View file

@ -13,10 +13,10 @@
![Screenshot of Stash web application interface](docs/readme_assets/demo_image.png)
* Stash gathers information about videos in your collection from the internet, and is extensible through the use of community-built plugins for a large number of content producers and sites.
* Stash supports a wide variety of both video and image formats.
* You can tag videos and find them later.
* Stash provides statistics about performers, tags, studios and more.
- Stash gathers information about videos in your collection from the internet, and is extensible through the use of community-built plugins for a large number of content producers and sites.
- Stash supports a wide variety of both video and image formats.
- You can tag videos and find them later.
- Stash provides statistics about performers, tags, studios and more.
You can [watch a SFW demo video](https://vimeo.com/545323354) to see it in action.
@ -24,17 +24,19 @@ For further information you can consult the [documentation](https://docs.stashap
# Installing Stash
> [!tip]
Step-by-step instructions are available at [docs.stashapp.cc/installation](https://docs.stashapp.cc/installation/).
#### Windows Users:
As of version 0.27.0, Stash no longer supports _Windows 7, 8, Server 2008 and Server 2012._
At least Windows 10 or Server 2016 is required.
#### Mac Users:
As of version 0.29.0, Stash requires _macOS 11 Big Sur_ or later.
Stash can still be run through docker on older versions of macOS.
> [!important]
>**Windows Users**
>
>As of version 0.27.0, Stash no longer supports _Windows 7, 8, Server 2008 and Server 2012._
>At least Windows 10 or Server 2016 is required.
>
>**macOS Users**
>
> As of version 0.29.0, Stash requires _macOS 11 Big Sur_ or later.
> Stash can still be run through docker on older versions of macOS.
<img src="docs/readme_assets/windows_logo.svg" width="100%" height="75"> Windows | <img src="docs/readme_assets/mac_logo.svg" width="100%" height="75"> macOS | <img src="docs/readme_assets/linux_logo.svg" width="100%" height="75"> Linux | <img src="docs/readme_assets/docker_logo.svg" width="100%" height="75"> Docker
:---:|:---:|:---:|:---:
@ -85,23 +87,23 @@ The badge below shows the current translation status of Stash across all support
Need help or want to get involved? Start with the documentation, then reach out to the community if you need further assistance.
- Documentation
- Official docs: https://docs.stashapp.cc - official guides guides and troubleshooting.
- In-app manual: press <kbd>Shift</kbd> + <kbd>?</kbd> in the app or view the manual online: https://docs.stashapp.cc/in-app-manual.
- FAQ: https://discourse.stashapp.cc/c/support/faq/28 - common questions and answers.
- Community wiki: https://discourse.stashapp.cc/tags/c/community-wiki/22/stash - guides, how-tos and tips.
### Documentation
- [Official documentation](https://docs.stashapp.cc) - official guides guides and troubleshooting.
- [In-app manual](https://docs.stashapp.cc/in-app-manual) press <kbd>Shift</kbd> + <kbd>?</kbd> in the app or view the manual online.
- [FAQ](https://discourse.stashapp.cc/c/support/faq/28) - common questions and answers.
- [Community wiki](https://discourse.stashapp.cc/tags/c/community-wiki/22/stash) - guides, how-tos and tips.
- Community & discussion
- Community forum: https://discourse.stashapp.cc - community support, feature requests and discussions.
- Discord: https://discord.gg/2TsNFKt - real-time chat and community support.
- GitHub discussions: https://github.com/stashapp/stash/discussions - community support and feature discussions.
- Lemmy community: https://discuss.online/c/stashapp - Reddit-style community space.
### Community & discussion
- [Community forum](https://discourse.stashapp.cc) - community support, feature requests and discussions.
- [Discord](https://discord.gg/2TsNFKt) - real-time chat and community support.
- [GitHub discussions](https://github.com/stashapp/stash/discussions) - community support and feature discussions.
- [Lemmy community](https://discuss.online/c/stashapp) - board-style community space.
- Community scrapers & plugins
- Metadata sources: https://docs.stashapp.cc/metadata-sources/
- Plugins: https://docs.stashapp.cc/plugins/
- Themes: https://docs.stashapp.cc/themes/
- Other projects: https://docs.stashapp.cc/other-projects/
### Community scrapers & plugins
- [Metadata sources](https://docs.stashapp.cc/metadata-sources/)
- [Plugins](https://docs.stashapp.cc/plugins/)
- [Themes](https://docs.stashapp.cc/themes/)
- [Other projects](https://docs.stashapp.cc/other-projects/)
# For Developers

View file

@ -5,20 +5,39 @@ import (
"fmt"
"os"
"os/exec"
"path/filepath"
flag "github.com/spf13/pflag"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/hash/imagephash"
"github.com/stashapp/stash/pkg/hash/videophash"
"github.com/stashapp/stash/pkg/models"
)
func customUsage() {
fmt.Fprintf(os.Stderr, "Usage:\n")
fmt.Fprintf(os.Stderr, "%s [OPTIONS] VIDEOFILE...\n\nOptions:\n", os.Args[0])
fmt.Fprintf(os.Stderr, "%s [OPTIONS] FILE...\n\nOptions:\n", os.Args[0])
flag.PrintDefaults()
}
func printPhash(ff *ffmpeg.FFMpeg, ffp *ffmpeg.FFProbe, inputfile string, quiet *bool) error {
// Determine if this is a video or image file based on extension
ext := filepath.Ext(inputfile)
ext = ext[1:] // remove the leading dot
// Common image extensions
imageExts := map[string]bool{
"jpg": true, "jpeg": true, "png": true, "gif": true, "webp": true, "bmp": true, "avif": true,
}
if imageExts[ext] {
return printImagePhash(ff, inputfile, quiet)
}
return printVideoPhash(ff, ffp, inputfile, quiet)
}
func printVideoPhash(ff *ffmpeg.FFMpeg, ffp *ffmpeg.FFProbe, inputfile string, quiet *bool) error {
ffvideoFile, err := ffp.NewVideoFile(inputfile)
if err != nil {
return err
@ -46,6 +65,24 @@ func printPhash(ff *ffmpeg.FFMpeg, ffp *ffmpeg.FFProbe, inputfile string, quiet
return nil
}
func printImagePhash(ff *ffmpeg.FFMpeg, inputfile string, quiet *bool) error {
imgFile := &models.ImageFile{
BaseFile: &models.BaseFile{Path: inputfile},
}
phash, err := imagephash.Generate(ff, imgFile)
if err != nil {
return err
}
if *quiet {
fmt.Printf("%x\n", *phash)
} else {
fmt.Printf("%x %v\n", *phash, imgFile.Path)
}
return nil
}
func getPaths() (string, string) {
ffmpegPath, _ := exec.LookPath("ffmpeg")
ffprobePath, _ := exec.LookPath("ffprobe")
@ -67,7 +104,7 @@ func main() {
args := flag.Args()
if len(args) < 1 {
fmt.Fprintf(os.Stderr, "Missing VIDEOFILE argument.\n")
fmt.Fprintf(os.Stderr, "Missing FILE argument.\n")
flag.Usage()
os.Exit(2)
}
@ -87,4 +124,5 @@ func main() {
fmt.Fprintln(os.Stderr, err)
}
}
}

View file

@ -12,7 +12,7 @@ RUN if [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then BIN=stash-linux-arm32v6; \
FROM --platform=$TARGETPLATFORM alpine:latest AS app
COPY --from=binary /stash /usr/bin/
RUN apk add --no-cache ca-certificates python3 py3-requests py3-requests-toolbelt py3-lxml py3-pip ffmpeg tzdata vips vips-tools \
RUN apk add --no-cache ca-certificates python3 py3-requests py3-requests-toolbelt py3-lxml py3-pip ffmpeg tzdata vips vips-tools vips-heif \
&& pip install --break-system-packages mechanicalsoup cloudscraper stashapp-tools
ENV STASH_CONFIG_FILE=/root/.stash/config.yml

View file

@ -1 +0,0 @@
*.sdk.tar.*

View file

@ -1,82 +1,86 @@
FROM golang:1.24.3
### OSXCROSS
FROM debian:bookworm AS osxcross
# add osxcross
WORKDIR /tmp/osxcross
ARG OSXCROSS_REVISION=5e1b71fcceb23952f3229995edca1b6231525b5b
ADD --checksum=sha256:d3f771bbc20612fea577b18a71be3af2eb5ad2dd44624196cf55de866d008647 https://codeload.github.com/tpoechtrager/osxcross/tar.gz/${OSXCROSS_REVISION} /tmp/osxcross.tar.gz
LABEL maintainer="https://discord.gg/2TsNFKt"
ARG OSX_SDK_VERSION=11.3
ARG OSX_SDK_DOWNLOAD_FILE=MacOSX${OSX_SDK_VERSION}.sdk.tar.xz
ARG OSX_SDK_DOWNLOAD_URL=https://github.com/phracker/MacOSX-SDKs/releases/download/${OSX_SDK_VERSION}/${OSX_SDK_DOWNLOAD_FILE}
ADD --checksum=sha256:cd4f08a75577145b8f05245a2975f7c81401d75e9535dcffbb879ee1deefcbf4 ${OSX_SDK_DOWNLOAD_URL} /tmp/osxcross/tarballs/${OSX_SDK_DOWNLOAD_FILE}
RUN apt-get update && apt-get install -y apt-transport-https ca-certificates gnupg
ENV UNATTENDED=yes \
SDK_VERSION=${OSX_SDK_VERSION} \
OSX_VERSION_MIN=10.10
RUN apt update && \
apt install -y --no-install-recommends \
bash ca-certificates clang cmake git patch libssl-dev bzip2 cpio libbz2-dev libxml2-dev make python3 xz-utils zlib1g-dev
# lzma-dev libxml2-dev xz
RUN tar --strip=1 -C /tmp/osxcross -xf /tmp/osxcross.tar.gz
RUN ./build.sh
RUN mkdir -p /etc/apt/keyrings
### FREEBSD cross-compilation stage
# use alpine for cacheable image since apt is notorous for not caching
FROM alpine:3 AS freebsd
# match golang latest
# https://go.dev/wiki/FreeBSD
ARG FREEBSD_VERSION=12.4
ADD --checksum=sha256:581c7edacfd2fca2bdf5791f667402d22fccd8a5e184635e0cac075564d57aa8 \
http://ftp-archive.freebsd.org/mirror/FreeBSD-Archive/old-releases/amd64/${FREEBSD_VERSION}-RELEASE/base.txz \
/tmp/base.txz
ADD https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key nodesource.gpg.key
RUN cat nodesource.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && rm nodesource.gpg.key
RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_24.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
WORKDIR /opt/cross-freebsd
RUN apk add --no-cache tar xz
RUN tar -xf /tmp/base.txz --strip-components=1 ./usr/lib ./usr/include ./lib
RUN cd /opt/cross-freebsd/usr/lib && \
find . -type l -exec sh -c ' \
for link; do \
target=$(readlink "$link"); \
case "$target" in \
/lib/*) ln -sf "/opt/cross-freebsd$target" "$link";; \
esac; \
done \
' sh {} + && \
ln -s libc++.a libstdc++.a && \
ln -s libc++.so libstdc++.so
RUN apt-get update && \
apt-get install -y --no-install-recommends \
git make tar bash nodejs zip \
clang llvm-dev cmake patch libxml2-dev uuid-dev libssl-dev xz-utils \
bzip2 gzip sed cpio libbz2-dev zlib1g-dev \
gcc-mingw-w64 \
gcc-arm-linux-gnueabi libc-dev-armel-cross linux-libc-dev-armel-cross \
gcc-aarch64-linux-gnu libc-dev-arm64-cross && \
rm -rf /var/lib/apt/lists/*;
### BUILDER
FROM golang:1.24.3 AS builder
ENV PATH=/opt/osx-ndk-x86/bin:$PATH
# copy in nodejs instead of using nodesource :thumbsup:
COPY --from=docker.io/library/node:24-bookworm /usr/local /usr/local
# copy in osxcross
COPY --from=osxcross /tmp/osxcross/target/lib /usr/lib
COPY --from=osxcross /tmp/osxcross/target /opt/osx-ndk-x86
# copy in cross-freebsd
COPY --from=freebsd /opt/cross-freebsd /opt/cross-freebsd
# pnpm install with npm
RUN npm install -g pnpm
# FreeBSD cross-compilation setup
# https://github.com/smartmontools/docker-build/blob/6b8c92560d17d325310ba02d9f5a4b250cb0764a/Dockerfile#L66
ENV FREEBSD_VERSION 13.4
ENV FREEBSD_DOWNLOAD_URL http://ftp.plusline.de/FreeBSD/releases/amd64/${FREEBSD_VERSION}-RELEASE/base.txz
ENV FREEBSD_SHA 8e13b0a93daba349b8d28ad246d7beb327659b2ef4fe44d89f447392daec5a7c
# git for getting hash
# make and bash for building
RUN cd /tmp && \
curl -o base.txz $FREEBSD_DOWNLOAD_URL && \
echo "$FREEBSD_SHA base.txz" | sha256sum -c - && \
mkdir -p /opt/cross-freebsd && \
cd /opt/cross-freebsd && \
tar -xf /tmp/base.txz ./lib/ ./usr/lib/ ./usr/include/ && \
rm -f /tmp/base.txz && \
cd /opt/cross-freebsd/usr/lib && \
find . -xtype l | xargs ls -l | grep ' /lib/' | awk '{print "ln -sf /opt/cross-freebsd"$11 " " $9}' | /bin/sh && \
ln -s libc++.a libstdc++.a && \
ln -s libc++.so libstdc++.so
# macOS cross-compilation setup
ENV OSX_SDK_VERSION 11.3
ENV OSX_SDK_DOWNLOAD_FILE MacOSX${OSX_SDK_VERSION}.sdk.tar.xz
ENV OSX_SDK_DOWNLOAD_URL https://github.com/phracker/MacOSX-SDKs/releases/download/${OSX_SDK_VERSION}/${OSX_SDK_DOWNLOAD_FILE}
ENV OSX_SDK_SHA cd4f08a75577145b8f05245a2975f7c81401d75e9535dcffbb879ee1deefcbf4
ENV OSXCROSS_REVISION 5e1b71fcceb23952f3229995edca1b6231525b5b
ENV OSXCROSS_DOWNLOAD_URL https://codeload.github.com/tpoechtrager/osxcross/tar.gz/${OSXCROSS_REVISION}
ENV OSXCROSS_SHA d3f771bbc20612fea577b18a71be3af2eb5ad2dd44624196cf55de866d008647
RUN cd /tmp && \
curl -o osxcross.tar.gz $OSXCROSS_DOWNLOAD_URL && \
echo "$OSXCROSS_SHA osxcross.tar.gz" | sha256sum -c - && \
mkdir osxcross && \
tar --strip=1 -C osxcross -xf osxcross.tar.gz && \
rm -f osxcross.tar.gz && \
curl -Lo $OSX_SDK_DOWNLOAD_FILE $OSX_SDK_DOWNLOAD_URL && \
echo "$OSX_SDK_SHA $OSX_SDK_DOWNLOAD_FILE" | sha256sum -c - && \
mv $OSX_SDK_DOWNLOAD_FILE osxcross/tarballs/ && \
UNATTENDED=yes SDK_VERSION=$OSX_SDK_VERSION OSX_VERSION_MIN=10.10 osxcross/build.sh && \
cp osxcross/target/lib/* /usr/lib/ && \
mv osxcross/target /opt/osx-ndk-x86 && \
rm -rf /tmp/osxcross
ENV PATH /opt/osx-ndk-x86/bin:$PATH
RUN mkdir -p /root/.ssh && \
chmod 0700 /root/.ssh && \
ssh-keyscan github.com > /root/.ssh/known_hosts
# ignore "dubious ownership" errors
# clang for macos
# zip for stashapp.zip
# gcc-extensions for cross-arch build
# we still target arm soft float?
RUN apt-get update && \
apt-get install -y --no-install-recommends \
git make bash \
clang zip \
gcc-mingw-w64 \
gcc-arm-linux-gnueabi \
libc-dev-armel-cross linux-libc-dev-armel-cross \
gcc-aarch64-linux-gnu libc-dev-arm64-cross && \
rm -rf /var/lib/apt/lists/*;
RUN git config --global safe.directory '*'
# To test locally:
# make generate
# make ui
# cd docker/compiler
# make build
# docker run --rm -v /PATH_TO_STASH:/stash -w /stash -i -t stashapp/compiler:latest make build-cc-all
# # binaries will show up in /dist
# docker build . -t ghcr.io/stashapp/compiler:latest
# docker run --rm -v /PATH_TO_STASH:/stash -w /stash -i -t ghcr.io/stashapp/compiler:latest make build-cc-all
# # binaries will show up in /dist

View file

@ -1,16 +1,22 @@
host=ghcr.io
user=stashapp
repo=compiler
version=12
version=13
VERSION_IMAGE = ${host}/${user}/${repo}:${version}
LATEST_IMAGE = ${host}/${user}/${repo}:latest
latest:
docker build -t ${user}/${repo}:latest .
docker build -t ${LATEST_IMAGE} .
build:
docker build -t ${user}/${repo}:${version} -t ${user}/${repo}:latest .
docker build -t ${VERSION_IMAGE} -t ${LATEST_IMAGE} .
build-no-cache:
docker build --no-cache -t ${user}/${repo}:${version} -t ${user}/${repo}:latest .
docker build --no-cache -t ${VERSION_IMAGE} -t ${LATEST_IMAGE} .
install: build
docker push ${user}/${repo}:${version}
docker push ${user}/${repo}:latest
# requires docker login ghcr.io
# echo $CR_PAT | docker login ghcr.io -u USERNAME --password-stdin
push:
docker push ${VERSION_IMAGE}
docker push ${LATEST_IMAGE}

View file

@ -1,3 +1,3 @@
Modified from https://github.com/bep/dockerfiles/tree/master/ci-goreleaser
When the Dockerfile is changed, the version number should be incremented in the Makefile and the new version tag should be pushed to Docker Hub. The GitHub workflow files also need to be updated to pull the correct image tag.
When the Dockerfile is changed, the version number should be incremented in [.github/workflows/build-compiler.yml](../../.github/workflows/build-compiler.yml) and the workflow [manually ran](). `env: COMPILER_IMAGE` in [.github/workflows/build.yml](../../.github/workflows/build.yml) also needs to be updated to pull the correct image tag.

View file

@ -118,8 +118,8 @@ This project uses a modification of the [CI-GoReleaser](https://github.com/bep/d
To cross-compile the app yourself:
1. Run `make pre-ui`, `make generate` and `make ui` outside the container, to generate files and build the UI.
2. Pull the latest compiler image from Docker Hub: `docker pull stashapp/compiler`
3. Run `docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -it stashapp/compiler /bin/bash` to open a shell inside the container.
2. Pull the latest compiler image from GHCR: `docker pull ghcr.io/stashapp/compiler`
3. Run `docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -it ghcr.io/stashapp/compiler /bin/bash` to open a shell inside the container.
4. From inside the container, run `make build-cc-all` to build for all platforms, or run `make build-cc-{platform}` to build for a specific platform (have a look at the `Makefile` for the list of targets).
5. You will find the compiled binaries in `dist/`.

16
go.mod
View file

@ -7,10 +7,10 @@ require (
github.com/WithoutPants/sortorder v0.0.0-20230616003020-921c9ef69552
github.com/Yamashou/gqlgenc v0.32.1
github.com/anacrolix/dms v1.2.2
github.com/antchfx/htmlquery v1.3.0
github.com/antchfx/htmlquery v1.3.5
github.com/asticode/go-astisub v0.25.1
github.com/chromedp/cdproto v0.0.0-20231007061347-18b01cd81617
github.com/chromedp/chromedp v0.9.2
github.com/chromedp/cdproto v0.0.0-20250803210736-d308e07a266d
github.com/chromedp/chromedp v0.14.2
github.com/corona10/goimagehash v1.1.0
github.com/disintegration/imaging v1.6.2
github.com/dop251/goja v0.0.0-20231027120936-b396bb4c349d
@ -44,6 +44,7 @@ require (
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8
github.com/remeh/sizedwaitgroup v1.0.0
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06
github.com/sirupsen/logrus v1.9.3
github.com/spf13/cast v1.6.0
github.com/spf13/pflag v1.0.6
@ -69,20 +70,21 @@ require (
require (
github.com/agnivade/levenshtein v1.2.1 // indirect
github.com/antchfx/xpath v1.2.3 // indirect
github.com/antchfx/xpath v1.3.5 // indirect
github.com/asticode/go-astikit v0.20.0 // indirect
github.com/asticode/go-astits v1.8.0 // indirect
github.com/chromedp/sysutil v1.0.0 // indirect
github.com/chromedp/sysutil v1.1.0 // indirect
github.com/coder/websocket v1.8.12 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.7 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dlclark/regexp2 v1.7.0 // indirect
github.com/fsnotify/fsnotify v1.9.0 // indirect
github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2 // indirect
github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
github.com/gobwas/httphead v0.1.0 // indirect
github.com/gobwas/pool v0.2.1 // indirect
github.com/gobwas/ws v1.3.0 // indirect
github.com/gobwas/ws v1.4.0 // indirect
github.com/goccy/go-yaml v1.18.0 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/google/pprof v0.0.0-20230207041349-798e818bf904 // indirect
@ -90,10 +92,8 @@ require (
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/knadh/koanf/maps v0.1.2 // indirect
github.com/magiconair/properties v1.8.7 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect

80
go.sum
View file

@ -85,10 +85,10 @@ github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/
github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
github.com/antchfx/htmlquery v1.3.0 h1:5I5yNFOVI+egyia5F2s/5Do2nFWxJz41Tr3DyfKD25E=
github.com/antchfx/htmlquery v1.3.0/go.mod h1:zKPDVTMhfOmcwxheXUsx4rKJy8KEY/PU6eXr/2SebQ8=
github.com/antchfx/xpath v1.2.3 h1:CCZWOzv5bAqjVv0offZ2LVgVYFbeldKQVuLNbViZdes=
github.com/antchfx/xpath v1.2.3/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
github.com/antchfx/htmlquery v1.3.5 h1:aYthDDClnG2a2xePf6tys/UyyM/kRcsFRm+ifhFKoU0=
github.com/antchfx/htmlquery v1.3.5/go.mod h1:5oyIPIa3ovYGtLqMPNjBF2Uf25NPCKsMjCnQ8lvjaoA=
github.com/antchfx/xpath v1.3.5 h1:PqbXLC3TkfeZyakF5eeh3NTWEbYl4VHNVeufANzDbKQ=
github.com/antchfx/xpath v1.3.5/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
@ -116,13 +116,12 @@ github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chromedp/cdproto v0.0.0-20230802225258-3cf4e6d46a89/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs=
github.com/chromedp/cdproto v0.0.0-20231007061347-18b01cd81617 h1:/5dwcyi5WOawM1Iz6MjrYqB90TRIdZv3O0fVHEJb86w=
github.com/chromedp/cdproto v0.0.0-20231007061347-18b01cd81617/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs=
github.com/chromedp/chromedp v0.9.2 h1:dKtNz4kApb06KuSXoTQIyUC2TrA0fhGDwNZf3bcgfKw=
github.com/chromedp/chromedp v0.9.2/go.mod h1:LkSXJKONWTCHAfQasKFUZI+mxqS4tZqhmtGzzhLsnLs=
github.com/chromedp/sysutil v1.0.0 h1:+ZxhTpfpZlmchB58ih/LBHX52ky7w2VhQVKQMucy3Ic=
github.com/chromedp/sysutil v1.0.0/go.mod h1:kgWmDdq8fTzXYcKIBqIYvRRTnYb9aNS9moAV0xufSww=
github.com/chromedp/cdproto v0.0.0-20250803210736-d308e07a266d h1:ZtA1sedVbEW7EW80Iz2GR3Ye6PwbJAJXjv7D74xG6HU=
github.com/chromedp/cdproto v0.0.0-20250803210736-d308e07a266d/go.mod h1:NItd7aLkcfOA/dcMXvl8p1u+lQqioRMq/SqDp71Pb/k=
github.com/chromedp/chromedp v0.14.2 h1:r3b/WtwM50RsBZHMUm9fsNhhzRStTHrKdr2zmwbZSzM=
github.com/chromedp/chromedp v0.14.2/go.mod h1:rHzAv60xDE7VNy/MYtTUrYreSc0ujt2O1/C3bzctYBo=
github.com/chromedp/sysutil v1.1.0 h1:PUFNv5EcprjqXZD9nJb9b/c9ibAbxiYo4exNWZyipwM=
github.com/chromedp/sysutil v1.1.0/go.mod h1:WiThHUdltqCNKGc4gaU50XgYjwjYIhKWoHGPTUfWTJ8=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/logex v1.2.0/go.mod h1:9+9sk7u7pGNWYMkh0hdiL++6OeibzJccyQU4p4MedaY=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
@ -206,6 +205,8 @@ github.com/go-chi/httplog v0.3.1/go.mod h1:UoiQQ/MTZH5V6JbNB2FzF0DynTh5okpXxlhsy
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2 h1:iizUGZ9pEquQS5jTGkh4AqeeHCMbfbjeb0zMt0aEFzs=
github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2/go.mod h1:TiCD2a1pcmjd7YnhGH0f/zKNcCD06B029pHhzV23c2M=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
@ -224,9 +225,8 @@ github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU
github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM=
github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og=
github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=
github.com/gobwas/ws v1.2.1/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY=
github.com/gobwas/ws v1.3.0 h1:sbeU3Y4Qzlb+MOzIe6mQGf7QR4Hkv6ZD0qhGkBFL2O0=
github.com/gobwas/ws v1.3.0/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY=
github.com/gobwas/ws v1.4.0 h1:CTaoG1tojrh4ucGPcoJFiAQUAsEWekEWvLy7GsVNqGs=
github.com/gobwas/ws v1.4.0/go.mod h1:G3gNqMNtPppf5XUz7O4shetPpcZ1VJ7zt18dlUeakrc=
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
@ -286,6 +286,7 @@ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@ -379,8 +380,6 @@ github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o=
github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
@ -432,8 +431,6 @@ github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc8
github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
@ -540,6 +537,8 @@ github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd h1:CmH9+J6ZSsIjUK
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8=
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 h1:OkMGxebDjyw0ULyrTYWeN0UNCCkmCWfjPnIA2W6oviI=
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06/go.mod h1:+ePHsJ1keEjQtpvf9HHw0f4ZeJ0TLRsxhunSI2hYJSs=
github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig=
github.com/sagikazarmark/crypt v0.4.0/go.mod h1:ALv2SRj7GxYV4HO9elxH9nS6M9gW+xDNxqmyJ6RfDFM=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
@ -664,6 +663,10 @@ golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5y
golang.org/x/crypto v0.0.0-20211215165025-cf75a172585e/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
@ -707,6 +710,10 @@ golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -757,7 +764,12 @@ golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qx
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@ -789,6 +801,11 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -869,14 +886,25 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -889,7 +917,12 @@ golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@ -956,6 +989,9 @@ golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo=
golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=
golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

View file

@ -140,4 +140,8 @@ models:
fields:
plugins:
resolver: true
Performer:
fields:
career_length:
resolver: true

View file

@ -373,6 +373,7 @@ type Mutation {
performerDestroy(input: PerformerDestroyInput!): Boolean!
performersDestroy(ids: [ID!]!): Boolean!
bulkPerformerUpdate(input: BulkPerformerUpdateInput!): [Performer!]
performerMerge(input: PerformerMergeInput!): Performer!
studioCreate(input: StudioCreateInput!): Studio
studioUpdate(input: StudioUpdateInput!): Studio
@ -421,8 +422,14 @@ type Mutation {
"""
moveFiles(input: MoveFilesInput!): Boolean!
deleteFiles(ids: [ID!]!): Boolean!
"Deletes file entries from the database without deleting the files from the filesystem"
destroyFiles(ids: [ID!]!): Boolean!
fileSetFingerprints(input: FileSetFingerprintsInput!): Boolean!
"Reveal the file in the system file manager"
revealFileInFileManager(id: ID!): Boolean!
"Reveal the folder in the system file manager"
revealFolderInFileManager(id: ID!): Boolean!
# Saved filters
saveFilter(input: SaveFilterInput!): SavedFilter!
@ -576,6 +583,8 @@ type Mutation {
stashBoxBatchPerformerTag(input: StashBoxBatchTagInput!): String!
"Run batch studio tag task. Returns the job ID."
stashBoxBatchStudioTag(input: StashBoxBatchTagInput!): String!
"Run batch tag tag task. Returns the job ID."
stashBoxBatchTagTag(input: StashBoxBatchTagInput!): String!
"Enables DLNA for an optional duration. Has no effect if DLNA is enabled by default"
enableDLNA(input: EnableDLNAInput!): Boolean!

View file

@ -184,6 +184,18 @@ input ConfigGeneralInput {
scraperPackageSources: [PackageSourceInput!]
"Source of plugin packages"
pluginPackageSources: [PackageSourceInput!]
"Size of the longest dimension for each sprite in pixels"
spriteScreenshotSize: Int
"True if sprite generation should use the sprite interval and min/max sprites settings instead of the default"
useCustomSpriteInterval: Boolean
"Time between two different scrubber sprites in seconds - only used if useCustomSpriteInterval is true"
spriteInterval: Float
"Minimum number of sprites to be generated - only used if useCustomSpriteInterval is true"
minimumSprites: Int
"Minimum number of sprites to be generated - only used if useCustomSpriteInterval is true"
maximumSprites: Int
}
type ConfigGeneralResult {
@ -287,6 +299,16 @@ type ConfigGeneralResult {
logAccess: Boolean!
"Maximum log size"
logFileMaxSize: Int!
"True if sprite generation should use the sprite interval and min/max sprites settings instead of the default"
useCustomSpriteInterval: Boolean!
"Time between two different scrubber sprites in seconds - only used if useCustomSpriteInterval is true"
spriteInterval: Float!
"Minimum number of sprites to be generated - only used if useCustomSpriteInterval is true"
minimumSprites: Int!
"Maximum number of sprites to be generated - only used if useCustomSpriteInterval is true"
maximumSprites: Int!
"Size of the longest dimension for each sprite in pixels"
spriteScreenshotSize: Int!
"Array of video file extensions"
videoExtensions: [String!]!
"Array of image file extensions"
@ -395,6 +417,9 @@ input ConfigInterfaceInput {
customLocales: String
customLocalesEnabled: Boolean
"When true, disables all customizations (plugins, CSS, JavaScript, locales) for troubleshooting"
disableCustomizations: Boolean
"Interface language"
language: String
@ -469,6 +494,9 @@ type ConfigInterfaceResult {
customLocales: String
customLocalesEnabled: Boolean
"When true, disables all customizations (plugins, CSS, JavaScript, locales) for troubleshooting"
disableCustomizations: Boolean
"Interface language"
language: String

View file

@ -6,13 +6,19 @@ type Fingerprint {
type Folder {
id: ID!
path: String!
basename: String!
parent_folder_id: ID @deprecated(reason: "Use parent_folder instead")
zip_file_id: ID @deprecated(reason: "Use zip_file instead")
parent_folder: Folder
"Returns all parent folders in order from immediate parent to top-level"
parent_folders: [Folder!]!
zip_file: BasicFile
"Returns direct sub-folders"
sub_folders: [Folder!]!
mod_time: Time!
created_at: Time!
@ -153,7 +159,7 @@ input MoveFilesInput {
input SetFingerprintsInput {
type: String!
"an null value will remove the fingerprint"
"a null value will remove the fingerprint"
value: String
}

View file

@ -75,22 +75,48 @@ input OrientationCriterionInput {
value: [OrientationEnum!]!
}
input PHashDuplicationCriterionInput {
duplicated: Boolean
"Currently unimplemented"
input DuplicationCriterionInput {
duplicated: Boolean @deprecated(reason: "Use phash field instead")
"Currently unimplemented. Intended for phash distance matching."
distance: Int
"Filter by phash duplication"
phash: Boolean
"Filter by URL duplication"
url: Boolean
"Filter by Stash ID duplication"
stash_id: Boolean
"Filter by title duplication"
title: Boolean
}
input FileDuplicationCriterionInput {
duplicated: Boolean @deprecated(reason: "Use phash field instead")
"Currently unimplemented. Intended for phash distance matching."
distance: Int
"Filter by phash duplication"
phash: Boolean
}
input StashIDCriterionInput {
"""
If present, this value is treated as a predicate.
That is, it will filter based on stash_ids with the matching endpoint
That is, it will filter based on stash_id with the matching endpoint
"""
endpoint: String
stash_id: String
modifier: CriterionModifier!
}
input StashIDsCriterionInput {
"""
If present, this value is treated as a predicate.
That is, it will filter based on stash_ids with the matching endpoint
"""
endpoint: String
stash_ids: [String]
modifier: CriterionModifier!
}
input CustomFieldCriterionInput {
field: String!
value: [Any!]
@ -126,10 +152,15 @@ input PerformerFilterType {
fake_tits: StringCriterionInput
"Filter by penis length value"
penis_length: FloatCriterionInput
"Filter by ciricumcision"
"Filter by circumcision"
circumcised: CircumcisionCriterionInput
"Filter by career length"
"Deprecated: use career_start and career_end. This filter is non-functional."
career_length: StringCriterionInput
@deprecated(reason: "Use career_start and career_end")
"Filter by career start"
career_start: DateCriterionInput
"Filter by career end"
career_end: DateCriterionInput
"Filter by tattoos"
tattoos: StringCriterionInput
"Filter by piercings"
@ -146,6 +177,8 @@ input PerformerFilterType {
tag_count: IntCriterionInput
"Filter by scene count"
scene_count: IntCriterionInput
"Filter by marker count (via scene)"
marker_count: IntCriterionInput
"Filter by image count"
image_count: IntCriterionInput
"Filter by gallery count"
@ -156,6 +189,9 @@ input PerformerFilterType {
o_counter: IntCriterionInput
"Filter by StashID"
stash_id_endpoint: StashIDCriterionInput
@deprecated(reason: "use stash_ids_endpoint instead")
"Filter by StashIDs"
stash_ids_endpoint: StashIDsCriterionInput
# rating expressed as 1-100
rating100: IntCriterionInput
"Filter by url"
@ -186,6 +222,8 @@ input PerformerFilterType {
galleries_filter: GalleryFilterType
"Filter by related tags that meet this criteria"
tags_filter: TagFilterType
"Filter by related scene markers (via scene) that meet this criteria"
markers_filter: SceneMarkerFilterType
"Filter by creation time"
created_at: TimestampCriterionInput
"Filter by last update time"
@ -211,9 +249,9 @@ input SceneMarkerFilterType {
updated_at: TimestampCriterionInput
"Filter by scene date"
scene_date: DateCriterionInput
"Filter by cscene reation time"
"Filter by scene creation time"
scene_created_at: TimestampCriterionInput
"Filter by lscene ast update time"
"Filter by scene last update time"
scene_updated_at: TimestampCriterionInput
"Filter by related scenes that meet this criteria"
scene_filter: SceneFilterType
@ -248,8 +286,8 @@ input SceneFilterType {
organized: Boolean
"Filter by o-counter"
o_counter: IntCriterionInput
"Filter Scenes that have an exact phash match available"
duplicated: PHashDuplicationCriterionInput
"Filter Scenes by duplication criteria"
duplicated: DuplicationCriterionInput
"Filter by resolution"
resolution: ResolutionCriterionInput
"Filter by orientation"
@ -292,6 +330,11 @@ input SceneFilterType {
performer_count: IntCriterionInput
"Filter by StashID"
stash_id_endpoint: StashIDCriterionInput
@deprecated(reason: "use stash_ids_endpoint instead")
"Filter by StashIDs"
stash_ids_endpoint: StashIDsCriterionInput
"Filter by StashID count"
stash_id_count: IntCriterionInput
"Filter by url"
url: StringCriterionInput
"Filter by interactive"
@ -332,6 +375,8 @@ input SceneFilterType {
markers_filter: SceneMarkerFilterType
"Filter by related files that meet this criteria"
files_filter: FileFilterType
custom_fields: [CustomFieldCriterionInput!]
}
input MovieFilterType {
@ -414,11 +459,16 @@ input GroupFilterType {
containing_group_count: IntCriterionInput
"Filter by number of sub-groups the group has"
sub_group_count: IntCriterionInput
"Filter by number of scenes the group has"
scene_count: IntCriterionInput
"Filter by related scenes that meet this criteria"
scenes_filter: SceneFilterType
"Filter by related studios that meet this criteria"
studios_filter: StudioFilterType
"Filter by custom fields"
custom_fields: [CustomFieldCriterionInput!]
}
input StudioFilterType {
@ -432,6 +482,9 @@ input StudioFilterType {
parents: MultiCriterionInput
"Filter by StashID"
stash_id_endpoint: StashIDCriterionInput
@deprecated(reason: "use stash_ids_endpoint instead")
"Filter by StashIDs"
stash_ids_endpoint: StashIDsCriterionInput
"Filter to only include studios with these tags"
tags: HierarchicalMultiCriterionInput
"Filter to only include studios missing this property"
@ -446,6 +499,8 @@ input StudioFilterType {
image_count: IntCriterionInput
"Filter by gallery count"
gallery_count: IntCriterionInput
"Filter by group count"
group_count: IntCriterionInput
"Filter by tag count"
tag_count: IntCriterionInput
"Filter by url"
@ -456,16 +511,22 @@ input StudioFilterType {
child_count: IntCriterionInput
"Filter by autotag ignore value"
ignore_auto_tag: Boolean
"Filter by organized"
organized: Boolean
"Filter by related scenes that meet this criteria"
scenes_filter: SceneFilterType
"Filter by related images that meet this criteria"
images_filter: ImageFilterType
"Filter by related galleries that meet this criteria"
galleries_filter: GalleryFilterType
"Filter by related groups that meet this criteria"
groups_filter: GroupFilterType
"Filter by creation time"
created_at: TimestampCriterionInput
"Filter by last update time"
updated_at: TimestampCriterionInput
custom_fields: [CustomFieldCriterionInput!]
}
input GalleryFilterType {
@ -542,6 +603,10 @@ input GalleryFilterType {
files_filter: FileFilterType
"Filter by related folders that meet this criteria"
folders_filter: FolderFilterType
"Filter by parent folder of the zip or folder the gallery is in"
parent_folder: HierarchicalMultiCriterionInput
custom_fields: [CustomFieldCriterionInput!]
}
input TagFilterType {
@ -600,7 +665,7 @@ input TagFilterType {
"Filter by number of parent tags the tag has"
parent_count: IntCriterionInput
"Filter by number f child tags the tag has"
"Filter by number of child tags the tag has"
child_count: IntCriterionInput
"Filter by autotag ignore value"
@ -608,6 +673,10 @@ input TagFilterType {
"Filter by StashID"
stash_id_endpoint: StashIDCriterionInput
@deprecated(reason: "use stash_ids_endpoint instead")
"Filter by StashID"
stash_ids_endpoint: StashIDsCriterionInput
"Filter by related scenes that meet this criteria"
scenes_filter: SceneFilterType
@ -615,12 +684,22 @@ input TagFilterType {
images_filter: ImageFilterType
"Filter by related galleries that meet this criteria"
galleries_filter: GalleryFilterType
"Filter by related groups that meet this criteria"
groups_filter: GroupFilterType
"Filter by related performers that meet this criteria"
performers_filter: PerformerFilterType
"Filter by related studios that meet this criteria"
studios_filter: StudioFilterType
"Filter by related scene markers that meet this criteria"
markers_filter: SceneMarkerFilterType
"Filter by creation time"
created_at: TimestampCriterionInput
"Filter by last update time"
updated_at: TimestampCriterionInput
custom_fields: [CustomFieldCriterionInput!]
}
input ImageFilterType {
@ -635,6 +714,8 @@ input ImageFilterType {
id: IntCriterionInput
"Filter by file checksum"
checksum: StringCriterionInput
"Filter by file phash distance"
phash_distance: PhashDistanceCriterionInput
"Filter by path"
path: StringCriterionInput
"Filter by file count"
@ -692,6 +773,8 @@ input ImageFilterType {
tags_filter: TagFilterType
"Filter by related files that meet this criteria"
files_filter: FileFilterType
"Filter by custom fields"
custom_fields: [CustomFieldCriterionInput!]
}
input FileFilterType {
@ -709,8 +792,8 @@ input FileFilterType {
"Filter by modification time"
mod_time: TimestampCriterionInput
"Filter files that have an exact match available"
duplicated: PHashDuplicationCriterionInput
"Filter files by duplication criteria (only phash applies to files)"
duplicated: FileDuplicationCriterionInput
"find files based on hash"
hashes: [FingerprintFilterInput!]
@ -741,6 +824,7 @@ input FolderFilterType {
NOT: FolderFilterType
path: StringCriterionInput
basename: StringCriterionInput
parent_folder: HierarchicalMultiCriterionInput
zip_file: MultiCriterionInput
@ -849,7 +933,7 @@ input GenderCriterionInput {
}
input CircumcisionCriterionInput {
value: [CircumisedEnum!]
value: [CircumcisedEnum!]
modifier: CriterionModifier!
}

View file

@ -32,6 +32,7 @@ type Gallery {
cover: Image
paths: GalleryPathsType! # Resolver
custom_fields: Map!
image(index: Int!): Image!
}
@ -50,6 +51,8 @@ input GalleryCreateInput {
studio_id: ID
tag_ids: [ID!]
performer_ids: [ID!]
custom_fields: Map
}
input GalleryUpdateInput {
@ -71,6 +74,8 @@ input GalleryUpdateInput {
performer_ids: [ID!]
primary_file_id: ID
custom_fields: CustomFieldsInput
}
input BulkGalleryUpdateInput {
@ -89,6 +94,8 @@ input BulkGalleryUpdateInput {
studio_id: ID
tag_ids: BulkUpdateIds
performer_ids: BulkUpdateIds
custom_fields: CustomFieldsInput
}
input GalleryDestroyInput {
@ -100,6 +107,8 @@ input GalleryDestroyInput {
"""
delete_file: Boolean
delete_generated: Boolean
"If true, delete the file entry from the database if the file is not assigned to any other objects"
destroy_file_entry: Boolean
}
type FindGalleriesResultType {

View file

@ -31,6 +31,7 @@ type Group {
sub_group_count(depth: Int): Int! # Resolver
scenes: [Scene!]!
o_counter: Int # Resolver
custom_fields: Map!
}
input GroupDescriptionInput {
@ -59,6 +60,8 @@ input GroupCreateInput {
front_image: String
"This should be a URL or a base64 encoded data URL"
back_image: String
custom_fields: Map
}
input GroupUpdateInput {
@ -82,6 +85,8 @@ input GroupUpdateInput {
front_image: String
"This should be a URL or a base64 encoded data URL"
back_image: String
custom_fields: CustomFieldsInput
}
input BulkUpdateGroupDescriptionsInput {
@ -94,6 +99,8 @@ input BulkGroupUpdateInput {
ids: [ID!]
# rating expressed as 1-100
rating100: Int
date: String
synopsis: String
studio_id: ID
director: String
urls: BulkUpdateStrings
@ -101,6 +108,8 @@ input BulkGroupUpdateInput {
containing_groups: BulkUpdateGroupDescriptionsInput
sub_groups: BulkUpdateGroupDescriptionsInput
custom_fields: CustomFieldsInput
}
input GroupDestroyInput {

View file

@ -21,6 +21,7 @@ type Image {
studio: Studio
tags: [Tag!]!
performers: [Performer!]!
custom_fields: Map!
}
type ImageFileType {
@ -56,6 +57,7 @@ input ImageUpdateInput {
gallery_ids: [ID!]
primary_file_id: ID
custom_fields: CustomFieldsInput
}
input BulkImageUpdateInput {
@ -76,18 +78,23 @@ input BulkImageUpdateInput {
performer_ids: BulkUpdateIds
tag_ids: BulkUpdateIds
gallery_ids: BulkUpdateIds
custom_fields: CustomFieldsInput
}
input ImageDestroyInput {
id: ID!
delete_file: Boolean
delete_generated: Boolean
"If true, delete the file entry from the database if the file is not assigned to any other objects"
destroy_file_entry: Boolean
}
input ImagesDestroyInput {
ids: [ID!]!
delete_file: Boolean
delete_generated: Boolean
"If true, delete the file entry from the database if the file is not assigned to any other objects"
destroy_file_entry: Boolean
}
type FindImagesResultType {

View file

@ -10,8 +10,11 @@ input GenerateMetadataInput {
transcodes: Boolean
"Generate transcodes even if not required"
forceTranscodes: Boolean
"Generate video phashes during scan"
phashes: Boolean
interactiveHeatmapsSpeeds: Boolean
"Generate image phashes during scan"
imagePhashes: Boolean
imageThumbnails: Boolean
clipPreviews: Boolean
@ -19,6 +22,12 @@ input GenerateMetadataInput {
sceneIDs: [ID!]
"marker ids to generate for"
markerIDs: [ID!]
"image ids to generate for"
imageIDs: [ID!]
"gallery ids to generate for"
galleryIDs: [ID!]
"paths to run generate on, in addition to the other ID lists"
paths: [String!]
"overwrite existing media"
overwrite: Boolean
@ -85,8 +94,10 @@ input ScanMetadataInput {
scanGenerateImagePreviews: Boolean
"Generate sprites during scan"
scanGenerateSprites: Boolean
"Generate phashes during scan"
"Generate video phashes during scan"
scanGeneratePhashes: Boolean
"Generate image phashes during scan"
scanGenerateImagePhashes: Boolean
"Generate image thumbnails during scan"
scanGenerateThumbnails: Boolean
"Generate image clip previews during scan"
@ -107,8 +118,10 @@ type ScanMetadataOptions {
scanGenerateImagePreviews: Boolean!
"Generate sprites during scan"
scanGenerateSprites: Boolean!
"Generate phashes during scan"
"Generate video phashes during scan"
scanGeneratePhashes: Boolean!
"Generate image phashes during scan"
scanGenerateImagePhashes: Boolean
"Generate image thumbnails during scan"
scanGenerateThumbnails: Boolean!
"Generate image clip previews during scan"
@ -118,6 +131,14 @@ type ScanMetadataOptions {
input CleanMetadataInput {
paths: [String!]
"""
Don't check zip file contents when determining whether to clean a file.
This can significantly speed up the clean process, but will potentially miss removed files within zip files.
Where users do not modify zip files contents directly, this should be safe to use.
Defaults to false.
"""
ignoreZipFileContents: Boolean
"Do a dry run. Don't delete any files"
dryRun: Boolean!
}
@ -204,7 +225,9 @@ input IdentifyMetadataOptionsInput {
setCoverImage: Boolean
setOrganized: Boolean
"defaults to true if not provided"
includeMalePerformers: Boolean
includeMalePerformers: Boolean @deprecated(reason: "Use performerGenders")
"Filter to only include performers with these genders. If not provided, all genders are included."
performerGenders: [GenderEnum!]
"defaults to true if not provided"
skipMultipleMatches: Boolean
"tag to tag skipped multiple matches with"
@ -249,7 +272,9 @@ type IdentifyMetadataOptions {
setCoverImage: Boolean
setOrganized: Boolean
"defaults to true if not provided"
includeMalePerformers: Boolean
includeMalePerformers: Boolean @deprecated(reason: "Use performerGenders")
"Filter to only include performers with these genders. If not provided, all genders are included."
performerGenders: [GenderEnum!]
"defaults to true if not provided"
skipMultipleMatches: Boolean
"tag to tag skipped multiple matches with"
@ -310,6 +335,8 @@ input ImportObjectsInput {
input BackupDatabaseInput {
download: Boolean
"If true, blob files will be included in the backup. This can significantly increase the size of the backup and the time it takes to create it, but allows for a complete backup of the system that can be restored without needing access to the original media files."
includeBlobs: Boolean
}
input AnonymiseDatabaseInput {

View file

@ -7,7 +7,7 @@ enum GenderEnum {
NON_BINARY
}
enum CircumisedEnum {
enum CircumcisedEnum {
CUT
UNCUT
}
@ -29,8 +29,10 @@ type Performer {
measurements: String
fake_tits: String
penis_length: Float
circumcised: CircumisedEnum
career_length: String
circumcised: CircumcisedEnum
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: String
career_end: String
tattoos: String
piercings: String
alias_list: [String!]!
@ -76,10 +78,13 @@ input PerformerCreateInput {
measurements: String
fake_tits: String
penis_length: Float
circumcised: CircumisedEnum
career_length: String
circumcised: CircumcisedEnum
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: String
career_end: String
tattoos: String
piercings: String
"Duplicate aliases and those equal to name will be ignored (case-insensitive)"
alias_list: [String!]
twitter: String @deprecated(reason: "Use urls")
instagram: String @deprecated(reason: "Use urls")
@ -114,10 +119,13 @@ input PerformerUpdateInput {
measurements: String
fake_tits: String
penis_length: Float
circumcised: CircumisedEnum
career_length: String
circumcised: CircumcisedEnum
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: String
career_end: String
tattoos: String
piercings: String
"Duplicate aliases and those equal to name will be ignored (case-insensitive)"
alias_list: [String!]
twitter: String @deprecated(reason: "Use urls")
instagram: String @deprecated(reason: "Use urls")
@ -157,10 +165,13 @@ input BulkPerformerUpdateInput {
measurements: String
fake_tits: String
penis_length: Float
circumcised: CircumisedEnum
career_length: String
circumcised: CircumcisedEnum
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: String
career_end: String
tattoos: String
piercings: String
"Duplicate aliases and those equal to name will result in an error (case-insensitive)"
alias_list: BulkUpdateStrings
twitter: String @deprecated(reason: "Use urls")
instagram: String @deprecated(reason: "Use urls")
@ -185,3 +196,10 @@ type FindPerformersResultType {
count: Int!
performers: [Performer!]!
}
input PerformerMergeInput {
source: [ID!]!
destination: ID!
# values defined here will override values in the destination
values: PerformerUpdateInput
}

View file

@ -79,6 +79,8 @@ type Scene {
performers: [Performer!]!
stash_ids: [StashID!]!
custom_fields: Map!
"Return valid stream paths"
sceneStreams: [SceneStreamEndpoint!]!
}
@ -120,6 +122,8 @@ input SceneCreateInput {
Files must not already be primary for another scene.
"""
file_ids: [ID!]
custom_fields: Map
}
input SceneUpdateInput {
@ -158,6 +162,8 @@ input SceneUpdateInput {
)
primary_file_id: ID
custom_fields: CustomFieldsInput
}
enum BulkUpdateIdMode {
@ -190,18 +196,24 @@ input BulkSceneUpdateInput {
tag_ids: BulkUpdateIds
group_ids: BulkUpdateIds
movie_ids: BulkUpdateIds @deprecated(reason: "Use group_ids")
custom_fields: CustomFieldsInput
}
input SceneDestroyInput {
id: ID!
delete_file: Boolean
delete_generated: Boolean
"If true, delete the file entry from the database if the file is not assigned to any other objects"
destroy_file_entry: Boolean
}
input ScenesDestroyInput {
ids: [ID!]!
delete_file: Boolean
delete_generated: Boolean
"If true, delete the file entry from the database if the file is not assigned to any other objects"
destroy_file_entry: Boolean
}
type FindScenesResultType {

View file

@ -18,7 +18,9 @@ type ScrapedPerformer {
fake_tits: String
penis_length: String
circumcised: String
career_length: String
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: String
career_end: String
tattoos: String
piercings: String
# aliases must be comma-delimited to be parsed correctly
@ -54,7 +56,9 @@ input ScrapedPerformerInput {
fake_tits: String
penis_length: String
circumcised: String
career_length: String
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: String
career_end: String
tattoos: String
piercings: String
aliases: String

View file

@ -71,6 +71,9 @@ type ScrapedTag {
"Set if tag matched"
stored_id: ID
name: String!
description: String
alias_list: [String!]
parent: ScrapedTag
"Remote site ID, if applicable"
remote_site_id: String
}

View file

@ -8,6 +8,7 @@ type Studio {
aliases: [String!]!
tags: [Tag!]!
ignore_auto_tag: Boolean!
organized: Boolean!
image_path: String # Resolver
scene_count(depth: Int): Int! # Resolver
@ -26,6 +27,8 @@ type Studio {
groups: [Group!]!
movies: [Movie!]! @deprecated(reason: "use groups instead")
o_counter: Int
custom_fields: Map!
}
input StudioCreateInput {
@ -40,9 +43,13 @@ input StudioCreateInput {
rating100: Int
favorite: Boolean
details: String
"Duplicate aliases and those equal to name will be ignored (case-insensitive)"
aliases: [String!]
tag_ids: [ID!]
ignore_auto_tag: Boolean
organized: Boolean
custom_fields: Map
}
input StudioUpdateInput {
@ -58,9 +65,13 @@ input StudioUpdateInput {
rating100: Int
favorite: Boolean
details: String
"Duplicate aliases and those equal to name will be ignored (case-insensitive)"
aliases: [String!]
tag_ids: [ID!]
ignore_auto_tag: Boolean
organized: Boolean
custom_fields: CustomFieldsInput
}
input BulkStudioUpdateInput {
@ -74,6 +85,7 @@ input BulkStudioUpdateInput {
details: String
tag_ids: BulkUpdateIds
ignore_auto_tag: Boolean
organized: Boolean
}
input StudioDestroyInput {

View file

@ -24,6 +24,7 @@ type Tag {
parent_count: Int! # Resolver
child_count: Int! # Resolver
custom_fields: Map!
}
input TagCreateInput {
@ -31,6 +32,7 @@ input TagCreateInput {
"Value that does not appear in the UI but overrides name for sorting"
sort_name: String
description: String
"Duplicate aliases and those equal to name will be ignored (case-insensitive)"
aliases: [String!]
ignore_auto_tag: Boolean
favorite: Boolean
@ -40,6 +42,8 @@ input TagCreateInput {
parent_ids: [ID!]
child_ids: [ID!]
custom_fields: Map
}
input TagUpdateInput {
@ -48,6 +52,7 @@ input TagUpdateInput {
"Value that does not appear in the UI but overrides name for sorting"
sort_name: String
description: String
"Duplicate aliases and those equal to name will be ignored (case-insensitive)"
aliases: [String!]
ignore_auto_tag: Boolean
favorite: Boolean
@ -57,6 +62,8 @@ input TagUpdateInput {
parent_ids: [ID!]
child_ids: [ID!]
custom_fields: CustomFieldsInput
}
input TagDestroyInput {
@ -71,11 +78,14 @@ type FindTagsResultType {
input TagsMergeInput {
source: [ID!]!
destination: ID!
# values defined here will override values in the destination
values: TagUpdateInput
}
input BulkTagUpdateInput {
ids: [ID!]
description: String
"Duplicate aliases and those equal to name will result in an error (case-insensitive)"
aliases: BulkUpdateStrings
ignore_auto_tag: Boolean
favorite: Boolean

View file

@ -29,6 +29,13 @@ fragment StudioFragment on Studio {
fragment TagFragment on Tag {
name
id
description
aliases
category {
id
name
description
}
}
fragment MeasurementsFragment on Measurements {
@ -120,18 +127,6 @@ fragment SceneFragment on Scene {
}
}
query FindSceneByFingerprint($fingerprint: FingerprintQueryInput!) {
findSceneByFingerprint(fingerprint: $fingerprint) {
...SceneFragment
}
}
query FindScenesByFullFingerprints($fingerprints: [FingerprintQueryInput!]!) {
findScenesByFullFingerprints(fingerprints: $fingerprints) {
...SceneFragment
}
}
query FindScenesBySceneFingerprints(
$fingerprints: [[FingerprintQueryInput!]!]!
) {

View file

@ -40,6 +40,8 @@ func authenticateHandler() func(http.Handler) http.Handler {
return
}
r = session.SetLocalRequest(r)
userID, err := manager.GetInstance().SessionStore.Authenticate(w, r)
if err != nil {
if !errors.Is(err, session.ErrUnauthorized) {

View file

@ -11,6 +11,7 @@
//go:generate go run github.com/vektah/dataloaden GroupLoader int *github.com/stashapp/stash/pkg/models.Group
//go:generate go run github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/models.FileID github.com/stashapp/stash/pkg/models.File
//go:generate go run github.com/vektah/dataloaden FolderLoader github.com/stashapp/stash/pkg/models.FolderID *github.com/stashapp/stash/pkg/models.Folder
//go:generate go run github.com/vektah/dataloaden FolderRelatedFolderIDsLoader github.com/stashapp/stash/pkg/models.FolderID []github.com/stashapp/stash/pkg/models.FolderID
//go:generate go run github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
@ -42,28 +43,40 @@ const (
)
type Loaders struct {
SceneByID *SceneLoader
SceneFiles *SceneFileIDsLoader
ScenePlayCount *ScenePlayCountLoader
SceneOCount *SceneOCountLoader
ScenePlayHistory *ScenePlayHistoryLoader
SceneOHistory *SceneOHistoryLoader
SceneLastPlayed *SceneLastPlayedLoader
SceneByID *SceneLoader
SceneFiles *SceneFileIDsLoader
ScenePlayCount *ScenePlayCountLoader
SceneOCount *SceneOCountLoader
ScenePlayHistory *ScenePlayHistoryLoader
SceneOHistory *SceneOHistoryLoader
SceneLastPlayed *SceneLastPlayedLoader
SceneCustomFields *CustomFieldsLoader
ImageFiles *ImageFileIDsLoader
GalleryFiles *GalleryFileIDsLoader
GalleryByID *GalleryLoader
ImageByID *ImageLoader
GalleryByID *GalleryLoader
GalleryCustomFields *CustomFieldsLoader
ImageByID *ImageLoader
ImageCustomFields *CustomFieldsLoader
PerformerByID *PerformerLoader
PerformerCustomFields *CustomFieldsLoader
StudioByID *StudioLoader
TagByID *TagLoader
GroupByID *GroupLoader
FileByID *FileLoader
FolderByID *FolderLoader
StudioByID *StudioLoader
StudioCustomFields *CustomFieldsLoader
TagByID *TagLoader
TagCustomFields *CustomFieldsLoader
GroupByID *GroupLoader
GroupCustomFields *CustomFieldsLoader
FileByID *FileLoader
FolderByID *FolderLoader
FolderParentFolderIDs *FolderRelatedFolderIDsLoader
FolderSubFolderIDs *FolderRelatedFolderIDsLoader
}
type Middleware struct {
@ -84,11 +97,21 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch,
fetch: m.fetchGalleries(ctx),
},
GalleryCustomFields: &CustomFieldsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchGalleryCustomFields(ctx),
},
ImageByID: &ImageLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchImages(ctx),
},
ImageCustomFields: &CustomFieldsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchImageCustomFields(ctx),
},
PerformerByID: &PerformerLoader{
wait: wait,
maxBatch: maxBatch,
@ -99,6 +122,16 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch,
fetch: m.fetchPerformerCustomFields(ctx),
},
StudioCustomFields: &CustomFieldsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchStudioCustomFields(ctx),
},
SceneCustomFields: &CustomFieldsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchSceneCustomFields(ctx),
},
StudioByID: &StudioLoader{
wait: wait,
maxBatch: maxBatch,
@ -109,11 +142,21 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch,
fetch: m.fetchTags(ctx),
},
TagCustomFields: &CustomFieldsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchTagCustomFields(ctx),
},
GroupByID: &GroupLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchGroups(ctx),
},
GroupCustomFields: &CustomFieldsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchGroupCustomFields(ctx),
},
FileByID: &FileLoader{
wait: wait,
maxBatch: maxBatch,
@ -124,6 +167,16 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch,
fetch: m.fetchFolders(ctx),
},
FolderParentFolderIDs: &FolderRelatedFolderIDsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchFoldersParentFolderIDs(ctx),
},
FolderSubFolderIDs: &FolderRelatedFolderIDsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchFoldersSubFolderIDs(ctx),
},
SceneFiles: &SceneFileIDsLoader{
wait: wait,
maxBatch: maxBatch,
@ -194,6 +247,18 @@ func (m Middleware) fetchScenes(ctx context.Context) func(keys []int) ([]*models
}
}
func (m Middleware) fetchSceneCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) {
return func(keys []int) (ret []models.CustomFieldMap, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Scene.GetCustomFieldsBulk(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchImages(ctx context.Context) func(keys []int) ([]*models.Image, []error) {
return func(keys []int) (ret []*models.Image, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
@ -206,6 +271,18 @@ func (m Middleware) fetchImages(ctx context.Context) func(keys []int) ([]*models
}
}
func (m Middleware) fetchImageCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) {
return func(keys []int) (ret []models.CustomFieldMap, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Image.GetCustomFieldsBulk(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchGalleries(ctx context.Context) func(keys []int) ([]*models.Gallery, []error) {
return func(keys []int) (ret []*models.Gallery, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
@ -253,6 +330,18 @@ func (m Middleware) fetchStudios(ctx context.Context) func(keys []int) ([]*model
}
}
func (m Middleware) fetchStudioCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) {
return func(keys []int) (ret []models.CustomFieldMap, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Studio.GetCustomFieldsBulk(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchTags(ctx context.Context) func(keys []int) ([]*models.Tag, []error) {
return func(keys []int) (ret []*models.Tag, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
@ -264,6 +353,42 @@ func (m Middleware) fetchTags(ctx context.Context) func(keys []int) ([]*models.T
}
}
func (m Middleware) fetchTagCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) {
return func(keys []int) (ret []models.CustomFieldMap, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Tag.GetCustomFieldsBulk(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchGroupCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) {
return func(keys []int) (ret []models.CustomFieldMap, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Group.GetCustomFieldsBulk(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchGalleryCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) {
return func(keys []int) (ret []models.CustomFieldMap, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Gallery.GetCustomFieldsBulk(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchGroups(ctx context.Context) func(keys []int) ([]*models.Group, []error) {
return func(keys []int) (ret []*models.Group, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
@ -297,6 +422,28 @@ func (m Middleware) fetchFolders(ctx context.Context) func(keys []models.FolderI
}
}
func (m Middleware) fetchFoldersParentFolderIDs(ctx context.Context) func(keys []models.FolderID) ([][]models.FolderID, []error) {
return func(keys []models.FolderID) (ret [][]models.FolderID, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Folder.GetManyParentFolderIDs(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchFoldersSubFolderIDs(ctx context.Context) func(keys []models.FolderID) ([][]models.FolderID, []error) {
return func(keys []models.FolderID) (ret [][]models.FolderID, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Folder.GetManySubFolderIDs(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([][]models.FileID, []error) {
return func(keys []int) (ret [][]models.FileID, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {

View file

@ -0,0 +1,225 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/models"
)
// FolderParentFolderIDsLoaderConfig captures the config to create a new FolderParentFolderIDsLoader
type FolderParentFolderIDsLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []models.FolderID) ([][]models.FolderID, []error)
// Wait is how long wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit
MaxBatch int
}
// NewFolderParentFolderIDsLoader creates a new FolderParentFolderIDsLoader given a fetch, wait, and maxBatch
func NewFolderParentFolderIDsLoader(config FolderParentFolderIDsLoaderConfig) *FolderRelatedFolderIDsLoader {
return &FolderRelatedFolderIDsLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// FolderRelatedFolderIDsLoader batches and caches requests
type FolderRelatedFolderIDsLoader struct {
// this method provides the data for the loader
fetch func(keys []models.FolderID) ([][]models.FolderID, []error)
// how long to done before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[models.FolderID][]models.FolderID
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *folderParentFolderIDsLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type folderParentFolderIDsLoaderBatch struct {
keys []models.FolderID
data [][]models.FolderID
error []error
closing bool
done chan struct{}
}
// Load a FolderID by key, batching and caching will be applied automatically
func (l *FolderRelatedFolderIDsLoader) Load(key models.FolderID) ([]models.FolderID, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a FolderID.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *FolderRelatedFolderIDsLoader) LoadThunk(key models.FolderID) func() ([]models.FolderID, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() ([]models.FolderID, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &folderParentFolderIDsLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() ([]models.FolderID, error) {
<-batch.done
var data []models.FolderID
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// its convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *FolderRelatedFolderIDsLoader) LoadAll(keys []models.FolderID) ([][]models.FolderID, []error) {
results := make([]func() ([]models.FolderID, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
folderIDs := make([][]models.FolderID, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
folderIDs[i], errors[i] = thunk()
}
return folderIDs, errors
}
// LoadAllThunk returns a function that when called will block waiting for a FolderIDs.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *FolderRelatedFolderIDsLoader) LoadAllThunk(keys []models.FolderID) func() ([][]models.FolderID, []error) {
results := make([]func() ([]models.FolderID, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([][]models.FolderID, []error) {
folderIDs := make([][]models.FolderID, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
folderIDs[i], errors[i] = thunk()
}
return folderIDs, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *FolderRelatedFolderIDsLoader) Prime(key models.FolderID, value []models.FolderID) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
// make a copy when writing to the cache, its easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := make([]models.FolderID, len(value))
copy(cpy, value)
l.unsafeSet(key, cpy)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *FolderRelatedFolderIDsLoader) Clear(key models.FolderID) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *FolderRelatedFolderIDsLoader) unsafeSet(key models.FolderID, value []models.FolderID) {
if l.cache == nil {
l.cache = map[models.FolderID][]models.FolderID{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *folderParentFolderIDsLoaderBatch) keyIndex(l *FolderRelatedFolderIDsLoader, key models.FolderID) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *folderParentFolderIDsLoaderBatch) startTimer(l *FolderRelatedFolderIDsLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *folderParentFolderIDsLoaderBatch) end(l *FolderRelatedFolderIDsLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}

View file

@ -7,6 +7,7 @@ import (
"sort"
"strconv"
"github.com/99designs/gqlgen/graphql"
"github.com/stashapp/stash/internal/build"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/logger"
@ -145,6 +146,13 @@ func (r *Resolver) withReadTxn(ctx context.Context, fn func(ctx context.Context)
return r.repository.WithReadTxn(ctx, fn)
}
// idOnly returns true if the query is only asking for the id field.
// This can be used to optimize certain queries where we don't need to load the full object if we're only getting the id.
func (r *Resolver) idOnly(ctx context.Context) bool {
fields := graphql.CollectAllFields(ctx)
return len(fields) == 1 && fields[0] == "id"
}
func (r *queryResolver) MarkerWall(ctx context.Context, q *string) (ret []*models.SceneMarker, err error) {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.SceneMarker.Wall(ctx, q)

View file

@ -2,19 +2,77 @@ package api
import (
"context"
"path/filepath"
"github.com/stashapp/stash/internal/api/loaders"
"github.com/stashapp/stash/pkg/models"
)
func (r *folderResolver) Basename(ctx context.Context, obj *models.Folder) (string, error) {
return filepath.Base(obj.Path), nil
}
func (r *folderResolver) ParentFolder(ctx context.Context, obj *models.Folder) (*models.Folder, error) {
if obj.ParentFolderID == nil {
return nil, nil
}
if r.idOnly(ctx) {
return &models.Folder{ID: *obj.ParentFolderID}, nil
}
return loaders.From(ctx).FolderByID.Load(*obj.ParentFolderID)
}
func foldersFromIDs(ids []models.FolderID) []*models.Folder {
ret := make([]*models.Folder, len(ids))
for i, id := range ids {
ret[i] = &models.Folder{ID: id}
}
return ret
}
func (r *folderResolver) ParentFolders(ctx context.Context, obj *models.Folder) ([]*models.Folder, error) {
ids, err := loaders.From(ctx).FolderParentFolderIDs.Load(obj.ID)
if err != nil {
return nil, err
}
if r.idOnly(ctx) {
return foldersFromIDs(ids), nil
}
var errs []error
ret, errs := loaders.From(ctx).FolderByID.LoadAll(ids)
return ret, firstError(errs)
}
func (r *folderResolver) SubFolders(ctx context.Context, obj *models.Folder) ([]*models.Folder, error) {
ids, err := loaders.From(ctx).FolderSubFolderIDs.Load(obj.ID)
if err != nil {
return nil, err
}
if r.idOnly(ctx) {
return foldersFromIDs(ids), nil
}
var errs []error
ret, errs := loaders.From(ctx).FolderByID.LoadAll(ids)
return ret, firstError(errs)
}
func (r *folderResolver) ZipFile(ctx context.Context, obj *models.Folder) (*BasicFile, error) {
// shortcut for id only queries
if r.idOnly(ctx) {
if obj.ZipFileID == nil {
return nil, nil
}
return &BasicFile{
BaseFile: &models.BaseFile{ID: *obj.ZipFileID},
}, nil
}
return zipFileResolver(ctx, obj.ZipFileID)
}

View file

@ -216,3 +216,16 @@ func (r *galleryResolver) Image(ctx context.Context, obj *models.Gallery, index
return
}
func (r *galleryResolver) CustomFields(ctx context.Context, obj *models.Gallery) (map[string]interface{}, error) {
m, err := loaders.From(ctx).GalleryCustomFields.Load(obj.ID)
if err != nil {
return nil, err
}
if m == nil {
return make(map[string]interface{}), nil
}
return m, nil
}

View file

@ -161,3 +161,12 @@ func (r *imageResolver) Urls(ctx context.Context, obj *models.Image) ([]string,
return obj.URLs.List(), nil
}
func (r *imageResolver) CustomFields(ctx context.Context, obj *models.Image) (map[string]interface{}, error) {
customFields, err := loaders.From(ctx).ImageCustomFields.Load(obj.ID)
if err != nil {
return nil, err
}
return customFields, nil
}

View file

@ -215,3 +215,16 @@ func (r *groupResolver) OCounter(ctx context.Context, obj *models.Group) (ret *i
}
return &count, nil
}
func (r *groupResolver) CustomFields(ctx context.Context, obj *models.Group) (map[string]interface{}, error) {
m, err := loaders.From(ctx).GroupCustomFields.Load(obj.ID)
if err != nil {
return nil, err
}
if m == nil {
return make(map[string]interface{}), nil
}
return m, nil
}

View file

@ -109,6 +109,31 @@ func (r *performerResolver) HeightCm(ctx context.Context, obj *models.Performer)
return obj.Height, nil
}
func (r *performerResolver) CareerStart(ctx context.Context, obj *models.Performer) (*string, error) {
if obj.CareerStart != nil {
ret := obj.CareerStart.String()
return &ret, nil
}
return nil, nil
}
func (r *performerResolver) CareerEnd(ctx context.Context, obj *models.Performer) (*string, error) {
if obj.CareerEnd != nil {
ret := obj.CareerEnd.String()
return &ret, nil
}
return nil, nil
}
func (r *performerResolver) CareerLength(ctx context.Context, obj *models.Performer) (*string, error) {
if obj.CareerStart == nil && obj.CareerEnd == nil {
return nil, nil
}
ret := models.FormatYearRange(obj.CareerStart, obj.CareerEnd)
return &ret, nil
}
func (r *performerResolver) Birthdate(ctx context.Context, obj *models.Performer) (*string, error) {
if obj.Birthdate != nil {
ret := obj.Birthdate.String()

View file

@ -410,3 +410,16 @@ func (r *sceneResolver) OHistory(ctx context.Context, obj *models.Scene) ([]*tim
return ptrRet, nil
}
func (r *sceneResolver) CustomFields(ctx context.Context, obj *models.Scene) (map[string]interface{}, error) {
m, err := loaders.From(ctx).SceneCustomFields.Load(obj.ID)
if err != nil {
return nil, err
}
if m == nil {
return make(map[string]interface{}), nil
}
return m, nil
}

View file

@ -207,6 +207,19 @@ func (r *studioResolver) Groups(ctx context.Context, obj *models.Studio) (ret []
return ret, nil
}
func (r *studioResolver) CustomFields(ctx context.Context, obj *models.Studio) (map[string]interface{}, error) {
m, err := loaders.From(ctx).StudioCustomFields.Load(obj.ID)
if err != nil {
return nil, err
}
if m == nil {
return make(map[string]interface{}), nil
}
return m, nil
}
// deprecated
func (r *studioResolver) Movies(ctx context.Context, obj *models.Studio) (ret []*models.Group, err error) {
return r.Groups(ctx, obj)

View file

@ -181,3 +181,16 @@ func (r *tagResolver) ChildCount(ctx context.Context, obj *models.Tag) (ret int,
return ret, nil
}
func (r *tagResolver) CustomFields(ctx context.Context, obj *models.Tag) (map[string]interface{}, error) {
m, err := loaders.From(ctx).TagCustomFields.Load(obj.ID)
if err != nil {
return nil, err
}
if m == nil {
return make(map[string]interface{}), nil
}
return m, nil
}

View file

@ -5,6 +5,7 @@ import (
"encoding/json"
"errors"
"fmt"
"io/fs"
"path/filepath"
"regexp"
"strconv"
@ -85,6 +86,8 @@ func (r *mutationResolver) setConfigFloat(key string, value *float64) {
func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGeneralInput) (*ConfigGeneralResult, error) {
c := config.GetInstance()
// #4709 - allow stash paths even if they do not exist, so that users may configure stash
// for disconnected drives or network storage.
existingPaths := c.GetStashPaths()
if input.Stashes != nil {
for _, s := range input.Stashes {
@ -97,8 +100,12 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen
}
}
if isNew {
s.Path = filepath.Clean(s.Path)
// if it exists, it must be directory
exists, err := fsutil.DirExists(s.Path)
if !exists {
// allow it to not exist but if it does exist it must be a directory
if !exists && !errors.Is(err, fs.ErrNotExist) {
return makeConfigGeneralResult(), err
}
}
@ -287,6 +294,11 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen
if input.PreviewPreset != nil {
c.SetString(config.PreviewPreset, input.PreviewPreset.String())
}
r.setConfigBool(config.UseCustomSpriteInterval, input.UseCustomSpriteInterval)
r.setConfigFloat(config.SpriteInterval, input.SpriteInterval)
r.setConfigInt(config.MinimumSprites, input.MinimumSprites)
r.setConfigInt(config.MaximumSprites, input.MaximumSprites)
r.setConfigInt(config.SpriteScreenshotSize, input.SpriteScreenshotSize)
r.setConfigBool(config.TranscodeHardwareAcceleration, input.TranscodeHardwareAcceleration)
if input.MaxTranscodeSize != nil {
@ -515,6 +527,8 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input ConfigI
r.setConfigBool(config.CustomLocalesEnabled, input.CustomLocalesEnabled)
r.setConfigBool(config.DisableCustomizations, input.DisableCustomizations)
if input.DisableDropdownCreate != nil {
ddc := input.DisableDropdownCreate
r.setConfigBool(config.DisableDropdownCreatePerformer, ddc.Performer)

View file

@ -5,10 +5,14 @@ import (
"fmt"
"strconv"
"github.com/stashapp/stash/internal/desktop"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/session"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
@ -16,7 +20,7 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput)
if err := r.withTxn(ctx, func(ctx context.Context) error {
fileStore := r.repository.File
folderStore := r.repository.Folder
mover := file.NewMover(fileStore, folderStore)
mover := file.NewMover(fileStore, folderStore, manager.GetInstance().Config.GetStashPaths().Paths())
mover.RegisterHooks(ctx)
var (
@ -54,13 +58,14 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput)
folderPath := *input.DestinationFolder
// ensure folder path is within the library
if err := r.validateFolderPath(folderPath); err != nil {
stashPaths := manager.GetInstance().Config.GetStashPaths()
if err := r.validateFolderPath(stashPaths, folderPath); err != nil {
return err
}
// get or create folder hierarchy
var err error
folder, err = file.GetOrCreateFolderHierarchy(ctx, folderStore, folderPath)
folder, err = file.GetOrCreateFolderHierarchy(ctx, folderStore, folderPath, stashPaths.Paths())
if err != nil {
return fmt.Errorf("getting or creating folder hierarchy: %w", err)
}
@ -109,8 +114,7 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput)
return true, nil
}
func (r *mutationResolver) validateFolderPath(folderPath string) error {
paths := manager.GetInstance().Config.GetStashPaths()
func (r *mutationResolver) validateFolderPath(paths config.StashConfigs, folderPath string) error {
if l := paths.GetStashFromDirPath(folderPath); l == nil {
return fmt.Errorf("folder path %s must be within a stash library path", folderPath)
}
@ -210,6 +214,58 @@ func (r *mutationResolver) DeleteFiles(ctx context.Context, ids []string) (ret b
return true, nil
}
func (r *mutationResolver) DestroyFiles(ctx context.Context, ids []string) (ret bool, err error) {
fileIDs, err := stringslice.StringSliceToIntSlice(ids)
if err != nil {
return false, fmt.Errorf("converting ids: %w", err)
}
destroyer := &file.ZipDestroyer{
FileDestroyer: r.repository.File,
FolderDestroyer: r.repository.Folder,
}
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.File
for _, fileIDInt := range fileIDs {
fileID := models.FileID(fileIDInt)
f, err := qb.Find(ctx, fileID)
if err != nil {
return err
}
if len(f) == 0 {
return fmt.Errorf("file with id %d not found", fileID)
}
path := f[0].Base().Path
// ensure not a primary file
isPrimary, err := qb.IsPrimary(ctx, fileID)
if err != nil {
return fmt.Errorf("checking if file %s is primary: %w", path, err)
}
if isPrimary {
return fmt.Errorf("cannot destroy primary file entry %s", path)
}
// destroy DB entries only (no filesystem deletion)
const deleteFile = false
if err := destroyer.DestroyZip(ctx, f[0], nil, deleteFile); err != nil {
return fmt.Errorf("destroying file entry %s: %w", path, err)
}
}
return nil
}); err != nil {
return false, err
}
return true, nil
}
func (r *mutationResolver) FileSetFingerprints(ctx context.Context, input FileSetFingerprintsInput) (bool, error) {
fileIDInt, err := strconv.Atoi(input.ID)
if err != nil {
@ -274,3 +330,71 @@ func (r *mutationResolver) FileSetFingerprints(ctx context.Context, input FileSe
return true, nil
}
func (r *mutationResolver) RevealFileInFileManager(ctx context.Context, id string) (bool, error) {
// disallow if request did not come from localhost
if !session.IsLocalRequest(ctx) {
logger.Warnf("Attempt to reveal file in file manager from non-local request")
return false, fmt.Errorf("access denied")
}
fileIDInt, err := strconv.Atoi(id)
if err != nil {
return false, fmt.Errorf("converting id: %w", err)
}
var filePath string
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
files, err := r.repository.File.Find(ctx, models.FileID(fileIDInt))
if err != nil {
return fmt.Errorf("finding file: %w", err)
}
if len(files) == 0 {
return fmt.Errorf("file with id %d not found", fileIDInt)
}
filePath = files[0].Base().Path
return nil
}); err != nil {
return false, err
}
if err := desktop.RevealInFileManager(filePath); err != nil {
return false, err
}
return true, nil
}
func (r *mutationResolver) RevealFolderInFileManager(ctx context.Context, id string) (bool, error) {
// disallow if request did not come from localhost
if !session.IsLocalRequest(ctx) {
logger.Warnf("Attempt to reveal folder in file manager from non-local request")
return false, fmt.Errorf("access denied")
}
folderIDInt, err := strconv.Atoi(id)
if err != nil {
return false, fmt.Errorf("converting id: %w", err)
}
var folderPath string
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
folder, err := r.repository.Folder.Find(ctx, models.FolderID(folderIDInt))
if err != nil {
return fmt.Errorf("finding folder: %w", err)
}
if folder == nil {
return fmt.Errorf("folder with id %d not found", folderIDInt)
}
folderPath = folder.Path
return nil
}); err != nil {
return false, err
}
if err := desktop.RevealInFileManager(folderPath); err != nil {
return false, err
}
return true, nil
}

View file

@ -42,7 +42,10 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat
}
// Populate a new gallery from the input
newGallery := models.NewGallery()
newGallery := models.CreateGalleryInput{
Gallery: &models.Gallery{},
}
*newGallery.Gallery = models.NewGallery()
newGallery.Title = strings.TrimSpace(input.Title)
newGallery.Code = translator.string(input.Code)
@ -81,10 +84,12 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat
newGallery.URLs = models.NewRelatedStrings([]string{strings.TrimSpace(*input.URL)})
}
newGallery.CustomFields = convertMapJSONNumbers(input.CustomFields)
// Start the transaction and save the gallery
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Gallery
if err := qb.Create(ctx, &newGallery, nil); err != nil {
if err := qb.Create(ctx, &newGallery); err != nil {
return err
}
@ -241,6 +246,10 @@ func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.Galle
return nil, fmt.Errorf("converting scene ids: %w", err)
}
if input.CustomFields != nil {
updatedGallery.CustomFields = handleUpdateCustomFields(*input.CustomFields)
}
// gallery scene is set from the scene only
gallery, err := qb.UpdatePartial(ctx, galleryID, updatedGallery)
@ -293,6 +302,10 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGall
return nil, fmt.Errorf("converting scene ids: %w", err)
}
if input.CustomFields != nil {
updatedGallery.CustomFields = handleUpdateCustomFields(*input.CustomFields)
}
ret := []*models.Gallery{}
// Start the transaction and save the galleries
@ -346,6 +359,7 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
deleteGenerated := utils.IsTrue(input.DeleteGenerated)
deleteFile := utils.IsTrue(input.DeleteFile)
destroyFileEntry := utils.IsTrue(input.DestroyFileEntry)
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Gallery
@ -366,7 +380,7 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
galleries = append(galleries, gallery)
imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile)
imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry)
if err != nil {
return err
}

View file

@ -14,13 +14,17 @@ import (
"github.com/stashapp/stash/pkg/utils"
)
func groupFromGroupCreateInput(ctx context.Context, input GroupCreateInput) (*models.Group, error) {
func groupFromGroupCreateInput(ctx context.Context, input GroupCreateInput) (*models.CreateGroupInput, error) {
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
// Populate a new group from the input
newGroup := models.NewGroup()
newGroupInput := &models.CreateGroupInput{
Group: &models.Group{},
}
*newGroupInput.Group = models.NewGroup()
newGroup := newGroupInput.Group
newGroup.Name = strings.TrimSpace(input.Name)
newGroup.Aliases = translator.string(input.Aliases)
@ -59,28 +63,19 @@ func groupFromGroupCreateInput(ctx context.Context, input GroupCreateInput) (*mo
newGroup.URLs = models.NewRelatedStrings(stringslice.TrimSpace(input.Urls))
}
return &newGroup, nil
}
func (r *mutationResolver) GroupCreate(ctx context.Context, input GroupCreateInput) (*models.Group, error) {
newGroup, err := groupFromGroupCreateInput(ctx, input)
if err != nil {
return nil, err
}
newGroupInput.CustomFields = convertMapJSONNumbers(input.CustomFields)
// Process the base 64 encoded image string
var frontimageData []byte
if input.FrontImage != nil {
frontimageData, err = utils.ProcessImageInput(ctx, *input.FrontImage)
newGroupInput.FrontImageData, err = utils.ProcessImageInput(ctx, *input.FrontImage)
if err != nil {
return nil, fmt.Errorf("processing front image: %w", err)
}
}
// Process the base 64 encoded image string
var backimageData []byte
if input.BackImage != nil {
backimageData, err = utils.ProcessImageInput(ctx, *input.BackImage)
newGroupInput.BackImageData, err = utils.ProcessImageInput(ctx, *input.BackImage)
if err != nil {
return nil, fmt.Errorf("processing back image: %w", err)
}
@ -88,13 +83,22 @@ func (r *mutationResolver) GroupCreate(ctx context.Context, input GroupCreateInp
// HACK: if back image is being set, set the front image to the default.
// This is because we can't have a null front image with a non-null back image.
if len(frontimageData) == 0 && len(backimageData) != 0 {
frontimageData = static.ReadAll(static.DefaultGroupImage)
if len(newGroupInput.FrontImageData) == 0 && len(newGroupInput.BackImageData) != 0 {
newGroupInput.FrontImageData = static.ReadAll(static.DefaultGroupImage)
}
return newGroupInput, nil
}
func (r *mutationResolver) GroupCreate(ctx context.Context, input GroupCreateInput) (*models.Group, error) {
createGroupInput, err := groupFromGroupCreateInput(ctx, input)
if err != nil {
return nil, err
}
// Start the transaction and save the group
if err := r.withTxn(ctx, func(ctx context.Context) error {
if err = r.groupService.Create(ctx, newGroup, frontimageData, backimageData); err != nil {
if err = r.groupService.Create(ctx, createGroupInput); err != nil {
return err
}
@ -104,9 +108,9 @@ func (r *mutationResolver) GroupCreate(ctx context.Context, input GroupCreateInp
}
// for backwards compatibility - run both movie and group hooks
r.hookExecutor.ExecutePostHooks(ctx, newGroup.ID, hook.GroupCreatePost, input, nil)
r.hookExecutor.ExecutePostHooks(ctx, newGroup.ID, hook.MovieCreatePost, input, nil)
return r.getGroup(ctx, newGroup.ID)
r.hookExecutor.ExecutePostHooks(ctx, createGroupInput.Group.ID, hook.GroupCreatePost, input, nil)
r.hookExecutor.ExecutePostHooks(ctx, createGroupInput.Group.ID, hook.MovieCreatePost, input, nil)
return r.getGroup(ctx, createGroupInput.Group.ID)
}
func groupPartialFromGroupUpdateInput(translator changesetTranslator, input GroupUpdateInput) (ret models.GroupPartial, err error) {
@ -150,6 +154,12 @@ func groupPartialFromGroupUpdateInput(translator changesetTranslator, input Grou
}
updatedGroup.URLs = translator.updateStrings(input.Urls, "urls")
if input.CustomFields != nil {
updatedGroup.CustomFields = *input.CustomFields
// convert json.Numbers to int/float
updatedGroup.CustomFields.Full = convertMapJSONNumbers(updatedGroup.CustomFields.Full)
updatedGroup.CustomFields.Partial = convertMapJSONNumbers(updatedGroup.CustomFields.Partial)
}
return updatedGroup, nil
}
@ -217,6 +227,12 @@ func (r *mutationResolver) GroupUpdate(ctx context.Context, input GroupUpdateInp
func groupPartialFromBulkGroupUpdateInput(translator changesetTranslator, input BulkGroupUpdateInput) (ret models.GroupPartial, err error) {
updatedGroup := models.NewGroupPartial()
updatedGroup.Date, err = translator.optionalDate(input.Date, "date")
if err != nil {
err = fmt.Errorf("converting date: %w", err)
return
}
updatedGroup.Synopsis = translator.optionalString(input.Synopsis, "synopsis")
updatedGroup.Rating = translator.optionalInt(input.Rating100, "rating100")
updatedGroup.Director = translator.optionalString(input.Director, "director")
@ -246,6 +262,13 @@ func groupPartialFromBulkGroupUpdateInput(translator changesetTranslator, input
updatedGroup.URLs = translator.optionalURLsBulk(input.Urls, nil)
if input.CustomFields != nil {
updatedGroup.CustomFields = *input.CustomFields
// convert json.Numbers to int/float
updatedGroup.CustomFields.Full = convertMapJSONNumbers(updatedGroup.CustomFields.Full)
updatedGroup.CustomFields.Partial = convertMapJSONNumbers(updatedGroup.CustomFields.Partial)
}
return updatedGroup, nil
}

View file

@ -177,6 +177,13 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input models.ImageUp
return nil, fmt.Errorf("converting tag ids: %w", err)
}
if input.CustomFields != nil {
updatedImage.CustomFields = *input.CustomFields
// convert json.Numbers to int/float
updatedImage.CustomFields.Full = convertMapJSONNumbers(updatedImage.CustomFields.Full)
updatedImage.CustomFields.Partial = convertMapJSONNumbers(updatedImage.CustomFields.Partial)
}
qb := r.repository.Image
image, err := qb.UpdatePartial(ctx, imageID, updatedImage)
if err != nil {
@ -237,6 +244,13 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU
return nil, fmt.Errorf("converting tag ids: %w", err)
}
if input.CustomFields != nil {
updatedImage.CustomFields = *input.CustomFields
// convert json.Numbers to int/float
updatedImage.CustomFields.Full = convertMapJSONNumbers(updatedImage.CustomFields.Full)
updatedImage.CustomFields.Partial = convertMapJSONNumbers(updatedImage.CustomFields.Partial)
}
// Start the transaction and save the images
if err := r.withTxn(ctx, func(ctx context.Context) error {
var updatedGalleryIDs []int
@ -325,7 +339,7 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
return fmt.Errorf("image with id %d not found", imageID)
}
return r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile))
return r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile), utils.IsTrue(input.DestroyFileEntry))
}); err != nil {
fileDeleter.Rollback()
return false, err
@ -372,7 +386,7 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
images = append(images, i)
if err := r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)); err != nil {
if err := r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile), utils.IsTrue(input.DestroyFileEntry)); err != nil {
return err
}
}

View file

@ -122,9 +122,10 @@ func (r *mutationResolver) MigrateHashNaming(ctx context.Context) (string, error
func (r *mutationResolver) BackupDatabase(ctx context.Context, input BackupDatabaseInput) (*string, error) {
// if download is true, then backup to temporary file and return a link
download := input.Download != nil && *input.Download
includeBlobs := input.IncludeBlobs != nil && *input.IncludeBlobs
mgr := manager.GetInstance()
backupPath, backupName, err := mgr.BackupDatabase(download)
backupPath, backupName, err := mgr.BackupDatabase(download, includeBlobs)
if err != nil {
logger.Errorf("Error backing up database: %v", err)
return nil, err

View file

@ -2,13 +2,16 @@ package api
import (
"context"
"errors"
"fmt"
"slices"
"strconv"
"strings"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/performer"
"github.com/stashapp/stash/pkg/plugin/hook"
"github.com/stashapp/stash/pkg/sliceutil"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
"github.com/stashapp/stash/pkg/utils"
)
@ -40,7 +43,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
newPerformer.Name = strings.TrimSpace(input.Name)
newPerformer.Disambiguation = translator.string(input.Disambiguation)
newPerformer.Aliases = models.NewRelatedStrings(stringslice.TrimSpace(input.AliasList))
newPerformer.Aliases = models.NewRelatedStrings(stringslice.UniqueExcludeFold(stringslice.TrimSpace(input.AliasList), newPerformer.Name))
newPerformer.Gender = input.Gender
newPerformer.Ethnicity = translator.string(input.Ethnicity)
newPerformer.Country = translator.string(input.Country)
@ -49,7 +52,6 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
newPerformer.FakeTits = translator.string(input.FakeTits)
newPerformer.PenisLength = input.PenisLength
newPerformer.Circumcised = input.Circumcised
newPerformer.CareerLength = translator.string(input.CareerLength)
newPerformer.Tattoos = translator.string(input.Tattoos)
newPerformer.Piercings = translator.string(input.Piercings)
newPerformer.Favorite = translator.bool(input.Favorite)
@ -87,6 +89,25 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
return nil, fmt.Errorf("converting death date: %w", err)
}
newPerformer.CareerStart, err = translator.datePtr(input.CareerStart)
if err != nil {
return nil, fmt.Errorf("converting career start: %w", err)
}
newPerformer.CareerEnd, err = translator.datePtr(input.CareerEnd)
if err != nil {
return nil, fmt.Errorf("converting career end: %w", err)
}
// if career_start/career_end not provided, parse deprecated career_length
if newPerformer.CareerStart == nil && newPerformer.CareerEnd == nil && input.CareerLength != nil {
start, end, err := models.ParseYearRangeString(*input.CareerLength)
if err != nil {
return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err)
}
newPerformer.CareerStart = start
newPerformer.CareerEnd = end
}
newPerformer.TagIDs, err = translator.relatedIds(input.TagIds)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
@ -136,7 +157,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
return r.getPerformer(ctx, newPerformer.ID)
}
func (r *mutationResolver) validateNoLegacyURLs(translator changesetTranslator) error {
func validateNoLegacyURLs(translator changesetTranslator) error {
// ensure url/twitter/instagram are not included in the input
if translator.hasField("url") {
return fmt.Errorf("url field must not be included if urls is included")
@ -151,7 +172,7 @@ func (r *mutationResolver) validateNoLegacyURLs(translator changesetTranslator)
return nil
}
func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int, legacyURL, legacyTwitter, legacyInstagram models.OptionalString, updatedPerformer *models.PerformerPartial) error {
func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int, legacyURLs legacyPerformerURLs, updatedPerformer *models.PerformerPartial) error {
qb := r.repository.Performer
// we need to be careful with URL/Twitter/Instagram
@ -170,23 +191,23 @@ func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int
existingURLs := p.URLs.List()
// performer partial URLs should be empty
if legacyURL.Set {
if legacyURLs.URL.Set {
replaced := false
for i, url := range existingURLs {
if !performer.IsTwitterURL(url) && !performer.IsInstagramURL(url) {
existingURLs[i] = legacyURL.Value
existingURLs[i] = legacyURLs.URL.Value
replaced = true
break
}
}
if !replaced {
existingURLs = append(existingURLs, legacyURL.Value)
existingURLs = append(existingURLs, legacyURLs.URL.Value)
}
}
if legacyTwitter.Set {
value := utils.URLFromHandle(legacyTwitter.Value, twitterURL)
if legacyURLs.Twitter.Set {
value := utils.URLFromHandle(legacyURLs.Twitter.Value, twitterURL)
found := false
// find and replace the first twitter URL
for i, url := range existingURLs {
@ -201,9 +222,9 @@ func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int
existingURLs = append(existingURLs, value)
}
}
if legacyInstagram.Set {
if legacyURLs.Instagram.Set {
found := false
value := utils.URLFromHandle(legacyInstagram.Value, instagramURL)
value := utils.URLFromHandle(legacyURLs.Instagram.Value, instagramURL)
// find and replace the first instagram URL
for i, url := range existingURLs {
if performer.IsInstagramURL(url) {
@ -226,16 +247,25 @@ func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int
return nil
}
func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.PerformerUpdateInput) (*models.Performer, error) {
performerID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, fmt.Errorf("converting id: %w", err)
}
type legacyPerformerURLs struct {
URL models.OptionalString
Twitter models.OptionalString
Instagram models.OptionalString
}
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
func (u *legacyPerformerURLs) AnySet() bool {
return u.URL.Set || u.Twitter.Set || u.Instagram.Set
}
func legacyPerformerURLsFromInput(input models.PerformerUpdateInput, translator changesetTranslator) legacyPerformerURLs {
return legacyPerformerURLs{
URL: translator.optionalString(input.URL, "url"),
Twitter: translator.optionalString(input.Twitter, "twitter"),
Instagram: translator.optionalString(input.Instagram, "instagram"),
}
}
func performerPartialFromInput(input models.PerformerUpdateInput, translator changesetTranslator) (*models.PerformerPartial, error) {
// Populate performer from the input
updatedPerformer := models.NewPerformerPartial()
@ -249,7 +279,29 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
updatedPerformer.FakeTits = translator.optionalString(input.FakeTits, "fake_tits")
updatedPerformer.PenisLength = translator.optionalFloat64(input.PenisLength, "penis_length")
updatedPerformer.Circumcised = translator.optionalString((*string)(input.Circumcised), "circumcised")
updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length")
// prefer career_start/career_end over deprecated career_length
if translator.hasField("career_start") || translator.hasField("career_end") {
var err error
updatedPerformer.CareerStart, err = translator.optionalDate(input.CareerStart, "career_start")
if err != nil {
return nil, fmt.Errorf("converting career start: %w", err)
}
updatedPerformer.CareerEnd, err = translator.optionalDate(input.CareerEnd, "career_end")
if err != nil {
return nil, fmt.Errorf("converting career end: %w", err)
}
} else if translator.hasField("career_length") && input.CareerLength != nil {
start, end, err := models.ParseYearRangeString(*input.CareerLength)
if err != nil {
return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err)
}
if start != nil {
updatedPerformer.CareerStart = models.NewOptionalDate(*start)
}
if end != nil {
updatedPerformer.CareerEnd = models.NewOptionalDate(*end)
}
}
updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos")
updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings")
updatedPerformer.Favorite = translator.optionalBool(input.Favorite, "favorite")
@ -260,19 +312,17 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
updatedPerformer.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag")
updatedPerformer.StashIDs = translator.updateStashIDs(input.StashIds, "stash_ids")
var err error
if translator.hasField("urls") {
// ensure url/twitter/instagram are not included in the input
if err := r.validateNoLegacyURLs(translator); err != nil {
if err := validateNoLegacyURLs(translator); err != nil {
return nil, err
}
updatedPerformer.URLs = translator.updateStrings(input.Urls, "urls")
}
legacyURL := translator.optionalString(input.URL, "url")
legacyTwitter := translator.optionalString(input.Twitter, "twitter")
legacyInstagram := translator.optionalString(input.Instagram, "instagram")
updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate")
if err != nil {
return nil, fmt.Errorf("converting birthdate: %w", err)
@ -299,6 +349,26 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
updatedPerformer.CustomFields = handleUpdateCustomFields(input.CustomFields)
return &updatedPerformer, nil
}
func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.PerformerUpdateInput) (*models.Performer, error) {
performerID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, fmt.Errorf("converting id: %w", err)
}
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
updatedPerformer, err := performerPartialFromInput(input, translator)
if err != nil {
return nil, err
}
legacyURLs := legacyPerformerURLsFromInput(input, translator)
var imageData []byte
imageIncluded := translator.hasField("image")
if input.Image != nil {
@ -312,17 +382,38 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Performer
if legacyURL.Set || legacyTwitter.Set || legacyInstagram.Set {
if err := r.handleLegacyURLs(ctx, performerID, legacyURL, legacyTwitter, legacyInstagram, &updatedPerformer); err != nil {
if legacyURLs.AnySet() {
if err := r.handleLegacyURLs(ctx, performerID, legacyURLs, updatedPerformer); err != nil {
return err
}
}
if err := performer.ValidateUpdate(ctx, performerID, updatedPerformer, qb); err != nil {
if updatedPerformer.Aliases != nil {
p, err := qb.Find(ctx, performerID)
if err != nil {
return err
}
if p != nil {
if err := p.LoadAliases(ctx, qb); err != nil {
return err
}
effectiveAliases := updatedPerformer.Aliases.Apply(p.Aliases.List())
name := p.Name
if updatedPerformer.Name.Set {
name = updatedPerformer.Name.Value
}
sanitized := stringslice.UniqueExcludeFold(effectiveAliases, name)
updatedPerformer.Aliases.Values = sanitized
updatedPerformer.Aliases.Mode = models.RelationshipUpdateModeSet
}
}
if err := performer.ValidateUpdate(ctx, performerID, *updatedPerformer, qb); err != nil {
return err
}
_, err = qb.UpdatePartial(ctx, performerID, updatedPerformer)
_, err = qb.UpdatePartial(ctx, performerID, *updatedPerformer)
if err != nil {
return err
}
@ -366,7 +457,28 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
updatedPerformer.FakeTits = translator.optionalString(input.FakeTits, "fake_tits")
updatedPerformer.PenisLength = translator.optionalFloat64(input.PenisLength, "penis_length")
updatedPerformer.Circumcised = translator.optionalString((*string)(input.Circumcised), "circumcised")
updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length")
// prefer career_start/career_end over deprecated career_length
if translator.hasField("career_start") || translator.hasField("career_end") {
updatedPerformer.CareerStart, err = translator.optionalDate(input.CareerStart, "career_start")
if err != nil {
return nil, fmt.Errorf("converting career start: %w", err)
}
updatedPerformer.CareerEnd, err = translator.optionalDate(input.CareerEnd, "career_end")
if err != nil {
return nil, fmt.Errorf("converting career end: %w", err)
}
} else if translator.hasField("career_length") && input.CareerLength != nil {
start, end, err := models.ParseYearRangeString(*input.CareerLength)
if err != nil {
return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err)
}
if start != nil {
updatedPerformer.CareerStart = models.NewOptionalDate(*start)
}
if end != nil {
updatedPerformer.CareerEnd = models.NewOptionalDate(*end)
}
}
updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos")
updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings")
@ -379,16 +491,18 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
if translator.hasField("urls") {
// ensure url/twitter/instagram are not included in the input
if err := r.validateNoLegacyURLs(translator); err != nil {
if err := validateNoLegacyURLs(translator); err != nil {
return nil, err
}
updatedPerformer.URLs = translator.updateStringsBulk(input.Urls, "urls")
}
legacyURL := translator.optionalString(input.URL, "url")
legacyTwitter := translator.optionalString(input.Twitter, "twitter")
legacyInstagram := translator.optionalString(input.Instagram, "instagram")
legacyURLs := legacyPerformerURLs{
URL: translator.optionalString(input.URL, "url"),
Twitter: translator.optionalString(input.Twitter, "twitter"),
Instagram: translator.optionalString(input.Instagram, "instagram"),
}
updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate")
if err != nil {
@ -425,8 +539,8 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
qb := r.repository.Performer
for _, performerID := range performerIDs {
if legacyURL.Set || legacyTwitter.Set || legacyInstagram.Set {
if err := r.handleLegacyURLs(ctx, performerID, legacyURL, legacyTwitter, legacyInstagram, &updatedPerformer); err != nil {
if legacyURLs.AnySet() {
if err := r.handleLegacyURLs(ctx, performerID, legacyURLs, &updatedPerformer); err != nil {
return err
}
}
@ -506,3 +620,87 @@ func (r *mutationResolver) PerformersDestroy(ctx context.Context, performerIDs [
return true, nil
}
func (r *mutationResolver) PerformerMerge(ctx context.Context, input PerformerMergeInput) (*models.Performer, error) {
srcIDs, err := stringslice.StringSliceToIntSlice(input.Source)
if err != nil {
return nil, fmt.Errorf("converting source ids: %w", err)
}
// ensure source ids are unique
srcIDs = sliceutil.AppendUniques(nil, srcIDs)
destID, err := strconv.Atoi(input.Destination)
if err != nil {
return nil, fmt.Errorf("converting destination id: %w", err)
}
// ensure destination is not in source list
if slices.Contains(srcIDs, destID) {
return nil, errors.New("destination performer cannot be in source list")
}
var values *models.PerformerPartial
var imageData []byte
if input.Values != nil {
translator := changesetTranslator{
inputMap: getNamedUpdateInputMap(ctx, "input.values"),
}
values, err = performerPartialFromInput(*input.Values, translator)
if err != nil {
return nil, err
}
legacyURLs := legacyPerformerURLsFromInput(*input.Values, translator)
if legacyURLs.AnySet() {
return nil, errors.New("Merging legacy performer URLs is not supported")
}
if input.Values.Image != nil {
var err error
imageData, err = utils.ProcessImageInput(ctx, *input.Values.Image)
if err != nil {
return nil, fmt.Errorf("processing cover image: %w", err)
}
}
} else {
v := models.NewPerformerPartial()
values = &v
}
var dest *models.Performer
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Performer
dest, err = qb.Find(ctx, destID)
if err != nil {
return fmt.Errorf("finding destination performer ID %d: %w", destID, err)
}
// ensure source performers exist
if _, err := qb.FindMany(ctx, srcIDs); err != nil {
return fmt.Errorf("finding source performers: %w", err)
}
if _, err := qb.UpdatePartial(ctx, destID, *values); err != nil {
return fmt.Errorf("updating performer: %w", err)
}
if err := qb.Merge(ctx, srcIDs, destID); err != nil {
return fmt.Errorf("merging performers: %w", err)
}
if len(imageData) > 0 {
if err := qb.UpdateImage(ctx, destID, imageData); err != nil {
return err
}
}
return nil
}); err != nil {
return nil, err
}
return dest, nil
}

View file

@ -103,8 +103,15 @@ func (r *mutationResolver) SceneCreate(ctx context.Context, input models.SceneCr
}
}
customFields := convertMapJSONNumbers(input.CustomFields)
if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.Resolver.sceneService.Create(ctx, &newScene, fileIDs, coverImageData)
ret, err = r.Resolver.sceneService.Create(ctx, models.CreateSceneInput{
Scene: &newScene,
FileIDs: fileIDs,
CoverImage: coverImageData,
CustomFields: customFields,
})
return err
}); err != nil {
return nil, err
@ -297,6 +304,7 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
}
var coverImageData []byte
coverImageIncluded := translator.hasField("cover_image")
if input.CoverImage != nil {
var err error
coverImageData, err = utils.ProcessImageInput(ctx, *input.CoverImage)
@ -305,26 +313,41 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
}
}
var customFields *models.CustomFieldsInput
if input.CustomFields != nil {
cfCopy := *input.CustomFields
customFields = &cfCopy
// convert json.Numbers to int/float
customFields.Full = convertMapJSONNumbers(customFields.Full)
customFields.Partial = convertMapJSONNumbers(customFields.Partial)
}
scene, err := qb.UpdatePartial(ctx, sceneID, *updatedScene)
if err != nil {
return nil, err
}
if err := r.sceneUpdateCoverImage(ctx, scene, coverImageData); err != nil {
return nil, err
if coverImageIncluded {
if err := r.sceneUpdateCoverImage(ctx, scene, coverImageData); err != nil {
return nil, err
}
}
if customFields != nil {
if err := qb.SetCustomFields(ctx, scene.ID, *customFields); err != nil {
return nil, err
}
}
return scene, nil
}
func (r *mutationResolver) sceneUpdateCoverImage(ctx context.Context, s *models.Scene, coverImageData []byte) error {
if len(coverImageData) > 0 {
qb := r.repository.Scene
qb := r.repository.Scene
// update cover table
if err := qb.UpdateCover(ctx, s.ID, coverImageData); err != nil {
return err
}
// update cover table - empty data will clear the cover
if err := qb.UpdateCover(ctx, s.ID, coverImageData); err != nil {
return err
}
return nil
@ -386,6 +409,12 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU
}
}
var customFields *models.CustomFieldsInput
if input.CustomFields != nil {
cf := handleUpdateCustomFields(*input.CustomFields)
customFields = &cf
}
ret := []*models.Scene{}
// Start the transaction and save the scenes
@ -398,6 +427,12 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU
return err
}
if customFields != nil {
if err := qb.SetCustomFields(ctx, scene.ID, *customFields); err != nil {
return err
}
}
ret = append(ret, scene)
}
@ -440,6 +475,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
deleteGenerated := utils.IsTrue(input.DeleteGenerated)
deleteFile := utils.IsTrue(input.DeleteFile)
destroyFileEntry := utils.IsTrue(input.DestroyFileEntry)
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene
@ -456,7 +492,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
// kill any running encoders
manager.KillRunningStreams(s, fileNamingAlgo)
return r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, deleteFile)
return r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry)
}); err != nil {
fileDeleter.Rollback()
return false, err
@ -494,6 +530,7 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
deleteGenerated := utils.IsTrue(input.DeleteGenerated)
deleteFile := utils.IsTrue(input.DeleteFile)
destroyFileEntry := utils.IsTrue(input.DestroyFileEntry)
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene
@ -512,7 +549,7 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
// kill any running encoders
manager.KillRunningStreams(scene, fileNamingAlgo)
if err := r.sceneService.Destroy(ctx, scene, fileDeleter, deleteGenerated, deleteFile); err != nil {
if err := r.sceneService.Destroy(ctx, scene, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil {
return err
}
}
@ -572,6 +609,7 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput
var values *models.ScenePartial
var coverImageData []byte
var customFields *models.CustomFieldsInput
if input.Values != nil {
translator := changesetTranslator{
@ -590,6 +628,11 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput
return nil, fmt.Errorf("processing cover image: %w", err)
}
}
if input.Values.CustomFields != nil {
cf := handleUpdateCustomFields(*input.Values.CustomFields)
customFields = &cf
}
} else {
v := models.NewScenePartial()
values = &v
@ -621,7 +664,20 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput
return fmt.Errorf("scene with id %d not found", destID)
}
return r.sceneUpdateCoverImage(ctx, ret, coverImageData)
// only update cover image if one was provided
if len(coverImageData) > 0 {
if err := r.sceneUpdateCoverImage(ctx, ret, coverImageData); err != nil {
return err
}
}
if customFields != nil {
if err := r.Resolver.repository.Scene.SetCustomFields(ctx, ret.ID, *customFields); err != nil {
return err
}
}
return nil
}); err != nil {
return nil, err
}

View file

@ -58,6 +58,16 @@ func (r *mutationResolver) StashBoxBatchStudioTag(ctx context.Context, input man
return strconv.Itoa(jobID), nil
}
func (r *mutationResolver) StashBoxBatchTagTag(ctx context.Context, input manager.StashBoxBatchTagInput) (string, error) {
b, err := resolveStashBoxBatchTagInput(input.Endpoint, input.StashBoxEndpoint) //nolint:staticcheck
if err != nil {
return "", err
}
jobID := manager.GetInstance().StashBoxBatchTagTag(ctx, b, input)
return strconv.Itoa(jobID), nil
}
func (r *mutationResolver) SubmitStashBoxSceneDraft(ctx context.Context, input StashBoxDraftSubmissionInput) (*string, error) {
b, err := resolveStashBox(input.StashBoxIndex, input.StashBoxEndpoint)
if err != nil {

View file

@ -31,14 +31,15 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
}
// Populate a new studio from the input
newStudio := models.NewStudio()
newStudio := models.NewCreateStudioInput()
newStudio.Name = strings.TrimSpace(input.Name)
newStudio.Rating = input.Rating100
newStudio.Favorite = translator.bool(input.Favorite)
newStudio.Details = translator.string(input.Details)
newStudio.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag)
newStudio.Aliases = models.NewRelatedStrings(stringslice.TrimSpace(input.Aliases))
newStudio.Organized = translator.bool(input.Organized)
newStudio.Aliases = models.NewRelatedStrings(stringslice.UniqueExcludeFold(stringslice.TrimSpace(input.Aliases), newStudio.Name))
newStudio.StashIDs = models.NewRelatedStashIDs(models.StashIDInputs(input.StashIds).ToStashIDs())
var err error
@ -61,6 +62,7 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
newStudio.CustomFields = convertMapJSONNumbers(input.CustomFields)
// Process the base 64 encoded image string
var imageData []byte
@ -119,6 +121,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
updatedStudio.Rating = translator.optionalInt(input.Rating100, "rating100")
updatedStudio.Favorite = translator.optionalBool(input.Favorite, "favorite")
updatedStudio.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag")
updatedStudio.Organized = translator.optionalBool(input.Organized, "organized")
updatedStudio.Aliases = translator.updateStrings(input.Aliases, "aliases")
updatedStudio.StashIDs = translator.updateStashIDs(input.StashIds, "stash_ids")
@ -134,7 +137,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
if translator.hasField("urls") {
// ensure url not included in the input
if err := r.validateNoLegacyURLs(translator); err != nil {
if err := validateNoLegacyURLs(translator); err != nil {
return nil, err
}
@ -152,6 +155,11 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
}
}
updatedStudio.CustomFields = input.CustomFields
// convert json.Numbers to int/float
updatedStudio.CustomFields.Full = convertMapJSONNumbers(updatedStudio.CustomFields.Full)
updatedStudio.CustomFields.Partial = convertMapJSONNumbers(updatedStudio.CustomFields.Partial)
// Process the base 64 encoded image string
var imageData []byte
imageIncluded := translator.hasField("image")
@ -167,6 +175,28 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Studio
if updatedStudio.Aliases != nil {
s, err := qb.Find(ctx, studioID)
if err != nil {
return err
}
if s != nil {
if err := s.LoadAliases(ctx, qb); err != nil {
return err
}
effectiveAliases := updatedStudio.Aliases.Apply(s.Aliases.List())
name := s.Name
if updatedStudio.Name.Set {
name = updatedStudio.Name.Value
}
sanitized := stringslice.UniqueExcludeFold(effectiveAliases, name)
updatedStudio.Aliases.Values = sanitized
updatedStudio.Aliases.Mode = models.RelationshipUpdateModeSet
}
}
if err := studio.ValidateModify(ctx, updatedStudio, qb); err != nil {
return err
}
@ -211,7 +241,7 @@ func (r *mutationResolver) BulkStudioUpdate(ctx context.Context, input BulkStudi
if translator.hasField("urls") {
// ensure url/twitter/instagram are not included in the input
if err := r.validateNoLegacyURLs(translator); err != nil {
if err := validateNoLegacyURLs(translator); err != nil {
return nil, err
}
@ -233,6 +263,7 @@ func (r *mutationResolver) BulkStudioUpdate(ctx context.Context, input BulkStudi
partial.Rating = translator.optionalInt(input.Rating100, "rating100")
partial.Details = translator.optionalString(input.Details, "details")
partial.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag")
partial.Organized = translator.optionalBool(input.Organized, "organized")
partial.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids")
if err != nil {

View file

@ -6,7 +6,6 @@ import (
"strconv"
"strings"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin/hook"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
@ -31,11 +30,14 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput)
}
// Populate a new tag from the input
newTag := models.NewTag()
newTag := models.CreateTagInput{
Tag: &models.Tag{},
}
*newTag.Tag = models.NewTag()
newTag.Name = strings.TrimSpace(input.Name)
newTag.SortName = translator.string(input.SortName)
newTag.Aliases = models.NewRelatedStrings(stringslice.TrimSpace(input.Aliases))
newTag.Aliases = models.NewRelatedStrings(stringslice.UniqueExcludeFold(stringslice.TrimSpace(input.Aliases), newTag.Name))
newTag.Favorite = translator.bool(input.Favorite)
newTag.Description = translator.string(input.Description)
newTag.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag)
@ -60,6 +62,8 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput)
return nil, fmt.Errorf("converting child tag ids: %w", err)
}
newTag.CustomFields = convertMapJSONNumbers(input.CustomFields)
// Process the base 64 encoded image string
var imageData []byte
if input.Image != nil {
@ -73,7 +77,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput)
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Tag
if err := tag.ValidateCreate(ctx, newTag, qb); err != nil {
if err := tag.ValidateCreate(ctx, *newTag.Tag, qb); err != nil {
return err
}
@ -98,17 +102,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput)
return r.getTag(ctx, newTag.ID)
}
func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) (*models.Tag, error) {
tagID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, fmt.Errorf("converting id: %w", err)
}
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
// Populate tag from the input
func tagPartialFromInput(input TagUpdateInput, translator changesetTranslator) (*models.TagPartial, error) {
updatedTag := models.NewTagPartial()
updatedTag.Name = translator.optionalString(input.Name, "name")
@ -127,6 +121,7 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput)
}
updatedTag.StashIDs = translator.updateStashIDs(updateStashIDInputs, "stash_ids")
var err error
updatedTag.ParentIDs, err = translator.updateIds(input.ParentIds, "parent_ids")
if err != nil {
return nil, fmt.Errorf("converting parent tag ids: %w", err)
@ -137,6 +132,32 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput)
return nil, fmt.Errorf("converting child tag ids: %w", err)
}
if input.CustomFields != nil {
updatedTag.CustomFields = *input.CustomFields
// convert json.Numbers to int/float
updatedTag.CustomFields.Full = convertMapJSONNumbers(updatedTag.CustomFields.Full)
updatedTag.CustomFields.Partial = convertMapJSONNumbers(updatedTag.CustomFields.Partial)
}
return &updatedTag, nil
}
func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) (*models.Tag, error) {
tagID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, fmt.Errorf("converting id: %w", err)
}
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
// Populate tag from the input
updatedTag, err := tagPartialFromInput(input, translator)
if err != nil {
return nil, err
}
var imageData []byte
imageIncluded := translator.hasField("image")
if input.Image != nil {
@ -151,11 +172,33 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput)
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Tag
if err := tag.ValidateUpdate(ctx, tagID, updatedTag, qb); err != nil {
if updatedTag.Aliases != nil {
t, err := qb.Find(ctx, tagID)
if err != nil {
return err
}
if t != nil {
if err := t.LoadAliases(ctx, qb); err != nil {
return err
}
newAliases := updatedTag.Aliases.Apply(t.Aliases.List())
name := t.Name
if updatedTag.Name.Set {
name = updatedTag.Name.Value
}
sanitized := stringslice.UniqueExcludeFold(newAliases, name)
updatedTag.Aliases.Values = sanitized
updatedTag.Aliases.Mode = models.RelationshipUpdateModeSet
}
}
if err := tag.ValidateUpdate(ctx, tagID, *updatedTag, qb); err != nil {
return err
}
t, err = qb.UpdatePartial(ctx, tagID, updatedTag)
t, err = qb.UpdatePartial(ctx, tagID, *updatedTag)
if err != nil {
return err
}
@ -303,6 +346,31 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input TagsMergeInput)
return nil, nil
}
var values *models.TagPartial
var imageData []byte
if input.Values != nil {
translator := changesetTranslator{
inputMap: getNamedUpdateInputMap(ctx, "input.values"),
}
values, err = tagPartialFromInput(*input.Values, translator)
if err != nil {
return nil, err
}
if input.Values.Image != nil {
var err error
imageData, err = utils.ProcessImageInput(ctx, *input.Values.Image)
if err != nil {
return nil, fmt.Errorf("processing cover image: %w", err)
}
}
} else {
v := models.NewTagPartial()
values = &v
}
var t *models.Tag
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Tag
@ -317,28 +385,22 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input TagsMergeInput)
return fmt.Errorf("tag with id %d not found", destination)
}
parents, children, err := tag.MergeHierarchy(ctx, destination, source, qb)
if err != nil {
return err
}
if err = qb.Merge(ctx, source, destination); err != nil {
return err
}
err = qb.UpdateParentTags(ctx, destination, parents)
if err != nil {
return err
}
err = qb.UpdateChildTags(ctx, destination, children)
if err != nil {
if err := tag.ValidateUpdate(ctx, destination, *values, qb); err != nil {
return err
}
err = tag.ValidateHierarchyExisting(ctx, t, parents, children, qb)
if err != nil {
logger.Errorf("Error merging tag: %s", err)
return err
if _, err := qb.UpdatePartial(ctx, destination, *values); err != nil {
return fmt.Errorf("updating tag: %w", err)
}
if len(imageData) > 0 {
if err := qb.UpdateImage(ctx, destination, imageData); err != nil {
return err
}
}
return nil

View file

@ -96,6 +96,11 @@ func makeConfigGeneralResult() *ConfigGeneralResult {
CalculateMd5: config.IsCalculateMD5(),
VideoFileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
ParallelTasks: config.GetParallelTasks(),
UseCustomSpriteInterval: config.GetUseCustomSpriteInterval(),
SpriteInterval: config.GetSpriteInterval(),
SpriteScreenshotSize: config.GetSpriteScreenshotSize(),
MinimumSprites: config.GetMinimumSprites(),
MaximumSprites: config.GetMaximumSprites(),
PreviewAudio: config.GetPreviewAudio(),
PreviewSegments: config.GetPreviewSegments(),
PreviewSegmentDuration: config.GetPreviewSegmentDuration(),
@ -156,6 +161,7 @@ func makeConfigInterfaceResult() *ConfigInterfaceResult {
javascriptEnabled := config.GetJavascriptEnabled()
customLocales := config.GetCustomLocales()
customLocalesEnabled := config.GetCustomLocalesEnabled()
disableCustomizations := config.GetDisableCustomizations()
language := config.GetLanguage()
handyKey := config.GetHandyKey()
scriptOffset := config.GetFunscriptOffset()
@ -183,6 +189,7 @@ func makeConfigInterfaceResult() *ConfigInterfaceResult {
JavascriptEnabled: &javascriptEnabled,
CustomLocales: &customLocales,
CustomLocalesEnabled: &customLocalesEnabled,
DisableCustomizations: &disableCustomizations,
Language: &language,
ImageLightbox: &imageLightboxOptions,

View file

@ -6,6 +6,7 @@ import (
"fmt"
"slices"
"strconv"
"strings"
"github.com/stashapp/stash/pkg/match"
"github.com/stashapp/stash/pkg/models"
@ -363,7 +364,8 @@ func (r *queryResolver) ScrapeSingleTag(ctx context.Context, source scraper.Sour
client := r.newStashBoxClient(*b)
var ret []*models.ScrapedTag
out, err := client.QueryTag(ctx, *input.Query)
query := *input.Query
out, err := client.QueryTag(ctx, query)
if err != nil {
return nil, err
@ -383,6 +385,22 @@ func (r *queryResolver) ScrapeSingleTag(ctx context.Context, source scraper.Sour
}); err != nil {
return nil, err
}
// tag name query returns results that may not match the query exactly.
// if there is an exact match, it should be first
if query != "" {
for i, result := range ret {
if strings.EqualFold(result.Name, query) {
// prepend exact match to the front of the slice
if i != 0 {
ret = append([]*models.ScrapedTag{result}, append(ret[:i], ret[i+1:]...)...)
}
break
}
}
}
return ret, nil
}

View file

@ -12,6 +12,7 @@ import (
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/internal/static"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/file/video"
"github.com/stashapp/stash/pkg/fsutil"
@ -243,6 +244,12 @@ func (rs sceneRoutes) streamSegment(w http.ResponseWriter, r *http.Request, stre
}
func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) {
// if default flag is set, return the default image
if r.URL.Query().Get("default") == "true" {
utils.ServeImage(w, r, static.ReadAll(static.DefaultSceneImage))
return
}
scene := r.Context().Value(sceneKey).(*models.Scene)
ss := manager.SceneServer{

View file

@ -11,6 +11,7 @@ import (
"net/http"
"os"
"path"
"path/filepath"
"runtime/debug"
"strconv"
"strings"
@ -255,6 +256,9 @@ func Initialize() (*Server, error) {
staticUI = statigz.FileServer(ui.UIBox.(fs.ReadDirFS))
}
// handle favicon override
r.HandleFunc("/favicon.ico", handleFavicon(staticUI))
// Serve the web app
r.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path)
@ -295,6 +299,31 @@ func Initialize() (*Server, error) {
return server, nil
}
func handleFavicon(staticUI *statigz.Server) func(w http.ResponseWriter, r *http.Request) {
mgr := manager.GetInstance()
cfg := mgr.Config
// check if favicon.ico exists in the config directory
// if so, use that
// otherwise, use the embedded one
iconPath := filepath.Join(cfg.GetConfigPath(), "favicon.ico")
exists, _ := fsutil.FileExists(iconPath)
if exists {
logger.Debugf("Using custom favicon at %s", iconPath)
}
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Cache-Control", "no-cache")
if exists {
http.ServeFile(w, r, iconPath)
} else {
staticUI.ServeHTTP(w, r)
}
}
}
// Start starts the server. It listens on the configured address and port.
// It calls ListenAndServeTLS if TLS is configured, otherwise it calls ListenAndServe.
// Calls to Start are blocked until the server is shutdown.
@ -421,7 +450,7 @@ func cssHandler(c *config.Config) func(w http.ResponseWriter, r *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
var paths []string
if c.GetCSSEnabled() {
if c.GetCSSEnabled() && !c.GetDisableCustomizations() {
// search for custom.css in current directory, then $HOME/.stash
fn := c.GetCSSPath()
exists, _ := fsutil.FileExists(fn)
@ -439,7 +468,7 @@ func javascriptHandler(c *config.Config) func(w http.ResponseWriter, r *http.Req
return func(w http.ResponseWriter, r *http.Request) {
var paths []string
if c.GetJavascriptEnabled() {
if c.GetJavascriptEnabled() && !c.GetDisableCustomizations() {
// search for custom.js in current directory, then $HOME/.stash
fn := c.GetJavascriptPath()
exists, _ := fsutil.FileExists(fn)
@ -457,7 +486,7 @@ func customLocalesHandler(c *config.Config) func(w http.ResponseWriter, r *http.
return func(w http.ResponseWriter, r *http.Request) {
buffer := bytes.Buffer{}
if c.GetCustomLocalesEnabled() {
if c.GetCustomLocalesEnabled() && !c.GetDisableCustomizations() {
// search for custom-locales.json in current directory, then $HOME/.stash
path := c.GetCustomLocalesPath()
exists, _ := fsutil.FileExists(path)

View file

@ -101,16 +101,15 @@ func createPerformer(ctx context.Context, pqb models.PerformerWriter) error {
func createStudio(ctx context.Context, qb models.StudioWriter, name string) (*models.Studio, error) {
// create the studio
studio := models.Studio{
Name: name,
}
studio := models.NewCreateStudioInput()
studio.Name = name
err := qb.Create(ctx, &studio)
if err != nil {
return nil, err
}
return &studio, nil
return studio.Studio, nil
}
func createTag(ctx context.Context, qb models.TagWriter) error {
@ -119,7 +118,7 @@ func createTag(ctx context.Context, qb models.TagWriter) error {
Name: testName,
}
err := qb.Create(ctx, &tag)
err := qb.Create(ctx, &models.CreateTagInput{Tag: &tag})
if err != nil {
return err
}
@ -366,7 +365,10 @@ func makeImage(expectedResult bool) *models.Image {
}
func createImage(ctx context.Context, w models.ImageWriter, o *models.Image, f *models.ImageFile) error {
err := w.Create(ctx, o, []models.FileID{f.ID})
err := w.Create(ctx, &models.CreateImageInput{
Image: o,
FileIDs: []models.FileID{f.ID},
})
if err != nil {
return fmt.Errorf("Failed to create image with path '%s': %s", f.Path, err.Error())
@ -469,7 +471,10 @@ func makeGallery(expectedResult bool) *models.Gallery {
}
func createGallery(ctx context.Context, w models.GalleryWriter, o *models.Gallery, f *models.BaseFile) error {
err := w.Create(ctx, o, []models.FileID{f.ID})
err := w.Create(ctx, &models.CreateGalleryInput{
Gallery: o,
FileIDs: []models.FileID{f.ID},
})
if err != nil {
return fmt.Errorf("Failed to create gallery with path '%s': %s", f.Path, err.Error())
}

View file

@ -2,6 +2,7 @@
package desktop
import (
"fmt"
"os"
"path"
"path/filepath"
@ -155,15 +156,17 @@ func getIconPath() string {
return path.Join(config.GetInstance().GetConfigPath(), "icon.png")
}
func RevealInFileManager(path string) {
exists, err := fsutil.FileExists(path)
func RevealInFileManager(path string) error {
info, err := os.Stat(path)
if err != nil {
logger.Errorf("Error checking file: %s", err)
return
return fmt.Errorf("error checking path: %w", err)
}
if exists && IsDesktop() {
revealInFileManager(path)
absPath, err := filepath.Abs(path)
if err != nil {
return fmt.Errorf("error getting absolute path: %w", err)
}
return revealInFileManager(absPath, info)
}
func getServerURL(path string) string {

View file

@ -4,9 +4,11 @@
package desktop
import (
"fmt"
"os"
"os/exec"
"github.com/kermieisinthehouse/gosx-notifier"
gosxnotifier "github.com/kermieisinthehouse/gosx-notifier"
"github.com/stashapp/stash/pkg/logger"
)
@ -32,8 +34,11 @@ func sendNotification(notificationTitle string, notificationText string) {
}
}
func revealInFileManager(path string) {
exec.Command(`open`, `-R`, path)
func revealInFileManager(path string, _ os.FileInfo) error {
if err := exec.Command(`open`, `-R`, path).Run(); err != nil {
return fmt.Errorf("error revealing path in Finder: %w", err)
}
return nil
}
func isDoubleClickLaunched() bool {

View file

@ -4,8 +4,10 @@
package desktop
import (
"fmt"
"os"
"os/exec"
"path/filepath"
"strings"
"github.com/stashapp/stash/pkg/logger"
@ -33,8 +35,15 @@ func sendNotification(notificationTitle string, notificationText string) {
}
}
func revealInFileManager(path string) {
func revealInFileManager(path string, info os.FileInfo) error {
dir := path
if !info.IsDir() {
dir = filepath.Dir(path)
}
if err := exec.Command("xdg-open", dir).Run(); err != nil {
return fmt.Errorf("error opening directory in file manager: %w", err)
}
return nil
}
func isDoubleClickLaunched() bool {

View file

@ -4,6 +4,7 @@
package desktop
import (
"os"
"os/exec"
"syscall"
"unsafe"
@ -83,6 +84,10 @@ func sendNotification(notificationTitle string, notificationText string) {
}
}
func revealInFileManager(path string) {
exec.Command(`explorer`, `\select`, path)
func revealInFileManager(path string, _ os.FileInfo) error {
c := exec.Command(`explorer`, `/select,`, path)
logger.Debugf("Running: %s", c.String())
// explorer seems to return an error code even when it works, so ignore the error
_ = c.Run()
return nil
}

View file

@ -3,6 +3,7 @@
package desktop
import (
"fmt"
"runtime"
"strings"
@ -58,12 +59,12 @@ func startSystray(exit chan int, faviconProvider FaviconProvider) {
func systrayInitialize(exit chan<- int, faviconProvider FaviconProvider) {
favicon := faviconProvider.GetFavicon()
systray.SetTemplateIcon(favicon, favicon)
systray.SetTooltip("🟢 Stash is Running.")
c := config.GetInstance()
systray.SetTooltip(fmt.Sprintf("🟢 Stash is Running on port %d.", c.GetPort()))
openStashButton := systray.AddMenuItem("Open Stash", "Open a browser window to Stash")
var menuItems []string
systray.AddSeparator()
c := config.GetInstance()
if !c.IsNewSystem() {
menuItems = c.GetMenuItems()
for _, item := range menuItems {

333
internal/dlna/activity.go Normal file
View file

@ -0,0 +1,333 @@
package dlna
import (
"context"
"fmt"
"sync"
"time"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/txn"
)
const (
// DefaultSessionTimeout is the time after which a session is considered complete
// if no new requests are received.
// This is set high (5 minutes) because DLNA clients buffer aggressively and may not
// send any HTTP requests for extended periods while the user is still watching.
DefaultSessionTimeout = 5 * time.Minute
// monitorInterval is how often we check for expired sessions.
monitorInterval = 10 * time.Second
)
// ActivityConfig provides configuration options for DLNA activity tracking.
type ActivityConfig interface {
// GetDLNAActivityTrackingEnabled returns true if activity tracking should be enabled.
// If not implemented, defaults to true.
GetDLNAActivityTrackingEnabled() bool
// GetMinimumPlayPercent returns the minimum percentage of a video that must be
// watched before incrementing the play count. Uses UI setting if available.
GetMinimumPlayPercent() int
}
// SceneActivityWriter provides methods for saving scene activity.
type SceneActivityWriter interface {
SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error)
AddViews(ctx context.Context, sceneID int, dates []time.Time) ([]time.Time, error)
}
// streamSession represents an active DLNA streaming session.
type streamSession struct {
SceneID int
ClientIP string
StartTime time.Time
LastActivity time.Time
VideoDuration float64
PlayCountAdded bool
}
// sessionKey generates a unique key for a session based on client IP and scene ID.
func sessionKey(clientIP string, sceneID int) string {
return fmt.Sprintf("%s:%d", clientIP, sceneID)
}
// percentWatched calculates the estimated percentage of video watched.
// Uses a time-based approach since DLNA clients buffer aggressively and byte
// positions don't correlate with actual playback position.
//
// The key insight: you cannot have watched more of the video than time has elapsed.
// If the video is 30 minutes and only 1 minute has passed, maximum watched is ~3.3%.
func (s *streamSession) percentWatched() float64 {
if s.VideoDuration <= 0 {
return 0
}
// Calculate elapsed time from session start to last activity
elapsed := s.LastActivity.Sub(s.StartTime).Seconds()
if elapsed <= 0 {
return 0
}
// Maximum possible percent is based on elapsed time
// You can't watch more of the video than time has passed
timeBasedPercent := (elapsed / s.VideoDuration) * 100
// Cap at 100%
if timeBasedPercent > 100 {
return 100
}
return timeBasedPercent
}
// estimatedResumeTime calculates the estimated resume time based on elapsed time.
// Since DLNA clients buffer aggressively, byte positions don't correlate with playback.
// Instead, we estimate based on how long the session has been active.
// Returns the time in seconds, or 0 if the video is nearly complete (>=98%).
func (s *streamSession) estimatedResumeTime() float64 {
if s.VideoDuration <= 0 {
return 0
}
// Calculate elapsed time from session start
elapsed := s.LastActivity.Sub(s.StartTime).Seconds()
if elapsed <= 0 {
return 0
}
// If elapsed time exceeds 98% of video duration, reset resume time (matches frontend behavior)
if elapsed >= s.VideoDuration*0.98 {
return 0
}
// Resume time is approximately where the user was watching
// Capped by video duration
if elapsed > s.VideoDuration {
elapsed = s.VideoDuration
}
return elapsed
}
// ActivityTracker tracks DLNA streaming activity and saves it to the database.
type ActivityTracker struct {
txnManager txn.Manager
sceneWriter SceneActivityWriter
config ActivityConfig
sessionTimeout time.Duration
sessions map[string]*streamSession
mutex sync.RWMutex
ctx context.Context
cancelFunc context.CancelFunc
wg sync.WaitGroup
}
// NewActivityTracker creates a new ActivityTracker.
func NewActivityTracker(
txnManager txn.Manager,
sceneWriter SceneActivityWriter,
config ActivityConfig,
) *ActivityTracker {
ctx, cancel := context.WithCancel(context.Background())
tracker := &ActivityTracker{
txnManager: txnManager,
sceneWriter: sceneWriter,
config: config,
sessionTimeout: DefaultSessionTimeout,
sessions: make(map[string]*streamSession),
ctx: ctx,
cancelFunc: cancel,
}
// Start the session monitor goroutine
tracker.wg.Add(1)
go tracker.monitorSessions()
return tracker
}
// Stop stops the activity tracker and processes any remaining sessions.
func (t *ActivityTracker) Stop() {
t.cancelFunc()
t.wg.Wait()
// Process any remaining sessions
t.mutex.Lock()
sessions := make([]*streamSession, 0, len(t.sessions))
for _, session := range t.sessions {
sessions = append(sessions, session)
}
t.sessions = make(map[string]*streamSession)
t.mutex.Unlock()
for _, session := range sessions {
t.processCompletedSession(session)
}
}
// RecordRequest records a streaming request for activity tracking.
// Each request updates the session's LastActivity time, which is used for
// time-based tracking of watch progress.
func (t *ActivityTracker) RecordRequest(sceneID int, clientIP string, videoDuration float64) {
if !t.isEnabled() {
return
}
key := sessionKey(clientIP, sceneID)
now := time.Now()
t.mutex.Lock()
defer t.mutex.Unlock()
session, exists := t.sessions[key]
if !exists {
session = &streamSession{
SceneID: sceneID,
ClientIP: clientIP,
StartTime: now,
VideoDuration: videoDuration,
}
t.sessions[key] = session
logger.Debugf("[DLNA Activity] New session started: scene=%d, client=%s", sceneID, clientIP)
}
session.LastActivity = now
}
// monitorSessions periodically checks for expired sessions and processes them.
func (t *ActivityTracker) monitorSessions() {
defer t.wg.Done()
ticker := time.NewTicker(monitorInterval)
defer ticker.Stop()
for {
select {
case <-t.ctx.Done():
return
case <-ticker.C:
t.processExpiredSessions()
}
}
}
// processExpiredSessions finds and processes sessions that have timed out.
func (t *ActivityTracker) processExpiredSessions() {
now := time.Now()
var expiredSessions []*streamSession
t.mutex.Lock()
for key, session := range t.sessions {
timeSinceStart := now.Sub(session.StartTime)
timeSinceActivity := now.Sub(session.LastActivity)
// Must have no HTTP activity for the full timeout period
if timeSinceActivity <= t.sessionTimeout {
continue
}
// DLNA clients buffer aggressively - they fetch most/all of the video quickly,
// then play from cache with NO further HTTP requests.
//
// Two scenarios:
// 1. User watched the whole video: timeSinceStart >= videoDuration
// -> Set LastActivity to when timeout began (they finished watching)
// 2. User stopped early: timeSinceStart < videoDuration
// -> Keep LastActivity as-is (best estimate of when they stopped)
videoDuration := time.Duration(session.VideoDuration) * time.Second
if timeSinceStart >= videoDuration && videoDuration > 0 {
// User likely watched the whole video, then it timed out
// Estimate they watched until the timeout period started
session.LastActivity = now.Add(-t.sessionTimeout)
}
// else: User stopped early - LastActivity is already our best estimate
expiredSessions = append(expiredSessions, session)
delete(t.sessions, key)
}
t.mutex.Unlock()
for _, session := range expiredSessions {
t.processCompletedSession(session)
}
}
// processCompletedSession saves activity data for a completed streaming session.
func (t *ActivityTracker) processCompletedSession(session *streamSession) {
percentWatched := session.percentWatched()
resumeTime := session.estimatedResumeTime()
logger.Debugf("[DLNA Activity] Session completed: scene=%d, client=%s, videoDuration=%.1fs, percent=%.1f%%, resume=%.1fs",
session.SceneID, session.ClientIP, session.VideoDuration, percentWatched, resumeTime)
// Only save if there was meaningful activity (at least 1% watched)
if percentWatched < 1 {
logger.Debugf("[DLNA Activity] Session too short, skipping save")
return
}
// Skip DB operations if txnManager is nil (for testing)
if t.txnManager == nil {
logger.Debugf("[DLNA Activity] No transaction manager, skipping DB save")
return
}
// Determine what needs to be saved
shouldSaveResume := resumeTime > 0
shouldAddView := !session.PlayCountAdded && percentWatched >= float64(t.getMinimumPlayPercent())
// Nothing to save
if !shouldSaveResume && !shouldAddView {
return
}
// Save everything in a single transaction
ctx := context.Background()
if err := txn.WithTxn(ctx, t.txnManager, func(ctx context.Context) error {
// Save resume time only. DLNA clients buffer aggressively and don't report
// playback position, so we can't accurately track play duration - saving
// guesses would corrupt analytics. Resume time is still useful as a
// "continue watching" hint even if imprecise.
if shouldSaveResume {
if _, err := t.sceneWriter.SaveActivity(ctx, session.SceneID, &resumeTime, nil); err != nil {
return fmt.Errorf("save resume time: %w", err)
}
}
// Increment play count (also updates last_played_at via view date)
if shouldAddView {
if _, err := t.sceneWriter.AddViews(ctx, session.SceneID, []time.Time{time.Now()}); err != nil {
return fmt.Errorf("add view: %w", err)
}
session.PlayCountAdded = true
logger.Debugf("[DLNA Activity] Incremented play count for scene %d (%.1f%% watched)",
session.SceneID, percentWatched)
}
return nil
}); err != nil {
logger.Warnf("[DLNA Activity] Failed to save activity for scene %d: %v", session.SceneID, err)
}
}
// isEnabled returns true if activity tracking is enabled.
func (t *ActivityTracker) isEnabled() bool {
if t.config == nil {
return true // Default to enabled
}
return t.config.GetDLNAActivityTrackingEnabled()
}
// getMinimumPlayPercent returns the minimum play percentage for incrementing play count.
func (t *ActivityTracker) getMinimumPlayPercent() int {
if t.config == nil {
return 0 // Default: any play increments count (matches frontend default)
}
return t.config.GetMinimumPlayPercent()
}

View file

@ -0,0 +1,420 @@
package dlna
import (
"context"
"sync"
"testing"
"time"
"github.com/stretchr/testify/assert"
)
// mockSceneWriter is a mock implementation of SceneActivityWriter
type mockSceneWriter struct {
mu sync.Mutex
saveActivityCalls []saveActivityCall
addViewsCalls []addViewsCall
}
type saveActivityCall struct {
sceneID int
resumeTime *float64
playDuration *float64
}
type addViewsCall struct {
sceneID int
dates []time.Time
}
func (m *mockSceneWriter) SaveActivity(_ context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) {
m.mu.Lock()
m.saveActivityCalls = append(m.saveActivityCalls, saveActivityCall{
sceneID: sceneID,
resumeTime: resumeTime,
playDuration: playDuration,
})
m.mu.Unlock()
return true, nil
}
func (m *mockSceneWriter) AddViews(_ context.Context, sceneID int, dates []time.Time) ([]time.Time, error) {
m.mu.Lock()
m.addViewsCalls = append(m.addViewsCalls, addViewsCall{
sceneID: sceneID,
dates: dates,
})
m.mu.Unlock()
return dates, nil
}
// mockConfig is a mock implementation of ActivityConfig
type mockConfig struct {
enabled bool
minPlayPercent int
}
func (c *mockConfig) GetDLNAActivityTrackingEnabled() bool {
return c.enabled
}
func (c *mockConfig) GetMinimumPlayPercent() int {
return c.minPlayPercent
}
func TestStreamSession_PercentWatched(t *testing.T) {
now := time.Now()
tests := []struct {
name string
startTime time.Time
lastActivity time.Time
videoDuration float64
expected float64
}{
{
name: "no video duration",
startTime: now.Add(-60 * time.Second),
lastActivity: now,
videoDuration: 0,
expected: 0,
},
{
name: "half watched",
startTime: now.Add(-60 * time.Second),
lastActivity: now,
videoDuration: 120.0, // 2 minutes, watched for 1 minute = 50%
expected: 50.0,
},
{
name: "fully watched",
startTime: now.Add(-120 * time.Second),
lastActivity: now,
videoDuration: 120.0, // 2 minutes, watched for 2 minutes = 100%
expected: 100.0,
},
{
name: "quarter watched",
startTime: now.Add(-30 * time.Second),
lastActivity: now,
videoDuration: 120.0, // 2 minutes, watched for 30 seconds = 25%
expected: 25.0,
},
{
name: "elapsed exceeds duration - capped at 100%",
startTime: now.Add(-180 * time.Second),
lastActivity: now,
videoDuration: 120.0, // 2 minutes, but 3 minutes elapsed = capped at 100%
expected: 100.0,
},
{
name: "no elapsed time",
startTime: now,
lastActivity: now,
videoDuration: 120.0,
expected: 0,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
session := &streamSession{
StartTime: tt.startTime,
LastActivity: tt.lastActivity,
VideoDuration: tt.videoDuration,
}
result := session.percentWatched()
assert.InDelta(t, tt.expected, result, 0.01)
})
}
}
func TestStreamSession_EstimatedResumeTime(t *testing.T) {
now := time.Now()
tests := []struct {
name string
startTime time.Time
lastActivity time.Time
videoDuration float64
expected float64
}{
{
name: "no elapsed time",
startTime: now,
lastActivity: now,
videoDuration: 120.0,
expected: 0,
},
{
name: "half way through",
startTime: now.Add(-60 * time.Second),
lastActivity: now,
videoDuration: 120.0, // 2 minutes, watched for 1 minute = resume at 60s
expected: 60.0,
},
{
name: "quarter way through",
startTime: now.Add(-30 * time.Second),
lastActivity: now,
videoDuration: 120.0, // 2 minutes, watched for 30 seconds = resume at 30s
expected: 30.0,
},
{
name: "98% complete - should reset to 0",
startTime: now.Add(-118 * time.Second),
lastActivity: now,
videoDuration: 120.0, // 98.3% elapsed, should reset
expected: 0,
},
{
name: "100% complete - should reset to 0",
startTime: now.Add(-120 * time.Second),
lastActivity: now,
videoDuration: 120.0,
expected: 0,
},
{
name: "elapsed exceeds duration - capped and reset to 0",
startTime: now.Add(-180 * time.Second),
lastActivity: now,
videoDuration: 120.0, // 150% elapsed, capped at 100%, reset to 0
expected: 0,
},
{
name: "no video duration",
startTime: now.Add(-60 * time.Second),
lastActivity: now,
videoDuration: 0,
expected: 0,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
session := &streamSession{
StartTime: tt.startTime,
LastActivity: tt.lastActivity,
VideoDuration: tt.videoDuration,
}
result := session.estimatedResumeTime()
assert.InDelta(t, tt.expected, result, 1.0) // Allow 1 second tolerance
})
}
}
func TestSessionKey(t *testing.T) {
key := sessionKey("192.168.1.100", 42)
assert.Equal(t, "192.168.1.100:42", key)
}
func TestActivityTracker_RecordRequest(t *testing.T) {
config := &mockConfig{enabled: true, minPlayPercent: 50}
// Create tracker without starting the goroutine (for unit testing)
tracker := &ActivityTracker{
txnManager: nil, // Don't need DB for this test
sceneWriter: nil,
config: config,
sessionTimeout: DefaultSessionTimeout,
sessions: make(map[string]*streamSession),
}
// Record first request - should create new session
tracker.RecordRequest(42, "192.168.1.100", 120.0)
tracker.mutex.RLock()
session := tracker.sessions["192.168.1.100:42"]
tracker.mutex.RUnlock()
assert.NotNil(t, session)
assert.Equal(t, 42, session.SceneID)
assert.Equal(t, "192.168.1.100", session.ClientIP)
assert.Equal(t, 120.0, session.VideoDuration)
assert.False(t, session.StartTime.IsZero())
assert.False(t, session.LastActivity.IsZero())
// Record second request - should update LastActivity
firstActivity := session.LastActivity
time.Sleep(10 * time.Millisecond)
tracker.RecordRequest(42, "192.168.1.100", 120.0)
tracker.mutex.RLock()
session = tracker.sessions["192.168.1.100:42"]
tracker.mutex.RUnlock()
assert.True(t, session.LastActivity.After(firstActivity))
}
func TestActivityTracker_DisabledTracking(t *testing.T) {
config := &mockConfig{enabled: false, minPlayPercent: 50}
// Create tracker without starting the goroutine (for unit testing)
tracker := &ActivityTracker{
txnManager: nil,
sceneWriter: nil,
config: config,
sessionTimeout: DefaultSessionTimeout,
sessions: make(map[string]*streamSession),
}
// Record request - should be ignored when tracking is disabled
tracker.RecordRequest(42, "192.168.1.100", 120.0)
tracker.mutex.RLock()
sessionCount := len(tracker.sessions)
tracker.mutex.RUnlock()
assert.Equal(t, 0, sessionCount)
}
func TestActivityTracker_SessionExpiration(t *testing.T) {
// For this test, we'll test the session expiration logic directly
// without the full transaction manager integration
sceneWriter := &mockSceneWriter{}
config := &mockConfig{enabled: true, minPlayPercent: 10}
// Create a tracker with nil txnManager - we'll test processCompletedSession separately
// Here we just verify the session management logic
tracker := &ActivityTracker{
txnManager: nil, // Skip DB calls for this test
sceneWriter: sceneWriter,
config: config,
sessionTimeout: 100 * time.Millisecond,
sessions: make(map[string]*streamSession),
}
// Manually add a session
// Use a short video duration (1 second) so the test can verify expiration quickly.
now := time.Now()
tracker.sessions["192.168.1.100:42"] = &streamSession{
SceneID: 42,
ClientIP: "192.168.1.100",
StartTime: now.Add(-5 * time.Second), // Started 5 seconds ago
LastActivity: now.Add(-200 * time.Millisecond), // Last activity 200ms ago (> 100ms timeout)
VideoDuration: 1.0, // Short video so timeSinceStart > videoDuration
}
// Verify session exists
assert.Len(t, tracker.sessions, 1)
// Process expired sessions - this will try to save activity but txnManager is nil
// so it will skip the DB calls but still remove the session
tracker.processExpiredSessions()
// Verify session was removed (even though DB calls were skipped)
assert.Len(t, tracker.sessions, 0)
}
func TestActivityTracker_SessionExpiration_StoppedEarly(t *testing.T) {
// Test that sessions expire when user stops watching early (before video ends)
// This was a bug where sessions wouldn't expire until video duration passed
config := &mockConfig{enabled: true, minPlayPercent: 10}
tracker := &ActivityTracker{
txnManager: nil,
sceneWriter: nil,
config: config,
sessionTimeout: 100 * time.Millisecond,
sessions: make(map[string]*streamSession),
}
// User started watching a 30-minute video but stopped after 5 seconds
now := time.Now()
tracker.sessions["192.168.1.100:42"] = &streamSession{
SceneID: 42,
ClientIP: "192.168.1.100",
StartTime: now.Add(-5 * time.Second), // Started 5 seconds ago
LastActivity: now.Add(-200 * time.Millisecond), // Last activity 200ms ago (> 100ms timeout)
VideoDuration: 1800.0, // 30 minute video - much longer than elapsed time
}
assert.Len(t, tracker.sessions, 1)
// Session should expire because timeSinceActivity > timeout
// Even though the video is 30 minutes and only 5 seconds have passed
tracker.processExpiredSessions()
// Verify session was expired
assert.Len(t, tracker.sessions, 0, "Session should expire when user stops early, not wait for video duration")
}
func TestActivityTracker_MinimumPlayPercentThreshold(t *testing.T) {
// Test the threshold logic without full transaction integration
config := &mockConfig{enabled: true, minPlayPercent: 75} // High threshold
tracker := &ActivityTracker{
txnManager: nil,
sceneWriter: nil,
config: config,
sessionTimeout: 50 * time.Millisecond,
sessions: make(map[string]*streamSession),
}
// Test that getMinimumPlayPercent returns the configured value
assert.Equal(t, 75, tracker.getMinimumPlayPercent())
// Create a session with 30% watched (36 seconds of a 120 second video)
now := time.Now()
session := &streamSession{
SceneID: 42,
StartTime: now.Add(-36 * time.Second),
LastActivity: now,
VideoDuration: 120.0,
}
// 30% is below 75% threshold
percentWatched := session.percentWatched()
assert.InDelta(t, 30.0, percentWatched, 0.1)
assert.False(t, percentWatched >= float64(tracker.getMinimumPlayPercent()))
}
func TestActivityTracker_MultipleSessions(t *testing.T) {
config := &mockConfig{enabled: true, minPlayPercent: 50}
// Create tracker without starting the goroutine (for unit testing)
tracker := &ActivityTracker{
txnManager: nil,
sceneWriter: nil,
config: config,
sessionTimeout: DefaultSessionTimeout,
sessions: make(map[string]*streamSession),
}
// Different clients watching same scene
tracker.RecordRequest(42, "192.168.1.100", 120.0)
tracker.RecordRequest(42, "192.168.1.101", 120.0)
// Same client watching different scenes
tracker.RecordRequest(43, "192.168.1.100", 180.0)
tracker.mutex.RLock()
assert.Len(t, tracker.sessions, 3)
tracker.mutex.RUnlock()
}
func TestActivityTracker_ShortSessionIgnored(t *testing.T) {
// Test that short sessions are ignored
// Create a session with only ~0.8% watched (1 second of a 120 second video)
now := time.Now()
session := &streamSession{
SceneID: 42,
ClientIP: "192.168.1.100",
StartTime: now.Add(-1 * time.Second), // Only 1 second
LastActivity: now,
VideoDuration: 120.0, // 2 minutes
}
// Verify percent watched is below threshold (1s / 120s = 0.83%)
assert.InDelta(t, 0.83, session.percentWatched(), 0.1)
// Verify elapsed time is short
elapsed := session.LastActivity.Sub(session.StartTime).Seconds()
assert.InDelta(t, 1.0, elapsed, 0.5)
// Both are below the minimum thresholds (1% and 5 seconds)
percentWatched := session.percentWatched()
shouldSkip := percentWatched < 1 && elapsed < 5
assert.True(t, shouldSkip, "Short session should be skipped")
}

View file

@ -278,6 +278,7 @@ type Server struct {
repository Repository
sceneServer sceneServer
ipWhitelistManager *ipWhitelistManager
activityTracker *ActivityTracker
VideoSortOrder string
subscribeLock sync.Mutex
@ -596,6 +597,7 @@ func (me *Server) initMux(mux *http.ServeMux) {
mux.HandleFunc(resPath, func(w http.ResponseWriter, r *http.Request) {
sceneId := r.URL.Query().Get("scene")
var scene *models.Scene
var videoDuration float64
repo := me.repository
err := repo.WithReadTxn(r.Context(), func(ctx context.Context) error {
sceneIdInt, err := strconv.Atoi(sceneId)
@ -603,6 +605,15 @@ func (me *Server) initMux(mux *http.ServeMux) {
return nil
}
scene, _ = repo.SceneFinder.Find(ctx, sceneIdInt)
if scene != nil {
// Load primary file to get duration for activity tracking
if err := scene.LoadPrimaryFile(ctx, repo.FileGetter); err != nil {
logger.Debugf("failed to load primary file for scene %d: %v", sceneIdInt, err)
}
if f := scene.Files.Primary(); f != nil {
videoDuration = f.Duration
}
}
return nil
})
if err != nil {
@ -615,6 +626,14 @@ func (me *Server) initMux(mux *http.ServeMux) {
w.Header().Set("transferMode.dlna.org", "Streaming")
w.Header().Set("contentFeatures.dlna.org", "DLNA.ORG_OP=01;DLNA.ORG_CI=0;DLNA.ORG_FLAGS=01500000000000000000000000000000")
// Track activity - uses time-based tracking, updated on each request
if me.activityTracker != nil {
sceneIdInt, _ := strconv.Atoi(sceneId)
clientIP, _, _ := net.SplitHostPort(r.RemoteAddr)
me.activityTracker.RecordRequest(sceneIdInt, clientIP, videoDuration)
}
me.sceneServer.StreamSceneDirect(scene, w, r)
})
mux.HandleFunc(rootDescPath, func(w http.ResponseWriter, r *http.Request) {

View file

@ -77,13 +77,29 @@ type Config interface {
GetDLNADefaultIPWhitelist() []string
GetVideoSortOrder() string
GetDLNAPortAsString() string
GetDLNAActivityTrackingEnabled() bool
}
// activityConfig wraps Config to implement ActivityConfig.
type activityConfig struct {
config Config
minPlayPercent int // cached from UI config
}
func (c *activityConfig) GetDLNAActivityTrackingEnabled() bool {
return c.config.GetDLNAActivityTrackingEnabled()
}
func (c *activityConfig) GetMinimumPlayPercent() int {
return c.minPlayPercent
}
type Service struct {
repository Repository
config Config
sceneServer sceneServer
ipWhitelistMgr *ipWhitelistManager
repository Repository
config Config
sceneServer sceneServer
ipWhitelistMgr *ipWhitelistManager
activityTracker *ActivityTracker
server *Server
running bool
@ -155,6 +171,7 @@ func (s *Service) init() error {
repository: s.repository,
sceneServer: s.sceneServer,
ipWhitelistManager: s.ipWhitelistMgr,
activityTracker: s.activityTracker,
Interfaces: interfaces,
HTTPConn: func() net.Listener {
conn, err := net.Listen("tcp", dmsConfig.Http)
@ -215,7 +232,14 @@ func (s *Service) init() error {
// }
// NewService initialises and returns a new DLNA service.
func NewService(repo Repository, cfg Config, sceneServer sceneServer) *Service {
// The sceneWriter parameter should implement SceneActivityWriter (typically models.SceneReaderWriter).
// The minPlayPercent parameter is the minimum percentage of video that must be played to increment play count.
func NewService(repo Repository, cfg Config, sceneServer sceneServer, sceneWriter SceneActivityWriter, minPlayPercent int) *Service {
activityCfg := &activityConfig{
config: cfg,
minPlayPercent: minPlayPercent,
}
ret := &Service{
repository: repo,
sceneServer: sceneServer,
@ -223,7 +247,8 @@ func NewService(repo Repository, cfg Config, sceneServer sceneServer) *Service {
ipWhitelistMgr: &ipWhitelistManager{
config: cfg,
},
mutex: sync.Mutex{},
activityTracker: NewActivityTracker(repo.TxnManager, sceneWriter, activityCfg),
mutex: sync.Mutex{},
}
return ret
@ -283,6 +308,12 @@ func (s *Service) Stop(duration *time.Duration) {
if s.running {
logger.Info("Stopping DLNA")
// Stop activity tracker first to process any pending sessions
if s.activityTracker != nil {
s.activityTracker.Stop()
}
err := s.server.Close()
if err != nil {
logger.Error(err)

View file

@ -147,6 +147,9 @@ func (t *SceneIdentifier) getOptions(source ScraperSource) MetadataOptions {
if source.Options.IncludeMalePerformers != nil {
options.IncludeMalePerformers = source.Options.IncludeMalePerformers
}
if source.Options.PerformerGenders != nil {
options.PerformerGenders = source.Options.PerformerGenders
}
if source.Options.SkipMultipleMatches != nil {
options.SkipMultipleMatches = source.Options.SkipMultipleMatches
}
@ -204,13 +207,23 @@ func (t *SceneIdentifier) getSceneUpdater(ctx context.Context, s *models.Scene,
ret.Partial.StudioID = models.NewOptionalInt(*studioID)
}
includeMalePerformers := true
if options.IncludeMalePerformers != nil {
includeMalePerformers = *options.IncludeMalePerformers
// Determine allowed genders for performer filtering
var allowedGenders []models.GenderEnum
if options.PerformerGenders != nil {
// New field takes precedence
allowedGenders = options.PerformerGenders
} else if options.IncludeMalePerformers != nil && !*options.IncludeMalePerformers {
// Legacy: if includeMalePerformers is false, include all genders except male
for _, g := range models.AllGenderEnum {
if g != models.GenderEnumMale {
allowedGenders = append(allowedGenders, g)
}
}
}
// nil allowedGenders means include all performers
addSkipSingleNamePerformerTag := false
performerIDs, err := rel.performers(ctx, !includeMalePerformers)
performerIDs, err := rel.performers(ctx, allowedGenders)
if err != nil {
if errors.Is(err, ErrSkipSingleNamePerformer) {
addSkipSingleNamePerformerTag = true

View file

@ -60,9 +60,15 @@ func TestSceneIdentifier_Identify(t *testing.T) {
)
defaultOptions := &MetadataOptions{
SetOrganized: &boolFalse,
SetCoverImage: &boolFalse,
IncludeMalePerformers: &boolFalse,
SetOrganized: &boolFalse,
SetCoverImage: &boolFalse,
PerformerGenders: []models.GenderEnum{
models.GenderEnumFemale,
models.GenderEnumTransgenderFemale,
models.GenderEnumTransgenderMale,
models.GenderEnumIntersex,
models.GenderEnumNonBinary,
},
SkipSingleNamePerformers: &boolFalse,
}
sources := []ScraperSource{
@ -216,9 +222,15 @@ func TestSceneIdentifier_modifyScene(t *testing.T) {
boolFalse := false
defaultOptions := &MetadataOptions{
SetOrganized: &boolFalse,
SetCoverImage: &boolFalse,
IncludeMalePerformers: &boolFalse,
SetOrganized: &boolFalse,
SetCoverImage: &boolFalse,
PerformerGenders: []models.GenderEnum{
models.GenderEnumFemale,
models.GenderEnumTransgenderFemale,
models.GenderEnumTransgenderMale,
models.GenderEnumIntersex,
models.GenderEnumNonBinary,
},
SkipSingleNamePerformers: &boolFalse,
}
tr := &SceneIdentifier{

View file

@ -5,6 +5,7 @@ import (
"io"
"strconv"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scraper"
)
@ -32,7 +33,10 @@ type MetadataOptions struct {
SetCoverImage *bool `json:"setCoverImage"`
SetOrganized *bool `json:"setOrganized"`
// defaults to true if not provided
// Deprecated: use PerformerGenders instead
IncludeMalePerformers *bool `json:"includeMalePerformers"`
// Filter to only include performers with these genders. If not provided, all genders are included.
PerformerGenders []models.GenderEnum `json:"performerGenders"`
// defaults to true if not provided
SkipMultipleMatches *bool `json:"skipMultipleMatches"`
// ID of tag to tag skipped multiple matches with

View file

@ -5,6 +5,7 @@ import (
"context"
"errors"
"fmt"
"slices"
"strconv"
"strings"
"time"
@ -69,7 +70,7 @@ func (g sceneRelationships) studio(ctx context.Context) (*int, error) {
return nil, nil
}
func (g sceneRelationships) performers(ctx context.Context, ignoreMale bool) ([]int, error) {
func (g sceneRelationships) performers(ctx context.Context, allowedGenders []models.GenderEnum) ([]int, error) {
fieldStrategy := g.fieldOptions["performers"]
scraped := g.result.result.Performers
@ -97,8 +98,11 @@ func (g sceneRelationships) performers(ctx context.Context, ignoreMale bool) ([]
singleNamePerformerSkipped := false
for _, p := range scraped {
if ignoreMale && p.Gender != nil && strings.EqualFold(*p.Gender, models.GenderEnumMale.String()) {
continue
if allowedGenders != nil && p.Gender != nil {
gender := models.GenderEnum(strings.ToUpper(*p.Gender))
if !slices.Contains(allowedGenders, gender) {
continue
}
}
performerID, err := getPerformerID(ctx, endpoint, g.performerCreator, p, createMissing, g.skipSingleNamePerformers)
@ -167,7 +171,9 @@ func (g sceneRelationships) tags(ctx context.Context) ([]int, error) {
} else if createMissing {
newTag := t.ToTag(endpoint, nil)
err := g.tagCreator.Create(ctx, newTag)
err := g.tagCreator.Create(ctx, &models.CreateTagInput{
Tag: newTag,
})
if err != nil {
return nil, fmt.Errorf("error creating tag: %w", err)
}

View file

@ -27,7 +27,7 @@ func Test_sceneRelationships_studio(t *testing.T) {
db := mocks.NewDatabase()
db.Studio.On("Create", testCtx, mock.Anything).Run(func(args mock.Arguments) {
s := args.Get(1).(*models.Studio)
s := args.Get(1).(*models.CreateStudioInput)
s.ID = validStoredIDInt
}).Return(nil)
@ -183,13 +183,13 @@ func Test_sceneRelationships_performers(t *testing.T) {
}
tests := []struct {
name string
scene *models.Scene
fieldOptions *FieldOptions
scraped []*models.ScrapedPerformer
ignoreMale bool
want []int
wantErr bool
name string
scene *models.Scene
fieldOptions *FieldOptions
scraped []*models.ScrapedPerformer
allowedGenders []models.GenderEnum
want []int
wantErr bool
}{
{
"ignore",
@ -202,7 +202,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
StoredID: &validStoredID,
},
},
false,
nil,
nil,
false,
},
@ -211,7 +211,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
emptyScene,
defaultOptions,
[]*models.ScrapedPerformer{},
false,
nil,
nil,
false,
},
@ -225,7 +225,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
StoredID: &existingPerformerStr,
},
},
false,
nil,
nil,
false,
},
@ -239,7 +239,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
StoredID: &validStoredID,
},
},
false,
nil,
[]int{existingPerformerID, validStoredIDInt},
false,
},
@ -254,7 +254,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
Gender: &male,
},
},
true,
[]models.GenderEnum{models.GenderEnumFemale, models.GenderEnumTransgenderMale, models.GenderEnumTransgenderFemale, models.GenderEnumIntersex, models.GenderEnumNonBinary},
nil,
false,
},
@ -270,7 +270,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
StoredID: &validStoredID,
},
},
false,
nil,
[]int{validStoredIDInt},
false,
},
@ -287,7 +287,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
Gender: &female,
},
},
true,
[]models.GenderEnum{models.GenderEnumFemale, models.GenderEnumTransgenderMale, models.GenderEnumTransgenderFemale, models.GenderEnumIntersex, models.GenderEnumNonBinary},
[]int{validStoredIDInt},
false,
},
@ -304,7 +304,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
StoredID: &invalidStoredID,
},
},
false,
nil,
nil,
true,
},
@ -319,7 +319,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
},
}
got, err := tr.performers(testCtx, tt.ignoreMale)
got, err := tr.performers(testCtx, tt.allowedGenders)
if (err != nil) != tt.wantErr {
t.Errorf("sceneRelationships.performers() error = %v, wantErr %v", err, tt.wantErr)
return
@ -368,14 +368,14 @@ func Test_sceneRelationships_tags(t *testing.T) {
db := mocks.NewDatabase()
db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.Tag) bool {
return p.Name == validName
db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateTagInput) bool {
return p.Tag.Name == validName
})).Run(func(args mock.Arguments) {
t := args.Get(1).(*models.Tag)
t.ID = validStoredIDInt
t := args.Get(1).(*models.CreateTagInput)
t.Tag.ID = validStoredIDInt
}).Return(nil)
db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.Tag) bool {
return p.Name == invalidName
db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateTagInput) bool {
return p.Tag.Name == invalidName
})).Return(errors.New("error creating tag"))
tr := sceneRelationships{

View file

@ -21,13 +21,13 @@ func Test_createMissingStudio(t *testing.T) {
db := mocks.NewDatabase()
db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.Studio) bool {
db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateStudioInput) bool {
return p.Name == validName
})).Run(func(args mock.Arguments) {
s := args.Get(1).(*models.Studio)
s := args.Get(1).(*models.CreateStudioInput)
s.ID = createdID
}).Return(nil)
db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.Studio) bool {
db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateStudioInput) bool {
return p.Name == invalidName
})).Return(errors.New("error creating studio"))

185
internal/manager/backup.go Normal file
View file

@ -0,0 +1,185 @@
package manager
import (
"archive/zip"
"fmt"
"io"
"io/fs"
"os"
"path/filepath"
"strings"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/logger"
)
type databaseBackupZip struct {
*zip.Writer
}
func (z *databaseBackupZip) zipFileRename(fn, outDir, outFn string) error {
p := filepath.Join(outDir, outFn)
p = filepath.ToSlash(p)
f, err := z.Create(p)
if err != nil {
return fmt.Errorf("error creating zip entry for %s: %v", fn, err)
}
i, err := os.Open(fn)
if err != nil {
return fmt.Errorf("error opening %s: %v", fn, err)
}
defer i.Close()
if _, err := io.Copy(f, i); err != nil {
return fmt.Errorf("error writing %s to zip: %v", fn, err)
}
return nil
}
func (z *databaseBackupZip) zipFile(fn, outDir string) error {
return z.zipFileRename(fn, outDir, filepath.Base(fn))
}
func (s *Manager) BackupDatabase(download bool, includeBlobs bool) (string, string, error) {
var backupPath string
var backupName string
// if we include blobs, then the output is a zip file
// if not, using the same backup logic as before, which creates a sqlite file
if !includeBlobs || s.Config.GetBlobsStorage() != config.BlobStorageTypeFilesystem {
return s.backupDatabaseOnly(download)
}
// use tmp directory for the backup
backupDir := s.Paths.Generated.Tmp
if err := fsutil.EnsureDir(backupDir); err != nil {
return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err)
}
f, err := os.CreateTemp(backupDir, "backup*.sqlite")
if err != nil {
return "", "", err
}
backupPath = f.Name()
backupName = s.Database.DatabaseBackupPath("")
f.Close()
// delete the temp file so that the backup operation can create it
if err := os.Remove(backupPath); err != nil {
return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err)
}
if err := s.Database.Backup(backupPath); err != nil {
return "", "", err
}
// create a zip file
zipFileDir := s.Paths.Generated.Downloads
if !download {
zipFileDir = s.Config.GetBackupDirectoryPathOrDefault()
if zipFileDir != "" {
if err := fsutil.EnsureDir(zipFileDir); err != nil {
return "", "", fmt.Errorf("could not create backup directory %v: %w", zipFileDir, err)
}
}
}
zipFileName := backupName + ".zip"
zipFilePath := filepath.Join(zipFileDir, zipFileName)
logger.Debugf("Preparing zip file for database backup at %v", zipFilePath)
zf, err := os.Create(zipFilePath)
if err != nil {
return "", "", fmt.Errorf("could not create zip file %v: %w", zipFilePath, err)
}
defer zf.Close()
z := databaseBackupZip{
Writer: zip.NewWriter(zf),
}
defer z.Close()
// move the database file into the zip
dbFn := filepath.Base(s.Config.GetDatabasePath())
if err := z.zipFileRename(backupPath, "", dbFn); err != nil {
return "", "", fmt.Errorf("could not add database backup to zip file: %w", err)
}
if err := os.Remove(backupPath); err != nil {
return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err)
}
// walk the blobs directory and add files to the zip
blobsDir := s.Config.GetBlobsPath()
err = filepath.WalkDir(blobsDir, func(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if d.IsDir() {
return nil
}
// calculate out dir by removing the blobsDir prefix from the path
outDir := filepath.Join("blobs", strings.TrimPrefix(filepath.Dir(path), blobsDir))
if err := z.zipFile(path, outDir); err != nil {
return fmt.Errorf("could not add blob %v to zip file: %w", path, err)
}
return nil
})
if err != nil {
return "", "", fmt.Errorf("error walking blobs directory: %w", err)
}
return zipFilePath, zipFileName, nil
}
func (s *Manager) backupDatabaseOnly(download bool) (string, string, error) {
var backupPath string
var backupName string
if download {
backupDir := s.Paths.Generated.Downloads
if err := fsutil.EnsureDir(backupDir); err != nil {
return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err)
}
f, err := os.CreateTemp(backupDir, "backup*.sqlite")
if err != nil {
return "", "", err
}
backupPath = f.Name()
backupName = s.Database.DatabaseBackupPath("")
f.Close()
// delete the temp file so that the backup operation can create it
if err := os.Remove(backupPath); err != nil {
return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err)
}
} else {
backupDir := s.Config.GetBackupDirectoryPathOrDefault()
if backupDir != "" {
if err := fsutil.EnsureDir(backupDir); err != nil {
return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err)
}
}
backupPath = s.Database.DatabaseBackupPath(backupDir)
backupName = filepath.Base(backupPath)
}
err := s.Database.Backup(backupPath)
if err != nil {
return "", "", err
}
return backupPath, backupName, nil
}

View file

@ -83,6 +83,21 @@ const (
ParallelTasks = "parallel_tasks"
parallelTasksDefault = 1
UseCustomSpriteInterval = "use_custom_sprite_interval"
UseCustomSpriteIntervalDefault = false
SpriteInterval = "sprite_interval"
SpriteIntervalDefault = 30
MinimumSprites = "minimum_sprites"
MinimumSpritesDefault = 10
MaximumSprites = "maximum_sprites"
MaximumSpritesDefault = 500
SpriteScreenshotSize = "sprite_screenshot_width"
spriteScreenshotSizeDefault = 160
PreviewPreset = "preview_preset"
TranscodeHardwareAcceleration = "ffmpeg.hardware_acceleration"
@ -194,6 +209,7 @@ const (
CSSEnabled = "cssenabled"
JavascriptEnabled = "javascriptenabled"
CustomLocalesEnabled = "customlocalesenabled"
DisableCustomizations = "disable_customizations"
ShowScrubber = "show_scrubber"
showScrubberDefault = true
@ -974,6 +990,50 @@ func (i *Config) GetParallelTasksWithAutoDetection() int {
return parallelTasks
}
// GetUseCustomSpriteInterval returns true if the sprite minimum, maximum, and interval settings
// should be used instead of the default
func (i *Config) GetUseCustomSpriteInterval() bool {
value := i.getBool(UseCustomSpriteInterval)
return value
}
// GetSpriteInterval returns the time (in seconds) to be between each scrubber sprite
// A value of 0 indicates that the sprite interval should be automatically determined
// based on the minimum sprite setting.
func (i *Config) GetSpriteInterval() float64 {
value := i.getFloat64(SpriteInterval)
return value
}
// GetMinimumSprites returns the minimum number of sprites that have to be generated
// A value of 0 will be overridden with the default of 10.
func (i *Config) GetMinimumSprites() int {
value := i.getInt(MinimumSprites)
if value <= 0 {
return MinimumSpritesDefault
}
return value
}
// GetMaximumSprites returns the maximum number of sprites that can be generated
// A value of 0 indicates no maximum.
func (i *Config) GetMaximumSprites() int {
value := i.getInt(MaximumSprites)
return value
}
// GetSpriteScreenshotSize returns the required size of the screenshots to be taken
// during sprite generation in pixels. This will be the width for landscape scenes
// and the height for portrait scenes, with the other dimension being scaled to maintain
// the aspect ratio. If the value is less than or equal to 0, the default will be used.
func (i *Config) GetSpriteScreenshotSize() int {
value := i.getInt(SpriteScreenshotSize)
if value <= 0 {
return spriteScreenshotSizeDefault
}
return value
}
func (i *Config) GetPreviewAudio() bool {
return i.getBool(PreviewAudio)
}
@ -1323,6 +1383,26 @@ func (i *Config) GetUIConfiguration() map[string]interface{} {
return i.forKey(UI).Cut(UI).Raw()
}
// GetMinimumPlayPercent returns the minimum percentage of a video that must be
// watched before incrementing the play count. Returns 0 if not configured.
func (i *Config) GetMinimumPlayPercent() int {
uiConfig := i.GetUIConfiguration()
if uiConfig == nil {
return 0
}
if val, ok := uiConfig["minimumPlayPercent"]; ok {
switch v := val.(type) {
case int:
return v
case float64:
return int(v)
case int64:
return int(v)
}
}
return 0
}
func (i *Config) SetUIConfiguration(v map[string]interface{}) {
i.Lock()
defer i.Unlock()
@ -1459,6 +1539,13 @@ func (i *Config) GetCustomLocalesEnabled() bool {
return i.getBool(CustomLocalesEnabled)
}
// GetDisableCustomizations returns true if all customizations (plugins, custom CSS,
// custom JavaScript, and custom locales) should be disabled. This is useful for
// troubleshooting issues without permanently disabling individual customizations.
func (i *Config) GetDisableCustomizations() bool {
return i.getBool(DisableCustomizations)
}
func (i *Config) GetHandyKey() string {
return i.getString(HandyKey)
}
@ -1615,6 +1702,22 @@ func (i *Config) GetDLNAPortAsString() string {
return ":" + strconv.Itoa(i.GetDLNAPort())
}
// GetDLNAActivityTrackingEnabled returns true if DLNA activity tracking is enabled.
// This uses the same "trackActivity" UI setting that controls frontend play history tracking.
// When enabled, scenes played via DLNA will have their play count and duration tracked.
func (i *Config) GetDLNAActivityTrackingEnabled() bool {
uiConfig := i.GetUIConfiguration()
if uiConfig == nil {
return true // Default to enabled
}
if val, ok := uiConfig["trackActivity"]; ok {
if v, ok := val.(bool); ok {
return v
}
}
return true // Default to enabled
}
// GetVideoSortOrder returns the sort order to display videos. If
// empty, videos will be sorted by titles.
func (i *Config) GetVideoSortOrder() string {
@ -1817,6 +1920,12 @@ func (i *Config) setDefaultValues() {
i.setDefault(PreviewAudio, previewAudioDefault)
i.setDefault(SoundOnPreview, false)
i.setDefault(UseCustomSpriteInterval, UseCustomSpriteIntervalDefault)
i.setDefault(SpriteInterval, SpriteIntervalDefault)
i.setDefault(MinimumSprites, MinimumSpritesDefault)
i.setDefault(MaximumSprites, MaximumSpritesDefault)
i.setDefault(SpriteScreenshotSize, spriteScreenshotSizeDefault)
i.setDefault(ThemeColor, DefaultThemeColor)
i.setDefault(WriteImageThumbnails, writeImageThumbnailsDefault)

View file

@ -38,3 +38,12 @@ func (s StashConfigs) GetStashFromDirPath(dirPath string) *StashConfig {
}
return nil
}
func (s StashConfigs) Paths() []string {
paths := make([]string, len(s))
for i, c := range s {
// #6618 - clean the path to ensure comparison works correctly
paths[i] = filepath.Clean(c.Path)
}
return paths
}

View file

@ -11,8 +11,10 @@ type ScanMetadataOptions struct {
ScanGenerateImagePreviews bool `json:"scanGenerateImagePreviews"`
// Generate sprites during scan
ScanGenerateSprites bool `json:"scanGenerateSprites"`
// Generate phashes during scan
// Generate video phashes during scan
ScanGeneratePhashes bool `json:"scanGeneratePhashes"`
// Generate image phashes during scan
ScanGenerateImagePhashes bool `json:"scanGenerateImagePhashes"`
// Generate image thumbnails during scan
ScanGenerateThumbnails bool `json:"scanGenerateThumbnails"`
// Generate image thumbnails during scan

View file

@ -21,8 +21,7 @@ type SpriteGenerator struct {
VideoChecksum string
ImageOutputPath string
VTTOutputPath string
Rows int
Columns int
Config SpriteGeneratorConfig
SlowSeek bool // use alternate seek function, very slow!
Overwrite bool
@ -30,13 +29,81 @@ type SpriteGenerator struct {
g *generate.Generator
}
func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageOutputPath string, vttOutputPath string, rows int, cols int) (*SpriteGenerator, error) {
// SpriteGeneratorConfig holds configuration for the SpriteGenerator
type SpriteGeneratorConfig struct {
// MinimumSprites is the minimum number of sprites to generate, even if the video duration is short
// SpriteInterval will be adjusted accordingly to ensure at least this many sprites are generated.
// A value of 0 means no minimum, and the generator will use the provided SpriteInterval or
// calculate it based on the video duration and MaximumSprites
MinimumSprites int
// MaximumSprites is the maximum number of sprites to generate, even if the video duration is long
// SpriteInterval will be adjusted accordingly to ensure no more than this many sprites are generated
// A value of 0 means no maximum, and the generator will use the provided SpriteInterval or
// calculate it based on the video duration and MinimumSprites
MaximumSprites int
// SpriteInterval is the default interval in seconds between each sprite.
// If MinimumSprites or MaximumSprites are set, this value will be adjusted accordingly
// to ensure the desired number of sprites are generated
// A value of 0 means the generator will calculate the interval based on the video duration and
// the provided MinimumSprites and MaximumSprites
SpriteInterval float64
// SpriteSize is the size in pixels of the longest dimension of each sprite image.
// The other dimension will be automatically calculated to maintain the aspect ratio of the video
SpriteSize int
}
const (
// DefaultSpriteAmount is the default number of sprites to generate if no configuration is provided
// This corresponds to the legacy behavior of the generator, which generates 81 sprites at equal
// intervals across the video duration
DefaultSpriteAmount = 81
// DefaultSpriteSize is the default size in pixels of the longest dimension of each sprite image
// if no configuration is provided. This corresponds to the legacy behavior of the generator.
DefaultSpriteSize = 160
)
var DefaultSpriteGeneratorConfig = SpriteGeneratorConfig{
MinimumSprites: DefaultSpriteAmount,
MaximumSprites: DefaultSpriteAmount,
SpriteInterval: 0,
SpriteSize: DefaultSpriteSize,
}
// NewSpriteGenerator creates a new SpriteGenerator for the given video file and configuration
// It calculates the appropriate sprite interval and count based on the video duration and the provided configuration
func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageOutputPath string, vttOutputPath string, config SpriteGeneratorConfig) (*SpriteGenerator, error) {
exists, err := fsutil.FileExists(videoFile.Path)
if !exists {
return nil, err
}
if videoFile.VideoStreamDuration <= 0 {
s := fmt.Sprintf("video %s: duration(%.3f)/frame count(%d) invalid, skipping sprite creation", videoFile.Path, videoFile.VideoStreamDuration, videoFile.FrameCount)
return nil, errors.New(s)
}
config.SpriteInterval = calculateSpriteInterval(videoFile, config)
chunkCount := int(math.Ceil(videoFile.VideoStreamDuration / config.SpriteInterval))
// adjust the chunk count to the next highest perfect square, to ensure the sprite image
// is completely filled (no empty space in the grid) and the grid is as square as possible (minimizing the number of rows/columns)
gridSize := generate.GetSpriteGridSize(chunkCount)
newChunkCount := gridSize * gridSize
if newChunkCount != chunkCount {
logger.Debugf("[generator] adjusting chunk count from %d to %d to fit a %dx%d grid", chunkCount, newChunkCount, gridSize, gridSize)
chunkCount = newChunkCount
}
if config.SpriteSize <= 0 {
config.SpriteSize = DefaultSpriteSize
}
slowSeek := false
chunkCount := rows * cols
// For files with small duration / low frame count try to seek using frame number intead of seconds
if videoFile.VideoStreamDuration < 5 || (0 < videoFile.FrameCount && videoFile.FrameCount <= int64(chunkCount)) { // some files can have FrameCount == 0, only use SlowSeek if duration < 5
@ -71,9 +138,8 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageO
VideoChecksum: videoChecksum,
ImageOutputPath: imageOutputPath,
VTTOutputPath: vttOutputPath,
Rows: rows,
Config: config,
SlowSeek: slowSeek,
Columns: cols,
g: &generate.Generator{
Encoder: instance.FFMpeg,
FFMpegConfig: instance.Config,
@ -83,6 +149,40 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageO
}, nil
}
func calculateSpriteInterval(videoFile ffmpeg.VideoFile, config SpriteGeneratorConfig) float64 {
// If a custom sprite interval is provided, start with that
spriteInterval := config.SpriteInterval
// If no custom interval is provided, calculate the interval based on the
// video duration and minimum sprite count
if spriteInterval <= 0 {
minSprites := config.MinimumSprites
if minSprites <= 0 {
panic("invalid configuration: MinimumSprites must be greater than 0 if SpriteInterval is not set")
}
logger.Debugf("[generator] calculating sprite interval for video duration %.3fs with minimum sprites %d", videoFile.VideoStreamDuration, minSprites)
return videoFile.VideoStreamDuration / float64(minSprites)
}
// Calculate the number of sprites that would be generated with the provided interval
spriteCount := int(math.Ceil(videoFile.VideoStreamDuration / spriteInterval))
// If the calculated sprite count is greater than the maximum, adjust the interval to meet the maximum
if config.MaximumSprites > 0 && spriteCount > int(config.MaximumSprites) {
spriteInterval = videoFile.VideoStreamDuration / float64(config.MaximumSprites)
logger.Debugf("[generator] provided sprite interval %.1fs results in %d sprites, which exceeds the maximum of %d, adjusting interval to %.1fs", config.SpriteInterval, spriteCount, config.MaximumSprites, spriteInterval)
}
// If the calculated sprite count is less than the minimum, adjust the interval to meet the minimum
if config.MinimumSprites > 0 && spriteCount < int(config.MinimumSprites) {
spriteInterval = videoFile.VideoStreamDuration / float64(config.MinimumSprites)
logger.Debugf("[generator] provided sprite interval %.1fs results in %d sprites, which is less than the minimum of %d, adjusting interval to %.1fs", config.SpriteInterval, spriteCount, config.MinimumSprites, spriteInterval)
}
return spriteInterval
}
func (g *SpriteGenerator) Generate() error {
if err := g.generateSpriteImage(); err != nil {
return err
@ -100,6 +200,8 @@ func (g *SpriteGenerator) generateSpriteImage() error {
var images []image.Image
isPortrait := g.Info.VideoFile.Height > g.Info.VideoFile.Width
if !g.SlowSeek {
logger.Infof("[generator] generating sprite image for %s", g.Info.VideoFile.Path)
// generate `ChunkCount` thumbnails
@ -107,8 +209,7 @@ func (g *SpriteGenerator) generateSpriteImage() error {
for i := 0; i < g.Info.ChunkCount; i++ {
time := float64(i) * stepSize
img, err := g.g.SpriteScreenshot(context.TODO(), g.Info.VideoFile.Path, time)
img, err := g.g.SpriteScreenshot(context.TODO(), g.Info.VideoFile.Path, time, g.Config.SpriteSize, isPortrait)
if err != nil {
return err
}
@ -126,7 +227,7 @@ func (g *SpriteGenerator) generateSpriteImage() error {
return errors.New("invalid frame number conversion")
}
img, err := g.g.SpriteScreenshotSlow(context.TODO(), g.Info.VideoFile.Path, int(frame))
img, err := g.g.SpriteScreenshotSlow(context.TODO(), g.Info.VideoFile.Path, int(frame), g.Config.SpriteSize)
if err != nil {
return err
}
@ -158,7 +259,7 @@ func (g *SpriteGenerator) generateSpriteVTT() error {
stepSize /= g.Info.FrameRate
}
return g.g.SpriteVTT(context.TODO(), g.VTTOutputPath, g.ImageOutputPath, stepSize)
return g.g.SpriteVTT(context.TODO(), g.VTTOutputPath, g.ImageOutputPath, stepSize, g.Info.ChunkCount)
}
func (g *SpriteGenerator) imageExists() bool {

View file

@ -78,7 +78,7 @@ func Initialize(cfg *config.Config, l *log.Logger) (*Manager, error) {
}
dlnaRepository := dlna.NewRepository(repo)
dlnaService := dlna.NewService(dlnaRepository, cfg, sceneServer)
dlnaService := dlna.NewService(dlnaRepository, cfg, sceneServer, repo.Scene, cfg.GetMinimumPlayPercent())
mgr := &Manager{
Config: cfg,

View file

@ -313,46 +313,6 @@ func (s *Manager) validateFFmpeg() error {
return nil
}
func (s *Manager) BackupDatabase(download bool) (string, string, error) {
var backupPath string
var backupName string
if download {
backupDir := s.Paths.Generated.Downloads
if err := fsutil.EnsureDir(backupDir); err != nil {
return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err)
}
f, err := os.CreateTemp(backupDir, "backup*.sqlite")
if err != nil {
return "", "", err
}
backupPath = f.Name()
backupName = s.Database.DatabaseBackupPath("")
f.Close()
// delete the temp file so that the backup operation can create it
if err := os.Remove(backupPath); err != nil {
return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err)
}
} else {
backupDir := s.Config.GetBackupDirectoryPathOrDefault()
if backupDir != "" {
if err := fsutil.EnsureDir(backupDir); err != nil {
return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err)
}
}
backupPath = s.Database.DatabaseBackupPath(backupDir)
backupName = filepath.Base(backupPath)
}
err := s.Database.Backup(backupPath)
if err != nil {
return "", "", err
}
return backupPath, backupName, nil
}
func (s *Manager) AnonymiseDatabase(download bool) (string, string, error) {
var outPath string
var outName string

View file

@ -74,6 +74,28 @@ func getScanPaths(inputPaths []string) []*config.StashConfig {
return ret
}
// Filters the input array for paths that are within the paths managed by stash
func filterStashPaths(inputPaths []string) []string {
if len(inputPaths) == 0 {
return inputPaths
}
stashPaths := config.GetInstance().GetStashPaths()
var ret []string
for _, p := range inputPaths {
s := stashPaths.GetStashFromDirPath(p)
if s == nil {
logger.Warnf("%s is not in the configured stash paths", p)
continue
}
ret = append(ret, p)
}
return ret
}
// ScanSubscribe subscribes to a notification that is triggered when a
// scan or clean is complete.
func (s *Manager) ScanSubscribe(ctx context.Context) <-chan bool {
@ -100,6 +122,8 @@ func (s *Manager) Scan(ctx context.Context, input ScanMetadataInput) (int, error
return 0, err
}
cfg := config.GetInstance()
scanner := &file.Scanner{
Repository: file.NewRepository(s.Repository),
FileDecorators: []file.Decorator{
@ -118,6 +142,11 @@ func (s *Manager) Scan(ctx context.Context, input ScanMetadataInput) (int, error
},
FingerprintCalculator: &fingerprintCalculator{s.Config},
FS: &file.OsFS{},
ZipFileExtensions: cfg.GetGalleryExtensions(),
// ScanFilters is set in ScanJob.Execute
// HandlerRequiredFilters is set in ScanJob.Execute
RootPaths: cfg.GetStashPaths().Paths(),
Rescan: input.Rescan,
}
scanJob := ScanJob{
@ -285,6 +314,8 @@ type CleanMetadataInput struct {
Paths []string `json:"paths"`
// Do a dry run. Don't delete any files
DryRun bool `json:"dryRun"`
IgnoreZipFileContents bool `json:"ignoreZipFileContents"`
}
func (s *Manager) Clean(ctx context.Context, input CleanMetadataInput) int {
@ -402,7 +433,7 @@ type StashBoxBatchTagInput struct {
ExcludeFields []string `json:"exclude_fields"`
// Refresh items already tagged by StashBox if true. Only tag items with no StashBox tagging if false
Refresh bool `json:"refresh"`
// If batch adding studios, should their parent studios also be created?
// If batch adding studios or tags, should their parent entities also be created?
CreateParent bool `json:"createParent"`
// IDs in stash of the items to update.
// If set, names and stash_ids fields will be ignored.
@ -698,3 +729,137 @@ func (s *Manager) StashBoxBatchStudioTag(ctx context.Context, box *models.StashB
return s.JobManager.Add(ctx, "Batch stash-box studio tag...", j)
}
func (s *Manager) batchTagTagsByIds(ctx context.Context, input StashBoxBatchTagInput, box *models.StashBox) ([]Task, error) {
var tasks []Task
err := s.Repository.WithTxn(ctx, func(ctx context.Context) error {
tagQuery := s.Repository.Tag
for _, tagID := range input.Ids {
if id, err := strconv.Atoi(tagID); err == nil {
t, err := tagQuery.Find(ctx, id)
if err != nil {
return err
}
if err := t.LoadStashIDs(ctx, tagQuery); err != nil {
return fmt.Errorf("loading tag stash ids: %w", err)
}
hasStashID := t.StashIDs.ForEndpoint(box.Endpoint) != nil
if (input.Refresh && hasStashID) || (!input.Refresh && !hasStashID) {
tasks = append(tasks, &stashBoxBatchTagTagTask{
tag: t,
createParent: input.CreateParent,
box: box,
excludedFields: input.ExcludeFields,
})
}
}
}
return nil
})
return tasks, err
}
func (s *Manager) batchTagTagsByNamesOrStashIds(input StashBoxBatchTagInput, box *models.StashBox) []Task {
var tasks []Task
for i := range input.StashIDs {
stashID := input.StashIDs[i]
if len(stashID) > 0 {
tasks = append(tasks, &stashBoxBatchTagTagTask{
stashID: &stashID,
createParent: input.CreateParent,
box: box,
excludedFields: input.ExcludeFields,
})
}
}
for i := range input.Names {
name := input.Names[i]
if len(name) > 0 {
tasks = append(tasks, &stashBoxBatchTagTagTask{
name: &name,
createParent: input.CreateParent,
box: box,
excludedFields: input.ExcludeFields,
})
}
}
return tasks
}
func (s *Manager) batchTagAllTags(ctx context.Context, input StashBoxBatchTagInput, box *models.StashBox) ([]Task, error) {
var tasks []Task
err := s.Repository.WithTxn(ctx, func(ctx context.Context) error {
tagQuery := s.Repository.Tag
var tags []*models.Tag
var err error
tags, err = tagQuery.FindByStashIDStatus(ctx, input.Refresh, box.Endpoint)
if err != nil {
return fmt.Errorf("error querying tags: %v", err)
}
for _, t := range tags {
tasks = append(tasks, &stashBoxBatchTagTagTask{
tag: t,
createParent: input.CreateParent,
box: box,
excludedFields: input.ExcludeFields,
})
}
return nil
})
return tasks, err
}
func (s *Manager) StashBoxBatchTagTag(ctx context.Context, box *models.StashBox, input StashBoxBatchTagInput) int {
j := job.MakeJobExec(func(ctx context.Context, progress *job.Progress) error {
logger.Infof("Initiating stash-box batch tag tag")
var tasks []Task
var err error
switch input.getBatchTagType(false) {
case batchTagByIds:
tasks, err = s.batchTagTagsByIds(ctx, input, box)
case batchTagByNamesOrStashIds:
tasks = s.batchTagTagsByNamesOrStashIds(input, box)
case batchTagAll:
tasks, err = s.batchTagAllTags(ctx, input, box)
}
if err != nil {
return err
}
if len(tasks) == 0 {
return nil
}
progress.SetTotal(len(tasks))
logger.Infof("Starting stash-box batch operation for %d tags", len(tasks))
for _, task := range tasks {
progress.ExecuteTask(task.GetDescription(), func() {
task.Start(ctx)
})
progress.Increment()
}
return nil
})
return s.JobManager.Add(ctx, "Batch stash-box tag tag...", j)
}

View file

@ -10,17 +10,17 @@ import (
)
type SceneService interface {
Create(ctx context.Context, input *models.Scene, fileIDs []models.FileID, coverImage []byte) (*models.Scene, error)
Create(ctx context.Context, input models.CreateSceneInput) (*models.Scene, error)
AssignFile(ctx context.Context, sceneID int, fileID models.FileID) error
Merge(ctx context.Context, sourceIDs []int, destinationID int, fileDeleter *scene.FileDeleter, options scene.MergeOptions) error
Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile bool) error
Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error
FindByIDs(ctx context.Context, ids []int, load ...scene.LoadRelationshipOption) ([]*models.Scene, error)
sceneFingerprintGetter
}
type ImageService interface {
Destroy(ctx context.Context, image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error
Destroy(ctx context.Context, image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error
DestroyZipImages(ctx context.Context, zipFile models.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error)
}
@ -31,7 +31,7 @@ type GalleryService interface {
SetCover(ctx context.Context, g *models.Gallery, coverImageId int) error
ResetCover(ctx context.Context, g *models.Gallery) error
Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error)
Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) ([]*models.Image, error)
ValidateImageGalleryChange(ctx context.Context, i *models.Image, updateIDs models.UpdateIDs) error
@ -39,7 +39,7 @@ type GalleryService interface {
}
type GroupService interface {
Create(ctx context.Context, group *models.Group, frontimageData []byte, backimageData []byte) error
Create(ctx context.Context, input *models.CreateGroupInput) error
UpdatePartial(ctx context.Context, id int, updatedGroup models.GroupPartial, frontImage group.ImageInput, backImage group.ImageInput) (*models.Group, error)
AddSubGroups(ctx context.Context, groupID int, subGroups []models.GroupIDDescription, insertIndex *int) error

View file

@ -0,0 +1,268 @@
//go:build integration
// +build integration
package manager
import (
"context"
"io/fs"
"os"
"path/filepath"
"testing"
"github.com/stashapp/stash/pkg/file"
// Necessary to register custom migrations.
_ "github.com/stashapp/stash/pkg/sqlite/migrations"
)
// stashIgnorePathFilter wraps StashIgnoreFilter to implement PathFilter for testing.
// It provides a fixed library root for the filter.
type stashIgnorePathFilter struct {
filter *file.StashIgnoreFilter
libraryRoot string
}
func (f *stashIgnorePathFilter) Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool {
return f.filter.Accept(ctx, path, info, f.libraryRoot, zipFilePath)
}
// createTestFileOnDisk creates a file with some content.
func createTestFileOnDisk(t *testing.T, dir, name string) string {
t.Helper()
path := filepath.Join(dir, name)
if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil {
t.Fatalf("failed to create directory for %s: %v", path, err)
}
// Write some content so the file has a non-zero size.
if err := os.WriteFile(path, []byte("test content for "+name), 0644); err != nil {
t.Fatalf("failed to create file %s: %v", path, err)
}
return path
}
// createStashIgnoreFile creates a .stashignore file with the given content.
func createStashIgnoreFile(t *testing.T, dir, content string) {
t.Helper()
path := filepath.Join(dir, ".stashignore")
if err := os.WriteFile(path, []byte(content), 0644); err != nil {
t.Fatalf("failed to create .stashignore: %v", err)
}
}
func TestScannerWithStashIgnore(t *testing.T) {
// Create temp directory structure.
tmpDir := t.TempDir()
// Create test files.
createTestFileOnDisk(t, tmpDir, "video1.mp4")
createTestFileOnDisk(t, tmpDir, "video2.mp4")
createTestFileOnDisk(t, tmpDir, "ignore_me.mp4")
createTestFileOnDisk(t, tmpDir, "subdir/video3.mp4")
createTestFileOnDisk(t, tmpDir, "subdir/skip_this.mp4")
createTestFileOnDisk(t, tmpDir, "excluded_dir/video4.mp4")
createTestFileOnDisk(t, tmpDir, "temp/processing.mp4")
// Create .stashignore file.
stashignore := `# Ignore specific files
ignore_me.mp4
subdir/skip_this.mp4
# Ignore directories
excluded_dir/
temp/
`
createStashIgnoreFile(t, tmpDir, stashignore)
// Create stashignore filter with library root.
stashIgnoreFilter := &stashIgnorePathFilter{
filter: file.NewStashIgnoreFilter(),
libraryRoot: tmpDir,
}
// Create scanner.
scanner := &file.Scanner{
ScanFilters: []file.PathFilter{stashIgnoreFilter},
}
testScenarios := []struct {
path string
accepted bool
}{
{filepath.Join(tmpDir, "video1.mp4"), true},
{filepath.Join(tmpDir, "video2.mp4"), true},
{filepath.Join(tmpDir, "ignore_me.mp4"), false},
{filepath.Join(tmpDir, "subdir/video3.mp4"), true},
{filepath.Join(tmpDir, "subdir/skip_this.mp4"), false},
{filepath.Join(tmpDir, "excluded_dir/video4.mp4"), false},
{filepath.Join(tmpDir, "temp/processing.mp4"), false},
}
ctx := context.Background()
for _, scenario := range testScenarios {
info, err := os.Stat(scenario.path)
if err != nil {
t.Fatalf("failed to stat file %s: %v", scenario.path, err)
}
accepted := scanner.AcceptEntry(ctx, scenario.path, info, "")
if accepted != scenario.accepted {
t.Errorf("unexpected accept result for %s: expected %v, got %v",
scenario.path, scenario.accepted, accepted)
}
}
}
func TestScannerWithNestedStashIgnore(t *testing.T) {
// Create temp directory structure.
tmpDir := t.TempDir()
// Create test files.
createTestFileOnDisk(t, tmpDir, "root.mp4")
createTestFileOnDisk(t, tmpDir, "root.tmp")
createTestFileOnDisk(t, tmpDir, "subdir/sub.mp4")
createTestFileOnDisk(t, tmpDir, "subdir/sub.log")
createTestFileOnDisk(t, tmpDir, "subdir/sub.tmp")
// Root .stashignore excludes *.tmp.
createStashIgnoreFile(t, tmpDir, "*.tmp\n")
// Subdir .stashignore excludes *.log.
createStashIgnoreFile(t, filepath.Join(tmpDir, "subdir"), "*.log\n")
// Create stashignore filter with library root.
stashIgnoreFilter := &stashIgnorePathFilter{
filter: file.NewStashIgnoreFilter(),
libraryRoot: tmpDir,
}
// Create scanner.
scanner := &file.Scanner{
ScanFilters: []file.PathFilter{stashIgnoreFilter},
}
testScenarios := []struct {
path string
accepted bool
}{
{filepath.Join(tmpDir, "root.mp4"), true},
{filepath.Join(tmpDir, "root.tmp"), false},
{filepath.Join(tmpDir, "subdir/sub.mp4"), true},
{filepath.Join(tmpDir, "subdir/sub.log"), false},
{filepath.Join(tmpDir, "subdir/sub.tmp"), false},
}
ctx := context.Background()
for _, scenario := range testScenarios {
info, err := os.Stat(scenario.path)
if err != nil {
t.Fatalf("failed to stat file %s: %v", scenario.path, err)
}
accepted := scanner.AcceptEntry(ctx, scenario.path, info, "")
if accepted != scenario.accepted {
t.Errorf("unexpected accept result for %s: expected %v, got %v",
scenario.path, scenario.accepted, accepted)
}
}
}
func TestScannerWithoutStashIgnore(t *testing.T) {
// Create temp directory structure (no .stashignore).
tmpDir := t.TempDir()
// Create test files.
createTestFileOnDisk(t, tmpDir, "video1.mp4")
createTestFileOnDisk(t, tmpDir, "video2.mp4")
createTestFileOnDisk(t, tmpDir, "subdir/video3.mp4")
// Create stashignore filter with library root (but no .stashignore file exists).
stashIgnoreFilter := &stashIgnorePathFilter{
filter: file.NewStashIgnoreFilter(),
libraryRoot: tmpDir,
}
// Create scanner.
scanner := &file.Scanner{
ScanFilters: []file.PathFilter{stashIgnoreFilter},
}
testScenarios := []struct {
path string
accepted bool
}{
{filepath.Join(tmpDir, "video1.mp4"), true},
{filepath.Join(tmpDir, "video2.mp4"), true},
{filepath.Join(tmpDir, "subdir/video3.mp4"), true},
}
ctx := context.Background()
for _, scenario := range testScenarios {
info, err := os.Stat(scenario.path)
if err != nil {
t.Fatalf("failed to stat file %s: %v", scenario.path, err)
}
accepted := scanner.AcceptEntry(ctx, scenario.path, info, "")
if accepted != scenario.accepted {
t.Errorf("unexpected accept result for %s: expected %v, got %v",
scenario.path, scenario.accepted, accepted)
}
}
}
func TestScannerWithNegationPattern(t *testing.T) {
// Create temp directory structure.
tmpDir := t.TempDir()
// Create test files.
createTestFileOnDisk(t, tmpDir, "file1.tmp")
createTestFileOnDisk(t, tmpDir, "file2.tmp")
createTestFileOnDisk(t, tmpDir, "keep_this.tmp")
createTestFileOnDisk(t, tmpDir, "video.mp4")
// Create .stashignore with negation.
stashignore := `*.tmp
!keep_this.tmp
`
createStashIgnoreFile(t, tmpDir, stashignore)
// Create stashignore filter with library root.
stashIgnoreFilter := &stashIgnorePathFilter{
filter: file.NewStashIgnoreFilter(),
libraryRoot: tmpDir,
}
// Create scanner.
scanner := &file.Scanner{
ScanFilters: []file.PathFilter{stashIgnoreFilter},
}
testScenarios := []struct {
path string
accepted bool
}{
{filepath.Join(tmpDir, "file1.tmp"), false},
{filepath.Join(tmpDir, "file2.tmp"), false},
{filepath.Join(tmpDir, "keep_this.tmp"), true},
{filepath.Join(tmpDir, "video.mp4"), true},
}
ctx := context.Background()
for _, scenario := range testScenarios {
info, err := os.Stat(scenario.path)
if err != nil {
t.Fatalf("failed to stat file %s: %v", scenario.path, err)
}
accepted := scanner.AcceptEntry(ctx, scenario.path, info, "")
if accepted != scenario.accepted {
t.Errorf("unexpected accept result for %s: expected %v, got %v",
scenario.path, scenario.accepted, accepted)
}
}
}

View file

@ -565,6 +565,7 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job.
j.setProgressFromFilename(sceneHash[0:2], progress)
// check if the scene exists
var walkErr error
if err := j.Repository.WithReadTxn(ctx, func(ctx context.Context) error {
var err error
scenes, err = j.getScenesWithHash(ctx, sceneHash)
@ -575,15 +576,18 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job.
if len(scenes) == 0 {
j.logDelete("deleting unused marker directory: %s", sceneHash)
j.deleteDir(path)
} else {
// get the markers now
for _, scene := range scenes {
thisMarkers, err := j.Repository.SceneMarker.FindBySceneID(ctx, scene.ID)
if err != nil {
return fmt.Errorf("error getting markers for scene: %v", err)
}
markers = append(markers, thisMarkers...)
// #5911 - we've just deleted the directory, so skip it in the walk to avoid errors
walkErr = fs.SkipDir
return nil
}
// get the markers now
for _, scene := range scenes {
thisMarkers, err := j.Repository.SceneMarker.FindBySceneID(ctx, scene.ID)
if err != nil {
return fmt.Errorf("error getting markers for scene: %v", err)
}
markers = append(markers, thisMarkers...)
}
return nil
@ -591,7 +595,7 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job.
logger.Error(err.Error())
}
return nil
return walkErr
}
filename := info.Name()

View file

@ -40,9 +40,10 @@ func (j *cleanJob) Execute(ctx context.Context, progress *job.Progress) error {
}
j.cleaner.Clean(ctx, file.CleanOptions{
Paths: j.input.Paths,
DryRun: j.input.DryRun,
PathFilter: newCleanFilter(instance.Config),
Paths: j.input.Paths,
DryRun: j.input.DryRun,
IgnoreZipFileContents: j.input.IgnoreZipFileContents,
PathFilter: newCleanFilter(instance.Config),
}, progress)
if job.IsCancelled(ctx) {
@ -154,11 +155,12 @@ func newCleanFilter(c *config.Config) *cleanFilter {
generatedPath: c.GetGeneratedPath(),
videoExcludeRegex: generateRegexps(c.GetExcludes()),
imageExcludeRegex: generateRegexps(c.GetImageExcludes()),
stashIgnoreFilter: file.NewStashIgnoreFilter(),
},
}
}
func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool {
func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool {
// #1102 - clean anything in generated path
generatedPath := f.generatedPath
@ -173,12 +175,18 @@ func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo)
}
if stash == nil {
logger.Infof("%s not in any stash library directories. Marking to clean: \"%s\"", fileOrFolder, path)
logger.Infof("%s not in any stash library directories. Marking to clean: %q", fileOrFolder, path)
return false
}
if fsutil.IsPathInDir(generatedPath, path) {
logger.Infof("%s is in generated path. Marking to clean: \"%s\"", fileOrFolder, path)
logger.Infof("%s is in generated path. Marking to clean: %q", fileOrFolder, path)
return false
}
// Check .stashignore files, bounded to the library root.
if !f.stashIgnoreFilter.Accept(ctx, path, info, stash.Path, zipFilePath) {
logger.Infof("%s is excluded due to .stashignore. Marking to clean: %q", fileOrFolder, path)
return false
}
@ -300,7 +308,10 @@ func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *fil
// only delete if the scene has no other files
if len(scene.Files.List()) <= 1 {
logger.Infof("Deleting scene %q since it has no other related files", scene.DisplayName())
if err := mgr.SceneService.Destroy(ctx, scene, sceneFileDeleter, true, false); err != nil {
const deleteGenerated = true
const deleteFile = false
const destroyFileEntry = false
if err := mgr.SceneService.Destroy(ctx, scene, sceneFileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil {
return err
}
@ -421,7 +432,10 @@ func (h *cleanHandler) handleRelatedImages(ctx context.Context, fileDeleter *fil
if len(i.Files.List()) <= 1 {
logger.Infof("Deleting image %q since it has no other related files", i.DisplayName())
if err := mgr.ImageService.Destroy(ctx, i, imageFileDeleter, true, false); err != nil {
const deleteGenerated = true
const deleteFile = false
const destroyFileEntry = false
if err := mgr.ImageService.Destroy(ctx, i, imageFileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil {
return err
}

View file

@ -651,6 +651,7 @@ func (t *ExportTask) exportImage(ctx context.Context, wg *sync.WaitGroup, jobCha
galleryReader := r.Gallery
performerReader := r.Performer
tagReader := r.Tag
imageReader := r.Image
for s := range jobChan {
imageHash := s.Checksum
@ -665,14 +666,17 @@ func (t *ExportTask) exportImage(ctx context.Context, wg *sync.WaitGroup, jobCha
continue
}
newImageJSON := image.ToBasicJSON(s)
newImageJSON, err := image.ToBasicJSON(ctx, imageReader, s)
if err != nil {
logger.Errorf("[images] <%s> error converting image to JSON: %v", imageHash, err)
continue
}
// export files
for _, f := range s.Files.List() {
t.exportFile(f)
}
var err error
newImageJSON.Studio, err = image.GetStudioName(ctx, studioReader, s)
if err != nil {
logger.Errorf("[images] <%s> error getting image studio name: %v", imageHash, err)
@ -779,6 +783,7 @@ func (t *ExportTask) exportGallery(ctx context.Context, wg *sync.WaitGroup, jobC
studioReader := r.Studio
performerReader := r.Performer
tagReader := r.Tag
galleryReader := r.Gallery
galleryChapterReader := r.GalleryChapter
for g := range jobChan {
@ -847,6 +852,12 @@ func (t *ExportTask) exportGallery(ctx context.Context, wg *sync.WaitGroup, jobC
newGalleryJSON.Tags = tag.GetNames(tags)
newGalleryJSON.CustomFields, err = galleryReader.GetCustomFields(ctx, g.ID)
if err != nil {
logger.Errorf("[galleries] <%s> error getting gallery custom fields: %v", g.DisplayName(), err)
continue
}
if t.includeDependencies {
if g.StudioID != nil {
t.studios.IDs = sliceutil.AppendUnique(t.studios.IDs, *g.StudioID)

View file

@ -29,6 +29,7 @@ type GenerateMetadataInput struct {
// Generate transcodes even if not required
ForceTranscodes bool `json:"forceTranscodes"`
Phashes bool `json:"phashes"`
ImagePhashes bool `json:"imagePhashes"`
InteractiveHeatmapsSpeeds bool `json:"interactiveHeatmapsSpeeds"`
ClipPreviews bool `json:"clipPreviews"`
ImageThumbnails bool `json:"imageThumbnails"`
@ -36,8 +37,14 @@ type GenerateMetadataInput struct {
SceneIDs []string `json:"sceneIDs"`
// marker ids to generate for
MarkerIDs []string `json:"markerIDs"`
// image ids to generate for
ImageIDs []string `json:"imageIDs"`
// gallery ids to generate for
GalleryIDs []string `json:"galleryIDs"`
// overwrite existing media
Overwrite bool `json:"overwrite"`
// paths to run generate on, in addition to the other ID lists
Paths []string `json:"paths"`
}
type GeneratePreviewOptionsInput struct {
@ -73,6 +80,7 @@ type totalsGenerate struct {
markers int64
transcodes int64
phashes int64
imagePhashes int64
interactiveHeatmapSpeeds int64
clipPreviews int64
imageThumbnails int64
@ -82,8 +90,9 @@ type totalsGenerate struct {
func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error {
var scenes []*models.Scene
var err error
var markers []*models.SceneMarker
var images []*models.Image
var err error
j.overwrite = j.input.Overwrite
j.fileNamingAlgo = config.GetInstance().GetVideoFileNamingAlgorithm()
@ -105,6 +114,14 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error
if err != nil {
logger.Error(err.Error())
}
imageIDs, err := stringslice.StringSliceToIntSlice(j.input.ImageIDs)
if err != nil {
logger.Error(err.Error())
}
galleryIDs, err := stringslice.StringSliceToIntSlice(j.input.GalleryIDs)
if err != nil {
logger.Error(err.Error())
}
g := &generate.Generator{
Encoder: instance.FFMpeg,
@ -118,8 +135,13 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error
r := j.repository
if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
qb := r.Scene
if len(j.input.SceneIDs) == 0 && len(j.input.MarkerIDs) == 0 {
j.queueTasks(ctx, g, queue)
if len(j.input.SceneIDs) == 0 &&
len(j.input.MarkerIDs) == 0 &&
len(j.input.ImageIDs) == 0 &&
len(j.input.GalleryIDs) == 0 &&
len(j.input.Paths) == 0 {
j.queueTasks(ctx, g, nil, queue)
} else {
if len(j.input.SceneIDs) > 0 {
scenes, err = qb.FindMany(ctx, sceneIDs)
@ -141,6 +163,38 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error
j.queueMarkerJob(g, m, queue)
}
}
if len(j.input.ImageIDs) > 0 {
images, err = r.Image.FindMany(ctx, imageIDs)
for _, i := range images {
if err := i.LoadFiles(ctx, r.Image); err != nil {
return err
}
j.queueImageJob(g, i, queue)
}
}
if len(j.input.GalleryIDs) > 0 {
for _, galleryID := range galleryIDs {
imgs, err := r.Image.FindByGalleryID(ctx, galleryID)
if err != nil {
return err
}
for _, img := range imgs {
if err := img.LoadFiles(ctx, r.Image); err != nil {
return err
}
j.queueImageJob(g, img, queue)
}
}
}
if len(j.input.Paths) > 0 {
paths := filterStashPaths(j.input.Paths)
j.queueTasks(ctx, g, paths, queue)
}
}
return nil
@ -172,14 +226,17 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error
if j.input.Phashes {
logMsg += fmt.Sprintf(" %d phashes", totals.phashes)
}
if j.input.ImagePhashes {
logMsg += fmt.Sprintf(" %d image phashes", totals.imagePhashes)
}
if j.input.InteractiveHeatmapsSpeeds {
logMsg += fmt.Sprintf(" %d heatmaps & speeds", totals.interactiveHeatmapSpeeds)
}
if j.input.ClipPreviews {
logMsg += fmt.Sprintf(" %d Image Clip Previews", totals.clipPreviews)
logMsg += fmt.Sprintf(" %d image clip previews", totals.clipPreviews)
}
if j.input.ImageThumbnails {
logMsg += fmt.Sprintf(" %d Image Thumbnails", totals.imageThumbnails)
logMsg += fmt.Sprintf(" %d image thumbnails", totals.imageThumbnails)
}
if logMsg == "Generating" {
logMsg = "Nothing selected to generate"
@ -231,17 +288,18 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error
return nil
}
func (j *GenerateJob) queueTasks(ctx context.Context, g *generate.Generator, queue chan<- Task) {
func (j *GenerateJob) queueTasks(ctx context.Context, g *generate.Generator, paths []string, queue chan<- Task) {
j.totals = totalsGenerate{}
j.queueScenesTasks(ctx, g, queue)
j.queueImagesTasks(ctx, g, queue)
j.queueScenesTasks(ctx, g, paths, queue)
j.queueImagesTasks(ctx, g, paths, queue)
}
func (j *GenerateJob) queueScenesTasks(ctx context.Context, g *generate.Generator, queue chan<- Task) {
func (j *GenerateJob) queueScenesTasks(ctx context.Context, g *generate.Generator, paths []string, queue chan<- Task) {
const batchSize = 1000
findFilter := models.BatchFindFilter(batchSize)
sceneFilter := scene.FilterFromPaths(paths)
r := j.repository
@ -250,7 +308,7 @@ func (j *GenerateJob) queueScenesTasks(ctx context.Context, g *generate.Generato
return
}
scenes, err := scene.Query(ctx, r.Scene, nil, findFilter)
scenes, err := scene.Query(ctx, r.Scene, sceneFilter, findFilter)
if err != nil {
logger.Errorf("Error encountered queuing files to scan: %s", err.Error())
return
@ -277,19 +335,20 @@ func (j *GenerateJob) queueScenesTasks(ctx context.Context, g *generate.Generato
}
}
func (j *GenerateJob) queueImagesTasks(ctx context.Context, g *generate.Generator, queue chan<- Task) {
func (j *GenerateJob) queueImagesTasks(ctx context.Context, g *generate.Generator, paths []string, queue chan<- Task) {
const batchSize = 1000
findFilter := models.BatchFindFilter(batchSize)
imageFilter := image.FilterFromPaths(paths)
r := j.repository
for more := j.input.ClipPreviews || j.input.ImageThumbnails; more; {
for more := j.input.ClipPreviews || j.input.ImageThumbnails || j.input.ImagePhashes; more; {
if job.IsCancelled(ctx) {
return
}
images, err := image.Query(ctx, r.Image, nil, findFilter)
images, err := image.Query(ctx, r.Image, imageFilter, findFilter)
if err != nil {
logger.Errorf("Error encountered queuing files to scan: %s", err.Error())
return
@ -411,12 +470,13 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator,
}
}
if j.input.Markers {
if j.input.Markers || j.input.MarkerImagePreviews || j.input.MarkerScreenshots {
task := &GenerateMarkersTask{
repository: r,
Scene: scene,
Overwrite: j.overwrite,
fileNamingAlgorithm: j.fileNamingAlgo,
VideoPreview: j.input.Markers,
ImagePreview: j.input.MarkerImagePreviews,
Screenshot: j.input.MarkerScreenshots,
@ -488,6 +548,9 @@ func (j *GenerateJob) queueMarkerJob(g *generate.Generator, marker *models.Scene
Marker: marker,
Overwrite: j.overwrite,
fileNamingAlgorithm: j.fileNamingAlgo,
VideoPreview: j.input.Markers,
ImagePreview: j.input.MarkerImagePreviews,
Screenshot: j.input.MarkerScreenshots,
generator: g,
}
j.totals.markers++
@ -521,4 +584,23 @@ func (j *GenerateJob) queueImageJob(g *generate.Generator, image *models.Image,
queue <- task
}
}
if j.input.ImagePhashes {
// generate for all files in image
for _, f := range image.Files.List() {
if imageFile, ok := f.(*models.ImageFile); ok {
task := &GenerateImagePhashTask{
repository: j.repository,
File: imageFile,
Overwrite: j.overwrite,
}
if task.required() {
j.totals.imagePhashes++
j.totals.tasks++
queue <- task
}
}
}
}
}

View file

@ -0,0 +1,103 @@
package manager
import (
"context"
"fmt"
"github.com/stashapp/stash/pkg/hash/imagephash"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
)
type GenerateImagePhashTask struct {
repository models.Repository
File *models.ImageFile
Overwrite bool
}
func (t *GenerateImagePhashTask) GetDescription() string {
return fmt.Sprintf("Generating phash for %s", t.File.Path)
}
func (t *GenerateImagePhashTask) Start(ctx context.Context) {
if !t.required() {
return
}
var hash int64
set := false
// #4393 - if there is a file with the same md5, we can use the same phash
// only use this if we're not overwriting
if !t.Overwrite {
existing, err := t.findExistingPhash(ctx)
if err != nil {
logger.Warnf("Error finding existing phash: %v", err)
} else if existing != nil {
logger.Infof("Using existing phash for %s", t.File.Path)
hash = existing.(int64)
set = true
}
}
if !set {
generated, err := imagephash.Generate(instance.FFMpeg, t.File)
if err != nil {
logger.Errorf("Error generating phash for %q: %v", t.File.Path, err)
logErrorOutput(err)
return
}
hash = int64(*generated)
}
r := t.repository
if err := r.WithTxn(ctx, func(ctx context.Context) error {
t.File.Fingerprints = t.File.Fingerprints.AppendUnique(models.Fingerprint{
Type: models.FingerprintTypePhash,
Fingerprint: hash,
})
return r.File.Update(ctx, t.File)
}); err != nil && ctx.Err() == nil {
logger.Errorf("Error setting phash: %v", err)
}
}
func (t *GenerateImagePhashTask) findExistingPhash(ctx context.Context) (interface{}, error) {
r := t.repository
var ret interface{}
if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
md5 := t.File.Fingerprints.Get(models.FingerprintTypeMD5)
// find other files with the same md5
files, err := r.File.FindByFingerprint(ctx, models.Fingerprint{
Type: models.FingerprintTypeMD5,
Fingerprint: md5,
})
if err != nil {
return fmt.Errorf("finding files by md5: %w", err)
}
// find the first file with a phash
for _, file := range files {
if phash := file.Base().Fingerprints.Get(models.FingerprintTypePhash); phash != nil {
ret = phash
return nil
}
}
return nil
}); err != nil {
return nil, err
}
return ret, nil
}
func (t *GenerateImagePhashTask) required() bool {
if t.Overwrite {
return true
}
return t.File.Fingerprints.Get(models.FingerprintTypePhash) == nil
}

View file

@ -18,6 +18,7 @@ type GenerateMarkersTask struct {
Overwrite bool
fileNamingAlgorithm models.HashAlgorithm
VideoPreview bool
ImagePreview bool
Screenshot bool
@ -115,9 +116,11 @@ func (t *GenerateMarkersTask) generateMarker(videoFile *models.VideoFile, scene
g := t.generator
if err := g.MarkerPreviewVideo(context.TODO(), videoFile.Path, sceneHash, seconds, sceneMarker.EndSeconds, instance.Config.GetPreviewAudio()); err != nil {
logger.Errorf("[generator] failed to generate marker video: %v", err)
logErrorOutput(err)
if t.VideoPreview {
if err := g.MarkerPreviewVideo(context.TODO(), videoFile.Path, sceneHash, seconds, sceneMarker.EndSeconds, instance.Config.GetPreviewAudio()); err != nil {
logger.Errorf("[generator] failed to generate marker video: %v", err)
logErrorOutput(err)
}
}
if t.ImagePreview {
@ -164,7 +167,7 @@ func (t *GenerateMarkersTask) markerExists(sceneChecksum string, seconds int) bo
return false
}
videoExists := t.videoExists(sceneChecksum, seconds)
videoExists := !t.VideoPreview || t.videoExists(sceneChecksum, seconds)
imageExists := !t.ImagePreview || t.imageExists(sceneChecksum, seconds)
screenshotExists := !t.Screenshot || t.screenshotExists(sceneChecksum, seconds)

View file

@ -44,7 +44,7 @@ func (t *GeneratePhashTask) Start(ctx context.Context) {
if !set {
generated, err := videophash.Generate(instance.FFMpeg, t.File)
if err != nil {
logger.Errorf("Error generating phash: %v", err)
logger.Errorf("Error generating phash for %q: %v", t.File.Path, err)
logErrorOutput(err)
return
}

View file

@ -34,7 +34,17 @@ func (t *GenerateSpriteTask) Start(ctx context.Context) {
sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)
imagePath := instance.Paths.Scene.GetSpriteImageFilePath(sceneHash)
vttPath := instance.Paths.Scene.GetSpriteVttFilePath(sceneHash)
generator, err := NewSpriteGenerator(*videoFile, sceneHash, imagePath, vttPath, 9, 9)
cfg := DefaultSpriteGeneratorConfig
cfg.SpriteSize = instance.Config.GetSpriteScreenshotSize()
if instance.Config.GetUseCustomSpriteInterval() {
cfg.MinimumSprites = instance.Config.GetMinimumSprites()
cfg.MaximumSprites = instance.Config.GetMaximumSprites()
cfg.SpriteInterval = instance.Config.GetSpriteInterval()
}
generator, err := NewSpriteGenerator(*videoFile, sceneHash, imagePath, vttPath, cfg)
if err != nil {
logger.Errorf("error creating sprite generator: %s", err.Error())

View file

@ -2,13 +2,17 @@ package manager
import (
"context"
"errors"
"fmt"
"io/fs"
"path/filepath"
"regexp"
"runtime/debug"
"sync"
"time"
"github.com/99designs/gqlgen/graphql/handler/lru"
"github.com/remeh/sizedwaitgroup"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/file/video"
@ -22,16 +26,18 @@ import (
"github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/scene/generate"
"github.com/stashapp/stash/pkg/txn"
"github.com/stashapp/stash/pkg/utils"
)
type scanner interface {
Scan(ctx context.Context, handlers []file.Handler, options file.ScanOptions, progressReporter file.ProgressReporter)
}
type ScanJob struct {
scanner scanner
scanner *file.Scanner
input ScanMetadataInput
subscriptions *subscriptionManager
fileQueue chan file.ScannedFile
count int
unmatchedCaptionFiles utils.MutexField[[]string]
}
func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error {
@ -55,22 +61,24 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error {
start := time.Now()
nTasks := cfg.GetParallelTasksWithAutoDetection()
const taskQueueSize = 200000
taskQueue := job.NewTaskQueue(ctx, progress, taskQueueSize, cfg.GetParallelTasksWithAutoDetection())
taskQueue := job.NewTaskQueue(ctx, progress, taskQueueSize, nTasks)
var minModTime time.Time
if j.input.Filter != nil && j.input.Filter.MinModTime != nil {
minModTime = *j.input.Filter.MinModTime
}
j.scanner.Scan(ctx, getScanHandlers(j.input, taskQueue, progress), file.ScanOptions{
Paths: paths,
ScanFilters: []file.PathFilter{newScanFilter(c, repo, minModTime)},
ZipFileExtensions: cfg.GetGalleryExtensions(),
ParallelTasks: cfg.GetParallelTasksWithAutoDetection(),
HandlerRequiredFilters: []file.Filter{newHandlerRequiredFilter(cfg, repo)},
Rescan: j.input.Rescan,
}, progress)
// HACK - these should really be set in the scanner initialization
j.scanner.FileHandlers = getScanHandlers(j.input, taskQueue, progress)
j.scanner.ScanFilters = []file.PathFilter{newScanFilter(c, repo, minModTime)}
j.scanner.HandlerRequiredFilters = []file.Filter{newHandlerRequiredFilter(cfg, repo)}
logger.Infof("Starting scan of %d paths with %d parallel tasks", len(paths), nTasks)
j.runJob(ctx, paths, nTasks, progress)
taskQueue.Close()
@ -80,12 +88,336 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error {
}
elapsed := time.Since(start)
logger.Info(fmt.Sprintf("Scan finished (%s)", elapsed))
logger.Infof("Scan finished (%s)", elapsed)
j.subscriptions.notify()
return nil
}
func (j *ScanJob) runJob(ctx context.Context, paths []string, nTasks int, progress *job.Progress) {
var wg sync.WaitGroup
wg.Add(1)
j.fileQueue = make(chan file.ScannedFile, scanQueueSize)
go func() {
defer func() {
wg.Done()
// handle panics in goroutine
if p := recover(); p != nil {
logger.Errorf("panic while queuing files for scan: %v", p)
logger.Errorf(string(debug.Stack()))
}
}()
if err := j.queueFiles(ctx, paths, progress); err != nil {
if errors.Is(err, context.Canceled) {
return
}
logger.Errorf("error queuing files for scan: %v", err)
return
}
logger.Infof("Finished adding files to queue. %d files queued", j.count)
}()
defer wg.Wait()
j.processQueue(ctx, nTasks, progress)
}
const scanQueueSize = 200000
func (j *ScanJob) queueFiles(ctx context.Context, paths []string, progress *job.Progress) error {
fs := &file.OsFS{}
defer func() {
close(j.fileQueue)
progress.AddTotal(j.count)
progress.Definite()
}()
var err error
progress.ExecuteTask("Walking directory tree", func() {
for _, p := range paths {
err = file.SymWalk(fs, p, j.queueFileFunc(ctx, fs, nil, progress))
if err != nil {
return
}
}
})
return err
}
func (j *ScanJob) queueFileFunc(ctx context.Context, f models.FS, zipFile *file.ScannedFile, progress *job.Progress) fs.WalkDirFunc {
return func(path string, d fs.DirEntry, err error) error {
if err != nil {
// don't let errors prevent scanning
logger.Errorf("error scanning %s: %v", path, err)
return nil
}
if err = ctx.Err(); err != nil {
return err
}
info, err := d.Info()
if err != nil {
logger.Errorf("reading info for %q: %v", path, err)
return nil
}
zipFilePath := ""
if zipFile != nil {
zipFilePath = zipFile.Path
}
if !j.scanner.AcceptEntry(ctx, path, info, zipFilePath) {
if info.IsDir() {
logger.Debugf("Skipping directory %s", path)
return fs.SkipDir
}
// we don't include caption files in the file scan, but we do need
// to handle them
if fsutil.MatchExtension(path, video.CaptionExts) {
fileRepo := j.scanner.Repository.File
matched := video.AssociateCaptions(ctx, path, j.scanner.Repository.TxnManager, fileRepo, fileRepo)
if !matched {
logger.Debugf("No matching video file found for caption file %s", path)
j.unmatchedCaptionFiles.SetFunc(func(files []string) []string {
return append(files, path)
})
}
return nil
}
logger.Debugf("Skipping file %s", path)
return nil
}
size, err := file.GetFileSize(f, path, info)
if err != nil {
return err
}
ff := file.ScannedFile{
BaseFile: &models.BaseFile{
DirEntry: models.DirEntry{
ModTime: file.ModTime(info),
},
Path: path,
Basename: filepath.Base(path),
Size: size,
},
FS: f,
Info: info,
}
if zipFile != nil {
ff.ZipFileID = &zipFile.ID
ff.ZipFile = zipFile
}
if info.IsDir() {
// handle folders immediately
if err := j.handleFolder(ctx, ff, progress); err != nil {
if !errors.Is(err, context.Canceled) {
logger.Errorf("error processing %q: %v", path, err)
}
// skip the directory since we won't be able to process the files anyway
return fs.SkipDir
}
return nil
}
// if zip file is present, we handle immediately
if zipFile != nil {
progress.ExecuteTask("Scanning "+path, func() {
// don't increment progress in zip files
if err := j.handleFile(ctx, ff, nil); err != nil {
if !errors.Is(err, context.Canceled) {
logger.Errorf("error processing %q: %v", path, err)
}
// don't return an error, just skip the file
}
})
return nil
}
logger.Tracef("Queueing file %s for scanning", path)
j.fileQueue <- ff
j.count++
return nil
}
}
func (j *ScanJob) processQueue(ctx context.Context, parallelTasks int, progress *job.Progress) {
if parallelTasks < 1 {
parallelTasks = 1
}
wg := sizedwaitgroup.New(parallelTasks)
func() {
defer func() {
wg.Wait()
// handle panics in goroutine
if p := recover(); p != nil {
logger.Errorf("panic while scanning files: %v", p)
logger.Errorf(string(debug.Stack()))
}
}()
for f := range j.fileQueue {
logger.Tracef("Processing queued file %s", f.Path)
if err := ctx.Err(); err != nil {
return
}
wg.Add()
ff := f
go func() {
defer wg.Done()
j.processQueueItem(ctx, ff, progress)
}()
}
}()
}
func (j *ScanJob) processQueueItem(ctx context.Context, f file.ScannedFile, progress *job.Progress) {
progress.ExecuteTask("Scanning "+f.Path, func() {
var err error
if f.Info.IsDir() {
err = j.handleFolder(ctx, f, progress)
} else {
err = j.handleFile(ctx, f, progress)
}
if err != nil && !errors.Is(err, context.Canceled) {
logger.Errorf("error processing %q: %v", f.Path, err)
}
})
}
func (j *ScanJob) handleFolder(ctx context.Context, f file.ScannedFile, progress *job.Progress) error {
if progress != nil {
defer progress.Increment()
}
_, err := j.scanner.ScanFolder(ctx, f)
if err != nil {
return err
}
return nil
}
func (j *ScanJob) handleFile(ctx context.Context, f file.ScannedFile, progress *job.Progress) error {
if progress != nil {
defer progress.Increment()
}
r, err := j.scanner.ScanFile(ctx, f)
if err != nil {
return err
}
// if this is a new video file, match it with any unmatched caption files
if r.New && len(j.unmatchedCaptionFiles.Get()) > 0 {
videoFile, _ := r.File.(*models.VideoFile)
if videoFile != nil {
// try to match any unmatched caption files to this video file
for _, captionPath := range j.unmatchedCaptionFiles.Get() {
if video.MatchesCaption(videoFile.Path, captionPath) {
video.AssociateCaptions(ctx, captionPath, j.scanner.Repository.TxnManager, j.scanner.Repository.File, j.scanner.Repository.File)
// remove from the unmatched list
j.unmatchedCaptionFiles.SetFunc(func(files []string) []string {
newFiles := make([]string, 0, len(files)-1)
for _, f := range files {
if f != captionPath {
newFiles = append(newFiles, f)
}
}
return newFiles
})
}
}
}
}
// clean captions - scene handler handles this as well, but
// unchanged files aren't processed by the scene handler
if r.IsUnchanged() {
videoFile, _ := r.File.(*models.VideoFile)
if videoFile != nil {
txnMgr := j.scanner.Repository.TxnManager
fileRepo := j.scanner.Repository.File
if err := txn.WithDatabase(ctx, txnMgr, func(ctx context.Context) error {
return video.CleanCaptions(ctx, videoFile, txnMgr, fileRepo)
}); err != nil {
logger.Errorf("Error cleaning captions: %v", err)
}
}
}
// handle rename should have already handled the contents of the zip file
// so shouldn't need to scan it again.
// Only scan zip contents if the file is new, the fingerprint changed,
// or if a force rescan was requested.
if j.scanner.IsZipFile(f.Info.Name()) && (r.New || r.FingerprintChanged || j.scanner.Rescan) {
ff := r.File
f.BaseFile = ff.Base()
// scan zip files with a different context that is not cancellable
// cancelling while scanning zip file contents results in the scan
// contents being partially completed
zipCtx := context.WithoutCancel(ctx)
if err := j.scanZipFile(zipCtx, f, progress); err != nil {
logger.Errorf("Error scanning zip file %q: %v", f.Path, err)
}
} else if r.Updated && j.scanner.IsZipFile(f.Info.Name()) {
logger.Debugf("Skipping zip file scan for %q: fingerprint unchanged", f.Path)
}
return nil
}
func (j *ScanJob) scanZipFile(ctx context.Context, f file.ScannedFile, progress *job.Progress) error {
zipFS, err := f.FS.OpenZip(f.Path, f.Size)
if err != nil {
if errors.Is(err, file.ErrNotReaderAt) {
// can't walk the zip file
// just return
logger.Debugf("Skipping zip file %q as it cannot be opened for walking", f.Path)
return nil
}
return err
}
defer zipFS.Close()
return file.SymWalk(zipFS, f.Path, j.queueFileFunc(ctx, zipFS, &f, progress))
}
type extensionConfig struct {
vidExt []string
imgExt []string
@ -117,11 +449,10 @@ type sceneFinder interface {
// handlerRequiredFilter returns true if a File's handler needs to be executed despite the file not being updated.
type handlerRequiredFilter struct {
extensionConfig
txnManager txn.Manager
SceneFinder sceneFinder
ImageFinder fileCounter
GalleryFinder galleryFinder
CaptionUpdater video.CaptionUpdater
txnManager txn.Manager
SceneFinder sceneFinder
ImageFinder fileCounter
GalleryFinder galleryFinder
FolderCache *lru.LRU[bool]
@ -137,7 +468,6 @@ func newHandlerRequiredFilter(c *config.Config, repo models.Repository) *handler
SceneFinder: repo.Scene,
ImageFinder: repo.Image,
GalleryFinder: repo.Gallery,
CaptionUpdater: repo.File,
FolderCache: lru.New[bool](processes * 2),
videoFileNamingAlgorithm: c.GetVideoFileNamingAlgorithm(),
}
@ -212,65 +542,35 @@ func (f *handlerRequiredFilter) Accept(ctx context.Context, ff models.File) bool
}
}
if isVideoFile {
// TODO - check if the cover exists
// hash := scene.GetHash(ff, f.videoFileNamingAlgorithm)
// ssPath := instance.Paths.Scene.GetScreenshotPath(hash)
// if exists, _ := fsutil.FileExists(ssPath); !exists {
// // if not, check if the file is a primary file for a scene
// scenes, err := f.SceneFinder.FindByPrimaryFileID(ctx, ff.Base().ID)
// if err != nil {
// // just ignore
// return false
// }
// if len(scenes) > 0 {
// // if it is, then it needs to be re-generated
// return true
// }
// }
// clean captions - scene handler handles this as well, but
// unchanged files aren't processed by the scene handler
videoFile, _ := ff.(*models.VideoFile)
if videoFile != nil {
if err := video.CleanCaptions(ctx, videoFile, f.txnManager, f.CaptionUpdater); err != nil {
logger.Errorf("Error cleaning captions: %v", err)
}
}
}
return false
}
type scanFilter struct {
extensionConfig
txnManager txn.Manager
FileFinder models.FileFinder
CaptionUpdater video.CaptionUpdater
txnManager txn.Manager
stashPaths config.StashConfigs
generatedPath string
videoExcludeRegex []*regexp.Regexp
imageExcludeRegex []*regexp.Regexp
minModTime time.Time
stashIgnoreFilter *file.StashIgnoreFilter
}
func newScanFilter(c *config.Config, repo models.Repository, minModTime time.Time) *scanFilter {
return &scanFilter{
extensionConfig: newExtensionConfig(c),
txnManager: repo.TxnManager,
FileFinder: repo.File,
CaptionUpdater: repo.File,
stashPaths: c.GetStashPaths(),
generatedPath: c.GetGeneratedPath(),
videoExcludeRegex: generateRegexps(c.GetExcludes()),
imageExcludeRegex: generateRegexps(c.GetImageExcludes()),
minModTime: minModTime,
stashIgnoreFilter: file.NewStashIgnoreFilter(),
}
}
func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool {
func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool {
if fsutil.IsPathInDir(f.generatedPath, path) {
logger.Warnf("Skipping %q as it overlaps with the generated folder", path)
return false
@ -287,19 +587,16 @@ func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo)
return false
}
// Check .stashignore files, bounded to the library root.
if !f.stashIgnoreFilter.Accept(ctx, path, info, s.Path, zipFilePath) {
logger.Debugf("Skipping %s due to .stashignore", path)
return false
}
isVideoFile := useAsVideo(path)
isImageFile := useAsImage(path)
isZipFile := fsutil.MatchExtension(path, f.zipExt)
// handle caption files
if fsutil.MatchExtension(path, video.CaptionExts) {
// we don't include caption files in the file scan, but we do need
// to handle them
video.AssociateCaptions(ctx, path, f.txnManager, f.FileFinder, f.CaptionUpdater)
return false
}
if !info.IsDir() && !isVideoFile && !isImageFile && !isZipFile {
logger.Debugf("Skipping %s as it does not match any known file extensions", path)
return false
@ -363,8 +660,9 @@ func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progre
&file.FilteredHandler{
Filter: file.FilterFunc(imageFileFilter),
Handler: &image.ScanHandler{
CreatorUpdater: r.Image,
GalleryFinder: r.Gallery,
CreatorUpdater: r.Image,
GalleryFinder: r.Gallery,
SceneFinderUpdater: r.Scene,
ScanGenerator: &imageGenerators{
input: options,
taskQueue: taskQueue,
@ -393,9 +691,10 @@ func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progre
&file.FilteredHandler{
Filter: file.FilterFunc(videoFileFilter),
Handler: &scene.ScanHandler{
CreatorUpdater: r.Scene,
CaptionUpdater: r.File,
PluginCache: pluginCache,
CreatorUpdater: r.Scene,
GalleryFinderUpdater: r.Gallery,
CaptionUpdater: r.File,
PluginCache: pluginCache,
ScanGenerator: &sceneGenerators{
input: options,
taskQueue: taskQueue,
@ -463,6 +762,29 @@ func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f model
}
}
if t.ScanGenerateImagePhashes {
progress.AddTotal(1)
phashFn := func(ctx context.Context) {
mgr := GetInstance()
// Only generate phash for image files, not video files
if imageFile, ok := f.(*models.ImageFile); ok {
taskPhash := GenerateImagePhashTask{
repository: mgr.Repository,
File: imageFile,
Overwrite: overwrite,
}
taskPhash.Start(ctx)
}
progress.Increment()
}
if g.sequentialScanning {
phashFn(ctx)
} else {
g.taskQueue.Add(fmt.Sprintf("Generating phash for %s", path), phashFn)
}
}
return nil
}

View file

@ -4,6 +4,7 @@ import (
"context"
"fmt"
"strconv"
"strings"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/match"
@ -12,6 +13,7 @@ import (
"github.com/stashapp/stash/pkg/sliceutil"
"github.com/stashapp/stash/pkg/stashbox"
"github.com/stashapp/stash/pkg/studio"
"github.com/stashapp/stash/pkg/tag"
)
// stashBoxBatchPerformerTagTask is used to tag or create performers from stash-box.
@ -275,6 +277,12 @@ func (t *stashBoxBatchStudioTagTask) getName() string {
}
func (t *stashBoxBatchStudioTagTask) Start(ctx context.Context) {
// Skip organized studios
if t.studio != nil && t.studio.Organized {
logger.Infof("Skipping organized studio %s", t.studio.Name)
return
}
studio, err := t.findStashBoxStudio(ctx)
if err != nil {
logger.Errorf("Error fetching studio data from stash-box: %v", err)
@ -523,3 +531,235 @@ func (t *stashBoxBatchStudioTagTask) processParentStudio(ctx context.Context, pa
return err
}
}
// stashBoxBatchTagTagTask is used to tag or create tags from stash-box.
//
// Two modes of operation:
// - Update existing tag: set tag to update from stash-box data
// - Create new tag: set name or stashID to search stash-box and create locally
type stashBoxBatchTagTagTask struct {
box *models.StashBox
name *string
stashID *string
tag *models.Tag
createParent bool
excludedFields []string
}
func (t *stashBoxBatchTagTagTask) getName() string {
switch {
case t.name != nil:
return *t.name
case t.stashID != nil:
return *t.stashID
case t.tag != nil:
return t.tag.Name
default:
return ""
}
}
func (t *stashBoxBatchTagTagTask) Start(ctx context.Context) {
scrapedTag, err := t.findStashBoxTag(ctx)
if err != nil {
logger.Errorf("Error fetching tag data from stash-box: %v", err)
return
}
excluded := map[string]bool{}
for _, field := range t.excludedFields {
excluded[field] = true
}
if scrapedTag != nil {
t.processMatchedTag(ctx, scrapedTag, excluded)
} else {
logger.Infof("No match found for %s", t.getName())
}
}
func (t *stashBoxBatchTagTagTask) GetDescription() string {
return fmt.Sprintf("Tagging tag %s from stash-box", t.getName())
}
func (t *stashBoxBatchTagTagTask) findStashBoxTag(ctx context.Context) (*models.ScrapedTag, error) {
var results []*models.ScrapedTag
var err error
r := instance.Repository
client := stashbox.NewClient(*t.box, stashbox.ExcludeTagPatterns(instance.Config.GetScraperExcludeTagPatterns()))
nameQuery := ""
switch {
case t.name != nil:
nameQuery = *t.name
results, err = client.QueryTag(ctx, *t.name)
case t.stashID != nil:
results, err = client.QueryTag(ctx, *t.stashID)
case t.tag != nil:
var remoteID string
if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
if !t.tag.StashIDs.Loaded() {
err = t.tag.LoadStashIDs(ctx, r.Tag)
if err != nil {
return err
}
}
for _, id := range t.tag.StashIDs.List() {
if id.Endpoint == t.box.Endpoint {
remoteID = id.StashID
}
}
return nil
}); err != nil {
return nil, err
}
if remoteID != "" {
results, err = client.QueryTag(ctx, remoteID)
} else {
nameQuery = t.tag.Name
results, err = client.QueryTag(ctx, t.tag.Name)
}
}
if err != nil {
return nil, err
}
if len(results) == 0 {
return nil, nil
}
var result *models.ScrapedTag
// QueryTag returns tags that partially match the name, so find the exact match if searching by name
if nameQuery != "" {
for _, r := range results {
if strings.EqualFold(r.Name, nameQuery) {
result = r
break
}
}
} else {
result = results[0]
}
if result == nil {
return nil, nil
}
if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
return match.ScrapedTagHierarchy(ctx, r.Tag, result, t.box.Endpoint)
}); err != nil {
return nil, err
}
return result, nil
}
func (t *stashBoxBatchTagTagTask) processParentTag(ctx context.Context, parent *models.ScrapedTag, excluded map[string]bool) error {
if parent.StoredID == nil {
// Create new parent tag
newParentTag := parent.ToTag(t.box.Endpoint, excluded)
r := instance.Repository
err := r.WithTxn(ctx, func(ctx context.Context) error {
qb := r.Tag
if err := tag.ValidateCreate(ctx, *newParentTag, qb); err != nil {
return err
}
if err := qb.Create(ctx, &models.CreateTagInput{Tag: newParentTag}); err != nil {
return err
}
storedID := strconv.Itoa(newParentTag.ID)
parent.StoredID = &storedID
return nil
})
if err != nil {
logger.Errorf("Failed to create parent tag %s: %v", parent.Name, err)
} else {
logger.Infof("Created parent tag %s", parent.Name)
}
return err
}
// Parent already exists — nothing to update for categories
return nil
}
func (t *stashBoxBatchTagTagTask) processMatchedTag(ctx context.Context, s *models.ScrapedTag, excluded map[string]bool) {
// Determine the tag ID to update — either from the task's tag or from the
// StoredID set by match.ScrapedTag (when batch adding by name and the tag
// already exists locally).
tagID := 0
if t.tag != nil {
tagID = t.tag.ID
} else if s.StoredID != nil {
tagID, _ = strconv.Atoi(*s.StoredID)
}
if s.Parent != nil && t.createParent {
if err := t.processParentTag(ctx, s.Parent, excluded); err != nil {
return
}
}
if tagID > 0 {
r := instance.Repository
err := r.WithTxn(ctx, func(ctx context.Context) error {
qb := r.Tag
existingStashIDs, err := qb.GetStashIDs(ctx, tagID)
if err != nil {
return err
}
storedID := strconv.Itoa(tagID)
partial := s.ToPartial(storedID, t.box.Endpoint, excluded, existingStashIDs)
if err := tag.ValidateUpdate(ctx, tagID, partial, qb); err != nil {
return err
}
if _, err := qb.UpdatePartial(ctx, tagID, partial); err != nil {
return err
}
return nil
})
if err != nil {
logger.Errorf("Failed to update tag %s: %v", s.Name, err)
} else {
logger.Infof("Updated tag %s", s.Name)
}
} else if s.Name != "" {
// no existing tag, create a new one
newTag := s.ToTag(t.box.Endpoint, excluded)
r := instance.Repository
err := r.WithTxn(ctx, func(ctx context.Context) error {
qb := r.Tag
if err := tag.ValidateCreate(ctx, *newTag, qb); err != nil {
return err
}
if err := qb.Create(ctx, &models.CreateTagInput{Tag: newTag}); err != nil {
return err
}
return nil
})
if err != nil {
logger.Errorf("Failed to create tag %s: %v", s.Name, err)
} else {
logger.Infof("Created tag %s", s.Name)
}
}
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 850 1250"><path fill="#fff" fill-rule="evenodd" d="M480.72 36.58c-10.56.01-21.69 1.97-34.35 9.32-28.85 23.98-32.38 59.58-34.6 98.83 2.47 24.57 6.03 47.44-4.17 73.49-11.7 1.42-24.56 5.66-37.6 3.92-13.16-1.77-28.8.54-35.24 7.21-20.4 27.97-29.09 64.17-40.61 100.3-11.53 36.13-18.01 65.15-18.22 70.6-2.83 15.39-2.51 33.25 2.95 50.89 11.33 29.32 21.89 56.36 33.98 87.95.79 1.81-3.34 3.11-2.83 5.1s1.53 4.44 1.83 6.54c.31 2.09 1.54 4.43 1.7 6.54.16 2.12 3.58 4.21 3.68 6.28.2 4.13.72 8.09 1.09 11.57.35 3.46.94 6.44 2.03 8.64l10.12 14.39c1.57 2.4 7.54-.99 8.58.93 2.17 4.07 8.13 2.75 10.29.62 1.53-1.52 3.99 2.03 6.07 1.8 2.09-.21 4.15-.6 6.13-1.25-7.69 22.24-20 44.47-23.07 66.71-8.78 29.38-21.69 44.27-26.36 88.13-1.3 17.3-.07 34.6 0 51.89l-9.88 32.94L280 901.69c-19.57 58.62-20.55 113.74-24.72 174.61l-6.59 12.36v18.93c-3.64 30.03-16.39 58.76-20.53 88.73.83 2.15 1.18 4.75 3.06 6.7 3.02 3.11 8.17 5.46 11.77 8.47 4.47 3.09 9.69 2.36 14.92 1.66 54.03-16.43 20.05-34.21 27.03-52.83.98-17.36 1.37-34.58 13.18-51.07 2.31-9.33-1.25-18.67-3.3-28.01 5.88-43.68 48.11-124.8 50.24-134.25l15.66-65.07c16.14-20.59 23.3-52.18 32.94-80.72l43.65-86.48 21.31-51.73c9.84-.07 19-10.61 23.16-16.63l9.88.82c-.86 10.43.07 20.87 10.71 31.29l27.19 132.61 16.47 37.88 11.54 52.71 15.65 104.61 1.64 42-4.11 14.83.82 30.48-4.94 38.72-11.1 31.68c-1.36 4.76-.24 11.37 2.86 14.2 20.97 20.41 52.34-.35 55.72-11.59.63-18.29 1.25-32.22 4.12-50.52l-.54-35.66-2.47-4.94v-12.36l-3.3-18.12c4.08-41.75 10.1-82.84 9.89-126.02-.54-35.83-6.63-70.28-16.48-103.77-2.39-22.61-4.97-45.14-6.92-67.98-1.4-16.4-2.48-32.94-2.95-49.8 1.15-11.25 2.29-21.55 3.17-31.35 2.86-32.14 2.9-58.75-8.16-94.67l-2.43-6.59 4.95-3.29c3.65-.82 3.05 4.44 11.53-3.3 1.52 1.27 4.12.91 8.72-2.47 3.65-9.09 11.8-16.92 9.84-28.1-6.39-4.95-2.79-3.45-21.03-13.08-2.21-9.68.25-18.2.82-27.17.17-15.34-1.29-29.93-5.11-44.61-5.09-19.73-8.47-39.44-13-59.18-5.76-20.28-4.09-56.63-11.97-89.87-9.42-15.61-19.5-15.96-21.42-17.38l-11.08-31.12c3.56 6.11 6.41 12.23 10.71 18.33l-5.77-19.15c5.23 9.53 16.02 18.6 12.92 28.83 3.22-8.21-1.64-14.95-5.61-21.85 3.91 6.17 10.05 11.8 14.92 17.73-7.23-10.16-14.41-20.32-14.83-30.48l-4.94-24.71c3.06 7.81 2.43 15.63 9.65 23.44-4.6-8.91-1.76-17.82-2.24-26.75 1.76 9.07 3.58 18.13 9.89 27.19-6.47-12.36-6.92-24.71-9.06-37.06-2.43-8.59-5.1-17.18-6.13-25.77-1.33-11.18-.67-22.37-1.29-33.54l.83-42.84c-2.94-12.9-5.05-25.81-16.48-38.72-7.84-10.63-17.89-16.1-28.01-21.41-8-.74-15.89-1.91-24.11-1.91Zm69.42 210.28c.48 1.07 1.02 2.13 1.6 3.19-.6-1.05-1.15-2.11-1.6-3.19Zm-208.4 107.77c1.56 22.71.19 45.21 3.62 67.92 5.74 22.85 11.94 41.69 16.71 66.52.16 12.4 1.33 22.33-6.98 55.89l-4.83-13.46c-1.32-2.05-2.21-4.2-6.7-5.73-4.36-16.84-11.3-31.44-13.26-48.73-2.25-19.97-6.79-34.65-11.92-51.22 1.11-16.12 7.62-28.97 12.69-46.1l10.66-25.09Zm205.09 80.48 7.85 29.71c2.19 15.92 5.13 31.36 15.73 41.92l-6.7 17.77c.05-5.06.11-10.09-.59-15.14-3.01-7.45-6-12.83-9-17.16l-19.23-27.39c-1.85-2.04-4.76-4.67-8.16-7l-.87-7.26-1.76-7.29c10.68-.94 17.45-4.12 22.73-8.16Zm38.42 99.9 1.17 8.13-4.07 11.07-6.13-12.51 7-3.5 2.03-3.19Z"/></svg>

After

Width:  |  Height:  |  Size: 3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 850 1250"><path fill="#fff" d="M376.15 24.84c-3.58 1.02-10.08 2.26-14.39 2.85-9.64 1.31-12.93 2.19-18.26 4.82-14.17 7.01-23.67 19.21-29.66 37.99-3.36 10.52-4.53 19.14-5.26 37.62-.37 9.64-1.1 18.99-1.75 21.99-3.43 16.14-10.3 24.47-23.3 28.12-5.33 1.53-7.52 1.75-20.09 1.75-7.67 0-14.03.07-14.03.15 0 .15 1.46.58 3.29 1.02s3.29 1.02 3.29 1.31-3.07.95-6.72 1.46c-12.2 1.75-22.72 5.41-27.98 9.86-1.68 1.46-1.97 1.97-1.17 2.56.73.44.37 1.1-1.68 2.7-3.36 2.7-6.72 7.38-8.18 11.4-1.31 3.8-1.46 11.25-.22 13.95s2.12 2.48 2.12-.58c0-7.38 8.04-12.2 16.44-9.86 1.83.51 3.29 1.02 3.29 1.17 0 .22-1.39 2.05-3.14 4.16-1.75 2.19-3.87 5.33-4.75 7.16-1.83 3.94-2.92 9.13-1.68 8.4 1.17-.73 1.1-.44-1.39 4.82-2.63 5.62-4.38 13.22-4.38 19.21 0 5.11 1.02 10.08 2.12 10.45.44.15.8 1.53.8 3.07 0 3.87 2.48 11.4 5.04 15.41 2.19 3.58 5.84 6.72 8.69 7.6l1.68.51-1.53-2.48c-.8-1.31-1.46-3.29-1.46-4.38 0-2.48 2.78-10.08 3.73-10.08.37 0 .66 1.75.66 3.8.07 2.12.51 4.68 1.1 5.7 1.02 1.75 1.1 1.75.66-1.17-.51-3.29.88-11.25 1.97-11.25.58 0 .22 5.55-.51 7.89-.58 2.05 1.31 10.67 3.36 15.19 2.19 4.82 3.36 6.06 2.78 2.85-.58-3.07.66-2.05 1.53 1.31.95 3.14 5.04 10.45 5.62 9.86.15-.22 0-2.7-.37-5.55-.73-5.84 0-13.88 1.83-19.87 1.24-4.09 5.84-13.81 7.82-16.44s2.12-.58.22 3.14c-3.36 6.72-5.62 15.92-6.06 24.69-.44 8.62.15 14.9 1.31 14.9.37 0 .66-.44.58-.95-.22-2.05.15-4.89.66-4.89.29 0 .44 2.12.29 4.68-.73 11.47 4.46 26.44 10.23 29.51 3.8 2.05 3.94 2.7 3.43 25.86-.51 24.4-1.24 31.63-8.33 81.3-5.92 41.57-6.57 46.9-8.4 66.26-2.19 24.11-2.12 23.74-12.35 43.25-2.19 4.16-3 6.57-3 8.91q0 3.21 4.24 5.26l4.16 1.97.22 6.65c.22 7.52.22 7.6 6.57 7.6h3.14l-.44 5.26c-1.1 12.71-2.78 24.18-4.53 32-3 13.22-2.85 14.76 2.41 25.93 4.09 8.84 13.37 25.71 18.85 34.33 2.05 3.07 3 3.87 5.99 4.82 3.43 1.02 3.58 1.24 4.09 4.31.29 1.83 1.1 19.72 1.68 39.81.58 20.09 1.75 45.07 2.56 55.52 3.43 43.83 3.36 54.64-.73 100.08-1.17 13.22-3.29 43.03-4.75 66.11-1.39 23.08-4.02 65.16-5.84 93.5-3.65 58.07-6.65 108.33-7.38 124.33l-.51 10.74 1.75.37c.95.22 4.53 1.1 8.04 1.83 8.99 2.12 14.83 4.46 21.33 8.55 5.33 3.36 5.62 3.73 5.26 5.77-1.9 11.18-.58 19.87 3.87 24.62 7.52 7.96 25.06 8.55 34.84 1.17 5.92-4.46 8.4-11.4 8.4-23.52v-6.79l7.74-3.43c4.31-1.97 9.42-4.09 11.54-4.82l3.73-1.24-.22-3.21c-.07-1.75-.95-19.65-1.97-39.74-3.8-75.83-4.68-90.58-9.13-148.29-4.46-57.71-5.19-71.44-5.62-104.83-.58-41.71.44-58.37 7.38-121.7 4.82-43.83 8.33-79.26 9.2-93.8.95-14.61 2.26-23.96 4.38-30.68 1.97-6.5 2.63-7.38 3.51-5.11.44 1.02 4.38 11.03 8.77 22.28 4.38 11.25 14.46 36.96 22.35 57.2 7.96 20.16 20.75 52.23 28.49 71.22 32.14 78.75 37.55 96.06 49.02 159.03 2.26 12.27 5.41 29.37 7.01 37.99 6.72 36.6 12.56 72.32 27.03 165.38 6.28 40.1 6.94 43.76 8.33 44.19.8.29 10.88 3.8 22.35 7.74l20.89 7.16.51 6.28c1.46 19.07 7.89 26.3 24.03 26.96 6.5.29 7.38.15 11.1-1.61 4.46-2.26 9.72-7.6 12.05-12.49 1.97-4.09 2.19-12.49.44-19.07-.66-2.48-1.17-4.68-1.17-4.89 0-.15 2.19-1.61 4.82-3.14 5.7-3.29 9.57-7.52 10.23-11.03.73-3.94-.88-11.91-7.38-36.31-7.67-28.78-33.24-118.05-39.45-137.77-2.63-8.25-10.52-32.07-17.6-52.96-17.82-52.81-19.21-57.93-34.63-124.91-8.99-39.3-18.7-80.79-30.68-130.76-5.55-23.3-11.18-47.77-12.42-54.42-2.63-14.03-5.84-24.11-13.15-41.27-2.92-6.87-6.65-16.07-8.25-20.45l-2.92-8.04-.22-18.63c-.07-10.23-.51-22.06-.95-26.22s-.58-7.89-.37-8.25c.29-.44 1.46-1.17 2.78-1.68 2.19-.95 2.34-.88 5.99 2.56 7.16 6.79 15.49 7.38 27.03 1.9 3.07-1.46 6.43-3.29 7.6-4.09l2.05-1.46 4.09 5.55 4.16 5.48 4.53-3.58c6.36-5.04 11.25-7.96 15.41-9.06 3-.8 4.02-1.61 6.79-5.11 4.53-5.99 8.99-14.54 13.73-26.88 2.26-5.84 6.28-15.19 8.91-20.82 18.34-39.23 23.08-55.96 19.58-69.32-.66-2.41-4.31-10.74-8.11-18.63-17.17-34.99-29.37-64.87-43.03-105.26-8.18-24.25-11.4-31.78-15.71-37.04-1.97-2.34-4.97-6.43-6.57-9.2-3.87-6.43-4.97-7.09-11.69-7.01-3.87.07-8.77-.73-16.8-2.56-7.6-1.83-14.83-2.92-21.99-3.51l-10.74-.88 1.31-3.8c11.18-32.43 14.03-69.18 7.38-96.79-6.43-27.25-27.9-59.46-47.92-71.95-3.07-1.9-8.4-4.46-11.91-5.62-5.7-2.05-7.38-2.26-16.58-2.48-9.06-.22-10.96 0-16.8 1.61Zm125.64 306.44c7.3 11.25 14.76 27.76 25.2 55.74 9.72 26.15 10.96 32.51 8.33 41.42-1.31 4.31-13.37 28.85-15.85 32.29-.95 1.31-3.14 2.63-5.84 3.65-5.55 1.97-8.55 4.82-8.55 7.89 0 1.83.73 3.07 3.29 5.55 1.83 1.75 3.29 3.58 3.29 4.09 0 .58-.95 1.53-2.05 2.19-1.17.73-3.51 2.78-5.19 4.6-2.92 3.21-3.07 3.51-1.9 4.75 1.1 1.24 1.1 1.39-.15 1.68-5.62 1.61-5.26 1.75-13.44-6.21-4.24-4.16-8.91-8.18-10.37-8.91-1.39-.8-2.78-2.12-3-2.92-.22-.88-.8-6.57-1.31-12.78-1.61-20.97 1.97-48.36 10.81-82.25 1.75-6.87 4.82-18.7 6.79-26.3 1.97-7.67 4.16-17.39 4.89-21.77.66-4.31 1.39-7.82 1.53-7.82s1.75 2.26 3.51 5.11ZM283.01 578.92c0 9.72-.66 11.61-2.19 6.79-.95-2.7-2.19-14.76-1.68-15.78.15-.29 1.1-.51 2.12-.51h1.75v9.5Zm-.73 60.92c.58.07 1.31.88 1.53 1.9.73 2.78.73 10.23.07 10.23-1.53 0-7.45-10.3-7.45-12.93 0-.22 1.1-.07 2.41.22 1.24.29 2.85.51 3.43.58Z" vector-effect="non-scaling-stroke"/></svg>

After

Width:  |  Height:  |  Size: 4.8 KiB

Some files were not shown because too many files have changed in this diff Show more