Merge branch 'develop' into airplay-signed-urls

This commit is contained in:
modal-error 2026-05-03 09:50:43 -04:00 committed by GitHub
commit a704edf238
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
315 changed files with 13400 additions and 8357 deletions

View file

@ -6,6 +6,15 @@ body:
attributes:
value: |
Thanks for taking the time to fill out this bug report!
- type: checkboxes
id: confirm-troubleshooting
attributes:
label: Have you enabled troubleshooting mode?
description: |
To ensure the bug is not caused by custom modifications or plugins make sure to enable troubleshooting mode before filing the report. In Stash go to Settings and click **Troubleshooting mode** and then retest to see if the bug still occurs.
options:
- label: I confirm that the troubleshooting mode is enabled.
required: true
- type: textarea
id: description
attributes:
@ -61,4 +70,4 @@ body:
attributes:
label: Relevant log output
description: Please copy and paste any relevant log output from Settings > Logs. This will be automatically formatted into code, so no need for backticks.
render: shell
render: shell

28
.github/workflows/build-compiler.yml vendored Normal file
View file

@ -0,0 +1,28 @@
# Builds the cross-compiler Docker image and pushes it to GHCR.
# Triggered manually (workflow_dispatch) whenever docker/compiler/Dockerfile changes;
# bump the version tag in COMPILER_IMAGE before dispatching.
name: Compiler Build
on:
  workflow_dispatch:
env:
  COMPILER_IMAGE: ghcr.io/stashapp/compiler:14
jobs:
  build-compiler:
    runs-on: ubuntu-24.04
    steps:
      - uses: actions/checkout@v6
      # GITHUB_TOKEN is sufficient to push packages to GHCR under the repo owner
      - uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - uses: docker/setup-buildx-action@v3
      - uses: docker/build-push-action@v6
        with:
          push: true
          # build from the docker/compiler subdirectory of the default checkout context
          context: "{{defaultContext}}:docker/compiler"
          # publish both the pinned version tag and the floating :latest tag
          tags: |
            ${{ env.COMPILER_IMAGE }}
            ghcr.io/stashapp/compiler:latest
          # NOTE: `mode` is a cache-to (exporter) option only; it is not valid on
          # cache-from, so it is specified only on the export side below.
          cache-from: type=gha,scope=all
          cache-to: type=gha,scope=all,mode=max

View file

@ -2,7 +2,7 @@ name: Build
on:
push:
branches:
branches:
- develop
- master
- 'releases/**'
@ -15,50 +15,165 @@ concurrency:
cancel-in-progress: true
env:
COMPILER_IMAGE: stashapp/compiler:12
COMPILER_IMAGE: ghcr.io/stashapp/compiler:14
jobs:
build:
runs-on: ubuntu-22.04
# Job 1: Generate code and build UI
# Runs natively (no Docker) — go generate/gqlgen and node don't need cross-compilers.
# Produces artifacts (generated Go files + UI build) consumed by test and build jobs.
generate:
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v2
- name: Checkout
run: git fetch --prune --unshallow --tags
- uses: actions/checkout@v6
with:
fetch-depth: 0
fetch-tags: true
- name: Setup Go
uses: actions/setup-go@v5
uses: actions/setup-go@v6
with:
go-version-file: 'go.mod'
- name: Pull compiler image
run: docker pull $COMPILER_IMAGE
- name: Cache node modules
uses: actions/cache@v3
env:
cache-name: cache-node_modules
# pnpm version is read from the packageManager field in package.json
# pinned to a commit: pnpm/action-setup releases 4.3 and 4.4 are broken
- name: Install pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
with:
path: ui/v2.5/node_modules
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/pnpm-lock.yaml') }}
package_json_file: ui/v2.5/package.json
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: '20'
cache: 'pnpm'
cache-dependency-path: ui/v2.5/pnpm-lock.yaml
- name: Install UI dependencies
run: cd ui/v2.5 && pnpm install --frozen-lockfile
- name: Generate
run: make generate
- name: Cache UI build
uses: actions/cache@v3
uses: actions/cache@v5
id: cache-ui
env:
cache-name: cache-ui
with:
path: ui/v2.5/build
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/pnpm-lock.yaml', 'ui/v2.5/public/**', 'ui/v2.5/src/**', 'graphql/**/*.graphql') }}
key: ${{ runner.os }}-ui-build-${{ hashFiles('ui/v2.5/pnpm-lock.yaml', 'ui/v2.5/public/**', 'ui/v2.5/src/**', 'graphql/**/*.graphql') }}
- name: Cache go build
uses: actions/cache@v3
env:
# increment the number suffix to bump the cache
cache-name: cache-go-cache-1
- name: Validate UI
# skip UI validation for pull requests if UI is unchanged
if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }}
run: make validate-ui
- name: Build UI
# skip UI build for pull requests if UI is unchanged (UI was cached)
if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }}
run: make ui
# Bundle generated Go files + UI build for downstream jobs (test + build)
- name: Upload generated artifacts
uses: actions/upload-artifact@v7
with:
name: generated
retention-days: 1
path: |
internal/api/generated_exec.go
internal/api/generated_models.go
ui/v2.5/build/
ui/login/locales/
# Job 2: Integration tests
# Runs natively (no Docker) — only needs Go + GCC (for CGO/SQLite), both on ubuntu-22.04.
# Runs in parallel with the build matrix jobs.
test:
needs: generate
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v6
- name: Setup Go
uses: actions/setup-go@v6
with:
go-version-file: 'go.mod'
# Places generated Go files + UI build into the working tree so the build compiles
- name: Download generated artifacts
uses: actions/download-artifact@v8
with:
name: generated
- name: Test Backend
run: make it
# Job 3: Cross-compile for all platforms
# Each platform gets its own runner and Docker container (ghcr.io/stashapp/compiler:13).
# Each build-cc-* make target is self-contained (sets its own GOOS/GOARCH/CC),
# so running them in separate containers is functionally identical to one container.
# Runs in parallel with the test job.
build:
needs: generate
runs-on: ubuntu-24.04
strategy:
fail-fast: false
matrix:
include:
- platform: windows
make-target: build-cc-windows
artifact-paths: |
dist/stash-win.exe
tag: win
- platform: macos
make-target: build-cc-macos
artifact-paths: |
dist/stash-macos
dist/Stash.app.zip
tag: osx
- platform: linux
make-target: build-cc-linux
artifact-paths: |
dist/stash-linux
tag: linux
- platform: linux-arm64v8
make-target: build-cc-linux-arm64v8
artifact-paths: |
dist/stash-linux-arm64v8
tag: arm
- platform: linux-arm32v7
make-target: build-cc-linux-arm32v7
artifact-paths: |
dist/stash-linux-arm32v7
tag: arm
- platform: linux-arm32v6
make-target: build-cc-linux-arm32v6
artifact-paths: |
dist/stash-linux-arm32v6
tag: arm
- platform: freebsd
make-target: build-cc-freebsd
artifact-paths: |
dist/stash-freebsd
tag: freebsd
steps:
- uses: actions/checkout@v6
with:
fetch-depth: 0
fetch-tags: true
- name: Download generated artifacts
uses: actions/download-artifact@v8
with:
name: generated
- name: Cache Go build
uses: actions/cache@v5
with:
path: .go-cache
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('go.mod', '**/go.sum') }}
key: ${{ runner.os }}-go-cache-${{ matrix.platform }}-${{ hashFiles('go.mod', '**/go.sum') }}
# kept separate to test timings
- name: pull compiler image
run: docker pull $COMPILER_IMAGE
- name: Start build container
env:
@ -67,45 +182,50 @@ jobs:
mkdir -p .go-cache
docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated --env OFFICIAL_BUILD=${{ env.official-build }} -w /stash $COMPILER_IMAGE tail -f /dev/null
- name: Pre-install
run: docker exec -t build /bin/bash -c "make CI=1 pre-ui"
- name: Generate
run: docker exec -t build /bin/bash -c "make generate"
- name: Validate UI
# skip UI validation for pull requests if UI is unchanged
if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }}
run: docker exec -t build /bin/bash -c "make validate-ui"
# Static validation happens in the linter workflow in parallel to this workflow
# Run Dynamic validation here, to make sure we pass all the projects integration tests
- name: Test Backend
run: docker exec -t build /bin/bash -c "make it"
- name: Build UI
# skip UI build for pull requests if UI is unchanged (UI was cached)
# this means that the build version/time may be incorrect if the UI is
# not changed in a pull request
if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }}
run: docker exec -t build /bin/bash -c "make ui"
- name: Compile for all supported platforms
run: |
docker exec -t build /bin/bash -c "make build-cc-windows"
docker exec -t build /bin/bash -c "make build-cc-macos"
docker exec -t build /bin/bash -c "make build-cc-linux"
docker exec -t build /bin/bash -c "make build-cc-linux-arm64v8"
docker exec -t build /bin/bash -c "make build-cc-linux-arm32v7"
docker exec -t build /bin/bash -c "make build-cc-linux-arm32v6"
docker exec -t build /bin/bash -c "make build-cc-freebsd"
- name: Zip UI
run: docker exec -t build /bin/bash -c "make zip-ui"
- name: Build (${{ matrix.platform }})
run: docker exec -t build /bin/bash -c "make ${{ matrix.make-target }}"
- name: Cleanup build container
run: docker rm -f -v build
- name: Upload build artifact
uses: actions/upload-artifact@v7
with:
name: build-${{ matrix.platform }}
retention-days: 1
path: ${{ matrix.artifact-paths }}
# Job 4: Release
# Waits for both test and build to pass, then collects all platform artifacts
# into dist/ for checksums, GitHub releases, and multi-arch Docker push.
release:
needs: [test, build]
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v6
with:
fetch-depth: 0
fetch-tags: true
# Downloads all artifacts (generated + 7 platform builds) into artifacts/ subdirectories
- name: Download all build artifacts
uses: actions/download-artifact@v8
with:
path: artifacts
# Reassemble platform binaries from matrix job artifacts into a single dist/ directory
# make sure that artifacts have executable bit set
# upload-artifact (v4 and later) strips the common path prefix (dist/), so files are at the artifact root
- name: Collect binaries
run: |
mkdir -p dist
cp artifacts/build-*/* dist/
chmod +x dist/*
- name: Zip UI
run: |
cd artifacts/generated/ui/v2.5/build && zip -r ../../../../../dist/stash-ui.zip .
- name: Generate checksums
run: |
git describe --tags --exclude latest_develop | tee CHECKSUMS_SHA1
@ -116,7 +236,7 @@ jobs:
- name: Upload Windows binary
# only upload binaries for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v7
with:
name: stash-win.exe
path: dist/stash-win.exe
@ -124,15 +244,23 @@ jobs:
- name: Upload macOS binary
# only upload binaries for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v7
with:
name: stash-macos
path: dist/stash-macos
- name: Upload macOS bundle
# only upload binaries for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
uses: actions/upload-artifact@v7
with:
name: Stash.app.zip
path: dist/Stash.app.zip
- name: Upload Linux binary
# only upload binaries for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v7
with:
name: stash-linux
path: dist/stash-linux
@ -140,14 +268,14 @@ jobs:
- name: Upload UI
# only upload for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v7
with:
name: stash-ui.zip
path: dist/stash-ui.zip
- name: Update latest_develop tag
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
run : git tag -f latest_develop; git push -f --tags
run: git tag -f latest_develop; git push -f --tags
- name: Development Release
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
@ -197,7 +325,7 @@ jobs:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
run: |
docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64
docker run --rm --privileged tonistiigi/binfmt
docker info
docker buildx create --name builder --use
docker buildx inspect --bootstrap
@ -213,7 +341,7 @@ jobs:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
run: |
docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64
docker run --rm --privileged tonistiigi/binfmt
docker info
docker buildx create --name builder --use
docker buildx inspect --bootstrap

View file

@ -9,65 +9,24 @@ on:
- 'releases/**'
pull_request:
env:
COMPILER_IMAGE: stashapp/compiler:12
jobs:
golangci:
name: lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Checkout
run: git fetch --prune --unshallow --tags
- name: Setup Go
uses: actions/setup-go@v5
# no tags or depth needed for lint
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
with:
go-version-file: 'go.mod'
- name: Pull compiler image
run: docker pull $COMPILER_IMAGE
- name: Start build container
run: |
mkdir -p .go-cache
docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated -w /stash $COMPILER_IMAGE tail -f /dev/null
# generate-backend runs natively (just go generate + touch-ui) — no Docker needed
- name: Generate Backend
run: docker exec -t build /bin/bash -c "make generate-backend"
run: make generate-backend
## WARN
## using v1, update in a later PR
- name: Run golangci-lint
uses: golangci/golangci-lint-action@v6
uses: golangci/golangci-lint-action@v8
with:
# Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
version: latest
# Optional: working directory, useful for monorepos
# working-directory: somedir
# Optional: golangci-lint command line arguments.
#
# Note: By default, the `.golangci.yml` file should be at the root of the repository.
# The location of the configuration file can be changed by using `--config=`
args: --timeout=5m
# Optional: show only new issues if it's a pull request. The default value is `false`.
# only-new-issues: true
# Optional: if set to true, then all caching functionality will be completely disabled,
# takes precedence over all other caching options.
# skip-cache: true
# Optional: if set to true, then the action won't cache or restore ~/go/pkg.
# skip-pkg-cache: true
# Optional: if set to true, then the action won't cache or restore ~/.cache/go-build.
# skip-build-cache: true
# Optional: The mode to install golangci-lint. It can be 'binary' or 'goinstall'.
# install-mode: "goinstall"
- name: Cleanup build container
run: docker rm -f -v build
version: v2.11.4

View file

@ -1,87 +1,100 @@
# options for analysis running
run:
timeout: 5m
version: "2"
linters:
disable-all: true
default: none
enable:
# Default set of linters from golangci-lint
- errcheck
- gosimple
- govet
- ineffassign
- staticcheck
- typecheck
- unused
# Linters added by the stash project.
# - contextcheck
- copyloopvar
- dogsled
- errcheck
- errchkjson
- errorlint
# - exhaustive
- gocritic
# - goerr113
- gofmt
# - gomnd
# - ifshort
- govet
- ineffassign
- misspell
# - nakedret
- noctx
# TODO - fix these in a later PR
# - noctx
- revive
- rowserrcheck
- sqlclosecheck
# Project-specific linter overrides
linters-settings:
gofmt:
simplify: false
errorlint:
# Disable errorf because there are false positives, where you don't want to wrap
# an error.
errorf: false
asserts: true
comparison: true
revive:
ignore-generated-header: true
severity: error
confidence: 0.8
rules:
- name: blank-imports
disabled: true
- name: context-as-argument
- name: context-keys-type
- name: dot-imports
- name: error-return
- name: error-strings
- name: error-naming
- name: exported
disabled: true
- name: if-return
disabled: true
- name: increment-decrement
- name: var-naming
disabled: true
- name: var-declaration
- name: package-comments
- name: range
- name: receiver-naming
- name: time-naming
- name: unexported-return
disabled: true
- name: indent-error-flow
disabled: true
- name: errorf
- name: empty-block
disabled: true
- name: superfluous-else
- name: unused-parameter
disabled: true
- name: unreachable-code
- name: redefines-builtin-id
rowserrcheck:
packages:
- github.com/jmoiron/sqlx
- staticcheck
- unused
settings:
staticcheck:
checks:
- all
# we specify (unnecessary) embedded fields for clarity in many places
- -QF1008
# there's lots of misnamed (eg intId instead of intID) fields in the code.
# it's not exactly world-ending, so I'm deferring fixing these for now
- -ST1003
errorlint:
errorf: false
asserts: true
comparison: true
revive:
confidence: 0.8
severity: error
rules:
- name: blank-imports
disabled: true
- name: context-as-argument
- name: context-keys-type
- name: dot-imports
- name: error-return
- name: error-strings
- name: error-naming
- name: exported
disabled: true
- name: if-return
disabled: true
- name: increment-decrement
- name: var-naming
disabled: true
- name: var-declaration
- name: package-comments
- name: range
- name: receiver-naming
- name: time-naming
- name: unexported-return
disabled: true
- name: indent-error-flow
disabled: true
- name: errorf
- name: empty-block
disabled: true
- name: superfluous-else
- name: unused-parameter
disabled: true
- name: unreachable-code
- name: redefines-builtin-id
rowserrcheck:
packages:
- github.com/jmoiron/sqlx
exclusions:
generated: lax
presets:
- comments
- common-false-positives
- legacy
- std-error-handling
paths:
- third_party$
- builtin$
- examples$
formatters:
enable:
- gofmt
settings:
gofmt:
simplify: false
exclusions:
generated: lax
paths:
- third_party$
- builtin$
- examples$

View file

@ -50,7 +50,7 @@ export CGO_ENABLED := 1
# define COMPILER_IMAGE for cross-compilation docker container
ifndef COMPILER_IMAGE
COMPILER_IMAGE := stashapp/compiler:latest
COMPILER_IMAGE := ghcr.io/stashapp/compiler:latest
endif
.PHONY: release
@ -129,7 +129,7 @@ phasher: build-flags
# builds dynamically-linked debug binaries
.PHONY: build
build: stash phasher
build: stash
# builds dynamically-linked PIE release binaries
.PHONY: build-release
@ -187,8 +187,6 @@ build-cc-macos:
# Combine into universal binaries
lipo -create -output dist/stash-macos dist/stash-macos-intel dist/stash-macos-arm
rm dist/stash-macos-intel dist/stash-macos-arm
lipo -create -output dist/phasher-macos dist/phasher-macos-intel dist/phasher-macos-arm
rm dist/phasher-macos-intel dist/phasher-macos-arm
# Place into bundle and zip up
rm -rf dist/Stash.app
@ -198,6 +196,16 @@ build-cc-macos:
cd dist && rm -f Stash.app.zip && zip -r Stash.app.zip Stash.app
rm -rf dist/Stash.app
.PHONY: build-cc-macos-phasher
build-cc-macos-phasher:
make build-cc-macos-arm
make build-cc-macos-intel
# Combine into universal binaries
lipo -create -output dist/phasher-macos dist/phasher-macos-intel dist/phasher-macos-arm
rm dist/phasher-macos-intel dist/phasher-macos-arm
# do not bundle phasher
.PHONY: build-cc-freebsd
build-cc-freebsd: export GOOS := freebsd
build-cc-freebsd: export GOARCH := amd64

View file

@ -9,14 +9,14 @@
[![GitHub release (latest by date)](https://img.shields.io/github/v/release/stashapp/stash?logo=github)](https://github.com/stashapp/stash/releases/latest)
[![GitHub issues by-label](https://img.shields.io/github/issues-raw/stashapp/stash/bounty)](https://github.com/stashapp/stash/labels/bounty)
### **Stash is a self-hosted webapp written in Go which organizes and serves your diverse content collection, catering to both your SFW and NSFW needs.**
<h3>Stash is a self-hosted webapp written in Go which organizes and serves your diverse content collection, catering to both your SFW and NSFW needs.</h3>
![Screenshot of Stash web application interface](docs/readme_assets/demo_image.png)
* Stash gathers information about videos in your collection from the internet, and is extensible through the use of community-built plugins for a large number of content producers and sites.
* Stash supports a wide variety of both video and image formats.
* You can tag videos and find them later.
* Stash provides statistics about performers, tags, studios and more.
- Stash gathers information about videos in your collection from the internet, and is extensible through the use of community-built plugins for a large number of content producers and sites.
- Stash supports a wide variety of both video and image formats.
- You can tag videos and find them later.
- Stash provides statistics about performers, tags, studios and more.
You can [watch a SFW demo video](https://vimeo.com/545323354) to see it in action.
@ -24,17 +24,20 @@ For further information you can consult the [documentation](https://docs.stashap
# Installing Stash
> [!tip]
> Step-by-step instructions are available at [docs.stashapp.cc/installation](https://docs.stashapp.cc/installation/).
#### Windows Users:
As of version 0.27.0, Stash no longer supports _Windows 7, 8, Server 2008 and Server 2012._
At least Windows 10 or Server 2016 is required.
#### Mac Users:
As of version 0.29.0, Stash requires _macOS 11 Big Sur_ or later.
Stash can still be run through docker on older versions of macOS.
> [!important]
> **Windows Users**
>
> As of version 0.27.0, Stash no longer supports _Windows 7, 8, Server 2008 and Server 2012._
> At least Windows 10 or Server 2016 is required.
>
> **macOS Users**
>
> As of version 0.29.0, Stash requires _macOS 11 Big Sur_ or later.
> As of version 0.32.0, Stash requires _macOS 12 Monterey_ or later.
> Stash can still be run through Docker on older versions of macOS.
<img src="docs/readme_assets/windows_logo.svg" width="100%" height="75"> Windows | <img src="docs/readme_assets/mac_logo.svg" width="100%" height="75"> macOS | <img src="docs/readme_assets/linux_logo.svg" width="100%" height="75"> Linux | <img src="docs/readme_assets/docker_logo.svg" width="100%" height="75"> Docker
:---:|:---:|:---:|:---:
@ -85,23 +88,36 @@ The badge below shows the current translation status of Stash across all support
Need help or want to get involved? Start with the documentation, then reach out to the community if you need further assistance.
- Documentation
- Official docs: https://docs.stashapp.cc - official guides guides and troubleshooting.
- In-app manual: press <kbd>Shift</kbd> + <kbd>?</kbd> in the app or view the manual online: https://docs.stashapp.cc/in-app-manual.
- FAQ: https://discourse.stashapp.cc/c/support/faq/28 - common questions and answers.
- Community wiki: https://discourse.stashapp.cc/tags/c/community-wiki/22/stash - guides, how-tos and tips.
### Documentation
- [Official documentation](https://docs.stashapp.cc) - official guides and troubleshooting.
- [In-app manual](https://docs.stashapp.cc/in-app-manual) press <kbd>Shift</kbd> + <kbd>?</kbd> in the app or view the manual online.
- [FAQ](https://discourse.stashapp.cc/c/support/faq/28) - common questions and answers.
- [Community wiki](https://discourse.stashapp.cc/tags/c/community-wiki/22/stash) - guides, how-tos and tips.
- Community & discussion
- Community forum: https://discourse.stashapp.cc - community support, feature requests and discussions.
- Discord: https://discord.gg/2TsNFKt - real-time chat and community support.
- GitHub discussions: https://github.com/stashapp/stash/discussions - community support and feature discussions.
- Lemmy community: https://discuss.online/c/stashapp - Reddit-style community space.
### Community & discussion
- [Community forum](https://discourse.stashapp.cc) - community support, feature requests and discussions.
- [Discord](https://discord.gg/2TsNFKt) - real-time chat and community support.
- [GitHub discussions](https://github.com/stashapp/stash/discussions) - community support and feature discussions.
- [Lemmy community](https://discuss.online/c/stashapp) - board-style community space.
- Community scrapers & plugins
- Metadata sources: https://docs.stashapp.cc/metadata-sources/
- Plugins: https://docs.stashapp.cc/plugins/
- Themes: https://docs.stashapp.cc/themes/
- Other projects: https://docs.stashapp.cc/other-projects/
### Community scrapers & plugins
- [Metadata sources](https://docs.stashapp.cc/metadata-sources/)
- [Plugins](https://docs.stashapp.cc/plugins/)
- [Themes](https://docs.stashapp.cc/themes/)
- [Other projects](https://docs.stashapp.cc/other-projects/)
# Architecture
## Backend
- Go
- GraphQL API
- SQLite
## Frontend
- React
- TypeScript
# For Developers

View file

@ -148,7 +148,7 @@ func recoverPanic() {
exitCode = 1
logger.Errorf("panic: %v\n%s", err, debug.Stack())
if desktop.IsDesktop() {
desktop.FatalError(fmt.Errorf("Panic: %v", err))
desktop.FatalError(fmt.Errorf("panic: %v", err))
}
}
}

View file

@ -12,7 +12,7 @@ RUN if [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then BIN=stash-linux-arm32v6; \
FROM --platform=$TARGETPLATFORM alpine:latest AS app
COPY --from=binary /stash /usr/bin/
RUN apk add --no-cache ca-certificates python3 py3-requests py3-requests-toolbelt py3-lxml py3-pip ffmpeg tzdata vips vips-tools \
RUN apk add --no-cache ca-certificates python3 py3-requests py3-requests-toolbelt py3-lxml py3-pip ffmpeg tzdata vips vips-tools vips-heif \
&& pip install --break-system-packages mechanicalsoup cloudscraper stashapp-tools
ENV STASH_CONFIG_FILE=/root/.stash/config.yml

View file

@ -1 +0,0 @@
*.sdk.tar.*

View file

@ -1,82 +1,86 @@
FROM golang:1.24.3
### OSXCROSS
FROM debian:bookworm AS osxcross
# add osxcross
WORKDIR /tmp/osxcross
ARG OSXCROSS_REVISION=5e1b71fcceb23952f3229995edca1b6231525b5b
ADD --checksum=sha256:d3f771bbc20612fea577b18a71be3af2eb5ad2dd44624196cf55de866d008647 https://codeload.github.com/tpoechtrager/osxcross/tar.gz/${OSXCROSS_REVISION} /tmp/osxcross.tar.gz
LABEL maintainer="https://discord.gg/2TsNFKt"
ARG OSX_SDK_VERSION=12.3
ARG OSX_SDK_DOWNLOAD_FILE=MacOSX${OSX_SDK_VERSION}.sdk.tar.xz
ARG OSX_SDK_DOWNLOAD_URL=https://github.com/joseluisq/macosx-sdks/releases/download/${OSX_SDK_VERSION}/${OSX_SDK_DOWNLOAD_FILE}
ADD --checksum=sha256:3abd261ceb483c44295a6623fdffe5d44fc4ac2c872526576ec5ab5ad0f6e26c ${OSX_SDK_DOWNLOAD_URL} /tmp/osxcross/tarballs/${OSX_SDK_DOWNLOAD_FILE}
RUN apt-get update && apt-get install -y apt-transport-https ca-certificates gnupg
ENV UNATTENDED=yes \
SDK_VERSION=${OSX_SDK_VERSION} \
OSX_VERSION_MIN=12.0
RUN apt update && \
apt install -y --no-install-recommends \
bash ca-certificates clang cmake git patch libssl-dev bzip2 cpio libbz2-dev libxml2-dev make python3 xz-utils zlib1g-dev
# lzma-dev libxml2-dev xz
RUN tar --strip=1 -C /tmp/osxcross -xf /tmp/osxcross.tar.gz
RUN ./build.sh
RUN mkdir -p /etc/apt/keyrings
### FREEBSD cross-compilation stage
# use alpine for cacheable image since apt is notorious for not caching
FROM alpine:3 AS freebsd
# match golang latest
# https://go.dev/wiki/FreeBSD
ARG FREEBSD_VERSION=12.4
ADD --checksum=sha256:581c7edacfd2fca2bdf5791f667402d22fccd8a5e184635e0cac075564d57aa8 \
http://ftp-archive.freebsd.org/mirror/FreeBSD-Archive/old-releases/amd64/${FREEBSD_VERSION}-RELEASE/base.txz \
/tmp/base.txz
ADD https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key nodesource.gpg.key
RUN cat nodesource.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && rm nodesource.gpg.key
RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_24.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
WORKDIR /opt/cross-freebsd
RUN apk add --no-cache tar xz
RUN tar -xf /tmp/base.txz --strip-components=1 ./usr/lib ./usr/include ./lib
RUN cd /opt/cross-freebsd/usr/lib && \
find . -type l -exec sh -c ' \
for link; do \
target=$(readlink "$link"); \
case "$target" in \
/lib/*) ln -sf "/opt/cross-freebsd$target" "$link";; \
esac; \
done \
' sh {} + && \
ln -s libc++.a libstdc++.a && \
ln -s libc++.so libstdc++.so
RUN apt-get update && \
apt-get install -y --no-install-recommends \
git make tar bash nodejs zip \
clang llvm-dev cmake patch libxml2-dev uuid-dev libssl-dev xz-utils \
bzip2 gzip sed cpio libbz2-dev zlib1g-dev \
gcc-mingw-w64 \
gcc-arm-linux-gnueabi libc-dev-armel-cross linux-libc-dev-armel-cross \
gcc-aarch64-linux-gnu libc-dev-arm64-cross && \
rm -rf /var/lib/apt/lists/*;
### BUILDER
FROM golang:1.25.9 AS builder
ENV PATH=/opt/osx-ndk-x86/bin:$PATH
# copy in nodejs instead of using nodesource :thumbsup:
COPY --from=docker.io/library/node:24-bookworm /usr/local /usr/local
# copy in osxcross
COPY --from=osxcross /tmp/osxcross/target/lib /usr/lib
COPY --from=osxcross /tmp/osxcross/target /opt/osx-ndk-x86
# copy in cross-freebsd
COPY --from=freebsd /opt/cross-freebsd /opt/cross-freebsd
# pnpm install with npm
RUN npm install -g pnpm
# FreeBSD cross-compilation setup
# https://github.com/smartmontools/docker-build/blob/6b8c92560d17d325310ba02d9f5a4b250cb0764a/Dockerfile#L66
ENV FREEBSD_VERSION 13.4
ENV FREEBSD_DOWNLOAD_URL http://ftp.plusline.de/FreeBSD/releases/amd64/${FREEBSD_VERSION}-RELEASE/base.txz
ENV FREEBSD_SHA 8e13b0a93daba349b8d28ad246d7beb327659b2ef4fe44d89f447392daec5a7c
# git for getting hash
# make and bash for building
RUN cd /tmp && \
curl -o base.txz $FREEBSD_DOWNLOAD_URL && \
echo "$FREEBSD_SHA base.txz" | sha256sum -c - && \
mkdir -p /opt/cross-freebsd && \
cd /opt/cross-freebsd && \
tar -xf /tmp/base.txz ./lib/ ./usr/lib/ ./usr/include/ && \
rm -f /tmp/base.txz && \
cd /opt/cross-freebsd/usr/lib && \
find . -xtype l | xargs ls -l | grep ' /lib/' | awk '{print "ln -sf /opt/cross-freebsd"$11 " " $9}' | /bin/sh && \
ln -s libc++.a libstdc++.a && \
ln -s libc++.so libstdc++.so
# macOS cross-compilation setup
ENV OSX_SDK_VERSION 11.3
ENV OSX_SDK_DOWNLOAD_FILE MacOSX${OSX_SDK_VERSION}.sdk.tar.xz
ENV OSX_SDK_DOWNLOAD_URL https://github.com/phracker/MacOSX-SDKs/releases/download/${OSX_SDK_VERSION}/${OSX_SDK_DOWNLOAD_FILE}
ENV OSX_SDK_SHA cd4f08a75577145b8f05245a2975f7c81401d75e9535dcffbb879ee1deefcbf4
ENV OSXCROSS_REVISION 5e1b71fcceb23952f3229995edca1b6231525b5b
ENV OSXCROSS_DOWNLOAD_URL https://codeload.github.com/tpoechtrager/osxcross/tar.gz/${OSXCROSS_REVISION}
ENV OSXCROSS_SHA d3f771bbc20612fea577b18a71be3af2eb5ad2dd44624196cf55de866d008647
RUN cd /tmp && \
curl -o osxcross.tar.gz $OSXCROSS_DOWNLOAD_URL && \
echo "$OSXCROSS_SHA osxcross.tar.gz" | sha256sum -c - && \
mkdir osxcross && \
tar --strip=1 -C osxcross -xf osxcross.tar.gz && \
rm -f osxcross.tar.gz && \
curl -Lo $OSX_SDK_DOWNLOAD_FILE $OSX_SDK_DOWNLOAD_URL && \
echo "$OSX_SDK_SHA $OSX_SDK_DOWNLOAD_FILE" | sha256sum -c - && \
mv $OSX_SDK_DOWNLOAD_FILE osxcross/tarballs/ && \
UNATTENDED=yes SDK_VERSION=$OSX_SDK_VERSION OSX_VERSION_MIN=10.10 osxcross/build.sh && \
cp osxcross/target/lib/* /usr/lib/ && \
mv osxcross/target /opt/osx-ndk-x86 && \
rm -rf /tmp/osxcross
ENV PATH /opt/osx-ndk-x86/bin:$PATH
RUN mkdir -p /root/.ssh && \
chmod 0700 /root/.ssh && \
ssh-keyscan github.com > /root/.ssh/known_hosts
# ignore "dubious ownership" errors
# clang for macos
# zip for stashapp.zip
# gcc-extensions for cross-arch build
# we still target arm soft float?
RUN apt-get update && \
apt-get install -y --no-install-recommends \
git make bash \
clang zip \
gcc-mingw-w64 \
gcc-arm-linux-gnueabi \
libc-dev-armel-cross linux-libc-dev-armel-cross \
gcc-aarch64-linux-gnu libc-dev-arm64-cross && \
rm -rf /var/lib/apt/lists/*;
RUN git config --global safe.directory '*'
# To test locally:
# make generate
# make ui
# cd docker/compiler
# make build
# docker run --rm -v /PATH_TO_STASH:/stash -w /stash -i -t stashapp/compiler:latest make build-cc-all
# # binaries will show up in /dist
# docker build . -t ghcr.io/stashapp/compiler:latest
# docker run --rm -v /PATH_TO_STASH:/stash -w /stash -i -t ghcr.io/stashapp/compiler:latest make build-cc-all
# # binaries will show up in /dist

View file

@ -1,16 +1,22 @@
host=ghcr.io
user=stashapp
repo=compiler
version=12
version=14
VERSION_IMAGE = ${host}/${user}/${repo}:${version}
LATEST_IMAGE = ${host}/${user}/${repo}:latest
latest:
docker build -t ${user}/${repo}:latest .
docker build -t ${LATEST_IMAGE} .
build:
docker build -t ${user}/${repo}:${version} -t ${user}/${repo}:latest .
docker build -t ${VERSION_IMAGE} -t ${LATEST_IMAGE} .
build-no-cache:
docker build --no-cache -t ${user}/${repo}:${version} -t ${user}/${repo}:latest .
docker build --no-cache -t ${VERSION_IMAGE} -t ${LATEST_IMAGE} .
install: build
docker push ${user}/${repo}:${version}
docker push ${user}/${repo}:latest
# requires docker login ghcr.io
# echo $CR_PAT | docker login ghcr.io -u USERNAME --password-stdin
push:
docker push ${VERSION_IMAGE}
docker push ${LATEST_IMAGE}

View file

@ -1,3 +1,3 @@
Modified from https://github.com/bep/dockerfiles/tree/master/ci-goreleaser
When the Dockerfile is changed, the version number should be incremented in the Makefile and the new version tag should be pushed to Docker Hub. The GitHub workflow files also need to be updated to pull the correct image tag.
When the Dockerfile is changed, the version number should be incremented in [.github/workflows/build-compiler.yml](../../.github/workflows/build-compiler.yml) and the workflow manually run from the Actions tab. `env: COMPILER_IMAGE` in [.github/workflows/build.yml](../../.github/workflows/build.yml) also needs to be updated to pull the correct image tag.

View file

@ -118,8 +118,8 @@ This project uses a modification of the [CI-GoReleaser](https://github.com/bep/d
To cross-compile the app yourself:
1. Run `make pre-ui`, `make generate` and `make ui` outside the container, to generate files and build the UI.
2. Pull the latest compiler image from Docker Hub: `docker pull stashapp/compiler`
3. Run `docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -it stashapp/compiler /bin/bash` to open a shell inside the container.
2. Pull the latest compiler image from GHCR: `docker pull ghcr.io/stashapp/compiler`
3. Run `docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -it ghcr.io/stashapp/compiler /bin/bash` to open a shell inside the container.
4. From inside the container, run `make build-cc-all` to build for all platforms, or run `make build-cc-{platform}` to build for a specific platform (have a look at the `Makefile` for the list of targets).
5. You will find the compiled binaries in `dist/`.

24
go.mod
View file

@ -1,6 +1,6 @@
module github.com/stashapp/stash
go 1.24.3
go 1.25.0
require (
github.com/99designs/gqlgen v0.17.73
@ -15,6 +15,7 @@ require (
github.com/disintegration/imaging v1.6.2
github.com/dop251/goja v0.0.0-20231027120936-b396bb4c349d
github.com/doug-martin/goqu/v9 v9.18.0
github.com/feederbox826/gosx-notifier v0.2.2
github.com/go-chi/chi/v5 v5.2.2
github.com/go-chi/cors v1.2.1
github.com/go-chi/httplog v0.3.1
@ -30,7 +31,6 @@ require (
github.com/jinzhu/copier v0.4.0
github.com/jmoiron/sqlx v1.4.0
github.com/json-iterator/go v1.1.12
github.com/kermieisinthehouse/gosx-notifier v0.1.2
github.com/kermieisinthehouse/systray v1.2.4
github.com/knadh/koanf/parsers/yaml v1.1.0
github.com/knadh/koanf/providers/env v1.1.0
@ -56,12 +56,12 @@ require (
github.com/vektra/mockery/v2 v2.10.0
github.com/xWTF/chardet v0.0.0-20230208095535-c780f2ac244e
github.com/zencoder/go-dash/v3 v3.0.2
golang.org/x/crypto v0.45.0
golang.org/x/image v0.18.0
golang.org/x/net v0.47.0
golang.org/x/sys v0.38.0
golang.org/x/term v0.37.0
golang.org/x/text v0.31.0
golang.org/x/crypto v0.48.0
golang.org/x/image v0.38.0
golang.org/x/net v0.50.0
golang.org/x/sys v0.41.0
golang.org/x/term v0.40.0
golang.org/x/text v0.35.0
golang.org/x/time v0.10.0
gopkg.in/guregu/null.v4 v4.0.0
gopkg.in/natefinch/lumberjack.v2 v2.2.1
@ -70,7 +70,7 @@ require (
require (
github.com/agnivade/levenshtein v1.2.1 // indirect
github.com/antchfx/xpath v1.3.5 // indirect
github.com/antchfx/xpath v1.3.6 // indirect
github.com/asticode/go-astikit v0.20.0 // indirect
github.com/asticode/go-astits v1.8.0 // indirect
github.com/chromedp/sysutil v1.1.0 // indirect
@ -121,9 +121,9 @@ require (
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
go.uber.org/atomic v1.11.0 // indirect
go.yaml.in/yaml/v3 v3.0.3 // indirect
golang.org/x/mod v0.29.0 // indirect
golang.org/x/sync v0.18.0 // indirect
golang.org/x/tools v0.38.0 // indirect
golang.org/x/mod v0.33.0 // indirect
golang.org/x/sync v0.20.0 // indirect
golang.org/x/tools v0.42.0 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

43
go.sum
View file

@ -87,8 +87,9 @@ github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kk
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
github.com/antchfx/htmlquery v1.3.5 h1:aYthDDClnG2a2xePf6tys/UyyM/kRcsFRm+ifhFKoU0=
github.com/antchfx/htmlquery v1.3.5/go.mod h1:5oyIPIa3ovYGtLqMPNjBF2Uf25NPCKsMjCnQ8lvjaoA=
github.com/antchfx/xpath v1.3.5 h1:PqbXLC3TkfeZyakF5eeh3NTWEbYl4VHNVeufANzDbKQ=
github.com/antchfx/xpath v1.3.5/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
github.com/antchfx/xpath v1.3.6 h1:s0y+ElRRtTQdfHP609qFu0+c6bglDv20pqOViQjjdPI=
github.com/antchfx/xpath v1.3.6/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
@ -187,6 +188,8 @@ github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
github.com/feederbox826/gosx-notifier v0.2.2 h1:26NkaJZ8Wzptx82R46c9pkVAcFwGSU7kxWrOKmRWlC0=
github.com/feederbox826/gosx-notifier v0.2.2/go.mod h1:R6rqw7VuwuiCuvsr7EOONmWq++CRA5Ijmkmx75/C3Fs=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU=
@ -389,8 +392,6 @@ github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kermieisinthehouse/gosx-notifier v0.1.2 h1:KV0KBeKK2B24kIHY7iK0jgS64Q05f4oB+hUZmsPodxQ=
github.com/kermieisinthehouse/gosx-notifier v0.1.2/go.mod h1:xyWT07azFtUOcHl96qMVvKhvKzsMcS7rKTHQyv8WTho=
github.com/kermieisinthehouse/systray v1.2.4 h1:pdH5vnl+KKjRrVCRU4g/2W1/0HVzuuJ6WXHlPPHYY6s=
github.com/kermieisinthehouse/systray v1.2.4/go.mod h1:axh6C/jNuSyC0QGtidZJURc9h+h41HNoMySoLVrhVR4=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
@ -667,8 +668,8 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=
golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@ -682,8 +683,8 @@ golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMk
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.18.0 h1:jGzIakQa/ZXI1I0Fxvaa9W7yP25TqT6cHIHn+6CqvSQ=
golang.org/x/image v0.18.0/go.mod h1:4yyo5vMFQjVjUcVk4jEQcU9MGy/rulF5WvUILseCM2E=
golang.org/x/image v0.38.0 h1:5l+q+Y9JDC7mBOMjo4/aPhMDcxEptsX+Tt3GgRQRPuE=
golang.org/x/image v0.38.0/go.mod h1:/3f6vaXC+6CEanU4KJxbcUZyEePbyKbaLoDOe4ehFYY=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@ -714,8 +715,8 @@ golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8=
golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -770,8 +771,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@ -806,8 +807,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sync v0.20.0 h1:e0PTpb7pjO8GAtTs2dQ6jYa5BWYlMuX047Dco/pItO4=
golang.org/x/sync v0.20.0/go.mod h1:9xrNwdLfx4jkKbNva9FpL6vEN7evnE43NNNJQ2LF3+0=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -894,8 +895,8 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@ -905,8 +906,8 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254=
golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg=
golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -923,8 +924,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/text v0.35.0 h1:JOVx6vVDFokkpaq1AEptVzLTpDe9KGpj5tR4/X+ybL8=
golang.org/x/text v0.35.0/go.mod h1:khi/HExzZJ2pGnjenulevKNX1W67CUy0AsXcNubPGCA=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@ -992,8 +993,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=
golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k=
golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

View file

@ -16,6 +16,9 @@ type Folder {
parent_folders: [Folder!]!
zip_file: BasicFile
"Returns direct sub-folders"
sub_folders: [Folder!]!
mod_time: Time!
created_at: Time!
@ -156,7 +159,7 @@ input MoveFilesInput {
input SetFingerprintsInput {
type: String!
"an null value will remove the fingerprint"
"a null value will remove the fingerprint"
value: String
}

View file

@ -152,15 +152,15 @@ input PerformerFilterType {
fake_tits: StringCriterionInput
"Filter by penis length value"
penis_length: FloatCriterionInput
"Filter by ciricumcision"
"Filter by circumcision"
circumcised: CircumcisionCriterionInput
"Deprecated: use career_start and career_end. This filter is non-functional."
career_length: StringCriterionInput
@deprecated(reason: "Use career_start and career_end")
"Filter by career start year"
career_start: IntCriterionInput
"Filter by career end year"
career_end: IntCriterionInput
"Filter by career start"
career_start: DateCriterionInput
"Filter by career end"
career_end: DateCriterionInput
"Filter by tattoos"
tattoos: StringCriterionInput
"Filter by piercings"
@ -249,9 +249,9 @@ input SceneMarkerFilterType {
updated_at: TimestampCriterionInput
"Filter by scene date"
scene_date: DateCriterionInput
"Filter by cscene reation time"
"Filter by scene creation time"
scene_created_at: TimestampCriterionInput
"Filter by lscene ast update time"
"Filter by scene last update time"
scene_updated_at: TimestampCriterionInput
"Filter by related scenes that meet this criteria"
scene_filter: SceneFilterType
@ -665,7 +665,7 @@ input TagFilterType {
"Filter by number of parent tags the tag has"
parent_count: IntCriterionInput
"Filter by number f child tags the tag has"
"Filter by number of child tags the tag has"
child_count: IntCriterionInput
"Filter by autotag ignore value"
@ -933,7 +933,7 @@ input GenderCriterionInput {
}
input CircumcisionCriterionInput {
value: [CircumisedEnum!]
value: [CircumcisedEnum!]
modifier: CriterionModifier!
}

View file

@ -99,6 +99,8 @@ input BulkGroupUpdateInput {
ids: [ID!]
# rating expressed as 1-100
rating100: Int
date: String
synopsis: String
studio_id: ID
director: String
urls: BulkUpdateStrings

View file

@ -131,6 +131,14 @@ type ScanMetadataOptions {
input CleanMetadataInput {
paths: [String!]
"""
Don't check zip file contents when determining whether to clean a file.
This can significantly speed up the clean process, but will potentially miss removed files within zip files.
Where users do not modify zip files contents directly, this should be safe to use.
Defaults to false.
"""
ignoreZipFileContents: Boolean
"Do a dry run. Don't delete any files"
dryRun: Boolean!
}

View file

@ -7,7 +7,7 @@ enum GenderEnum {
NON_BINARY
}
enum CircumisedEnum {
enum CircumcisedEnum {
CUT
UNCUT
}
@ -29,10 +29,10 @@ type Performer {
measurements: String
fake_tits: String
penis_length: Float
circumcised: CircumisedEnum
circumcised: CircumcisedEnum
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: Int
career_end: Int
career_start: String
career_end: String
tattoos: String
piercings: String
alias_list: [String!]!
@ -78,10 +78,10 @@ input PerformerCreateInput {
measurements: String
fake_tits: String
penis_length: Float
circumcised: CircumisedEnum
circumcised: CircumcisedEnum
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: Int
career_end: Int
career_start: String
career_end: String
tattoos: String
piercings: String
"Duplicate aliases and those equal to name will be ignored (case-insensitive)"
@ -119,10 +119,10 @@ input PerformerUpdateInput {
measurements: String
fake_tits: String
penis_length: Float
circumcised: CircumisedEnum
circumcised: CircumcisedEnum
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: Int
career_end: Int
career_start: String
career_end: String
tattoos: String
piercings: String
"Duplicate aliases and those equal to name will be ignored (case-insensitive)"
@ -165,10 +165,10 @@ input BulkPerformerUpdateInput {
measurements: String
fake_tits: String
penis_length: Float
circumcised: CircumisedEnum
circumcised: CircumcisedEnum
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: Int
career_end: Int
career_start: String
career_end: String
tattoos: String
piercings: String
"Duplicate aliases and those equal to name will result in an error (case-insensitive)"

View file

@ -19,8 +19,8 @@ type ScrapedPerformer {
penis_length: String
circumcised: String
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: Int
career_end: Int
career_start: String
career_end: String
tattoos: String
piercings: String
# aliases must be comma-delimited to be parsed correctly
@ -57,8 +57,8 @@ input ScrapedPerformerInput {
penis_length: String
circumcised: String
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: Int
career_end: Int
career_start: String
career_end: String
tattoos: String
piercings: String
aliases: String

View file

@ -73,6 +73,7 @@ type ScrapedTag {
name: String!
description: String
alias_list: [String!]
parent: ScrapedTag
"Remote site ID, if applicable"
remote_site_id: String
}

View file

@ -31,6 +31,11 @@ fragment TagFragment on Tag {
id
description
aliases
category {
id
name
description
}
}
fragment MeasurementsFragment on Measurements {

View file

@ -148,12 +148,12 @@ func makeGithubRequest(ctx context.Context, url string, output interface{}) erro
response, err := client.Do(req)
if err != nil {
//lint:ignore ST1005 Github is a proper capitalized noun
//nolint:staticcheck // ST1005 Github is a proper capitalized noun
return fmt.Errorf("Github API request failed: %w", err)
}
if response.StatusCode != http.StatusOK {
//lint:ignore ST1005 Github is a proper capitalized noun
//nolint:staticcheck // ST1005 Github is a proper capitalized noun
return fmt.Errorf("Github API request failed: %s", response.Status)
}
@ -161,7 +161,7 @@ func makeGithubRequest(ctx context.Context, url string, output interface{}) erro
data, err := io.ReadAll(response.Body)
if err != nil {
//lint:ignore ST1005 Github is a proper capitalized noun
//nolint:staticcheck // ST1005 Github is a proper capitalized noun
return fmt.Errorf("Github API read response failed: %w", err)
}
@ -295,10 +295,10 @@ func printLatestVersion(ctx context.Context) {
logger.Errorf("Couldn't retrieve latest version: %v", err)
} else {
_, githash, _ := build.Version()
switch {
case githash == "":
switch githash {
case "":
logger.Infof("Latest version: %s (%s)", latestRelease.Version, latestRelease.ShortHash)
case githash == latestRelease.ShortHash:
case latestRelease.ShortHash:
logger.Infof("Version %s (%s) is already the latest released", latestRelease.Version, latestRelease.ShortHash)
default:
logger.Infof("New version available: %s (%s)", latestRelease.Version, latestRelease.ShortHash)

View file

@ -11,7 +11,7 @@
//go:generate go run github.com/vektah/dataloaden GroupLoader int *github.com/stashapp/stash/pkg/models.Group
//go:generate go run github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/models.FileID github.com/stashapp/stash/pkg/models.File
//go:generate go run github.com/vektah/dataloaden FolderLoader github.com/stashapp/stash/pkg/models.FolderID *github.com/stashapp/stash/pkg/models.Folder
//go:generate go run github.com/vektah/dataloaden FolderParentFolderIDsLoader github.com/stashapp/stash/pkg/models.FolderID []github.com/stashapp/stash/pkg/models.FolderID
//go:generate go run github.com/vektah/dataloaden FolderRelatedFolderIDsLoader github.com/stashapp/stash/pkg/models.FolderID []github.com/stashapp/stash/pkg/models.FolderID
//go:generate go run github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
@ -75,7 +75,8 @@ type Loaders struct {
FileByID *FileLoader
FolderByID *FolderLoader
FolderParentFolderIDs *FolderParentFolderIDsLoader
FolderParentFolderIDs *FolderRelatedFolderIDsLoader
FolderSubFolderIDs *FolderRelatedFolderIDsLoader
}
type Middleware struct {
@ -166,11 +167,16 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch,
fetch: m.fetchFolders(ctx),
},
FolderParentFolderIDs: &FolderParentFolderIDsLoader{
FolderParentFolderIDs: &FolderRelatedFolderIDsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchFoldersParentFolderIDs(ctx),
},
FolderSubFolderIDs: &FolderRelatedFolderIDsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchFoldersSubFolderIDs(ctx),
},
SceneFiles: &SceneFileIDsLoader{
wait: wait,
maxBatch: maxBatch,
@ -427,6 +433,17 @@ func (m Middleware) fetchFoldersParentFolderIDs(ctx context.Context) func(keys [
}
}
// fetchFoldersSubFolderIDs builds the batch-fetch function used by the
// FolderSubFolderIDs dataloader. For each folder ID in keys it returns the
// IDs of that folder's direct sub-folders, querying the repository inside a
// single database transaction. Any transaction error is expanded to a
// per-key error slice via toErrorSlice.
func (m Middleware) fetchFoldersSubFolderIDs(ctx context.Context) func(keys []models.FolderID) ([][]models.FolderID, []error) {
	return func(keys []models.FolderID) ([][]models.FolderID, []error) {
		var subFolderIDs [][]models.FolderID
		err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
			ids, dbErr := m.Repository.Folder.GetManySubFolderIDs(ctx, keys)
			subFolderIDs = ids
			return dbErr
		})
		return subFolderIDs, toErrorSlice(err)
	}
}
func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([][]models.FileID, []error) {
return func(keys []int) (ret [][]models.FileID, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {

View file

@ -22,16 +22,16 @@ type FolderParentFolderIDsLoaderConfig struct {
}
// NewFolderParentFolderIDsLoader creates a new FolderParentFolderIDsLoader given a fetch, wait, and maxBatch
func NewFolderParentFolderIDsLoader(config FolderParentFolderIDsLoaderConfig) *FolderParentFolderIDsLoader {
return &FolderParentFolderIDsLoader{
func NewFolderParentFolderIDsLoader(config FolderParentFolderIDsLoaderConfig) *FolderRelatedFolderIDsLoader {
return &FolderRelatedFolderIDsLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// FolderParentFolderIDsLoader batches and caches requests
type FolderParentFolderIDsLoader struct {
// FolderRelatedFolderIDsLoader batches and caches requests
type FolderRelatedFolderIDsLoader struct {
// this method provides the data for the loader
fetch func(keys []models.FolderID) ([][]models.FolderID, []error)
@ -63,14 +63,14 @@ type folderParentFolderIDsLoaderBatch struct {
}
// Load a FolderID by key, batching and caching will be applied automatically
func (l *FolderParentFolderIDsLoader) Load(key models.FolderID) ([]models.FolderID, error) {
func (l *FolderRelatedFolderIDsLoader) Load(key models.FolderID) ([]models.FolderID, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a FolderID.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *FolderParentFolderIDsLoader) LoadThunk(key models.FolderID) func() ([]models.FolderID, error) {
func (l *FolderRelatedFolderIDsLoader) LoadThunk(key models.FolderID) func() ([]models.FolderID, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
@ -113,7 +113,7 @@ func (l *FolderParentFolderIDsLoader) LoadThunk(key models.FolderID) func() ([]m
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *FolderParentFolderIDsLoader) LoadAll(keys []models.FolderID) ([][]models.FolderID, []error) {
func (l *FolderRelatedFolderIDsLoader) LoadAll(keys []models.FolderID) ([][]models.FolderID, []error) {
results := make([]func() ([]models.FolderID, error), len(keys))
for i, key := range keys {
@ -131,7 +131,7 @@ func (l *FolderParentFolderIDsLoader) LoadAll(keys []models.FolderID) ([][]model
// LoadAllThunk returns a function that when called will block waiting for a FolderIDs.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *FolderParentFolderIDsLoader) LoadAllThunk(keys []models.FolderID) func() ([][]models.FolderID, []error) {
func (l *FolderRelatedFolderIDsLoader) LoadAllThunk(keys []models.FolderID) func() ([][]models.FolderID, []error) {
results := make([]func() ([]models.FolderID, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
@ -149,7 +149,7 @@ func (l *FolderParentFolderIDsLoader) LoadAllThunk(keys []models.FolderID) func(
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *FolderParentFolderIDsLoader) Prime(key models.FolderID, value []models.FolderID) bool {
func (l *FolderRelatedFolderIDsLoader) Prime(key models.FolderID, value []models.FolderID) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
@ -164,13 +164,13 @@ func (l *FolderParentFolderIDsLoader) Prime(key models.FolderID, value []models.
}
// Clear the value at key from the cache, if it exists
func (l *FolderParentFolderIDsLoader) Clear(key models.FolderID) {
func (l *FolderRelatedFolderIDsLoader) Clear(key models.FolderID) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *FolderParentFolderIDsLoader) unsafeSet(key models.FolderID, value []models.FolderID) {
func (l *FolderRelatedFolderIDsLoader) unsafeSet(key models.FolderID, value []models.FolderID) {
if l.cache == nil {
l.cache = map[models.FolderID][]models.FolderID{}
}
@ -179,7 +179,7 @@ func (l *FolderParentFolderIDsLoader) unsafeSet(key models.FolderID, value []mod
// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *folderParentFolderIDsLoaderBatch) keyIndex(l *FolderParentFolderIDsLoader, key models.FolderID) int {
func (b *folderParentFolderIDsLoaderBatch) keyIndex(l *FolderRelatedFolderIDsLoader, key models.FolderID) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
@ -203,7 +203,7 @@ func (b *folderParentFolderIDsLoaderBatch) keyIndex(l *FolderParentFolderIDsLoad
return pos
}
func (b *folderParentFolderIDsLoaderBatch) startTimer(l *FolderParentFolderIDsLoader) {
func (b *folderParentFolderIDsLoaderBatch) startTimer(l *FolderRelatedFolderIDsLoader) {
time.Sleep(l.wait)
l.mu.Lock()
@ -219,7 +219,7 @@ func (b *folderParentFolderIDsLoaderBatch) startTimer(l *FolderParentFolderIDsLo
b.end(l)
}
func (b *folderParentFolderIDsLoaderBatch) end(l *FolderParentFolderIDsLoader) {
func (b *folderParentFolderIDsLoaderBatch) end(l *FolderRelatedFolderIDsLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}

View file

@ -7,6 +7,7 @@ import (
"sort"
"strconv"
"github.com/99designs/gqlgen/graphql"
"github.com/stashapp/stash/internal/build"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/logger"
@ -145,6 +146,13 @@ func (r *Resolver) withReadTxn(ctx context.Context, fn func(ctx context.Context)
return r.repository.WithReadTxn(ctx, fn)
}
// idOnly reports whether the current GraphQL selection set consists of the
// single field "id". Resolvers use this as a shortcut: when only the id is
// requested there is no need to load the full object from the database.
func (r *Resolver) idOnly(ctx context.Context) bool {
	selected := graphql.CollectAllFields(ctx)
	if len(selected) != 1 {
		return false
	}
	return selected[0] == "id"
}
func (r *queryResolver) MarkerWall(ctx context.Context, q *string) (ret []*models.SceneMarker, err error) {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.SceneMarker.Wall(ctx, q)

View file

@ -17,20 +17,62 @@ func (r *folderResolver) ParentFolder(ctx context.Context, obj *models.Folder) (
return nil, nil
}
if r.idOnly(ctx) {
return &models.Folder{ID: *obj.ParentFolderID}, nil
}
return loaders.From(ctx).FolderByID.Load(*obj.ParentFolderID)
}
func foldersFromIDs(ids []models.FolderID) []*models.Folder {
ret := make([]*models.Folder, len(ids))
for i, id := range ids {
ret[i] = &models.Folder{ID: id}
}
return ret
}
func (r *folderResolver) ParentFolders(ctx context.Context, obj *models.Folder) ([]*models.Folder, error) {
ids, err := loaders.From(ctx).FolderParentFolderIDs.Load(obj.ID)
if err != nil {
return nil, err
}
if r.idOnly(ctx) {
return foldersFromIDs(ids), nil
}
var errs []error
ret, errs := loaders.From(ctx).FolderByID.LoadAll(ids)
return ret, firstError(errs)
}
func (r *folderResolver) SubFolders(ctx context.Context, obj *models.Folder) ([]*models.Folder, error) {
ids, err := loaders.From(ctx).FolderSubFolderIDs.Load(obj.ID)
if err != nil {
return nil, err
}
if r.idOnly(ctx) {
return foldersFromIDs(ids), nil
}
var errs []error
ret, errs := loaders.From(ctx).FolderByID.LoadAll(ids)
return ret, firstError(errs)
}
func (r *folderResolver) ZipFile(ctx context.Context, obj *models.Folder) (*BasicFile, error) {
// shortcut for id only queries
if r.idOnly(ctx) {
if obj.ZipFileID == nil {
return nil, nil
}
return &BasicFile{
BaseFile: &models.BaseFile{ID: *obj.ZipFileID},
}, nil
}
return zipFileResolver(ctx, obj.ZipFileID)
}

View file

@ -10,7 +10,6 @@ import (
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/performer"
"github.com/stashapp/stash/pkg/utils"
)
func (r *performerResolver) AliasList(ctx context.Context, obj *models.Performer) ([]string, error) {
@ -110,12 +109,28 @@ func (r *performerResolver) HeightCm(ctx context.Context, obj *models.Performer)
return obj.Height, nil
}
func (r *performerResolver) CareerStart(ctx context.Context, obj *models.Performer) (*string, error) {
if obj.CareerStart != nil {
ret := obj.CareerStart.String()
return &ret, nil
}
return nil, nil
}
func (r *performerResolver) CareerEnd(ctx context.Context, obj *models.Performer) (*string, error) {
if obj.CareerEnd != nil {
ret := obj.CareerEnd.String()
return &ret, nil
}
return nil, nil
}
func (r *performerResolver) CareerLength(ctx context.Context, obj *models.Performer) (*string, error) {
if obj.CareerStart == nil && obj.CareerEnd == nil {
return nil, nil
}
ret := utils.FormatYearRange(obj.CareerStart, obj.CareerEnd)
ret := models.FormatYearRange(obj.CareerStart, obj.CareerEnd)
return &ret, nil
}

View file

@ -140,7 +140,8 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*ScenePat
objHash := obj.GetHash(config.GetVideoFileNamingAlgorithm())
vttPath := builder.GetSpriteVTTURL(objHash)
spritePath := builder.GetSpriteURL(objHash)
funscriptPath := builder.GetFunscriptURL()
funscriptPath := builder.GetFunscriptURL(config.GetAPIKey()).String()
captionBasePath := builder.GetCaptionURL()
interactiveHeatmap := builder.GetInteractiveHeatmapURL()
return &ScenePathsType{

View file

@ -227,6 +227,12 @@ func (r *mutationResolver) GroupUpdate(ctx context.Context, input GroupUpdateInp
func groupPartialFromBulkGroupUpdateInput(translator changesetTranslator, input BulkGroupUpdateInput) (ret models.GroupPartial, err error) {
updatedGroup := models.NewGroupPartial()
updatedGroup.Date, err = translator.optionalDate(input.Date, "date")
if err != nil {
err = fmt.Errorf("converting date: %w", err)
return
}
updatedGroup.Synopsis = translator.optionalString(input.Synopsis, "synopsis")
updatedGroup.Rating = translator.optionalInt(input.Rating100, "rating100")
updatedGroup.Director = translator.optionalString(input.Director, "director")

View file

@ -47,6 +47,10 @@ func (r *mutationResolver) Migrate(ctx context.Context, input manager.MigrateInp
Database: mgr.Database,
}
if err := t.PreExecute(); err != nil {
return "", err
}
jobID := mgr.JobManager.Add(ctx, "Migrating database...", t)
return strconv.Itoa(jobID), nil

View file

@ -12,9 +12,10 @@ import (
func refreshPackageType(typeArg PackageType) {
mgr := manager.GetInstance()
if typeArg == PackageTypePlugin {
switch typeArg {
case PackageTypePlugin:
mgr.RefreshPluginCache()
} else if typeArg == PackageTypeScraper {
case PackageTypeScraper:
mgr.RefreshScraperCache()
}
}

View file

@ -52,17 +52,6 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
newPerformer.FakeTits = translator.string(input.FakeTits)
newPerformer.PenisLength = input.PenisLength
newPerformer.Circumcised = input.Circumcised
newPerformer.CareerStart = input.CareerStart
newPerformer.CareerEnd = input.CareerEnd
// if career_start/career_end not provided, parse deprecated career_length
if newPerformer.CareerStart == nil && newPerformer.CareerEnd == nil && input.CareerLength != nil {
start, end, err := utils.ParseYearRangeString(*input.CareerLength)
if err != nil {
return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err)
}
newPerformer.CareerStart = start
newPerformer.CareerEnd = end
}
newPerformer.Tattoos = translator.string(input.Tattoos)
newPerformer.Piercings = translator.string(input.Piercings)
newPerformer.Favorite = translator.bool(input.Favorite)
@ -100,6 +89,25 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
return nil, fmt.Errorf("converting death date: %w", err)
}
newPerformer.CareerStart, err = translator.datePtr(input.CareerStart)
if err != nil {
return nil, fmt.Errorf("converting career start: %w", err)
}
newPerformer.CareerEnd, err = translator.datePtr(input.CareerEnd)
if err != nil {
return nil, fmt.Errorf("converting career end: %w", err)
}
// if career_start/career_end not provided, parse deprecated career_length
if newPerformer.CareerStart == nil && newPerformer.CareerEnd == nil && input.CareerLength != nil {
start, end, err := models.ParseYearRangeString(*input.CareerLength)
if err != nil {
return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err)
}
newPerformer.CareerStart = start
newPerformer.CareerEnd = end
}
newPerformer.TagIDs, err = translator.relatedIds(input.TagIds)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
@ -273,18 +281,25 @@ func performerPartialFromInput(input models.PerformerUpdateInput, translator cha
updatedPerformer.Circumcised = translator.optionalString((*string)(input.Circumcised), "circumcised")
// prefer career_start/career_end over deprecated career_length
if translator.hasField("career_start") || translator.hasField("career_end") {
updatedPerformer.CareerStart = translator.optionalInt(input.CareerStart, "career_start")
updatedPerformer.CareerEnd = translator.optionalInt(input.CareerEnd, "career_end")
var err error
updatedPerformer.CareerStart, err = translator.optionalDate(input.CareerStart, "career_start")
if err != nil {
return nil, fmt.Errorf("converting career start: %w", err)
}
updatedPerformer.CareerEnd, err = translator.optionalDate(input.CareerEnd, "career_end")
if err != nil {
return nil, fmt.Errorf("converting career end: %w", err)
}
} else if translator.hasField("career_length") && input.CareerLength != nil {
start, end, err := utils.ParseYearRangeString(*input.CareerLength)
start, end, err := models.ParseYearRangeString(*input.CareerLength)
if err != nil {
return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err)
}
if start != nil {
updatedPerformer.CareerStart = models.NewOptionalInt(*start)
updatedPerformer.CareerStart = models.NewOptionalDate(*start)
}
if end != nil {
updatedPerformer.CareerEnd = models.NewOptionalInt(*end)
updatedPerformer.CareerEnd = models.NewOptionalDate(*end)
}
}
updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos")
@ -444,18 +459,24 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
updatedPerformer.Circumcised = translator.optionalString((*string)(input.Circumcised), "circumcised")
// prefer career_start/career_end over deprecated career_length
if translator.hasField("career_start") || translator.hasField("career_end") {
updatedPerformer.CareerStart = translator.optionalInt(input.CareerStart, "career_start")
updatedPerformer.CareerEnd = translator.optionalInt(input.CareerEnd, "career_end")
updatedPerformer.CareerStart, err = translator.optionalDate(input.CareerStart, "career_start")
if err != nil {
return nil, fmt.Errorf("converting career start: %w", err)
}
updatedPerformer.CareerEnd, err = translator.optionalDate(input.CareerEnd, "career_end")
if err != nil {
return nil, fmt.Errorf("converting career end: %w", err)
}
} else if translator.hasField("career_length") && input.CareerLength != nil {
start, end, err := utils.ParseYearRangeString(*input.CareerLength)
start, end, err := models.ParseYearRangeString(*input.CareerLength)
if err != nil {
return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err)
}
if start != nil {
updatedPerformer.CareerStart = models.NewOptionalInt(*start)
updatedPerformer.CareerStart = models.NewOptionalDate(*start)
}
if end != nil {
updatedPerformer.CareerEnd = models.NewOptionalInt(*end)
updatedPerformer.CareerEnd = models.NewOptionalDate(*end)
}
}
updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos")
@ -633,7 +654,7 @@ func (r *mutationResolver) PerformerMerge(ctx context.Context, input PerformerMe
}
legacyURLs := legacyPerformerURLsFromInput(*input.Values, translator)
if legacyURLs.AnySet() {
return nil, errors.New("Merging legacy performer URLs is not supported")
return nil, errors.New("merging legacy performer URLs is not supported")
}
if input.Values.Image != nil {

View file

@ -33,15 +33,26 @@ func (r *queryResolver) FindJob(ctx context.Context, input FindJobInput) (*Job,
}
func jobToJobModel(j job.Job) *Job {
subTasks := make([]string, len(j.Details))
for i, t := range j.Details {
subTasks[i] = sanitiseWebsocketString(t)
}
var jobError *string
if j.Error != nil {
s := sanitiseWebsocketString(*j.Error)
jobError = &s
}
ret := &Job{
ID: strconv.Itoa(j.ID),
Status: JobStatus(j.Status),
Description: j.Description,
SubTasks: j.Details,
Description: sanitiseWebsocketString(j.Description),
SubTasks: subTasks,
StartTime: j.StartTime,
EndTime: j.EndTime,
AddTime: j.AddTime,
Error: j.Error,
Error: jobError,
}
if j.Progress != -1 {

View file

@ -6,6 +6,7 @@ import (
"fmt"
"slices"
"strconv"
"strings"
"github.com/stashapp/stash/pkg/match"
"github.com/stashapp/stash/pkg/models"
@ -363,7 +364,8 @@ func (r *queryResolver) ScrapeSingleTag(ctx context.Context, source scraper.Sour
client := r.newStashBoxClient(*b)
var ret []*models.ScrapedTag
out, err := client.QueryTag(ctx, *input.Query)
query := *input.Query
out, err := client.QueryTag(ctx, query)
if err != nil {
return nil, err
@ -383,6 +385,22 @@ func (r *queryResolver) ScrapeSingleTag(ctx context.Context, source scraper.Sour
}); err != nil {
return nil, err
}
// tag name query returns results that may not match the query exactly.
// if there is an exact match, it should be first
if query != "" {
for i, result := range ret {
if strings.EqualFold(result.Name, query) {
// prepend exact match to the front of the slice
if i != 0 {
ret = append([]*models.ScrapedTag{result}, append(ret[:i], ret[i+1:]...)...)
}
break
}
}
}
return ret, nil
}

View file

@ -2,11 +2,19 @@ package api
import (
"context"
"strings"
"github.com/stashapp/stash/internal/log"
"github.com/stashapp/stash/internal/manager"
)
// sanitiseWebsocketString is used to ensure that any strings sent over the websocket are valid UTF-8.
// Any invalid UTF-8 sequences will be replaced with the Unicode replacement character (U+FFFD).
// Invalid UTF-8 sequences can cause the websocket connection to be closed.
func sanitiseWebsocketString(s string) string {
return strings.ToValidUTF8(s, "\uFFFD")
}
func getLogLevel(logType string) LogLevel {
switch logType {
case "progress":
@ -33,7 +41,7 @@ func logEntriesFromLogItems(logItems []log.LogItem) []*LogEntry {
ret[i] = &LogEntry{
Time: entry.Time,
Level: getLogLevel(entry.Type),
Message: entry.Message,
Message: sanitiseWebsocketString(entry.Message),
}
}

View file

@ -57,8 +57,20 @@ func (b SceneURLBuilder) GetScreenshotURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/screenshot?t=" + b.UpdatedAt
}
func (b SceneURLBuilder) GetFunscriptURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/funscript"
func (b SceneURLBuilder) GetFunscriptURL(apiKey string) *url.URL {
u, err := url.Parse(fmt.Sprintf("%s/scene/%s/funscript", b.BaseURL, b.SceneID))
if err != nil {
// shouldn't happen
panic(err)
}
if apiKey != "" {
v := u.Query()
v.Set("apikey", apiKey)
u.RawQuery = v.Encode()
}
return u
}
func (b SceneURLBuilder) GetCaptionURL() string {

View file

@ -8,7 +8,7 @@ import (
"os"
"os/exec"
gosxnotifier "github.com/kermieisinthehouse/gosx-notifier"
gosxnotifier "github.com/feederbox826/gosx-notifier"
"github.com/stashapp/stash/pkg/logger"
)

View file

@ -33,8 +33,10 @@ type MetadataOptions struct {
SetCoverImage *bool `json:"setCoverImage"`
SetOrganized *bool `json:"setOrganized"`
// defaults to true if not provided
// Deprecated: use PerformerGenders instead
IncludeMalePerformers *bool `json:"includeMalePerformers"`
// Filter to only include performers with these genders. If not provided, all genders are included.
PerformerGenders []models.GenderEnum `json:"performerGenders"`
// defaults to true if not provided

View file

@ -22,7 +22,8 @@ type SceneMissingHashCounter interface {
// will ensure that all oshash values are set on all scenes.
func ValidateVideoFileNamingAlgorithm(ctx context.Context, qb SceneMissingHashCounter, newValue models.HashAlgorithm) error {
// if algorithm is being set to MD5, then all checksums must be present
if newValue == models.HashAlgorithmMd5 {
switch newValue {
case models.HashAlgorithmMd5:
missingMD5, err := qb.CountMissingChecksum(ctx)
if err != nil {
return err
@ -31,7 +32,7 @@ func ValidateVideoFileNamingAlgorithm(ctx context.Context, qb SceneMissingHashCo
if missingMD5 > 0 {
return errors.New("some checksums are missing on scenes. Run Scan with calculateMD5 set to true")
}
} else if newValue == models.HashAlgorithmOshash {
case models.HashAlgorithmOshash:
missingOSHash, err := qb.CountMissingOSHash(ctx)
if err != nil {
return err

View file

@ -408,7 +408,7 @@ func ConvertFunscriptToCSV(funscriptPath string) ([]byte, error) {
}
// I don't know whether the csv format requires int or float, so for now we'll use int
buffer.WriteString(fmt.Sprintf("%d,%d\r\n", int(math.Round(action.At)), pos))
fmt.Fprintf(&buffer, "%d,%d\r\n", int(math.Round(action.At)), pos)
}
return buffer.Bytes(), nil
}

View file

@ -76,9 +76,10 @@ func performImport(ctx context.Context, i importer, duplicateBehaviour ImportDup
var id int
if existing != nil {
if duplicateBehaviour == ImportDuplicateEnumFail {
switch duplicateBehaviour {
case ImportDuplicateEnumFail:
return fmt.Errorf("existing object with name '%s'", name)
} else if duplicateBehaviour == ImportDuplicateEnumIgnore {
case ImportDuplicateEnumIgnore:
logger.Infof("Skipping existing object %q", name)
return nil
}

View file

@ -314,6 +314,8 @@ type CleanMetadataInput struct {
Paths []string `json:"paths"`
// Do a dry run. Don't delete any files
DryRun bool `json:"dryRun"`
IgnoreZipFileContents bool `json:"ignoreZipFileContents"`
}
func (s *Manager) Clean(ctx context.Context, input CleanMetadataInput) int {
@ -431,7 +433,7 @@ type StashBoxBatchTagInput struct {
ExcludeFields []string `json:"exclude_fields"`
// Refresh items already tagged by StashBox if true. Only tag items with no StashBox tagging if false
Refresh bool `json:"refresh"`
// If batch adding studios, should their parent studios also be created?
// If batch adding studios or tags, should their parent entities also be created?
CreateParent bool `json:"createParent"`
// IDs in stash of the items to update.
// If set, names and stash_ids fields will be ignored.
@ -749,6 +751,7 @@ func (s *Manager) batchTagTagsByIds(ctx context.Context, input StashBoxBatchTagI
if (input.Refresh && hasStashID) || (!input.Refresh && !hasStashID) {
tasks = append(tasks, &stashBoxBatchTagTagTask{
tag: t,
createParent: input.CreateParent,
box: box,
excludedFields: input.ExcludeFields,
})
@ -769,6 +772,7 @@ func (s *Manager) batchTagTagsByNamesOrStashIds(input StashBoxBatchTagInput, box
if len(stashID) > 0 {
tasks = append(tasks, &stashBoxBatchTagTagTask{
stashID: &stashID,
createParent: input.CreateParent,
box: box,
excludedFields: input.ExcludeFields,
})
@ -780,6 +784,7 @@ func (s *Manager) batchTagTagsByNamesOrStashIds(input StashBoxBatchTagInput, box
if len(name) > 0 {
tasks = append(tasks, &stashBoxBatchTagTagTask{
name: &name,
createParent: input.CreateParent,
box: box,
excludedFields: input.ExcludeFields,
})
@ -806,6 +811,7 @@ func (s *Manager) batchTagAllTags(ctx context.Context, input StashBoxBatchTagInp
for _, t := range tags {
tasks = append(tasks, &stashBoxBatchTagTagTask{
tag: t,
createParent: input.CreateParent,
box: box,
excludedFields: input.ExcludeFields,
})

View file

@ -23,8 +23,8 @@ type stashIgnorePathFilter struct {
libraryRoot string
}
func (f *stashIgnorePathFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool {
return f.filter.Accept(ctx, path, info, f.libraryRoot)
func (f *stashIgnorePathFilter) Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool {
return f.filter.Accept(ctx, path, info, f.libraryRoot, zipFilePath)
}
// createTestFileOnDisk creates a file with some content.
@ -105,7 +105,7 @@ temp/
if err != nil {
t.Fatalf("failed to stat file %s: %v", scenario.path, err)
}
accepted := scanner.AcceptEntry(ctx, scenario.path, info)
accepted := scanner.AcceptEntry(ctx, scenario.path, info, "")
if accepted != scenario.accepted {
t.Errorf("unexpected accept result for %s: expected %v, got %v",
@ -160,7 +160,7 @@ func TestScannerWithNestedStashIgnore(t *testing.T) {
if err != nil {
t.Fatalf("failed to stat file %s: %v", scenario.path, err)
}
accepted := scanner.AcceptEntry(ctx, scenario.path, info)
accepted := scanner.AcceptEntry(ctx, scenario.path, info, "")
if accepted != scenario.accepted {
t.Errorf("unexpected accept result for %s: expected %v, got %v",
@ -205,7 +205,7 @@ func TestScannerWithoutStashIgnore(t *testing.T) {
if err != nil {
t.Fatalf("failed to stat file %s: %v", scenario.path, err)
}
accepted := scanner.AcceptEntry(ctx, scenario.path, info)
accepted := scanner.AcceptEntry(ctx, scenario.path, info, "")
if accepted != scenario.accepted {
t.Errorf("unexpected accept result for %s: expected %v, got %v",
@ -258,7 +258,7 @@ func TestScannerWithNegationPattern(t *testing.T) {
if err != nil {
t.Fatalf("failed to stat file %s: %v", scenario.path, err)
}
accepted := scanner.AcceptEntry(ctx, scenario.path, info)
accepted := scanner.AcceptEntry(ctx, scenario.path, info, "")
if accepted != scenario.accepted {
t.Errorf("unexpected accept result for %s: expected %v, got %v",

View file

@ -313,9 +313,36 @@ func (j *CleanGeneratedJob) cleanBlobFiles(ctx context.Context, progress *job.Pr
return err
}
// remove empty hash prefix subdirectories
j.removeEmptyDirs(j.Paths.Blobs)
return nil
}
func (j *CleanGeneratedJob) removeEmptyDirs(root string) {
entries, err := os.ReadDir(root)
if err != nil {
return
}
for _, entry := range entries {
if !entry.IsDir() {
continue
}
dirPath := filepath.Join(root, entry.Name())
subEntries, err := os.ReadDir(dirPath)
if err != nil {
continue
}
if len(subEntries) == 0 {
j.logDelete("removing empty directory: %s", entry.Name())
j.deleteDir(dirPath)
}
}
}
func (j *CleanGeneratedJob) getScenesWithHash(ctx context.Context, hash string) ([]*models.Scene, error) {
fp := models.Fingerprint{
Fingerprint: hash,
@ -637,6 +664,8 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job.
return err
}
j.removeEmptyDirs(j.Paths.Generated.Markers)
return nil
}
@ -730,5 +759,7 @@ func (j *CleanGeneratedJob) cleanThumbnailFiles(ctx context.Context, progress *j
return err
}
j.removeEmptyDirs(j.Paths.Generated.Thumbnails)
return nil
}

View file

@ -7,6 +7,8 @@ import (
"os"
"path/filepath"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/sqlite"
@ -29,6 +31,21 @@ type databaseSchemaInfo struct {
StepsRequired uint
}
// PreExecute validates the environment before executing the migration.
// It returns an error if the migration cannot be performed.
func (s *MigrateJob) PreExecute() error {
// ensure backup directory exists and is writable
backupDir := s.Config.GetBackupDirectoryPathOrDefault()
if backupDir != "" {
if err := fsutil.EnsureDir(backupDir); err != nil {
logger.Errorf("error ensuring backup directory exists: %s", err)
logger.Warnf("Backup directory (%s) must be modified to a valid directory or removed from the config file", config.BackupDirectoryPath)
return fmt.Errorf("error creating backup directory: %w", err)
}
}
return nil
}
func (s *MigrateJob) Execute(ctx context.Context, progress *job.Progress) error {
schemaInfo, err := s.required()
if err != nil {

View file

@ -40,9 +40,10 @@ func (j *cleanJob) Execute(ctx context.Context, progress *job.Progress) error {
}
j.cleaner.Clean(ctx, file.CleanOptions{
Paths: j.input.Paths,
DryRun: j.input.DryRun,
PathFilter: newCleanFilter(instance.Config),
Paths: j.input.Paths,
DryRun: j.input.DryRun,
IgnoreZipFileContents: j.input.IgnoreZipFileContents,
PathFilter: newCleanFilter(instance.Config),
}, progress)
if job.IsCancelled(ctx) {
@ -159,7 +160,7 @@ func newCleanFilter(c *config.Config) *cleanFilter {
}
}
func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool {
func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool {
// #1102 - clean anything in generated path
generatedPath := f.generatedPath
@ -184,7 +185,7 @@ func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo)
}
// Check .stashignore files, bounded to the library root.
if !f.stashIgnoreFilter.Accept(ctx, path, info, stash.Path) {
if !f.stashIgnoreFilter.Accept(ctx, path, info, stash.Path, zipFilePath) {
logger.Infof("%s is excluded due to .stashignore. Marking to clean: %q", fileOrFolder, path)
return false
}

View file

@ -262,7 +262,9 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error
for f := range queue {
if job.IsCancelled(ctx) {
break
// keep draining the queue so the producer goroutine can finish
// and release its read transaction, otherwise the DB stays locked
continue
}
wg.Add()

View file

@ -35,7 +35,7 @@ func (j *OptimiseDatabaseJob) Execute(ctx context.Context, progress *job.Progres
return nil
}
if err != nil {
return fmt.Errorf("Error analyzing database: %w", err)
return fmt.Errorf("error analyzing database: %w", err)
}
progress.ExecuteTask("Vacuuming database", func() {

View file

@ -20,12 +20,12 @@ func (s *Manager) RunPluginTask(
pluginProgress := make(chan float64)
task, err := s.PluginCache.CreateTask(ctx, pluginID, taskName, args, pluginProgress)
if err != nil {
return fmt.Errorf("Error creating plugin task: %w", err)
return fmt.Errorf("error creating plugin task: %w", err)
}
err = task.Start()
if err != nil {
return fmt.Errorf("Error running plugin task: %w", err)
return fmt.Errorf("error running plugin task: %w", err)
}
done := make(chan bool)

View file

@ -171,7 +171,12 @@ func (j *ScanJob) queueFileFunc(ctx context.Context, f models.FS, zipFile *file.
return nil
}
if !j.scanner.AcceptEntry(ctx, path, info) {
zipFilePath := ""
if zipFile != nil {
zipFilePath = zipFile.Path
}
if !j.scanner.AcceptEntry(ctx, path, info, zipFilePath) {
if info.IsDir() {
logger.Debugf("Skipping directory %s", path)
return fs.SkipDir
@ -278,8 +283,10 @@ func (j *ScanJob) processQueue(ctx context.Context, parallelTasks int, progress
for f := range j.fileQueue {
logger.Tracef("Processing queued file %s", f.Path)
if err := ctx.Err(); err != nil {
return
if ctx.Err() != nil {
// Keep receiving until queueFiles closes the channel; otherwise
// the walker can block on send (full buffer) and never finish.
continue
}
wg.Add()
@ -565,7 +572,7 @@ func newScanFilter(c *config.Config, repo models.Repository, minModTime time.Tim
}
}
func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool {
func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool {
if fsutil.IsPathInDir(f.generatedPath, path) {
logger.Warnf("Skipping %q as it overlaps with the generated folder", path)
return false
@ -583,7 +590,7 @@ func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo)
}
// Check .stashignore files, bounded to the library root.
if !f.stashIgnoreFilter.Accept(ctx, path, info, s.Path) {
if !f.stashIgnoreFilter.Accept(ctx, path, info, s.Path, zipFilePath) {
logger.Debugf("Skipping %s due to .stashignore", path)
return false
}
@ -655,8 +662,9 @@ func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progre
&file.FilteredHandler{
Filter: file.FilterFunc(imageFileFilter),
Handler: &image.ScanHandler{
CreatorUpdater: r.Image,
GalleryFinder: r.Gallery,
CreatorUpdater: r.Image,
GalleryFinder: r.Gallery,
SceneFinderUpdater: r.Scene,
ScanGenerator: &imageGenerators{
input: options,
taskQueue: taskQueue,
@ -685,9 +693,10 @@ func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progre
&file.FilteredHandler{
Filter: file.FilterFunc(videoFileFilter),
Handler: &scene.ScanHandler{
CreatorUpdater: r.Scene,
CaptionUpdater: r.File,
PluginCache: pluginCache,
CreatorUpdater: r.Scene,
GalleryFinderUpdater: r.Gallery,
CaptionUpdater: r.File,
PluginCache: pluginCache,
ScanGenerator: &sceneGenerators{
input: options,
taskQueue: taskQueue,

View file

@ -4,6 +4,7 @@ import (
"context"
"fmt"
"strconv"
"strings"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/match"
@ -541,6 +542,7 @@ type stashBoxBatchTagTagTask struct {
name *string
stashID *string
tag *models.Tag
createParent bool
excludedFields []string
}
@ -588,8 +590,11 @@ func (t *stashBoxBatchTagTagTask) findStashBoxTag(ctx context.Context) (*models.
client := stashbox.NewClient(*t.box, stashbox.ExcludeTagPatterns(instance.Config.GetScraperExcludeTagPatterns()))
nameQuery := ""
switch {
case t.name != nil:
nameQuery = *t.name
results, err = client.QueryTag(ctx, *t.name)
case t.stashID != nil:
results, err = client.QueryTag(ctx, *t.stashID)
@ -615,6 +620,7 @@ func (t *stashBoxBatchTagTagTask) findStashBoxTag(ctx context.Context) (*models.
if remoteID != "" {
results, err = client.QueryTag(ctx, remoteID)
} else {
nameQuery = t.tag.Name
results, err = client.QueryTag(ctx, t.tag.Name)
}
}
@ -627,10 +633,26 @@ func (t *stashBoxBatchTagTagTask) findStashBoxTag(ctx context.Context) (*models.
return nil, nil
}
result := results[0]
var result *models.ScrapedTag
// QueryTag returns tags that partially match the name, so find the exact match if searching by name
if nameQuery != "" {
for _, r := range results {
if strings.EqualFold(r.Name, nameQuery) {
result = r
break
}
}
} else {
result = results[0]
}
if result == nil {
return nil, nil
}
if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
return match.ScrapedTag(ctx, r.Tag, result, t.box.Endpoint)
return match.ScrapedTagHierarchy(ctx, r.Tag, result, t.box.Endpoint)
}); err != nil {
return nil, err
}
@ -638,6 +660,39 @@ func (t *stashBoxBatchTagTagTask) findStashBoxTag(ctx context.Context) (*models.
return result, nil
}
func (t *stashBoxBatchTagTagTask) processParentTag(ctx context.Context, parent *models.ScrapedTag, excluded map[string]bool) error {
if parent.StoredID == nil {
// Create new parent tag
newParentTag := parent.ToTag(t.box.Endpoint, excluded)
r := instance.Repository
err := r.WithTxn(ctx, func(ctx context.Context) error {
qb := r.Tag
if err := tag.ValidateCreate(ctx, *newParentTag, qb); err != nil {
return err
}
if err := qb.Create(ctx, &models.CreateTagInput{Tag: newParentTag}); err != nil {
return err
}
storedID := strconv.Itoa(newParentTag.ID)
parent.StoredID = &storedID
return nil
})
if err != nil {
logger.Errorf("Failed to create parent tag %s: %v", parent.Name, err)
} else {
logger.Infof("Created parent tag %s", parent.Name)
}
return err
}
// Parent already exists — nothing to update for categories
return nil
}
func (t *stashBoxBatchTagTagTask) processMatchedTag(ctx context.Context, s *models.ScrapedTag, excluded map[string]bool) {
// Determine the tag ID to update — either from the task's tag or from the
// StoredID set by match.ScrapedTag (when batch adding by name and the tag
@ -649,6 +704,12 @@ func (t *stashBoxBatchTagTagTask) processMatchedTag(ctx context.Context, s *mode
tagID, _ = strconv.Atoi(*s.StoredID)
}
if s.Parent != nil && t.createParent {
if err := t.processParentTag(ctx, s.Parent, excluded); err != nil {
return
}
}
if tagID > 0 {
r := instance.Repository
err := r.WithTxn(ctx, func(ctx context.Context) error {

View file

@ -45,13 +45,13 @@ func (f *FFMpeg) InitHWSupport(ctx context.Context) {
// log if the initialization takes too long
const hwInitLogTimeoutSecondsDefault = 5
hwInitLogTimeoutSeconds := hwInitLogTimeoutSecondsDefault * time.Second
timer := time.NewTimer(hwInitLogTimeoutSeconds)
hwInitLogTimeout := hwInitLogTimeoutSecondsDefault * time.Second
timer := time.NewTimer(hwInitLogTimeout)
go func() {
select {
case <-timer.C:
logger.Warnf("[InitHWSupport] Hardware codec initialization is taking longer than %s...", hwInitLogTimeoutSeconds)
logger.Warnf("[InitHWSupport] Hardware codec initialization is taking longer than %s...", hwInitLogTimeout)
logger.Info("[InitHWSupport] Hardware encoding will not be available until initialization is complete.")
case <-done:
if !timer.Stop() {
@ -96,16 +96,16 @@ func (f *FFMpeg) initHWSupport(ctx context.Context) {
// #6064 - add timeout to context to prevent hangs
const hwTestTimeoutSecondsDefault = 10
hwTestTimeoutSeconds := hwTestTimeoutSecondsDefault * time.Second
hwTestTimeout := hwTestTimeoutSecondsDefault * time.Second
// allow timeout to be overridden with environment variable
if timeout := os.Getenv("STASH_HW_TEST_TIMEOUT"); timeout != "" {
if seconds, err := strconv.Atoi(timeout); err == nil {
hwTestTimeoutSeconds = time.Duration(seconds) * time.Second
hwTestTimeout = time.Duration(seconds) * time.Second
}
}
testCtx, cancel := context.WithTimeout(ctx, hwTestTimeoutSeconds)
testCtx, cancel := context.WithTimeout(ctx, hwTestTimeout)
defer cancel()
cmd := f.Command(testCtx, args)
@ -117,7 +117,7 @@ func (f *FFMpeg) initHWSupport(ctx context.Context) {
if err := cmd.Run(); err != nil {
if testCtx.Err() != nil {
logger.Debugf("[InitHWSupport] Codec %s test timed out after %s", codec, hwTestTimeoutSeconds)
logger.Debugf("[InitHWSupport] Codec %s test timed out after %s", codec, hwTestTimeout)
continue
}
@ -185,6 +185,12 @@ func (f *FFMpeg) hwCanFullHWTranscode(ctx context.Context, codec VideoCodec, vf
// Prepend input for hardware encoding only
func (f *FFMpeg) hwDeviceInit(args Args, toCodec VideoCodec, fullhw bool) Args {
// check for custom /dev/dri device #6435
driDevice := os.Getenv("STASH_HW_DRI_DEVICE")
if driDevice == "" {
driDevice = "/dev/dri/renderD128"
}
switch toCodec {
case VideoCodecN264,
VideoCodecN264H:
@ -201,7 +207,7 @@ func (f *FFMpeg) hwDeviceInit(args Args, toCodec VideoCodec, fullhw bool) Args {
case VideoCodecV264,
VideoCodecVVP9:
args = append(args, "-vaapi_device")
args = append(args, "/dev/dri/renderD128")
args = append(args, driDevice)
if fullhw {
args = append(args, "-hwaccel")
args = append(args, "vaapi")

View file

@ -33,6 +33,11 @@ type cleanJob struct {
type CleanOptions struct {
Paths []string
// IgnoreZipFileContents will skip checking the contents of zip files when determining whether to clean a file.
// This can significantly speed up the clean process, but will potentially miss removed files within zip files.
// Where users do not modify zip files contents directly, this should be safe to use.
IgnoreZipFileContents bool
// Do a dry run. Don't delete any files
DryRun bool
@ -174,13 +179,16 @@ func (j *cleanJob) assessFiles(ctx context.Context, toDelete *deleteSet) error {
more := true
r := j.Repository
includeZipContents := !j.options.IgnoreZipFileContents
if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
for more {
if job.IsCancelled(ctx) {
return nil
}
files, err := r.File.FindAllInPaths(ctx, j.options.Paths, batchSize, offset)
files, err := r.File.FindAllInPaths(ctx, j.options.Paths, includeZipContents, batchSize, offset)
if err != nil {
return fmt.Errorf("error querying for files: %w", err)
}
@ -258,6 +266,8 @@ func (j *cleanJob) assessFolders(ctx context.Context, toDelete *deleteSet) error
offset := 0
progress := j.progress
includeZipContents := !j.options.IgnoreZipFileContents
more := true
r := j.Repository
if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
@ -266,7 +276,7 @@ func (j *cleanJob) assessFolders(ctx context.Context, toDelete *deleteSet) error
return nil
}
folders, err := r.Folder.FindAllInPaths(ctx, j.options.Paths, batchSize, offset)
folders, err := r.Folder.FindAllInPaths(ctx, j.options.Paths, includeZipContents, batchSize, offset)
if err != nil {
return fmt.Errorf("error querying for folders: %w", err)
}
@ -348,8 +358,14 @@ func (j *cleanJob) shouldClean(ctx context.Context, f models.File) bool {
// run through path filter, if returns false then the file should be cleaned
filter := j.options.PathFilter
// need to get the zip file path if present
zipFilePath := ""
if f.Base().ZipFile != nil {
zipFilePath = f.Base().ZipFile.Base().Path
}
// don't log anything - assume filter will have logged the reason
return !filter.Accept(ctx, path, info)
return !filter.Accept(ctx, path, info, zipFilePath)
}
func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *models.Folder) bool {
@ -387,8 +403,14 @@ func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *models.Folder) bool
// run through path filter, if returns false then the file should be cleaned
filter := j.options.PathFilter
// need to get the zip file path if present
zipFilePath := ""
if f.ZipFile != nil {
zipFilePath = f.ZipFile.Base().Path
}
// don't log anything - assume filter will have logged the reason
return !filter.Accept(ctx, path, info)
return !filter.Accept(ctx, path, info, zipFilePath)
}
func (j *cleanJob) deleteFile(ctx context.Context, fileID models.FileID, fn string) {

View file

@ -2,7 +2,6 @@ package file
import (
"context"
"errors"
"fmt"
"io/fs"
@ -88,6 +87,11 @@ func (s *Scanner) detectFolderMove(ctx context.Context, file ScannedFile) (*mode
r := s.Repository
zipFilePath := ""
if file.ZipFile != nil {
zipFilePath = file.ZipFile.Base().Path
}
if err := SymWalk(file.FS, file.Path, func(path string, d fs.DirEntry, err error) error {
if err != nil {
// don't let errors prevent scanning
@ -111,7 +115,7 @@ func (s *Scanner) detectFolderMove(ctx context.Context, file ScannedFile) (*mode
return nil
}
if !s.AcceptEntry(ctx, path, info) {
if !s.AcceptEntry(ctx, path, info, zipFilePath) {
return nil
}
@ -161,9 +165,7 @@ func (s *Scanner) detectFolderMove(ctx context.Context, file ScannedFile) (*mode
continue
}
if !errors.Is(err, fs.ErrNotExist) {
return fmt.Errorf("checking for parent folder %q: %w", pf.Path, err)
}
// treat any error as missing folder
// parent folder is missing, possible candidate
// count the total number of files in the existing folder

View file

@ -9,7 +9,7 @@ import (
// PathFilter provides a filter function for paths.
type PathFilter interface {
Accept(ctx context.Context, path string, info fs.FileInfo) bool
Accept(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool
}
type PathFilterFunc func(path string) bool

View file

@ -111,12 +111,12 @@ type ScannedFile struct {
}
// AcceptEntry determines if the file entry should be accepted for scanning
func (s *Scanner) AcceptEntry(ctx context.Context, path string, info fs.FileInfo) bool {
func (s *Scanner) AcceptEntry(ctx context.Context, path string, info fs.FileInfo, zipFilePath string) bool {
// always accept if there's no filters
accept := len(s.ScanFilters) == 0
for _, filter := range s.ScanFilters {
// accept if any filter accepts the file
if filter.Accept(ctx, path, info) {
if filter.Accept(ctx, path, info, zipFilePath) {
accept = true
break
}
@ -462,7 +462,11 @@ func (s *Scanner) onNewFile(ctx context.Context, f ScannedFile) (*ScanFileResult
// determine if the file is renamed from an existing file in the store
// do this after decoration so that missing fields can be populated
renamed, err := s.handleRename(ctx, file, fp)
zipFilePath := ""
if f.ZipFile != nil {
zipFilePath = f.ZipFile.Base().Path
}
renamed, err := s.handleRename(ctx, file, fp, zipFilePath)
if err != nil {
return nil, err
}
@ -572,7 +576,7 @@ func (s *Scanner) getFileFS(f *models.BaseFile) (models.FS, error) {
return fs.OpenZip(zipPath, zipSize)
}
func (s *Scanner) handleRename(ctx context.Context, f models.File, fp []models.Fingerprint) (models.File, error) {
func (s *Scanner) handleRename(ctx context.Context, f models.File, fp []models.Fingerprint, zipFilePath string) (models.File, error) {
var others []models.File
for _, tfp := range fp {
@ -614,7 +618,7 @@ func (s *Scanner) handleRename(ctx context.Context, f models.File, fp []models.F
// treat as a move
missing = append(missing, other)
}
case !s.AcceptEntry(ctx, other.Base().Path, info):
case !s.AcceptEntry(ctx, other.Base().Path, info, zipFilePath):
// #4393 - if the file is no longer in the configured library paths, treat it as a move
logger.Debugf("File %q no longer in library paths. Treating as a move.", other.Base().Path)
missing = append(missing, other)

View file

@ -53,7 +53,9 @@ func NewStashIgnoreFilter() *StashIgnoreFilter {
// applies gitignore-style pattern matching.
// The libraryRoot parameter bounds the search for .stashignore files -
// only directories within the library root are checked.
func (f *StashIgnoreFilter) Accept(ctx context.Context, path string, info fs.FileInfo, libraryRoot string) bool {
// zipFilepath is the path of the zip file if the file is inside a zip.
// .stashignore files will not be read within zip files.
func (f *StashIgnoreFilter) Accept(ctx context.Context, path string, info fs.FileInfo, libraryRoot string, zipFilePath string) bool {
// If no library root provided, accept the file (safety fallback).
if libraryRoot == "" {
return true
@ -62,6 +64,11 @@ func (f *StashIgnoreFilter) Accept(ctx context.Context, path string, info fs.Fil
// Get the directory containing this path.
dir := filepath.Dir(path)
// If the file is inside a zip, use the zip file's directory as the base for .stashignore lookup.
if zipFilePath != "" {
dir = filepath.Dir(zipFilePath)
}
// Collect all applicable ignore entries from library root to this directory.
entries := f.collectIgnoreEntries(dir, libraryRoot)
@ -135,6 +142,8 @@ func (f *StashIgnoreFilter) collectIgnoreEntries(dir string, libraryRoot string)
current := dir
for {
// Check if we're still within the library root.
// nolint:staticcheck // QF1006 - we could make this the for condition
// but I don't think it improves readability
if !isPathInOrEqual(libraryRoot, current) {
break
}

View file

@ -64,7 +64,7 @@ func walkAndFilter(t *testing.T, root string, filter *StashIgnoreFilter) []strin
return err
}
if filter.Accept(ctx, path, info, root) {
if filter.Accept(ctx, path, info, root, "") {
relPath, _ := filepath.Rel(root, path)
accepted = append(accepted, relPath)
} else if info.IsDir() {

View file

@ -148,7 +148,7 @@ func Touch(path string) error {
var (
replaceCharsRE = regexp.MustCompile(`[&=\\/:*"?_ ]`)
removeCharsRE = regexp.MustCompile(`[^[:alnum:]-.]`)
removeCharsRE = regexp.MustCompile(`[^\p{L}\p{N}\-.]`)
multiHyphenRE = regexp.MustCompile(`\-+`)
)

View file

@ -15,6 +15,9 @@ func TestSanitiseBasename(t *testing.T) {
{"multi-hyphen", `hyphened--name`, "hyphened-name-2da2a58f"},
{"replaced characters", `a&b=c\d/:e*"f?_ g`, "a-b-c-d-e-f-g-ffca6fb0"},
{"removed characters", `foo!!bar@@and, more`, "foobarand-more-7cee02ab"},
{"unicode cjk", `テスト`, "テスト-63b560db"},
{"unicode korean", `시험`, "시험-3fcc7beb"},
{"mixed unicode", `Test テスト`, "Test-テスト-366aff1e"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {

View file

@ -24,7 +24,6 @@ type ScanCreatorUpdater interface {
type ScanSceneFinderUpdater interface {
FindByPath(ctx context.Context, p string) ([]*models.Scene, error)
Update(ctx context.Context, updatedScene *models.Scene) error
AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error
}

View file

@ -7,6 +7,7 @@ import (
"os"
"path/filepath"
"slices"
"strings"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
@ -39,6 +40,11 @@ type GalleryFinderCreator interface {
UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error)
}
type ScanSceneFinderUpdater interface {
FindByPath(ctx context.Context, p string) ([]*models.Scene, error)
AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error
}
type ScanConfig interface {
GetCreateGalleriesFromFolders() bool
}
@ -48,8 +54,9 @@ type ScanGenerator interface {
}
type ScanHandler struct {
CreatorUpdater ScanCreatorUpdater
GalleryFinder GalleryFinderCreator
CreatorUpdater ScanCreatorUpdater
GalleryFinder GalleryFinderCreator
SceneFinderUpdater ScanSceneFinderUpdater
ScanGenerator ScanGenerator
@ -62,19 +69,19 @@ type ScanHandler struct {
func (h *ScanHandler) validate() error {
if h.CreatorUpdater == nil {
return errors.New("CreatorUpdater is required")
return errors.New("internal error: CreatorUpdater is required")
}
if h.ScanGenerator == nil {
return errors.New("ScanGenerator is required")
return errors.New("internal error: ScanGenerator is required")
}
if h.GalleryFinder == nil {
return errors.New("GalleryFinder is required")
return errors.New("internal error: GalleryFinder is required")
}
if h.ScanConfig == nil {
return errors.New("ScanConfig is required")
return errors.New("internal error: ScanConfig is required")
}
if h.Paths == nil {
return errors.New("Paths is required")
return errors.New("internal error: Paths is required")
}
return nil
@ -322,11 +329,39 @@ func (h *ScanHandler) getOrCreateZipBasedGallery(ctx context.Context, zipFile mo
return nil, fmt.Errorf("creating zip-based gallery: %w", err)
}
// try to associate with scene
if err := h.associateScene(ctx, &newGallery, zipFile); err != nil {
return nil, fmt.Errorf("associating scene: %w", err)
}
h.PluginCache.RegisterPostHooks(ctx, newGallery.ID, hook.GalleryCreatePost, nil, nil)
return &newGallery, nil
}
// associateScene links the given gallery to any scenes whose file path
// matches the gallery's zip file path with any other extension
// (e.g. "foo.zip" matches "foo.mp4").
func (h *ScanHandler) associateScene(ctx context.Context, existing *models.Gallery, zipFile models.File) error {
	zipPath := zipFile.Base().Path
	pattern := strings.TrimSuffix(zipPath, filepath.Ext(zipPath)) + ".*"

	// find scenes with a file that matches
	scenes, err := h.SceneFinderUpdater.FindByPath(ctx, pattern)
	if err != nil {
		return err
	}

	toAdd := []int{existing.ID}
	for _, scene := range scenes {
		// found related Scene
		logger.Infof("associate: Gallery %s is related to scene: %d", zipPath, scene.ID)
		if err := h.SceneFinderUpdater.AddGalleryIDs(ctx, scene.ID, toAdd); err != nil {
			return err
		}
	}

	return nil
}
func (h *ScanHandler) getOrCreateGallery(ctx context.Context, f models.File) (*models.Gallery, error) {
// don't create folder-based galleries for files in zip file
if f.Base().ZipFile != nil {
@ -340,13 +375,13 @@ func (h *ScanHandler) getOrCreateGallery(ctx context.Context, f models.File) (*m
if _, err := os.Stat(filepath.Join(folderPath, ".forcegallery")); err == nil {
forceGallery = true
} else if !errors.Is(err, os.ErrNotExist) {
return nil, fmt.Errorf("Could not test Path %s: %w", folderPath, err)
return nil, fmt.Errorf("could not test Path %s: %w", folderPath, err)
}
exemptGallery := false
if _, err := os.Stat(filepath.Join(folderPath, ".nogallery")); err == nil {
exemptGallery = true
} else if !errors.Is(err, os.ErrNotExist) {
return nil, fmt.Errorf("Could not test Path %s: %w", folderPath, err)
return nil, fmt.Errorf("could not test Path %s: %w", folderPath, err)
}
if forceGallery || (h.ScanConfig.GetCreateGalleriesFromFolders() && !exemptGallery) {

View file

@ -66,6 +66,23 @@ type Job struct {
cancelFunc context.CancelFunc
}
// statusCopy returns a trimmed copy of the Job containing only the
// fields relevant to status reporting. The executor, cancel function
// and outer context are deliberately left out so that copies delivered
// to subscription channels do not retain heavy resources.
func (j *Job) statusCopy() Job {
	var c Job
	c.ID = j.ID
	c.Status = j.Status
	c.Details = j.Details
	c.Description = j.Description
	c.Progress = j.Progress
	c.StartTime = j.StartTime
	c.EndTime = j.EndTime
	c.AddTime = j.AddTime
	c.Error = j.Error
	return c
}
// TimeElapsed returns the total time elapsed for the job.
// If the EndTime is set, then it uses this to calculate the elapsed time, otherwise it uses time.Now.
func (j *Job) TimeElapsed() time.Duration {
@ -80,9 +97,10 @@ func (j *Job) TimeElapsed() time.Duration {
}
func (j *Job) cancel() {
if j.Status == StatusReady {
switch j.Status {
case StatusReady:
j.Status = StatusCancelled
} else if j.Status == StatusRunning {
case StatusRunning:
j.Status = StatusStopping
}

View file

@ -105,7 +105,7 @@ func (m *Manager) notifyNewJob(j *Job) {
for _, s := range m.subscriptions {
// don't block if channel is full
select {
case s.newJob <- *j:
case s.newJob <- j.statusCopy():
default:
}
}
@ -232,7 +232,9 @@ func (m *Manager) removeJob(job *Job) {
return
}
// clear any subtasks
// release the executor and subtask details so they can be GC'd
// while the job remains in the graveyard for status reporting
job.exec = nil
job.Details = nil
m.queue = append(m.queue[:index], m.queue[index+1:]...)
@ -246,7 +248,7 @@ func (m *Manager) removeJob(job *Job) {
for _, s := range m.subscriptions {
// don't block if channel is full
select {
case s.removedJob <- *job:
case s.removedJob <- job.statusCopy():
default:
}
}
@ -310,8 +312,7 @@ func (m *Manager) GetJob(id int) *Job {
// get from the queue or graveyard
_, j := m.getJob(append(m.queue, m.graveyard...), id)
if j != nil {
// make a copy of the job and return the pointer
jCopy := *j
jCopy := j.statusCopy()
return &jCopy
}
@ -326,8 +327,7 @@ func (m *Manager) GetQueue() []Job {
var ret []Job
for _, j := range m.queue {
jCopy := *j
ret = append(ret, jCopy)
ret = append(ret, j.statusCopy())
}
return ret
@ -372,7 +372,7 @@ func (m *Manager) notifyJobUpdate(j *Job) {
for _, s := range m.subscriptions {
// don't block if channel is full
select {
case s.updatedJob <- *j:
case s.updatedJob <- j.statusCopy():
default:
}
}

View file

@ -51,7 +51,7 @@ func (tq *TaskQueue) executer(ctx context.Context) {
defer tq.wg.Wait()
for task := range tq.tasks {
if IsCancelled(ctx) {
return
continue // allow channel to continue draining until Close()
}
tt := task

View file

@ -22,7 +22,7 @@ type GroupNamesFinder interface {
type SceneRelationships struct {
PerformerFinder PerformerFinder
TagFinder models.TagQueryer
TagFinder models.TagNameFinder
StudioFinder StudioFinder
}
@ -188,9 +188,23 @@ func ScrapedGroup(ctx context.Context, qb GroupNamesFinder, storedID *string, na
return
}
// ScrapedTagHierarchy matches the provided tag against the database via
// ScrapedTag, then does the same for its parent tag, if any.
func ScrapedTagHierarchy(ctx context.Context, qb models.TagNameFinder, s *models.ScrapedTag, stashBoxEndpoint string) error {
	if err := ScrapedTag(ctx, qb, s, stashBoxEndpoint); err != nil {
		return err
	}

	if parent := s.Parent; parent != nil {
		// Match parent by name only (categories don't have StashDB tag IDs)
		return ScrapedTag(ctx, qb, parent, "")
	}

	return nil
}
// ScrapedTag matches the provided tag with the tags
// in the database and sets the ID field if one is found.
func ScrapedTag(ctx context.Context, qb models.TagQueryer, s *models.ScrapedTag, stashBoxEndpoint string) error {
func ScrapedTag(ctx context.Context, qb models.TagNameFinder, s *models.ScrapedTag, stashBoxEndpoint string) error {
if s.StoredID != nil {
return nil
}

View file

@ -2,6 +2,7 @@ package models
import (
"fmt"
"strings"
"time"
"github.com/stashapp/stash/pkg/utils"
@ -61,3 +62,114 @@ func ParseDate(s string) (Date, error) {
return Date{}, fmt.Errorf("failed to parse date %q: %v", s, errs)
}
// DateFromYear returns a Date for January 1st of the given year, with
// year-level precision.
func DateFromYear(year int) Date {
	t := time.Date(year, time.January, 1, 0, 0, 0, 0, time.UTC)
	return Date{Time: t, Precision: DatePrecisionYear}
}
// FormatYearRange formats the given start and end dates as a year range
// string. A nil start or end yields an open-ended range ("YYYY -" or
// "- YYYY"); if both are nil the empty string is returned.
func FormatYearRange(start *Date, end *Date) string {
	yearFmt := dateFormatPrecision[DatePrecisionYear]

	startStr := ""
	if start != nil {
		startStr = start.Format(yearFmt)
	}

	endStr := ""
	if end != nil {
		endStr = end.Format(yearFmt)
	}

	if startStr == "" && endStr == "" {
		return ""
	}
	if endStr == "" {
		return fmt.Sprintf("%s -", startStr)
	}
	if startStr == "" {
		return fmt.Sprintf("- %s", endStr)
	}
	return fmt.Sprintf("%s - %s", startStr, endStr)
}
// FormatYearRangeString formats the given start and end year strings as a
// year range. A nil start or end yields an open-ended range ("YYYY -" or
// "- YYYY"); if both are nil the empty string is returned.
func FormatYearRangeString(start *string, end *string) string {
	if start == nil && end == nil {
		return ""
	}
	if start == nil {
		return fmt.Sprintf("- %s", *end)
	}
	if end == nil {
		return fmt.Sprintf("%s -", *start)
	}
	return fmt.Sprintf("%s - %s", *start, *end)
}
// ParseYearRangeString parses a year range string into start and end Dates.
// Supported formats: "YYYY", "YYYY - YYYY", "YYYY-YYYY", "YYYY -", "- YYYY",
// "YYYY-present". A nil start or end is returned when that side is absent.
// An error is returned for empty input or when neither side parses.
func ParseYearRangeString(s string) (start *Date, end *Date, err error) {
	s = strings.TrimSpace(s)
	if s == "" {
		return nil, nil, fmt.Errorf("empty year range string")
	}

	// normalize "present" to empty end
	normalized := strings.ReplaceAll(strings.ToLower(s), "present", "")

	before, after, hasDash := strings.Cut(normalized, "-")
	if !hasDash {
		// single value, treat as start year
		year, parseErr := parseYear(normalized)
		if parseErr != nil {
			return nil, nil, fmt.Errorf("invalid year range %q: %w", s, parseErr)
		}
		return year, nil, nil
	}

	if v := strings.TrimSpace(before); v != "" {
		start, err = parseYear(v)
		if err != nil {
			return nil, nil, fmt.Errorf("invalid start year in %q: %w", s, err)
		}
	}

	if v := strings.TrimSpace(after); v != "" {
		end, err = parseYear(v)
		if err != nil {
			return nil, nil, fmt.Errorf("invalid end year in %q: %w", s, err)
		}
	}

	if start == nil && end == nil {
		return nil, nil, fmt.Errorf("could not parse year range %q", s)
	}

	return start, end, nil
}
// parseYear parses a single year string into a Date via ParseDate,
// rejecting years outside 1900-2200 as implausible.
func parseYear(s string) (*Date, error) {
	d, err := ParseDate(s)
	if err != nil {
		return nil, fmt.Errorf("parsing year %q: %w", s, err)
	}

	if y := d.Time.Year(); y < 1900 || y > 2200 {
		return nil, fmt.Errorf("year %d out of reasonable range", y)
	}

	return &d, nil
}

View file

@ -3,6 +3,8 @@ package models
import (
"testing"
"time"
"github.com/stretchr/testify/assert"
)
func TestParseDateStringAsTime(t *testing.T) {
@ -48,3 +50,102 @@ func TestParseDateStringAsTime(t *testing.T) {
})
}
}
// TestFormatYearRange exercises FormatYearRange with every nil/non-nil
// combination of start and end dates.
func TestFormatYearRange(t *testing.T) {
	year := func(v int) *Date {
		d := DateFromYear(v)
		return &d
	}

	cases := []struct {
		name  string
		start *Date
		end   *Date
		want  string
	}{
		{"both nil", nil, nil, ""},
		{"only start", year(2005), nil, "2005 -"},
		{"only end", nil, year(2010), "- 2010"},
		{"start and end", year(2005), year(2010), "2005 - 2010"},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.want, FormatYearRange(tc.start, tc.end))
		})
	}
}
func TestFormatYearRangeString(t *testing.T) {
stringPtr := func(v string) *string { return &v }
tests := []struct {
name string
start *string
end *string
want string
}{
{"both nil", nil, nil, ""},
{"only start", stringPtr("2005"), nil, "2005 -"},
{"only end", nil, stringPtr("2010"), "- 2010"},
{"start and end", stringPtr("2005"), stringPtr("2010"), "2005 - 2010"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := FormatYearRangeString(tt.start, tt.end)
assert.Equal(t, tt.want, got)
})
}
}
// TestParseYearRangeString exercises ParseYearRangeString across the
// supported formats ("YYYY", "YYYY - YYYY", "YYYY-present", open-ended
// ranges) as well as malformed and out-of-range inputs.
func TestParseYearRangeString(t *testing.T) {
	intPtr := func(v int) *int { return &v }

	tests := []struct {
		name      string
		input     string
		wantStart *int // expected start year, nil when absent
		wantEnd   *int // expected end year, nil when absent
		wantErr   bool
	}{
		{"single year", "2005", intPtr(2005), nil, false},
		{"year range with spaces", "2005 - 2010", intPtr(2005), intPtr(2010), false},
		{"year range no spaces", "2005-2010", intPtr(2005), intPtr(2010), false},
		{"year dash open", "2005 -", intPtr(2005), nil, false},
		{"year dash open no space", "2005-", intPtr(2005), nil, false},
		{"dash year", "- 2010", nil, intPtr(2010), false},
		{"year present", "2005-present", intPtr(2005), nil, false},
		{"year Present caps", "2005 - Present", intPtr(2005), nil, false},
		{"whitespace padding", "  2005 - 2010  ", intPtr(2005), intPtr(2010), false},
		{"empty string", "", nil, nil, true},
		{"garbage", "not a year", nil, nil, true},
		{"partial garbage start", "abc - 2010", nil, nil, true},
		{"partial garbage end", "2005 - abc", nil, nil, true},
		{"year out of range", "1800", nil, nil, true},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			start, end, err := ParseYearRangeString(tt.input)

			if tt.wantErr {
				assert.Error(t, err)
				return
			}

			assert.NoError(t, err)

			// only the year component of the returned Dates is asserted
			if tt.wantStart != nil {
				assert.NotNil(t, start)
				assert.Equal(t, *tt.wantStart, start.Time.Year())
			} else {
				assert.Nil(t, start)
			}

			if tt.wantEnd != nil {
				assert.NotNil(t, end)
				assert.Equal(t, *tt.wantEnd, end.Time.Year())
			} else {
				assert.Nil(t, end)
			}
		})
	}
}

View file

@ -49,8 +49,8 @@ type Performer struct {
PenisLength float64 `json:"penis_length,omitempty"`
Circumcised string `json:"circumcised,omitempty"`
CareerLength string `json:"career_length,omitempty"` // deprecated - for import only
CareerStart *int `json:"career_start,omitempty"`
CareerEnd *int `json:"career_end,omitempty"`
CareerStart string `json:"career_start,omitempty"`
CareerEnd string `json:"career_end,omitempty"`
Tattoos string `json:"tattoos,omitempty"`
Piercings string `json:"piercings,omitempty"`
Aliases StringOrStringList `json:"aliases,omitempty"`

View file

@ -153,13 +153,13 @@ func (_m *FileReaderWriter) FindAllByPath(ctx context.Context, path string, case
return r0, r1
}
// FindAllInPaths provides a mock function with given fields: ctx, p, limit, offset
func (_m *FileReaderWriter) FindAllInPaths(ctx context.Context, p []string, limit int, offset int) ([]models.File, error) {
ret := _m.Called(ctx, p, limit, offset)
// FindAllInPaths provides a mock function with given fields: ctx, p, includeZipContents, limit, offset
func (_m *FileReaderWriter) FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit int, offset int) ([]models.File, error) {
ret := _m.Called(ctx, p, includeZipContents, limit, offset)
var r0 []models.File
if rf, ok := ret.Get(0).(func(context.Context, []string, int, int) []models.File); ok {
r0 = rf(ctx, p, limit, offset)
if rf, ok := ret.Get(0).(func(context.Context, []string, bool, int, int) []models.File); ok {
r0 = rf(ctx, p, includeZipContents, limit, offset)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]models.File)
@ -167,8 +167,8 @@ func (_m *FileReaderWriter) FindAllInPaths(ctx context.Context, p []string, limi
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, []string, int, int) error); ok {
r1 = rf(ctx, p, limit, offset)
if rf, ok := ret.Get(1).(func(context.Context, []string, bool, int, int) error); ok {
r1 = rf(ctx, p, includeZipContents, limit, offset)
} else {
r1 = ret.Error(1)
}

View file

@ -86,13 +86,13 @@ func (_m *FolderReaderWriter) Find(ctx context.Context, id models.FolderID) (*mo
return r0, r1
}
// FindAllInPaths provides a mock function with given fields: ctx, p, limit, offset
func (_m *FolderReaderWriter) FindAllInPaths(ctx context.Context, p []string, limit int, offset int) ([]*models.Folder, error) {
ret := _m.Called(ctx, p, limit, offset)
// FindAllInPaths provides a mock function with given fields: ctx, p, includeZipContents, limit, offset
func (_m *FolderReaderWriter) FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit int, offset int) ([]*models.Folder, error) {
ret := _m.Called(ctx, p, includeZipContents, limit, offset)
var r0 []*models.Folder
if rf, ok := ret.Get(0).(func(context.Context, []string, int, int) []*models.Folder); ok {
r0 = rf(ctx, p, limit, offset)
if rf, ok := ret.Get(0).(func(context.Context, []string, bool, int, int) []*models.Folder); ok {
r0 = rf(ctx, p, includeZipContents, limit, offset)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Folder)
@ -100,8 +100,8 @@ func (_m *FolderReaderWriter) FindAllInPaths(ctx context.Context, p []string, li
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, []string, int, int) error); ok {
r1 = rf(ctx, p, limit, offset)
if rf, ok := ret.Get(1).(func(context.Context, []string, bool, int, int) error); ok {
r1 = rf(ctx, p, includeZipContents, limit, offset)
} else {
r1 = ret.Error(1)
}
@ -224,6 +224,29 @@ func (_m *FolderReaderWriter) GetManyParentFolderIDs(ctx context.Context, folder
return r0, r1
}
// GetManySubFolderIDs provides a mock function with given fields: ctx, folderIDs
// (mockery-style mock: when a function was registered for a return slot it is
// invoked with the call arguments; otherwise the recorded value is returned)
func (_m *FolderReaderWriter) GetManySubFolderIDs(ctx context.Context, folderIDs []models.FolderID) ([][]models.FolderID, error) {
	ret := _m.Called(ctx, folderIDs)

	// first return value: registered function, or recorded value
	// (guarding against an untyped nil before the type assertion)
	var r0 [][]models.FolderID
	if rf, ok := ret.Get(0).(func(context.Context, []models.FolderID) [][]models.FolderID); ok {
		r0 = rf(ctx, folderIDs)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([][]models.FolderID)
		}
	}

	// second return value: registered error function, or recorded error
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, []models.FolderID) error); ok {
		r1 = rf(ctx, folderIDs)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}
// Query provides a mock function with given fields: ctx, options
func (_m *FolderReaderWriter) Query(ctx context.Context, options models.FolderQueryOptions) (*models.FolderQueryResult, error) {
ret := _m.Called(ctx, options)

View file

@ -49,6 +49,20 @@ func (_m *GalleryReaderWriter) AddImages(ctx context.Context, galleryID int, ima
return r0
}
// AddSceneIDs provides a mock function with given fields: ctx, galleryID, sceneIDs
// (mockery-style mock: a registered return function is invoked with the call
// arguments; otherwise the recorded error value is returned)
func (_m *GalleryReaderWriter) AddSceneIDs(ctx context.Context, galleryID int, sceneIDs []int) error {
	ret := _m.Called(ctx, galleryID, sceneIDs)

	var r0 error
	if rf, ok := ret.Get(0).(func(context.Context, int, []int) error); ok {
		r0 = rf(ctx, galleryID, sceneIDs)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}
// All provides a mock function with given fields: ctx
func (_m *GalleryReaderWriter) All(ctx context.Context) ([]*models.Gallery, error) {
ret := _m.Called(ctx)

View file

@ -197,6 +197,29 @@ func (_m *TagReaderWriter) FindAllDescendants(ctx context.Context, tagID int, ex
return r0, r1
}
// FindByAlias provides a mock function with given fields: ctx, alias, nocase
// (mockery-style mock: when a function was registered for a return slot it is
// invoked with the call arguments; otherwise the recorded value is returned)
func (_m *TagReaderWriter) FindByAlias(ctx context.Context, alias string, nocase bool) (*models.Tag, error) {
	ret := _m.Called(ctx, alias, nocase)

	// first return value: registered function, or recorded value
	// (guarding against an untyped nil before the type assertion)
	var r0 *models.Tag
	if rf, ok := ret.Get(0).(func(context.Context, string, bool) *models.Tag); ok {
		r0 = rf(ctx, alias, nocase)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*models.Tag)
		}
	}

	// second return value: registered error function, or recorded error
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, string, bool) error); ok {
		r1 = rf(ctx, alias, nocase)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}
// FindByChildTagID provides a mock function with given fields: ctx, childID
func (_m *TagReaderWriter) FindByChildTagID(ctx context.Context, childID int) ([]*models.Tag, error) {
ret := _m.Called(ctx, childID)

View file

@ -6,26 +6,26 @@ import (
)
type Performer struct {
ID int `json:"id"`
Name string `json:"name"`
Disambiguation string `json:"disambiguation"`
Gender *GenderEnum `json:"gender"`
Birthdate *Date `json:"birthdate"`
Ethnicity string `json:"ethnicity"`
Country string `json:"country"`
EyeColor string `json:"eye_color"`
Height *int `json:"height"`
Measurements string `json:"measurements"`
FakeTits string `json:"fake_tits"`
PenisLength *float64 `json:"penis_length"`
Circumcised *CircumisedEnum `json:"circumcised"`
CareerStart *int `json:"career_start"`
CareerEnd *int `json:"career_end"`
Tattoos string `json:"tattoos"`
Piercings string `json:"piercings"`
Favorite bool `json:"favorite"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
ID int `json:"id"`
Name string `json:"name"`
Disambiguation string `json:"disambiguation"`
Gender *GenderEnum `json:"gender"`
Birthdate *Date `json:"birthdate"`
Ethnicity string `json:"ethnicity"`
Country string `json:"country"`
EyeColor string `json:"eye_color"`
Height *int `json:"height"`
Measurements string `json:"measurements"`
FakeTits string `json:"fake_tits"`
PenisLength *float64 `json:"penis_length"`
Circumcised *CircumcisedEnum `json:"circumcised"`
CareerStart *Date `json:"career_start"`
CareerEnd *Date `json:"career_end"`
Tattoos string `json:"tattoos"`
Piercings string `json:"piercings"`
Favorite bool `json:"favorite"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
// Rating expressed in 1-100 scale
Rating *int `json:"rating"`
Details string `json:"details"`
@ -76,8 +76,8 @@ type PerformerPartial struct {
FakeTits OptionalString
PenisLength OptionalFloat64
Circumcised OptionalString
CareerStart OptionalInt
CareerEnd OptionalInt
CareerStart OptionalDate
CareerEnd OptionalDate
Tattoos OptionalString
Piercings OptionalString
Favorite OptionalBool

View file

@ -177,8 +177,8 @@ type ScrapedPerformer struct {
PenisLength *string `json:"penis_length"`
Circumcised *string `json:"circumcised"`
CareerLength *string `json:"career_length"` // deprecated: use CareerStart/CareerEnd
CareerStart *int `json:"career_start"`
CareerEnd *int `json:"career_end"`
CareerStart *string `json:"career_start"`
CareerEnd *string `json:"career_end"`
Tattoos *string `json:"tattoos"`
Piercings *string `json:"piercings"`
Aliases *string `json:"aliases"`
@ -225,12 +225,16 @@ func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool
// assume that career length is _not_ populated in favour of start/end
if p.CareerStart != nil && !excluded["career_start"] {
cs := *p.CareerStart
ret.CareerStart = &cs
date, err := ParseDate(*p.CareerStart)
if err == nil {
ret.CareerStart = &date
}
}
if p.CareerEnd != nil && !excluded["career_end"] {
ce := *p.CareerEnd
ret.CareerEnd = &ce
date, err := ParseDate(*p.CareerEnd)
if err == nil {
ret.CareerEnd = &date
}
}
if p.Country != nil && !excluded["country"] {
ret.Country = *p.Country
@ -288,7 +292,7 @@ func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool
}
}
if p.Circumcised != nil && !excluded["circumcised"] {
v := CircumisedEnum(*p.Circumcised)
v := CircumcisedEnum(*p.Circumcised)
if v.IsValid() {
ret.Circumcised = &v
}
@ -367,13 +371,13 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool,
}
if p.CareerLength != nil && !excluded["career_length"] {
// parse career_length into career_start/career_end
start, end, err := utils.ParseYearRangeString(*p.CareerLength)
start, end, err := ParseYearRangeString(*p.CareerLength)
if err == nil {
if start != nil {
ret.CareerStart = NewOptionalInt(*start)
ret.CareerStart = NewOptionalDate(*start)
}
if end != nil {
ret.CareerEnd = NewOptionalInt(*end)
ret.CareerEnd = NewOptionalDate(*end)
}
}
}
@ -471,11 +475,12 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool,
type ScrapedTag struct {
// Set if tag matched
StoredID *string `json:"stored_id"`
Name string `json:"name"`
Description *string `json:"description"`
AliasList []string `json:"alias_list"`
RemoteSiteID *string `json:"remote_site_id"`
StoredID *string `json:"stored_id"`
Name string `json:"name"`
Description *string `json:"description"`
AliasList []string `json:"alias_list"`
RemoteSiteID *string `json:"remote_site_id"`
Parent *ScrapedTag `json:"parent"`
}
func (ScrapedTag) IsScrapedContent() {}
@ -496,6 +501,13 @@ func (t *ScrapedTag) ToTag(endpoint string, excluded map[string]bool) *Tag {
ret.Aliases = NewRelatedStrings(t.AliasList)
}
if t.Parent != nil && t.Parent.StoredID != nil {
parentID, err := strconv.Atoi(*t.Parent.StoredID)
if err == nil && parentID > 0 {
ret.ParentIDs = NewRelatedIDs([]int{parentID})
}
}
if t.RemoteSiteID != nil && endpoint != "" && *t.RemoteSiteID != "" {
ret.StashIDs = NewRelatedStashIDs([]StashID{
{
@ -527,6 +539,16 @@ func (t *ScrapedTag) ToPartial(storedID string, endpoint string, excluded map[st
}
}
if t.Parent != nil && t.Parent.StoredID != nil {
parentID, err := strconv.Atoi(*t.Parent.StoredID)
if err == nil && parentID > 0 {
ret.ParentIDs = &UpdateIDs{
IDs: []int{parentID},
Mode: RelationshipUpdateModeAdd,
}
}
}
if t.RemoteSiteID != nil && endpoint != "" && *t.RemoteSiteID != "" {
ret.StashIDs = &UpdateStashIDs{
StashIDs: existingStashIDs,

View file

@ -8,8 +8,6 @@ import (
"github.com/stretchr/testify/assert"
)
func intPtr(i int) *int { return &i }
func Test_scrapedToStudioInput(t *testing.T) {
const name = "name"
url := "url"
@ -186,8 +184,8 @@ func Test_scrapedToPerformerInput(t *testing.T) {
Weight: nextVal(),
Measurements: nextVal(),
FakeTits: nextVal(),
CareerStart: intPtr(2005),
CareerEnd: intPtr(2015),
CareerStart: dateStrFromInt(2005),
CareerEnd: dateStrFromInt(2015),
Tattoos: nextVal(),
Piercings: nextVal(),
Aliases: nextVal(),
@ -212,8 +210,8 @@ func Test_scrapedToPerformerInput(t *testing.T) {
Weight: nextIntVal(),
Measurements: *nextVal(),
FakeTits: *nextVal(),
CareerStart: intPtr(2005),
CareerEnd: intPtr(2015),
CareerStart: dateFromInt(2005),
CareerEnd: dateFromInt(2015),
Tattoos: *nextVal(), // skip CareerLength counter slot
Piercings: *nextVal(),
Aliases: NewRelatedStrings([]string{*nextVal()}),

View file

@ -61,49 +61,49 @@ type GenderCriterionInput struct {
Modifier CriterionModifier `json:"modifier"`
}
type CircumisedEnum string
type CircumcisedEnum string
const (
CircumisedEnumCut CircumisedEnum = "CUT"
CircumisedEnumUncut CircumisedEnum = "UNCUT"
CircumcisedEnumCut CircumcisedEnum = "CUT"
CircumcisedEnumUncut CircumcisedEnum = "UNCUT"
)
var AllCircumcisionEnum = []CircumisedEnum{
CircumisedEnumCut,
CircumisedEnumUncut,
var AllCircumcisionEnum = []CircumcisedEnum{
CircumcisedEnumCut,
CircumcisedEnumUncut,
}
func (e CircumisedEnum) IsValid() bool {
func (e CircumcisedEnum) IsValid() bool {
switch e {
case CircumisedEnumCut, CircumisedEnumUncut:
case CircumcisedEnumCut, CircumcisedEnumUncut:
return true
}
return false
}
func (e CircumisedEnum) String() string {
func (e CircumcisedEnum) String() string {
return string(e)
}
func (e *CircumisedEnum) UnmarshalGQL(v interface{}) error {
func (e *CircumcisedEnum) UnmarshalGQL(v interface{}) error {
str, ok := v.(string)
if !ok {
return fmt.Errorf("enums must be strings")
}
*e = CircumisedEnum(str)
*e = CircumcisedEnum(str)
if !e.IsValid() {
return fmt.Errorf("%s is not a valid CircumisedEnum", str)
return fmt.Errorf("%s is not a valid CircumcisedEnum", str)
}
return nil
}
func (e CircumisedEnum) MarshalGQL(w io.Writer) {
func (e CircumcisedEnum) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String()))
}
type CircumcisionCriterionInput struct {
Value []CircumisedEnum `json:"value"`
Value []CircumcisedEnum `json:"value"`
Modifier CriterionModifier `json:"modifier"`
}
@ -139,9 +139,9 @@ type PerformerFilterType struct {
// Filter by career length
CareerLength *StringCriterionInput `json:"career_length"` // deprecated
// Filter by career start year
CareerStart *IntCriterionInput `json:"career_start"`
CareerStart *DateCriterionInput `json:"career_start"`
// Filter by career end year
CareerEnd *IntCriterionInput `json:"career_end"`
CareerEnd *DateCriterionInput `json:"career_end"`
// Filter by tattoos
Tattoos *StringCriterionInput `json:"tattoos"`
// Filter by piercings
@ -216,32 +216,32 @@ type PerformerFilterType struct {
}
type PerformerCreateInput struct {
Name string `json:"name"`
Disambiguation *string `json:"disambiguation"`
URL *string `json:"url"` // deprecated
Urls []string `json:"urls"`
Gender *GenderEnum `json:"gender"`
Birthdate *string `json:"birthdate"`
Ethnicity *string `json:"ethnicity"`
Country *string `json:"country"`
EyeColor *string `json:"eye_color"`
Height *string `json:"height"`
HeightCm *int `json:"height_cm"`
Measurements *string `json:"measurements"`
FakeTits *string `json:"fake_tits"`
PenisLength *float64 `json:"penis_length"`
Circumcised *CircumisedEnum `json:"circumcised"`
CareerLength *string `json:"career_length"`
CareerStart *int `json:"career_start"`
CareerEnd *int `json:"career_end"`
Tattoos *string `json:"tattoos"`
Piercings *string `json:"piercings"`
Aliases *string `json:"aliases"`
AliasList []string `json:"alias_list"`
Twitter *string `json:"twitter"` // deprecated
Instagram *string `json:"instagram"` // deprecated
Favorite *bool `json:"favorite"`
TagIds []string `json:"tag_ids"`
Name string `json:"name"`
Disambiguation *string `json:"disambiguation"`
URL *string `json:"url"` // deprecated
Urls []string `json:"urls"`
Gender *GenderEnum `json:"gender"`
Birthdate *string `json:"birthdate"`
Ethnicity *string `json:"ethnicity"`
Country *string `json:"country"`
EyeColor *string `json:"eye_color"`
Height *string `json:"height"`
HeightCm *int `json:"height_cm"`
Measurements *string `json:"measurements"`
FakeTits *string `json:"fake_tits"`
PenisLength *float64 `json:"penis_length"`
Circumcised *CircumcisedEnum `json:"circumcised"`
CareerLength *string `json:"career_length"`
CareerStart *string `json:"career_start"`
CareerEnd *string `json:"career_end"`
Tattoos *string `json:"tattoos"`
Piercings *string `json:"piercings"`
Aliases *string `json:"aliases"`
AliasList []string `json:"alias_list"`
Twitter *string `json:"twitter"` // deprecated
Instagram *string `json:"instagram"` // deprecated
Favorite *bool `json:"favorite"`
TagIds []string `json:"tag_ids"`
// This should be a URL or a base64 encoded data URL
Image *string `json:"image"`
StashIds []StashIDInput `json:"stash_ids"`
@ -256,33 +256,33 @@ type PerformerCreateInput struct {
}
type PerformerUpdateInput struct {
ID string `json:"id"`
Name *string `json:"name"`
Disambiguation *string `json:"disambiguation"`
URL *string `json:"url"` // deprecated
Urls []string `json:"urls"`
Gender *GenderEnum `json:"gender"`
Birthdate *string `json:"birthdate"`
Ethnicity *string `json:"ethnicity"`
Country *string `json:"country"`
EyeColor *string `json:"eye_color"`
Height *string `json:"height"`
HeightCm *int `json:"height_cm"`
Measurements *string `json:"measurements"`
FakeTits *string `json:"fake_tits"`
PenisLength *float64 `json:"penis_length"`
Circumcised *CircumisedEnum `json:"circumcised"`
CareerLength *string `json:"career_length"`
CareerStart *int `json:"career_start"`
CareerEnd *int `json:"career_end"`
Tattoos *string `json:"tattoos"`
Piercings *string `json:"piercings"`
Aliases *string `json:"aliases"`
AliasList []string `json:"alias_list"`
Twitter *string `json:"twitter"` // deprecated
Instagram *string `json:"instagram"` // deprecated
Favorite *bool `json:"favorite"`
TagIds []string `json:"tag_ids"`
ID string `json:"id"`
Name *string `json:"name"`
Disambiguation *string `json:"disambiguation"`
URL *string `json:"url"` // deprecated
Urls []string `json:"urls"`
Gender *GenderEnum `json:"gender"`
Birthdate *string `json:"birthdate"`
Ethnicity *string `json:"ethnicity"`
Country *string `json:"country"`
EyeColor *string `json:"eye_color"`
Height *string `json:"height"`
HeightCm *int `json:"height_cm"`
Measurements *string `json:"measurements"`
FakeTits *string `json:"fake_tits"`
PenisLength *float64 `json:"penis_length"`
Circumcised *CircumcisedEnum `json:"circumcised"`
CareerLength *string `json:"career_length"`
CareerStart *string `json:"career_start"`
CareerEnd *string `json:"career_end"`
Tattoos *string `json:"tattoos"`
Piercings *string `json:"piercings"`
Aliases *string `json:"aliases"`
AliasList []string `json:"alias_list"`
Twitter *string `json:"twitter"` // deprecated
Instagram *string `json:"instagram"` // deprecated
Favorite *bool `json:"favorite"`
TagIds []string `json:"tag_ids"`
// This should be a URL or a base64 encoded data URL
Image *string `json:"image"`
StashIds []StashIDInput `json:"stash_ids"`

View file

@ -14,7 +14,7 @@ type FileGetter interface {
type FileFinder interface {
FileGetter
FindAllByPath(ctx context.Context, path string, caseSensitive bool) ([]File, error)
FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]File, error)
FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit, offset int) ([]File, error)
FindByPath(ctx context.Context, path string, caseSensitive bool) (File, error)
FindByFingerprint(ctx context.Context, fp Fingerprint) ([]File, error)
FindByZipFileID(ctx context.Context, zipFileID FileID) ([]File, error)

View file

@ -11,11 +11,12 @@ type FolderGetter interface {
// FolderFinder provides methods to find folders.
type FolderFinder interface {
FolderGetter
FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*Folder, error)
FindAllInPaths(ctx context.Context, p []string, includeZipContents bool, limit, offset int) ([]*Folder, error)
FindByPath(ctx context.Context, path string, caseSensitive bool) (*Folder, error)
FindByZipFileID(ctx context.Context, zipFileID FileID) ([]*Folder, error)
FindByParentFolderID(ctx context.Context, parentFolderID FolderID) ([]*Folder, error)
GetManyParentFolderIDs(ctx context.Context, folderIDs []FolderID) ([][]FolderID, error)
GetManySubFolderIDs(ctx context.Context, folderIDs []FolderID) ([][]FolderID, error)
}
type FolderQueryer interface {

View file

@ -83,6 +83,7 @@ type GalleryWriter interface {
CustomFieldsWriter
AddSceneIDs(ctx context.Context, galleryID int, sceneIDs []int) error
AddFileID(ctx context.Context, id int, fileID FileID) error
AddImages(ctx context.Context, galleryID int, imageIDs ...int) error
RemoveImages(ctx context.Context, galleryID int, imageIDs ...int) error

View file

@ -9,9 +9,16 @@ type TagGetter interface {
Find(ctx context.Context, id int) (*Tag, error)
}
type TagNameFinder interface {
FindByName(ctx context.Context, name string, nocase bool) (*Tag, error)
FindByNames(ctx context.Context, names []string, nocase bool) ([]*Tag, error)
FindByAlias(ctx context.Context, alias string, nocase bool) (*Tag, error)
}
// TagFinder provides methods to find tags.
type TagFinder interface {
TagGetter
TagNameFinder
FindAllAncestors(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error)
FindAllDescendants(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error)
FindByParentTagID(ctx context.Context, parentID int) ([]*Tag, error)
@ -23,8 +30,6 @@ type TagFinder interface {
FindByGroupID(ctx context.Context, groupID int) ([]*Tag, error)
FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*Tag, error)
FindByStudioID(ctx context.Context, studioID int) ([]*Tag, error)
FindByName(ctx context.Context, name string, nocase bool) (*Tag, error)
FindByNames(ctx context.Context, names []string, nocase bool) ([]*Tag, error)
FindByStashID(ctx context.Context, stashID StashID) ([]*Tag, error)
FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*Tag, error)
}

View file

@ -71,10 +71,10 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode
}
if performer.CareerStart != nil {
newPerformerJSON.CareerStart = performer.CareerStart
newPerformerJSON.CareerStart = performer.CareerStart.String()
}
if performer.CareerEnd != nil {
newPerformerJSON.CareerEnd = performer.CareerEnd
newPerformerJSON.CareerEnd = performer.CareerEnd.String()
}
if err := performer.LoadAliases(ctx, reader); err != nil {

View file

@ -48,10 +48,10 @@ var (
rating = 5
height = 123
weight = 60
careerStart = 2005
careerEnd = 2015
careerStart, _ = models.ParseDate("2005")
careerEnd, _ = models.ParseDate("2015")
penisLength = 1.23
circumcisedEnum = models.CircumisedEnumCut
circumcisedEnum = models.CircumcisedEnumCut
circumcised = circumcisedEnum.String()
emptyCustomFields = make(map[string]interface{})
@ -134,8 +134,8 @@ func createFullJSONPerformer(name string, image string, withCustomFields bool) *
URLs: []string{url, twitter, instagram},
Aliases: aliases,
Birthdate: birthDate.String(),
CareerStart: &careerStart,
CareerEnd: &careerEnd,
CareerStart: careerStart.String(),
CareerEnd: careerEnd.String(),
Country: country,
Ethnicity: ethnicity,
EyeColor: eyeColor,

View file

@ -247,7 +247,7 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) (models.Perfor
}
if performerJSON.Circumcised != "" {
v := models.CircumisedEnum(performerJSON.Circumcised)
v := models.CircumcisedEnum(performerJSON.Circumcised)
newPerformer.Circumcised = &v
}
@ -285,11 +285,17 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) (models.Perfor
}
// prefer explicit career_start/career_end, fall back to parsing legacy career_length
if performerJSON.CareerStart != nil || performerJSON.CareerEnd != nil {
newPerformer.CareerStart = performerJSON.CareerStart
newPerformer.CareerEnd = performerJSON.CareerEnd
if performerJSON.CareerStart != "" || performerJSON.CareerEnd != "" {
careerStart, err := models.ParseDate(performerJSON.CareerStart)
if err == nil {
newPerformer.CareerStart = &careerStart
}
careerEnd, err := models.ParseDate(performerJSON.CareerEnd)
if err == nil {
newPerformer.CareerEnd = &careerEnd
}
} else if performerJSON.CareerLength != "" {
start, end, err := utils.ParseYearRangeString(performerJSON.CareerLength)
start, end, err := models.ParseYearRangeString(performerJSON.CareerLength)
if err != nil {
return models.Performer{}, fmt.Errorf("invalid career_length %q: %w", performerJSON.CareerLength, err)
}

View file

@ -317,15 +317,15 @@ func TestUpdate(t *testing.T) {
}
func TestImportCareerFields(t *testing.T) {
startYear := 2005
endYear := 2015
startYear, _ := models.ParseDate("2005")
endYear, _ := models.ParseDate("2015")
// explicit career_start/career_end should be used directly
t.Run("explicit fields", func(t *testing.T) {
input := jsonschema.Performer{
Name: "test",
CareerStart: &startYear,
CareerEnd: &endYear,
CareerStart: startYear.String(),
CareerEnd: endYear.String(),
}
p, err := performerJSONToPerformer(input)
@ -338,8 +338,8 @@ func TestImportCareerFields(t *testing.T) {
t.Run("explicit fields override legacy", func(t *testing.T) {
input := jsonschema.Performer{
Name: "test",
CareerStart: &startYear,
CareerEnd: &endYear,
CareerStart: startYear.String(),
CareerEnd: endYear.String(),
CareerLength: "1990 - 1995",
}

View file

@ -1,6 +1,7 @@
package pkg
import (
"sync"
"time"
)
@ -10,22 +11,23 @@ type cacheEntry struct {
}
type repositoryCache struct {
mu sync.RWMutex
// cache maps the URL to the last modified time and the data
cache map[string]cacheEntry
}
func (c *repositoryCache) ensureCache() {
if c.cache == nil {
c.cache = make(map[string]cacheEntry)
}
}
func (c *repositoryCache) lastModified(url string) *time.Time {
if c == nil {
return nil
}
c.ensureCache()
c.mu.RLock()
defer c.mu.RUnlock()
if c.cache == nil {
return nil
}
e, found := c.cache[url]
if !found {
@ -36,7 +38,13 @@ func (c *repositoryCache) lastModified(url string) *time.Time {
}
func (c *repositoryCache) getPackageList(url string) []RemotePackage {
c.ensureCache()
c.mu.RLock()
defer c.mu.RUnlock()
if c.cache == nil {
return nil
}
e, found := c.cache[url]
if !found {
@ -51,7 +59,13 @@ func (c *repositoryCache) cacheList(url string, lastModified time.Time, data []R
return
}
c.ensureCache()
c.mu.Lock()
defer c.mu.Unlock()
if c.cache == nil {
c.cache = make(map[string]cacheEntry)
}
c.cache[url] = cacheEntry{
lastModified: lastModified,
data: data,

View file

@ -10,6 +10,7 @@ import (
"net/http"
"net/url"
"path/filepath"
"sync"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
@ -31,13 +32,14 @@ type Manager struct {
Client *http.Client
cache *repositoryCache
cacheOnce sync.Once
cache *repositoryCache
}
func (m *Manager) getCache() *repositoryCache {
if m.cache == nil {
m.cacheOnce.Do(func() {
m.cache = &repositoryCache{}
}
})
return m.cache
}

View file

@ -456,7 +456,7 @@ type FilenameParserRepository struct {
Performer PerformerNamesFinder
Studio models.StudioQueryer
Group GroupNameFinder
Tag models.TagQueryer
Tag models.TagNameFinder
}
func NewFilenameParserRepository(repo models.Repository) FilenameParserRepository {
@ -599,7 +599,7 @@ func (p *FilenameParser) queryGroup(ctx context.Context, qb GroupNameFinder, gro
return ret
}
func (p *FilenameParser) queryTag(ctx context.Context, qb models.TagQueryer, tagName string) *models.Tag {
func (p *FilenameParser) queryTag(ctx context.Context, qb models.TagNameFinder, tagName string) *models.Tag {
// massage the tag name
tagName = delimiterRE.ReplaceAllString(tagName, " ")
@ -638,7 +638,7 @@ func (p *FilenameParser) setPerformers(ctx context.Context, qb PerformerNamesFin
}
}
func (p *FilenameParser) setTags(ctx context.Context, qb models.TagQueryer, h sceneHolder, result *models.SceneParserResult) {
func (p *FilenameParser) setTags(ctx context.Context, qb models.TagNameFinder, h sceneHolder, result *models.SceneParserResult) {
// query for each performer
tagsSet := make(map[int]bool)
for _, tagName := range h.tags {

View file

@ -232,7 +232,7 @@ func (g Generator) generateConcatFile(chunkFiles []string) (fn string, err error
for _, f := range chunkFiles {
// files in concat file should be relative to concat
relFile := filepath.Base(f)
if _, err := w.WriteString(fmt.Sprintf("file '%s'\n", relFile)); err != nil {
if _, err := fmt.Fprintf(w, "file '%s'\n", relFile); err != nil {
return concatFile.Name(), fmt.Errorf("writing concat file: %w", err)
}
}

View file

@ -4,6 +4,8 @@ import (
"context"
"errors"
"fmt"
"path/filepath"
"strings"
"github.com/stashapp/stash/pkg/file/video"
"github.com/stashapp/stash/pkg/logger"
@ -32,12 +34,18 @@ type ScanCreatorUpdater interface {
AddFileID(ctx context.Context, id int, fileID models.FileID) error
}
type ScanGalleryFinderUpdater interface {
FindByPath(ctx context.Context, p string) ([]*models.Gallery, error)
AddSceneIDs(ctx context.Context, galleryID int, sceneIDs []int) error
}
type ScanGenerator interface {
Generate(ctx context.Context, s *models.Scene, f *models.VideoFile) error
}
type ScanHandler struct {
CreatorUpdater ScanCreatorUpdater
CreatorUpdater ScanCreatorUpdater
GalleryFinderUpdater ScanGalleryFinderUpdater
ScanGenerator ScanGenerator
CaptionUpdater video.CaptionUpdater
@ -49,19 +57,19 @@ type ScanHandler struct {
func (h *ScanHandler) validate() error {
if h.CreatorUpdater == nil {
return errors.New("CreatorUpdater is required")
return errors.New("internal error: CreatorUpdater is required")
}
if h.ScanGenerator == nil {
return errors.New("ScanGenerator is required")
return errors.New("internal error: ScanGenerator is required")
}
if h.CaptionUpdater == nil {
return errors.New("CaptionUpdater is required")
return errors.New("internal error: CaptionUpdater is required")
}
if !h.FileNamingAlgorithm.IsValid() {
return errors.New("FileNamingAlgorithm is required")
return errors.New("internal error: FileNamingAlgorithm is required")
}
if h.Paths == nil {
return errors.New("Paths is required")
return errors.New("internal error: Paths is required")
}
return nil
@ -127,6 +135,10 @@ func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models.
}
}
if err := h.associateGallery(ctx, existing, f); err != nil {
return err
}
// do this after the commit so that cover generation doesn't hold up the transaction
txn.AddPostCommitHook(ctx, func(ctx context.Context) {
for _, s := range existing {
@ -175,3 +187,29 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.
return nil
}
func (h *ScanHandler) associateGallery(ctx context.Context, existing []*models.Scene, f models.File) error {
sceneIDs := make([]int, len(existing))
for i, s := range existing {
sceneIDs[i] = s.ID
}
path := f.Base().Path
zipPath := strings.TrimSuffix(path, filepath.Ext(path)) + ".zip"
// find galleries with a file that matches
galleries, err := h.GalleryFinderUpdater.FindByPath(ctx, zipPath)
if err != nil {
return err
}
for _, gallery := range galleries {
// found related Scene
logger.Infof("associate: Scene %s is related to gallery: %d", path, gallery.ID)
if err := h.GalleryFinderUpdater.AddSceneIDs(ctx, gallery.ID, sceneIDs); err != nil {
return err
}
}
return nil
}

View file

@ -70,6 +70,7 @@ type StudioFinder interface {
type TagFinder interface {
models.TagGetter
models.TagNameFinder
models.TagAutoTagQueryer
}

View file

@ -140,8 +140,8 @@ func (r mappedResult) scrapedPerformer() *models.ScrapedPerformer {
PenisLength: r.stringPtr("PenisLength"),
Circumcised: r.stringPtr("Circumcised"),
CareerLength: r.stringPtr("CareerLength"),
CareerStart: r.IntPtr("CareerStart"),
CareerEnd: r.IntPtr("CareerEnd"),
CareerStart: r.stringPtr("CareerStart"),
CareerEnd: r.stringPtr("CareerEnd"),
Tattoos: r.stringPtr("Tattoos"),
Piercings: r.stringPtr("Piercings"),
Aliases: r.stringPtr("Aliases"),

View file

@ -20,8 +20,8 @@ type ScrapedPerformerInput struct {
PenisLength *string `json:"penis_length"`
Circumcised *string `json:"circumcised"`
CareerLength *string `json:"career_length"`
CareerStart *int `json:"career_start"`
CareerEnd *int `json:"career_end"`
CareerStart *string `json:"career_start"`
CareerEnd *string `json:"career_end"`
Tattoos *string `json:"tattoos"`
Piercings *string `json:"piercings"`
Aliases *string `json:"aliases"`

View file

@ -0,0 +1,144 @@
package scraper
import (
"context"
"testing"
"github.com/stashapp/stash/pkg/models"
)
func TestPostScrapePerformerCareerLength(t *testing.T) {
ctx := context.Background()
const related = false
strPtr := func(s string) *string {
return &s
}
tests := []struct {
name string
input models.ScrapedPerformer
want models.ScrapedPerformer
}{
{
"start = 2000",
models.ScrapedPerformer{
CareerStart: strPtr("2000"),
},
models.ScrapedPerformer{
CareerStart: strPtr("2000"),
CareerLength: strPtr("2000 -"),
},
},
{
"end = 2000",
models.ScrapedPerformer{
CareerEnd: strPtr("2000"),
},
models.ScrapedPerformer{
CareerEnd: strPtr("2000"),
CareerLength: strPtr("- 2000"),
},
},
{
"start = 2000, end = 2020",
models.ScrapedPerformer{
CareerStart: strPtr("2000"),
CareerEnd: strPtr("2020"),
},
models.ScrapedPerformer{
CareerStart: strPtr("2000"),
CareerEnd: strPtr("2020"),
CareerLength: strPtr("2000 - 2020"),
},
},
{
"length = 2000 -",
models.ScrapedPerformer{
CareerLength: strPtr("2000 -"),
},
models.ScrapedPerformer{
CareerStart: strPtr("2000"),
CareerLength: strPtr("2000 -"),
},
},
{
"length = - 2010",
models.ScrapedPerformer{
CareerLength: strPtr("- 2010"),
},
models.ScrapedPerformer{
CareerEnd: strPtr("2010"),
CareerLength: strPtr("- 2010"),
},
},
{
"length = 2000 - 2010",
models.ScrapedPerformer{
CareerLength: strPtr("2000 - 2010"),
},
models.ScrapedPerformer{
CareerStart: strPtr("2000"),
CareerEnd: strPtr("2010"),
CareerLength: strPtr("2000 - 2010"),
},
},
{
"invalid start",
models.ScrapedPerformer{
CareerStart: strPtr("two thousand"),
},
models.ScrapedPerformer{
CareerStart: strPtr("two thousand"),
},
},
{
"invalid end",
models.ScrapedPerformer{
CareerEnd: strPtr("two thousand"),
},
models.ScrapedPerformer{
CareerEnd: strPtr("two thousand"),
},
},
{
"invalid career length",
models.ScrapedPerformer{
CareerLength: strPtr("1234 - 4567 - 9224"),
},
models.ScrapedPerformer{
CareerLength: strPtr("1234 - 4567 - 9224"),
},
},
}
compareStrPtr := func(a, b *string) bool {
if a == b {
return true
}
if a == nil || b == nil {
return false
}
return *a == *b
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
c := &postScraper{}
got, err := c.postScrapePerformer(ctx, tt.input, related)
if err != nil {
t.Fatalf("postScrapePerformer returned error: %v", err)
}
postScraped := got.(models.ScrapedPerformer)
if !compareStrPtr(postScraped.CareerStart, tt.want.CareerStart) {
t.Errorf("CareerStart = %v, want %v", postScraped.CareerStart, tt.want.CareerStart)
}
if !compareStrPtr(postScraped.CareerEnd, tt.want.CareerEnd) {
t.Errorf("CareerEnd = %v, want %v", postScraped.CareerEnd, tt.want.CareerEnd)
}
if !compareStrPtr(postScraped.CareerLength, tt.want.CareerLength) {
t.Errorf("CareerLength = %v, want %v", postScraped.CareerLength, tt.want.CareerLength)
}
})
}
}

View file

@ -125,23 +125,64 @@ func (c *postScraper) postScrapePerformer(ctx context.Context, p models.ScrapedP
}
}
isEmptyStr := func(s *string) bool { return s == nil || *s == "" }
isEmptyInt := func(s *int) bool { return s == nil || *s == 0 }
// populate career start/end from career length and vice versa
if !isEmptyStr(p.CareerLength) && isEmptyInt(p.CareerStart) && isEmptyInt(p.CareerEnd) {
p.CareerStart, p.CareerEnd, err = utils.ParseYearRangeString(*p.CareerLength)
if err != nil {
logger.Warnf("Could not parse career length %s: %v", *p.CareerLength, err)
}
} else if isEmptyStr(p.CareerLength) && (!isEmptyInt(p.CareerStart) || !isEmptyInt(p.CareerEnd)) {
v := utils.FormatYearRange(p.CareerStart, p.CareerEnd)
p.CareerLength = &v
}
c.postProcessCareerLength(&p)
return p, nil
}
func (c *postScraper) postProcessCareerLength(p *models.ScrapedPerformer) {
isEmptyStr := func(s *string) bool { return s == nil || *s == "" }
// populate career start/end from career length and vice versa
if !isEmptyStr(p.CareerLength) && isEmptyStr(p.CareerStart) && isEmptyStr(p.CareerEnd) {
start, end, err := models.ParseYearRangeString(*p.CareerLength)
if err != nil {
logger.Warnf("Could not parse career length %s: %v", *p.CareerLength, err)
return
}
if start != nil {
startStr := start.String()
p.CareerStart = &startStr
}
if end != nil {
endStr := end.String()
p.CareerEnd = &endStr
}
return
}
// populate career length from career start/end if career length is missing
if isEmptyStr(p.CareerLength) {
var (
start *models.Date
end *models.Date
)
if !isEmptyStr(p.CareerStart) {
date, err := models.ParseDate(*p.CareerStart)
if err != nil {
logger.Warnf("Could not parse career start %s: %v", *p.CareerStart, err)
return
}
start = &date
}
if !isEmptyStr(p.CareerEnd) {
date, err := models.ParseDate(*p.CareerEnd)
if err != nil {
logger.Warnf("Could not parse career end %s: %v", *p.CareerEnd, err)
return
}
end = &date
}
v := models.FormatYearRange(start, end)
p.CareerLength = &v
}
}
func (c *postScraper) postScrapeMovie(ctx context.Context, m models.ScrapedMovie, related bool) (_ ScrapedContent, err error) {
r := c.repository
tqb := r.TagFinder

View file

@ -17,6 +17,12 @@ func queryURLParametersFromScene(scene *models.Scene) queryURLParameters {
ret["oshash"] = scene.OSHash
ret["filename"] = filepath.Base(scene.Path)
// pull phash from primary file
phashFingerprints := scene.Files.Primary().Base().Fingerprints.Filter(models.FingerprintTypePhash)
if len(phashFingerprints) > 0 {
ret["phash"] = phashFingerprints[0].Value()
}
if scene.Title != "" {
ret["title"] = scene.Title
}

View file

@ -11,7 +11,7 @@ import (
"github.com/stashapp/stash/pkg/sliceutil"
)
func postProcessTags(ctx context.Context, tqb models.TagQueryer, scrapedTags []*models.ScrapedTag) (ret []*models.ScrapedTag, err error) {
func postProcessTags(ctx context.Context, tqb models.TagNameFinder, scrapedTags []*models.ScrapedTag) (ret []*models.ScrapedTag, err error) {
ret = make([]*models.ScrapedTag, 0, len(scrapedTags))
for _, t := range scrapedTags {

Some files were not shown because too many files have changed in this diff Show more