Mirror of https://github.com/stashapp/stash.git, synced 2026-01-18 14:11:41 +01:00

Merge pull request #1824 from stashapp/develop
Merge develop to master for 0.10

commit f4b783871a
862 changed files with 50645 additions and 28863 deletions
.gitignore (vendored, 6 changes)

@ -15,16 +15,10 @@
 # Output of the go coverage tool, specifically when used with LiteIDE
 *.out
 
-# Packr2 artifacts
-**/*-packr.go
-
 # GraphQL generated output
 pkg/models/generated_*.go
 ui/v2.5/src/core/generated-*.tsx
 
-# packr generated files
-*-packr.go
-
 ####
 # Jetbrains
 ####

@ -60,4 +54,4 @@ ui/v2.5/build
 stash
 dist
 
 docker
.github/workflows/build.yml (vendored, 46 changes)

@ -8,8 +8,12 @@ on:
   release:
     types: [ published ]
 
+concurrency:
+  group: ${{ github.ref }}
+  cancel-in-progress: true
+
 env:
-  COMPILER_IMAGE: stashapp/compiler:4
+  COMPILER_IMAGE: stashapp/compiler:5
 
 jobs:
   build:
@ -52,34 +56,40 @@ jobs:
         run: |
           mkdir -p .go-cache
           docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated -w /stash $COMPILER_IMAGE tail -f /dev/null
 
       - name: Pre-install
         run: docker exec -t build /bin/bash -c "make pre-ui"
 
       - name: Generate
         run: docker exec -t build /bin/bash -c "make generate"
 
       - name: Validate UI
         # skip UI validation for pull requests if UI is unchanged
         if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }}
-        run: docker exec -t build /bin/bash -c "make ui-validate"
+        run: docker exec -t build /bin/bash -c "make validate-frontend"
 
-      # TODO: Replace with `make validate` once `revive` is bundled in COMPILER_IMAGE
-      - name: Validate
-        run: docker exec -t build /bin/bash -c "make fmt-check vet it"
+      # Static validation happens in the linter workflow in parallel to this workflow
+      # Run Dynamic validation here, to make sure we pass all the projects integration tests
+      #
+      # create UI file so that the embed doesn't fail
+      - name: Test Backend
+        run: |
+          mkdir -p ui/v2.5/build
+          touch ui/v2.5/build/index.html
+          docker exec -t build /bin/bash -c "make it"
 
       - name: Build UI
         # skip UI build for pull requests if UI is unchanged (UI was cached)
         # this means that the build version/time may be incorrect if the UI is
         # not changed in a pull request
         if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }}
-        run: docker exec -t build /bin/bash -c "make ui-only"
+        run: docker exec -t build /bin/bash -c "make ui"
 
       - name: Compile for all supported platforms
         run: |
-          docker exec -t build /bin/bash -c "make packr"
           docker exec -t build /bin/bash -c "make cross-compile-windows"
-          docker exec -t build /bin/bash -c "make cross-compile-osx"
+          docker exec -t build /bin/bash -c "make cross-compile-osx-intel"
+          docker exec -t build /bin/bash -c "make cross-compile-osx-applesilicon"
           docker exec -t build /bin/bash -c "make cross-compile-linux"
          docker exec -t build /bin/bash -c "make cross-compile-linux-arm64v8"
          docker exec -t build /bin/bash -c "make cross-compile-linux-arm32v7"
@ -94,7 +104,7 @@ jobs:
           sha1sum dist/stash-* | sed 's/dist\///g' | tee -a CHECKSUMS_SHA1
           echo "STASH_VERSION=$(git describe --tags --exclude latest_develop)" >> $GITHUB_ENV
           echo "RELEASE_DATE=$(date +'%Y-%m-%d %H:%M:%S %Z')" >> $GITHUB_ENV
 
       - name: Upload Windows binary
         # only upload binaries for pull requests
         if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
@ -118,13 +128,13 @@ jobs:
         with:
           name: stash-linux
           path: dist/stash-linux
 
       - name: Update latest_develop tag
         if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
         run: git tag -f latest_develop; git push -f --tags
 
       - name: Development Release
         if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
         uses: marvinpinto/action-automatic-releases@v1.1.2
         with:
           repo_token: "${{ secrets.GITHUB_TOKEN }}"
@ -133,13 +143,14 @@ jobs:
           title: "${{ env.STASH_VERSION }}: Latest development build"
           files: |
             dist/stash-osx
+            dist/stash-osx-applesilicon
             dist/stash-win.exe
             dist/stash-linux
             dist/stash-linux-arm64v8
             dist/stash-linux-arm32v7
             dist/stash-pi
             CHECKSUMS_SHA1
 
       - name: Master release
         if: ${{ github.event_name == 'release' && github.ref != 'refs/tags/latest_develop' }}
         uses: meeDamian/github-release@2.0
@ -148,6 +159,7 @@ jobs:
           allow_override: true
           files: |
             dist/stash-osx
+            dist/stash-osx-applesilicon
             dist/stash-win.exe
             dist/stash-linux
             dist/stash-linux-arm64v8
@ -155,7 +167,7 @@ jobs:
             dist/stash-pi
             CHECKSUMS_SHA1
           gzip: false
 
       - name: Development Docker
         if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
         env:
@ -163,7 +175,7 @@ jobs:
           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
           DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
         run: |
           docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64
           docker info
           docker buildx create --name builder --use
           docker buildx inspect --bootstrap
@ -177,7 +189,7 @@ jobs:
           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
           DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
         run: |
           docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64
           docker info
           docker buildx create --name builder --use
           docker buildx inspect --bootstrap
.github/workflows/golangci-lint.yml (vendored, new file, 60 lines)

@ -0,0 +1,60 @@
name: Lint (golangci-lint)
on:
  push:
    tags:
      - v*
    branches:
      - master
      - develop
  pull_request:

env:
  COMPILER_IMAGE: stashapp/compiler:5

jobs:
  golangci:
    name: lint
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Checkout
        run: git fetch --prune --unshallow --tags

      - name: Pull compiler image
        run: docker pull $COMPILER_IMAGE

      - name: Start build container
        run: |
          mkdir -p .go-cache
          docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated -w /stash $COMPILER_IMAGE tail -f /dev/null

      - name: Generate Backend
        run: docker exec -t build /bin/bash -c "make generate-backend"

      - name: Run golangci-lint
        uses: golangci/golangci-lint-action@v2
        with:
          # Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
          version: v1.42.1

          # Optional: working directory, useful for monorepos
          # working-directory: somedir

          # Optional: golangci-lint command line arguments.
          args: --modules-download-mode=vendor --timeout=3m

          # Optional: show only new issues if it's a pull request. The default value is `false`.
          # only-new-issues: true

          # Optional: if set to true then the action will use pre-installed Go.
          # skip-go-installation: true

          # Optional: if set to true then the action don't cache or restore ~/go/pkg.
          # skip-pkg-cache: true

          # Optional: if set to true then the action don't cache or restore ~/.cache/go-build.
          # skip-build-cache: true

      - name: Cleanup build container
        run: docker rm -f -v build
.golangci.yml (new file, 82 lines)

@ -0,0 +1,82 @@
# options for analysis running
run:
  timeout: 3m
  modules-download-mode: vendor

linters:
  disable-all: true
  enable:
    # Default set of linters from golangci-lint
    - deadcode
    - errcheck
    - gosimple
    - govet
    - ineffassign
    - staticcheck
    - structcheck
    - typecheck
    - unused
    - varcheck
    # Linters added by the stash project
    # - bodyclose
    - dogsled
    # - errorlint
    # - exhaustive
    - exportloopref
    # - goconst
    # - gocritic
    # - goerr113
    - gofmt
    # - gosec
    # - ifshort
    - misspell
    # - nakedret
    # - noctx
    # - paralleltest
    - revive
    - rowserrcheck
    - sqlclosecheck

linters-settings:
  gofmt:
    simplify: false

  revive:
    ignore-generated-header: true
    severity: error
    confidence: 0.8
    error-code: 1
    warning-code: 1
    rules:
      - name: blank-imports
        disabled: true
      - name: context-as-argument
      - name: context-keys-type
      - name: dot-imports
      - name: error-return
      - name: error-strings
      - name: error-naming
      - name: exported
        disabled: true
      - name: if-return
        disabled: true
      - name: increment-decrement
      - name: var-naming
        disabled: true
      - name: var-declaration
      - name: package-comments
      - name: range
      - name: receiver-naming
      - name: time-naming
      - name: unexported-return
        disabled: true
      - name: indent-error-flow
        disabled: true
      - name: errorf
      - name: empty-block
        disabled: true
      - name: superfluous-else
      - name: unused-parameter
        disabled: true
      - name: unreachable-code
      - name: redefines-builtin-id
@ -29,6 +29,18 @@ builds:
       - darwin
     goarch:
       - amd64
+  - binary: stash-osx-applesilicon
+    env:
+      - CGO_ENABLED=1
+      - CC=oa64-clang
+      - CXX=oa64-clang++
+    flags:
+      - -tags
+      - extended
+    goos:
+      - darwin
+    goarch:
+      - arm64
   - binary: stash-linux
     env:
       - CGO_ENABLED=1
.travis.yml

@ -5,12 +5,9 @@ git:
   depth: false
 language: go
 go:
-  - 1.13.x
+  - 1.17.x
 services:
   - docker
-env:
-  global:
-    - GO111MODULE=on
 before_install:
   - set -e
   # Configure environment so changes are picked up when the Docker daemon is restarted after upgrading
@ -41,7 +38,7 @@ script:
   #- make lint
   - make fmt-check vet it
 after_success:
-  - docker pull stashapp/compiler:4
+  - docker pull stashapp/compiler:5
   - sh ./scripts/cross-compile.sh
   - git describe --tags --exclude latest_develop | tee CHECKSUMS_SHA1
   - sha1sum dist/stash-* | sed 's/dist\///g' | tee -a CHECKSUMS_SHA1
@ -62,6 +59,7 @@ deploy:
     secure: tGJ2q62CfPdayid2qEtW2aGRhMgCl3lBXYYQqp3eH0vFgIIf6cs7IDX7YC/x3XKMEQ/iMLZmtCXZvSTqNrD6Sk7MSnt30GIs+4uxIZDnnd8mV5X3K4n4gjD+NAORc4DrQBvUGrYMKJsR5gtkH0nu6diWb1o1If7OiJEuCPRhrmQYcza7NUdABnA9Z2wn2RNUV9Ga33WUCqLMEU5GtNBlfQPiP/khCQrqn/ocR6wUjYut3J6YagzqH4wsfJi3glHyWtowcNIw1LZi5zFxHD/bRBT4Tln7yypkjWNq9eQILA6i6kRUGf7ggyTx26/k8n4tnu+QD0vVh4EcjlThpU/LGyUXzKrrxjRwaDZnM0oYxg5AfHcBuAiAdo0eWnV3lEWRfTJMIVb9MPf4qDmzR4RREfB5OXOxwq3ODeCcJE8sTIMD/wBPZrlqS/QrRpND2gn2X4snkVukN9t9F4CMTFMtVSzFV7TDJW5E5Lq6VEExulteQhs6kcK9NRPNAaLgRQAw7X9kVWfDtiGUP+fE2i8F9Bo8bm7sOT5O5VPMPykx3EgeNg1IqIgMTCsMlhMJT4xBJoQUgmd2wWyf3Ryw+P+sFgdb5Sd7+lFgJBjMUUoOxMxAOiEgdFvCXcr+/Udyz2RdtetU1/6VzXzLPcKOw0wubZeBkISqu7o9gpfdMP9Eq00=
   file:
     - dist/stash-osx
+    - dist/stash-osx-applesilicon
     - dist/stash-win.exe
     - dist/stash-linux
     - dist/stash-linux-arm64v8
@ -89,6 +87,7 @@ deploy:
     secure: tGJ2q62CfPdayid2qEtW2aGRhMgCl3lBXYYQqp3eH0vFgIIf6cs7IDX7YC/x3XKMEQ/iMLZmtCXZvSTqNrD6Sk7MSnt30GIs+4uxIZDnnd8mV5X3K4n4gjD+NAORc4DrQBvUGrYMKJsR5gtkH0nu6diWb1o1If7OiJEuCPRhrmQYcza7NUdABnA9Z2wn2RNUV9Ga33WUCqLMEU5GtNBlfQPiP/khCQrqn/ocR6wUjYut3J6YagzqH4wsfJi3glHyWtowcNIw1LZi5zFxHD/bRBT4Tln7yypkjWNq9eQILA6i6kRUGf7ggyTx26/k8n4tnu+QD0vVh4EcjlThpU/LGyUXzKrrxjRwaDZnM0oYxg5AfHcBuAiAdo0eWnV3lEWRfTJMIVb9MPf4qDmzR4RREfB5OXOxwq3ODeCcJE8sTIMD/wBPZrlqS/QrRpND2gn2X4snkVukN9t9F4CMTFMtVSzFV7TDJW5E5Lq6VEExulteQhs6kcK9NRPNAaLgRQAw7X9kVWfDtiGUP+fE2i8F9Bo8bm7sOT5O5VPMPykx3EgeNg1IqIgMTCsMlhMJT4xBJoQUgmd2wWyf3Ryw+P+sFgdb5Sd7+lFgJBjMUUoOxMxAOiEgdFvCXcr+/Udyz2RdtetU1/6VzXzLPcKOw0wubZeBkISqu7o9gpfdMP9Eq00=
   file:
     - dist/stash-osx
+    - dist/stash-osx-applesilicon
     - dist/stash-win.exe
     - dist/stash-linux
     - dist/stash-linux-arm64v8
Makefile (101 changes)

@ -9,7 +9,7 @@ endif
 ifdef IS_WIN
   SEPARATOR := &&
   SET := set
 else
   SEPARATOR := ;
   SET := export
 endif
@ -23,9 +23,8 @@ ifdef OUTPUT
 endif
 
 export CGO_ENABLED = 1
-export GO111MODULE = on
 
-.PHONY: release pre-build install clean
+.PHONY: release pre-build
 
 release: generate ui build-release
@ -44,14 +43,15 @@ endif
 
 build: pre-build
     $(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/pkg/api.version=$(STASH_VERSION)' -X 'github.com/stashapp/stash/pkg/api.buildstamp=$(BUILD_DATE)' -X 'github.com/stashapp/stash/pkg/api.githash=$(GITHASH)')
-    go build $(OUTPUT) -mod=vendor -v -tags "sqlite_omit_load_extension osusergo netgo" -ldflags "$(LDFLAGS) $(EXTRA_LDFLAGS)"
+    go build $(OUTPUT) -mod=vendor -v -tags "sqlite_omit_load_extension osusergo netgo" $(GO_BUILD_FLAGS) -ldflags "$(LDFLAGS) $(EXTRA_LDFLAGS)"
 
 # strips debug symbols from the release build
-# consider -trimpath in go build if we move to go 1.13+
 build-release: EXTRA_LDFLAGS := -s -w
+build-release: GO_BUILD_FLAGS := -trimpath
 build-release: build
 
 build-release-static: EXTRA_LDFLAGS := -extldflags=-static -s -w
+build-release-static: GO_BUILD_FLAGS := -trimpath
 build-release-static: build
 
 # cross-compile- targets should be run within the compiler docker container
@ -62,13 +62,21 @@ cross-compile-windows: export CXX := x86_64-w64-mingw32-g++
 cross-compile-windows: OUTPUT := -o dist/stash-win.exe
 cross-compile-windows: build-release-static
 
-cross-compile-osx: export GOOS := darwin
-cross-compile-osx: export GOARCH := amd64
-cross-compile-osx: export CC := o64-clang
-cross-compile-osx: export CXX := o64-clang++
-cross-compile-osx: OUTPUT := -o dist/stash-osx
+cross-compile-osx-intel: export GOOS := darwin
+cross-compile-osx-intel: export GOARCH := amd64
+cross-compile-osx-intel: export CC := o64-clang
+cross-compile-osx-intel: export CXX := o64-clang++
+cross-compile-osx-intel: OUTPUT := -o dist/stash-osx
 # can't use static build for OSX
-cross-compile-osx: build-release
+cross-compile-osx-intel: build-release
+
+cross-compile-osx-applesilicon: export GOOS := darwin
+cross-compile-osx-applesilicon: export GOARCH := arm64
+cross-compile-osx-applesilicon: export CC := oa64e-clang
+cross-compile-osx-applesilicon: export CXX := oa64e-clang++
+cross-compile-osx-applesilicon: OUTPUT := -o dist/stash-osx-applesilicon
+# can't use static build for OSX
+cross-compile-osx-applesilicon: build-release
 
 cross-compile-linux: export GOOS := linux
 cross-compile-linux: export GOARCH := amd64
@ -95,20 +103,26 @@ cross-compile-pi: export CC := arm-linux-gnueabi-gcc
 cross-compile-pi: OUTPUT := -o dist/stash-pi
 cross-compile-pi: build-release-static
 
-cross-compile-all: cross-compile-windows cross-compile-osx cross-compile-linux cross-compile-linux-arm64v8 cross-compile-linux-arm32v7 cross-compile-pi
-
-install:
-    packr2 install
-
-clean:
-    packr2 clean
+cross-compile-all:
+    make cross-compile-windows
+    make cross-compile-osx-intel
+    make cross-compile-osx-applesilicon
+    make cross-compile-linux
+    make cross-compile-linux-arm64v8
+    make cross-compile-linux-arm32v7
+    make cross-compile-pi
 
 # Regenerates GraphQL files
 .PHONY: generate
-generate:
-    go generate -mod=vendor
+generate: generate-backend generate-frontend
+
+.PHONY: generate-frontend
+generate-frontend:
+    cd ui/v2.5 && yarn run gqlgen
+
+.PHONY: generate-backend
+generate-backend:
+    go generate -mod=vendor
 
 # Regenerates stash-box client files
 .PHONY: generate-stash-box-client
 generate-stash-box-client:
@ -119,23 +133,13 @@ generate-stash-box-client:
 fmt:
     go fmt ./...
 
 # Ensures that changed files have had gofmt run on them
 .PHONY: fmt-check
 fmt-check:
     sh ./scripts/check-gofmt.sh
 
-# Runs go vet on the project's source code.
-.PHONY: vet
-vet:
-    go vet -mod=vendor ./...
-
 .PHONY: lint
 lint:
-    revive -config revive.toml -exclude ./vendor/... ./...
+    golangci-lint run
 
 # runs unit tests - excluding integration tests
 .PHONY: test
 test:
     go test -mod=vendor ./...
 
 # runs all tests - including integration tests
@ -148,23 +152,19 @@ it:
 generate-test-mocks:
     go run -mod=vendor github.com/vektra/mockery/v2 --dir ./pkg/models --name '.*ReaderWriter' --outpkg mocks --output ./pkg/models/mocks
 
 # installs UI dependencies. Run when first cloning repository, or if UI
 # dependencies have changed
 .PHONY: pre-ui
 pre-ui:
     cd ui/v2.5 && yarn install --frozen-lockfile
 
-.PHONY: ui-only
-ui-only: pre-build
+.PHONY: ui
+ui: pre-build
     $(SET) REACT_APP_DATE="$(BUILD_DATE)" $(SEPARATOR) \
     $(SET) REACT_APP_GITHASH=$(GITHASH) $(SEPARATOR) \
     $(SET) REACT_APP_STASH_VERSION=$(STASH_VERSION) $(SEPARATOR) \
     cd ui/v2.5 && yarn build
 
-.PHONY: ui
-ui: ui-only
-    packr2
-
 .PHONY: ui-start
 ui-start: pre-build
     $(SET) REACT_APP_DATE="$(BUILD_DATE)" $(SEPARATOR) \
@ -181,12 +181,19 @@ fmt-ui:
 ui-validate:
     cd ui/v2.5 && yarn run validate
 
-# just repacks the packr files - use when updating migrations and packed files without
-# rebuilding the UI
-.PHONY: packr
-packr:
-    packr2
-
 # runs all of the tests and checks required for a PR to be accepted
 .PHONY: validate
-validate: ui-validate fmt-check vet lint it
+validate: validate-frontend validate-backend
+
+# runs all of the frontend PR-acceptance steps
+.PHONY: validate-frontend
+validate-frontend: ui-validate
+
+# runs all of the backend PR-acceptance steps
+.PHONY: validate-backend
+validate-backend: lint it
 
 # locally builds and tags a 'stash/build' docker image
 .PHONY: docker-build
 docker-build:
     docker build -t stash/build -f docker/build/x86_64/Dockerfile .
README.md (31 changes)

@ -1,6 +1,5 @@
 # Stash
-
 [](https://travis-ci.org/stashapp/stash)
 [](https://goreportcard.com/report/github.com/stashapp/stash)
 [](https://discord.gg/2TsNFKt)
@ -68,6 +67,10 @@ This command would need customizing for your environment. [This link](https://s
 
 Once you have a certificate and key file, name them `stash.crt` and `stash.key` and place them in the same directory as the `config.yml` file, or the `~/.stash` directory. Stash detects these and starts up using HTTPS rather than HTTP.
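For local testing, a self-signed pair with those exact file names can be generated with openssl. This is an illustrative sketch, not part of the original README; the subject and validity period are placeholder values, and browsers will warn about self-signed certificates:

```
# Generate a self-signed certificate/key pair named as stash expects.
openssl req -x509 -newkey rsa:4096 -nodes \
  -keyout stash.key -out stash.crt \
  -days 365 -subj "/CN=localhost"
```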
+## Basepath rewriting
+
+The basepath defaults to `/`. When running stash via a reverse proxy in a subpath, the basepath can be changed by having the reverse proxy pass `X-Forwarded-Prefix` (and optionally `X-Forwarded-Port`) headers. When stash detects these headers, it alters the basepath URL of the UI.
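As an illustrative check (an assumption of this edit, not part of the original text), the proxy behaviour can be simulated by sending the header manually and inspecting the rewritten URLs in the returned HTML:

```
# Ask stash to render the UI as if it were served under /stash.
# Links in the output should carry the /stash prefix.
curl -s -H "X-Forwarded-Prefix: /stash" http://localhost:9999/ | grep -o 'href="[^"]*"'
```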
 # Customization
 
 ## Themes and CSS Customization
@ -90,13 +93,10 @@ For issues not addressed there, there are a few options.
 ## Pre-requisites
 
 * [Go](https://golang.org/dl/)
-* [Revive](https://github.com/mgechev/revive) - Configurable linter
-  * Go Install: `go get github.com/mgechev/revive`
-* [Packr2](https://github.com/gobuffalo/packr/) - Static asset bundler
-  * Go Install: `go get github.com/gobuffalo/packr/v2/packr2`
-  * [Binary Download](https://github.com/gobuffalo/packr/releases)
+* [GolangCI](https://golangci-lint.run/) - A meta-linter which runs several linters in parallel
+  * To install, follow the [local installation instructions](https://golangci-lint.run/usage/install/#local-installation)
 * [Yarn](https://yarnpkg.com/en/docs/install) - Yarn package manager
   * Run `yarn install --frozen-lockfile` in the `stash/ui/v2.5` folder (before running make generate for the first time).
 
 NOTE: You may need to run the `go get` commands outside the project directory to avoid modifying the project's module file.
@ -112,8 +112,7 @@ TODO
 2. Download and install [MingW](https://sourceforge.net/projects/mingw-w64/)
 3. Search for "advanced system settings" and open the system properties dialog.
    1. Click the `Environment Variables` button
-   2. Add `GO111MODULE=on`
-   3. Under system variables find the `Path`. Edit and add `C:\Program Files\mingw-w64\*\mingw64\bin` (replace * with the correct path).
+   2. Under system variables find the `Path`. Edit and add `C:\Program Files\mingw-w64\*\mingw64\bin` (replace * with the correct path).
 
 NOTE: The `make` command in Windows will be `mingw32-make` with MingW.
@ -121,21 +120,19 @@ NOTE: The `make` command in Windows will be `mingw32-make` with MingW.
 
 * `make generate` - Generate Go and UI GraphQL files
 * `make build` - Builds the binary (make sure to build the UI as well... see below)
 * `make docker-build` - Locally builds and tags a complete 'stash/build' docker image
 * `make pre-ui` - Installs the UI dependencies. Only needs to be run once before building the UI for the first time, or if the dependencies are updated
-* `make fmt-ui` - Formats the UI source code.
-* `make ui` - Builds the frontend and the packr2 files
-* `make packr` - Generate packr2 files (sub-target of `ui`. Use to regenerate packr2 files without rebuilding UI)
-* `make vet` - Run `go vet`
-* `make lint` - Run the linter
+* `make fmt-ui` - Formats the UI source code
+* `make ui` - Builds the frontend
+* `make lint` - Run the linter on the backend
 * `make fmt` - Run `go fmt`
 * `make fmt-check` - Ensure changed files are formatted correctly
 * `make it` - Run the unit and integration tests
 * `make validate` - Run all of the tests and checks required to submit a PR
 * `make ui-start` - Runs the UI in development mode. Requires a running stash server to connect to. Stash port can be changed from the default of `9999` with environment variable `REACT_APP_PLATFORM_PORT` (see the example below).
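A minimal sketch of that last command, assuming a stash server is listening on port 9990 rather than the default:

```
# Point the development UI at a stash server on a non-default port.
REACT_APP_PLATFORM_PORT=9990 make ui-start
```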
 ## Building a release
 
 1. Run `make generate` to create generated files
 2. Run `make ui` to compile the frontend
 3. Run `make build` to build the executable for your current platform
@ -151,7 +148,7 @@ command to open a bash shell to the container to poke around:
 
 Stash can be profiled using the `--cpuprofile <output profile filename>` command line flag.
 
 The resulting file can then be used with pprof as follows:
 
 `go tool pprof <path to binary> <path to profile filename>`
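A concrete session might look like the following sketch; the file names are placeholders:

```
# Capture a CPU profile while exercising the server, then inspect it.
./stash --cpuprofile stash.pprof
# ... use the application, then stop stash ...
go tool pprof ./stash stash.pprof
# inside pprof, commands such as top10 or web summarize the hot paths
```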
@ -1,61 +1,43 @@
-# this dockerfile must be built from the top-level stash directory
-# ie from top=level stash:
+# This dockerfile must be built from the top-level stash directory
+# ie from top-level stash:
 # docker build -t stash/build -f docker/build/x86_64/Dockerfile .
 
-FROM golang:1.13.15 as compiler
-
-RUN apt-get update && apt-get install -y apt-transport-https
-RUN curl -sL https://deb.nodesource.com/setup_lts.x | bash -
-
-# prevent caching of the key
-ADD https://dl.yarnpkg.com/debian/pubkey.gpg yarn.gpg
-RUN cat yarn.gpg | apt-key add - && \
-    echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list && \
-    rm yarn.gpg
-
-RUN apt-get update && \
-    apt-get install -y nodejs yarn xz-utils --no-install-recommends || exit 1; \
-    rm -rf /var/lib/apt/lists/*;
-
-ENV PACKR2_VERSION=2.0.2
-ENV PACKR2_SHA=f95ff4c96d7a28813220df030ad91700b8464fe292ab3e1dc9582305c2a338d2
-ENV PACKR2_DOWNLOAD_FILE=packr_${PACKR2_VERSION}_linux_amd64.tar.gz
-ENV PACKR2_DOWNLOAD_URL=https://github.com/gobuffalo/packr/releases/download/v${PACKR2_VERSION}/${PACKR2_DOWNLOAD_FILE}
+# Build Frontend
+FROM node:alpine as frontend
+RUN apk add --no-cache make git
+## cache node_modules separately
+COPY ./ui/v2.5/package.json ./ui/v2.5/yarn.lock /stash/ui/v2.5/
+WORKDIR /stash
+RUN yarn --cwd ui/v2.5 install --frozen-lockfile
+COPY Makefile /stash/
+COPY ./.git /stash/.git
+COPY ./graphql /stash/graphql/
+COPY ./ui /stash/ui/
+RUN make generate-frontend
+RUN BUILD_DATE=$(date +"%Y-%m-%d %H:%M:%S") make ui
+
+# Build Backend
+FROM golang:1.17-alpine as backend
+RUN apk add --no-cache xz make alpine-sdk
+## install ffmpeg
 WORKDIR /
-RUN wget ${PACKR2_DOWNLOAD_URL}; \
-    echo "$PACKR2_SHA $PACKR2_DOWNLOAD_FILE" | sha256sum -c - || exit 1; \
-    tar -xzf $PACKR2_DOWNLOAD_FILE -C /usr/bin/ packr2; \
-    rm $PACKR2_DOWNLOAD_FILE;
-
 SHELL ["/bin/bash", "-o", "pipefail", "-c"]
 
 RUN wget -O /ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz && \
     tar xf /ffmpeg.tar.xz && \
     rm ffmpeg.tar.xz && \
     mv /ffmpeg*/ /ffmpeg/
 
-# copy the ui yarn stuff so that it doesn't get rebuilt every time
-COPY ./ui/v2.5/package.json ./ui/v2.5/yarn.lock /stash/ui/v2.5/
-
 WORKDIR /stash
-RUN yarn --cwd ui/v2.5 install --frozen-lockfile
-
-COPY . /stash/
-ENV GO111MODULE=on
-
-RUN make generate
-RUN make ui
+COPY ./go* ./*.go Makefile gqlgen.yml .gqlgenc.yml /stash/
+COPY ./scripts /stash/scripts/
+COPY ./vendor /stash/vendor/
+COPY ./pkg /stash/pkg/
+COPY --from=frontend /stash /stash/
+RUN make generate-backend
+RUN make build
 
-FROM ubuntu:20.04 as app
-
-RUN apt-get update && apt-get -y install ca-certificates
-COPY --from=compiler /stash/stash /ffmpeg/ffmpeg /ffmpeg/ffprobe /usr/bin/
+# Final Runnable Image
+FROM alpine:latest
+RUN apk add --no-cache ca-certificates vips-tools
+COPY --from=backend /stash/stash /ffmpeg/ffmpeg /ffmpeg/ffprobe /usr/bin/
 ENV STASH_CONFIG_FILE=/root/.stash/config.yml
 
 EXPOSE 9999
-CMD ["stash"]
+ENTRYPOINT ["stash"]
@ -11,7 +11,7 @@ RUN if [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then BIN=stash-pi; \
 ENV DEBIAN_FRONTEND=noninteractive
 RUN apt update && apt install -y python3 python-is-python3 python3-requests python3-requests-toolbelt python3-lxml python3-pip && pip3 install cloudscraper
 FROM ubuntu:20.04 as app
-run apt update && apt install -y python3 python-is-python3 python3-requests python3-requests-toolbelt python3-lxml python3-mechanicalsoup ffmpeg && rm -rf /var/lib/apt/lists/*
+run apt update && apt install -y python3 python-is-python3 python3-requests python3-requests-toolbelt python3-lxml python3-mechanicalsoup ffmpeg libvips-tools && rm -rf /var/lib/apt/lists/*
 COPY --from=prep /stash /usr/bin/
 COPY --from=prep /usr/local/lib/python3.8/dist-packages /usr/local/lib/python3.8/dist-packages
@ -1,11 +1,6 @@
-FROM golang:1.13.15
+FROM golang:1.17
 
-LABEL maintainer="stashappdev@gmail.com"
-
-ENV PACKR2_VERSION=2.0.2
-ENV PACKR2_SHA=f95ff4c96d7a28813220df030ad91700b8464fe292ab3e1dc9582305c2a338d2
-ENV PACKR2_DOWNLOAD_FILE=packr_${PACKR2_VERSION}_linux_amd64.tar.gz
-ENV PACKR2_DOWNLOAD_URL=https://github.com/gobuffalo/packr/releases/download/v${PACKR2_VERSION}/${PACKR2_DOWNLOAD_FILE}
+LABEL maintainer="https://discord.gg/2TsNFKt"
 
 # Install tools
 RUN apt-get update && apt-get install -y apt-transport-https
@ -18,10 +13,10 @@ RUN cat yarn.gpg | apt-key add - && \
     rm yarn.gpg
 
 RUN apt-get update && \
-    apt-get install -y automake autogen \
+    apt-get install -y automake autogen cmake \
     libtool libxml2-dev uuid-dev libssl-dev bash \
-    patch make tar xz-utils bzip2 gzip sed cpio \
-    gcc-8-multilib gcc-mingw-w64 g++-mingw-w64 clang llvm-dev \
+    patch make tar xz-utils bzip2 gzip zlib1g-dev sed cpio \
+    gcc-10-multilib gcc-mingw-w64 g++-mingw-w64 clang llvm-dev \
     gcc-arm-linux-gnueabi libc-dev-armel-cross linux-libc-dev-armel-cross \
     gcc-arm-linux-gnueabihf libc-dev-armhf-cross \
     gcc-aarch64-linux-gnu libc-dev-arm64-cross \
@ -29,21 +24,22 @@ RUN apt-get update && \
     rm -rf /var/lib/apt/lists/*;
 
 # Cross compile setup
-ENV OSX_SDK_VERSION 10.11
+ENV OSX_SDK_VERSION 11.3
 ENV OSX_SDK_DOWNLOAD_FILE=MacOSX${OSX_SDK_VERSION}.sdk.tar.xz
-ENV OSX_SDK_DOWNLOAD_URL=https://github.com/ndeloof/golang-cross/raw/113fix/${OSX_SDK_DOWNLOAD_FILE}
-ENV OSX_SDK_SHA=98cdd56e0f6c1f9e1af25e11dd93d2e7d306a4aa50430a2bc6bc083ac67efbb8
+ENV OSX_SDK_DOWNLOAD_URL=https://github.com/phracker/MacOSX-SDKs/releases/download/${OSX_SDK_VERSION}/${OSX_SDK_DOWNLOAD_FILE}
+ENV OSX_SDK_SHA=cd4f08a75577145b8f05245a2975f7c81401d75e9535dcffbb879ee1deefcbf4
 ENV OSX_SDK MacOSX$OSX_SDK_VERSION.sdk
 ENV OSX_NDK_X86 /usr/local/osx-ndk-x86
 
 RUN wget ${OSX_SDK_DOWNLOAD_URL}
 RUN echo "$OSX_SDK_SHA $OSX_SDK_DOWNLOAD_FILE" | sha256sum -c - || exit 1; \
-    git clone https://github.com/tpoechtrager/osxcross.git && \
-    git -C osxcross checkout a9317c18a3a457ca0a657f08cc4d0d43c6cf8953 || exit 1; \
-    mv $OSX_SDK_DOWNLOAD_FILE osxcross/tarballs/ && \
-    UNATTENDED=yes SDK_VERSION=${OSX_SDK_VERSION} OSX_VERSION_MIN=10.9 osxcross/build.sh || exit 1; \
-    mv osxcross/target $OSX_NDK_X86; \
-    rm -rf osxcross;
+    git clone https://github.com/tpoechtrager/osxcross.git; \
+    mv $OSX_SDK_DOWNLOAD_FILE osxcross/tarballs/
+
+RUN UNATTENDED=yes SDK_VERSION=${OSX_SDK_VERSION} OSX_VERSION_MIN=10.10 osxcross/build.sh || exit 1;
+RUN cp osxcross/target/lib/* /usr/lib/ ; \
+    mv osxcross/target $OSX_NDK_X86; \
+    rm -rf osxcross;
 
 ENV PATH $OSX_NDK_X86/bin:$PATH
@ -51,14 +47,6 @@ RUN mkdir -p /root/.ssh; \
     chmod 0700 /root/.ssh; \
     ssh-keyscan github.com > /root/.ssh/known_hosts;
 
-RUN wget ${PACKR2_DOWNLOAD_URL}; \
-    echo "$PACKR2_SHA $PACKR2_DOWNLOAD_FILE" | sha256sum -c - || exit 1; \
-    tar -xzf $PACKR2_DOWNLOAD_FILE -C /usr/bin/ packr2; \
-    rm $PACKR2_DOWNLOAD_FILE;
-
-CMD ["packr2", "version"]
-
 # Notes for self:
 # Windows:
 # GOOS=windows GOARCH=amd64 CGO_ENABLED=1 CC=x86_64-w64-mingw32-gcc CXX=x86_64-w64-mingw32-g++ go build -ldflags "-extldflags '-static'" -tags extended
@ -66,4 +54,4 @@ CMD ["packr2", "version"]
 
 # Darwin
 # CC=o64-clang CXX=o64-clang++ GOOS=darwin GOARCH=amd64 CGO_ENABLED=1 go build -tags extended
-# env GO111MODULE=on goreleaser --config=goreleaser-extended.yml --skip-publish --skip-validate --rm-dist --release-notes=temp/0.48-relnotes-ready.md
+# env goreleaser --config=goreleaser-extended.yml --skip-publish --skip-validate --rm-dist --release-notes=temp/0.48-relnotes-ready.md
@ -1,6 +1,6 @@
 user=stashapp
 repo=compiler
-version=4
+version=5
 
 latest:
     docker build -t ${user}/${repo}:latest .
@ -1,3 +1,5 @@
 Modified from https://github.com/bep/dockerfiles/tree/master/ci-goreleaser
 
 When the dockerfile is changed, the version number should be incremented in the Makefile and the new version tag should be pushed to docker hub. The `scripts/cross-compile.sh` script should also be updated to use the new version number tag, and `.travis.yml` needs to be updated to pull the correct image tag.
+
+A MacOS universal binary can be created using `lipo -create -output stash-osx-universal stash-osx stash-osx-applesilicon`, available in the image.
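A sketch of those release steps, assuming the compiler image lives under docker/compiler and the version was bumped from 4 to 5 in the Makefile:

```
# Build and push the new compiler image tag (requires Docker Hub access).
cd docker/compiler
docker build -t stashapp/compiler:5 .
docker push stashapp/compiler:5
```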
@ -1,11 +0,0 @@ (file deleted)
#!/usr/bin/env bash

TMP=$(mktemp -d /tmp/XXXXXXXXXXX)
SDK="MacOSX10.11.sdk"

mkdir -p $TMP/$SDK/usr/include/c++

cp -rf /Applications/Xcode7.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/$SDK $TMP &>/dev/null || true
cp -rf /Applications/Xcode7.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include/c++/v1 $TMP/$SDK/usr/include/c++ || exit -1

tar -C $TMP -czf $SDK.tar.gz $SDK
@ -1,27 +0,0 @@ (file deleted)
FROM ubuntu:20.04 as prep
LABEL MAINTAINER="https://discord.gg/Uz29ny"

RUN apt-get update && \
    apt-get -y install curl xz-utils && \
    apt-get autoclean -y && \
    rm -rf /var/lib/apt/lists/*
WORKDIR /
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# added " to end of stash-linux clause so that it doesn't pick up the arm builds
RUN curl -L -o /stash $(curl -s https://api.github.com/repos/stashapp/stash/releases/tags/latest_develop | awk '/browser_download_url/ && /stash-linux"/' | sed -e 's/.*: "\(.*\)"/\1/') && \
    chmod +x /stash

RUN curl --http1.1 -o /ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz && \
    tar xf /ffmpeg.tar.xz && \
    rm ffmpeg.tar.xz && \
    mv /ffmpeg*/ /ffmpeg/

FROM ubuntu:20.04 as app
RUN apt-get update && apt-get -y install ca-certificates
COPY --from=prep /stash /ffmpeg/ffmpeg /ffmpeg/ffprobe /usr/bin/

ENV STASH_CONFIG_FILE=/root/.stash/config.yml

EXPOSE 9999
CMD ["stash"]
@ -1,53 +1,37 @@
-# Docker install on Ubuntu 18.04
-Installing StashApp can likely work on others if your OS either has it's own package manager or comes shipped with Docker and docker-compose.
+# Docker Installation (for most 64-bit GNU/Linux systems)
+StashApp is supported on most systems that support Docker and docker-compose. Your OS likely ships with or makes available the necessary packages.
 
 ## Dependencies
-The goal is to avoid as many dependencies as possible so for now the only pre-requisites you are required to have are `curl`, `docker`, and `docker-compose` for the most part your understanding of the technologies can be superficial so long as you can follow commands and are open to reading a bit you should be fine.
+Only `docker` and `docker-compose` are required. For the most part your understanding of the technologies can be superficial. So long as you can follow commands and are open to reading a bit, you should be fine.
+
+Installation instructions are available below, and if your distribution's repository ships a current version of docker, you may use that.
+https://docs.docker.com/engine/install/
 
 ### Docker
-Docker is effectively the cross-platform software package repository it allows you to ship an entire environment in what's referred to as a container. Containers are intended to hold everything that is needed to ship what's required to run an application from one place to another with a degree of a standard that makes it easy for everyone along the way to reproduce the environment for their step in the chain.
+Docker is effectively a cross-platform software package repository. It allows you to ship an entire environment in what's referred to as a container. Containers are intended to hold everything that is needed to run an application from one place to another, making it easy for everyone along the way to reproduce the environment.
 
-The other side of docker is it brings everything that we would typically have to teach you about the individual components of your soon to be installed StashApp and ffmpeg, docker-compose wraps it up nicely in a handful of easy to follow steps that should result in the same environment on everyone's host.
+The StashApp docker container ships with everything you need to automatically build and run stash, including ffmpeg.
 
-The installation method we recommend is via the `docker.com` website however if your specific operating system's repository versions are at the latest along with docker you should be good to launch with you using whatever instructions you wish. The version of Docker we used in our deployment for testing this process was `Docker version 17.05.0-ce, build 89658be` however any versions later than this will be sufficient. At the writing of this tutorial, this was not the latest version of Docker.
-
-#### Just the link to installation instructions, please
-Instructions for installing on Ubuntu are at the link that follows:
-https://docs.docker.com/install/linux/docker-ce/ubuntu/
-
-If you plan on using other versions of OS you should at least aim to be a Linux base with an x86_64 CPU and the appropriate minimum version of the dependencies.
-
-### Docker-compose
-Docker Compose's role in this deployment is to get you a fully working instance of StashApp exactly as you would need it to have a reasonable instance for testing / developing on, you could technically deploy a live instance with this, but without a reverse proxy, is not recommended. You are encouraged to learn how to use the Docker-Compose format, but it's not a required prerequisite for getting this running you need to have it installed successfully.
-
-Install Docker Compose via this guide below, and it is essential if you're using an older version of Linux to use the official documentation from Docker.com because you require the more recent version of docker-compose at least version 3.4 aka 1.22.0 or newer.
-
-#### Just the link to installation instructions, please
-https://docs.docker.com/compose/install/
-
-### Install curl
-This one's easy, copy paste.
-
-```
-apt update -y && \
-apt install -f curl
-```
+### docker-compose
+Docker Compose lets you specify how and where to run your containers, and to manage their environment. The docker-compose.yml file in this folder gets you a fully working instance of StashApp exactly as you would need it to have a reasonable instance for testing / developing on. If you are deploying a live instance for production, a reverse proxy (such as NGINX or Traefik) is recommended, but not required.
+
+The latest version is always recommended.
 
 ### Get the docker-compose.yml file
 
-Now you can either navigate to the [docker-compose.yml](https://raw.githubusercontent.com/stashapp/stash/master/docker/production/docker-compose.yml) in the repository, OR you can make your Linux console do it for you with this.
+Now you can either navigate to the [docker-compose.yml](https://raw.githubusercontent.com/stashapp/stash/master/docker/production/docker-compose.yml) in the repository, or if you have curl, you can make your Linux console do it for you:
 
 ```
-curl -o ~/docker-compose.yml https://raw.githubusercontent.com/stashapp/stash/master/docker/production/docker-compose.yml
+mkdir stashapp && cd stashapp
+curl -o docker-compose.yml https://raw.githubusercontent.com/stashapp/stash/master/docker/production/docker-compose.yml
 ```
 
-Once you have that file where you want it, you can either modify the settings as you please OR you can run the following to get it up and running instantly.
+Once you have that file where you want it, modify the settings as you please, and then run:
 
 ```
-cd ~ && docker-compose up -d
+docker-compose up -d
 ```
 
-Installing StashApp this way will by default bind stash to port 9999 or in web browser terms. http://YOURIP:9999 or if you're doing this on your machine locally which is the only recommended production version of this container as is with no security configurations set at all is http://localhost:9999
+Installing StashApp this way will by default bind stash to port 9999. This is available in your web browser locally at http://localhost:9999 or on your network as http://YOUR-LOCAL-IP:9999
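To confirm the container came up, and to stop it later, the standard docker-compose commands apply (not part of the original guide):

```
# Follow the container logs until stash reports it is listening.
docker-compose logs -f stash
# Stop and remove the container when finished.
docker-compose down
```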
Good luck and have fun!
@ -4,9 +4,13 @@ version: '3.4'
 services:
   stash:
     image: stashapp/stash:latest
     container_name: stash
     restart: unless-stopped
+    ## the container's port must be the same with the STASH_PORT in the environment section
     ports:
       - "9999:9999"
+    ## If you intend to use stash's DLNA functionality uncomment the below network mode and comment out the above ports section
+    # network_mode: host
     logging:
       driver: "json-file"
       options:
@ -17,12 +21,14 @@ services:
       - STASH_GENERATED=/generated/
       - STASH_METADATA=/metadata/
       - STASH_CACHE=/cache/
+      ## Adjust below to change default port (9999)
+      - STASH_PORT=9999
     volumes:
       - /etc/localtime:/etc/localtime:ro
       ## Adjust below paths (the left part) to your liking.
       ## E.g. you can change ./config:/root/.stash to ./stash:/root/.stash
 
-      ## Keep configs here.
+      ## Keep configs, scrapers, and plugins here.
       - ./config:/root/.stash
       ## Point this at your collection.
       - ./data:/data
@ -1,5 +1,5 @@
 FROM ubuntu:20.04 as prep
-LABEL MAINTAINER="leopere [at] nixc [dot] us"
+LABEL MAINTAINER="https://discord.gg/2TsNFKt"
 
 RUN apt-get update && \
     apt-get -y install curl xz-utils && \
go.mod (66 changes)

@ -8,21 +8,18 @@ require (
 	github.com/chromedp/cdproto v0.0.0-20210622022015-fe1827b46b84
 	github.com/chromedp/chromedp v0.7.3
 	github.com/corona10/goimagehash v1.0.3
-	github.com/dgrijalva/jwt-go v3.2.0+incompatible
 	github.com/disintegration/imaging v1.6.0
 	github.com/fvbommel/sortorder v1.0.2
 	github.com/go-chi/chi v4.0.2+incompatible
-	github.com/gobuffalo/logger v1.0.4 // indirect
-	github.com/gobuffalo/packr/v2 v2.8.1
-	github.com/golang-migrate/migrate/v4 v4.3.1
+	github.com/golang-jwt/jwt/v4 v4.0.0
+	github.com/golang-migrate/migrate/v4 v4.15.0-beta.1
 	github.com/gorilla/securecookie v1.1.1
 	github.com/gorilla/sessions v1.2.0
 	github.com/gorilla/websocket v1.4.2
 	github.com/h2non/filetype v1.0.8
 	github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a
-	github.com/jmoiron/sqlx v1.2.0
+	github.com/jmoiron/sqlx v1.3.1
 	github.com/json-iterator/go v1.1.9
-	github.com/karrick/godirwalk v1.16.1 // indirect
 	github.com/mattn/go-sqlite3 v1.14.6
 	github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007
 	github.com/remeh/sizedwaitgroup v1.0.0
@ -33,21 +30,68 @@ require (
 	github.com/spf13/afero v1.2.0 // indirect
 	github.com/spf13/pflag v1.0.3
 	github.com/spf13/viper v1.7.0
-	github.com/stretchr/testify v1.5.1
+	github.com/stretchr/testify v1.6.1
 	github.com/tidwall/gjson v1.8.1
 	github.com/tidwall/pretty v1.2.0 // indirect
 	github.com/vektah/gqlparser/v2 v2.0.1
 	github.com/vektra/mockery/v2 v2.2.1
 	golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97
 	golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb
-	golang.org/x/net v0.0.0-20210226172049-e18ecbb05110
+	golang.org/x/net v0.0.0-20210520170846-37e1c6afe023
 	golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c
 	golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b // indirect
-	golang.org/x/tools v0.0.0-20200915031644-64986481280e // indirect
+	golang.org/x/tools v0.1.0 // indirect
 	gopkg.in/sourcemap.v1 v1.0.5 // indirect
-	gopkg.in/yaml.v2 v2.3.0
+	gopkg.in/yaml.v2 v2.4.0
 )
 
+require (
+	github.com/agnivade/levenshtein v1.1.0 // indirect
+	github.com/antchfx/xpath v1.1.6 // indirect
+	github.com/chromedp/sysutil v1.0.0 // indirect
+	github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect
+	github.com/davecgh/go-spew v1.1.1 // indirect
+	github.com/fsnotify/fsnotify v1.4.7 // indirect
+	github.com/gobwas/httphead v0.1.0 // indirect
+	github.com/gobwas/pool v0.2.1 // indirect
+	github.com/gobwas/ws v1.1.0-rc.5 // indirect
+	github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect
+	github.com/hashicorp/errwrap v1.0.0 // indirect
+	github.com/hashicorp/go-multierror v1.1.0 // indirect
+	github.com/hashicorp/golang-lru v0.5.1 // indirect
+	github.com/hashicorp/hcl v1.0.0 // indirect
+	github.com/inconshreveable/mousetrap v1.0.0 // indirect
+	github.com/josharian/intern v1.0.0 // indirect
+	github.com/magiconair/properties v1.8.1 // indirect
+	github.com/mailru/easyjson v0.7.7 // indirect
+	github.com/matryer/moq v0.0.0-20200106131100-75d0ddfc0007 // indirect
+	github.com/mitchellh/go-homedir v1.1.0 // indirect
+	github.com/mitchellh/mapstructure v1.1.2 // indirect
+	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
+	github.com/modern-go/reflect2 v1.0.1 // indirect
+	github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 // indirect
+	github.com/pelletier/go-toml v1.7.0 // indirect
+	github.com/pkg/errors v0.9.1 // indirect
+	github.com/pmezard/go-difflib v1.0.0 // indirect
+	github.com/rs/zerolog v1.18.0 // indirect
+	github.com/russross/blackfriday/v2 v2.0.1 // indirect
+	github.com/shurcooL/sanitized_anchor_name v1.0.0 // indirect
+	github.com/spf13/cast v1.3.0 // indirect
+	github.com/spf13/cobra v1.0.0 // indirect
+	github.com/spf13/jwalterweatherman v1.0.0 // indirect
+	github.com/stretchr/objx v0.2.0 // indirect
+	github.com/subosito/gotenv v1.2.0 // indirect
+	github.com/tidwall/match v1.0.3 // indirect
+	github.com/urfave/cli/v2 v2.1.1 // indirect
+	github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e // indirect
+	go.uber.org/atomic v1.6.0 // indirect
+	golang.org/x/mod v0.4.1 // indirect
+	golang.org/x/text v0.3.6 // indirect
+	golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
+	gopkg.in/ini.v1 v1.51.0 // indirect
+	gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect
+)
 
 replace git.apache.org/thrift.git => github.com/apache/thrift v0.0.0-20180902110319-2566ecd5d999
 
-go 1.13
+go 1.17
gqlgen.yml (16 changes)

@ -34,24 +34,8 @@ models:
     model: github.com/stashapp/stash/pkg/models.Movie
   Tag:
     model: github.com/stashapp/stash/pkg/models.Tag
-  ScrapedPerformer:
-    model: github.com/stashapp/stash/pkg/models.ScrapedPerformer
-  ScrapedScene:
-    model: github.com/stashapp/stash/pkg/models.ScrapedScene
-  ScrapedScenePerformer:
-    model: github.com/stashapp/stash/pkg/models.ScrapedScenePerformer
-  ScrapedSceneStudio:
-    model: github.com/stashapp/stash/pkg/models.ScrapedSceneStudio
-  ScrapedSceneMovie:
-    model: github.com/stashapp/stash/pkg/models.ScrapedSceneMovie
-  ScrapedSceneTag:
-    model: github.com/stashapp/stash/pkg/models.ScrapedSceneTag
   SceneFileType:
     model: github.com/stashapp/stash/pkg/models.SceneFileType
-  ScrapedMovie:
-    model: github.com/stashapp/stash/pkg/models.ScrapedMovie
-  ScrapedMovieStudio:
-    model: github.com/stashapp/stash/pkg/models.ScrapedMovieStudio
   SavedFilter:
     model: github.com/stashapp/stash/pkg/models.SavedFilter
   StashID:
@ -6,6 +6,7 @@ fragment ConfigGeneralData on ConfigGeneralResult {
   }
   databasePath
   generatedPath
+  metadataPath
   cachePath
   calculateMD5
   videoFileNamingAlgorithm

@ -18,10 +19,12 @@ fragment ConfigGeneralData on ConfigGeneralResult {
   previewPreset
   maxTranscodeSize
   maxStreamingTranscodeSize
+  writeImageThumbnails
   apiKey
   username
   password
   maxSessionAge
+  trustedProxies
   logFile
   logOut
   logLevel
@ -2,4 +2,4 @@ fragment SlimMovieData on Movie {
   id
   name
   front_image_path
 }
@ -17,4 +17,10 @@ fragment MovieData on Movie {
   front_image_path
   back_image_path
   scene_count
+
+  scenes {
+    id
+    title
+    path
+  }
 }
@ -22,6 +22,7 @@ fragment PerformerData on Performer {
   scene_count
   image_count
   gallery_count
+  movie_count
 
   tags {
     ...SlimTagData
@ -4,6 +4,7 @@ fragment SceneMarkerData on SceneMarker {
   seconds
   stream
   preview
+  screenshot
 
   scene {
     id
@ -1,4 +1,5 @@
 fragment ScrapedPerformerData on ScrapedPerformer {
+  stored_id
   name
   gender
   url

@ -18,7 +19,7 @@ fragment ScrapedPerformerData on ScrapedPerformer {
   tags {
     ...ScrapedSceneTagData
   }
-  image
+  images
   details
   death_date
   hair_color

@ -26,7 +27,7 @@ fragment ScrapedPerformerData on ScrapedPerformer {
   remote_site_id
 }
 
-fragment ScrapedScenePerformerData on ScrapedScenePerformer {
+fragment ScrapedScenePerformerData on ScrapedPerformer {
   stored_id
   name
   gender

@ -55,8 +56,8 @@ fragment ScrapedScenePerformerData on ScrapedPerformer {
   weight
 }
 
-fragment ScrapedMovieStudioData on ScrapedMovieStudio {
-  id
+fragment ScrapedMovieStudioData on ScrapedStudio {
+  stored_id
   name
   url
 }

@ -78,7 +79,7 @@ fragment ScrapedMovieData on ScrapedMovie {
   }
 }
 
-fragment ScrapedSceneMovieData on ScrapedSceneMovie {
+fragment ScrapedSceneMovieData on ScrapedMovie {
   stored_id
   name
   aliases

@ -90,14 +91,14 @@ fragment ScrapedSceneMovieData on ScrapedMovie {
   synopsis
 }
 
-fragment ScrapedSceneStudioData on ScrapedSceneStudio {
+fragment ScrapedSceneStudioData on ScrapedStudio {
   stored_id
   name
   url
+  remote_site_id
 }
 
-fragment ScrapedSceneTagData on ScrapedSceneTag {
+fragment ScrapedSceneTagData on ScrapedTag {
   stored_id
   name
 }
@ -108,6 +109,7 @@ fragment ScrapedSceneData on ScrapedScene {
   url
   date
   image
+  remote_site_id
 
   file {
     size

@ -135,6 +137,12 @@ fragment ScrapedSceneData on ScrapedScene {
   movies {
     ...ScrapedSceneMovieData
   }
+
+  fingerprints {
+    hash
+    algorithm
+    duration
+  }
 }
 
 fragment ScrapedGalleryData on ScrapedGallery {
@ -11,4 +11,5 @@ fragment SlimStudioData on Studio {
   }
   details
   rating
+  aliases
 }
@ -18,10 +18,12 @@ fragment StudioData on Studio {
   scene_count
   image_count
   gallery_count
+  movie_count
   stash_ids {
     stash_id
     endpoint
   }
   details
   rating
+  aliases
 }
@ -8,4 +8,12 @@ fragment TagData on Tag {
   image_count
   gallery_count
   performer_count
+
+  parents {
+    ...SlimTagData
+  }
+
+  children {
+    ...SlimTagData
+  }
 }
@ -42,14 +42,14 @@ query ListMovieScrapers {
   }
 }
 
-query ScrapePerformerList($scraper_id: ID!, $query: String!) {
-  scrapePerformerList(scraper_id: $scraper_id, query: $query) {
+query ScrapeSinglePerformer($source: ScraperSourceInput!, $input: ScrapeSinglePerformerInput!) {
+  scrapeSinglePerformer(source: $source, input: $input) {
     ...ScrapedPerformerData
   }
 }
 
-query ScrapePerformer($scraper_id: ID!, $scraped_performer: ScrapedPerformerInput!) {
-  scrapePerformer(scraper_id: $scraper_id, scraped_performer: $scraped_performer) {
+query ScrapeMultiPerformers($source: ScraperSourceInput!, $input: ScrapeMultiPerformersInput!) {
+  scrapeMultiPerformers(source: $source, input: $input) {
     ...ScrapedPerformerData
   }
 }

@ -60,8 +60,14 @@ query ScrapePerformerURL($url: String!) {
   }
 }
 
-query ScrapeScene($scraper_id: ID!, $scene: SceneUpdateInput!) {
-  scrapeScene(scraper_id: $scraper_id, scene: $scene) {
+query ScrapeSingleScene($source: ScraperSourceInput!, $input: ScrapeSingleSceneInput!) {
+  scrapeSingleScene(source: $source, input: $input) {
+    ...ScrapedSceneData
+  }
+}
+
+query ScrapeMultiScenes($source: ScraperSourceInput!, $input: ScrapeMultiScenesInput!) {
+  scrapeMultiScenes(source: $source, input: $input) {
     ...ScrapedSceneData
   }
 }

@ -72,8 +78,8 @@ query ScrapeSceneURL($url: String!) {
   }
 }
 
-query ScrapeGallery($scraper_id: ID!, $gallery: GalleryUpdateInput!) {
-  scrapeGallery(scraper_id: $scraper_id, gallery: $gallery) {
+query ScrapeSingleGallery($source: ScraperSourceInput!, $input: ScrapeSingleGalleryInput!) {
+  scrapeSingleGallery(source: $source, input: $input) {
     ...ScrapedGalleryData
   }
 }

@ -89,15 +95,3 @@ query ScrapeMovieURL($url: String!) {
     ...ScrapedMovieData
   }
 }
-
-query QueryStashBoxScene($input: StashBoxSceneQueryInput!) {
-  queryStashBoxScene(input: $input) {
-    ...ScrapedStashBoxSceneData
-  }
-}
-
-query QueryStashBoxPerformer($input: StashBoxPerformerQueryInput!) {
-  queryStashBoxPerformer(input: $input) {
-    ...ScrapedStashBoxPerformerData
-  }
-}
@ -72,31 +72,50 @@ type Query {
|
|||
listGalleryScrapers: [Scraper!]!
|
||||
listMovieScrapers: [Scraper!]!
|
||||
|
||||
"""Scrape a list of performers based on name"""
|
||||
scrapePerformerList(scraper_id: ID!, query: String!): [ScrapedPerformer!]!
|
||||
"""Scrapes a complete performer record based on a scrapePerformerList result"""
|
||||
scrapePerformer(scraper_id: ID!, scraped_performer: ScrapedPerformerInput!): ScrapedPerformer
|
||||
"""Scrape for a single scene"""
|
||||
scrapeSingleScene(source: ScraperSourceInput!, input: ScrapeSingleSceneInput!): [ScrapedScene!]!
|
||||
"""Scrape for multiple scenes"""
|
||||
scrapeMultiScenes(source: ScraperSourceInput!, input: ScrapeMultiScenesInput!): [[ScrapedScene!]!]!
|
||||
|
||||
"""Scrape for a single performer"""
|
||||
scrapeSinglePerformer(source: ScraperSourceInput!, input: ScrapeSinglePerformerInput!): [ScrapedPerformer!]!
|
||||
"""Scrape for multiple performers"""
|
||||
scrapeMultiPerformers(source: ScraperSourceInput!, input: ScrapeMultiPerformersInput!): [[ScrapedPerformer!]!]!
|
||||
|
||||
"""Scrape for a single gallery"""
|
||||
scrapeSingleGallery(source: ScraperSourceInput!, input: ScrapeSingleGalleryInput!): [ScrapedGallery!]!
|
||||
|
||||
"""Scrape for a single movie"""
|
||||
scrapeSingleMovie(source: ScraperSourceInput!, input: ScrapeSingleMovieInput!): [ScrapedMovie!]!
|
||||
|
||||
"""Scrapes a complete performer record based on a URL"""
|
||||
scrapePerformerURL(url: String!): ScrapedPerformer
|
||||
"""Scrapes a complete scene record based on an existing scene"""
|
||||
scrapeScene(scraper_id: ID!, scene: SceneUpdateInput!): ScrapedScene
|
||||
"""Scrapes a complete performer record based on a URL"""
|
||||
scrapeSceneURL(url: String!): ScrapedScene
|
||||
"""Scrapes a complete gallery record based on an existing gallery"""
|
||||
scrapeGallery(scraper_id: ID!, gallery: GalleryUpdateInput!): ScrapedGallery
|
||||
"""Scrapes a complete gallery record based on a URL"""
|
||||
scrapeGalleryURL(url: String!): ScrapedGallery
|
||||
"""Scrapes a complete movie record based on a URL"""
|
||||
scrapeMovieURL(url: String!): ScrapedMovie
|
||||
|
||||
"""Scrape a list of performers based on name"""
|
||||
scrapePerformerList(scraper_id: ID!, query: String!): [ScrapedPerformer!]! @deprecated(reason: "use scrapeSinglePerformer")
|
||||
"""Scrapes a complete performer record based on a scrapePerformerList result"""
|
||||
scrapePerformer(scraper_id: ID!, scraped_performer: ScrapedPerformerInput!): ScrapedPerformer @deprecated(reason: "use scrapeSinglePerformer")
|
||||
"""Scrapes a complete scene record based on an existing scene"""
|
||||
scrapeScene(scraper_id: ID!, scene: SceneUpdateInput!): ScrapedScene @deprecated(reason: "use scrapeSingleScene")
|
||||
"""Scrapes a complete gallery record based on an existing gallery"""
|
||||
scrapeGallery(scraper_id: ID!, gallery: GalleryUpdateInput!): ScrapedGallery @deprecated(reason: "use scrapeSingleGallery")
|
||||
|
||||
"""Scrape a performer using Freeones"""
|
||||
scrapeFreeones(performer_name: String!): ScrapedPerformer
|
||||
scrapeFreeones(performer_name: String!): ScrapedPerformer @deprecated(reason: "use scrapeSinglePerformer with scraper_id = builtin_freeones")
|
||||
"""Scrape a list of performers from a query"""
|
||||
scrapeFreeonesPerformerList(query: String!): [String!]!
|
||||
scrapeFreeonesPerformerList(query: String!): [String!]! @deprecated(reason: "use scrapeSinglePerformer with scraper_id = builtin_freeones")
|
||||
|
||||
"""Query StashBox for scenes"""
|
||||
queryStashBoxScene(input: StashBoxSceneQueryInput!): [ScrapedScene!]!
|
||||
queryStashBoxPerformer(input: StashBoxPerformerQueryInput!): [StashBoxPerformerQueryResult!]!
|
||||
queryStashBoxScene(input: StashBoxSceneQueryInput!): [ScrapedScene!]! @deprecated(reason: "use scrapeSingleScene or scrapeMultiScenes")
|
||||
"""Query StashBox for performers"""
|
||||
queryStashBoxPerformer(input: StashBoxPerformerQueryInput!): [StashBoxPerformerQueryResult!]! @deprecated(reason: "use scrapeSinglePerformer or scrapeMultiPerformers")
|
||||
# === end deprecated methods ===
|
||||
|
||||
# Plugins
|
||||
"""List loaded plugins"""
|
||||
|
|
|
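The deprecated fields above keep the old single-scraper API callable while clients migrate to the new source/input pairs. As a rough migration sketch (the endpoint URL, the helper name and the scraper ID are illustrative assumptions, not part of this diff), a client moves from scrapeScene to scrapeSingleScene by wrapping the scraper ID in a ScraperSourceInput:

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "net/http"
)

// scrapeSingleScene posts the replacement query; compare the deprecated
// scrapeScene(scraper_id, scene) signature above.
func scrapeSingleScene(scraperID, sceneID string) (*http.Response, error) {
    body, err := json.Marshal(map[string]interface{}{
        "query": `query($source: ScraperSourceInput!, $input: ScrapeSingleSceneInput!) {
  scrapeSingleScene(source: $source, input: $input) { title url date }
}`,
        "variables": map[string]interface{}{
            "source": map[string]interface{}{"scraper_id": scraperID},
            "input":  map[string]interface{}{"scene_id": sceneID},
        },
    })
    if err != nil {
        return nil, err
    }
    // localhost:9999 is stash's default port; adjust as needed.
    return http.Post("http://localhost:9999/graphql", "application/json", bytes.NewReader(body))
}

func main() {
    resp, err := scrapeSingleScene("example_scraper", "42") // hypothetical IDs
    if err == nil {
        defer resp.Body.Close()
        fmt.Println(resp.Status)
    }
}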
|||
|
|
@ -39,6 +39,8 @@ input ConfigGeneralInput {
|
|||
databasePath: String
|
||||
"""Path to generated files"""
|
||||
generatedPath: String
|
||||
"""Path to import/export files"""
|
||||
metadataPath: String
|
||||
"""Path to cache"""
|
||||
cachePath: String
|
||||
"""Whether to calculate MD5 checksums for scene video files"""
|
||||
|
|
@ -63,12 +65,16 @@ input ConfigGeneralInput {
|
|||
maxTranscodeSize: StreamingResolutionEnum
|
||||
"""Max streaming transcode size"""
|
||||
maxStreamingTranscodeSize: StreamingResolutionEnum
|
||||
"""Write image thumbnails to disk when generating on the fly"""
|
||||
writeImageThumbnails: Boolean
|
||||
"""Username"""
|
||||
username: String
|
||||
"""Password"""
|
||||
password: String
|
||||
"""Maximum session cookie age"""
|
||||
maxSessionAge: Int
|
||||
"""Comma separated list of proxies to allow traffic from"""
|
||||
trustedProxies: [String!]
|
||||
"""Name of the log file"""
|
||||
logFile: String
|
||||
"""Whether to also output to stderr"""
|
||||
|
|
@ -108,6 +114,8 @@ type ConfigGeneralResult {
|
|||
databasePath: String!
|
||||
"""Path to generated files"""
|
||||
generatedPath: String!
|
||||
"""Path to import/export files"""
|
||||
metadataPath: String!
|
||||
"""Path to the config file used"""
|
||||
configFilePath: String!
|
||||
"""Path to scrapers"""
|
||||
|
|
@ -136,6 +144,8 @@ type ConfigGeneralResult {
|
|||
maxTranscodeSize: StreamingResolutionEnum
|
||||
"""Max streaming transcode size"""
|
||||
maxStreamingTranscodeSize: StreamingResolutionEnum
|
||||
"""Write image thumbnails to disk when generating on the fly"""
|
||||
writeImageThumbnails: Boolean!
|
||||
"""API Key"""
|
||||
apiKey: String!
|
||||
"""Username"""
|
||||
|
|
@ -144,6 +154,8 @@ type ConfigGeneralResult {
|
|||
password: String!
|
||||
"""Maximum session cookie age"""
|
||||
maxSessionAge: Int!
|
||||
"""Comma separated list of proxies to allow traffic from"""
|
||||
trustedProxies: [String!]!
|
||||
"""Name of the log file"""
|
||||
logFile: String
|
||||
"""Whether to also output to stderr"""
|
||||
|
|
|
|||
|
|
@ -72,7 +72,7 @@ input PerformerFilterType {
|
|||
"""Filter to only include performers missing this property"""
|
||||
is_missing: String
|
||||
"""Filter to only include performers with these tags"""
|
||||
tags: MultiCriterionInput
|
||||
tags: HierarchicalMultiCriterionInput
|
||||
"""Filter by tag count"""
|
||||
tag_count: IntCriterionInput
|
||||
"""Filter by scene count"""
|
||||
|
|
@ -99,11 +99,11 @@ input PerformerFilterType {
|
|||
|
||||
input SceneMarkerFilterType {
|
||||
"""Filter to only include scene markers with this tag"""
|
||||
tag_id: ID
|
||||
tag_id: ID @deprecated(reason: "use tags filter instead")
|
||||
"""Filter to only include scene markers with these tags"""
|
||||
tags: MultiCriterionInput
|
||||
tags: HierarchicalMultiCriterionInput
|
||||
"""Filter to only include scene markers attached to a scene with these tags"""
|
||||
scene_tags: MultiCriterionInput
|
||||
scene_tags: HierarchicalMultiCriterionInput
|
||||
"""Filter to only include scene markers with these performers"""
|
||||
performers: MultiCriterionInput
|
||||
}
|
||||
|
|
@ -143,11 +143,11 @@ input SceneFilterType {
|
|||
"""Filter to only include scenes with this movie"""
|
||||
movies: MultiCriterionInput
|
||||
"""Filter to only include scenes with these tags"""
|
||||
tags: MultiCriterionInput
|
||||
tags: HierarchicalMultiCriterionInput
|
||||
"""Filter by tag count"""
|
||||
tag_count: IntCriterionInput
|
||||
"""Filter to only include scenes with performers with these tags"""
|
||||
performer_tags: MultiCriterionInput
|
||||
performer_tags: HierarchicalMultiCriterionInput
|
||||
"""Filter to only include scenes with these performers"""
|
||||
performers: MultiCriterionInput
|
||||
"""Filter by performer count"""
|
||||
|
|
@ -176,9 +176,15 @@ input MovieFilterType {
|
|||
is_missing: String
|
||||
"""Filter by url"""
|
||||
url: StringCriterionInput
|
||||
"""Filter to only include movies where performer appears in a scene"""
|
||||
performers: MultiCriterionInput
|
||||
}
|
||||
|
||||
input StudioFilterType {
|
||||
AND: StudioFilterType
|
||||
OR: StudioFilterType
|
||||
NOT: StudioFilterType
|
||||
|
||||
name: StringCriterionInput
|
||||
details: StringCriterionInput
|
||||
"""Filter to only include studios with this parent studio"""
|
||||
|
|
@ -197,6 +203,8 @@ input StudioFilterType {
|
|||
gallery_count: IntCriterionInput
|
||||
"""Filter by url"""
|
||||
url: StringCriterionInput
|
||||
"""Filter by studio aliases"""
|
||||
aliases: StringCriterionInput
|
||||
}
|
||||
|
||||
input GalleryFilterType {
|
||||
|
|
@ -224,11 +232,11 @@ input GalleryFilterType {
|
|||
"""Filter to only include galleries with this studio"""
|
||||
studios: HierarchicalMultiCriterionInput
|
||||
"""Filter to only include galleries with these tags"""
|
||||
tags: MultiCriterionInput
|
||||
tags: HierarchicalMultiCriterionInput
|
||||
"""Filter by tag count"""
|
||||
tag_count: IntCriterionInput
|
||||
"""Filter to only include galleries with performers with these tags"""
|
||||
performer_tags: MultiCriterionInput
|
||||
performer_tags: HierarchicalMultiCriterionInput
|
||||
"""Filter to only include galleries with these performers"""
|
||||
performers: MultiCriterionInput
|
||||
"""Filter by performer count"""
|
||||
|
|
@ -267,6 +275,18 @@ input TagFilterType {
|
|||
|
||||
"""Filter by number of markers with this tag"""
|
||||
marker_count: IntCriterionInput
|
||||
|
||||
"""Filter by parent tags"""
|
||||
parents: HierarchicalMultiCriterionInput
|
||||
|
||||
"""Filter by child tags"""
|
||||
children: HierarchicalMultiCriterionInput
|
||||
|
||||
"""Filter by number of parent tags the tag has"""
|
||||
parent_count: IntCriterionInput
|
||||
|
||||
"""Filter by number f child tags the tag has"""
|
||||
child_count: IntCriterionInput
|
||||
}
|
||||
|
||||
input ImageFilterType {
|
||||
|
|
@ -293,11 +313,11 @@ input ImageFilterType {
|
|||
"""Filter to only include images with this studio"""
|
||||
studios: HierarchicalMultiCriterionInput
|
||||
"""Filter to only include images with these tags"""
|
||||
tags: MultiCriterionInput
|
||||
tags: HierarchicalMultiCriterionInput
|
||||
"""Filter by tag count"""
|
||||
tag_count: IntCriterionInput
|
||||
"""Filter to only include images with performers with these tags"""
|
||||
performer_tags: MultiCriterionInput
|
||||
performer_tags: HierarchicalMultiCriterionInput
|
||||
"""Filter to only include images with these performers"""
|
||||
performers: MultiCriterionInput
|
||||
"""Filter by performer count"""
|
||||
|
|
@ -357,7 +377,7 @@ input GenderCriterionInput {
|
|||
input HierarchicalMultiCriterionInput {
|
||||
value: [ID!]
|
||||
modifier: CriterionModifier!
|
||||
depth: Int!
|
||||
depth: Int
|
||||
}
|
||||
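Loosening depth from Int! to Int lets clients omit it entirely; a nil depth can then default server-side instead of every caller having to send 0. A minimal sketch of the corresponding Go input model, assuming the pointer-for-nullable convention the generated models in this codebase follow (names illustrative):

package filter

type CriterionModifier string

type HierarchicalMultiCriterionInput struct {
    Value    []string          `json:"value"`
    Modifier CriterionModifier `json:"modifier"`
    Depth    *int              `json:"depth"` // nil when the client omits depth
}

// depthOrDefault shows how a resolver might read the now-optional field.
func depthOrDefault(in HierarchicalMultiCriterionInput) int {
    if in.Depth == nil {
        return 0 // assumed default: no hierarchy traversal
    }
    return *in.Depth
}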
|
||||
enum FilterMode {
|
||||
|
|
|
|||
|
|
@ -1,13 +1,15 @@
|
|||
scalar Upload
|
||||
|
||||
input GenerateMetadataInput {
|
||||
sprites: Boolean!
|
||||
previews: Boolean!
|
||||
imagePreviews: Boolean!
|
||||
sprites: Boolean
|
||||
previews: Boolean
|
||||
imagePreviews: Boolean
|
||||
previewOptions: GeneratePreviewOptionsInput
|
||||
markers: Boolean!
|
||||
transcodes: Boolean!
|
||||
phashes: Boolean!
|
||||
markers: Boolean
|
||||
markerImagePreviews: Boolean
|
||||
markerScreenshots: Boolean
|
||||
transcodes: Boolean
|
||||
phashes: Boolean
|
||||
|
||||
"""scene ids to generate for"""
|
||||
sceneIDs: [ID!]
|
||||
|
|
@ -45,6 +47,8 @@ input ScanMetadataInput {
|
|||
scanGenerateSprites: Boolean
|
||||
"""Generate phashes during scan"""
|
||||
scanGeneratePhashes: Boolean
|
||||
"""Generate image thumbnails during scan"""
|
||||
scanGenerateThumbnails: Boolean
|
||||
}
|
||||
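With the generate flags loosened from Boolean! to Boolean, a client can name only the artifacts it wants and leave the rest unset. A hedged example of building such a payload (treating the metadataGenerate mutation name as an assumption taken from how the schema exposes this input elsewhere):

package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    // Only previews and phashes are requested; every omitted flag stays
    // null instead of being forced to an explicit false.
    input := map[string]interface{}{
        "previews": true,
        "phashes":  true,
        "sceneIDs": []string{"12", "34"},
    }
    b, _ := json.Marshal(input)
    fmt.Println(string(b)) // variables for a metadataGenerate call
}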
|
||||
input CleanMetadataInput {
|
||||
|
|
|
|||
|
|
@ -17,6 +17,7 @@ type Movie {
|
|||
front_image_path: String # Resolver
|
||||
back_image_path: String # Resolver
|
||||
scene_count: Int # Resolver
|
||||
scenes: [Scene!]!
|
||||
}
|
||||
|
||||
input MovieCreateInput {
|
||||
|
|
@ -60,4 +61,4 @@ input MovieDestroyInput {
|
|||
type FindMoviesResultType {
|
||||
count: Int!
|
||||
movies: [Movie!]!
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -42,6 +42,8 @@ type Performer {
|
|||
weight: Int
|
||||
created_at: Time!
|
||||
updated_at: Time!
|
||||
movie_count: Int
|
||||
movies: [Movie!]!
|
||||
}
|
||||
|
||||
input PerformerCreateInput {
|
||||
|
|
|
|||
|
|
@ -12,6 +12,8 @@ type SceneMarker {
|
|||
stream: String! # Resolver
|
||||
"""The path to the preview image for this marker"""
|
||||
preview: String! # Resolver
|
||||
"""The path to the screenshot image for this marker"""
|
||||
screenshot: String! # Resolver
|
||||
}
|
||||
|
||||
input SceneMarkerCreateInput {
|
||||
|
|
|
|||
|
|
@ -103,6 +103,7 @@ input BulkSceneUpdateInput {
|
|||
gallery_ids: BulkUpdateIds
|
||||
performer_ids: BulkUpdateIds
|
||||
tag_ids: BulkUpdateIds
|
||||
movie_ids: BulkUpdateIds
|
||||
}
|
||||
|
||||
input SceneDestroyInput {
|
||||
|
|
|
|||
|
|
@ -1,12 +1,6 @@
|
|||
type ScrapedMovieStudio {
|
||||
"""Set if studio matched"""
|
||||
id: ID
|
||||
name: String!
|
||||
url: String
|
||||
}
|
||||
|
||||
"""A movie from a scraping operation..."""
|
||||
type ScrapedMovie {
|
||||
stored_id: ID
|
||||
name: String
|
||||
aliases: String
|
||||
duration: String
|
||||
|
|
@ -15,7 +9,7 @@ type ScrapedMovie {
|
|||
director: String
|
||||
url: String
|
||||
synopsis: String
|
||||
studio: ScrapedMovieStudio
|
||||
studio: ScrapedStudio
|
||||
|
||||
"""This should be a base64 encoded data URL"""
|
||||
front_image: String
|
||||
|
|
|
|||
|
|
@ -1,5 +1,7 @@
|
|||
"""A performer from a scraping operation..."""
|
||||
type ScrapedPerformer {
|
||||
"""Set if performer matched"""
|
||||
stored_id: ID
|
||||
name: String
|
||||
gender: String
|
||||
url: String
|
||||
|
|
@ -16,11 +18,11 @@ type ScrapedPerformer {
|
|||
tattoos: String
|
||||
piercings: String
|
||||
aliases: String
|
||||
# Should be ScrapedPerformerTag - but would be identical types
|
||||
tags: [ScrapedSceneTag!]
|
||||
tags: [ScrapedTag!]
|
||||
|
||||
"""This should be a base64 encoded data URL"""
|
||||
image: String
|
||||
image: String @deprecated(reason: "use images instead")
|
||||
images: [String!]
|
||||
details: String
|
||||
death_date: String
|
||||
hair_color: String
|
||||
|
|
@ -29,6 +31,8 @@ type ScrapedPerformer {
|
|||
}
|
||||
|
||||
input ScrapedPerformerInput {
|
||||
"""Set if performer matched"""
|
||||
stored_id: ID
|
||||
name: String
|
||||
gender: String
|
||||
url: String
|
||||
|
|
|
|||
|
|
@ -26,49 +26,7 @@ type Scraper {
|
|||
movie: ScraperSpec
|
||||
}
|
||||
|
||||
type ScrapedScenePerformer {
|
||||
"""Set if performer matched"""
|
||||
stored_id: ID
|
||||
name: String!
|
||||
gender: String
|
||||
url: String
|
||||
twitter: String
|
||||
instagram: String
|
||||
birthdate: String
|
||||
ethnicity: String
|
||||
country: String
|
||||
eye_color: String
|
||||
height: String
|
||||
measurements: String
|
||||
fake_tits: String
|
||||
career_length: String
|
||||
tattoos: String
|
||||
piercings: String
|
||||
aliases: String
|
||||
tags: [ScrapedSceneTag!]
|
||||
|
||||
remote_site_id: String
|
||||
images: [String!]
|
||||
details: String
|
||||
death_date: String
|
||||
hair_color: String
|
||||
weight: String
|
||||
}
|
||||
|
||||
type ScrapedSceneMovie {
|
||||
"""Set if movie matched"""
|
||||
stored_id: ID
|
||||
name: String!
|
||||
aliases: String
|
||||
duration: String
|
||||
date: String
|
||||
rating: String
|
||||
director: String
|
||||
synopsis: String
|
||||
url: String
|
||||
}
|
||||
|
||||
type ScrapedSceneStudio {
|
||||
type ScrapedStudio {
|
||||
"""Set if studio matched"""
|
||||
stored_id: ID
|
||||
name: String!
|
||||
|
|
@ -77,7 +35,7 @@ type ScrapedSceneStudio {
|
|||
remote_site_id: String
|
||||
}
|
||||
|
||||
type ScrapedSceneTag {
|
||||
type ScrapedTag {
|
||||
"""Set if tag matched"""
|
||||
stored_id: ID
|
||||
name: String!
|
||||
|
|
@ -94,25 +52,98 @@ type ScrapedScene {
|
|||
|
||||
file: SceneFileType # Resolver
|
||||
|
||||
studio: ScrapedSceneStudio
|
||||
tags: [ScrapedSceneTag!]
|
||||
performers: [ScrapedScenePerformer!]
|
||||
movies: [ScrapedSceneMovie!]
|
||||
studio: ScrapedStudio
|
||||
tags: [ScrapedTag!]
|
||||
performers: [ScrapedPerformer!]
|
||||
movies: [ScrapedMovie!]
|
||||
|
||||
remote_site_id: String
|
||||
duration: Int
|
||||
fingerprints: [StashBoxFingerprint!]
|
||||
}
|
||||
|
||||
input ScrapedSceneInput {
|
||||
title: String
|
||||
details: String
|
||||
url: String
|
||||
date: String
|
||||
|
||||
# no image, file, duration or relationships
|
||||
|
||||
remote_site_id: String
|
||||
}
|
||||
|
||||
type ScrapedGallery {
|
||||
title: String
|
||||
details: String
|
||||
url: String
|
||||
date: String
|
||||
|
||||
studio: ScrapedSceneStudio
|
||||
tags: [ScrapedSceneTag!]
|
||||
performers: [ScrapedScenePerformer!]
|
||||
studio: ScrapedStudio
|
||||
tags: [ScrapedTag!]
|
||||
performers: [ScrapedPerformer!]
|
||||
}
|
||||
|
||||
input ScrapedGalleryInput {
|
||||
title: String
|
||||
details: String
|
||||
url: String
|
||||
date: String
|
||||
|
||||
# no studio, tags or performers
|
||||
}
|
||||
|
||||
input ScraperSourceInput {
|
||||
"""Index of the configured stash-box instance to use. Should be unset if scraper_id is set"""
|
||||
stash_box_index: Int
|
||||
"""Scraper ID to scrape with. Should be unset if stash_box_index is set"""
|
||||
scraper_id: ID
|
||||
}
|
||||
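ScraperSourceInput encodes an either/or choice: exactly one of stash_box_index or scraper_id should be set, as the field comments say. A minimal validation sketch (the function is illustrative; this diff relies on resolver-side checks instead):

package scraper

import "errors"

type ScraperSourceInput struct {
    StashBoxIndex *int
    ScraperID     *string
}

// validateSource enforces the "should be unset if the other is set" rule
// from the schema comments above.
func validateSource(s ScraperSourceInput) error {
    switch {
    case s.StashBoxIndex != nil && s.ScraperID != nil:
        return errors.New("stash_box_index and scraper_id are mutually exclusive")
    case s.StashBoxIndex == nil && s.ScraperID == nil:
        return errors.New("one of stash_box_index or scraper_id must be set")
    }
    return nil
}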
|
||||
input ScrapeSingleSceneInput {
|
||||
"""Instructs to query by string"""
|
||||
query: String
|
||||
"""Instructs to query by scene fingerprints"""
|
||||
scene_id: ID
|
||||
"""Instructs to query by scene fragment"""
|
||||
scene_input: ScrapedSceneInput
|
||||
}
|
||||
|
||||
input ScrapeMultiScenesInput {
|
||||
"""Instructs to query by scene fingerprints"""
|
||||
scene_ids: [ID!]
|
||||
}
|
||||
|
||||
input ScrapeSinglePerformerInput {
|
||||
"""Instructs to query by string"""
|
||||
query: String
|
||||
"""Instructs to query by performer id"""
|
||||
performer_id: ID
|
||||
"""Instructs to query by performer fragment"""
|
||||
performer_input: ScrapedPerformerInput
|
||||
}
|
||||
|
||||
input ScrapeMultiPerformersInput {
|
||||
"""Instructs to query by scene fingerprints"""
|
||||
performer_ids: [ID!]
|
||||
}
|
||||
|
||||
input ScrapeSingleGalleryInput {
|
||||
"""Instructs to query by string"""
|
||||
query: String
|
||||
"""Instructs to query by gallery id"""
|
||||
gallery_id: ID
|
||||
"""Instructs to query by gallery fragment"""
|
||||
gallery_input: ScrapedGalleryInput
|
||||
}
|
||||
|
||||
input ScrapeSingleMovieInput {
|
||||
"""Instructs to query by string"""
|
||||
query: String
|
||||
"""Instructs to query by movie id"""
|
||||
movie_id: ID
|
||||
"""Instructs to query by gallery fragment"""
|
||||
movie_input: ScrapedMovieInput
|
||||
}
|
||||
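The single-entity inputs above are likewise one-of-three: a free-text query, a stored entity ID, or a scraped fragment. A condensed sketch of the dispatch (mirroring the ScrapeSingleScene resolver later in this diff; types trimmed for brevity):

package scraper

import "errors"

type ScrapedSceneInput struct{ Title *string }

type ScrapeSingleSceneInput struct {
    Query      *string
    SceneID    *string
    SceneInput *ScrapedSceneInput
}

// mode reports which of the three mutually exclusive query modes is set.
func mode(in ScrapeSingleSceneInput) (string, error) {
    switch {
    case in.SceneID != nil:
        return "by scene id", nil
    case in.SceneInput != nil:
        return "by scene fragment", nil
    case in.Query != nil:
        return "by query string", nil
    }
    return "", errors.New("scene_id, scene_input or query must be set")
}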
|
||||
input StashBoxSceneQueryInput {
|
||||
|
|
@ -135,7 +166,7 @@ input StashBoxPerformerQueryInput {
|
|||
|
||||
type StashBoxPerformerQueryResult {
|
||||
query: String!
|
||||
results: [ScrapedScenePerformer!]!
|
||||
results: [ScrapedPerformer!]!
|
||||
}
|
||||
|
||||
type StashBoxFingerprint {
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ type Studio {
|
|||
url: String
|
||||
parent_studio: Studio
|
||||
child_studios: [Studio!]!
|
||||
aliases: [String!]!
|
||||
|
||||
image_path: String # Resolver
|
||||
scene_count: Int # Resolver
|
||||
|
|
@ -15,6 +16,8 @@ type Studio {
|
|||
details: String
|
||||
created_at: Time!
|
||||
updated_at: Time!
|
||||
movie_count: Int
|
||||
movies: [Movie!]!
|
||||
}
|
||||
|
||||
input StudioCreateInput {
|
||||
|
|
@ -26,6 +29,7 @@ input StudioCreateInput {
|
|||
stash_ids: [StashIDInput!]
|
||||
rating: Int
|
||||
details: String
|
||||
aliases: [String!]
|
||||
}
|
||||
|
||||
input StudioUpdateInput {
|
||||
|
|
@ -38,6 +42,7 @@ input StudioUpdateInput {
|
|||
stash_ids: [StashIDInput!]
|
||||
rating: Int
|
||||
details: String
|
||||
aliases: [String!]
|
||||
}
|
||||
|
||||
input StudioDestroyInput {
|
||||
|
|
|
|||
|
|
@ -11,6 +11,9 @@ type Tag {
|
|||
image_count: Int # Resolver
|
||||
gallery_count: Int # Resolver
|
||||
performer_count: Int
|
||||
|
||||
parents: [Tag!]!
|
||||
children: [Tag!]!
|
||||
}
|
||||
|
||||
input TagCreateInput {
|
||||
|
|
@ -19,6 +22,9 @@ input TagCreateInput {
|
|||
|
||||
"""This should be a URL or a base64 encoded data URL"""
|
||||
image: String
|
||||
|
||||
parent_ids: [ID!]
|
||||
child_ids: [ID!]
|
||||
}
|
||||
|
||||
input TagUpdateInput {
|
||||
|
|
@ -28,6 +34,9 @@ input TagUpdateInput {
|
|||
|
||||
"""This should be a URL or a base64 encoded data URL"""
|
||||
image: String
|
||||
|
||||
parent_ids: [ID!]
|
||||
child_ids: [ID!]
|
||||
}
|
||||
|
||||
input TagDestroyInput {
|
||||
|
|
|
|||
15
main.go
|
|
@ -2,25 +2,38 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"embed"
|
||||
"os"
|
||||
"os/signal"
|
||||
"runtime/pprof"
|
||||
"syscall"
|
||||
|
||||
"github.com/stashapp/stash/pkg/api"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
|
||||
_ "github.com/golang-migrate/migrate/v4/database/sqlite3"
|
||||
_ "github.com/golang-migrate/migrate/v4/source/file"
|
||||
)
|
||||
|
||||
//go:embed ui/v2.5/build
|
||||
var uiBox embed.FS
|
||||
|
||||
//go:embed ui/login
|
||||
var loginUIBox embed.FS
|
||||
|
||||
func main() {
|
||||
manager.Initialize()
|
||||
api.Start()
|
||||
api.Start(uiBox, loginUIBox)
|
||||
|
||||
// stop any profiling at exit
|
||||
defer pprof.StopCPUProfile()
|
||||
blockForever()
|
||||
|
||||
err := manager.GetInstance().Shutdown()
|
||||
if err != nil {
|
||||
logger.Errorf("Error when closing: %s", err)
|
||||
}
|
||||
}
|
||||
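The //go:embed directives are what replaces packr here: the UI build output and the login page are compiled into the binary as embed.FS values, with no generated *-packr.go files. Note that the embedded path must exist when go build runs. A standalone sketch of the mechanism (the static directory is an invented example):

package main

import (
    "embed"
    "fmt"
    "io/fs"
)

// Embeds every file under static/ into the binary at compile time;
// go build fails if the directory is missing.
//
//go:embed static
var content embed.FS

func main() {
    // embed.FS satisfies fs.FS, so the standard traversal helpers apply.
    _ = fs.WalkDir(content, ".", func(path string, d fs.DirEntry, err error) error {
        if err != nil {
            return err
        }
        fmt.Println(path)
        return nil
    })
}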
|
||||
func blockForever() {
|
||||
|
|
|
|||
140
pkg/api/authentication.go
Normal file
|
|
@ -0,0 +1,140 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/manager/config"
|
||||
"github.com/stashapp/stash/pkg/session"
|
||||
)
|
||||
|
||||
const loginEndPoint = "/login"
|
||||
|
||||
const (
|
||||
tripwireActivatedErrMsg = "Stash is exposed to the public internet without authentication, and is not serving any more content to protect your privacy. " +
|
||||
"More information and fixes are available at https://github.com/stashapp/stash/wiki/Authentication-Required-When-Accessing-Stash-From-the-Internet"
|
||||
|
||||
externalAccessErrMsg = "You have attempted to access Stash over the internet, and authentication is not enabled. " +
|
||||
"This is extremely dangerous! The whole world can see your your stash page and browse your files! " +
|
||||
"Stash is not answering any other requests to protect your privacy. " +
|
||||
"Please read the log entry or visit https://github.com/stashapp/stash/wiki/Authentication-Required-When-Accessing-Stash-From-the-Internet"
|
||||
)
|
||||
|
||||
func allowUnauthenticated(r *http.Request) bool {
|
||||
return strings.HasPrefix(r.URL.Path, loginEndPoint) || r.URL.Path == "/css"
|
||||
}
|
||||
|
||||
func authenticateHandler() func(http.Handler) http.Handler {
|
||||
return func(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
c := config.GetInstance()
|
||||
|
||||
if !checkSecurityTripwireActivated(c, w) {
|
||||
return
|
||||
}
|
||||
|
||||
userID, err := manager.GetInstance().SessionStore.Authenticate(w, r)
|
||||
if err != nil {
|
||||
if err != session.ErrUnauthorized {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
_, err = w.Write([]byte(err.Error()))
|
||||
if err != nil {
|
||||
logger.Error(err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// unauthorized error
|
||||
w.Header().Add("WWW-Authenticate", `FormBased`)
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
if err := session.CheckAllowPublicWithoutAuth(c, r); err != nil {
|
||||
switch err := err.(type) {
|
||||
case session.ExternalAccessError:
|
||||
securityActivateTripwireAccessedFromInternetWithoutAuth(c, err, w)
|
||||
return
|
||||
case session.UntrustedProxyError:
|
||||
logger.Warnf("Rejected request from untrusted proxy: %s", net.IP(err).String())
|
||||
w.WriteHeader(http.StatusForbidden)
|
||||
return
|
||||
default:
|
||||
logger.Errorf("Error checking external access security: %s", err.Error())
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
ctx := r.Context()
|
||||
|
||||
if c.HasCredentials() {
|
||||
// authentication is required
|
||||
if userID == "" && !allowUnauthenticated(r) {
|
||||
// authentication was not received, redirect
|
||||
// if graphql was requested, we just return a forbidden error
|
||||
if r.URL.Path == "/graphql" {
|
||||
w.Header().Add("WWW-Authenticate", `FormBased`)
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
prefix := getProxyPrefix(r.Header)
|
||||
|
||||
// otherwise redirect to the login page
|
||||
u := url.URL{
|
||||
Path: prefix + "/login",
|
||||
}
|
||||
q := u.Query()
|
||||
q.Set(returnURLParam, prefix+r.URL.Path)
|
||||
u.RawQuery = q.Encode()
|
||||
http.Redirect(w, r, u.String(), http.StatusFound)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
ctx = session.SetCurrentUserID(ctx, userID)
|
||||
|
||||
r = r.WithContext(ctx)
|
||||
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func checkSecurityTripwireActivated(c *config.Instance, w http.ResponseWriter) bool {
|
||||
if accessErr := session.CheckExternalAccessTripwire(c); accessErr != nil {
|
||||
w.WriteHeader(http.StatusForbidden)
|
||||
_, err := w.Write([]byte(tripwireActivatedErrMsg))
|
||||
if err != nil {
|
||||
logger.Error(err)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func securityActivateTripwireAccessedFromInternetWithoutAuth(c *config.Instance, accessErr session.ExternalAccessError, w http.ResponseWriter) {
|
||||
session.LogExternalAccessError(accessErr)
|
||||
|
||||
err := c.ActivatePublicAccessTripwire(net.IP(accessErr).String())
|
||||
if err != nil {
|
||||
logger.Error(err)
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusForbidden)
|
||||
_, err = w.Write([]byte(externalAccessErrMsg))
|
||||
if err != nil {
|
||||
logger.Error(err)
|
||||
}
|
||||
|
||||
err = manager.GetInstance().Shutdown()
|
||||
if err != nil {
|
||||
logger.Error(err)
|
||||
}
|
||||
}
|
||||
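authenticateHandler returns a standard func(http.Handler) http.Handler middleware, so it slots into any router that accepts that shape. A hedged usage sketch using only the standard library (the stub and routes are illustrative; the real wiring lives elsewhere in the repo):

package main

import "net/http"

// stub standing in for the authenticateHandler defined above
func authenticateHandler() func(http.Handler) http.Handler {
    return func(next http.Handler) http.Handler {
        return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
            // ... session and tripwire checks elided ...
            next.ServeHTTP(w, r)
        })
    }
}

func main() {
    mux := http.NewServeMux()
    mux.HandleFunc("/graphql", func(w http.ResponseWriter, r *http.Request) {})
    // Wrap the whole mux so every request passes the auth checks first.
    _ = http.ListenAndServe(":9999", authenticateHandler()(mux))
}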
|
|
@ -20,7 +20,7 @@ func getArgumentMap(ctx context.Context) map[string]interface{} {
|
|||
func getUpdateInputMap(ctx context.Context) map[string]interface{} {
|
||||
args := getArgumentMap(ctx)
|
||||
|
||||
input, _ := args[updateInputField]
|
||||
input := args[updateInputField]
|
||||
var ret map[string]interface{}
|
||||
if input != nil {
|
||||
ret, _ = input.(map[string]interface{})
|
||||
|
|
@ -36,7 +36,7 @@ func getUpdateInputMap(ctx context.Context) map[string]interface{} {
|
|||
func getUpdateInputMaps(ctx context.Context) []map[string]interface{} {
|
||||
args := getArgumentMap(ctx)
|
||||
|
||||
input, _ := args[updateInputField]
|
||||
input := args[updateInputField]
|
||||
var ret []map[string]interface{}
|
||||
if input != nil {
|
||||
// convert []interface{} into []map[string]interface{}
|
||||
|
|
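The change above drops a needless blank identifier: the two-value "comma-ok" form of a map read is only required when presence itself matters; a single-value read already yields the zero value (nil here) for a missing key. A tiny runnable contrast:

package main

import "fmt"

func main() {
    args := map[string]interface{}{}

    v := args["input"] // nil when absent; fine if nil is handled anyway
    fmt.Println(v == nil)

    v2, ok := args["input"] // comma-ok: only when presence itself matters
    fmt.Println(v2, ok)
}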
|
|||
|
|
@ -4,7 +4,7 @@ import (
|
|||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"io"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"runtime"
|
||||
|
|
@ -29,6 +29,7 @@ var ErrNoVersion = errors.New("no stash version")
|
|||
var stashReleases = func() map[string]string {
|
||||
return map[string]string{
|
||||
"darwin/amd64": "stash-osx",
|
||||
"darwin/arm64": "stash-osx-applesilicon",
|
||||
"linux/amd64": "stash-linux",
|
||||
"windows/amd64": "stash-win.exe",
|
||||
"linux/arm": "stash-pi",
|
||||
|
|
@ -117,23 +118,26 @@ func makeGithubRequest(url string, output interface{}) error {
|
|||
response, err := client.Do(req)
|
||||
|
||||
if err != nil {
|
||||
//lint:ignore ST1005 Github is a proper capitalized noun
|
||||
return fmt.Errorf("Github API request failed: %s", err)
|
||||
}
|
||||
|
||||
if response.StatusCode != http.StatusOK {
|
||||
//lint:ignore ST1005 Github is a proper capitalized noun
|
||||
return fmt.Errorf("Github API request failed: %s", response.Status)
|
||||
}
|
||||
|
||||
defer response.Body.Close()
|
||||
|
||||
data, err := ioutil.ReadAll(response.Body)
|
||||
data, err := io.ReadAll(response.Body)
|
||||
if err != nil {
|
||||
//lint:ignore ST1005 Github is a proper capitalized noun
|
||||
return fmt.Errorf("Github API read response failed: %s", err)
|
||||
}
|
||||
|
||||
err = json.Unmarshal(data, output)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Unmarshalling Github API response failed: %s", err)
|
||||
return fmt.Errorf("unmarshalling Github API response failed: %s", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
|
|
@ -196,7 +200,7 @@ func GetLatestVersion(shortHash bool) (latestVersion string, latestRelease strin
|
|||
}
|
||||
|
||||
if latestVersion == "" {
|
||||
return "", "", fmt.Errorf("No version found for \"%s\"", version)
|
||||
return "", "", fmt.Errorf("no version found for \"%s\"", version)
|
||||
}
|
||||
return latestVersion, latestRelease, nil
|
||||
}
|
||||
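These hunks are mechanical migrations off the deprecated io/ioutil package (Go 1.16+): ioutil.ReadAll becomes io.ReadAll and, in a later hunk, ioutil.TempFile becomes os.CreateTemp. A side-by-side sketch of the replacements:

package main

import (
    "io"
    "os"
    "strings"
)

func main() {
    // io.ReadAll replaces ioutil.ReadAll (same behaviour, new home).
    if _, err := io.ReadAll(strings.NewReader("body")); err != nil {
        panic(err)
    }

    // os.CreateTemp replaces ioutil.TempFile, same pattern syntax.
    f, err := os.CreateTemp("", "backup*.sqlite")
    if err != nil {
        panic(err)
    }
    defer os.Remove(f.Name())
    defer f.Close()
}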
|
|
|
|||
|
|
@ -5,8 +5,8 @@ package api
|
|||
type key int
|
||||
|
||||
const (
|
||||
galleryKey key = iota
|
||||
performerKey
|
||||
// galleryKey key = 0
|
||||
performerKey key = iota + 1
|
||||
sceneKey
|
||||
studioKey
|
||||
movieKey
|
||||
|
|
|
|||
|
|
@ -1,24 +1,36 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"io"
|
||||
"io/fs"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/gobuffalo/packr/v2"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/manager/config"
|
||||
"github.com/stashapp/stash/pkg/static"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
type imageBox struct {
|
||||
box *packr.Box
|
||||
box fs.FS
|
||||
files []string
|
||||
}
|
||||
|
||||
func newImageBox(box *packr.Box) *imageBox {
|
||||
return &imageBox{
|
||||
box: box,
|
||||
files: box.List(),
|
||||
func newImageBox(box fs.FS) (*imageBox, error) {
|
||||
ret := &imageBox{
|
||||
box: box,
|
||||
}
|
||||
|
||||
err := fs.WalkDir(box, ".", func(path string, d fs.DirEntry, err error) error {
|
||||
// guard: on a walk error d may be nil, which would panic below
if err != nil {
return err
}
if !d.IsDir() {
|
||||
ret.files = append(ret.files, path)
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
return ret, err
|
||||
}
|
||||
|
||||
var performerBox *imageBox
|
||||
|
|
@ -26,8 +38,15 @@ var performerBoxMale *imageBox
|
|||
var performerBoxCustom *imageBox
|
||||
|
||||
func initialiseImages() {
|
||||
performerBox = newImageBox(packr.New("Performer Box", "../../static/performer"))
|
||||
performerBoxMale = newImageBox(packr.New("Male Performer Box", "../../static/performer_male"))
|
||||
var err error
|
||||
performerBox, err = newImageBox(&static.Performer)
|
||||
if err != nil {
|
||||
logger.Warnf("error loading performer images: %v", err)
|
||||
}
|
||||
performerBoxMale, err = newImageBox(&static.PerformerMale)
|
||||
if err != nil {
|
||||
logger.Warnf("error loading male performer images: %v", err)
|
||||
}
|
||||
initialiseCustomImages()
|
||||
}
|
||||
|
||||
|
|
@ -36,7 +55,11 @@ func initialiseCustomImages() {
|
|||
if customPath != "" {
|
||||
logger.Debugf("Loading custom performer images from %s", customPath)
|
||||
// We need to set performerBoxCustom at runtime, as this is a custom path, and store it in a pointer.
|
||||
performerBoxCustom = newImageBox(packr.Folder(customPath))
|
||||
var err error
|
||||
performerBoxCustom, err = newImageBox(os.DirFS(customPath))
|
||||
if err != nil {
|
||||
logger.Warnf("error loading custom performer from %s: %v", customPath, err)
|
||||
}
|
||||
} else {
|
||||
performerBoxCustom = nil
|
||||
}
|
||||
|
|
@ -63,5 +86,11 @@ func getRandomPerformerImageUsingName(name, gender, customPath string) ([]byte,
|
|||
|
||||
imageFiles := box.files
|
||||
index := utils.IntFromString(name) % uint64(len(imageFiles))
|
||||
return box.box.Find(imageFiles[index])
|
||||
img, err := box.box.Open(imageFiles[index])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer img.Close()
|
||||
|
||||
return io.ReadAll(img)
|
||||
}
|
||||
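Retyping imageBox from *packr.Box to fs.FS is what lets one code path serve both the compile-time embed.FS assets and a runtime os.DirFS(customPath) directory: both satisfy the interface. A minimal sketch of that shared path:

package main

import (
    "fmt"
    "io/fs"
    "os"
)

// countFiles works identically for embed.FS and os.DirFS inputs,
// because both implement fs.FS.
func countFiles(fsys fs.FS) (int, error) {
    n := 0
    err := fs.WalkDir(fsys, ".", func(path string, d fs.DirEntry, err error) error {
        if err != nil {
            return err
        }
        if !d.IsDir() {
            n++
        }
        return nil
    })
    return n, err
}

func main() {
    fmt.Println(countFiles(os.DirFS(".")))
}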
|
|
|
|||
|
|
@ -53,22 +53,6 @@ func (r *Resolver) Tag() models.TagResolver {
|
|||
return &tagResolver{r}
|
||||
}
|
||||
|
||||
func (r *Resolver) ScrapedSceneTag() models.ScrapedSceneTagResolver {
|
||||
return &scrapedSceneTagResolver{r}
|
||||
}
|
||||
|
||||
func (r *Resolver) ScrapedSceneMovie() models.ScrapedSceneMovieResolver {
|
||||
return &scrapedSceneMovieResolver{r}
|
||||
}
|
||||
|
||||
func (r *Resolver) ScrapedScenePerformer() models.ScrapedScenePerformerResolver {
|
||||
return &scrapedScenePerformerResolver{r}
|
||||
}
|
||||
|
||||
func (r *Resolver) ScrapedSceneStudio() models.ScrapedSceneStudioResolver {
|
||||
return &scrapedSceneStudioResolver{r}
|
||||
}
|
||||
|
||||
type mutationResolver struct{ *Resolver }
|
||||
type queryResolver struct{ *Resolver }
|
||||
type subscriptionResolver struct{ *Resolver }
|
||||
|
|
@ -81,10 +65,6 @@ type imageResolver struct{ *Resolver }
|
|||
type studioResolver struct{ *Resolver }
|
||||
type movieResolver struct{ *Resolver }
|
||||
type tagResolver struct{ *Resolver }
|
||||
type scrapedSceneTagResolver struct{ *Resolver }
|
||||
type scrapedSceneMovieResolver struct{ *Resolver }
|
||||
type scrapedScenePerformerResolver struct{ *Resolver }
|
||||
type scrapedSceneStudioResolver struct{ *Resolver }
|
||||
|
||||
func (r *Resolver) withTxn(ctx context.Context, fn func(r models.Repository) error) error {
|
||||
return r.txnManager.WithTxn(ctx, fn)
|
||||
|
|
|
|||
|
|
@ -125,6 +125,18 @@ func (r *movieResolver) SceneCount(ctx context.Context, obj *models.Movie) (ret
|
|||
return &res, err
|
||||
}
|
||||
|
||||
func (r *movieResolver) Scenes(ctx context.Context, obj *models.Movie) (ret []*models.Scene, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
var err error
|
||||
ret, err = repo.Scene().FindByMovieID(obj.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *movieResolver) CreatedAt(ctx context.Context, obj *models.Movie) (*time.Time, error) {
|
||||
return &obj.CreatedAt.Timestamp, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -254,3 +254,26 @@ func (r *performerResolver) CreatedAt(ctx context.Context, obj *models.Performer
|
|||
func (r *performerResolver) UpdatedAt(ctx context.Context, obj *models.Performer) (*time.Time, error) {
|
||||
return &obj.UpdatedAt.Timestamp, nil
|
||||
}
|
||||
|
||||
func (r *performerResolver) Movies(ctx context.Context, obj *models.Performer) (ret []*models.Movie, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Movie().FindByPerformerID(obj.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *performerResolver) MovieCount(ctx context.Context, obj *models.Performer) (ret *int, err error) {
|
||||
var res int
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
res, err = repo.Movie().CountByPerformerID(obj.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &res, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -58,6 +58,12 @@ func (r *sceneMarkerResolver) Preview(ctx context.Context, obj *models.SceneMark
|
|||
return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamPreviewURL(obj.ID), nil
|
||||
}
|
||||
|
||||
func (r *sceneMarkerResolver) Screenshot(ctx context.Context, obj *models.SceneMarker) (string, error) {
|
||||
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||
sceneID := int(obj.SceneID.Int64)
|
||||
return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamScreenshotURL(obj.ID), nil
|
||||
}
|
||||
|
||||
func (r *sceneMarkerResolver) CreatedAt(ctx context.Context, obj *models.SceneMarker) (*time.Time, error) {
|
||||
return &obj.CreatedAt.Timestamp, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,23 +0,0 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *scrapedSceneTagResolver) StoredID(ctx context.Context, obj *models.ScrapedSceneTag) (*string, error) {
|
||||
return obj.ID, nil
|
||||
}
|
||||
|
||||
func (r *scrapedSceneMovieResolver) StoredID(ctx context.Context, obj *models.ScrapedSceneMovie) (*string, error) {
|
||||
return obj.ID, nil
|
||||
}
|
||||
|
||||
func (r *scrapedScenePerformerResolver) StoredID(ctx context.Context, obj *models.ScrapedScenePerformer) (*string, error) {
|
||||
return obj.ID, nil
|
||||
}
|
||||
|
||||
func (r *scrapedSceneStudioResolver) StoredID(ctx context.Context, obj *models.ScrapedSceneStudio) (*string, error) {
|
||||
return obj.ID, nil
|
||||
}
|
||||
|
|
@ -45,6 +45,17 @@ func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*st
|
|||
return &imagePath, nil
|
||||
}
|
||||
|
||||
func (r *studioResolver) Aliases(ctx context.Context, obj *models.Studio) (ret []string, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Studio().GetAliases(obj.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, err
|
||||
}
|
||||
|
||||
func (r *studioResolver) SceneCount(ctx context.Context, obj *models.Studio) (ret *int, err error) {
|
||||
var res int
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
|
|
@ -140,3 +151,26 @@ func (r *studioResolver) CreatedAt(ctx context.Context, obj *models.Studio) (*ti
|
|||
func (r *studioResolver) UpdatedAt(ctx context.Context, obj *models.Studio) (*time.Time, error) {
|
||||
return &obj.UpdatedAt.Timestamp, nil
|
||||
}
|
||||
|
||||
func (r *studioResolver) Movies(ctx context.Context, obj *models.Studio) (ret []*models.Movie, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Movie().FindByStudioID(obj.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *studioResolver) MovieCount(ctx context.Context, obj *models.Studio) (ret *int, err error) {
|
||||
var res int
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
res, err = repo.Movie().CountByStudioID(obj.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &res, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -10,6 +10,28 @@ import (
|
|||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *tagResolver) Parents(ctx context.Context, obj *models.Tag) (ret []*models.Tag, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Tag().FindByChildTagID(obj.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *tagResolver) Children(ctx context.Context, obj *models.Tag) (ret []*models.Tag, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Tag().FindByParentTagID(obj.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *tagResolver) Aliases(ctx context.Context, obj *models.Tag) (ret []string, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Tag().GetAliases(obj.ID)
|
||||
|
|
|
|||
|
|
@ -61,6 +61,15 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
|
|||
c.Set(config.Generated, input.GeneratedPath)
|
||||
}
|
||||
|
||||
if input.MetadataPath != nil {
|
||||
if *input.MetadataPath != "" {
|
||||
if err := utils.EnsureDir(*input.MetadataPath); err != nil {
|
||||
return makeConfigGeneralResult(), err
|
||||
}
|
||||
}
|
||||
c.Set(config.Metadata, input.MetadataPath)
|
||||
}
|
||||
|
||||
if input.CachePath != nil {
|
||||
if *input.CachePath != "" {
|
||||
if err := utils.EnsureDir(*input.CachePath); err != nil {
|
||||
|
|
@ -115,6 +124,10 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
|
|||
c.Set(config.MaxStreamingTranscodeSize, input.MaxStreamingTranscodeSize.String())
|
||||
}
|
||||
|
||||
if input.WriteImageThumbnails != nil {
|
||||
c.Set(config.WriteImageThumbnails, *input.WriteImageThumbnails)
|
||||
}
|
||||
|
||||
if input.Username != nil {
|
||||
c.Set(config.Username, input.Username)
|
||||
}
|
||||
|
|
@ -133,6 +146,10 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
|
|||
c.Set(config.MaxSessionAge, *input.MaxSessionAge)
|
||||
}
|
||||
|
||||
if input.TrustedProxies != nil {
|
||||
c.Set(config.TrustedProxies, input.TrustedProxies)
|
||||
}
|
||||
|
||||
if input.LogFile != nil {
|
||||
c.Set(config.LogFile, input.LogFile)
|
||||
}
|
||||
|
|
@ -289,7 +306,9 @@ func (r *mutationResolver) ConfigureDlna(ctx context.Context, input models.Confi
|
|||
if !*input.Enabled && dlnaService.IsRunning() {
|
||||
dlnaService.Stop(nil)
|
||||
} else if *input.Enabled && !dlnaService.IsRunning() {
|
||||
dlnaService.Start(nil)
|
||||
if err := dlnaService.Start(nil); err != nil {
|
||||
logger.Warnf("error starting DLNA service: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -2,7 +2,8 @@ package api
|
|||
|
||||
import (
|
||||
"context"
|
||||
"io/ioutil"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"sync"
|
||||
|
|
@ -105,8 +106,10 @@ func (r *mutationResolver) BackupDatabase(ctx context.Context, input models.Back
|
|||
mgr := manager.GetInstance()
|
||||
var backupPath string
|
||||
if download {
|
||||
utils.EnsureDir(mgr.Paths.Generated.Downloads)
|
||||
f, err := ioutil.TempFile(mgr.Paths.Generated.Downloads, "backup*.sqlite")
|
||||
if err := utils.EnsureDir(mgr.Paths.Generated.Downloads); err != nil {
|
||||
return nil, fmt.Errorf("could not create backup directory %v: %w", mgr.Paths.Generated.Downloads, err)
|
||||
}
|
||||
f, err := os.CreateTemp(mgr.Paths.Generated.Downloads, "backup*.sqlite")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ func (r *mutationResolver) RunPluginTask(ctx context.Context, pluginID string, t
|
|||
func (r *mutationResolver) ReloadPlugins(ctx context.Context) (bool, error) {
|
||||
err := manager.GetInstance().PluginCache.LoadPlugins()
|
||||
if err != nil {
|
||||
logger.Errorf("Error reading plugin configs: %s", err.Error())
|
||||
logger.Errorf("Error reading plugin configs: %v", err)
|
||||
}
|
||||
|
||||
return true, nil
|
||||
|
|
|
|||
|
|
@ -304,6 +304,18 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.Bul
|
|||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Save the movies
|
||||
if translator.hasField("movie_ids") {
|
||||
movies, err := adjustSceneMovieIDs(qb, sceneID, *input.MovieIds)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := qb.UpdateMovies(sceneID, movies); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
|
|
@ -395,6 +407,48 @@ func adjustSceneGalleryIDs(qb models.SceneReader, sceneID int, ids models.BulkUp
|
|||
return adjustIDs(ret, ids), nil
|
||||
}
|
||||
|
||||
func adjustSceneMovieIDs(qb models.SceneReader, sceneID int, updateIDs models.BulkUpdateIds) ([]models.MoviesScenes, error) {
|
||||
existingMovies, err := qb.GetMovies(sceneID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// if we are setting the ids, just return the ids
|
||||
if updateIDs.Mode == models.BulkUpdateIDModeSet {
|
||||
existingMovies = []models.MoviesScenes{}
|
||||
for _, idStr := range updateIDs.Ids {
|
||||
id, _ := strconv.Atoi(idStr)
|
||||
existingMovies = append(existingMovies, models.MoviesScenes{MovieID: id})
|
||||
}
|
||||
|
||||
return existingMovies, nil
|
||||
}
|
||||
|
||||
for _, idStr := range updateIDs.Ids {
|
||||
id, _ := strconv.Atoi(idStr)
|
||||
|
||||
// look for the id in the list
|
||||
foundExisting := false
|
||||
for idx, existingMovie := range existingMovies {
|
||||
if existingMovie.MovieID == id {
|
||||
if updateIDs.Mode == models.BulkUpdateIDModeRemove {
|
||||
// remove from the list
|
||||
existingMovies = append(existingMovies[:idx], existingMovies[idx+1:]...)
|
||||
}
|
||||
|
||||
foundExisting = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !foundExisting && updateIDs.Mode != models.BulkUpdateIDModeRemove {
|
||||
existingMovies = append(existingMovies, models.MoviesScenes{MovieID: id})
|
||||
}
|
||||
}
|
||||
|
||||
return existingMovies, err
|
||||
}
|
||||
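adjustSceneMovieIDs implements the three BulkUpdateIds modes: SET replaces the list outright, ADD appends IDs not already present, and REMOVE filters them out. A self-contained illustration of those semantics on plain ints (values invented for the example):

package main

import "fmt"

type mode int

const (
    set mode = iota
    add
    remove
)

// apply mirrors the mode handling in adjustSceneMovieIDs above.
func apply(existing, ids []int, m mode) []int {
    if m == set {
        return ids
    }
    out := append([]int{}, existing...)
    for _, id := range ids {
        found := -1
        for i, e := range out {
            if e == id {
                found = i
                break
            }
        }
        switch {
        case m == remove && found >= 0:
            out = append(out[:found], out[found+1:]...)
        case m == add && found < 0:
            out = append(out, id)
        }
    }
    return out
}

func main() {
    fmt.Println(apply([]int{1, 2}, []int{3}, set))    // [3]
    fmt.Println(apply([]int{1, 2}, []int{2, 3}, add)) // [1 2 3]
    fmt.Println(apply([]int{1, 2}, []int{2}, remove)) // [1]
}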
|
||||
func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneDestroyInput) (bool, error) {
|
||||
sceneID, err := strconv.Atoi(input.ID)
|
||||
if err != nil {
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ package api
|
|||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"github.com/stashapp/stash/pkg/studio"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
|
|
@ -64,19 +65,19 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
|
|||
}
|
||||
|
||||
// Start the transaction and save the studio
|
||||
var studio *models.Studio
|
||||
var s *models.Studio
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Studio()
|
||||
|
||||
var err error
|
||||
studio, err = qb.Create(newStudio)
|
||||
s, err = qb.Create(newStudio)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(imageData) > 0 {
|
||||
if err := qb.UpdateImage(studio.ID, imageData); err != nil {
|
||||
if err := qb.UpdateImage(s.ID, imageData); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
|
@ -84,7 +85,17 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
|
|||
// Save the stash_ids
|
||||
if input.StashIds != nil {
|
||||
stashIDJoins := models.StashIDsFromInput(input.StashIds)
|
||||
if err := qb.UpdateStashIDs(studio.ID, stashIDJoins); err != nil {
|
||||
if err := qb.UpdateStashIDs(s.ID, stashIDJoins); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if len(input.Aliases) > 0 {
|
||||
if err := studio.EnsureAliasesUnique(s.ID, input.Aliases, qb); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := qb.UpdateAliases(s.ID, input.Aliases); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
|
@ -94,8 +105,8 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
|
|||
return nil, err
|
||||
}
|
||||
|
||||
r.hookExecutor.ExecutePostHooks(ctx, studio.ID, plugin.StudioCreatePost, input, nil)
|
||||
return r.getStudio(ctx, studio.ID)
|
||||
r.hookExecutor.ExecutePostHooks(ctx, s.ID, plugin.StudioCreatePost, input, nil)
|
||||
return r.getStudio(ctx, s.ID)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.StudioUpdateInput) (*models.Studio, error) {
|
||||
|
|
@ -136,7 +147,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
|
|||
updatedStudio.Rating = translator.nullInt64(input.Rating, "rating")
|
||||
|
||||
// Start the transaction and save the studio
|
||||
var studio *models.Studio
|
||||
var s *models.Studio
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Studio()
|
||||
|
||||
|
|
@ -145,19 +156,19 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
|
|||
}
|
||||
|
||||
var err error
|
||||
studio, err = qb.Update(updatedStudio)
|
||||
s, err = qb.Update(updatedStudio)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(imageData) > 0 {
|
||||
if err := qb.UpdateImage(studio.ID, imageData); err != nil {
|
||||
if err := qb.UpdateImage(s.ID, imageData); err != nil {
|
||||
return err
|
||||
}
|
||||
} else if imageIncluded {
|
||||
// must be unsetting
|
||||
if err := qb.DestroyImage(studio.ID); err != nil {
|
||||
if err := qb.DestroyImage(s.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
|
@ -170,13 +181,23 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
|
|||
}
|
||||
}
|
||||
|
||||
if translator.hasField("aliases") {
|
||||
if err := studio.EnsureAliasesUnique(studioID, input.Aliases, qb); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := qb.UpdateAliases(studioID, input.Aliases); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
r.hookExecutor.ExecutePostHooks(ctx, studio.ID, plugin.StudioUpdatePost, input, translator.getFields())
|
||||
return r.getStudio(ctx, studio.ID)
|
||||
r.hookExecutor.ExecutePostHooks(ctx, s.ID, plugin.StudioUpdatePost, input, translator.getFields())
|
||||
return r.getStudio(ctx, s.ID)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) StudioDestroy(ctx context.Context, input models.StudioDestroyInput) (bool, error) {
|
||||
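The studio-to-s renames in this file are not cosmetic: the file now imports the studio package (for studio.EnsureAliasesUnique), and a local variable named studio would shadow that package inside the transaction closures. A compilable demonstration of the same collision, with strings standing in for the studio package:

package main

import (
    "fmt"
    "strings" // stands in for the studio package in this demo
)

func main() {
    fmt.Println(strings.ToUpper("ok")) // package identifier visible here

    strings := []string{"a", "b"} // local variable now shadows the package...
    fmt.Println(len(strings))
    // ...so strings.ToUpper is unreachable below this point, which is
    // exactly why the resolver's local was renamed to s.
}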
|
|
|
|||
|
|
@ -75,6 +75,28 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate
|
|||
}
|
||||
}
|
||||
|
||||
if len(input.ParentIds) > 0 {
|
||||
ids, err := utils.StringSliceToIntSlice(input.ParentIds)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := qb.UpdateParentTags(t.ID, ids); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if len(input.ChildIds) > 0 {
|
||||
ids, err := utils.StringSliceToIntSlice(input.ChildIds)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := qb.UpdateChildTags(t.ID, ids); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
|
|
@ -161,6 +183,41 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
|
|||
}
|
||||
}
|
||||
|
||||
var parentIDs []int
|
||||
var childIDs []int
|
||||
|
||||
if translator.hasField("parent_ids") {
|
||||
parentIDs, err = utils.StringSliceToIntSlice(input.ParentIds)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if translator.hasField("child_ids") {
|
||||
childIDs, err = utils.StringSliceToIntSlice(input.ChildIds)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if parentIDs != nil || childIDs != nil {
|
||||
if err := tag.EnsureUniqueHierarchy(tagID, parentIDs, childIDs, qb); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if parentIDs != nil {
|
||||
if err := qb.UpdateParentTags(tagID, parentIDs); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if childIDs != nil {
|
||||
if err := qb.UpdateChildTags(tagID, childIDs); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
|
|
@ -242,10 +299,24 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input models.TagsMerge
|
|||
return fmt.Errorf("Tag with ID %d not found", destination)
|
||||
}
|
||||
|
||||
parents, children, err := tag.MergeHierarchy(destination, source, qb)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err = qb.Merge(source, destination); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = qb.UpdateParentTags(destination, parents)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = qb.UpdateChildTags(destination, children)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
|
|
|
|||
|
|
@ -59,6 +59,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
|
|||
Stashes: config.GetStashPaths(),
|
||||
DatabasePath: config.GetDatabasePath(),
|
||||
GeneratedPath: config.GetGeneratedPath(),
|
||||
MetadataPath: config.GetMetadataPath(),
|
||||
ConfigFilePath: config.GetConfigFilePath(),
|
||||
ScrapersPath: config.GetScrapersPath(),
|
||||
CachePath: config.GetCachePath(),
|
||||
|
|
@ -73,10 +74,12 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
|
|||
PreviewPreset: config.GetPreviewPreset(),
|
||||
MaxTranscodeSize: &maxTranscodeSize,
|
||||
MaxStreamingTranscodeSize: &maxStreamingTranscodeSize,
|
||||
WriteImageThumbnails: config.IsWriteImageThumbnails(),
|
||||
APIKey: config.GetAPIKey(),
|
||||
Username: config.GetUsername(),
|
||||
Password: config.GetPasswordHash(),
|
||||
MaxSessionAge: config.GetMaxSessionAge(),
|
||||
TrustedProxies: config.GetTrustedProxies(),
|
||||
LogFile: &logFile,
|
||||
LogOut: config.GetLogOut(),
|
||||
LogLevel: config.GetLogLevel(),
|
||||
|
|
|
|||
|
|
@@ -2,7 +2,9 @@ package api

import (
    "context"
    "errors"
    "fmt"
    "strconv"

    "github.com/stashapp/stash/pkg/manager"
    "github.com/stashapp/stash/pkg/manager/config"
@@ -29,8 +31,9 @@ func (r *queryResolver) ScrapeFreeonesPerformerList(ctx context.Context, query s
    var ret []string
    for _, v := range scrapedPerformers {
        name := v.Name
        ret = append(ret, *name)
        if v.Name != nil {
            ret = append(ret, *v.Name)
        }
    }

    return ret, nil
@@ -68,8 +71,21 @@ func (r *queryResolver) ScrapePerformerURL(ctx context.Context, url string) (*mo
    return manager.GetInstance().ScraperCache.ScrapePerformerURL(url)
}

func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string, query string) ([]*models.ScrapedScene, error) {
    if query == "" {
        return nil, nil
    }

    return manager.GetInstance().ScraperCache.ScrapeSceneQuery(scraperID, query)
}

func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, scene models.SceneUpdateInput) (*models.ScrapedScene, error) {
    return manager.GetInstance().ScraperCache.ScrapeScene(scraperID, scene)
    id, err := strconv.Atoi(scene.ID)
    if err != nil {
        return nil, err
    }

    return manager.GetInstance().ScraperCache.ScrapeScene(scraperID, id)
}

func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*models.ScrapedScene, error) {
@@ -77,7 +93,12 @@ func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*models
}

func (r *queryResolver) ScrapeGallery(ctx context.Context, scraperID string, gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) {
    return manager.GetInstance().ScraperCache.ScrapeGallery(scraperID, gallery)
    id, err := strconv.Atoi(gallery.ID)
    if err != nil {
        return nil, err
    }

    return manager.GetInstance().ScraperCache.ScrapeGallery(scraperID, id)
}

func (r *queryResolver) ScrapeGalleryURL(ctx context.Context, url string) (*models.ScrapedGallery, error) {
@@ -98,7 +119,7 @@ func (r *queryResolver) QueryStashBoxScene(ctx context.Context, input models.Sta
    client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager)

    if len(input.SceneIds) > 0 {
        return client.FindStashBoxScenesByFingerprints(input.SceneIds)
        return client.FindStashBoxScenesByFingerprintsFlat(input.SceneIds)
    }

    if input.Q != nil {
@@ -127,3 +148,177 @@ func (r *queryResolver) QueryStashBoxPerformer(ctx context.Context, input models
    return nil, nil
}

func (r *queryResolver) getStashBoxClient(index int) (*stashbox.Client, error) {
    boxes := config.GetInstance().GetStashBoxes()

    if index < 0 || index >= len(boxes) {
        return nil, fmt.Errorf("invalid stash_box_index %d", index)
    }

    return stashbox.NewClient(*boxes[index], r.txnManager), nil
}

func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleSceneInput) ([]*models.ScrapedScene, error) {
    if source.ScraperID != nil {
        var singleScene *models.ScrapedScene
        var err error

        if input.SceneID != nil {
            var sceneID int
            sceneID, err = strconv.Atoi(*input.SceneID)
            if err != nil {
                return nil, err
            }
            singleScene, err = manager.GetInstance().ScraperCache.ScrapeScene(*source.ScraperID, sceneID)
        } else if input.SceneInput != nil {
            singleScene, err = manager.GetInstance().ScraperCache.ScrapeSceneFragment(*source.ScraperID, *input.SceneInput)
        } else if input.Query != nil {
            return manager.GetInstance().ScraperCache.ScrapeSceneQuery(*source.ScraperID, *input.Query)
        } else {
            err = errors.New("scene_id, scene_input or query must be set")
        }

        if err != nil {
            return nil, err
        }

        if singleScene != nil {
            return []*models.ScrapedScene{singleScene}, nil
        }

        return nil, nil
    } else if source.StashBoxIndex != nil {
        client, err := r.getStashBoxClient(*source.StashBoxIndex)
        if err != nil {
            return nil, err
        }

        if input.SceneID != nil {
            return client.FindStashBoxScenesByFingerprintsFlat([]string{*input.SceneID})
        } else if input.Query != nil {
            return client.QueryStashBoxScene(*input.Query)
        }

        return nil, errors.New("scene_id or query must be set")
    }

    return nil, errors.New("scraper_id or stash_box_index must be set")
}

func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeMultiScenesInput) ([][]*models.ScrapedScene, error) {
    if source.ScraperID != nil {
        return nil, errors.New("not implemented")
    } else if source.StashBoxIndex != nil {
        client, err := r.getStashBoxClient(*source.StashBoxIndex)
        if err != nil {
            return nil, err
        }

        return client.FindStashBoxScenesByFingerprints(input.SceneIds)
    }

    return nil, errors.New("scraper_id or stash_box_index must be set")
}

func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSinglePerformerInput) ([]*models.ScrapedPerformer, error) {
    if source.ScraperID != nil {
        if input.PerformerInput != nil {
            singlePerformer, err := manager.GetInstance().ScraperCache.ScrapePerformer(*source.ScraperID, *input.PerformerInput)
            if err != nil {
                return nil, err
            }

            if singlePerformer != nil {
                return []*models.ScrapedPerformer{singlePerformer}, nil
            }

            return nil, nil
        }

        if input.Query != nil {
            return manager.GetInstance().ScraperCache.ScrapePerformerList(*source.ScraperID, *input.Query)
        }

        return nil, errors.New("not implemented")
    } else if source.StashBoxIndex != nil {
        client, err := r.getStashBoxClient(*source.StashBoxIndex)
        if err != nil {
            return nil, err
        }

        var ret []*models.StashBoxPerformerQueryResult
        if input.PerformerID != nil {
            ret, err = client.FindStashBoxPerformersByNames([]string{*input.PerformerID})
        } else if input.Query != nil {
            ret, err = client.QueryStashBoxPerformer(*input.Query)
        } else {
            return nil, errors.New("not implemented")
        }

        if err != nil {
            return nil, err
        }

        if len(ret) > 0 {
            return ret[0].Results, nil
        }

        return nil, nil
    }

    return nil, errors.New("scraper_id or stash_box_index must be set")
}

func (r *queryResolver) ScrapeMultiPerformers(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeMultiPerformersInput) ([][]*models.ScrapedPerformer, error) {
    if source.ScraperID != nil {
        return nil, errors.New("not implemented")
    } else if source.StashBoxIndex != nil {
        client, err := r.getStashBoxClient(*source.StashBoxIndex)
        if err != nil {
            return nil, err
        }

        return client.FindStashBoxPerformersByPerformerNames(input.PerformerIds)
    }

    return nil, errors.New("scraper_id or stash_box_index must be set")
}

func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleGalleryInput) ([]*models.ScrapedGallery, error) {
    if source.ScraperID != nil {
        var singleGallery *models.ScrapedGallery
        var err error

        if input.GalleryID != nil {
            var galleryID int
            galleryID, err = strconv.Atoi(*input.GalleryID)
            if err != nil {
                return nil, err
            }
            singleGallery, err = manager.GetInstance().ScraperCache.ScrapeGallery(*source.ScraperID, galleryID)
        } else if input.GalleryInput != nil {
            singleGallery, err = manager.GetInstance().ScraperCache.ScrapeGalleryFragment(*source.ScraperID, *input.GalleryInput)
        } else {
            return nil, errors.New("not implemented")
        }

        if err != nil {
            return nil, err
        }

        if singleGallery != nil {
            return []*models.ScrapedGallery{singleGallery}, nil
        }

        return nil, nil
    } else if source.StashBoxIndex != nil {
        return nil, errors.New("not supported")
    }

    return nil, errors.New("scraper_id must be set")
}

func (r *queryResolver) ScrapeSingleMovie(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleMovieInput) ([]*models.ScrapedMovie, error) {
    return nil, errors.New("not supported")
}

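The new getStashBoxClient helper replaces the ad-hoc index handling each resolver previously repeated. A minimal, self-contained sketch of the same guard-then-construct pattern; Box and Client below are simplified stand-ins, not the real config/stashbox types:

package main

import (
    "fmt"
)

// Box and Client are simplified stand-ins for the real config.StashBox and
// stashbox.Client types; only the guard-then-construct shape matters here.
type Box struct{ Endpoint string }

type Client struct{ box Box }

// getClient validates the index before indexing into the slice, so a bad
// stash_box_index coming from the API becomes an error instead of a panic.
func getClient(boxes []Box, index int) (*Client, error) {
    if index < 0 || index >= len(boxes) {
        return nil, fmt.Errorf("invalid stash_box_index %d", index)
    }
    return &Client{box: boxes[index]}, nil
}

func main() {
    boxes := []Box{{Endpoint: "https://stashbox.example/graphql"}}
    if _, err := getClient(boxes, 2); err != nil {
        fmt.Println(err) // invalid stash_box_index 2
    }
}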
@@ -2,32 +2,12 @@ package api

import (
    "context"
    "time"

    "github.com/stashapp/stash/pkg/job"
    "github.com/stashapp/stash/pkg/manager"
    "github.com/stashapp/stash/pkg/models"
)

type throttledUpdate struct {
    id             int
    pendingUpdate  *job.Job
    lastUpdate     time.Time
    broadcastTimer *time.Timer
    killTimer      *time.Timer
}

func (tu *throttledUpdate) broadcast(output chan *models.JobStatusUpdate) {
    tu.lastUpdate = time.Now()
    output <- &models.JobStatusUpdate{
        Type: models.JobStatusUpdateTypeUpdate,
        Job:  jobToJobModel(*tu.pendingUpdate),
    }

    tu.broadcastTimer = nil
    tu.pendingUpdate = nil
}

func makeJobStatusUpdate(t models.JobStatusUpdateType, j job.Job) *models.JobStatusUpdate {
    return &models.JobStatusUpdate{
        Type: t,

@@ -7,6 +7,7 @@ import (
    "github.com/go-chi/chi"
    "github.com/stashapp/stash/pkg/image"
    "github.com/stashapp/stash/pkg/logger"
    "github.com/stashapp/stash/pkg/manager"
    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/utils"
@@ -32,15 +33,35 @@ func (rs imageRoutes) Routes() chi.Router {
// region Handlers

func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
    image := r.Context().Value(imageKey).(*models.Image)
    filepath := manager.GetInstance().Paths.Generated.GetThumbnailPath(image.Checksum, models.DefaultGthumbWidth)
    img := r.Context().Value(imageKey).(*models.Image)
    filepath := manager.GetInstance().Paths.Generated.GetThumbnailPath(img.Checksum, models.DefaultGthumbWidth)

    // if the thumbnail doesn't exist, fall back to the original file
    w.Header().Add("Cache-Control", "max-age=604800000")

    // if the thumbnail doesn't exist, encode on the fly
    exists, _ := utils.FileExists(filepath)
    if exists {
        http.ServeFile(w, r, filepath)
    } else {
        rs.Image(w, r)
        encoder := image.NewThumbnailEncoder(manager.GetInstance().FFMPEGPath)
        data, err := encoder.GetThumbnail(img, models.DefaultGthumbWidth)
        if err != nil {
            logger.Errorf("error generating thumbnail for image: %s", err.Error())

            // backwards compatibility - fallback to original image instead
            rs.Image(w, r)
            return
        }

        // write the generated thumbnail to disk if enabled
        if manager.GetInstance().Config.IsWriteImageThumbnails() {
            if err := utils.WriteFile(filepath, data); err != nil {
                logger.Errorf("error writing thumbnail for image %s: %s", img.Path, err)
            }
        }
        if n, err := w.Write(data); err != nil {
            logger.Errorf("error writing thumbnail response. Wrote %v bytes: %v", n, err)
        }
    }
}

@@ -59,7 +80,7 @@ func ImageCtx(next http.Handler) http.Handler {
    imageID, _ := strconv.Atoi(imageIdentifierQueryParam)

    var image *models.Image
    manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
    readTxnErr := manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
        qb := repo.Image()
        if imageID == 0 {
            image, _ = qb.FindByChecksum(imageIdentifierQueryParam)
@@ -69,6 +90,9 @@ func ImageCtx(next http.Handler) http.Handler {
        return nil
    })
    if readTxnErr != nil {
        logger.Warnf("read transaction failure while trying to read image by id: %v", readTxnErr)
    }

    if image == nil {
        http.Error(w, http.StatusText(404), 404)

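The rewritten Thumbnail handler is an instance of the common generate-on-miss caching pattern: serve the file if it is already on disk, otherwise encode it, optionally persist it, and stream the fresh bytes. A reduced sketch of that flow, with an injected generate function standing in for the ffmpeg-backed thumbnail encoder:

package main

import (
    "log"
    "net/http"
    "os"
)

// serveOrGenerate serves cached bytes from path when the file exists;
// otherwise it builds the bytes, optionally persists them, and streams them
// back. persist mirrors the IsWriteImageThumbnails setting.
func serveOrGenerate(w http.ResponseWriter, r *http.Request, path string, persist bool, generate func() ([]byte, error)) {
    if _, err := os.Stat(path); err == nil {
        http.ServeFile(w, r, path)
        return
    }

    data, err := generate()
    if err != nil {
        // the real handler falls back to serving the original image here
        http.Error(w, "could not generate resource", http.StatusInternalServerError)
        return
    }

    if persist {
        // best-effort cache write; a failure only costs a re-encode later
        if err := os.WriteFile(path, data, 0o644); err != nil {
            log.Printf("error writing cache file %s: %v", path, err)
        }
    }

    if _, err := w.Write(data); err != nil {
        log.Printf("error writing response: %v", err)
    }
}

func main() {
    http.HandleFunc("/thumb", func(w http.ResponseWriter, r *http.Request) {
        serveOrGenerate(w, r, "cache/thumb.jpg", true, func() ([]byte, error) {
            return []byte("fake image bytes"), nil // stand-in for the encoder
        })
    })
    log.Fatal(http.ListenAndServe(":8080", nil))
}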
@@ -6,6 +6,7 @@ import (
    "strconv"

    "github.com/go-chi/chi"
    "github.com/stashapp/stash/pkg/logger"
    "github.com/stashapp/stash/pkg/manager"
    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/utils"
@@ -32,17 +33,22 @@ func (rs movieRoutes) FrontImage(w http.ResponseWriter, r *http.Request) {
    defaultParam := r.URL.Query().Get("default")
    var image []byte
    if defaultParam != "true" {
        rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
        err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
            image, _ = repo.Movie().GetFrontImage(movie.ID)
            return nil
        })
        if err != nil {
            logger.Warnf("read transaction error while getting front image: %v", err)
        }
    }

    if len(image) == 0 {
        _, image, _ = utils.ProcessBase64Image(models.DefaultMovieImage)
    }

    utils.ServeImage(image, w, r)
    if err := utils.ServeImage(image, w, r); err != nil {
        logger.Warnf("error serving front image: %v", err)
    }
}

func (rs movieRoutes) BackImage(w http.ResponseWriter, r *http.Request) {
@@ -50,17 +56,22 @@ func (rs movieRoutes) BackImage(w http.ResponseWriter, r *http.Request) {
    defaultParam := r.URL.Query().Get("default")
    var image []byte
    if defaultParam != "true" {
        rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
        err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
            image, _ = repo.Movie().GetBackImage(movie.ID)
            return nil
        })
        if err != nil {
            logger.Warnf("read transaction error on fetch back image: %v", err)
        }
    }

    if len(image) == 0 {
        _, image, _ = utils.ProcessBase64Image(models.DefaultMovieImage)
    }

    utils.ServeImage(image, w, r)
    if err := utils.ServeImage(image, w, r); err != nil {
        logger.Warnf("error while serving image: %v", err)
    }
}

func MovieCtx(next http.Handler) http.Handler {

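A recurring change across these route files is that the error returned by WithReadTxn is now logged rather than silently discarded. A toy illustration of the shape; withReadTxn below is a stand-in for the real transaction manager, rigged to fail so the logging path runs:

package main

import (
    "context"
    "errors"
    "log"
)

// withReadTxn is a stand-in for the transaction manager's WithReadTxn; it
// surfaces whatever error the transaction machinery produces.
func withReadTxn(ctx context.Context, fn func(ctx context.Context) error) error {
    if err := fn(ctx); err != nil {
        return err
    }
    return errors.New("simulated commit failure") // force the logging path
}

func main() {
    var image []byte
    // The lookup itself tolerates a missing row (image stays empty), but a
    // transaction-level failure is now logged instead of silently dropped.
    if err := withReadTxn(context.Background(), func(ctx context.Context) error {
        image = nil // stand-in for repo.Movie().GetFrontImage(...)
        return nil
    }); err != nil {
        log.Printf("read transaction error while getting front image: %v", err)
    }
    _ = image
}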
@@ -6,6 +6,7 @@ import (
    "strconv"

    "github.com/go-chi/chi"
    "github.com/stashapp/stash/pkg/logger"
    "github.com/stashapp/stash/pkg/manager"
    "github.com/stashapp/stash/pkg/manager/config"
    "github.com/stashapp/stash/pkg/models"
@@ -33,17 +34,22 @@ func (rs performerRoutes) Image(w http.ResponseWriter, r *http.Request) {
    var image []byte
    if defaultParam != "true" {
        rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
        readTxnErr := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
            image, _ = repo.Performer().GetImage(performer.ID)
            return nil
        })
        if readTxnErr != nil {
            logger.Warnf("couldn't execute getting a performer image from read transaction: %v", readTxnErr)
        }
    }

    if len(image) == 0 || defaultParam == "true" {
        image, _ = getRandomPerformerImageUsingName(performer.Name.String, performer.Gender.String, config.GetInstance().GetCustomPerformerImageLocation())
    }

    utils.ServeImage(image, w, r)
    if err := utils.ServeImage(image, w, r); err != nil {
        logger.Warnf("error serving image: %v", err)
    }
}

func PerformerCtx(next http.Handler) http.Handler {

@@ -16,8 +16,7 @@ import (
)

type sceneRoutes struct {
    txnManager models.TransactionManager
    sceneServer manager.SceneServer
    txnManager  models.TransactionManager
}

func (rs sceneRoutes) Routes() chi.Router {
@@ -42,6 +41,7 @@ func (rs sceneRoutes) Routes() chi.Router {
        r.Get("/scene_marker/{sceneMarkerId}/stream", rs.SceneMarkerStream)
        r.Get("/scene_marker/{sceneMarkerId}/preview", rs.SceneMarkerPreview)
        r.Get("/scene_marker/{sceneMarkerId}/screenshot", rs.SceneMarkerScreenshot)
    })
    r.With(SceneCtx).Get("/{sceneId}_thumbs.vtt", rs.VttThumbs)
    r.With(SceneCtx).Get("/{sceneId}_sprite.jpg", rs.VttSprite)
@@ -59,7 +59,7 @@ func getSceneFileContainer(scene *models.Scene) ffmpeg.Container {
    // shouldn't happen, fallback to ffprobe
    tmpVideoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path, false)
    if err != nil {
        logger.Errorf("[transcode] error reading video file: %s", err.Error())
        logger.Errorf("[transcode] error reading video file: %v", err)
        return ffmpeg.Container("")
    }

@@ -85,7 +85,9 @@ func (rs sceneRoutes) StreamMKV(w http.ResponseWriter, r *http.Request) {
    container := getSceneFileContainer(scene)
    if container != ffmpeg.Matroska {
        w.WriteHeader(http.StatusBadRequest)
        w.Write([]byte("not an mkv file"))
        if _, err := w.Write([]byte("not an mkv file")); err != nil {
            logger.Warnf("[stream] error writing to stream: %v", err)
        }
        return
    }

@@ -105,7 +107,7 @@ func (rs sceneRoutes) StreamHLS(w http.ResponseWriter, r *http.Request) {
    videoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path, false)
    if err != nil {
        logger.Errorf("[stream] error reading video file: %s", err.Error())
        logger.Errorf("[stream] error reading video file: %v", err)
        return
    }

@@ -126,7 +128,9 @@ func (rs sceneRoutes) StreamHLS(w http.ResponseWriter, r *http.Request) {
    rangeStr := requestByteRange.ToHeaderValue(int64(str.Len()))
    w.Header().Set("Content-Range", rangeStr)

    w.Write(ret)
    if n, err := w.Write(ret); err != nil {
        logger.Warnf("[stream] error writing stream (wrote %v bytes): %v", n, err)
    }
}

func (rs sceneRoutes) StreamTS(w http.ResponseWriter, r *http.Request) {
@@ -141,12 +145,15 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, vi
    videoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path, false)
    if err != nil {
        logger.Errorf("[stream] error reading video file: %s", err.Error())
        logger.Errorf("[stream] error reading video file: %v", err)
        return
    }

    // start stream based on query param, if provided
    r.ParseForm()
    if err = r.ParseForm(); err != nil {
        logger.Warnf("[stream] error parsing query form: %v", err)
    }

    startTime := r.Form.Get("start")
    requestedSize := r.Form.Get("resolution")

@@ -168,9 +175,11 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, vi
    stream, err = encoder.GetTranscodeStream(options)

    if err != nil {
        logger.Errorf("[stream] error transcoding video file: %s", err.Error())
        logger.Errorf("[stream] error transcoding video file: %v", err)
        w.WriteHeader(http.StatusBadRequest)
        w.Write([]byte(err.Error()))
        if _, err := w.Write([]byte(err.Error())); err != nil {
            logger.Warnf("[stream] error writing response: %v", err)
        }
        return
    }

@@ -289,6 +298,12 @@ func (rs sceneRoutes) SceneMarkerStream(w http.ResponseWriter, r *http.Request)
        http.Error(w, http.StatusText(500), 500)
        return
    }

    if sceneMarker == nil {
        http.Error(w, http.StatusText(404), 404)
        return
    }

    filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds))
    http.ServeFile(w, r, filepath)
}
@@ -306,6 +321,12 @@ func (rs sceneRoutes) SceneMarkerPreview(w http.ResponseWriter, r *http.Request)
        http.Error(w, http.StatusText(500), 500)
        return
    }

    if sceneMarker == nil {
        http.Error(w, http.StatusText(404), 404)
        return
    }

    filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds))

    // If the image doesn't exist, send the placeholder
@@ -320,6 +341,39 @@ func (rs sceneRoutes) SceneMarkerPreview(w http.ResponseWriter, r *http.Request)
    http.ServeFile(w, r, filepath)
}

func (rs sceneRoutes) SceneMarkerScreenshot(w http.ResponseWriter, r *http.Request) {
    scene := r.Context().Value(sceneKey).(*models.Scene)
    sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId"))
    var sceneMarker *models.SceneMarker
    if err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
        var err error
        sceneMarker, err = repo.SceneMarker().Find(sceneMarkerID)
        return err
    }); err != nil {
        logger.Warnf("Error when getting scene marker for stream: %s", err.Error())
        http.Error(w, http.StatusText(500), 500)
        return
    }

    if sceneMarker == nil {
        http.Error(w, http.StatusText(404), 404)
        return
    }

    filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamScreenshotPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds))

    // If the image doesn't exist, send the placeholder
    exists, _ := utils.FileExists(filepath)
    if !exists {
        w.Header().Set("Content-Type", "image/png")
        w.Header().Set("Cache-Control", "no-store")
        _, _ = w.Write(utils.PendingGenerateResource)
        return
    }

    http.ServeFile(w, r, filepath)
}

// endregion

func SceneCtx(next http.Handler) http.Handler {
@@ -328,7 +382,7 @@ func SceneCtx(next http.Handler) http.Handler {
    sceneID, _ := strconv.Atoi(sceneIdentifierQueryParam)

    var scene *models.Scene
    manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
    readTxnErr := manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
        qb := repo.Scene()
        if sceneID == 0 {
            // determine checksum/os by the length of the query param
@@ -343,6 +397,9 @@ func SceneCtx(next http.Handler) http.Handler {
        return nil
    })
    if readTxnErr != nil {
        logger.Warnf("error executing SceneCtx transaction: %v", readTxnErr)
    }

    if scene == nil {
        http.Error(w, http.StatusText(404), 404)

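The new SceneMarkerScreenshot handler answers with a placeholder image marked Cache-Control: no-store whenever the generated file is not on disk yet, so clients keep re-requesting until background generation completes. The response path in isolation; placeholder below stands in for utils.PendingGenerateResource:

package main

import (
    "net/http"
    "os"
)

// placeholder stands in for utils.PendingGenerateResource, the static PNG
// shipped with the application.
var placeholder = []byte("placeholder png bytes")

// serveGenerated serves path if it exists; otherwise it answers with a
// placeholder marked no-store, so the browser keeps asking until background
// generation has produced the real file.
func serveGenerated(w http.ResponseWriter, r *http.Request, path string) {
    if _, err := os.Stat(path); err != nil {
        w.Header().Set("Content-Type", "image/png")
        w.Header().Set("Cache-Control", "no-store")
        _, _ = w.Write(placeholder)
        return
    }

    http.ServeFile(w, r, path)
}

func main() {
    http.HandleFunc("/screenshot", func(w http.ResponseWriter, r *http.Request) {
        serveGenerated(w, r, "generated/marker_10.jpg")
    })
    _ = http.ListenAndServe(":8080", nil)
}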
@@ -6,6 +6,7 @@ import (
    "strconv"

    "github.com/go-chi/chi"
    "github.com/stashapp/stash/pkg/logger"
    "github.com/stashapp/stash/pkg/manager"
    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/utils"
@@ -32,17 +33,22 @@ func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) {
    var image []byte
    if defaultParam != "true" {
        rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
        err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
            image, _ = repo.Studio().GetImage(studio.ID)
            return nil
        })
        if err != nil {
            logger.Warnf("read transaction error while fetching studio image: %v", err)
        }
    }

    if len(image) == 0 {
        _, image, _ = utils.ProcessBase64Image(models.DefaultStudioImage)
    }

    utils.ServeImage(image, w, r)
    if err := utils.ServeImage(image, w, r); err != nil {
        logger.Warnf("error serving studio image: %v", err)
    }
}

func StudioCtx(next http.Handler) http.Handler {

@@ -6,6 +6,7 @@ import (
    "strconv"

    "github.com/go-chi/chi"
    "github.com/stashapp/stash/pkg/logger"
    "github.com/stashapp/stash/pkg/manager"
    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/utils"
@@ -32,17 +33,22 @@ func (rs tagRoutes) Image(w http.ResponseWriter, r *http.Request) {
    var image []byte
    if defaultParam != "true" {
        rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
        err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
            image, _ = repo.Tag().GetImage(tag.ID)
            return nil
        })
        if err != nil {
            logger.Warnf("read transaction error while getting tag image: %v", err)
        }
    }

    if len(image) == 0 {
        image = models.DefaultTagImage
    }

    utils.ServeImage(image, w, r)
    if err := utils.ServeImage(image, w, r); err != nil {
        logger.Warnf("error serving tag image: %v", err)
    }
}

func TagCtx(next http.Handler) http.Handler {

@@ -3,11 +3,12 @@ package api

import (
    "context"
    "crypto/tls"
    "embed"
    "errors"
    "fmt"
    "io/ioutil"
    "io/fs"
    "net/http"
    "net/url"
    "os"
    "path"
    "runtime/debug"
    "strconv"
@@ -21,14 +22,12 @@ import (
    gqlPlayground "github.com/99designs/gqlgen/graphql/playground"
    "github.com/go-chi/chi"
    "github.com/go-chi/chi/middleware"
    "github.com/gobuffalo/packr/v2"
    "github.com/gorilla/websocket"
    "github.com/rs/cors"
    "github.com/stashapp/stash/pkg/logger"
    "github.com/stashapp/stash/pkg/manager"
    "github.com/stashapp/stash/pkg/manager/config"
    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/session"
    "github.com/stashapp/stash/pkg/utils"
)

@@ -36,85 +35,7 @@ var version string
var buildstamp string
var githash string

var uiBox *packr.Box

//var legacyUiBox *packr.Box
var loginUIBox *packr.Box

func allowUnauthenticated(r *http.Request) bool {
    return strings.HasPrefix(r.URL.Path, "/login") || r.URL.Path == "/css"
}

func authenticateHandler() func(http.Handler) http.Handler {
    return func(next http.Handler) http.Handler {
        return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
            userID, err := manager.GetInstance().SessionStore.Authenticate(w, r)
            if err != nil {
                if err != session.ErrUnauthorized {
                    w.WriteHeader(http.StatusInternalServerError)
                    _, err = w.Write([]byte(err.Error()))
                    if err != nil {
                        logger.Error(err)
                    }
                    return
                }

                // unauthorized error
                w.Header().Add("WWW-Authenticate", `FormBased`)
                w.WriteHeader(http.StatusUnauthorized)
                return
            }

            c := config.GetInstance()
            ctx := r.Context()

            // handle redirect if no user and user is required
            if userID == "" && c.HasCredentials() && !allowUnauthenticated(r) {
                // if we don't have a userID, then redirect
                // if graphql was requested, we just return a forbidden error
                if r.URL.Path == "/graphql" {
                    w.Header().Add("WWW-Authenticate", `FormBased`)
                    w.WriteHeader(http.StatusUnauthorized)
                    return
                }

                // otherwise redirect to the login page
                u := url.URL{
                    Path: "/login",
                }
                q := u.Query()
                q.Set(returnURLParam, r.URL.Path)
                u.RawQuery = q.Encode()
                http.Redirect(w, r, u.String(), http.StatusFound)
                return
            }

            ctx = session.SetCurrentUserID(ctx, userID)

            r = r.WithContext(ctx)

            next.ServeHTTP(w, r)
        })
    }
}

func visitedPluginHandler() func(http.Handler) http.Handler {
    return func(next http.Handler) http.Handler {
        return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
            // get the visited plugins and set them in the context

            next.ServeHTTP(w, r)
        })
    }
}

const loginEndPoint = "/login"

func Start() {
    uiBox = packr.New("UI Box", "../../ui/v2.5/build")
    //legacyUiBox = packr.New("UI Box", "../../ui/v1/dist/stash-frontend")
    loginUIBox = packr.New("Login UI Box", "../../ui/login")

func Start(uiBox embed.FS, loginUIBox embed.FS) {
    initialiseImages()

    r := chi.NewRouter()
@@ -182,10 +103,10 @@ func Start() {
    r.HandleFunc("/playground", gqlPlayground.Handler("GraphQL playground", "/graphql"))

    // session handlers
    r.Post(loginEndPoint, handleLogin)
    r.Get("/logout", handleLogout)
    r.Post(loginEndPoint, handleLogin(loginUIBox))
    r.Get("/logout", handleLogout(loginUIBox))

    r.Get(loginEndPoint, getLoginHandler)
    r.Get(loginEndPoint, getLoginHandler(loginUIBox))

    r.Mount("/performer", performerRoutes{
        txnManager: txnManager,
@@ -226,11 +147,18 @@ func Start() {
    r.HandleFunc("/login*", func(w http.ResponseWriter, r *http.Request) {
        ext := path.Ext(r.URL.Path)
        if ext == ".html" || ext == "" {
            data, _ := loginUIBox.Find("login.html")
            _, _ = w.Write(data)
            prefix := getProxyPrefix(r.Header)

            data := getLoginPage(loginUIBox)
            baseURLIndex := strings.Replace(string(data), "%BASE_URL%", prefix+"/", 2)
            _, _ = w.Write([]byte(baseURLIndex))
        } else {
            r.URL.Path = strings.Replace(r.URL.Path, loginEndPoint, "", 1)
            http.FileServer(loginUIBox).ServeHTTP(w, r)
            loginRoot, err := fs.Sub(loginUIBox, loginRootDir)
            if err != nil {
                panic(err)
            }
            http.FileServer(http.FS(loginRoot)).ServeHTTP(w, r)
        }
    })

@@ -255,6 +183,8 @@ func Start() {
    // Serve the web app
    r.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) {
        const uiRootDir = "ui/v2.5/build"

        ext := path.Ext(r.URL.Path)

        if customUILocation != "" {
@@ -267,14 +197,25 @@ func Start() {
        }

        if ext == ".html" || ext == "" {
            data, _ := uiBox.Find("index.html")
            _, _ = w.Write(data)
            data, err := uiBox.ReadFile(uiRootDir + "/index.html")
            if err != nil {
                panic(err)
            }

            prefix := getProxyPrefix(r.Header)
            baseURLIndex := strings.Replace(string(data), "%BASE_URL%", prefix+"/", 2)
            baseURLIndex = strings.Replace(baseURLIndex, "base href=\"/\"", fmt.Sprintf("base href=\"%s\"", prefix+"/"), 2)
            _, _ = w.Write([]byte(baseURLIndex))
        } else {
            isStatic, _ := path.Match("/static/*/*", r.URL.Path)
            if isStatic {
                w.Header().Add("Cache-Control", "max-age=604800000")
            }
            http.FileServer(uiBox).ServeHTTP(w, r)
            uiRoot, err := fs.Sub(uiBox, uiRootDir)
            if err != nil {
                panic(err)
            }
            http.FileServer(http.FS(uiRoot)).ServeHTTP(w, r)
        }
    })

@@ -342,12 +283,12 @@ func makeTLSConfig(c *config.Instance) (*tls.Config, error) {
        return nil, errors.New("SSL key file must be present if certificate file is present")
    }

    cert, err := ioutil.ReadFile(certFile)
    cert, err := os.ReadFile(certFile)
    if err != nil {
        return nil, fmt.Errorf("error reading SSL certificate file %s: %s", certFile, err.Error())
    }

    key, err := ioutil.ReadFile(keyFile)
    key, err := os.ReadFile(keyFile)
    if err != nil {
        return nil, fmt.Errorf("error reading SSL key file %s: %s", keyFile, err.Error())
    }

@@ -382,11 +323,19 @@ func BaseURLMiddleware(next http.Handler) http.Handler {
    } else {
        scheme = "http"
    }
    baseURL := scheme + "://" + r.Host
    prefix := getProxyPrefix(r.Header)

    port := ""
    forwardedPort := r.Header.Get("X-Forwarded-Port")
    if forwardedPort != "" && forwardedPort != "80" && forwardedPort != "8080" {
        port = ":" + forwardedPort
    }

    baseURL := scheme + "://" + r.Host + port + prefix

    externalHost := config.GetInstance().GetExternalHost()
    if externalHost != "" {
        baseURL = externalHost
        baseURL = externalHost + prefix
    }

    r = r.WithContext(context.WithValue(ctx, BaseURLCtxKey, baseURL))
@@ -395,3 +344,12 @@ func BaseURLMiddleware(next http.Handler) http.Handler {
    }
    return http.HandlerFunc(fn)
}

func getProxyPrefix(headers http.Header) string {
    prefix := ""
    if headers.Get("X-Forwarded-Prefix") != "" {
        prefix = strings.TrimRight(headers.Get("X-Forwarded-Prefix"), "/")
    }

    return prefix
}

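The packr boxes are gone: Start now receives go:embed filesystems. Because an embedded tree keeps its source path prefix (e.g. "ui/login/login.html"), the handlers call fs.Sub before handing the tree to http.FileServer. A self-contained sketch of the pattern; it assumes a ui/login directory exists beside the source file at build time:

package main

import (
    "embed"
    "io/fs"
    "net/http"
)

// The directive path stays part of every file name inside the embedded FS,
// which is why the server code strips it with fs.Sub before serving.
//
//go:embed ui/login
var loginUIBox embed.FS

func main() {
    loginRoot, err := fs.Sub(loginUIBox, "ui/login")
    if err != nil {
        panic(err) // only fails if the subdirectory is missing from the FS
    }

    http.Handle("/login/", http.StripPrefix("/login/", http.FileServer(http.FS(loginRoot))))
    _ = http.ListenAndServe(":8080", nil)
}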
@@ -1,6 +1,7 @@
package api

import (
    "embed"
    "fmt"
    "html/template"
    "net/http"
@@ -10,20 +11,24 @@ import (
    "github.com/stashapp/stash/pkg/session"
)

const cookieName = "session"
const usernameFormKey = "username"
const passwordFormKey = "password"
const userIDKey = "userID"

const loginRootDir = "ui/login"
const returnURLParam = "returnURL"

func getLoginPage(loginUIBox embed.FS) []byte {
    data, err := loginUIBox.ReadFile(loginRootDir + "/login.html")
    if err != nil {
        panic(err)
    }
    return data
}

type loginTemplateData struct {
    URL   string
    Error string
}

func redirectToLogin(w http.ResponseWriter, returnURL string, loginError string) {
    data, _ := loginUIBox.Find("login.html")
func redirectToLogin(loginUIBox embed.FS, w http.ResponseWriter, returnURL string, loginError string) {
    data := getLoginPage(loginUIBox)
    templ, err := template.New("Login").Parse(string(data))
    if err != nil {
        http.Error(w, fmt.Sprintf("error: %s", err), http.StatusInternalServerError)
@@ -36,42 +41,48 @@ func redirectToLogin(w http.ResponseWriter, returnURL string, loginError string)
    }
}

func getLoginHandler(w http.ResponseWriter, r *http.Request) {
    if !config.GetInstance().HasCredentials() {
        http.Redirect(w, r, "/", http.StatusFound)
        return
    }
func getLoginHandler(loginUIBox embed.FS) http.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) {
        if !config.GetInstance().HasCredentials() {
            http.Redirect(w, r, "/", http.StatusFound)
            return
        }

    redirectToLogin(w, r.URL.Query().Get(returnURLParam), "")
        redirectToLogin(loginUIBox, w, r.URL.Query().Get(returnURLParam), "")
    }
}

func handleLogin(w http.ResponseWriter, r *http.Request) {
    url := r.FormValue(returnURLParam)
    if url == "" {
        url = "/"
    }
func handleLogin(loginUIBox embed.FS) http.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) {
        url := r.FormValue(returnURLParam)
        if url == "" {
            url = "/"
        }

    err := manager.GetInstance().SessionStore.Login(w, r)
    if err == session.ErrInvalidCredentials {
        // redirect back to the login page with an error
        redirectToLogin(w, url, "Username or password is invalid")
        return
    }
        err := manager.GetInstance().SessionStore.Login(w, r)
        if err == session.ErrInvalidCredentials {
            // redirect back to the login page with an error
            redirectToLogin(loginUIBox, w, url, "Username or password is invalid")
            return
        }

    if err != nil {
        http.Error(w, err.Error(), http.StatusInternalServerError)
        return
    }
        if err != nil {
            http.Error(w, err.Error(), http.StatusInternalServerError)
            return
        }

    http.Redirect(w, r, url, http.StatusFound)
        http.Redirect(w, r, url, http.StatusFound)
    }
}

func handleLogout(w http.ResponseWriter, r *http.Request) {
    if err := manager.GetInstance().SessionStore.Logout(w, r); err != nil {
        http.Error(w, err.Error(), http.StatusInternalServerError)
        return
    }
func handleLogout(loginUIBox embed.FS) http.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) {
        if err := manager.GetInstance().SessionStore.Logout(w, r); err != nil {
            http.Error(w, err.Error(), http.StatusInternalServerError)
            return
        }

    // redirect to the login page if credentials are required
    getLoginHandler(w, r)
        // redirect to the login page if credentials are required
        getLoginHandler(loginUIBox)(w, r)
    }
}

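With the package-level loginUIBox removed, each login handler becomes a factory: it takes its dependencies as parameters and returns an http.HandlerFunc closing over them, the same shape as r.Post(loginEndPoint, handleLogin(loginUIBox)) above. A reduced sketch with a plain byte slice standing in for the embedded page:

package main

import (
    "fmt"
    "net/http"
)

// handleLogin is a factory: dependencies arrive as parameters and the
// returned closure captures them, replacing the old package-level state.
func handleLogin(loginPage []byte) http.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) {
        _, _ = w.Write(loginPage) // the closure reuses loginPage on every request
    }
}

func main() {
    page := []byte("<html>login form</html>") // stands in for the embedded login.html
    http.Handle("/login", handleLogin(page))
    fmt.Println("handler registered; dependencies are explicit and testable")
}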
@@ -59,6 +59,10 @@ func (b SceneURLBuilder) GetSceneMarkerStreamPreviewURL(sceneMarkerID int) strin
    return b.BaseURL + "/scene/" + b.SceneID + "/scene_marker/" + strconv.Itoa(sceneMarkerID) + "/preview"
}

func (b SceneURLBuilder) GetSceneMarkerStreamScreenshotURL(sceneMarkerID int) string {
    return b.BaseURL + "/scene/" + b.SceneID + "/scene_marker/" + strconv.Itoa(sceneMarkerID) + "/screenshot"
}

func (b SceneURLBuilder) GetFunscriptURL() string {
    return b.BaseURL + "/scene/" + b.SceneID + "/funscript"
}

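BaseURLMiddleware now folds X-Forwarded-Port and X-Forwarded-Prefix into the base URL consumed by builders such as SceneURLBuilder, so generated links survive a reverse proxy that mounts Stash under a sub-path. The reconstruction logic, sketched in isolation:

package main

import (
    "fmt"
    "net/http"
    "strings"
)

// baseURL rebuilds the externally visible URL from proxy headers. The port is
// only appended when the proxy reports a non-default one, matching the diff's
// special-casing of 80 and 8080.
func baseURL(r *http.Request) string {
    scheme := "http"
    if r.TLS != nil {
        scheme = "https"
    }

    port := ""
    if fp := r.Header.Get("X-Forwarded-Port"); fp != "" && fp != "80" && fp != "8080" {
        port = ":" + fp
    }

    prefix := strings.TrimRight(r.Header.Get("X-Forwarded-Prefix"), "/")
    return scheme + "://" + r.Host + port + prefix
}

func main() {
    r, _ := http.NewRequest("GET", "http://stash.local/scenes", nil)
    r.Host = "stash.local"
    r.Header.Set("X-Forwarded-Port", "8443")
    r.Header.Set("X-Forwarded-Prefix", "/stash/")
    fmt.Println(baseURL(r)) // http://stash.local:8443/stash
}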
@@ -12,6 +12,8 @@ import (
const galleryExt = "zip"

func TestGalleryPerformers(t *testing.T) {
    t.Parallel()

    const galleryID = 1
    const performerName = "performer name"
    const performerID = 2
@@ -55,6 +57,8 @@ func TestGalleryPerformers(t *testing.T) {
}

func TestGalleryStudios(t *testing.T) {
    t.Parallel()

    const galleryID = 1
    const studioName = "studio name"
    const studioID = 2
@@ -74,12 +78,7 @@ func TestGalleryStudios(t *testing.T) {
    assert := assert.New(t)

    for _, test := range testTables {
        mockStudioReader := &mocks.StudioReaderWriter{}
        mockGalleryReader := &mocks.GalleryReaderWriter{}

        mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()

    doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockGalleryReader *mocks.GalleryReaderWriter, test pathTestTable) {
        if test.Matches {
            mockGalleryReader.On("Find", galleryID).Return(&models.Gallery{}, nil).Once()
            expectedStudioID := models.NullInt64(studioID)
@@ -99,9 +98,38 @@ func TestGalleryStudios(t *testing.T) {
        mockStudioReader.AssertExpectations(t)
        mockGalleryReader.AssertExpectations(t)
    }

    for _, test := range testTables {
        mockStudioReader := &mocks.StudioReaderWriter{}
        mockGalleryReader := &mocks.GalleryReaderWriter{}

        mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()
        mockStudioReader.On("GetAliases", mock.Anything).Return([]string{}, nil).Maybe()

        doTest(mockStudioReader, mockGalleryReader, test)
    }

    // test against aliases
    const unmatchedName = "unmatched"
    studio.Name.String = unmatchedName

    for _, test := range testTables {
        mockStudioReader := &mocks.StudioReaderWriter{}
        mockGalleryReader := &mocks.GalleryReaderWriter{}

        mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()
        mockStudioReader.On("GetAliases", studioID).Return([]string{
            studioName,
        }, nil).Once()
        mockStudioReader.On("GetAliases", reversedStudioID).Return([]string{}, nil).Once()

        doTest(mockStudioReader, mockGalleryReader, test)
    }
}

func TestGalleryTags(t *testing.T) {
    t.Parallel()

    const galleryID = 1
    const tagName = "tag name"
    const tagID = 2
@@ -121,12 +149,7 @@ func TestGalleryTags(t *testing.T) {
    assert := assert.New(t)

    for _, test := range testTables {
        mockTagReader := &mocks.TagReaderWriter{}
        mockGalleryReader := &mocks.GalleryReaderWriter{}

        mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()

    doTest := func(mockTagReader *mocks.TagReaderWriter, mockGalleryReader *mocks.GalleryReaderWriter, test pathTestTable) {
        if test.Matches {
            mockGalleryReader.On("GetTagIDs", galleryID).Return(nil, nil).Once()
            mockGalleryReader.On("UpdateTags", galleryID, []int{tagID}).Return(nil).Once()
@@ -142,4 +165,30 @@ func TestGalleryTags(t *testing.T) {
        mockTagReader.AssertExpectations(t)
        mockGalleryReader.AssertExpectations(t)
    }

    for _, test := range testTables {
        mockTagReader := &mocks.TagReaderWriter{}
        mockGalleryReader := &mocks.GalleryReaderWriter{}

        mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()
        mockTagReader.On("GetAliases", mock.Anything).Return([]string{}, nil).Maybe()

        doTest(mockTagReader, mockGalleryReader, test)
    }

    const unmatchedName = "unmatched"
    tag.Name = unmatchedName

    for _, test := range testTables {
        mockTagReader := &mocks.TagReaderWriter{}
        mockGalleryReader := &mocks.GalleryReaderWriter{}

        mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()
        mockTagReader.On("GetAliases", tagID).Return([]string{
            tagName,
        }, nil).Once()
        mockTagReader.On("GetAliases", reversedTagID).Return([]string{}, nil).Once()

        doTest(mockTagReader, mockGalleryReader, test)
    }
}

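These tests extract the per-case assertions into a doTest closure and distinguish required mock calls (.Once()) from optional ones (.Maybe()): the alias lookup may be skipped when the primary name already matched. A compact testify illustration; aliasSource is a hypothetical mock, not a type from the codebase:

package example

import (
    "testing"

    "github.com/stretchr/testify/mock"
)

// aliasSource is a hypothetical mock, standing in for the generated
// mocks.StudioReaderWriter / mocks.TagReaderWriter types.
type aliasSource struct{ mock.Mock }

func (m *aliasSource) GetAliases(id int) ([]string, error) {
    args := m.Called(id)
    return args.Get(0).([]string), args.Error(1)
}

func TestOptionalExpectation(t *testing.T) {
    m := &aliasSource{}

    // .Maybe() marks the call optional: AssertExpectations passes whether or
    // not the code under test consulted the aliases, which is exactly the
    // situation when the primary name already matched the path.
    m.On("GetAliases", mock.Anything).Return([]string{}, nil).Maybe()

    // (code under test would run here and may skip GetAliases entirely)

    m.AssertExpectations(t)
}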
@@ -12,6 +12,8 @@ import (
const imageExt = "jpg"

func TestImagePerformers(t *testing.T) {
    t.Parallel()

    const imageID = 1
    const performerName = "performer name"
    const performerID = 2
@@ -55,6 +57,8 @@ func TestImagePerformers(t *testing.T) {
}

func TestImageStudios(t *testing.T) {
    t.Parallel()

    const imageID = 1
    const studioName = "studio name"
    const studioID = 2
@@ -74,12 +78,7 @@ func TestImageStudios(t *testing.T) {
    assert := assert.New(t)

    for _, test := range testTables {
        mockStudioReader := &mocks.StudioReaderWriter{}
        mockImageReader := &mocks.ImageReaderWriter{}

        mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()

    doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockImageReader *mocks.ImageReaderWriter, test pathTestTable) {
        if test.Matches {
            mockImageReader.On("Find", imageID).Return(&models.Image{}, nil).Once()
            expectedStudioID := models.NullInt64(studioID)
@@ -99,9 +98,38 @@ func TestImageStudios(t *testing.T) {
        mockStudioReader.AssertExpectations(t)
        mockImageReader.AssertExpectations(t)
    }

    for _, test := range testTables {
        mockStudioReader := &mocks.StudioReaderWriter{}
        mockImageReader := &mocks.ImageReaderWriter{}

        mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()
        mockStudioReader.On("GetAliases", mock.Anything).Return([]string{}, nil).Maybe()

        doTest(mockStudioReader, mockImageReader, test)
    }

    // test against aliases
    const unmatchedName = "unmatched"
    studio.Name.String = unmatchedName

    for _, test := range testTables {
        mockStudioReader := &mocks.StudioReaderWriter{}
        mockImageReader := &mocks.ImageReaderWriter{}

        mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()
        mockStudioReader.On("GetAliases", studioID).Return([]string{
            studioName,
        }, nil).Once()
        mockStudioReader.On("GetAliases", reversedStudioID).Return([]string{}, nil).Once()

        doTest(mockStudioReader, mockImageReader, test)
    }
}

func TestImageTags(t *testing.T) {
    t.Parallel()

    const imageID = 1
    const tagName = "tag name"
    const tagID = 2
@@ -121,12 +149,7 @@ func TestImageTags(t *testing.T) {
    assert := assert.New(t)

    for _, test := range testTables {
        mockTagReader := &mocks.TagReaderWriter{}
        mockImageReader := &mocks.ImageReaderWriter{}

        mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()

    doTest := func(mockTagReader *mocks.TagReaderWriter, mockImageReader *mocks.ImageReaderWriter, test pathTestTable) {
        if test.Matches {
            mockImageReader.On("GetTagIDs", imageID).Return(nil, nil).Once()
            mockImageReader.On("UpdateTags", imageID, []int{tagID}).Return(nil).Once()
@@ -142,4 +165,31 @@ func TestImageTags(t *testing.T) {
        mockTagReader.AssertExpectations(t)
        mockImageReader.AssertExpectations(t)
    }

    for _, test := range testTables {
        mockTagReader := &mocks.TagReaderWriter{}
        mockImageReader := &mocks.ImageReaderWriter{}

        mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()
        mockTagReader.On("GetAliases", mock.Anything).Return([]string{}, nil).Maybe()

        doTest(mockTagReader, mockImageReader, test)
    }

    // test against aliases
    const unmatchedName = "unmatched"
    tag.Name = unmatchedName

    for _, test := range testTables {
        mockTagReader := &mocks.TagReaderWriter{}
        mockImageReader := &mocks.ImageReaderWriter{}

        mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()
        mockTagReader.On("GetAliases", tagID).Return([]string{
            tagName,
        }, nil).Once()
        mockTagReader.On("GetAliases", reversedTagID).Return([]string{}, nil).Once()

        doTest(mockTagReader, mockImageReader, test)
    }
}

@ -1,3 +1,4 @@
|
|||
//go:build integration
|
||||
// +build integration
|
||||
|
||||
package autotag
|
||||
|
|
@ -6,7 +7,6 @@ import (
|
|||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
|
|
@ -43,7 +43,7 @@ func testTeardown(databaseFile string) {
|
|||
|
||||
func runTests(m *testing.M) int {
|
||||
// create the database file
|
||||
f, err := ioutil.TempFile("", "*.sqlite")
|
||||
f, err := os.CreateTemp("", "*.sqlite")
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("Could not create temporary file: %s", err.Error()))
|
||||
}
|
||||
|
|
@ -408,7 +408,12 @@ func TestParseStudioScenes(t *testing.T) {
|
|||
|
||||
for _, s := range studios {
|
||||
if err := withTxn(func(r models.Repository) error {
|
||||
return StudioScenes(s, nil, r.Scene())
|
||||
aliases, err := r.Studio().GetAliases(s.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return StudioScenes(s, nil, aliases, r.Scene())
|
||||
}); err != nil {
|
||||
t.Errorf("Error auto-tagging performers: %s", err)
|
||||
}
|
||||
|
|
@ -558,7 +563,12 @@ func TestParseStudioImages(t *testing.T) {
|
|||
|
||||
for _, s := range studios {
|
||||
if err := withTxn(func(r models.Repository) error {
|
||||
return StudioImages(s, nil, r.Image())
|
||||
aliases, err := r.Studio().GetAliases(s.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return StudioImages(s, nil, aliases, r.Image())
|
||||
}); err != nil {
|
||||
t.Errorf("Error auto-tagging performers: %s", err)
|
||||
}
|
||||
|
|
@ -708,7 +718,12 @@ func TestParseStudioGalleries(t *testing.T) {
|
|||
|
||||
for _, s := range studios {
|
||||
if err := withTxn(func(r models.Repository) error {
|
||||
return StudioGalleries(s, nil, r.Gallery())
|
||||
aliases, err := r.Studio().GetAliases(s.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return StudioGalleries(s, nil, aliases, r.Gallery())
|
||||
}); err != nil {
|
||||
t.Errorf("Error auto-tagging performers: %s", err)
|
||||
}
|
||||
|
|
|
|||
|
|
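The setup code also moves from the deprecated ioutil helpers to their os equivalents (Go 1.16+); the temp-database creation reduces to:

package main

import (
    "fmt"
    "os"
)

func main() {
    // os.CreateTemp replaces ioutil.TempFile; the "*" in the pattern is
    // substituted with a random string, yielding e.g. /tmp/123456789.sqlite
    f, err := os.CreateTemp("", "*.sqlite")
    if err != nil {
        panic(fmt.Sprintf("could not create temporary file: %v", err))
    }
    defer os.Remove(f.Name()) // clean up the database file after the run

    fmt.Println("test database at", f.Name())
    _ = f.Close()
}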
@@ -9,6 +9,8 @@ import (
)

func TestPerformerScenes(t *testing.T) {
    t.Parallel()

    type test struct {
        performerName string
        expectedRegex string
@@ -23,6 +25,10 @@ func TestPerformerScenes(t *testing.T) {
        "performer + name",
        `(?i)(?:^|_|[^\w\d])performer[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
    },
    {
        `performer + name\`,
        `(?i)(?:^|_|[^\w\d])performer[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\w\d])`,
    },
}

for _, p := range performerNames {
@@ -81,6 +87,8 @@ func testPerformerScenes(t *testing.T, performerName, expectedRegex string) {
}

func TestPerformerImages(t *testing.T) {
    t.Parallel()

    type test struct {
        performerName string
        expectedRegex string
@@ -153,6 +161,8 @@ func testPerformerImages(t *testing.T, performerName, expectedRegex string) {
}

func TestPerformerGalleries(t *testing.T) {
    t.Parallel()

    type test struct {
        performerName string
        expectedRegex string

@@ -145,6 +145,8 @@ func generateTestTable(testName, ext string) []pathTestTable {
}

func TestScenePerformers(t *testing.T) {
    t.Parallel()

    const sceneID = 1
    const performerName = "performer name"
    const performerID = 2
@@ -188,6 +190,8 @@ func TestScenePerformers(t *testing.T) {
}

func TestSceneStudios(t *testing.T) {
    t.Parallel()

    const sceneID = 1
    const studioName = "studio name"
    const studioID = 2
@@ -207,12 +211,7 @@ func TestSceneStudios(t *testing.T) {
    assert := assert.New(t)

    for _, test := range testTables {
        mockStudioReader := &mocks.StudioReaderWriter{}
        mockSceneReader := &mocks.SceneReaderWriter{}

        mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()

    doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockSceneReader *mocks.SceneReaderWriter, test pathTestTable) {
        if test.Matches {
            mockSceneReader.On("Find", sceneID).Return(&models.Scene{}, nil).Once()
            expectedStudioID := models.NullInt64(studioID)
@@ -232,9 +231,38 @@ func TestSceneStudios(t *testing.T) {
        mockStudioReader.AssertExpectations(t)
        mockSceneReader.AssertExpectations(t)
    }

    for _, test := range testTables {
        mockStudioReader := &mocks.StudioReaderWriter{}
        mockSceneReader := &mocks.SceneReaderWriter{}

        mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()
        mockStudioReader.On("GetAliases", mock.Anything).Return([]string{}, nil).Maybe()

        doTest(mockStudioReader, mockSceneReader, test)
    }

    const unmatchedName = "unmatched"
    studio.Name.String = unmatchedName

    // test against aliases
    for _, test := range testTables {
        mockStudioReader := &mocks.StudioReaderWriter{}
        mockSceneReader := &mocks.SceneReaderWriter{}

        mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()
        mockStudioReader.On("GetAliases", studioID).Return([]string{
            studioName,
        }, nil).Once()
        mockStudioReader.On("GetAliases", reversedStudioID).Return([]string{}, nil).Once()

        doTest(mockStudioReader, mockSceneReader, test)
    }
}

func TestSceneTags(t *testing.T) {
    t.Parallel()

    const sceneID = 1
    const tagName = "tag name"
    const tagID = 2
@@ -254,12 +282,7 @@ func TestSceneTags(t *testing.T) {
    assert := assert.New(t)

    for _, test := range testTables {
        mockTagReader := &mocks.TagReaderWriter{}
        mockSceneReader := &mocks.SceneReaderWriter{}

        mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()

    doTest := func(mockTagReader *mocks.TagReaderWriter, mockSceneReader *mocks.SceneReaderWriter, test pathTestTable) {
        if test.Matches {
            mockSceneReader.On("GetTagIDs", sceneID).Return(nil, nil).Once()
            mockSceneReader.On("UpdateTags", sceneID, []int{tagID}).Return(nil).Once()
@@ -275,4 +298,31 @@ func TestSceneTags(t *testing.T) {
        mockTagReader.AssertExpectations(t)
        mockSceneReader.AssertExpectations(t)
    }

    for _, test := range testTables {
        mockTagReader := &mocks.TagReaderWriter{}
        mockSceneReader := &mocks.SceneReaderWriter{}

        mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()
        mockTagReader.On("GetAliases", mock.Anything).Return([]string{}, nil).Maybe()

        doTest(mockTagReader, mockSceneReader, test)
    }

    const unmatchedName = "unmatched"
    tag.Name = unmatchedName

    // test against aliases
    for _, test := range testTables {
        mockTagReader := &mocks.TagReaderWriter{}
        mockSceneReader := &mocks.SceneReaderWriter{}

        mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()
        mockTagReader.On("GetAliases", tagID).Return([]string{
            tagName,
        }, nil).Once()
        mockTagReader.On("GetAliases", reversedTagID).Return([]string{}, nil).Once()

        doTest(mockTagReader, mockSceneReader, test)
    }
}

@ -2,7 +2,6 @@ package autotag
|
|||
|
||||
import (
|
||||
"database/sql"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
|
|
@ -16,7 +15,26 @@ func getMatchingStudios(path string, reader models.StudioReader) ([]*models.Stud
|
|||
|
||||
var ret []*models.Studio
|
||||
for _, c := range candidates {
|
||||
matches := false
|
||||
if nameMatchesPath(c.Name.String, path) {
|
||||
matches = true
|
||||
}
|
||||
|
||||
if !matches {
|
||||
aliases, err := reader.GetAliases(c.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, alias := range aliases {
|
||||
if nameMatchesPath(alias, path) {
|
||||
matches = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if matches {
|
||||
ret = append(ret, c)
|
||||
}
|
||||
}
|
||||
|
|
@@ -96,37 +114,65 @@ func addGalleryStudio(galleryWriter models.GalleryReaderWriter, galleryID, studi
 	return true, nil
 }

-func getStudioTagger(p *models.Studio) tagger {
-	return tagger{
+func getStudioTagger(p *models.Studio, aliases []string) []tagger {
+	ret := []tagger{{
 		ID:   p.ID,
 		Type: "studio",
 		Name: p.Name.String,
 	}}

+	for _, a := range aliases {
+		ret = append(ret, tagger{
+			ID:   p.ID,
+			Type: "studio",
+			Name: a,
+		})
+	}
+
+	return ret
+}
+
 // StudioScenes searches for scenes whose path matches the provided studio name and tags the scene with the studio, if studio is not already set on the scene.
-func StudioScenes(p *models.Studio, paths []string, rw models.SceneReaderWriter) error {
-	t := getStudioTagger(p)
+func StudioScenes(p *models.Studio, paths []string, aliases []string, rw models.SceneReaderWriter) error {
+	t := getStudioTagger(p, aliases)

-	return t.tagScenes(paths, rw, func(subjectID, otherID int) (bool, error) {
-		return addSceneStudio(rw, otherID, subjectID)
-	})
+	for _, tt := range t {
+		if err := tt.tagScenes(paths, rw, func(subjectID, otherID int) (bool, error) {
+			return addSceneStudio(rw, otherID, subjectID)
+		}); err != nil {
+			return err
+		}
+	}
+
+	return nil
 }

 // StudioImages searches for images whose path matches the provided studio name and tags the image with the studio, if studio is not already set on the image.
-func StudioImages(p *models.Studio, paths []string, rw models.ImageReaderWriter) error {
-	t := getStudioTagger(p)
+func StudioImages(p *models.Studio, paths []string, aliases []string, rw models.ImageReaderWriter) error {
+	t := getStudioTagger(p, aliases)

-	return t.tagImages(paths, rw, func(subjectID, otherID int) (bool, error) {
-		return addImageStudio(rw, otherID, subjectID)
-	})
+	for _, tt := range t {
+		if err := tt.tagImages(paths, rw, func(subjectID, otherID int) (bool, error) {
+			return addImageStudio(rw, otherID, subjectID)
+		}); err != nil {
+			return err
+		}
+	}
+
+	return nil
 }

 // StudioGalleries searches for galleries whose path matches the provided studio name and tags the gallery with the studio, if studio is not already set on the gallery.
-func StudioGalleries(p *models.Studio, paths []string, rw models.GalleryReaderWriter) error {
-	t := getStudioTagger(p)
+func StudioGalleries(p *models.Studio, paths []string, aliases []string, rw models.GalleryReaderWriter) error {
+	t := getStudioTagger(p, aliases)

-	return t.tagGalleries(paths, rw, func(subjectID, otherID int) (bool, error) {
-		return addGalleryStudio(rw, otherID, subjectID)
-	})
+	for _, tt := range t {
+		if err := tt.tagGalleries(paths, rw, func(subjectID, otherID int) (bool, error) {
+			return addGalleryStudio(rw, otherID, subjectID)
+		}); err != nil {
+			return err
+		}
+	}
+
+	return nil
 }
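Reviewer note: the alias change turns the single studio tagger into a slice of taggers that all share the studio's ID, one per name. A minimal, self-contained sketch of that fan-out (the expand helper and the values are illustrative, not code from this PR):

package main

import "fmt"

// tagger pairs an ID with one name it can be matched on; a studio with
// aliases expands into several taggers sharing the same ID.
type tagger struct {
	ID   int
	Name string
}

// expand mirrors the shape of getStudioTagger: one entry for the canonical
// name, plus one per alias.
func expand(id int, name string, aliases []string) []tagger {
	ret := []tagger{{ID: id, Name: name}}
	for _, a := range aliases {
		ret = append(ret, tagger{ID: id, Name: a})
	}
	return ret
}

func main() {
	for _, t := range expand(2, "studio name", []string{"alias name"}) {
		// StudioScenes runs one tagging pass per entry and stops on error
		fmt.Printf("tag scenes matching %q for studio %d\n", t.Name, t.ID)
	}
}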
@@ -8,35 +8,81 @@ import (
 	"github.com/stretchr/testify/assert"
 )

+type testStudioCase struct {
+	studioName    string
+	expectedRegex string
+	aliasName     string
+	aliasRegex    string
+}
+
+var testStudioCases = []testStudioCase{
+	{
+		"studio name",
+		`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*name(?:$|_|[^\w\d])`,
+		"",
+		"",
+	},
+	{
+		"studio + name",
+		`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
+		"",
+		"",
+	},
+	{
+		`studio + name\`,
+		`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\w\d])`,
+		"",
+		"",
+	},
+	{
+		"studio name",
+		`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*name(?:$|_|[^\w\d])`,
+		"alias name",
+		`(?i)(?:^|_|[^\w\d])alias[.\-_ ]*name(?:$|_|[^\w\d])`,
+	},
+	{
+		"studio + name",
+		`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
+		"alias + name",
+		`(?i)(?:^|_|[^\w\d])alias[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
+	},
+	{
+		`studio + name\`,
+		`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\w\d])`,
+		`alias + name\`,
+		`(?i)(?:^|_|[^\w\d])alias[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\w\d])`,
+	},
+}
+
 func TestStudioScenes(t *testing.T) {
-	type test struct {
-		studioName    string
-		expectedRegex string
-	}
 	t.Parallel()

-	studioNames := []test{
-		{
-			"studio name",
-			`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*name(?:$|_|[^\w\d])`,
-		},
-		{
-			"studio + name",
-			`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
-		},
-	}
-
-	for _, p := range studioNames {
-		testStudioScenes(t, p.studioName, p.expectedRegex)
+	for _, p := range testStudioCases {
+		testStudioScenes(t, p)
 	}
 }

-func testStudioScenes(t *testing.T, studioName, expectedRegex string) {
+func testStudioScenes(t *testing.T, tc testStudioCase) {
+	studioName := tc.studioName
+	expectedRegex := tc.expectedRegex
+	aliasName := tc.aliasName
+	aliasRegex := tc.aliasRegex
+
 	mockSceneReader := &mocks.SceneReaderWriter{}

 	const studioID = 2

+	var aliases []string
+
+	testPathName := studioName
+	if aliasName != "" {
+		aliases = []string{aliasName}
+		testPathName = aliasName
+	}
+
+	matchingPaths, falsePaths := generateTestPaths(testPathName, "mp4")
+
 	var scenes []*models.Scene
-	matchingPaths, falsePaths := generateTestPaths(studioName, sceneExt)
 	for i, p := range append(matchingPaths, falsePaths...) {
 		scenes = append(scenes, &models.Scene{
 			ID: i + 1,

@@ -64,7 +110,23 @@ func testStudioScenes(t *testing.T, studioName, expectedRegex string) {
 		PerPage: &perPage,
 	}

-	mockSceneReader.On("Query", expectedSceneFilter, expectedFindFilter).Return(scenes, len(scenes), nil).Once()
+	// if alias provided, then don't find by name
+	onNameQuery := mockSceneReader.On("Query", expectedSceneFilter, expectedFindFilter)
+	if aliasName == "" {
+		onNameQuery.Return(scenes, len(scenes), nil).Once()
+	} else {
+		onNameQuery.Return(nil, 0, nil).Once()
+
+		expectedAliasFilter := &models.SceneFilterType{
+			Organized: &organized,
+			Path: &models.StringCriterionInput{
+				Value:    aliasRegex,
+				Modifier: models.CriterionModifierMatchesRegex,
+			},
+		}
+
+		mockSceneReader.On("Query", expectedAliasFilter, expectedFindFilter).Return(scenes, len(scenes), nil).Once()
+	}

 	for i := range matchingPaths {
 		sceneID := i + 1

@@ -76,7 +138,7 @@ func testStudioScenes(t *testing.T, studioName, expectedRegex string) {
 		}).Return(nil, nil).Once()
 	}

-	err := StudioScenes(&studio, nil, mockSceneReader)
+	err := StudioScenes(&studio, nil, aliases, mockSceneReader)

 	assert := assert.New(t)

@@ -85,34 +147,33 @@ func testStudioScenes(t *testing.T, studioName, expectedRegex string) {
 }

 func TestStudioImages(t *testing.T) {
-	type test struct {
-		studioName    string
-		expectedRegex string
-	}
 	t.Parallel()

-	studioNames := []test{
-		{
-			"studio name",
-			`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*name(?:$|_|[^\w\d])`,
-		},
-		{
-			"studio + name",
-			`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
-		},
-	}
-
-	for _, p := range studioNames {
-		testStudioImages(t, p.studioName, p.expectedRegex)
+	for _, p := range testStudioCases {
+		testStudioImages(t, p)
 	}
 }

-func testStudioImages(t *testing.T, studioName, expectedRegex string) {
+func testStudioImages(t *testing.T, tc testStudioCase) {
+	studioName := tc.studioName
+	expectedRegex := tc.expectedRegex
+	aliasName := tc.aliasName
+	aliasRegex := tc.aliasRegex
+
 	mockImageReader := &mocks.ImageReaderWriter{}

 	const studioID = 2

+	var aliases []string
+
+	testPathName := studioName
+	if aliasName != "" {
+		aliases = []string{aliasName}
+		testPathName = aliasName
+	}
+
 	var images []*models.Image
-	matchingPaths, falsePaths := generateTestPaths(studioName, imageExt)
+	matchingPaths, falsePaths := generateTestPaths(testPathName, imageExt)
 	for i, p := range append(matchingPaths, falsePaths...) {
 		images = append(images, &models.Image{
 			ID: i + 1,

@@ -140,7 +201,23 @@ func testStudioImages(t *testing.T, studioName, expectedRegex string) {
 		PerPage: &perPage,
 	}

-	mockImageReader.On("Query", expectedImageFilter, expectedFindFilter).Return(images, len(images), nil).Once()
+	// if alias provided, then don't find by name
+	onNameQuery := mockImageReader.On("Query", expectedImageFilter, expectedFindFilter)
+	if aliasName == "" {
+		onNameQuery.Return(images, len(images), nil).Once()
+	} else {
+		onNameQuery.Return(nil, 0, nil).Once()
+
+		expectedAliasFilter := &models.ImageFilterType{
+			Organized: &organized,
+			Path: &models.StringCriterionInput{
+				Value:    aliasRegex,
+				Modifier: models.CriterionModifierMatchesRegex,
+			},
+		}
+
+		mockImageReader.On("Query", expectedAliasFilter, expectedFindFilter).Return(images, len(images), nil).Once()
+	}

 	for i := range matchingPaths {
 		imageID := i + 1

@@ -152,7 +229,7 @@ func testStudioImages(t *testing.T, studioName, expectedRegex string) {
 		}).Return(nil, nil).Once()
 	}

-	err := StudioImages(&studio, nil, mockImageReader)
+	err := StudioImages(&studio, nil, aliases, mockImageReader)

 	assert := assert.New(t)

@@ -161,34 +238,32 @@ func testStudioImages(t *testing.T, studioName, expectedRegex string) {
 }

 func TestStudioGalleries(t *testing.T) {
-	type test struct {
-		studioName    string
-		expectedRegex string
-	}
 	t.Parallel()

-	studioNames := []test{
-		{
-			"studio name",
-			`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*name(?:$|_|[^\w\d])`,
-		},
-		{
-			"studio + name",
-			`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
-		},
-	}
-
-	for _, p := range studioNames {
-		testStudioGalleries(t, p.studioName, p.expectedRegex)
+	for _, p := range testStudioCases {
+		testStudioGalleries(t, p)
 	}
 }

-func testStudioGalleries(t *testing.T, studioName, expectedRegex string) {
+func testStudioGalleries(t *testing.T, tc testStudioCase) {
+	studioName := tc.studioName
+	expectedRegex := tc.expectedRegex
+	aliasName := tc.aliasName
+	aliasRegex := tc.aliasRegex
 	mockGalleryReader := &mocks.GalleryReaderWriter{}

 	const studioID = 2

+	var aliases []string
+
+	testPathName := studioName
+	if aliasName != "" {
+		aliases = []string{aliasName}
+		testPathName = aliasName
+	}
+
 	var galleries []*models.Gallery
-	matchingPaths, falsePaths := generateTestPaths(studioName, galleryExt)
+	matchingPaths, falsePaths := generateTestPaths(testPathName, galleryExt)
 	for i, p := range append(matchingPaths, falsePaths...) {
 		galleries = append(galleries, &models.Gallery{
 			ID: i + 1,

@@ -216,7 +291,23 @@ func testStudioGalleries(t *testing.T, studioName, expectedRegex string) {
 		PerPage: &perPage,
 	}

-	mockGalleryReader.On("Query", expectedGalleryFilter, expectedFindFilter).Return(galleries, len(galleries), nil).Once()
+	// if alias provided, then don't find by name
+	onNameQuery := mockGalleryReader.On("Query", expectedGalleryFilter, expectedFindFilter)
+	if aliasName == "" {
+		onNameQuery.Return(galleries, len(galleries), nil).Once()
+	} else {
+		onNameQuery.Return(nil, 0, nil).Once()
+
+		expectedAliasFilter := &models.GalleryFilterType{
+			Organized: &organized,
+			Path: &models.StringCriterionInput{
+				Value:    aliasRegex,
+				Modifier: models.CriterionModifierMatchesRegex,
+			},
+		}
+
+		mockGalleryReader.On("Query", expectedAliasFilter, expectedFindFilter).Return(galleries, len(galleries), nil).Once()
+	}

 	for i := range matchingPaths {
 		galleryID := i + 1

@@ -228,7 +319,7 @@ func testStudioGalleries(t *testing.T, studioName, expectedRegex string) {
 		}).Return(nil, nil).Once()
 	}

-	err := StudioGalleries(&studio, nil, mockGalleryReader)
+	err := StudioGalleries(&studio, nil, aliases, mockGalleryReader)

 	assert := assert.New(t)
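Reviewer note: the alias branches above lean on testify's two-step expectation setup: On returns a *mock.Call whose return values can be attached later. A self-contained sketch of that pattern (the Searcher type is invented for illustration; the generated mocks used above work the same way):

package main

import (
	"fmt"

	"github.com/stretchr/testify/mock"
)

// Searcher stands in for a generated reader/writer mock.
type Searcher struct {
	mock.Mock
}

func (s *Searcher) Query(filter string) (int, error) {
	args := s.Called(filter)
	return args.Int(0), args.Error(1)
}

func main() {
	s := &Searcher{}

	// register the call first, decide the return values afterwards --
	// mirroring how onNameQuery is configured per test case above
	onNameQuery := s.On("Query", "name-filter")
	onNameQuery.Return(0, nil).Once()

	n, err := s.Query("name-filter")
	fmt.Println(n, err) // 0 <nil>
}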
@@ -16,9 +16,27 @@ func getMatchingTags(path string, tagReader models.TagReader) ([]*models.Tag, er
 	}

 	var ret []*models.Tag
-	for _, p := range tags {
-		if nameMatchesPath(p.Name, path) {
-			ret = append(ret, p)
+	for _, t := range tags {
+		matches := false
+		if nameMatchesPath(t.Name, path) {
+			matches = true
+		}
+
+		if !matches {
+			aliases, err := tagReader.GetAliases(t.ID)
+			if err != nil {
+				return nil, err
+			}
+			for _, alias := range aliases {
+				if nameMatchesPath(alias, path) {
+					matches = true
+					break
+				}
+			}
+		}
+
+		if matches {
+			ret = append(ret, t)
 		}
 	}
@@ -28,6 +28,12 @@ var testTagCases = []testTagCase{
 		"",
 		"",
 	},
+	{
+		`tag + name\`,
+		`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\w\d])`,
+		"",
+		"",
+	},
 	{
 		"tag name",
 		`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*name(?:$|_|[^\w\d])`,

@@ -40,9 +46,17 @@ var testTagCases = []testTagCase{
 		"alias + name",
 		`(?i)(?:^|_|[^\w\d])alias[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
 	},
+	{
+		`tag + name\`,
+		`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\w\d])`,
+		`alias + name\`,
+		`(?i)(?:^|_|[^\w\d])alias[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\w\d])`,
+	},
 }

 func TestTagScenes(t *testing.T) {
+	t.Parallel()
+
 	for _, p := range testTagCases {
 		testTagScenes(t, p)
 	}

@@ -129,6 +143,8 @@ func testTagScenes(t *testing.T, tc testTagCase) {
 }

 func TestTagImages(t *testing.T) {
+	t.Parallel()
+
 	for _, p := range testTagCases {
 		testTagImages(t, p)
 	}

@@ -214,6 +230,8 @@ func testTagImages(t *testing.T, tc testTagCase) {
 }

 func TestTagGalleries(t *testing.T) {
+	t.Parallel()
+
 	for _, p := range testTagCases {
 		testTagGalleries(t, p)
 	}
@@ -25,22 +25,9 @@ import (

 const separatorChars = `.\-_ `

-// fixes #1292
-func escapePathRegex(name string) string {
-	ret := name
-
-	chars := `+*?()|[]{}^$`
-	for _, c := range chars {
-		cStr := string(c)
-		ret = strings.ReplaceAll(ret, cStr, `\`+cStr)
-	}
-
-	return ret
-}
-
 func getPathQueryRegex(name string) string {
 	// escape specific regex characters
-	name = escapePathRegex(name)
+	name = regexp.QuoteMeta(name)

 	// handle path separators
 	const separator = `[` + separatorChars + `]`

@@ -52,7 +39,7 @@ func getPathQueryRegex(name string) string {

 func nameMatchesPath(name, path string) bool {
 	// escape specific regex characters
-	name = escapePathRegex(name)
+	name = regexp.QuoteMeta(name)

 	name = strings.ToLower(name)
 	path = strings.ToLower(path)
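Reviewer note: dropping escapePathRegex is safe because regexp.QuoteMeta escapes every metacharacter the manual +*?()|[]{}^$ loop handled, plus \ and . which the loop left alone. A quick self-contained check:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// QuoteMeta escapes all regex metacharacters in one call
	name := `studio + name\`
	quoted := regexp.QuoteMeta(name)
	fmt.Println(quoted) // studio \+ name\\

	// the quoted form still matches the literal text it came from
	fmt.Println(regexp.MustCompile(quoted).MatchString(`dir/studio + name\`)) // true
}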
pkg/database/custom_migrations.go (new file, 72 lines)

@@ -0,0 +1,72 @@
+package database
+
+import (
+	"database/sql"
+	"fmt"
+	"strings"
+
+	"github.com/jmoiron/sqlx"
+	"github.com/stashapp/stash/pkg/logger"
+)
+
+func runCustomMigrations() error {
+	if err := createImagesChecksumIndex(); err != nil {
+		return err
+	}
+
+	return nil
+}
+
+func createImagesChecksumIndex() error {
+	return WithTxn(func(tx *sqlx.Tx) error {
+		row := tx.QueryRow("SELECT 1 AS found FROM sqlite_master WHERE type = 'index' AND name = 'images_checksum_unique'")
+		err := row.Err()
+		if err != nil && err != sql.ErrNoRows {
+			return err
+		}
+
+		if err == nil {
+			var found bool
+			if err := row.Scan(&found); err != nil && err != sql.ErrNoRows {
+				return fmt.Errorf("error while scanning for index: %w", err)
+			}
+			if found {
+				return nil
+			}
+		}
+
+		_, err = tx.Exec("CREATE UNIQUE INDEX images_checksum_unique ON images (checksum)")
+		if err == nil {
+			_, err = tx.Exec("DROP INDEX IF EXISTS index_images_checksum")
+			if err != nil {
+				logger.Errorf("Failed to remove surrogate images.checksum index: %s", err)
+			}
+			logger.Info("Created unique constraint on images table")
+			return nil
+		}
+
+		_, err = tx.Exec("CREATE INDEX IF NOT EXISTS index_images_checksum ON images (checksum)")
+		if err != nil {
+			logger.Errorf("Unable to create index on images.checksum: %s", err)
+		}
+
+		var result []struct {
+			Checksum string `db:"checksum"`
+		}
+
+		err = tx.Select(&result, "SELECT checksum FROM images GROUP BY checksum HAVING COUNT(1) > 1")
+		if err != nil && err != sql.ErrNoRows {
+			logger.Errorf("Unable to determine non-unique image checksums: %s", err)
+			return nil
+		}
+
+		checksums := make([]string, len(result))
+		for i, res := range result {
+			checksums[i] = res.Checksum
+		}
+
+		logger.Warnf("The following duplicate image checksums have been found. Please remove the duplicates and restart. %s", strings.Join(checksums, ", "))
+
+		return nil
+	})
+}
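Reviewer note: the heart of createImagesChecksumIndex is the sqlite_master probe. A self-contained sketch of that check using the same driver the project depends on (table and index names taken from the code above, the rest illustrative):

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/mattn/go-sqlite3"
)

// indexExists queries sqlite_master the way the custom migration does:
// a matching index yields a row, otherwise Scan reports sql.ErrNoRows.
func indexExists(db *sql.DB, name string) (bool, error) {
	var found int
	err := db.QueryRow("SELECT 1 FROM sqlite_master WHERE type = 'index' AND name = ?", name).Scan(&found)
	if err == sql.ErrNoRows {
		return false, nil
	} else if err != nil {
		return false, err
	}
	return true, nil
}

func main() {
	db, err := sql.Open("sqlite3", ":memory:")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if _, err := db.Exec("CREATE TABLE images (checksum TEXT)"); err != nil {
		log.Fatal(err)
	}
	if _, err := db.Exec("CREATE UNIQUE INDEX images_checksum_unique ON images (checksum)"); err != nil {
		log.Fatal(err)
	}

	ok, err := indexExists(db, "images_checksum_unique")
	fmt.Println(ok, err) // true <nil>
}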
@@ -2,6 +2,7 @@ package database

 import (
 	"database/sql"
+	"embed"
 	"errors"
 	"fmt"
 	"os"

@@ -9,10 +10,9 @@ import (
 	"time"

 	"github.com/fvbommel/sortorder"
-	"github.com/gobuffalo/packr/v2"
 	"github.com/golang-migrate/migrate/v4"
 	sqlite3mig "github.com/golang-migrate/migrate/v4/database/sqlite3"
-	"github.com/golang-migrate/migrate/v4/source"
+	"github.com/golang-migrate/migrate/v4/source/iofs"
 	"github.com/jmoiron/sqlx"
 	sqlite3 "github.com/mattn/go-sqlite3"

@@ -21,11 +21,14 @@ import (
 )

 var DB *sqlx.DB
-var WriteMu *sync.Mutex
+var WriteMu sync.Mutex
 var dbPath string
-var appSchemaVersion uint = 25
+var appSchemaVersion uint = 28
 var databaseSchemaVersion uint

+//go:embed migrations/*.sql
+var migrationsBox embed.FS
+
 var (
 	// ErrMigrationNeeded indicates that a database migration is needed
 	// before the database can be initialized

@@ -84,14 +87,32 @@ func Initialize(databasePath string) error {

 	const disableForeignKeys = false
 	DB = open(databasePath, disableForeignKeys)
-	WriteMu = &sync.Mutex{}
+
+	if err := runCustomMigrations(); err != nil {
+		return err
+	}

 	return nil
 }

+func Close() error {
+	WriteMu.Lock()
+	defer WriteMu.Unlock()
+
+	if DB != nil {
+		if err := DB.Close(); err != nil {
+			return err
+		}
+
+		DB = nil
+	}
+
+	return nil
+}
+
 func open(databasePath string, disableForeignKeys bool) *sqlx.DB {
 	// https://github.com/mattn/go-sqlite3
-	url := "file:" + databasePath + "?_journal=WAL"
+	url := "file:" + databasePath + "?_journal=WAL&_sync=NORMAL"
 	if !disableForeignKeys {
 		url += "&_fk=true"
 	}

@@ -130,7 +151,10 @@ func Reset(databasePath string) error {
 		}
 	}

-	Initialize(databasePath)
+	if err := Initialize(databasePath); err != nil {
+		return fmt.Errorf("[reset DB] unable to initialize: %w", err)
+	}

 	return nil
 }

@@ -149,7 +173,7 @@ func Backup(db *sqlx.DB, backupPath string) error {
 	logger.Infof("Backing up database into: %s", backupPath)
 	_, err := db.Exec(`VACUUM INTO "` + backupPath + `"`)
 	if err != nil {
-		return fmt.Errorf("Vacuum failed: %s", err)
+		return fmt.Errorf("vacuum failed: %s", err)
 	}

 	return nil

@@ -182,17 +206,13 @@ func Version() uint {
 }

 func getMigrate() (*migrate.Migrate, error) {
-	migrationsBox := packr.New("Migrations Box", "./migrations")
-	packrSource := &Packr2Source{
-		Box:        migrationsBox,
-		Migrations: source.NewMigrations(),
+	migrations, err := iofs.New(migrationsBox, "migrations")
+	if err != nil {
+		panic(err.Error())
 	}

-	databasePath := utils.FixWindowsPath(dbPath)
-	s, _ := WithInstance(packrSource)
-
 	const disableForeignKeys = true
-	conn := open(databasePath, disableForeignKeys)
+	conn := open(dbPath, disableForeignKeys)

 	driver, err := sqlite3mig.WithInstance(conn.DB, &sqlite3mig.Config{})
 	if err != nil {

@@ -201,9 +221,9 @@ func getMigrate() (*migrate.Migrate, error) {

 	// use sqlite3Driver so that migration has access to durationToTinyInt
 	return migrate.NewWithInstance(
-		"packr2",
-		s,
-		databasePath,
+		"iofs",
+		migrations,
+		dbPath,
 		driver,
 	)
 }
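Reviewer note: the packr2 box is replaced by a stdlib embed.FS fed through golang-migrate's iofs source driver. The wiring, reduced to its essentials (a sketch, not the exact code above; it assumes a migrations/ directory of .sql files next to the source file):

package database

import (
	"embed"

	"github.com/golang-migrate/migrate/v4"
	sqlite3mig "github.com/golang-migrate/migrate/v4/database/sqlite3"
	"github.com/golang-migrate/migrate/v4/source/iofs"
	"github.com/jmoiron/sqlx"
)

//go:embed migrations/*.sql
var migrationsFS embed.FS

// newMigrate mirrors getMigrate above: the embedded SQL files become a
// migration source, paired with the sqlite3 database driver.
func newMigrate(conn *sqlx.DB, dbPath string) (*migrate.Migrate, error) {
	src, err := iofs.New(migrationsFS, "migrations")
	if err != nil {
		return nil, err
	}

	driver, err := sqlite3mig.WithInstance(conn.DB, &sqlite3mig.Config{})
	if err != nil {
		return nil, err
	}

	return migrate.NewWithInstance("iofs", src, dbPath, driver)
}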
@@ -225,6 +245,7 @@ func RunMigrations() error {
 	if err != nil {
 		panic(err.Error())
 	}
+	defer m.Close()

 	databaseSchemaVersion, _, _ = m.Version()
 	stepNumber := appSchemaVersion - databaseSchemaVersion

@@ -233,22 +254,20 @@ func RunMigrations() error {
 		err = m.Steps(int(stepNumber))
 		if err != nil {
 			// migration failed
 			logger.Errorf("Error migrating database: %s", err.Error())
-			m.Close()
 			return err
 		}
 	}

-	m.Close()
-
 	// re-initialise the database
-	Initialize(dbPath)
+	if err = Initialize(dbPath); err != nil {
+		logger.Warnf("Error re-initializing the database: %v", err)
+	}

 	// run a vacuum on the database
 	logger.Info("Performing vacuum on database")
 	_, err = DB.Exec("VACUUM")
 	if err != nil {
-		logger.Warnf("error while performing post-migration vacuum: %s", err.Error())
+		logger.Warnf("error while performing post-migration vacuum: %v", err)
 	}

 	return nil

@@ -265,7 +284,7 @@ func registerCustomDriver() {

 	for name, fn := range funcs {
 		if err := conn.RegisterFunc(name, fn, true); err != nil {
-			return fmt.Errorf("Error registering function %s: %s", name, err.Error())
+			return fmt.Errorf("error registering function %s: %s", name, err.Error())
 		}
 	}

@@ -279,7 +298,7 @@ func registerCustomDriver() {
 	})

 	if err != nil {
-		return fmt.Errorf("Error registering natural sort collation: %s", err.Error())
+		return fmt.Errorf("error registering natural sort collation: %s", err.Error())
 	}

 	return nil
pkg/database/migrations/26_tag_hierarchy.up.sql (new file, 7 lines)

@@ -0,0 +1,7 @@
+CREATE TABLE tags_relations (
+  parent_id integer,
+  child_id integer,
+  primary key (parent_id, child_id),
+  foreign key (parent_id) references tags(id) on delete cascade,
+  foreign key (child_id) references tags(id) on delete cascade
+);

pkg/database/migrations/27_studio_aliases.up.sql (new file, 7 lines)

@@ -0,0 +1,7 @@
+CREATE TABLE `studio_aliases` (
+  `studio_id` integer,
+  `alias` varchar(255) NOT NULL,
+  foreign key(`studio_id`) references `studios`(`id`) on delete CASCADE
+);
+
+CREATE UNIQUE INDEX `studio_aliases_alias_unique` on `studio_aliases` (`alias`);

pkg/database/migrations/28_images_indexes.up.sql (new file, 3 lines)

@@ -0,0 +1,3 @@
+DROP INDEX IF EXISTS `images_path_unique`;
+
+CREATE UNIQUE INDEX `images_path_unique` ON `images` (`path`);
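Reviewer note: tags_relations stores only direct parent/child edges, so hierarchy lookups need a recursive walk. A self-contained sketch of finding all descendants of a tag with a recursive CTE (schema copied from the migration, minus the tags foreign keys; data and query are illustrative, not stash's actual hierarchy queries):

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/mattn/go-sqlite3"
)

func main() {
	db, err := sql.Open("sqlite3", ":memory:")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if _, err := db.Exec(`CREATE TABLE tags_relations (
		parent_id integer,
		child_id integer,
		primary key (parent_id, child_id))`); err != nil {
		log.Fatal(err)
	}
	// edges: 1 -> 2 -> 3
	if _, err := db.Exec(`INSERT INTO tags_relations VALUES (1, 2), (2, 3)`); err != nil {
		log.Fatal(err)
	}

	rows, err := db.Query(`
		WITH RECURSIVE descendants(id) AS (
			SELECT child_id FROM tags_relations WHERE parent_id = ?
			UNION
			SELECT tr.child_id FROM tags_relations tr JOIN descendants d ON tr.parent_id = d.id
		)
		SELECT id FROM descendants`, 1)
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()

	for rows.Next() {
		var id int
		if err := rows.Scan(&id); err != nil {
			log.Fatal(err)
		}
		fmt.Println(id) // prints 2, then 3
	}
}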
@@ -1,92 +0,0 @@
-package database
-
-import (
-	"bytes"
-	"fmt"
-	"io"
-	"io/ioutil"
-	"os"
-
-	"github.com/gobuffalo/packr/v2"
-	"github.com/golang-migrate/migrate/v4"
-	"github.com/golang-migrate/migrate/v4/source"
-)
-
-type Packr2Source struct {
-	Box        *packr.Box
-	Migrations *source.Migrations
-}
-
-func init() {
-	source.Register("packr2", &Packr2Source{})
-}
-
-func WithInstance(instance *Packr2Source) (source.Driver, error) {
-	for _, fi := range instance.Box.List() {
-		m, err := source.DefaultParse(fi)
-		if err != nil {
-			continue // ignore files that we can't parse
-		}
-
-		if !instance.Migrations.Append(m) {
-			return nil, fmt.Errorf("unable to parse file %v", fi)
-		}
-	}
-
-	return instance, nil
-}
-
-func (s *Packr2Source) Open(url string) (source.Driver, error) {
-	return nil, fmt.Errorf("not implemented")
-}
-
-func (s *Packr2Source) Close() error {
-	s.Migrations = nil
-	return nil
-}
-
-func (s *Packr2Source) First() (version uint, err error) {
-	if v, ok := s.Migrations.First(); !ok {
-		return 0, os.ErrNotExist
-	} else {
-		return v, nil
-	}
-}
-
-func (s *Packr2Source) Prev(version uint) (prevVersion uint, err error) {
-	if v, ok := s.Migrations.Prev(version); !ok {
-		return 0, os.ErrNotExist
-	} else {
-		return v, nil
-	}
-}
-
-func (s *Packr2Source) Next(version uint) (nextVersion uint, err error) {
-	if v, ok := s.Migrations.Next(version); !ok {
-		return 0, os.ErrNotExist
-	} else {
-		return v, nil
-	}
-}
-
-func (s *Packr2Source) ReadUp(version uint) (r io.ReadCloser, identifier string, err error) {
-	if migration, ok := s.Migrations.Up(version); !ok {
-		return nil, "", os.ErrNotExist
-	} else {
-		b, _ := s.Box.Find(migration.Raw)
-		return ioutil.NopCloser(bytes.NewBuffer(b)),
-			migration.Identifier,
-			nil
-	}
-}
-
-func (s *Packr2Source) ReadDown(version uint) (r io.ReadCloser, identifier string, err error) {
-	if migration, ok := s.Migrations.Down(version); !ok {
-		return nil, "", migrate.ErrNilVersion
-	} else {
-		b, _ := s.Box.Find(migration.Raw)
-		return ioutil.NopCloser(bytes.NewBuffer(b)),
-			migration.Identifier,
-			nil
-	}
-}
@@ -4,6 +4,7 @@ import (
 	"context"

 	"github.com/jmoiron/sqlx"
+	"github.com/stashapp/stash/pkg/logger"
 )

 // WithTxn executes the provided function within a transaction. It rolls back

@@ -17,11 +18,15 @@ func WithTxn(fn func(tx *sqlx.Tx) error) error {
 	defer func() {
 		if p := recover(); p != nil {
 			// a panic occurred, rollback and repanic
-			tx.Rollback()
+			if err := tx.Rollback(); err != nil {
+				logger.Warnf("failure when performing transaction rollback: %v", err)
+			}
 			panic(p)
 		} else if err != nil {
 			// something went wrong, rollback
-			tx.Rollback()
+			if err := tx.Rollback(); err != nil {
+				logger.Warnf("failure when performing transaction rollback: %v", err)
+			}
 		} else {
 			// all good, commit
 			err = tx.Commit()
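Reviewer note: rollback failures are now logged without masking the original error or panic. The same commit/rollback skeleton in a self-contained form on database/sql (names illustrative):

package main

import (
	"database/sql"
	"log"

	_ "github.com/mattn/go-sqlite3"
)

// withTxn mirrors WithTxn above: rollback on error or panic, logging any
// rollback failure, commit otherwise. The named return lets the deferred
// func see and set the final error.
func withTxn(db *sql.DB, fn func(tx *sql.Tx) error) (err error) {
	tx, err := db.Begin()
	if err != nil {
		return err
	}

	defer func() {
		if p := recover(); p != nil {
			if rbErr := tx.Rollback(); rbErr != nil {
				log.Printf("failure when performing transaction rollback: %v", rbErr)
			}
			panic(p)
		} else if err != nil {
			if rbErr := tx.Rollback(); rbErr != nil {
				log.Printf("failure when performing transaction rollback: %v", rbErr)
			}
		} else {
			err = tx.Commit()
		}
	}()

	return fn(tx)
}

func main() {
	db, err := sql.Open("sqlite3", ":memory:")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	err = withTxn(db, func(tx *sql.Tx) error {
		_, err := tx.Exec("CREATE TABLE t (id integer)")
		return err
	})
	log.Println(err) // <nil>
}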
@@ -533,7 +533,6 @@ func (me *contentDirectoryService) getStudioScenes(paths []string, host string)
 		Studios: &models.HierarchicalMultiCriterionInput{
 			Modifier: models.CriterionModifierIncludes,
 			Value:    []string{paths[0]},
-			Depth:    0,
 		},
 	}

@@ -570,7 +569,7 @@ func (me *contentDirectoryService) getTags() []interface{} {

 func (me *contentDirectoryService) getTagScenes(paths []string, host string) []interface{} {
 	sceneFilter := &models.SceneFilterType{
-		Tags: &models.MultiCriterionInput{
+		Tags: &models.HierarchicalMultiCriterionInput{
 			Modifier: models.CriterionModifierIncludes,
 			Value:    []string{paths[0]},
 		},
@@ -33,7 +33,6 @@ import (
 	"encoding/xml"
 	"fmt"
 	"io"
-	"io/ioutil"
 	"net"
 	"net/http"
 	"net/http/pprof"

@@ -58,7 +57,6 @@ const (
 	resPath                     = "/res"
 	iconPath                    = "/icon"
 	rootDescPath                = "/rootDesc.xml"
 	contentDirectorySCPDURL     = "/scpd/ContentDirectory.xml"
 	contentDirectoryEventSubURL = "/evt/ContentDirectory"
 	serviceControlURL           = "/ctl"
 	deviceIconPath              = "/deviceIcon"

@@ -417,7 +415,7 @@ func (me *Server) serveIcon(w http.ResponseWriter, r *http.Request) {
 	}

 	var scene *models.Scene
-	me.txnManager.WithReadTxn(context.Background(), func(r models.ReaderRepository) error {
+	err := me.txnManager.WithReadTxn(context.Background(), func(r models.ReaderRepository) error {
 		idInt, err := strconv.Atoi(sceneId)
 		if err != nil {
 			return nil

@@ -425,6 +423,9 @@ func (me *Server) serveIcon(w http.ResponseWriter, r *http.Request) {
 		scene, _ = r.Scene().Find(idInt)
 		return nil
 	})
+	if err != nil {
+		logger.Warnf("failed to execute read transaction while trying to serve an icon: %v", err)
+	}

 	if scene == nil {
 		return

@@ -481,7 +482,7 @@ func (me *Server) contentDirectoryInitialEvent(urls []*url.URL, sid string) {
 			logger.Errorf("Could not notify %s: %s", _url.String(), err)
 			continue
 		}
-		b, _ := ioutil.ReadAll(resp.Body)
+		b, _ := io.ReadAll(resp.Body)
 		if len(b) > 0 {
 			logger.Debug(string(b))
 		}

@@ -553,7 +554,7 @@ func (me *Server) initMux(mux *http.ServeMux) {
 	mux.HandleFunc(resPath, func(w http.ResponseWriter, r *http.Request) {
 		sceneId := r.URL.Query().Get("scene")
 		var scene *models.Scene
-		me.txnManager.WithReadTxn(context.Background(), func(r models.ReaderRepository) error {
+		err := me.txnManager.WithReadTxn(context.Background(), func(r models.ReaderRepository) error {
 			sceneIdInt, err := strconv.Atoi(sceneId)
 			if err != nil {
 				return nil

@@ -561,6 +562,9 @@ func (me *Server) initMux(mux *http.ServeMux) {
 			scene, _ = r.Scene().Find(sceneIdInt)
 			return nil
 		})
+		if err != nil {
+			logger.Warnf("failed to execute read transaction for scene id (%v): %v", sceneId, err)
+		}

 		if scene == nil {
 			return

@@ -572,7 +576,9 @@ func (me *Server) initMux(mux *http.ServeMux) {
 		w.Header().Set("content-type", `text/xml; charset="utf-8"`)
 		w.Header().Set("content-length", fmt.Sprint(len(me.rootDescXML)))
 		w.Header().Set("server", serverField)
-		w.Write(me.rootDescXML)
+		if k, err := w.Write(me.rootDescXML); err != nil {
+			logger.Warnf("could not write rootDescXML (wrote %v bytes of %v): %v", k, len(me.rootDescXML), err)
+		}
 	})
 	handleSCPDs(mux)
 	mux.HandleFunc(serviceControlURL, me.serviceControlHandler)
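Reviewer note: http.ResponseWriter.Write returns the byte count alongside the error, which the rootDesc handler now surfaces instead of discarding. A tiny self-contained demonstration using httptest:

package main

import (
	"fmt"
	"log"
	"net/http/httptest"
)

func main() {
	payload := []byte("<root/>")
	w := httptest.NewRecorder()

	// log short or failed writes rather than ignoring the result
	if k, err := w.Write(payload); err != nil {
		log.Printf("could not write payload (wrote %v bytes of %v): %v", k, len(payload), err)
	}

	fmt.Println(w.Body.String()) // <root/>
}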
@@ -251,7 +251,9 @@ func (s *Service) Stop(duration *time.Duration) {

 	if s.startTimer == nil {
 		s.startTimer = time.AfterFunc(*duration, func() {
-			s.Start(nil)
+			if err := s.Start(nil); err != nil {
+				logger.Warnf("error restarting DLNA server: %v", err)
+			}
 		})
 		t := time.Now().Add(*duration)
 		s.startTime = &t
@@ -2,7 +2,7 @@ package ffmpeg

 import (
 	"bytes"
-	"io/ioutil"
+	"io"
 	"os"
 	"os/exec"
 	"strings"

@@ -63,7 +63,9 @@ func KillRunningEncoders(path string) {
 	for _, process := range processes {
 		// assume it worked, don't check for error
 		logger.Infof("Killing encoder process for file: %s", path)
-		process.Kill()
+		if err := process.Kill(); err != nil {
+			logger.Warnf("failed to kill process %v: %v", process.Pid, err)
+		}

 		// wait for the process to die before returning
 		// don't wait more than a few seconds

@@ -124,7 +126,7 @@ func (e *Encoder) runTranscode(probeResult VideoFile, args []string) (string, er
 		}
 	}

-	stdoutData, _ := ioutil.ReadAll(stdout)
+	stdoutData, _ := io.ReadAll(stdout)
 	stdoutString := string(stdoutData)

 	registerRunningEncoder(probeResult.Path, cmd.Process)
@@ -236,7 +236,7 @@ func NewVideoFile(ffprobePath string, videoPath string, stripExt bool) (*VideoFi

 	probeJSON := &FFProbeJSON{}
 	if err := json.Unmarshal(out, probeJSON); err != nil {
-		return nil, fmt.Errorf("Error unmarshalling video data for <%s>: %s", videoPath, err.Error())
+		return nil, fmt.Errorf("error unmarshalling video data for <%s>: %s", videoPath, err.Error())
 	}

 	return parse(videoPath, probeJSON, stripExt)

@@ -273,8 +273,9 @@ func parse(filePath string, probeJSON *FFProbeJSON, stripExt bool) (*VideoFile,
 	result.Duration = math.Round(duration*100) / 100
 	fileStat, err := os.Stat(filePath)
 	if err != nil {
-		logger.Errorf("Error statting file <%s>: %s", filePath, err.Error())
-		return nil, err
+		statErr := fmt.Errorf("error statting file <%s>: %w", filePath, err)
+		logger.Errorf("%v", statErr)
+		return nil, statErr
 	}
 	result.Size = fileStat.Size()
 	result.StartTime, _ = strconv.ParseFloat(probeJSON.Format.StartTime, 64)
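Reviewer note: the stat failure is now wrapped with %w rather than flattened with %s, so callers can still test for the underlying cause. A quick self-contained check:

package main

import (
	"errors"
	"fmt"
	"io/fs"
	"os"
)

func main() {
	_, err := os.Stat("/definitely/not/there")

	// %w keeps the original error in the chain; %s would have lost it
	statErr := fmt.Errorf("error statting file </definitely/not/there>: %w", err)

	fmt.Println(errors.Is(statErr, fs.ErrNotExist)) // true
}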
@@ -21,9 +21,8 @@ func WriteHLSPlaylist(probeResult VideoFile, baseUrl string, w io.Writer) {
 	leftover := duration
 	upTo := 0.0

-	tsURL := baseUrl
 	i := strings.LastIndex(baseUrl, ".m3u8")
-	tsURL = baseUrl[0:i] + ".ts"
+	tsURL := baseUrl[0:i] + ".ts"

 	for leftover > 0 {
 		thisLength := hlsSegmentLength
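Reviewer note: the playlist change drops the dead tsURL := baseUrl assignment; the .ts URL is now built in one step. For reference, the derivation on its own:

package main

import (
	"fmt"
	"strings"
)

func main() {
	baseUrl := "http://host/scene/42/stream.m3u8"

	// cut at the ".m3u8" suffix and splice in ".ts", as WriteHLSPlaylist does
	i := strings.LastIndex(baseUrl, ".m3u8")
	tsURL := baseUrl[0:i] + ".ts"

	fmt.Println(tsURL) // http://host/scene/42/stream.ts
}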
@@ -2,7 +2,6 @@ package ffmpeg

 import (
 	"io"
-	"io/ioutil"
 	"net/http"
 	"os"
 	"os/exec"

@@ -32,7 +31,9 @@ func (s *Stream) Serve(w http.ResponseWriter, r *http.Request) {
 	notify := r.Context().Done()
 	go func() {
 		<-notify
-		s.Process.Kill()
+		if err := s.Process.Kill(); err != nil {
+			logger.Warnf("unable to kill os process %v: %v", s.Process.Pid, err)
+		}
 	}()

 	_, err := io.Copy(w, s.Stdout)

@@ -224,11 +225,15 @@ func (e *Encoder) stream(probeResult VideoFile, options TranscodeStreamOptions)
 	}

 	registerRunningEncoder(probeResult.Path, cmd.Process)
-	go waitAndDeregister(probeResult.Path, cmd)
+	go func() {
+		if err := waitAndDeregister(probeResult.Path, cmd); err != nil {
+			logger.Warnf("Error while deregistering ffmpeg stream: %v", err)
+		}
+	}()

 	// stderr must be consumed or the process deadlocks
 	go func() {
-		stderrData, _ := ioutil.ReadAll(stderr)
+		stderrData, _ := io.ReadAll(stderr)
 		stderrString := string(stderrData)
 		if len(stderrString) > 0 {
 			logger.Debugf("[stream] ffmpeg stderr: %s", stderrString)
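Reviewer note: the "stderr must be consumed" comment above is load-bearing: if the pipe is never drained, the child can block writing to it and cmd.Wait deadlocks. A self-contained sketch of the drain pattern (using sh in place of ffmpeg):

package main

import (
	"fmt"
	"io"
	"log"
	"os/exec"
)

func main() {
	cmd := exec.Command("sh", "-c", "echo out; echo err 1>&2")

	stdout, err := cmd.StdoutPipe()
	if err != nil {
		log.Fatal(err)
	}
	stderr, err := cmd.StderrPipe()
	if err != nil {
		log.Fatal(err)
	}

	if err := cmd.Start(); err != nil {
		log.Fatal(err)
	}

	// drain stderr concurrently so the child never blocks on a full pipe
	errCh := make(chan string, 1)
	go func() {
		b, _ := io.ReadAll(stderr)
		errCh <- string(b)
	}()

	outData, _ := io.ReadAll(stdout)
	stderrString := <-errCh

	if err := cmd.Wait(); err != nil {
		log.Fatal(err)
	}

	fmt.Printf("stdout: %q, stderr: %q\n", outData, stderrString)
}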
Some files were not shown because too many files have changed in this diff.