diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 8c2cf3452..d7e3ac4fb 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -47,15 +47,18 @@ jobs: - name: Cache go build uses: actions/cache@v2 env: - cache-name: cache-go-cache + # increment the number suffix to bump the cache + cache-name: cache-go-cache-1 with: path: .go-cache - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/go.sum') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('go.mod', '**/go.sum') }} - name: Start build container + env: + official-build: ${{ (github.event_name == 'push' && github.ref == 'refs/heads/develop') || (github.event_name == 'release' && github.ref != 'refs/tags/latest_develop') }} run: | mkdir -p .go-cache - docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated -w /stash $COMPILER_IMAGE tail -f /dev/null + docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated --env OFFICIAL_BUILD=${{ env.official-build }} -w /stash $COMPILER_IMAGE tail -f /dev/null - name: Pre-install run: docker exec -t build /bin/bash -c "make pre-ui" diff --git a/.golangci.yml b/.golangci.yml index ca296c1dd..ad200b40b 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -17,30 +17,35 @@ linters: - typecheck - unused - varcheck - # Linters added by the stash project - # - bodyclose + # Linters added by the stash project. 
- dogsled - # - errorlint + - errorlint # - exhaustive - exportloopref - # - goconst - # - gocritic + - gocritic # - goerr113 - gofmt - # - gosec + # - gomnd # - ifshort - misspell # - nakedret - # - noctx - # - paralleltest + - noctx - revive - rowserrcheck - sqlclosecheck +# Project-specific linter overrides linters-settings: gofmt: simplify: false + errorlint: + # Disable errorf because there are false positives, where you don't want to wrap + # an error. + errorf: false + asserts: true + comparison: true + revive: ignore-generated-header: true severity: error @@ -79,4 +84,8 @@ linters-settings: - name: unused-parameter disabled: true - name: unreachable-code - - name: redefines-builtin-id \ No newline at end of file + - name: redefines-builtin-id + + rowserrcheck: + packages: + - github.com/jmoiron/sqlx diff --git a/.goreleaser.yml b/.goreleaser.yml deleted file mode 100644 index 7c10ffeb8..000000000 --- a/.goreleaser.yml +++ /dev/null @@ -1,76 +0,0 @@ -project_name: stash -before: - hooks: - - go mod download -builds: - - binary: stash-win - ldflags: - - "-extldflags '-static'" - env: - - CGO_ENABLED=1 - - CC=x86_64-w64-mingw32-gcc - - CXX=x86_64-w64-mingw32-g++ - flags: - - -tags - - extended - goos: - - windows - goarch: - - amd64 - - binary: stash-osx - env: - - CGO_ENABLED=1 - - CC=o64-clang - - CXX=o64-clang++ - flags: - - -tags - - extended - goos: - - darwin - goarch: - - amd64 - - binary: stash-osx-applesilicon - env: - - CGO_ENABLED=1 - - CC=oa64-clang - - CXX=oa64-clang++ - flags: - - -tags - - extended - goos: - - darwin - goarch: - - arm64 - - binary: stash-linux - env: - - CGO_ENABLED=1 - flags: - - -tags - - extended - goos: - - linux - goarch: - - amd64 -archive: - format: tar.gz - format_overrides: - - goos: windows - format: zip - name_template: "{{.ProjectName}}_{{.Version}}_{{.Os}}-{{.Arch}}" - replacements: - amd64: 64bit - 386: 32bit - arm: ARM - arm64: ARM64 - darwin: macOS - linux: Linux - windows: Windows - openbsd: OpenBSD - netbsd: 
NetBSD - freebsd: FreeBSD - dragonfly: DragonFlyBSD - files: - - README.md - - LICENSE -release: - draft: true \ No newline at end of file diff --git a/.travis.yml.disabled b/.travis.yml.disabled deleted file mode 100644 index 91a12196f..000000000 --- a/.travis.yml.disabled +++ /dev/null @@ -1,117 +0,0 @@ -if: tag != latest_develop # dont build for the latest_develop tagged version - -dist: xenial -git: - depth: false -language: go -go: -- 1.17.x -services: -- docker -before_install: - - set -e - # Configure environment so changes are picked up when the Docker daemon is restarted after upgrading - - echo '{"experimental":true}' | sudo tee /etc/docker/daemon.json - - export DOCKER_CLI_EXPERIMENTAL=enabled - # Upgrade to Docker CE 19.03 for BuildKit support - - curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add - - - sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" - - sudo apt-get update - - sudo apt-get -y -o Dpkg::Options::="--force-confnew" install docker-ce - # install binfmt docker container, this container uses qemu to run arm programs transparently allowng docker to build arm 6,7,8 containers. 
- - docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64 - # Show info to simplify debugging and create a builder that can build the platforms we need - - docker info - - docker buildx create --name builder --use - - docker buildx inspect --bootstrap - - docker buildx ls - -install: -- echo -e "machine github.com\n login $CI_USER_TOKEN" > ~/.netrc -- nvm install 12 -- travis_retry make pre-ui -- make generate -- CI=false make ui-validate ui-only -#- go get -v github.com/mgechev/revive -script: -# left lint off to avoid getting extra dependency -#- make lint -- make fmt-check vet it -after_success: -- docker pull stashapp/compiler:5 -- sh ./scripts/cross-compile.sh -- git describe --tags --exclude latest_develop | tee CHECKSUMS_SHA1 -- sha1sum dist/stash-* | sed 's/dist\///g' | tee -a CHECKSUMS_SHA1 -- 'if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then sh ./scripts/upload-pull-request.sh; fi' -before_deploy: -# push the latest tag when on the develop branch -- if [ "$TRAVIS_BRANCH" = "develop" ]; then git tag -f latest_develop; git push -f --tags; fi -- export RELEASE_DATE=$(date +'%Y-%m-%d %H:%M:%S %Z') -- export STASH_VERSION=$(git describe --tags --exclude latest_develop) -# set TRAVIS_TAG explcitly to the version so that it doesn't pick up latest_develop -- if [ "$TRAVIS_BRANCH" = "master" ]; then export TRAVIS_TAG=${STASH_VERSION}; fi -deploy: - # latest develop release - - provider: releases - # use the v2 release provider for proper release note setting - edge: true - api_key: - secure: 
tGJ2q62CfPdayid2qEtW2aGRhMgCl3lBXYYQqp3eH0vFgIIf6cs7IDX7YC/x3XKMEQ/iMLZmtCXZvSTqNrD6Sk7MSnt30GIs+4uxIZDnnd8mV5X3K4n4gjD+NAORc4DrQBvUGrYMKJsR5gtkH0nu6diWb1o1If7OiJEuCPRhrmQYcza7NUdABnA9Z2wn2RNUV9Ga33WUCqLMEU5GtNBlfQPiP/khCQrqn/ocR6wUjYut3J6YagzqH4wsfJi3glHyWtowcNIw1LZi5zFxHD/bRBT4Tln7yypkjWNq9eQILA6i6kRUGf7ggyTx26/k8n4tnu+QD0vVh4EcjlThpU/LGyUXzKrrxjRwaDZnM0oYxg5AfHcBuAiAdo0eWnV3lEWRfTJMIVb9MPf4qDmzR4RREfB5OXOxwq3ODeCcJE8sTIMD/wBPZrlqS/QrRpND2gn2X4snkVukN9t9F4CMTFMtVSzFV7TDJW5E5Lq6VEExulteQhs6kcK9NRPNAaLgRQAw7X9kVWfDtiGUP+fE2i8F9Bo8bm7sOT5O5VPMPykx3EgeNg1IqIgMTCsMlhMJT4xBJoQUgmd2wWyf3Ryw+P+sFgdb5Sd7+lFgJBjMUUoOxMxAOiEgdFvCXcr+/Udyz2RdtetU1/6VzXzLPcKOw0wubZeBkISqu7o9gpfdMP9Eq00= - file: - - dist/stash-osx - - dist/stash-osx-applesilicon - - dist/stash-win.exe - - dist/stash-linux - - dist/stash-linux-arm64v8 - - dist/stash-linux-arm32v7 - - dist/stash-pi - - CHECKSUMS_SHA1 - skip_cleanup: true - overwrite: true - name: "${STASH_VERSION}: Latest development build" - release_notes: "**${RELEASE_DATE}**\n This is always the latest committed version on the develop branch. Use as your own risk!" 
- prerelease: true - on: - repo: stashapp/stash - branch: develop - # docker image build for develop release - - provider: script - skip_cleanup: true - script: bash ./docker/ci/x86_64/docker_push.sh development - on: - repo: stashapp/stash - branch: develop - # official master release - only build when tagged - - provider: releases - api_key: - secure: tGJ2q62CfPdayid2qEtW2aGRhMgCl3lBXYYQqp3eH0vFgIIf6cs7IDX7YC/x3XKMEQ/iMLZmtCXZvSTqNrD6Sk7MSnt30GIs+4uxIZDnnd8mV5X3K4n4gjD+NAORc4DrQBvUGrYMKJsR5gtkH0nu6diWb1o1If7OiJEuCPRhrmQYcza7NUdABnA9Z2wn2RNUV9Ga33WUCqLMEU5GtNBlfQPiP/khCQrqn/ocR6wUjYut3J6YagzqH4wsfJi3glHyWtowcNIw1LZi5zFxHD/bRBT4Tln7yypkjWNq9eQILA6i6kRUGf7ggyTx26/k8n4tnu+QD0vVh4EcjlThpU/LGyUXzKrrxjRwaDZnM0oYxg5AfHcBuAiAdo0eWnV3lEWRfTJMIVb9MPf4qDmzR4RREfB5OXOxwq3ODeCcJE8sTIMD/wBPZrlqS/QrRpND2gn2X4snkVukN9t9F4CMTFMtVSzFV7TDJW5E5Lq6VEExulteQhs6kcK9NRPNAaLgRQAw7X9kVWfDtiGUP+fE2i8F9Bo8bm7sOT5O5VPMPykx3EgeNg1IqIgMTCsMlhMJT4xBJoQUgmd2wWyf3Ryw+P+sFgdb5Sd7+lFgJBjMUUoOxMxAOiEgdFvCXcr+/Udyz2RdtetU1/6VzXzLPcKOw0wubZeBkISqu7o9gpfdMP9Eq00= - file: - - dist/stash-osx - - dist/stash-osx-applesilicon - - dist/stash-win.exe - - dist/stash-linux - - dist/stash-linux-arm64v8 - - dist/stash-linux-arm32v7 - - dist/stash-pi - - CHECKSUMS_SHA1 - # make the release a draft so the maintainers can confirm before releasing - draft: true - skip_cleanup: true - overwrite: true - # don't write the body. To be done manually for now. 
In future we might - # want to generate the changelog or get it from a file - name: ${STASH_VERSION} - on: - repo: stashapp/stash - tags: true - # make sure we don't release using the latest_develop tag - condition: $TRAVIS_TAG != latest_develop - # docker image build for master release - - provider: script - skip_cleanup: true - script: bash ./docker/ci/x86_64/docker_push.sh latest - on: - repo: stashapp/stash - tags: true - # make sure we don't release using the latest_develop tag - condition: $TRAVIS_TAG != latest_develop diff --git a/Makefile b/Makefile index 0fe2eedbc..3f69e6085 100644 --- a/Makefile +++ b/Makefile @@ -41,8 +41,13 @@ ifndef STASH_VERSION $(eval STASH_VERSION := $(shell git describe --tags --exclude latest_develop)) endif +ifndef OFFICIAL_BUILD + $(eval OFFICIAL_BUILD := false) +endif + build: pre-build $(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/pkg/api.version=$(STASH_VERSION)' -X 'github.com/stashapp/stash/pkg/api.buildstamp=$(BUILD_DATE)' -X 'github.com/stashapp/stash/pkg/api.githash=$(GITHASH)') + $(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/pkg/api.officialBuild=$(OFFICIAL_BUILD)') go build $(OUTPUT) -mod=vendor -v -tags "sqlite_omit_load_extension osusergo netgo" $(GO_BUILD_FLAGS) -ldflags "$(LDFLAGS) $(EXTRA_LDFLAGS)" # strips debug symbols from the release build @@ -195,5 +200,5 @@ validate-backend: lint it # locally builds and tags a 'stash/build' docker image .PHONY: docker-build -docker-build: - docker build -t stash/build -f docker/build/x86_64/Dockerfile . +docker-build: pre-build + docker build --build-arg GITHASH=$(GITHASH) --build-arg STASH_VERSION=$(STASH_VERSION) -t stash/build -f docker/build/x86_64/Dockerfile . 
diff --git a/README.md b/README.md index 16ee10641..1a42bc4e1 100644 --- a/README.md +++ b/README.md @@ -1,83 +1,46 @@ # Stash +https://stashapp.cc +[![Build](https://github.com/stashapp/stash/actions/workflows/build.yml/badge.svg?branch=develop&event=push)](https://github.com/stashapp/stash/actions/workflows/build.yml) +[![Docker pulls](https://img.shields.io/docker/pulls/stashapp/stash.svg)](https://hub.docker.com/r/stashapp/Stash 'DockerHub') [![Go Report Card](https://goreportcard.com/badge/github.com/stashapp/stash)](https://goreportcard.com/report/github.com/stashapp/stash) [![Discord](https://img.shields.io/discord/559159668438728723.svg?logo=discord)](https://discord.gg/2TsNFKt) -https://stashapp.cc +### **Stash is a self-hosted webapp written in Go which organizes and serves your porn.** +![demo image](docs/readme_assets/demo_image.png) -**Stash is a locally hosted web-based app written in Go which organizes and serves your porn.** - -* It can gather information about videos in your collection from the internet, and is extensible through the use of community-built plugins for a large number of content producers. -* It supports a wide variety of both video and image formats. +* Stash gathers information about videos in your collection from the internet, and is extensible through the use of community-built plugins for a large number of content producers and sites. +* Stash supports a wide variety of both video and image formats. * You can tag videos and find them later. -* It provides statistics about performers, tags, studios and other things. +* Stash provides statistics about performers, tags, studios and more. You can [watch a SFW demo video](https://vimeo.com/545323354) to see it in action. For further information you can [read the in-app manual](ui/v2.5/src/docs/en). 
-# Installing stash +# Installing Stash -## via Docker + Windows | MacOS| Linux | Docker +:---:|:---:|:---:|:---: +[Latest Release](https://github.com/stashapp/stash/releases/latest/download/stash-win.exe)
[Development Preview](https://github.com/stashapp/stash/releases/download/latest_develop/stash-win.exe) | [Latest Release (Apple Silicon)](https://github.com/stashapp/stash/releases/latest/download/stash-osx-applesilicon)
[Development Preview (Apple Silicon)](https://github.com/stashapp/stash/releases/download/latest_develop/stash-osx-applesilicon)
[Latest Release (Intel)](https://github.com/stashapp/stash/releases/latest/download/stash-osx)
[Development Preview (Intel)](https://github.com/stashapp/stash/releases/download/latest_develop/stash-osx) | [Latest Release (amd64)](https://github.com/stashapp/stash/releases/latest/download/stash-linux)
[Development Preview (amd64)](https://github.com/stashapp/stash/releases/download/latest_develop/stash-linux)
[More Architectures...](https://github.com/stashapp/stash/releases/latest) | [Instructions](docker/production/README.md)
[Sample docker-compose.yml](docker/production/docker-compose.yml) -Follow [this README.md in the docker directory.](docker/production/README.md) - -## Pre-Compiled Binaries - -The Stash server runs on macOS, Windows, and Linux. Download the [latest release here](https://github.com/stashapp/stash/releases). - -Run the executable (double click the exe on windows or run `./stash-osx` / `./stash-linux` from the terminal on macOS / Linux) and navigate to either https://localhost:9999 or http://localhost:9999 to get started. +## Getting Started +Run the executable (double click the exe on windows or run `./stash-osx` / `./stash-linux` from the terminal on macOS / Linux) to get started. *Note for Windows users:* Running the app might present a security prompt since the binary isn't yet signed. Bypass this by clicking "more info" and then the "run anyway" button. #### FFMPEG - -If stash is unable to find or download FFMPEG then download it yourself from the link for your platform: - -* [macOS ffmpeg](https://evermeet.cx/ffmpeg/ffmpeg-4.3.1.zip), [macOS ffprobe](https://evermeet.cx/ffmpeg/ffprobe-4.3.1.zip) -* [Windows](https://www.gyan.dev/ffmpeg/builds/ffmpeg-release-essentials.zip) -* [Linux](https://www.johnvansickle.com/ffmpeg/) - -The `ffmpeg(.exe)` and `ffprobe(.exe)` files should be placed in `~/.stash` on macOS / Linux or `C:\Users\YourUsername\.stash` on Windows. +Stash requires ffmpeg. If you don't have it installed, Stash will download a copy for you. It is recommended that Linux users install `ffmpeg` from their distro's package manager. # Usage ## Quickstart Guide -1) Download and install Stash and its dependencies -2) Run Stash. It will prompt you for some configuration options and a directory to index (you can also do this step afterward) -3) After configuration, launch your web browser and navigate to the URL shown within the Stash app. +Download and run Stash. 
It will prompt you for some configuration options and a directory to index (you can also do this step afterward) -**Note that Stash does not currently retrieve and organize information about your entire library automatically.** You will need to help it along through the use of [scrapers](blob/develop/ui/v2.5/src/docs/en/Scraping.md). The Stash community has developed scrapers for many popular data sources which can be downloaded and installed from [this repository](https://github.com/stashapp/CommunityScrapers). +**If you'd like to automatically retrieve and organize information about your entire library,** You will need to download some [scrapers](https://github.com/stashapp/stash/tree/develop/ui/v2.5/src/docs/en/Scraping.md). The Stash community has developed scrapers for many popular data sources which can be downloaded and installed from [this repository](https://github.com/stashapp/CommunityScrapers). The simplest way to tag a large number of files is by using the [Tagger](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/docs/en/Tagger.md) which uses filename keywords to help identify the file and pull in scene and performer information from our stash-box database. Note that this data source is not comprehensive and you may need to use the scrapers to identify some of your media. -## CLI - -Stash runs as a command-line app and local web server. There are some command-line options available, which you can see by running `stash --help`. - -For example, to run stash locally on port 80 run it like this (OSX / Linux) `stash --host 127.0.0.1 --port 80` - -## SSL (HTTPS) - -Stash can run over HTTPS with some additional work. First you must generate a SSL certificate and key combo. 
Here is an example using openssl: - -`openssl req -x509 -newkey rsa:4096 -sha256 -days 7300 -nodes -keyout stash.key -out stash.crt -extensions san -config <(echo "[req]"; echo distinguished_name=req; echo "[san]"; echo subjectAltName=DNS:stash.server,IP:127.0.0.1) -subj /CN=stash.server` - -This command would need customizing for your environment. [This link](https://stackoverflow.com/questions/10175812/how-to-create-a-self-signed-certificate-with-openssl) might be useful. - -Once you have a certificate and key file name them `stash.crt` and `stash.key` and place them in the same directory as the `config.yml` file, or the `~/.stash` directory. Stash detects these and starts up using HTTPS rather than HTTP. - -## Basepath rewriting - -The basepath defaults to `/`. When running stash via a reverse proxy in a subpath, the basepath can be changed by having the reverse proxy pass `X-Forwarded-Prefix` (and optionally `X-Forwarded-Port`) headers. When detects these headers, it alters the basepath URL of the UI. - -# Customization - -## Themes and CSS Customization -There is a [directory of community-created themes](https://github.com/stashapp/stash/wiki/Themes) on our Wiki, along with instructions on how to install them. - -You can also make Stash interface fit your desired style with [Custom CSS snippets](https://github.com/stashapp/stash/wiki/Custom-CSS-snippets) and [CSS Tweaks](https://github.com/stashapp/stash/wiki/CSS-Tweaks). - # Support (FAQ) Answers to other Frequently Asked Questions can be found [on our Wiki](https://github.com/stashapp/stash/wiki/FAQ) @@ -85,73 +48,18 @@ Answers to other Frequently Asked Questions can be found [on our Wiki](https://g For issues not addressed there, there are a few options. 
* Read the [Wiki](https://github.com/stashapp/stash/wiki) -* Check the in-app documentation (also available [here](https://github.com/stashapp/stash/tree/develop/ui/v2.5/src/docs/en) +* Check the in-app documentation, in the top right corner of the app (also available [here](https://github.com/stashapp/stash/tree/develop/ui/v2.5/src/docs/en) * Join the [Discord server](https://discord.gg/2TsNFKt), where the community can offer support. -# Compiling From Source Code +# Customization -## Pre-requisites +## Themes and CSS Customization +There is a [directory of community-created themes](https://github.com/stashapp/stash/wiki/Themes) on our Wiki, along with instructions on how to install them. -* [Go](https://golang.org/dl/) -* [GolangCI](https://golangci-lint.run/) - A meta-linter which runs several linters in parallel - * To install, follow the [local installation instructions](https://golangci-lint.run/usage/install/#local-installation) -* [Yarn](https://yarnpkg.com/en/docs/install) - Yarn package manager - * Run `yarn install --frozen-lockfile` in the `stash/ui/v2.5` folder (before running make generate for first time). +You can also make Stash interface fit your desired style with [Custom CSS snippets](https://github.com/stashapp/stash/wiki/Custom-CSS-snippets). -NOTE: You may need to run the `go get` commands outside the project directory to avoid modifying the projects module file. +# For Developers -## Environment +Pull requests are welcome! -### macOS - -TODO - -### Windows - -1. Download and install [Go for Windows](https://golang.org/dl/) -2. Download and install [MingW](https://sourceforge.net/projects/mingw-w64/) -3. Search for "advanced system settings" and open the system properties dialog. - 1. Click the `Environment Variables` button - 2. Under system variables find the `Path`. Edit and add `C:\Program Files\mingw-w64\*\mingw64\bin` (replace * with the correct path). - -NOTE: The `make` command in Windows will be `mingw32-make` with MingW. 
- -## Commands - -* `make generate` - Generate Go and UI GraphQL files -* `make build` - Builds the binary (make sure to build the UI as well... see below) -* `make docker-build` - Locally builds and tags a complete 'stash/build' docker image -* `make pre-ui` - Installs the UI dependencies. Only needs to be run once before building the UI for the first time, or if the dependencies are updated -* `make fmt-ui` - Formats the UI source code -* `make ui` - Builds the frontend -* `make lint` - Run the linter on the backend -* `make fmt` - Run `go fmt` -* `make it` - Run the unit and integration tests -* `make validate` - Run all of the tests and checks required to submit a PR -* `make ui-start` - Runs the UI in development mode. Requires a running stash server to connect to. Stash port can be changed from the default of `9999` with environment variable `REACT_APP_PLATFORM_PORT`. - -## Building a release - -1. Run `make generate` to create generated files -2. Run `make ui` to compile the frontend -3. Run `make build` to build the executable for your current platform - -## Cross compiling - -This project uses a modification of the [CI-GoReleaser](https://github.com/bep/dockerfiles/tree/master/ci-goreleaser) docker container to create an environment -where the app can be cross-compiled. This process is kicked off by CI via the `scripts/cross-compile.sh` script. Run the following -command to open a bash shell to the container to poke around: - -`docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -i -t stashappdev/compiler:latest /bin/bash` - -## Profiling - -Stash can be profiled using the `--cpuprofile ` command line flag. 
- -The resulting file can then be used with pprof as follows: - -`go tool pprof ` - -With `graphviz` installed and in the path, a call graph can be generated with: - -`go tool pprof -svg > ` +See [Development](docs/DEVELOPMENT.md) and [Contributing](docs/CONTRIBUTING.md) for information on working with the codebase, getting a local development setup, and contributing changes. diff --git a/docker/build/x86_64/Dockerfile b/docker/build/x86_64/Dockerfile index 2ffcf7050..529315959 100644 --- a/docker/build/x86_64/Dockerfile +++ b/docker/build/x86_64/Dockerfile @@ -1,30 +1,23 @@ -# This dockerfile must be built from the top-level stash directory -# ie from top-level stash: -# docker build -t stash/build -f docker/build/x86_64/Dockerfile . +# This dockerfile should be built with `make docker-build` from the stash root. # Build Frontend FROM node:alpine as frontend -RUN apk add --no-cache make git +RUN apk add --no-cache make ## cache node_modules separately COPY ./ui/v2.5/package.json ./ui/v2.5/yarn.lock /stash/ui/v2.5/ WORKDIR /stash RUN yarn --cwd ui/v2.5 install --frozen-lockfile. 
COPY Makefile /stash/ -COPY ./.git /stash/.git COPY ./graphql /stash/graphql/ COPY ./ui /stash/ui/ RUN make generate-frontend +ARG GITHASH +ARG STASH_VERSION RUN BUILD_DATE=$(date +"%Y-%m-%d %H:%M:%S") make ui # Build Backend FROM golang:1.17-alpine as backend -RUN apk add --no-cache xz make alpine-sdk -## install ffmpeg -WORKDIR / -RUN wget -O /ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz && \ - tar xf /ffmpeg.tar.xz && \ - rm ffmpeg.tar.xz && \ - mv /ffmpeg*/ /ffmpeg/ +RUN apk add --no-cache make alpine-sdk WORKDIR /stash COPY ./go* ./*.go Makefile gqlgen.yml .gqlgenc.yml /stash/ COPY ./scripts /stash/scripts/ @@ -32,12 +25,14 @@ COPY ./vendor /stash/vendor/ COPY ./pkg /stash/pkg/ COPY --from=frontend /stash /stash/ RUN make generate-backend +ARG GITHASH +ARG STASH_VERSION RUN make build # Final Runnable Image FROM alpine:latest -RUN apk add --no-cache ca-certificates vips-tools -COPY --from=backend /stash/stash /ffmpeg/ffmpeg /ffmpeg/ffprobe /usr/bin/ +RUN apk add --no-cache ca-certificates vips-tools ffmpeg +COPY --from=backend /stash/stash /usr/bin/ ENV STASH_CONFIG_FILE=/root/.stash/config.yml EXPOSE 9999 ENTRYPOINT ["stash"] \ No newline at end of file diff --git a/docker/build/x86_64/README.md b/docker/build/x86_64/README.md index 3590c3e59..f21253b0f 100644 --- a/docker/build/x86_64/README.md +++ b/docker/build/x86_64/README.md @@ -1,13 +1,13 @@ # Introduction -This dockerfile is used to build a stash docker container using the current source code. +This dockerfile is used to build a stash docker container using the current source code. This is ideal for testing your current branch in docker. Note that it does not include python, so python-based scrapers will not work in this image. The production docker images distributed by the project contain python and the necessary packages. 
# Building the docker container From the top-level directory (should contain `main.go` file): ``` -docker build -t stash/build -f ./docker/build/x86_64/Dockerfile . +make docker-build ``` diff --git a/docker/ci/x86_64/Dockerfile b/docker/ci/x86_64/Dockerfile index 7e2fd24fa..4a209d96c 100644 --- a/docker/ci/x86_64/Dockerfile +++ b/docker/ci/x86_64/Dockerfile @@ -1,4 +1,4 @@ -FROM --platform=$BUILDPLATFORM ubuntu:20.04 AS prep +FROM --platform=$BUILDPLATFORM alpine:latest AS binary ARG TARGETPLATFORM WORKDIR / COPY stash-* / @@ -8,15 +8,12 @@ RUN if [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then BIN=stash-pi; \ elif [ "$TARGETPLATFORM" = "linux/amd64" ]; then BIN=stash-linux; \ fi; \ mv $BIN /stash -ENV DEBIAN_FRONTEND=noninteractive -RUN apt update && apt install -y python3 python-is-python3 python3-requests python3-requests-toolbelt python3-lxml python3-pip && pip3 install cloudscraper -FROM ubuntu:20.04 as app -run apt update && apt install -y python3 python-is-python3 python3-requests python3-requests-toolbelt python3-lxml python3-mechanicalsoup ffmpeg libvips-tools && rm -rf /var/lib/apt/lists/* -COPY --from=prep /stash /usr/bin/ -COPY --from=prep /usr/local/lib/python3.8/dist-packages /usr/local/lib/python3.8/dist-packages +FROM --platform=$TARGETPLATFORM alpine:latest AS app +COPY --from=binary /stash /usr/bin/ +RUN apk add --no-cache ca-certificates python3 py3-requests py3-requests-toolbelt py3-lxml py3-pip ffmpeg vips-tools && pip install --no-cache-dir mechanicalsoup cloudscraper +RUN ln -s /usr/bin/python3 /usr/bin/python ENV STASH_CONFIG_FILE=/root/.stash/config.yml EXPOSE 9999 CMD ["stash"] - diff --git a/docker/ci/x86_64/docker_push.sh b/docker/ci/x86_64/docker_push.sh index 8d638e0f7..7ca5ff201 100644 --- a/docker/ci/x86_64/docker_push.sh +++ b/docker/ci/x86_64/docker_push.sh @@ -10,5 +10,5 @@ done echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin # must build the image from dist directory -docker buildx build --platform 
linux/amd64,linux/arm64,linux/arm/v7 --push $DOCKER_TAGS -f docker/ci/x86_64/Dockerfile dist/ +docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6 --push $DOCKER_TAGS -f docker/ci/x86_64/Dockerfile dist/ diff --git a/docker/compiler/Dockerfile b/docker/compiler/Dockerfile index 96fc9f161..c7f5e789c 100644 --- a/docker/compiler/Dockerfile +++ b/docker/compiler/Dockerfile @@ -48,10 +48,20 @@ RUN mkdir -p /root/.ssh; \ ssh-keyscan github.com > /root/.ssh/known_hosts; # Notes for self: + +# To test locally: +# make generate +# make ui +# cd docker/compiler +# make build +# docker run -it -v /PATH_TO_STASH:/go/stash stashapp/compiler:latest /bin/bash +# cd stash +# make cross-compile-all +# # binaries will show up in /dist + # Windows: # GOOS=windows GOARCH=amd64 CGO_ENABLED=1 CC=x86_64-w64-mingw32-gcc CXX=x86_64-w64-mingw32-g++ go build -ldflags "-extldflags '-static'" -tags extended - # Darwin # CC=o64-clang CXX=o64-clang++ GOOS=darwin GOARCH=amd64 CGO_ENABLED=1 go build -tags extended # env goreleaser --config=goreleaser-extended.yml --skip-publish --skip-validate --rm-dist --release-notes=temp/0.48-relnotes-ready.md diff --git a/docker/production/README.md b/docker/production/README.md index 40fe157bc..9744e6708 100644 --- a/docker/production/README.md +++ b/docker/production/README.md @@ -7,16 +7,6 @@ Only `docker` and `docker-compose` are required. For the most part your understa Installation instructions are available below, and if your distrobution's repository ships a current version of docker, you may use that. https://docs.docker.com/engine/install/ -### Docker -Docker is effectively a cross-platform software package repository. It allows you to ship an entire environment in what's referred to as a container. Containers are intended to hold everything that is needed to run an application from one place to another, making it easy for everyone along the way to reproduce the environment. 
- -The StashApp docker container ships with everything you need to automatically build and run stash, including ffmpeg. - -### docker-compose -Docker Compose lets you specify how and where to run your containers, and to manage their environment. The docker-compose.yml file in this folder gets you a fully working instance of StashApp exactly as you would need it to have a reasonable instance for testing / developing on. If you are deploying a live instance for production, a reverse proxy (such as NGINX or Traefik) is recommended, but not required. - -The latest version is always recommended. - ### Get the docker-compose.yml file Now you can either navigate to the [docker-compose.yml](https://raw.githubusercontent.com/stashapp/stash/master/docker/production/docker-compose.yml) in the repository, or if you have curl, you can make your Linux console do it for you: @@ -35,3 +25,13 @@ docker-compose up -d Installing StashApp this way will by default bind stash to port 9999. This is available in your web browser locally at http://localhost:9999 or on your network as http://YOUR-LOCAL-IP:9999 Good luck and have fun! + +### Docker +Docker is effectively a cross-platform software package repository. It allows you to ship an entire environment in what's referred to as a container. Containers are intended to hold everything that is needed to run an application from one place to another, making it easy for everyone along the way to reproduce the environment. + +The StashApp docker container ships with everything you need to automatically build and run stash, including ffmpeg. + +### docker-compose +Docker Compose lets you specify how and where to run your containers, and to manage their environment. The docker-compose.yml file in this folder gets you a fully working instance of StashApp exactly as you would need it to have a reasonable instance for testing / developing on. 
If you are deploying a live instance for production, a reverse proxy (such as NGINX or Traefik) is recommended, but not required. + +The latest version is always recommended. diff --git a/docker/production/x86_64/Dockerfile b/docker/production/x86_64/Dockerfile deleted file mode 100644 index 607cd1fbb..000000000 --- a/docker/production/x86_64/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -FROM ubuntu:20.04 as prep -LABEL MAINTAINER="https://discord.gg/2TsNFKt" - -RUN apt-get update && \ - apt-get -y install curl xz-utils && \ - apt-get autoclean -y && \ - rm -rf /var/lib/apt/lists/* -WORKDIR / -SHELL ["/bin/bash", "-o", "pipefail", "-c"] - -# added " to end of stash-linux clause so that it doesn't pick up the arm builds -RUN curl -L -o /stash $(curl -s https://api.github.com/repos/stashapp/stash/releases/latest | awk '/browser_download_url/ && /stash-linux/"' | sed -e 's/.*: "\(.*\)"/\1/') && \ - chmod +x /stash - -RUN curl --http1.1 -o /ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz && \ - tar xf /ffmpeg.tar.xz && \ - rm ffmpeg.tar.xz && \ - mv /ffmpeg*/ /ffmpeg/ - -FROM ubuntu:20.04 as app -RUN apt-get update && apt-get -y install ca-certificates -COPY --from=prep /stash /ffmpeg/ffmpeg /ffmpeg/ffprobe /usr/bin/ - -ENV STASH_CONFIG_FILE=/root/.stash/config.yml - -EXPOSE 9999 -CMD ["stash"] diff --git a/docs/DEVELOPMENT.md b/docs/DEVELOPMENT.md new file mode 100644 index 000000000..e33b96018 --- /dev/null +++ b/docs/DEVELOPMENT.md @@ -0,0 +1,68 @@ +# Building from Source + +## Pre-requisites + +* [Go](https://golang.org/dl/) +* [GolangCI](https://golangci-lint.run/) - A meta-linter which runs several linters in parallel + * To install, follow the [local installation instructions](https://golangci-lint.run/usage/install/#local-installation) +* [Yarn](https://yarnpkg.com/en/docs/install) - Yarn package manager + * Run `yarn install --frozen-lockfile` in the `stash/ui/v2.5` folder (before running make generate for first time). 
+ +NOTE: You may need to run the `go get` commands outside the project directory to avoid modifying the projects module file. + +## Environment + +### Windows + +1. Download and install [Go for Windows](https://golang.org/dl/) +2. Download and install [MingW](https://sourceforge.net/projects/mingw-w64/) +3. Search for "advanced system settings" and open the system properties dialog. + 1. Click the `Environment Variables` button + 2. Under system variables find the `Path`. Edit and add `C:\Program Files\mingw-w64\*\mingw64\bin` (replace * with the correct path). + +NOTE: The `make` command in Windows will be `mingw32-make` with MingW. + +### macOS + +TODO + + +## Commands + +* `make generate` - Generate Go and UI GraphQL files +* `make build` - Builds the binary (make sure to build the UI as well... see below) +* `make docker-build` - Locally builds and tags a complete 'stash/build' docker image +* `make pre-ui` - Installs the UI dependencies. Only needs to be run once before building the UI for the first time, or if the dependencies are updated +* `make fmt-ui` - Formats the UI source code +* `make ui` - Builds the frontend +* `make lint` - Run the linter on the backend +* `make fmt` - Run `go fmt` +* `make it` - Run the unit and integration tests +* `make validate` - Run all of the tests and checks required to submit a PR +* `make ui-start` - Runs the UI in development mode. Requires a running stash server to connect to. Stash port can be changed from the default of `9999` with environment variable `REACT_APP_PLATFORM_PORT`. + +## Building a release + +1. Run `make generate` to create generated files +2. Run `make ui` to compile the frontend +3. Run `make build` to build the executable for your current platform + +## Cross compiling + +This project uses a modification of the [CI-GoReleaser](https://github.com/bep/dockerfiles/tree/master/ci-goreleaser) docker container to create an environment +where the app can be cross-compiled. 
This process is kicked off by CI via the `scripts/cross-compile.sh` script. Run the following +command to open a bash shell to the container to poke around: + +`docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -i -t stashappdev/compiler:latest /bin/bash` + +## Profiling + +Stash can be profiled using the `--cpuprofile ` command line flag. + +The resulting file can then be used with pprof as follows: + +`go tool pprof ` + +With `graphviz` installed and in the path, a call graph can be generated with: + +`go tool pprof -svg > ` diff --git a/docs/readme_assets/demo_image.png b/docs/readme_assets/demo_image.png new file mode 100644 index 000000000..d951d0556 Binary files /dev/null and b/docs/readme_assets/demo_image.png differ diff --git a/docs/readme_assets/docker_logo.svg b/docs/readme_assets/docker_logo.svg new file mode 100644 index 000000000..df60469ef --- /dev/null +++ b/docs/readme_assets/docker_logo.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/docs/readme_assets/linux_logo.svg b/docs/readme_assets/linux_logo.svg new file mode 100644 index 000000000..b6ca3d222 --- /dev/null +++ b/docs/readme_assets/linux_logo.svg @@ -0,0 +1,121 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/readme_assets/mac_logo.svg b/docs/readme_assets/mac_logo.svg new file mode 100644 index 000000000..aaa4004f8 --- /dev/null +++ b/docs/readme_assets/mac_logo.svg @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/docs/readme_assets/windows_logo.svg b/docs/readme_assets/windows_logo.svg new file mode 100644 index 000000000..b66709569 --- /dev/null +++ b/docs/readme_assets/windows_logo.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/go.mod b/go.mod index 
d2cd351a6..f8f4efda0 100644 --- a/go.mod +++ b/go.mod @@ -19,39 +19,42 @@ require ( github.com/h2non/filetype v1.0.8 github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a github.com/jmoiron/sqlx v1.3.1 - github.com/json-iterator/go v1.1.9 + github.com/json-iterator/go v1.1.11 github.com/mattn/go-sqlite3 v1.14.6 github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007 + github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 github.com/remeh/sizedwaitgroup v1.0.0 github.com/robertkrimen/otto v0.0.0-20200922221731-ef014fd054ac github.com/rs/cors v1.6.0 github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f github.com/sirupsen/logrus v1.8.1 - github.com/spf13/afero v1.2.0 // indirect - github.com/spf13/pflag v1.0.3 - github.com/spf13/viper v1.7.0 - github.com/stretchr/testify v1.6.1 - github.com/tidwall/gjson v1.8.1 + github.com/spf13/afero v1.6.0 // indirect + github.com/spf13/pflag v1.0.5 + github.com/spf13/viper v1.9.0 + github.com/stretchr/testify v1.7.0 + github.com/tidwall/gjson v1.9.3 github.com/tidwall/pretty v1.2.0 // indirect - github.com/vektah/gqlparser/v2 v2.0.1 github.com/vektra/mockery/v2 v2.2.1 - golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97 + golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb golang.org/x/net v0.0.0-20210520170846-37e1c6afe023 - golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c + golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b // indirect - golang.org/x/tools v0.1.0 // indirect + golang.org/x/text v0.3.6 + golang.org/x/tools v0.1.5 // indirect gopkg.in/sourcemap.v1 v1.0.5 // indirect gopkg.in/yaml.v2 v2.4.0 ) +require github.com/vektah/gqlparser/v2 v2.0.1 + require ( github.com/agnivade/levenshtein v1.1.0 // indirect github.com/antchfx/xpath v1.1.6 // indirect github.com/chromedp/sysutil v1.0.0 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect github.com/davecgh/go-spew v1.1.1 
// indirect - github.com/fsnotify/fsnotify v1.4.7 // indirect + github.com/fsnotify/fsnotify v1.5.1 // indirect github.com/gobwas/httphead v0.1.0 // indirect github.com/gobwas/pool v0.2.1 // indirect github.com/gobwas/ws v1.1.0-rc.5 // indirect @@ -62,34 +65,33 @@ require ( github.com/hashicorp/hcl v1.0.0 // indirect github.com/inconshreveable/mousetrap v1.0.0 // indirect github.com/josharian/intern v1.0.0 // indirect - github.com/magiconair/properties v1.8.1 // indirect + github.com/magiconair/properties v1.8.5 // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/matryer/moq v0.0.0-20200106131100-75d0ddfc0007 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect - github.com/mitchellh/mapstructure v1.1.2 // indirect + github.com/mitchellh/mapstructure v1.4.2 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.1 // indirect github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 // indirect - github.com/pelletier/go-toml v1.7.0 // indirect + github.com/pelletier/go-toml v1.9.4 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rs/zerolog v1.18.0 // indirect github.com/russross/blackfriday/v2 v2.0.1 // indirect github.com/shurcooL/sanitized_anchor_name v1.0.0 // indirect - github.com/spf13/cast v1.3.0 // indirect + github.com/spf13/cast v1.4.1 // indirect github.com/spf13/cobra v1.0.0 // indirect - github.com/spf13/jwalterweatherman v1.0.0 // indirect + github.com/spf13/jwalterweatherman v1.1.0 // indirect github.com/stretchr/objx v0.2.0 // indirect github.com/subosito/gotenv v1.2.0 // indirect - github.com/tidwall/match v1.0.3 // indirect + github.com/tidwall/match v1.1.1 // indirect github.com/urfave/cli/v2 v2.1.1 // indirect github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e // indirect - go.uber.org/atomic v1.6.0 // indirect - golang.org/x/mod v0.4.1 // indirect - golang.org/x/text 
v0.3.6 // indirect + go.uber.org/atomic v1.7.0 // indirect + golang.org/x/mod v0.4.2 // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect - gopkg.in/ini.v1 v1.51.0 // indirect - gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect + gopkg.in/ini.v1 v1.63.2 // indirect + gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect ) replace git.apache.org/thrift.git => github.com/apache/thrift v0.0.0-20180902110319-2566ecd5d999 diff --git a/go.sum b/go.sum index 457db0ce0..27cbde3ef 100644 --- a/go.sum +++ b/go.sum @@ -18,6 +18,11 @@ cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmW cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= +cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= +cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= +cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= +cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -27,6 +32,7 @@ cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM7 cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= 
+cloud.google.com/go/firestore v1.6.0/go.mod h1:afJwI0vaXwAG54kI7A//lP/lSPDkQORQuMkv56TxEPU= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -50,7 +56,6 @@ github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSY github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= -github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/ClickHouse/clickhouse-go v1.4.3/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI= @@ -78,6 +83,7 @@ github.com/antchfx/htmlquery v1.2.3 h1:sP3NFDneHx2stfNXCKbhHFo8XgNjCACnU/4AO5gWz github.com/antchfx/htmlquery v1.2.3/go.mod h1:B0ABL+F5irhhMWg54ymEZinzMSi0Kt3I2if0BLYa3V0= github.com/antchfx/xpath v1.1.6 h1:6sVh6hB5T6phw1pFpHRQ+C4bd8sNI+O58flqtg7h0R0= github.com/antchfx/xpath v1.1.6/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/apache/arrow/go/arrow v0.0.0-20200601151325-b2287a20f230/go.mod h1:QNYViu/X0HXDHw7m3KXzWSVXIbfUvJqBFe6Gj8/pYA0= github.com/apache/arrow/go/arrow v0.0.0-20210521153258-78c88a9f517b/go.mod h1:R4hW3Ug0s+n4CUsWHKOj00Pu01ZqU4x/hSF5kXUcXKQ= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q= @@ -86,6 +92,7 @@ github.com/armon/circbuf 
v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hC github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/aws/aws-sdk-go v1.17.7/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go-v2 v1.3.2/go.mod h1:7OaACgj2SX3XGWnrIjGlJM22h6yD6MEWKvm7levnnM8= github.com/aws/aws-sdk-go-v2 v1.6.0/go.mod h1:tI4KhsR5VkzlUa2DZAdwx7wCAYGwkZZ1H31PYrBFx1w= @@ -139,6 +146,7 @@ github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58/go.mod h1:EOBUe0h github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= github.com/cockroachdb/cockroach-go/v2 v2.1.1/go.mod h1:7NtUnP6eK+l6k483WSYNrq3Kb23bWV10IRV1TyeSpwM= github.com/containerd/containerd v1.4.3/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= @@ -149,6 +157,7 @@ github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3Ee github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod 
h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/corona10/goimagehash v1.0.3 h1:NZM518aKLmoNluluhfHGxT3LGOnrojrxhGn63DR/CZA= github.com/corona10/goimagehash v1.0.3/go.mod h1:VkvE0mLn84L4aF8vCb6mafVajEb6QYMHl2ZJLn0mOGI= @@ -183,12 +192,15 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= github.com/form3tech-oss/jwt-go v3.2.3+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= -github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI= +github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= github.com/fsouza/fake-gcs-server v1.17.0/go.mod h1:D1rTE4YCyHFNa99oyJJ5HyclvN/0uQR+pM/VdlL83bw= 
github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM45eo= github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= @@ -240,6 +252,7 @@ github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6Wezm github.com/gobwas/ws v1.1.0-rc.5 h1:QOAag7FoBaBYYHRqzqkhhd8fq5RTubvI4v3Ft/gDVVQ= github.com/gobwas/ws v1.1.0-rc.5/go.mod h1:nzvNcVha5eUziGrbxFCo6qFIojQHjJV5cLYIbezhfL0= github.com/gocql/gocql v0.0.0-20190301043612-f6df8288f9b4/go.mod h1:4Fw1eo5iaEhDUs8XyuhSVCVy52Jq3L+/3GJgYkwc+/0= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= github.com/gogo/protobuf v1.0.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= @@ -265,6 +278,7 @@ github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.0.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -302,14 +316,16 @@ github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod 
h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-github/v35 v35.2.0/go.mod h1:s0515YVTI+IMrDoy9Y4pHt9ShGpzHvHO8rZ7L7acgvs= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= @@ -321,14 +337,17 @@ github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof 
v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e h1:JKmoR8x90Iww1ks85zJ1lfDGgIiMDuIptTOhJq+zKyg= github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gorilla/context v0.0.0-20160226214623-1ea25387ff6f/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= @@ -347,20 +366,25 @@ github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/ad github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/h2non/filetype v1.0.8 h1:le8gpf+FQA0/DlDABbtisA1KiTS0Xi+YSC/E8yY3Y14= 
github.com/h2non/filetype v1.0.8/go.mod h1:isekKqOuhMj+s/7r3rIeTErIRy4Rub5uBWHfvMusLMU= github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4= github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= +github.com/hashicorp/consul/api v1.10.1/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.0 h1:B9UzwGQJehnUY1yNrnwREHc3fGbC2xefo8g4TbElacI= github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= @@ -373,8 +397,11 @@ github.com/hashicorp/hcl v1.0.0 
h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY= github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= +github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= @@ -439,12 +466,11 @@ github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22 github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.9 h1:9yzud/Ht36ygwatGx56VwCZtlI/2AD15T1X2sjSuGns= -github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod 
h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= -github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= @@ -462,8 +488,10 @@ github.com/klauspost/compress v1.12.2/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8 github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= @@ -478,8 +506,9 @@ github.com/lib/pq v1.10.0 h1:Zx5DJFEYQXio93kgXnQ09fXNiUKsqv4OUEu2UtGcB1E= github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= 
-github.com/magiconair/properties v1.8.1 h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4= github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= +github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= @@ -498,13 +527,17 @@ github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hd github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= +github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= +github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg= github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/cli v1.1.0/go.mod 
h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= @@ -514,8 +547,9 @@ github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0Qu github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v0.0.0-20180203102830-a4e142e9c047/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v0.0.0-20180220230111-00c29f56e238/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.4.2 h1:6h7AQ0yhTcIsmFmnAwQls75jp2Gzs4iB8W7pjMO+rqo= +github.com/mitchellh/mapstructure v1.4.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -546,21 +580,26 @@ github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKw github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= -github.com/pelletier/go-toml v1.7.0 
h1:7utD74fnzVc/cpcyy8sjrlFr5vYpypUixARcHIMIGuI= github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= +github.com/pelletier/go-toml v1.9.4 h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM= +github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pierrec/lz4/v4 v4.1.4/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pierrec/lz4/v4 v4.1.7/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA= github.com/pkg/browser v0.0.0-20210115035449-ce105d075bb4/go.mod h1:N6UoU20jOqggOuDwUaBQpluzLNDqif3kq9z2wpdYEfQ= +github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= +github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= 
github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= @@ -578,6 +617,7 @@ github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qq github.com/robertkrimen/otto v0.0.0-20200922221731-ef014fd054ac h1:kYPjbEN6YPYWWHI6ky1J813KzIq/8+Wg4TO4xU7A/KU= github.com/robertkrimen/otto v0.0.0-20200922221731-ef014fd054ac/go.mod h1:xvqspoSXJTIpemEonrMDFq6XzwHYYgToXWj5eRX1OtY= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= @@ -592,6 +632,7 @@ github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0 github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8= +github.com/sagikazarmark/crypt v0.1.0/go.mod h1:B/mN0msZuINBtQ1zZLEQcegFJJf9vnYIR88KRMEuODE= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= @@ -613,29 +654,31 @@ github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6Mwd github.com/sirupsen/logrus 
v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s= -github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/snowflakedb/gosnowflake v1.4.3/go.mod h1:1kyg2XEduwti88V11PKRHImhXLK5WpGiayY6lFNYb98= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= -github.com/spf13/afero v1.2.0 h1:O9FblXGxoTc51M+cqr74Bm2Tmt4PvkA5iu/j8HrkNuY= -github.com/spf13/afero v1.2.0/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= -github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8= +github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= +github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cast v1.4.1 h1:s0hze+J0196ZfEMTs80N7UlFt0BDuQ7Q+JDnHiMWKdA= +github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/cobra v1.0.0 h1:6m/oheQuQ13N9ks4hubMG6BnvwOeaJrqSPLahSnczz8= 
github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE= -github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= -github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg= +github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= -github.com/spf13/viper v1.7.0 h1:xVKxvI7ouOI5I+U9s2eeiUfMaWBVoXA3AWskkrqK0VM= github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= +github.com/spf13/viper v1.9.0 h1:yR6EXjTp0y0cLN8OZg1CRZmOBdI88UcGkhgyJhu6nZk= +github.com/spf13/viper v1.9.0/go.mod h1:+i6ajR7OX2XaiBkrcZJFK21htRk7eDeLg7+O6bhUPP4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48= @@ -646,16 +689,16 @@ github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXf github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= github.com/stretchr/testify v1.6.1/go.mod 
h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= -github.com/tidwall/gjson v1.8.1 h1:8j5EE9Hrh3l9Od1OIEDAb7IpezNA20UdRngNAj5N0WU= -github.com/tidwall/gjson v1.8.1/go.mod h1:5/xDoumyyDNerp2U36lyolv46b3uF/9Bu6OfyQ9GImk= -github.com/tidwall/match v1.0.3 h1:FQUVvBImDutD8wJLN6c5eMzWtjgONK9MwIBCOrUJKeE= -github.com/tidwall/match v1.0.3/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/gjson v1.9.3 h1:hqzS9wAHMO+KVBBkLxYdkEeeFHuqr95GfClRLKlgK0E= +github.com/tidwall/gjson v1.9.3/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= -github.com/tidwall/pretty v1.1.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs= github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/tinylib/msgp v1.0.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE= @@ -683,9 +726,13 @@ github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/zenazn/goji v0.9.0/go.mod 
h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b/go.mod h1:T3BPAOm2cqquPa0MKWeNkmOM5RQsRhkrwMWonFMN7fE= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= go.mongodb.org/mongo-driver v1.7.0/go.mod h1:Q4oFMbo1+MSNqICAdYMlC/zSTrwCogR4R8NzkI+yfU8= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= @@ -694,15 +741,19 @@ go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.6.0 h1:Ezj3JGmsOnG1MoRWQkPBsKLe9DwWD9QeXzTRzzldNVk= go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod 
h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= @@ -713,6 +764,7 @@ golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= @@ -720,8 +772,8 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= -golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97 
h1:/UOmuWzQfxxo9UtlXMwuQU8CMgg1eZXqTRwkSQJWKOI= -golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 h1:HWj/xjIHfjYU5nVXpTM0s39J9CbLn7Cc5a7IC5rwsMQ= +golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -747,8 +799,8 @@ golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHl golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5 h1:2M3HP5CCK1Si9FQhwnzYhXdG6DXeebvUHFpre8QvbyI= golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= @@ -758,8 +810,9 @@ golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod 
h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.1 h1:Kvvh58BN8Y9/lBi7hTekvtMpm07eUZ0ck5pRHpsMWrY= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -783,6 +836,7 @@ golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191112182307-2180aed22343/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -807,6 +861,8 @@ golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod 
h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210520170846-37e1c6afe023 h1:ADo5wSpq2gqaCGQWzk7S5vd//0iyyLeAratkEoG5dLE= golang.org/x/net v0.0.0-20210520170846-37e1c6afe023/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/oauth2 v0.0.0-20180227000427-d7d64896b5ff/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -823,6 +879,10 @@ golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210413134643-5e61552d6c78/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -860,7 +920,10 @@ golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191112214154-59a1497f0cea/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -869,6 +932,7 @@ golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -896,13 +960,22 @@ golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210412220455-f1c623a9e750/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210521090106-6ca3eb03dfc2/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210525143221-35b2ab0089ea/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I= +golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf h1:2ucpDCmfkl8Bd/FsLtiD653Wf96cW37s+iGx93zsu4k= +golang.org/x/sys 
v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b h1:9zKuko04nR4gjZ4+DNjHqRlAJqbJETHwiNKDqTfOjfE= golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -943,6 +1016,7 @@ golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190828213141-aed303cbaa74/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -984,8 +1058,13 @@ golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.2/go.mod 
h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5 h1:ouewzE6p+/VEB31YYnTbEJdi8pFqKp4P4n85vwo3DHA= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1015,6 +1094,12 @@ google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjR google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= google.golang.org/api v0.45.0/go.mod h1:ISLIJCedJolbZvDfAk+Ctuq5hf+aJ33WgtUsfyFoLXA= +google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= +google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= +google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= +google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= +google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= +google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/appengine v1.0.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -1047,6 +1132,7 @@ google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfG google.golang.org/genproto 
v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= @@ -1067,6 +1153,18 @@ google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= google.golang.org/genproto v0.0.0-20210413151531-c14fb6ef47c3/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= google.golang.org/genproto v0.0.0-20210427215850-f767ed18ee4d/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= +google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= +google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto 
v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= +google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -1082,12 +1180,19 @@ google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.32.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= 
+google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -1100,6 +1205,7 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -1110,8 +1216,9 @@ gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= -gopkg.in/ini.v1 v1.51.0 
h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno= gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.63.2 h1:tGK/CyBg7SMzb60vP1M03vNZ3VDu3wGQJwn7Sxi9r3c= +gopkg.in/ini.v1 v1.63.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/sourcemap.v1 v1.0.5 h1:inv58fC9f9J3TK2Y2R1NPntXEn3/wjWHkonhIUODNTI= gopkg.in/sourcemap.v1 v1.0.5/go.mod h1:2RlvNNSMglmRrcvhfuzp4hQHwOtjxlbjX7UPY/GXb78= @@ -1119,14 +1226,16 @@ gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWD gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gorm.io/driver/postgres v1.0.8/go.mod h1:4eOzrI1MUfm6ObJU/UcmbXyiHSs8jSwH95G5P5dxcAg= gorm.io/gorm v1.20.12/go.mod 
h1:0HFTzE/SqkGTzK6TlDPPQbAYCluiVvhzoA1+aVyzenw= gorm.io/gorm v1.21.4/go.mod h1:0HFTzE/SqkGTzK6TlDPPQbAYCluiVvhzoA1+aVyzenw= diff --git a/graphql/documents/data/config.graphql b/graphql/documents/data/config.graphql index bb2e41d8a..4964ab3cd 100644 --- a/graphql/documents/data/config.graphql +++ b/graphql/documents/data/config.graphql @@ -52,12 +52,20 @@ fragment ConfigInterfaceData on ConfigInterfaceResult { wallShowTitle wallPlayback maximumLoopDuration + noBrowser autostartVideo + autostartVideoOnPlaySelected + continuePlaylistDefault showStudioAsText css cssEnabled language slideshowDelay + disabledDropdownCreate { + performer + tag + studio + } handyKey funscriptOffset } @@ -76,6 +84,46 @@ fragment ConfigScrapingData on ConfigScrapingResult { excludeTagPatterns } +fragment IdentifyFieldOptionsData on IdentifyFieldOptions { + field + strategy + createMissing +} + +fragment IdentifyMetadataOptionsData on IdentifyMetadataOptions { + fieldOptions { + ...IdentifyFieldOptionsData + } + setCoverImage + setOrganized + includeMalePerformers +} + +fragment ScraperSourceData on ScraperSource { + stash_box_index + stash_box_endpoint + scraper_id +} + +fragment ConfigDefaultSettingsData on ConfigDefaultSettingsResult { + identify { + sources { + source { + ...ScraperSourceData + } + options { + ...IdentifyMetadataOptionsData + } + } + options { + ...IdentifyMetadataOptionsData + } + } + + deleteFile + deleteGenerated +} + fragment ConfigData on ConfigResult { general { ...ConfigGeneralData @@ -89,4 +137,7 @@ fragment ConfigData on ConfigResult { scraping { ...ConfigScrapingData } + defaults { + ...ConfigDefaultSettingsData + } } diff --git a/graphql/documents/mutations/config.graphql b/graphql/documents/mutations/config.graphql index 149d9bf28..fff7dbeca 100644 --- a/graphql/documents/mutations/config.graphql +++ b/graphql/documents/mutations/config.graphql @@ -30,6 +30,12 @@ mutation ConfigureScraping($input: ConfigScrapingInput!) 
{ } } +mutation ConfigureDefaults($input: ConfigDefaultSettingsInput!) { + configureDefaults(input: $input) { + ...ConfigDefaultSettingsData + } +} + mutation GenerateAPIKey($input: GenerateAPIKeyInput!) { generateAPIKey(input: $input) } diff --git a/graphql/documents/mutations/metadata.graphql b/graphql/documents/mutations/metadata.graphql index 710a9aac9..068665d9f 100644 --- a/graphql/documents/mutations/metadata.graphql +++ b/graphql/documents/mutations/metadata.graphql @@ -26,6 +26,10 @@ mutation MetadataAutoTag($input: AutoTagMetadataInput!) { metadataAutoTag(input: $input) } +mutation MetadataIdentify($input: IdentifyMetadataInput!) { + metadataIdentify(input: $input) +} + mutation MetadataClean($input: CleanMetadataInput!) { metadataClean(input: $input) } diff --git a/graphql/documents/queries/image.graphql b/graphql/documents/queries/image.graphql index 4d35bc69b..0f275138d 100644 --- a/graphql/documents/queries/image.graphql +++ b/graphql/documents/queries/image.graphql @@ -1,6 +1,8 @@ query FindImages($filter: FindFilterType, $image_filter: ImageFilterType, $image_ids: [Int!]) { findImages(filter: $filter, image_filter: $image_filter, image_ids: $image_ids) { count + megapixels + filesize images { ...SlimImageData } diff --git a/graphql/documents/queries/scene.graphql b/graphql/documents/queries/scene.graphql index daeabbaaf..f64bfab61 100644 --- a/graphql/documents/queries/scene.graphql +++ b/graphql/documents/queries/scene.graphql @@ -1,6 +1,8 @@ query FindScenes($filter: FindFilterType, $scene_filter: SceneFilterType, $scene_ids: [Int!]) { findScenes(filter: $filter, scene_filter: $scene_filter, scene_ids: $scene_ids) { count + filesize + duration scenes { ...SlimSceneData } @@ -10,6 +12,8 @@ query FindScenes($filter: FindFilterType, $scene_filter: SceneFilterType, $scene query FindScenesByPathRegex($filter: FindFilterType) { findScenesByPathRegex(filter: $filter) { count + filesize + duration scenes { ...SlimSceneData } diff --git 
a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql index 64a55c7eb..e6f435e98 100644 --- a/graphql/schema/schema.graphql +++ b/graphql/schema/schema.graphql @@ -7,7 +7,7 @@ type Query { """Find a scene by ID or Checksum""" findScene(id: ID, checksum: String): Scene findSceneByHash(input: SceneHashInput!): Scene - + """A function which queries Scene objects""" findScenes(scene_filter: SceneFilterType, scene_ids: [Int!], filter: FindFilterType): FindScenesResultType! @@ -25,7 +25,7 @@ type Query { findSceneMarkers(scene_marker_filter: SceneMarkerFilterType filter: FindFilterType): FindSceneMarkersResultType! findImage(id: ID, checksum: String): Image - + """A function which queries Scene objects""" findImages(image_filter: ImageFilterType, image_ids: [Int!], filter: FindFilterType): FindImagesResultType! @@ -127,7 +127,12 @@ type Query { """Returns the current, complete configuration""" configuration: ConfigResult! """Returns an array of paths for the given path""" - directory(path: String): Directory! + directory( + "The directory path to list" + path: String, + "Desired collation locale. Determines the order of the directory result. eg. 'en-US', 'pt-BR', ..." + locale: String = "en" + ): Directory! # System status systemStatus: SystemStatus! @@ -149,7 +154,7 @@ type Query { # Version version: Version! - + # LatestVersion latestversion: ShortVersion! } @@ -232,6 +237,7 @@ type Mutation { configureInterface(input: ConfigInterfaceInput!): ConfigInterfaceResult! configureDLNA(input: ConfigDLNAInput!): ConfigDLNAResult! configureScraping(input: ConfigScrapingInput!): ConfigScrapingResult! + configureDefaults(input: ConfigDefaultSettingsInput!): ConfigDefaultSettingsResult! """Generate and set (or clear) API key""" generateAPIKey(input: GenerateAPIKeyInput!): String! @@ -254,6 +260,8 @@ type Mutation { metadataAutoTag(input: AutoTagMetadataInput!): ID! """Clean metadata. Returns the job ID""" metadataClean(input: CleanMetadataInput!): ID! 
+ """Identifies scenes using scrapers. Returns the job ID""" + metadataIdentify(input: IdentifyMetadataInput!): ID! """Migrate generated files for the current hash naming""" migrateHashNaming: ID! @@ -275,7 +283,7 @@ type Mutation { """Run batch performer tag task. Returns the job ID.""" stashBoxBatchPerformerTag(input: StashBoxBatchPerformerTagInput!): String! - + """Enables DLNA for an optional duration. Has no effect if DLNA is enabled by default""" enableDLNA(input: EnableDLNAInput!): Boolean! """Disables DLNA for an optional duration. Has no effect if DLNA is disabled by default""" diff --git a/graphql/schema/types/config.graphql b/graphql/schema/types/config.graphql index 475d4b272..6e8699a10 100644 --- a/graphql/schema/types/config.graphql +++ b/graphql/schema/types/config.graphql @@ -188,56 +188,102 @@ type ConfigGeneralResult { stashBoxes: [StashBox!]! } +input ConfigDisableDropdownCreateInput { + performer: Boolean + tag: Boolean + studio: Boolean +} + input ConfigInterfaceInput { """Ordered list of items that should be shown in the menu""" menuItems: [String!] 
+ """Enable sound on mouseover previews""" soundOnPreview: Boolean + """Show title and tags in wall view""" wallShowTitle: Boolean """Wall playback type""" wallPlayback: String + """Maximum duration (in seconds) in which a scene video will loop in the scene player""" maximumLoopDuration: Int """If true, video will autostart on load in the scene player""" autostartVideo: Boolean + """If true, video will autostart when loading from play random or play selected""" + autostartVideoOnPlaySelected: Boolean + """If true, next scene in playlist will be played at video end by default""" + continuePlaylistDefault: Boolean + """If true, studio overlays will be shown as text instead of logo images""" showStudioAsText: Boolean + """Custom CSS""" css: String cssEnabled: Boolean + """Interface language""" language: String + """Slideshow Delay""" slideshowDelay: Int + + """Set to true to disable creating new objects via the dropdown menus""" + disableDropdownCreate: ConfigDisableDropdownCreateInput + """Handy Connection Key""" handyKey: String """Funscript Time Offset""" funscriptOffset: Int + """True if we should not auto-open a browser window on startup""" + noBrowser: Boolean +} + +type ConfigDisableDropdownCreate { + performer: Boolean! + tag: Boolean! + studio: Boolean! } type ConfigInterfaceResult { """Ordered list of items that should be shown in the menu""" menuItems: [String!] 
+ """Enable sound on mouseover previews""" soundOnPreview: Boolean + """Show title and tags in wall view""" wallShowTitle: Boolean """Wall playback type""" wallPlayback: String + """Maximum duration (in seconds) in which a scene video will loop in the scene player""" maximumLoopDuration: Int + """"True if we should not auto-open a browser window on startup""" + noBrowser: Boolean """If true, video will autostart on load in the scene player""" autostartVideo: Boolean + """If true, video will autostart when loading from play random or play selected""" + autostartVideoOnPlaySelected: Boolean + """If true, next scene in playlist will be played at video end by default""" + continuePlaylistDefault: Boolean + """If true, studio overlays will be shown as text instead of logo images""" showStudioAsText: Boolean + """Custom CSS""" css: String cssEnabled: Boolean + """Interface language""" language: String + """Slideshow Delay""" slideshowDelay: Int + + """Fields are true if creating via dropdown menus are disabled""" + disabledDropdownCreate: ConfigDisableDropdownCreate! + """Handy Connection Key""" handyKey: String """Funscript Time Offset""" @@ -286,12 +332,31 @@ type ConfigScrapingResult { excludeTagPatterns: [String!]! } +type ConfigDefaultSettingsResult { + identify: IdentifyMetadataTaskOptions + + """If true, delete file checkbox will be checked by default""" + deleteFile: Boolean + """If true, delete generated supporting files checkbox will be checked by default""" + deleteGenerated: Boolean +} + +input ConfigDefaultSettingsInput { + identify: IdentifyMetadataInput + + """If true, delete file checkbox will be checked by default""" + deleteFile: Boolean + """If true, delete generated files checkbox will be checked by default""" + deleteGenerated: Boolean +} + """All configuration settings""" type ConfigResult { general: ConfigGeneralResult! interface: ConfigInterfaceResult! dlna: ConfigDLNAResult! scraping: ConfigScrapingResult! + defaults: ConfigDefaultSettingsResult! 
} """Directory structure of a path""" diff --git a/graphql/schema/types/gallery.graphql b/graphql/schema/types/gallery.graphql index 1c87a4b86..a06c6a512 100644 --- a/graphql/schema/types/gallery.graphql +++ b/graphql/schema/types/gallery.graphql @@ -74,6 +74,11 @@ input BulkGalleryUpdateInput { input GalleryDestroyInput { ids: [ID!]! + """ + If true, then the zip file will be deleted if the gallery is zip-file-based. + If gallery is folder-based, then any files not associated with other + galleries will be deleted, along with the folder, if it is not empty. + """ delete_file: Boolean delete_generated: Boolean } diff --git a/graphql/schema/types/image.graphql b/graphql/schema/types/image.graphql index 9445aec92..da3b56ee6 100644 --- a/graphql/schema/types/image.graphql +++ b/graphql/schema/types/image.graphql @@ -70,5 +70,9 @@ input ImagesDestroyInput { type FindImagesResultType { count: Int! + """Total megapixels of the images""" + megapixels: Float! + """Total file size in bytes""" + filesize: Float! images: [Image!]! } \ No newline at end of file diff --git a/graphql/schema/types/logging.graphql b/graphql/schema/types/logging.graphql index 397a27f10..adab16401 100644 --- a/graphql/schema/types/logging.graphql +++ b/graphql/schema/types/logging.graphql @@ -2,6 +2,7 @@ scalar Time enum LogLevel { + Trace Debug Info Progress diff --git a/graphql/schema/types/metadata.graphql b/graphql/schema/types/metadata.graphql index cfc366ccc..bb2f5643f 100644 --- a/graphql/schema/types/metadata.graphql +++ b/graphql/schema/types/metadata.graphql @@ -67,6 +67,88 @@ input AutoTagMetadataInput { tags: [String!] } +enum IdentifyFieldStrategy { + """Never sets the field value""" + IGNORE + """ + For multi-value fields, merge with existing. + For single-value fields, ignore if already set + """ + MERGE + """Always replaces the value if a value is found. + For multi-value fields, any existing values are removed and replaced with the + scraped values. 
+ """ + OVERWRITE +} + +input IdentifyFieldOptionsInput { + field: String! + strategy: IdentifyFieldStrategy! + """creates missing objects if needed - only applicable for performers, tags and studios""" + createMissing: Boolean +} + +input IdentifyMetadataOptionsInput { + """any fields missing from here are defaulted to MERGE and createMissing false""" + fieldOptions: [IdentifyFieldOptionsInput!] + """defaults to true if not provided""" + setCoverImage: Boolean + setOrganized: Boolean + """defaults to true if not provided""" + includeMalePerformers: Boolean +} + +input IdentifySourceInput { + source: ScraperSourceInput! + """Options defined for a source override the defaults""" + options: IdentifyMetadataOptionsInput +} + +input IdentifyMetadataInput { + """An ordered list of sources to identify items with. Only the first source that finds a match is used.""" + sources: [IdentifySourceInput!]! + """Options defined here override the configured defaults""" + options: IdentifyMetadataOptionsInput + + """scene ids to identify""" + sceneIDs: [ID!] + + """paths of scenes to identify - ignored if scene ids are set""" + paths: [String!] +} + +# types for default options +type IdentifyFieldOptions { + field: String! + strategy: IdentifyFieldStrategy! + """creates missing objects if needed - only applicable for performers, tags and studios""" + createMissing: Boolean +} + +type IdentifyMetadataOptions { + """any fields missing from here are defaulted to MERGE and createMissing false""" + fieldOptions: [IdentifyFieldOptions!] + """defaults to true if not provided""" + setCoverImage: Boolean + setOrganized: Boolean + """defaults to true if not provided""" + includeMalePerformers: Boolean +} + +type IdentifySource { + source: ScraperSource! + """Options defined for a source override the defaults""" + options: IdentifyMetadataOptions +} + +type IdentifyMetadataTaskOptions { + """An ordered list of sources to identify items with. 
Only the first source that finds a match is used.""" + sources: [IdentifySource!]! + """Options defined here override the configured defaults""" + options: IdentifyMetadataOptions +} + input ExportObjectTypeInput { ids: [String!] all: Boolean diff --git a/graphql/schema/types/scene.graphql b/graphql/schema/types/scene.graphql index 4e2b0281b..208a43929 100644 --- a/graphql/schema/types/scene.graphql +++ b/graphql/schema/types/scene.graphql @@ -120,6 +120,10 @@ input ScenesDestroyInput { type FindScenesResultType { count: Int! + """Total duration in seconds""" + duration: Float! + """Total file size in bytes""" + filesize: Float! scenes: [Scene!]! } diff --git a/graphql/schema/types/scraper.graphql b/graphql/schema/types/scraper.graphql index 9e35346f4..ebe338e1c 100644 --- a/graphql/schema/types/scraper.graphql +++ b/graphql/schema/types/scraper.graphql @@ -31,6 +31,7 @@ type ScrapedStudio { stored_id: ID name: String! url: String + image: String remote_site_id: String } @@ -95,7 +96,18 @@ input ScrapedGalleryInput { input ScraperSourceInput { """Index of the configured stash-box instance to use. Should be unset if scraper_id is set""" - stash_box_index: Int + stash_box_index: Int @deprecated(reason: "use stash_box_endpoint") + """Stash-box endpoint""" + stash_box_endpoint: String + """Scraper ID to scrape with. Should be unset if stash_box_index is set""" + scraper_id: ID +} + +type ScraperSource { + """Index of the configured stash-box instance to use. Should be unset if scraper_id is set""" + stash_box_index: Int @deprecated(reason: "use stash_box_endpoint") + """Stash-box endpoint""" + stash_box_endpoint: String """Scraper ID to scrape with. Should be unset if stash_box_index is set""" scraper_id: ID } @@ -175,10 +187,16 @@ type StashBoxFingerprint { duration: Int! } +"""If neither performer_ids nor performer_names are set, tag all performers""" input StashBoxBatchPerformerTagInput { + "Stash endpoint to use for the performer tagging" endpoint: Int! 
+ "Fields to exclude when executing the performer tagging" exclude_fields: [String!] + "Refresh performers already tagged by StashBox if true. Only tag performers with no StashBox tagging if false" refresh: Boolean! + "If set, only tag these performer ids" performer_ids: [ID!] + "If set, only tag these performer names" performer_names: [String!] } diff --git a/graphql/stash-box/query.graphql b/graphql/stash-box/query.graphql index ad1c937f5..9bc24f70a 100644 --- a/graphql/stash-box/query.graphql +++ b/graphql/stash-box/query.graphql @@ -49,6 +49,7 @@ fragment PerformerFragment on Performer { disambiguation aliases gender + merged_ids urls { ...URLFragment } @@ -75,11 +76,6 @@ fragment PerformerFragment on Performer { piercings { ...BodyModificationFragment } - details - death_date { - ...FuzzyDateFragment - } - weight } fragment PerformerAppearanceFragment on PerformerAppearance { @@ -127,8 +123,8 @@ query FindSceneByFingerprint($fingerprint: FingerprintQueryInput!) { } } -query FindScenesByFingerprints($fingerprints: [String!]!) { - findScenesByFingerprints(fingerprints: $fingerprints) { +query FindScenesByFullFingerprints($fingerprints: [FingerprintQueryInput!]!) { + findScenesByFullFingerprints(fingerprints: $fingerprints) { ...SceneFragment } } @@ -151,6 +147,12 @@ query FindPerformerByID($id: ID!) { } } +query FindSceneByID($id: ID!) { + findScene(id: $id) { + ...SceneFragment + } +} + mutation SubmitFingerprint($input: FingerprintSubmission!) 
{ submitFingerprint(input: $input) } diff --git a/pkg/api/authentication.go b/pkg/api/authentication.go index ae6fb2952..17246badd 100644 --- a/pkg/api/authentication.go +++ b/pkg/api/authentication.go @@ -1,6 +1,7 @@ package api import ( + "errors" "net" "net/http" "net/url" @@ -39,7 +40,7 @@ func authenticateHandler() func(http.Handler) http.Handler { userID, err := manager.GetInstance().SessionStore.Authenticate(w, r) if err != nil { - if err != session.ErrUnauthorized { + if !errors.Is(err, session.ErrUnauthorized) { w.WriteHeader(http.StatusInternalServerError) _, err = w.Write([]byte(err.Error())) if err != nil { @@ -55,16 +56,18 @@ } if err := session.CheckAllowPublicWithoutAuth(c, r); err != nil { - switch err := err.(type) { - case session.ExternalAccessError: - securityActivateTripwireAccessedFromInternetWithoutAuth(c, err, w) + var externalAccess session.ExternalAccessError + var untrustedProxy session.UntrustedProxyError + switch { + case errors.As(err, &externalAccess): + securityActivateTripwireAccessedFromInternetWithoutAuth(c, externalAccess, w) return - case session.UntrustedProxyError: - logger.Warnf("Rejected request from untrusted proxy: %s", net.IP(err).String()) + case errors.As(err, &untrustedProxy): + logger.Warnf("Rejected request from untrusted proxy: %v", net.IP(untrustedProxy)) w.WriteHeader(http.StatusForbidden) return default: - logger.Errorf("Error checking external access security: %s", err.Error()) + logger.Errorf("Error checking external access security: %v", err) w.WriteHeader(http.StatusInternalServerError) return } diff --git a/pkg/api/check_version.go b/pkg/api/check_version.go index 44e29c52a..bd023da4a 100644 --- a/pkg/api/check_version.go +++ b/pkg/api/check_version.go @@ -1,6 +1,7 @@ package api import ( + "context" "encoding/json" "errors" "fmt" @@ -15,7 +16,7 @@ import ( "github.com/stashapp/stash/pkg/logger" ) -//we use the github REST V3 API as no login is 
required +// we use the github REST V3 API as no login is required const apiReleases string = "https://api.github.com/repos/stashapp/stash/releases" const apiTags string = "https://api.github.com/repos/stashapp/stash/tags" const apiAcceptHeader string = "application/vnd.github.v3+json" @@ -107,19 +108,19 @@ type githubTagResponse struct { Node_id string } -func makeGithubRequest(url string, output interface{}) error { +func makeGithubRequest(ctx context.Context, url string, output interface{}) error { client := &http.Client{ Timeout: 3 * time.Second, } - req, _ := http.NewRequest("GET", url, nil) + req, _ := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) req.Header.Add("Accept", apiAcceptHeader) // gh api recommendation , send header with api version response, err := client.Do(req) if err != nil { //lint:ignore ST1005 Github is a proper capitalized noun - return fmt.Errorf("Github API request failed: %s", err) + return fmt.Errorf("Github API request failed: %w", err) } if response.StatusCode != http.StatusOK { @@ -132,12 +133,12 @@ func makeGithubRequest(url string, output interface{}) error { data, err := io.ReadAll(response.Body) if err != nil { //lint:ignore ST1005 Github is a proper capitalized noun - return fmt.Errorf("Github API read response failed: %s", err) + return fmt.Errorf("Github API read response failed: %w", err) } err = json.Unmarshal(data, output) if err != nil { - return fmt.Errorf("unmarshalling Github API response failed: %s", err) + return fmt.Errorf("unmarshalling Github API response failed: %w", err) } return nil @@ -147,7 +148,7 @@ func makeGithubRequest(url string, output interface{}) error { // If running a build from the "master" branch, then the latest full release // is used, otherwise it uses the release that is tagged with "latest_develop" // which is the latest pre-release build. 
-func GetLatestVersion(shortHash bool) (latestVersion string, latestRelease string, err error) { +func GetLatestVersion(ctx context.Context, shortHash bool) (latestVersion string, latestRelease string, err error) { arch := runtime.GOARCH // https://en.wikipedia.org/wiki/Comparison_of_ARM_cores isARMv7 := cpu.ARM.HasNEON || cpu.ARM.HasVFPv3 || cpu.ARM.HasVFPv3D16 || cpu.ARM.HasVFPv4 // armv6 doesn't support any of these features @@ -180,14 +181,14 @@ func GetLatestVersion(shortHash bool) (latestVersion string, latestRelease strin } release := githubReleasesResponse{} - err = makeGithubRequest(url, &release) + err = makeGithubRequest(ctx, url, &release) if err != nil { return "", "", err } if release.Prerelease == usePreRelease { - latestVersion = getReleaseHash(release, shortHash, usePreRelease) + latestVersion = getReleaseHash(ctx, release, shortHash, usePreRelease) if wantedRelease != "" { for _, asset := range release.Assets { @@ -205,12 +206,12 @@ func GetLatestVersion(shortHash bool) (latestVersion string, latestRelease strin return latestVersion, latestRelease, nil } -func getReleaseHash(release githubReleasesResponse, shortHash bool, usePreRelease bool) string { +func getReleaseHash(ctx context.Context, release githubReleasesResponse, shortHash bool, usePreRelease bool) string { shaLength := len(release.Target_commitish) // the /latest API call doesn't return the hash in target_commitish // also add sanity check in case Target_commitish is not 40 characters if !usePreRelease || shaLength != 40 { - return getShaFromTags(shortHash, release.Tag_name) + return getShaFromTags(ctx, shortHash, release.Tag_name) } if shortHash { @@ -225,9 +226,9 @@ func getReleaseHash(release githubReleasesResponse, shortHash bool, usePreReleas return release.Target_commitish } -func printLatestVersion() { +func printLatestVersion(ctx context.Context) { _, githash, _ = GetVersion() - latest, _, err := GetLatestVersion(true) + latest, _, err := GetLatestVersion(ctx, true) if err != 
nil { logger.Errorf("Couldn't find latest version: %s", err) } else { @@ -241,13 +242,21 @@ func printLatestVersion() { // get sha from the github api tags endpoint // returns the sha1 hash/shorthash or "" if something's wrong -func getShaFromTags(shortHash bool, name string) string { +func getShaFromTags(ctx context.Context, shortHash bool, name string) string { url := apiTags tags := []githubTagResponse{} - err := makeGithubRequest(url, &tags) + err := makeGithubRequest(ctx, url, &tags) if err != nil { - logger.Errorf("Github Tags Api %v", err) + // If the context is canceled, we don't want to log this as an error + // in the path. The function here just gives up and returns "" if + // something goes wrong. Hence, log the error at the info-level so + // it's still present, but don't treat this as an error. + if errors.Is(err, context.Canceled) { + logger.Infof("aborting sha request due to context cancellation") + } else { + logger.Errorf("Github Tags Api: %v", err) + } return "" } _, gitShort, _ := GetVersion() // retrieve short hash to check actual length diff --git a/pkg/api/locale.go b/pkg/api/locale.go new file mode 100644 index 000000000..e29ffb9ef --- /dev/null +++ b/pkg/api/locale.go @@ -0,0 +1,34 @@ +package api + +import ( + "golang.org/x/text/collate" + "golang.org/x/text/language" +) + +// matcher defines a matcher for the languages we support +var matcher = language.NewMatcher([]language.Tag{ + language.MustParse("en-US"), // The first language is used as fallback. 
+ language.MustParse("en-GB"), + language.MustParse("en-AU"), + language.MustParse("es-ES"), + language.MustParse("de-DE"), + language.MustParse("it-IT"), + language.MustParse("fr-FR"), + language.MustParse("pt-BR"), + language.MustParse("sv-SE"), + language.MustParse("zh-CN"), + language.MustParse("zh-TW"), +}) + +// newCollator parses a locale into a collator +// Go through the available matches and return a valid match, in practice the first is a fallback +// Optionally pass collation options through for creation. +// If passed a nil-locale string, return nil +func newCollator(locale *string, opts ...collate.Option) *collate.Collator { + if locale == nil { + return nil + } + + tag, _ := language.MatchStrings(matcher, *locale) + return collate.New(tag, opts...) +} diff --git a/pkg/api/resolver.go b/pkg/api/resolver.go index 7d3f6aa3f..2317f64e5 100644 --- a/pkg/api/resolver.go +++ b/pkg/api/resolver.go @@ -2,6 +2,7 @@ package api import ( "context" + "errors" "sort" "strconv" @@ -10,6 +11,11 @@ import ( "github.com/stashapp/stash/pkg/plugin" ) +var ( + ErrNotImplemented = errors.New("not implemented") + ErrNotSupported = errors.New("not supported") +) + type hookExecutor interface { ExecutePostHooks(ctx context.Context, id int, hookType plugin.HookTriggerEnum, input interface{}, inputFields []string) } @@ -158,9 +164,9 @@ func (r *queryResolver) Version(ctx context.Context) (*models.Version, error) { }, nil } -//Gets latest version (git shorthash commit for now) +// Latestversion returns the latest git shorthash commit. 
func (r *queryResolver) Latestversion(ctx context.Context) (*models.ShortVersion, error) { - ver, url, err := GetLatestVersion(true) + ver, url, err := GetLatestVersion(ctx, true) if err == nil { logger.Infof("Retrieved latest hash: %s", ver) } else { diff --git a/pkg/api/resolver_model_scene.go b/pkg/api/resolver_model_scene.go index 5d909f6b5..185ef2e00 100644 --- a/pkg/api/resolver_model_scene.go +++ b/pkg/api/resolver_model_scene.go @@ -140,7 +140,7 @@ func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (ret *mod } func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*models.SceneMovie, err error) { - if err := r.withTxn(ctx, func(repo models.Repository) error { + if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { qb := repo.Scene() mqb := repo.Movie() diff --git a/pkg/api/resolver_model_studio.go b/pkg/api/resolver_model_studio.go index e8724c545..32c7c5399 100644 --- a/pkg/api/resolver_model_studio.go +++ b/pkg/api/resolver_model_studio.go @@ -39,7 +39,7 @@ func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*st // indicate that image is missing by setting default query param to true if !hasImage { - imagePath = imagePath + "?default=true" + imagePath += "?default=true" } return &imagePath, nil diff --git a/pkg/api/resolver_mutation_configure.go b/pkg/api/resolver_mutation_configure.go index 6672a1993..8273cc7e9 100644 --- a/pkg/api/resolver_mutation_configure.go +++ b/pkg/api/resolver_mutation_configure.go @@ -13,18 +13,21 @@ import ( "github.com/stashapp/stash/pkg/utils" ) +var ErrOverriddenConfig = errors.New("cannot set overridden value") + func (r *mutationResolver) Setup(ctx context.Context, input models.SetupInput) (bool, error) { - err := manager.GetInstance().Setup(input) + err := manager.GetInstance().Setup(ctx, input) return err == nil, err } func (r *mutationResolver) Migrate(ctx context.Context, input models.MigrateInput) (bool, error) { - err := 
manager.GetInstance().Migrate(input) + err := manager.GetInstance().Migrate(ctx, input) return err == nil, err } func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.ConfigGeneralInput) (*models.ConfigGeneralResult, error) { c := config.GetInstance() + existingPaths := c.GetStashPaths() if len(input.Stashes) > 0 { for _, s := range input.Stashes { @@ -46,7 +49,20 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co c.Set(config.Stash, input.Stashes) } - if input.DatabasePath != nil { + checkConfigOverride := func(key string) error { + if c.HasOverride(key) { + return fmt.Errorf("%w: %s", ErrOverriddenConfig, key) + } + + return nil + } + + existingDBPath := c.GetDatabasePath() + if input.DatabasePath != nil && existingDBPath != *input.DatabasePath { + if err := checkConfigOverride(config.Database); err != nil { + return makeConfigGeneralResult(), err + } + ext := filepath.Ext(*input.DatabasePath) if ext != ".db" && ext != ".sqlite" && ext != ".sqlite3" { return makeConfigGeneralResult(), fmt.Errorf("invalid database path, use extension db, sqlite, or sqlite3") @@ -54,14 +70,24 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co c.Set(config.Database, input.DatabasePath) } - if input.GeneratedPath != nil { + existingGeneratedPath := c.GetGeneratedPath() + if input.GeneratedPath != nil && existingGeneratedPath != *input.GeneratedPath { + if err := checkConfigOverride(config.Generated); err != nil { + return makeConfigGeneralResult(), err + } + if err := utils.EnsureDir(*input.GeneratedPath); err != nil { return makeConfigGeneralResult(), err } c.Set(config.Generated, input.GeneratedPath) } - if input.MetadataPath != nil { + existingMetadataPath := c.GetMetadataPath() + if input.MetadataPath != nil && existingMetadataPath != *input.MetadataPath { + if err := checkConfigOverride(config.Metadata); err != nil { + return makeConfigGeneralResult(), err + } + if *input.MetadataPath != "" 
{ if err := utils.EnsureDir(*input.MetadataPath); err != nil { return makeConfigGeneralResult(), err @@ -70,7 +96,12 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co c.Set(config.Metadata, input.MetadataPath) } - if input.CachePath != nil { + existingCachePath := c.GetCachePath() + if input.CachePath != nil && existingCachePath != *input.CachePath { + if err := checkConfigOverride(config.Metadata); err != nil { + return makeConfigGeneralResult(), err + } + if *input.CachePath != "" { if err := utils.EnsureDir(*input.CachePath); err != nil { return makeConfigGeneralResult(), err @@ -225,17 +256,21 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.ConfigInterfaceInput) (*models.ConfigInterfaceResult, error) { c := config.GetInstance() + + setBool := func(key string, v *bool) { + if v != nil { + c.Set(key, *v) + } + } + if input.MenuItems != nil { c.Set(config.MenuItems, input.MenuItems) } - if input.SoundOnPreview != nil { - c.Set(config.SoundOnPreview, *input.SoundOnPreview) - } + setBool(config.SoundOnPreview, input.SoundOnPreview) + setBool(config.WallShowTitle, input.WallShowTitle) - if input.WallShowTitle != nil { - c.Set(config.WallShowTitle, *input.WallShowTitle) - } + setBool(config.NoBrowser, input.NoBrowser) if input.WallPlayback != nil { c.Set(config.WallPlayback, *input.WallPlayback) @@ -245,13 +280,10 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models. 
c.Set(config.MaximumLoopDuration, *input.MaximumLoopDuration) } - if input.AutostartVideo != nil { - c.Set(config.AutostartVideo, *input.AutostartVideo) - } - - if input.ShowStudioAsText != nil { - c.Set(config.ShowStudioAsText, *input.ShowStudioAsText) - } + setBool(config.AutostartVideo, input.AutostartVideo) + setBool(config.ShowStudioAsText, input.ShowStudioAsText) + setBool(config.AutostartVideoOnPlaySelected, input.AutostartVideoOnPlaySelected) + setBool(config.ContinuePlaylistDefault, input.ContinuePlaylistDefault) if input.Language != nil { c.Set(config.Language, *input.Language) @@ -269,8 +301,13 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models. c.SetCSS(css) - if input.CSSEnabled != nil { - c.Set(config.CSSEnabled, *input.CSSEnabled) + setBool(config.CSSEnabled, input.CSSEnabled) + + if input.DisableDropdownCreate != nil { + ddc := input.DisableDropdownCreate + setBool(config.DisableDropdownCreatePerformer, ddc.Performer) + setBool(config.DisableDropdownCreateStudio, ddc.Studio) + setBool(config.DisableDropdownCreateTag, ddc.Tag) } if input.HandyKey != nil { @@ -350,6 +387,28 @@ func (r *mutationResolver) ConfigureScraping(ctx context.Context, input models.C return makeConfigScrapingResult(), nil } +func (r *mutationResolver) ConfigureDefaults(ctx context.Context, input models.ConfigDefaultSettingsInput) (*models.ConfigDefaultSettingsResult, error) { + c := config.GetInstance() + + if input.Identify != nil { + c.Set(config.DefaultIdentifySettings, input.Identify) + } + + if input.DeleteFile != nil { + c.Set(config.DeleteFileDefault, *input.DeleteFile) + } + + if input.DeleteGenerated != nil { + c.Set(config.DeleteGeneratedDefault, *input.DeleteGenerated) + } + + if err := c.Write(); err != nil { + return makeConfigDefaultsResult(), err + } + + return makeConfigDefaultsResult(), nil +} + func (r *mutationResolver) GenerateAPIKey(ctx context.Context, input models.GenerateAPIKeyInput) (string, error) { c := 
config.GetInstance() diff --git a/pkg/api/resolver_mutation_gallery.go b/pkg/api/resolver_mutation_gallery.go index 8b4259782..c9de9a940 100644 --- a/pkg/api/resolver_mutation_gallery.go +++ b/pkg/api/resolver_mutation_gallery.go @@ -441,7 +441,7 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall return err } - if len(imgGalleries) == 0 { + if len(imgGalleries) == 1 { if err := iqb.Destroy(img.ID); err != nil { return err } @@ -465,13 +465,15 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall // if delete file is true, then delete the file as well // if it fails, just log a message if input.DeleteFile != nil && *input.DeleteFile { - for _, gallery := range galleries { - manager.DeleteGalleryFile(gallery) - } - + // #1804 - delete the image files first, since they must be removed + // before deleting a folder for _, img := range imgsToDelete { manager.DeleteImageFile(img) } + + for _, gallery := range galleries { + manager.DeleteGalleryFile(gallery) + } } // if delete generated is true, then delete the generated files diff --git a/pkg/api/resolver_mutation_metadata.go b/pkg/api/resolver_mutation_metadata.go index aee27005e..ff49347a3 100644 --- a/pkg/api/resolver_mutation_metadata.go +++ b/pkg/api/resolver_mutation_metadata.go @@ -90,6 +90,13 @@ func (r *mutationResolver) MetadataAutoTag(ctx context.Context, input models.Aut return strconv.Itoa(jobID), nil } +func (r *mutationResolver) MetadataIdentify(ctx context.Context, input models.IdentifyMetadataInput) (string, error) { + t := manager.CreateIdentifyJob(input) + jobID := manager.GetInstance().JobManager.Add(ctx, "Identifying...", t) + + return strconv.Itoa(jobID), nil +} + func (r *mutationResolver) MetadataClean(ctx context.Context, input models.CleanMetadataInput) (string, error) { jobID := manager.GetInstance().Clean(ctx, input) return strconv.Itoa(jobID), nil diff --git a/pkg/api/resolver_mutation_movie.go b/pkg/api/resolver_mutation_movie.go 
index e1c63974f..88769f6d3 100644 --- a/pkg/api/resolver_mutation_movie.go +++ b/pkg/api/resolver_mutation_movie.go @@ -38,7 +38,7 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr // Process the base 64 encoded image string if input.FrontImage != nil { - frontimageData, err = utils.ProcessImageInput(*input.FrontImage) + frontimageData, err = utils.ProcessImageInput(ctx, *input.FrontImage) if err != nil { return nil, err } @@ -46,7 +46,7 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr // Process the base 64 encoded image string if input.BackImage != nil { - backimageData, err = utils.ProcessImageInput(*input.BackImage) + backimageData, err = utils.ProcessImageInput(ctx, *input.BackImage) if err != nil { return nil, err } @@ -139,7 +139,7 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp var frontimageData []byte frontImageIncluded := translator.hasField("front_image") if input.FrontImage != nil { - frontimageData, err = utils.ProcessImageInput(*input.FrontImage) + frontimageData, err = utils.ProcessImageInput(ctx, *input.FrontImage) if err != nil { return nil, err } @@ -147,7 +147,7 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp backImageIncluded := translator.hasField("back_image") var backimageData []byte if input.BackImage != nil { - backimageData, err = utils.ProcessImageInput(*input.BackImage) + backimageData, err = utils.ProcessImageInput(ctx, *input.BackImage) if err != nil { return nil, err } @@ -202,7 +202,7 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp // HACK - if front image is null and back image is not null, then set the front image // to the default image since we can't have a null front image and a non-null back image if frontimageData == nil && backimageData != nil { - frontimageData, _ = utils.ProcessImageInput(models.DefaultMovieImage) + frontimageData, _ = 
utils.ProcessImageInput(ctx, models.DefaultMovieImage) } if err := qb.UpdateImages(movie.ID, frontimageData, backimageData); err != nil { diff --git a/pkg/api/resolver_mutation_performer.go b/pkg/api/resolver_mutation_performer.go index 66c068aa5..90e33b78b 100644 --- a/pkg/api/resolver_mutation_performer.go +++ b/pkg/api/resolver_mutation_performer.go @@ -32,7 +32,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per var err error if input.Image != nil { - imageData, err = utils.ProcessImageInput(*input.Image) + imageData, err = utils.ProcessImageInput(ctx, *input.Image) } if err != nil { @@ -178,7 +178,7 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per var err error imageIncluded := translator.hasField("image") if input.Image != nil { - imageData, err = utils.ProcessImageInput(*input.Image) + imageData, err = utils.ProcessImageInput(ctx, *input.Image) if err != nil { return nil, err } diff --git a/pkg/api/resolver_mutation_scene.go b/pkg/api/resolver_mutation_scene.go index 090599665..411867fc2 100644 --- a/pkg/api/resolver_mutation_scene.go +++ b/pkg/api/resolver_mutation_scene.go @@ -11,6 +11,7 @@ import ( "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin" + "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/utils" ) @@ -32,7 +33,7 @@ func (r *mutationResolver) SceneUpdate(ctx context.Context, input models.SceneUp // Start the transaction and save the scene if err := r.withTxn(ctx, func(repo models.Repository) error { - ret, err = r.sceneUpdate(input, translator, repo) + ret, err = r.sceneUpdate(ctx, input, translator, repo) return err }); err != nil { return nil, err @@ -52,7 +53,7 @@ func (r *mutationResolver) ScenesUpdate(ctx context.Context, input []*models.Sce inputMap: inputMaps[i], } - thisScene, err := r.sceneUpdate(*scene, translator, repo) + thisScene, err := r.sceneUpdate(ctx, *scene, 
translator, repo) ret = append(ret, thisScene) if err != nil { @@ -85,7 +86,7 @@ func (r *mutationResolver) ScenesUpdate(ctx context.Context, input []*models.Sce return newRet, nil } -func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, translator changesetTranslator, repo models.Repository) (*models.Scene, error) { +func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUpdateInput, translator changesetTranslator, repo models.Repository) (*models.Scene, error) { // Populate scene from the input sceneID, err := strconv.Atoi(input.ID) if err != nil { @@ -110,7 +111,7 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, translator if input.CoverImage != nil && *input.CoverImage != "" { var err error - coverImageData, err = utils.ProcessImageInput(*input.CoverImage) + coverImageData, err = utils.ProcessImageInput(ctx, *input.CoverImage) if err != nil { return nil, err } @@ -119,7 +120,7 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, translator } qb := repo.Scene() - scene, err := qb.Update(updatedScene) + s, err := qb.Update(updatedScene) if err != nil { return nil, err } @@ -169,13 +170,13 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, translator // only update the cover image if provided and everything else was successful if coverImageData != nil { - err = manager.SetSceneScreenshot(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()), coverImageData) + err = scene.SetScreenshot(manager.GetInstance().Paths, s.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()), coverImageData) if err != nil { return nil, err } } - return scene, nil + return s, nil } func (r *mutationResolver) updateScenePerformers(qb models.SceneReaderWriter, sceneID int, performerIDs []string) error { diff --git a/pkg/api/resolver_mutation_studio.go b/pkg/api/resolver_mutation_studio.go index bdca6059f..5353c7594 100644 --- a/pkg/api/resolver_mutation_studio.go +++ 
b/pkg/api/resolver_mutation_studio.go @@ -3,10 +3,11 @@ package api import ( "context" "database/sql" - "github.com/stashapp/stash/pkg/studio" "strconv" "time" + "github.com/stashapp/stash/pkg/studio" + "github.com/stashapp/stash/pkg/manager" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin" @@ -33,7 +34,7 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio // Process the base 64 encoded image string if input.Image != nil { - imageData, err = utils.ProcessImageInput(*input.Image) + imageData, err = utils.ProcessImageInput(ctx, *input.Image) if err != nil { return nil, err } @@ -129,7 +130,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio imageIncluded := translator.hasField("image") if input.Image != nil { var err error - imageData, err = utils.ProcessImageInput(*input.Image) + imageData, err = utils.ProcessImageInput(ctx, *input.Image) if err != nil { return nil, err } diff --git a/pkg/api/resolver_mutation_tag.go b/pkg/api/resolver_mutation_tag.go index d1dc230e4..23fefaa88 100644 --- a/pkg/api/resolver_mutation_tag.go +++ b/pkg/api/resolver_mutation_tag.go @@ -6,6 +6,7 @@ import ( "strconv" "time" + "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin" "github.com/stashapp/stash/pkg/tag" @@ -36,14 +37,31 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate var err error if input.Image != nil { - imageData, err = utils.ProcessImageInput(*input.Image) + imageData, err = utils.ProcessImageInput(ctx, *input.Image) if err != nil { return nil, err } } - // Start the transaction and save the t + var parentIDs []int + var childIDs []int + + if len(input.ParentIds) > 0 { + parentIDs, err = utils.StringSliceToIntSlice(input.ParentIds) + if err != nil { + return nil, err + } + } + + if len(input.ChildIds) > 0 { + childIDs, err = utils.StringSliceToIntSlice(input.ChildIds) + if err != 
nil { + return nil, err + } + } + + // Start the transaction and save the tag var t *models.Tag if err := r.withTxn(ctx, func(repo models.Repository) error { qb := repo.Tag() @@ -75,24 +93,22 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate } } - if input.ParentIds != nil && len(input.ParentIds) > 0 { - ids, err := utils.StringSliceToIntSlice(input.ParentIds) - if err != nil { - return err - } - - if err := qb.UpdateParentTags(t.ID, ids); err != nil { + if len(parentIDs) > 0 { + if err := qb.UpdateParentTags(t.ID, parentIDs); err != nil { return err } } - if input.ChildIds != nil && len(input.ChildIds) > 0 { - ids, err := utils.StringSliceToIntSlice(input.ChildIds) - if err != nil { + if len(childIDs) > 0 { + if err := qb.UpdateChildTags(t.ID, childIDs); err != nil { return err } + } - if err := qb.UpdateChildTags(t.ID, ids); err != nil { + // FIXME: This should be called before any changes are made, but + // requires a rewrite of ValidateHierarchy. + if len(parentIDs) > 0 || len(childIDs) > 0 { + if err := tag.ValidateHierarchy(t, parentIDs, childIDs, qb); err != nil { return err } } @@ -121,13 +137,30 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate imageIncluded := translator.hasField("image") if input.Image != nil { - imageData, err = utils.ProcessImageInput(*input.Image) + imageData, err = utils.ProcessImageInput(ctx, *input.Image) if err != nil { return nil, err } } + var parentIDs []int + var childIDs []int + + if translator.hasField("parent_ids") { + parentIDs, err = utils.StringSliceToIntSlice(input.ParentIds) + if err != nil { + return nil, err + } + } + + if translator.hasField("child_ids") { + childIDs, err = utils.StringSliceToIntSlice(input.ChildIds) + if err != nil { + return nil, err + } + } + // Start the transaction and save the tag var t *models.Tag if err := r.withTxn(ctx, func(repo models.Repository) error { @@ -183,29 +216,6 @@ func (r *mutationResolver) TagUpdate(ctx 
context.Context, input models.TagUpdate } } - var parentIDs []int - var childIDs []int - - if translator.hasField("parent_ids") { - parentIDs, err = utils.StringSliceToIntSlice(input.ParentIds) - if err != nil { - return err - } - } - - if translator.hasField("child_ids") { - childIDs, err = utils.StringSliceToIntSlice(input.ChildIds) - if err != nil { - return err - } - } - - if parentIDs != nil || childIDs != nil { - if err := tag.EnsureUniqueHierarchy(tagID, parentIDs, childIDs, qb); err != nil { - return err - } - } - if parentIDs != nil { if err := qb.UpdateParentTags(tagID, parentIDs); err != nil { return err @@ -218,6 +228,15 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate } } + // FIXME: This should be called before any changes are made, but + // requires a rewrite of ValidateHierarchy. + if parentIDs != nil || childIDs != nil { + if err := tag.ValidateHierarchy(t, parentIDs, childIDs, qb); err != nil { + logger.Errorf("Error saving tag: %s", err) + return err + } + } + return nil }); err != nil { return nil, err @@ -317,6 +336,12 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input models.TagsMerge return err } + err = tag.ValidateHierarchy(t, parents, children, qb) + if err != nil { + logger.Errorf("Error merging tag: %s", err) + return err + } + return nil }); err != nil { return nil, err diff --git a/pkg/api/resolver_query_configuration.go b/pkg/api/resolver_query_configuration.go index 283148113..25a43275b 100644 --- a/pkg/api/resolver_query_configuration.go +++ b/pkg/api/resolver_query_configuration.go @@ -6,23 +6,26 @@ import ( "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" + "golang.org/x/text/collate" ) func (r *queryResolver) Configuration(ctx context.Context) (*models.ConfigResult, error) { return makeConfigResult(), nil } -func (r *queryResolver) Directory(ctx context.Context, path *string) (*models.Directory, error) { 
+func (r *queryResolver) Directory(ctx context.Context, path, locale *string) (*models.Directory, error) { directory := &models.Directory{} var err error + col := newCollator(locale, collate.IgnoreCase, collate.Numeric) + var dirPath = "" if path != nil { dirPath = *path } currentDir := utils.GetDir(dirPath) - directories, err := utils.ListDir(currentDir) + directories, err := utils.ListDir(col, currentDir) if err != nil { return directory, err } @@ -40,6 +43,7 @@ func makeConfigResult() *models.ConfigResult { Interface: makeConfigInterfaceResult(), Dlna: makeConfigDLNAResult(), Scraping: makeConfigScrapingResult(), + Defaults: makeConfigDefaultsResult(), } } @@ -60,7 +64,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult { DatabasePath: config.GetDatabasePath(), GeneratedPath: config.GetGeneratedPath(), MetadataPath: config.GetMetadataPath(), - ConfigFilePath: config.GetConfigFilePath(), + ConfigFilePath: config.GetConfigFile(), ScrapersPath: config.GetScrapersPath(), CachePath: config.GetCachePath(), CalculateMd5: config.IsCalculateMD5(), @@ -104,8 +108,11 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult { soundOnPreview := config.GetSoundOnPreview() wallShowTitle := config.GetWallShowTitle() wallPlayback := config.GetWallPlayback() + noBrowser := config.GetNoBrowser() maximumLoopDuration := config.GetMaximumLoopDuration() autostartVideo := config.GetAutostartVideo() + autostartVideoOnPlaySelected := config.GetAutostartVideoOnPlaySelected() + continuePlaylistDefault := config.GetContinuePlaylistDefault() showStudioAsText := config.GetShowStudioAsText() css := config.GetCSS() cssEnabled := config.GetCSSEnabled() @@ -115,19 +122,23 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult { scriptOffset := config.GetFunscriptOffset() return &models.ConfigInterfaceResult{ - MenuItems: menuItems, - SoundOnPreview: &soundOnPreview, - WallShowTitle: &wallShowTitle, - WallPlayback: &wallPlayback, - MaximumLoopDuration: 
&maximumLoopDuration, - AutostartVideo: &autostartVideo, - ShowStudioAsText: &showStudioAsText, - CSS: &css, - CSSEnabled: &cssEnabled, - Language: &language, - SlideshowDelay: &slideshowDelay, - HandyKey: &handyKey, - FunscriptOffset: &scriptOffset, + MenuItems: menuItems, + SoundOnPreview: &soundOnPreview, + WallShowTitle: &wallShowTitle, + WallPlayback: &wallPlayback, + MaximumLoopDuration: &maximumLoopDuration, + NoBrowser: &noBrowser, + AutostartVideo: &autostartVideo, + ShowStudioAsText: &showStudioAsText, + AutostartVideoOnPlaySelected: &autostartVideoOnPlaySelected, + ContinuePlaylistDefault: &continuePlaylistDefault, + CSS: &css, + CSSEnabled: &cssEnabled, + Language: &language, + SlideshowDelay: &slideshowDelay, + DisabledDropdownCreate: config.GetDisableDropdownCreate(), + HandyKey: &handyKey, + FunscriptOffset: &scriptOffset, } } @@ -155,3 +166,15 @@ func makeConfigScrapingResult() *models.ConfigScrapingResult { ExcludeTagPatterns: config.GetScraperExcludeTagPatterns(), } } + +func makeConfigDefaultsResult() *models.ConfigDefaultSettingsResult { + config := config.GetInstance() + deleteFileDefault := config.GetDeleteFileDefault() + deleteGeneratedDefault := config.GetDeleteGeneratedDefault() + + return &models.ConfigDefaultSettingsResult{ + Identify: config.GetDefaultIdentifySettings(), + DeleteFile: &deleteFileDefault, + DeleteGenerated: &deleteGeneratedDefault, + } +} diff --git a/pkg/api/resolver_query_find_image.go b/pkg/api/resolver_query_find_image.go index cd6cbf94c..5de841454 100644 --- a/pkg/api/resolver_query_find_image.go +++ b/pkg/api/resolver_query_find_image.go @@ -4,7 +4,9 @@ import ( "context" "strconv" + "github.com/99designs/gqlgen/graphql" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" ) func (r *queryResolver) FindImage(ctx context.Context, id *string, checksum *string) (*models.Image, error) { @@ -39,14 +41,32 @@ func (r *queryResolver) FindImage(ctx context.Context, id *string, checksum *str func (r 
*queryResolver) FindImages(ctx context.Context, imageFilter *models.ImageFilterType, imageIds []int, filter *models.FindFilterType) (ret *models.FindImagesResultType, err error) { if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { qb := repo.Image() - images, total, err := qb.Query(imageFilter, filter) + + fields := graphql.CollectAllFields(ctx) + + result, err := qb.Query(models.ImageQueryOptions{ + QueryOptions: models.QueryOptions{ + FindFilter: filter, + Count: utils.StrInclude(fields, "count"), + }, + ImageFilter: imageFilter, + Megapixels: utils.StrInclude(fields, "megapixels"), + TotalSize: utils.StrInclude(fields, "filesize"), + }) + if err != nil { + return err + } + + images, err := result.Resolve() if err != nil { return err } ret = &models.FindImagesResultType{ - Count: total, - Images: images, + Count: result.Count, + Images: images, + Megapixels: result.Megapixels, + Filesize: result.TotalSize, } return nil diff --git a/pkg/api/resolver_query_find_scene.go b/pkg/api/resolver_query_find_scene.go index b55839dc8..3b789ea32 100644 --- a/pkg/api/resolver_query_find_scene.go +++ b/pkg/api/resolver_query_find_scene.go @@ -4,8 +4,10 @@ import ( "context" "strconv" + "github.com/99designs/gqlgen/graphql" "github.com/stashapp/stash/pkg/manager" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" ) func (r *queryResolver) FindScene(ctx context.Context, id *string, checksum *string) (*models.Scene, error) { @@ -65,16 +67,34 @@ func (r *queryResolver) FindSceneByHash(ctx context.Context, input models.SceneH func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.SceneFilterType, sceneIDs []int, filter *models.FindFilterType) (ret *models.FindScenesResultType, err error) { if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { var scenes []*models.Scene - var total int var err error + fields := graphql.CollectAllFields(ctx) + result := &models.SceneQueryResult{} + if len(sceneIDs) 
> 0 { scenes, err = repo.Scene().FindMany(sceneIDs) if err == nil { - total = len(scenes) + result.Count = len(scenes) + for _, s := range scenes { + result.TotalDuration += s.Duration.Float64 + size, _ := strconv.ParseFloat(s.Size.String, 64) + result.TotalSize += size + } } } else { - scenes, total, err = repo.Scene().Query(sceneFilter, filter) + result, err = repo.Scene().Query(models.SceneQueryOptions{ + QueryOptions: models.QueryOptions{ + FindFilter: filter, + Count: utils.StrInclude(fields, "count"), + }, + SceneFilter: sceneFilter, + TotalDuration: utils.StrInclude(fields, "duration"), + TotalSize: utils.StrInclude(fields, "filesize"), + }) + if err == nil { + scenes, err = result.Resolve() + } } if err != nil { @@ -82,8 +102,10 @@ func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.Scen } ret = &models.FindScenesResultType{ - Count: total, - Scenes: scenes, + Count: result.Count, + Scenes: scenes, + Duration: result.TotalDuration, + Filesize: result.TotalSize, } return nil @@ -114,14 +136,31 @@ func (r *queryResolver) FindScenesByPathRegex(ctx context.Context, filter *model queryFilter.Q = nil } - scenes, total, err := repo.Scene().Query(sceneFilter, queryFilter) + fields := graphql.CollectAllFields(ctx) + + result, err := repo.Scene().Query(models.SceneQueryOptions{ + QueryOptions: models.QueryOptions{ + FindFilter: queryFilter, + Count: utils.StrInclude(fields, "count"), + }, + SceneFilter: sceneFilter, + TotalDuration: utils.StrInclude(fields, "duration"), + TotalSize: utils.StrInclude(fields, "filesize"), + }) + if err != nil { + return err + } + + scenes, err := result.Resolve() if err != nil { return err } ret = &models.FindScenesResultType{ - Count: total, - Scenes: scenes, + Count: result.Count, + Scenes: scenes, + Duration: result.TotalDuration, + Filesize: result.TotalSize, } return nil diff --git a/pkg/api/resolver_query_scraper.go b/pkg/api/resolver_query_scraper.go index 181363e24..6bf542730 100644 --- 
a/pkg/api/resolver_query_scraper.go +++ b/pkg/api/resolver_query_scraper.go @@ -123,7 +123,7 @@ func (r *queryResolver) QueryStashBoxScene(ctx context.Context, input models.Sta } if input.Q != nil { - return client.QueryStashBoxScene(*input.Q) + return client.QueryStashBoxScene(ctx, *input.Q) } return nil, nil @@ -164,18 +164,19 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.Scr var singleScene *models.ScrapedScene var err error - if input.SceneID != nil { + switch { + case input.SceneID != nil: var sceneID int sceneID, err = strconv.Atoi(*input.SceneID) if err != nil { return nil, err } singleScene, err = manager.GetInstance().ScraperCache.ScrapeScene(*source.ScraperID, sceneID) - } else if input.SceneInput != nil { + case input.SceneInput != nil: singleScene, err = manager.GetInstance().ScraperCache.ScrapeSceneFragment(*source.ScraperID, *input.SceneInput) - } else if input.Query != nil { + case input.Query != nil: return manager.GetInstance().ScraperCache.ScrapeSceneQuery(*source.ScraperID, *input.Query) - } else { + default: err = errors.New("scene_id, scene_input or query must be set") } @@ -197,7 +198,7 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.Scr if input.SceneID != nil { return client.FindStashBoxScenesByFingerprintsFlat([]string{*input.SceneID}) } else if input.Query != nil { - return client.QueryStashBoxScene(*input.Query) + return client.QueryStashBoxScene(ctx, *input.Query) } return nil, errors.New("scene_id or query must be set") @@ -208,7 +209,7 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.Scr func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeMultiScenesInput) ([][]*models.ScrapedScene, error) { if source.ScraperID != nil { - return nil, errors.New("not implemented") + return nil, ErrNotImplemented } else if source.StashBoxIndex != nil { client, err := 
r.getStashBoxClient(*source.StashBoxIndex) if err != nil { @@ -240,7 +241,7 @@ func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models return manager.GetInstance().ScraperCache.ScrapePerformerList(*source.ScraperID, *input.Query) } - return nil, errors.New("not implemented") + return nil, ErrNotImplemented } else if source.StashBoxIndex != nil { client, err := r.getStashBoxClient(*source.StashBoxIndex) if err != nil { @@ -248,12 +249,13 @@ func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models } var ret []*models.StashBoxPerformerQueryResult - if input.PerformerID != nil { + switch { + case input.PerformerID != nil: ret, err = client.FindStashBoxPerformersByNames([]string{*input.PerformerID}) - } else if input.Query != nil { + case input.Query != nil: ret, err = client.QueryStashBoxPerformer(*input.Query) - } else { - return nil, errors.New("not implemented") + default: + return nil, ErrNotImplemented } if err != nil { @@ -272,7 +274,7 @@ func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models func (r *queryResolver) ScrapeMultiPerformers(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeMultiPerformersInput) ([][]*models.ScrapedPerformer, error) { if source.ScraperID != nil { - return nil, errors.New("not implemented") + return nil, ErrNotImplemented } else if source.StashBoxIndex != nil { client, err := r.getStashBoxClient(*source.StashBoxIndex) if err != nil { @@ -290,17 +292,18 @@ func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source models.S var singleGallery *models.ScrapedGallery var err error - if input.GalleryID != nil { + switch { + case input.GalleryID != nil: var galleryID int galleryID, err = strconv.Atoi(*input.GalleryID) if err != nil { return nil, err } singleGallery, err = manager.GetInstance().ScraperCache.ScrapeGallery(*source.ScraperID, galleryID) - } else if input.GalleryInput != nil { + case input.GalleryInput != nil: 
singleGallery, err = manager.GetInstance().ScraperCache.ScrapeGalleryFragment(*source.ScraperID, *input.GalleryInput) - } else { - return nil, errors.New("not implemented") + default: + return nil, ErrNotImplemented } if err != nil { @@ -313,12 +316,12 @@ func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source models.S return nil, nil } else if source.StashBoxIndex != nil { - return nil, errors.New("not supported") + return nil, ErrNotSupported } return nil, errors.New("scraper_id must be set") } func (r *queryResolver) ScrapeSingleMovie(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleMovieInput) ([]*models.ScrapedMovie, error) { - return nil, errors.New("not supported") + return nil, ErrNotSupported } diff --git a/pkg/api/resolver_subscription_logging.go b/pkg/api/resolver_subscription_logging.go index 6ef8e4109..db6b9c8b2 100644 --- a/pkg/api/resolver_subscription_logging.go +++ b/pkg/api/resolver_subscription_logging.go @@ -8,20 +8,22 @@ import ( ) func getLogLevel(logType string) models.LogLevel { - if logType == "progress" { + switch logType { + case "progress": return models.LogLevelProgress - } else if logType == "debug" { + case "trace": + return models.LogLevelTrace + case "debug": return models.LogLevelDebug - } else if logType == "info" { + case "info": return models.LogLevelInfo - } else if logType == "warn" { + case "warn": return models.LogLevelWarning - } else if logType == "error" { + case "error": return models.LogLevelError + default: + return models.LogLevelDebug } - - // default to debug - return models.LogLevelDebug } func logEntriesFromLogItems(logItems []logger.LogItem) []*models.LogEntry { diff --git a/pkg/api/routes_image.go b/pkg/api/routes_image.go index 9cdaa6653..092d77668 100644 --- a/pkg/api/routes_image.go +++ b/pkg/api/routes_image.go @@ -43,7 +43,7 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) { if exists { http.ServeFile(w, r, filepath) } else { - encoder 
:= image.NewThumbnailEncoder(manager.GetInstance().FFMPEGPath) + encoder := image.NewThumbnailEncoder(manager.GetInstance().FFMPEG) data, err := encoder.GetThumbnail(img, models.DefaultGthumbWidth) if err != nil { logger.Errorf("error generating thumbnail for image: %s", err.Error()) diff --git a/pkg/api/routes_scene.go b/pkg/api/routes_scene.go index a029b79a9..56071e27c 100644 --- a/pkg/api/routes_scene.go +++ b/pkg/api/routes_scene.go @@ -57,7 +57,8 @@ func getSceneFileContainer(scene *models.Scene) ffmpeg.Container { container = ffmpeg.Container(scene.Format.String) } else { // container isn't in the DB // shouldn't happen, fallback to ffprobe - tmpVideoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path, false) + ffprobe := manager.GetInstance().FFProbe + tmpVideoFile, err := ffprobe.NewVideoFile(scene.Path, false) if err != nil { logger.Errorf("[transcode] error reading video file: %v", err) return ffmpeg.Container("") @@ -105,7 +106,8 @@ func (rs sceneRoutes) StreamMp4(w http.ResponseWriter, r *http.Request) { func (rs sceneRoutes) StreamHLS(w http.ResponseWriter, r *http.Request) { scene := r.Context().Value(sceneKey).(*models.Scene) - videoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path, false) + ffprobe := manager.GetInstance().FFProbe + videoFile, err := ffprobe.NewVideoFile(scene.Path, false) if err != nil { logger.Errorf("[stream] error reading video file: %v", err) return @@ -142,8 +144,8 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, vi scene := r.Context().Value(sceneKey).(*models.Scene) // needs to be transcoded - - videoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path, false) + ffprobe := manager.GetInstance().FFProbe + videoFile, err := ffprobe.NewVideoFile(scene.Path, false) if err != nil { logger.Errorf("[stream] error reading video file: %v", err) return @@ -171,7 +173,7 @@ func (rs sceneRoutes) streamTranscode(w 
http.ResponseWriter, r *http.Request, vi options.MaxTranscodeSize = models.StreamingResolutionEnum(requestedSize) } - encoder := ffmpeg.NewEncoder(manager.GetInstance().FFMPEGPath) + encoder := manager.GetInstance().FFMPEG stream, err = encoder.GetTranscodeStream(options) if err != nil { diff --git a/pkg/api/routes_studio.go b/pkg/api/routes_studio.go index 67cb862dc..2cea188ec 100644 --- a/pkg/api/routes_studio.go +++ b/pkg/api/routes_studio.go @@ -2,8 +2,10 @@ package api import ( "context" + "errors" "net/http" "strconv" + "syscall" "github.com/go-chi/chi" "github.com/stashapp/stash/pkg/logger" @@ -47,7 +49,12 @@ func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) { } if err := utils.ServeImage(image, w, r); err != nil { - logger.Warnf("error serving studio image: %v", err) + // Broken pipe errors are common when serving images and the remote + // connection closes the connection. Filter them out of the error + // messages, as they are benign. + if !errors.Is(err, syscall.EPIPE) { + logger.Warnf("cannot serve studio image: %v", err) + } } } diff --git a/pkg/api/server.go b/pkg/api/server.go index 58310b4e2..0cde14fb4 100644 --- a/pkg/api/server.go +++ b/pkg/api/server.go @@ -23,6 +23,7 @@ import ( "github.com/go-chi/chi" "github.com/go-chi/chi/middleware" "github.com/gorilla/websocket" + "github.com/pkg/browser" "github.com/rs/cors" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/manager" @@ -34,6 +35,7 @@ import ( var version string var buildstamp string var githash string +var officialBuild string func Start(uiBox embed.FS, loginUIBox embed.FS) { initialiseImages() @@ -229,7 +231,7 @@ func Start(uiBox embed.FS, loginUIBox embed.FS) { tlsConfig, err := makeTLSConfig(c) if err != nil { // assume we don't want to start with a broken TLS configuration - panic(fmt.Errorf("error loading TLS config: %s", err.Error())) + panic(fmt.Errorf("error loading TLS config: %v", err)) } server := &http.Server{ @@ -240,14 +242,28 @@ func 
Start(uiBox embed.FS, loginUIBox embed.FS) { go func() { printVersion() - printLatestVersion() + printLatestVersion(context.TODO()) logger.Infof("stash is listening on " + address) + if tlsConfig != nil { + displayAddress = "https://" + displayAddress + "/" + } else { + displayAddress = "http://" + displayAddress + "/" + } + + // This can be done before actually starting the server, as modern browsers will + // automatically reload the page if a local port is closed at page load and then opened. + if !c.GetNoBrowser() && manager.GetInstance().IsDesktop() { + err = browser.OpenURL(displayAddress) + if err != nil { + logger.Error("Could not open browser: " + err.Error()) + } + } if tlsConfig != nil { - logger.Infof("stash is running at https://" + displayAddress + "/") + logger.Infof("stash is running at " + displayAddress) logger.Error(server.ListenAndServeTLS("", "")) } else { - logger.Infof("stash is running at http://" + displayAddress + "/") + logger.Infof("stash is running at " + displayAddress) logger.Error(server.ListenAndServe()) } }() @@ -255,12 +271,21 @@ func Start(uiBox embed.FS, loginUIBox embed.FS) { func printVersion() { versionString := githash + if IsOfficialBuild() { + versionString += " - Official Build" + } else { + versionString += " - Unofficial Build" + } if version != "" { versionString = version + " (" + versionString + ")" } fmt.Printf("stash version: %s - %s\n", versionString, buildstamp) } +func IsOfficialBuild() bool { + return officialBuild == "true" +} + func GetVersion() (string, string, string) { return version, githash, buildstamp } @@ -296,7 +321,7 @@ func makeTLSConfig(c *config.Instance) (*tls.Config, error) { certs := make([]tls.Certificate, 1) certs[0], err = tls.X509KeyPair(cert, key) if err != nil { - return nil, fmt.Errorf("error parsing key pair: %s", err.Error()) + return nil, fmt.Errorf("error parsing key pair: %v", err) } tlsConfig := &tls.Config{ Certificates: certs, @@ -327,7 +352,7 @@ func BaseURLMiddleware(next 
http.Handler) http.Handler { port := "" forwardedPort := r.Header.Get("X-Forwarded-Port") - if forwardedPort != "" && forwardedPort != "80" && forwardedPort != "8080" { + if forwardedPort != "" && forwardedPort != "80" && forwardedPort != "8080" && forwardedPort != "443" && !strings.Contains(r.Host, ":") { port = ":" + forwardedPort } diff --git a/pkg/api/session.go b/pkg/api/session.go index 71fa15136..c31845755 100644 --- a/pkg/api/session.go +++ b/pkg/api/session.go @@ -2,6 +2,7 @@ package api import ( "embed" + "errors" "fmt" "html/template" "net/http" @@ -60,7 +61,7 @@ func handleLogin(loginUIBox embed.FS) http.HandlerFunc { } err := manager.GetInstance().SessionStore.Login(w, r) - if err == session.ErrInvalidCredentials { + if errors.Is(err, session.ErrInvalidCredentials) { // redirect back to the login page with an error redirectToLogin(loginUIBox, w, url, "Username or password is invalid") return diff --git a/pkg/autotag/gallery.go b/pkg/autotag/gallery.go index fa3ab3a84..d35b0b05f 100644 --- a/pkg/autotag/gallery.go +++ b/pkg/autotag/gallery.go @@ -1,78 +1,10 @@ package autotag import ( - "fmt" - "path/filepath" - "strings" - "github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/models" ) -func galleryPathsFilter(paths []string) *models.GalleryFilterType { - if paths == nil { - return nil - } - - sep := string(filepath.Separator) - - var ret *models.GalleryFilterType - var or *models.GalleryFilterType - for _, p := range paths { - newOr := &models.GalleryFilterType{} - if or != nil { - or.Or = newOr - } else { - ret = newOr - } - - or = newOr - - if !strings.HasSuffix(p, sep) { - p = p + sep - } - - or.Path = &models.StringCriterionInput{ - Modifier: models.CriterionModifierEquals, - Value: p + "%", - } - } - - return ret -} - -func getMatchingGalleries(name string, paths []string, galleryReader models.GalleryReader) ([]*models.Gallery, error) { - regex := getPathQueryRegex(name) - organized := false - filter := models.GalleryFilterType{ 
- Path: &models.StringCriterionInput{ - Value: "(?i)" + regex, - Modifier: models.CriterionModifierMatchesRegex, - }, - Organized: &organized, - } - - filter.And = galleryPathsFilter(paths) - - pp := models.PerPageAll - gallerys, _, err := galleryReader.Query(&filter, &models.FindFilterType{ - PerPage: &pp, - }) - - if err != nil { - return nil, fmt.Errorf("error querying gallerys with regex '%s': %s", regex, err.Error()) - } - - var ret []*models.Gallery - for _, p := range gallerys { - if nameMatchesPath(name, p.Path.String) { - ret = append(ret, p) - } - } - - return ret, nil -} - func getGalleryFileTagger(s *models.Gallery) tagger { return tagger{ ID: s.ID, diff --git a/pkg/autotag/image.go b/pkg/autotag/image.go index ff5816c6f..21745897c 100644 --- a/pkg/autotag/image.go +++ b/pkg/autotag/image.go @@ -1,78 +1,10 @@ package autotag import ( - "fmt" - "path/filepath" - "strings" - "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" ) -func imagePathsFilter(paths []string) *models.ImageFilterType { - if paths == nil { - return nil - } - - sep := string(filepath.Separator) - - var ret *models.ImageFilterType - var or *models.ImageFilterType - for _, p := range paths { - newOr := &models.ImageFilterType{} - if or != nil { - or.Or = newOr - } else { - ret = newOr - } - - or = newOr - - if !strings.HasSuffix(p, sep) { - p = p + sep - } - - or.Path = &models.StringCriterionInput{ - Modifier: models.CriterionModifierEquals, - Value: p + "%", - } - } - - return ret -} - -func getMatchingImages(name string, paths []string, imageReader models.ImageReader) ([]*models.Image, error) { - regex := getPathQueryRegex(name) - organized := false - filter := models.ImageFilterType{ - Path: &models.StringCriterionInput{ - Value: "(?i)" + regex, - Modifier: models.CriterionModifierMatchesRegex, - }, - Organized: &organized, - } - - filter.And = imagePathsFilter(paths) - - pp := models.PerPageAll - images, _, err := imageReader.Query(&filter, 
&models.FindFilterType{ - PerPage: &pp, - }) - - if err != nil { - return nil, fmt.Errorf("error querying images with regex '%s': %s", regex, err.Error()) - } - - var ret []*models.Image - for _, p := range images { - if nameMatchesPath(name, p.Path) { - ret = append(ret, p) - } - } - - return ret, nil -} - func getImageFileTagger(s *models.Image) tagger { return tagger{ ID: s.ID, diff --git a/pkg/autotag/performer.go b/pkg/autotag/performer.go index bdbd497c3..77ec0f558 100644 --- a/pkg/autotag/performer.go +++ b/pkg/autotag/performer.go @@ -7,25 +7,6 @@ import ( "github.com/stashapp/stash/pkg/scene" ) -func getMatchingPerformers(path string, performerReader models.PerformerReader) ([]*models.Performer, error) { - words := getPathWords(path) - performers, err := performerReader.QueryForAutoTag(words) - - if err != nil { - return nil, err - } - - var ret []*models.Performer - for _, p := range performers { - // TODO - commenting out alias handling until both sides work correctly - if nameMatchesPath(p.Name.String, path) { // || nameMatchesPath(p.Aliases.String, path) { - ret = append(ret, p) - } - } - - return ret, nil -} - func getPerformerTagger(p *models.Performer) tagger { return tagger{ ID: p.ID, diff --git a/pkg/autotag/performer_test.go b/pkg/autotag/performer_test.go index 3e6714ccd..0dc616de5 100644 --- a/pkg/autotag/performer_test.go +++ b/pkg/autotag/performer_test.go @@ -3,8 +3,10 @@ package autotag import ( "testing" + "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/scene" "github.com/stretchr/testify/assert" ) @@ -70,7 +72,8 @@ func testPerformerScenes(t *testing.T, performerName, expectedRegex string) { PerPage: &perPage, } - mockSceneReader.On("Query", expectedSceneFilter, expectedFindFilter).Return(scenes, len(scenes), nil).Once() + mockSceneReader.On("Query", scene.QueryOptions(expectedSceneFilter, expectedFindFilter, false)). 
+ Return(mocks.SceneQueryResult(scenes, len(scenes)), nil).Once() for i := range matchingPaths { sceneID := i + 1 @@ -144,7 +147,8 @@ func testPerformerImages(t *testing.T, performerName, expectedRegex string) { PerPage: &perPage, } - mockImageReader.On("Query", expectedImageFilter, expectedFindFilter).Return(images, len(images), nil).Once() + mockImageReader.On("Query", image.QueryOptions(expectedImageFilter, expectedFindFilter, false)). + Return(mocks.ImageQueryResult(images, len(images)), nil).Once() for i := range matchingPaths { imageID := i + 1 diff --git a/pkg/autotag/scene.go b/pkg/autotag/scene.go index 272f5a9fe..aca523cb9 100644 --- a/pkg/autotag/scene.go +++ b/pkg/autotag/scene.go @@ -1,78 +1,10 @@ package autotag import ( - "fmt" - "path/filepath" - "strings" - "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scene" ) -func scenePathsFilter(paths []string) *models.SceneFilterType { - if paths == nil { - return nil - } - - sep := string(filepath.Separator) - - var ret *models.SceneFilterType - var or *models.SceneFilterType - for _, p := range paths { - newOr := &models.SceneFilterType{} - if or != nil { - or.Or = newOr - } else { - ret = newOr - } - - or = newOr - - if !strings.HasSuffix(p, sep) { - p = p + sep - } - - or.Path = &models.StringCriterionInput{ - Modifier: models.CriterionModifierEquals, - Value: p + "%", - } - } - - return ret -} - -func getMatchingScenes(name string, paths []string, sceneReader models.SceneReader) ([]*models.Scene, error) { - regex := getPathQueryRegex(name) - organized := false - filter := models.SceneFilterType{ - Path: &models.StringCriterionInput{ - Value: "(?i)" + regex, - Modifier: models.CriterionModifierMatchesRegex, - }, - Organized: &organized, - } - - filter.And = scenePathsFilter(paths) - - pp := models.PerPageAll - scenes, _, err := sceneReader.Query(&filter, &models.FindFilterType{ - PerPage: &pp, - }) - - if err != nil { - return nil, fmt.Errorf("error querying scenes with regex '%s': 
%s", regex, err.Error()) - } - - var ret []*models.Scene - for _, p := range scenes { - if nameMatchesPath(name, p.Path) { - ret = append(ret, p) - } - } - - return ret, nil -} - func getSceneFileTagger(s *models.Scene) tagger { return tagger{ ID: s.ID, diff --git a/pkg/autotag/scene_test.go b/pkg/autotag/scene_test.go index 5e5b88806..67e727a1b 100644 --- a/pkg/autotag/scene_test.go +++ b/pkg/autotag/scene_test.go @@ -67,7 +67,8 @@ func generateFalseNamePatterns(name string, separator, ext string) []string { } func generateTestPaths(testName, ext string) (scenePatterns []string, falseScenePatterns []string) { - separators := append(testSeparators, testEndSeparators...) + separators := testSeparators + separators = append(separators, testEndSeparators...) for _, separator := range separators { scenePatterns = append(scenePatterns, generateNamePatterns(testName, separator, ext)...) @@ -79,7 +80,7 @@ func generateTestPaths(testName, ext string) (scenePatterns []string, falseScene // add test cases for intra-name separators for _, separator := range testSeparators { if separator != " " { - scenePatterns = append(scenePatterns, generateNamePatterns(strings.Replace(testName, " ", separator, -1), separator, ext)...) + scenePatterns = append(scenePatterns, generateNamePatterns(strings.ReplaceAll(testName, " ", separator), separator, ext)...) } } @@ -115,7 +116,8 @@ func generateTestTable(testName, ext string) []pathTestTable { var scenePatterns []string var falseScenePatterns []string - separators := append(testSeparators, testEndSeparators...) + separators := testSeparators + separators = append(separators, testEndSeparators...) for _, separator := range separators { scenePatterns = append(scenePatterns, generateNamePatterns(testName, separator, ext)...) 
diff --git a/pkg/autotag/studio.go b/pkg/autotag/studio.go index 1634a0fed..635050df7 100644 --- a/pkg/autotag/studio.go +++ b/pkg/autotag/studio.go @@ -2,46 +2,10 @@ package autotag import ( "database/sql" + "github.com/stashapp/stash/pkg/models" ) -func getMatchingStudios(path string, reader models.StudioReader) ([]*models.Studio, error) { - words := getPathWords(path) - candidates, err := reader.QueryForAutoTag(words) - - if err != nil { - return nil, err - } - - var ret []*models.Studio - for _, c := range candidates { - matches := false - if nameMatchesPath(c.Name.String, path) { - matches = true - } - - if !matches { - aliases, err := reader.GetAliases(c.ID) - if err != nil { - return nil, err - } - - for _, alias := range aliases { - if nameMatchesPath(alias, path) { - matches = true - break - } - } - } - - if matches { - ret = append(ret, c) - } - } - - return ret, nil -} - func addSceneStudio(sceneWriter models.SceneReaderWriter, sceneID, studioID int) (bool, error) { // don't set if already set scene, err := sceneWriter.Find(sceneID) diff --git a/pkg/autotag/studio_test.go b/pkg/autotag/studio_test.go index f8c2df49e..ca6a1a9ff 100644 --- a/pkg/autotag/studio_test.go +++ b/pkg/autotag/studio_test.go @@ -3,8 +3,10 @@ package autotag import ( "testing" + "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/scene" "github.com/stretchr/testify/assert" ) @@ -111,11 +113,12 @@ func testStudioScenes(t *testing.T, tc testStudioCase) { } // if alias provided, then don't find by name - onNameQuery := mockSceneReader.On("Query", expectedSceneFilter, expectedFindFilter) + onNameQuery := mockSceneReader.On("Query", scene.QueryOptions(expectedSceneFilter, expectedFindFilter, false)) + if aliasName == "" { - onNameQuery.Return(scenes, len(scenes), nil).Once() + onNameQuery.Return(mocks.SceneQueryResult(scenes, len(scenes)), nil).Once() } else { - onNameQuery.Return(nil, 
0, nil).Once() + onNameQuery.Return(mocks.SceneQueryResult(nil, 0), nil).Once() expectedAliasFilter := &models.SceneFilterType{ Organized: &organized, @@ -125,7 +128,8 @@ func testStudioScenes(t *testing.T, tc testStudioCase) { }, } - mockSceneReader.On("Query", expectedAliasFilter, expectedFindFilter).Return(scenes, len(scenes), nil).Once() + mockSceneReader.On("Query", scene.QueryOptions(expectedAliasFilter, expectedFindFilter, false)). + Return(mocks.SceneQueryResult(scenes, len(scenes)), nil).Once() } for i := range matchingPaths { @@ -202,11 +206,11 @@ func testStudioImages(t *testing.T, tc testStudioCase) { } // if alias provided, then don't find by name - onNameQuery := mockImageReader.On("Query", expectedImageFilter, expectedFindFilter) + onNameQuery := mockImageReader.On("Query", image.QueryOptions(expectedImageFilter, expectedFindFilter, false)) if aliasName == "" { - onNameQuery.Return(images, len(images), nil).Once() + onNameQuery.Return(mocks.ImageQueryResult(images, len(images)), nil).Once() } else { - onNameQuery.Return(nil, 0, nil).Once() + onNameQuery.Return(mocks.ImageQueryResult(nil, 0), nil).Once() expectedAliasFilter := &models.ImageFilterType{ Organized: &organized, @@ -216,7 +220,8 @@ func testStudioImages(t *testing.T, tc testStudioCase) { }, } - mockImageReader.On("Query", expectedAliasFilter, expectedFindFilter).Return(images, len(images), nil).Once() + mockImageReader.On("Query", image.QueryOptions(expectedAliasFilter, expectedFindFilter, false)). 
+ Return(mocks.ImageQueryResult(images, len(images)), nil).Once() } for i := range matchingPaths { diff --git a/pkg/autotag/tag.go b/pkg/autotag/tag.go index 48de81417..78e12b766 100644 --- a/pkg/autotag/tag.go +++ b/pkg/autotag/tag.go @@ -7,42 +7,6 @@ import ( "github.com/stashapp/stash/pkg/scene" ) -func getMatchingTags(path string, tagReader models.TagReader) ([]*models.Tag, error) { - words := getPathWords(path) - tags, err := tagReader.QueryForAutoTag(words) - - if err != nil { - return nil, err - } - - var ret []*models.Tag - for _, t := range tags { - matches := false - if nameMatchesPath(t.Name, path) { - matches = true - } - - if !matches { - aliases, err := tagReader.GetAliases(t.ID) - if err != nil { - return nil, err - } - for _, alias := range aliases { - if nameMatchesPath(alias, path) { - matches = true - break - } - } - } - - if matches { - ret = append(ret, t) - } - } - - return ret, nil -} - func getTagTaggers(p *models.Tag, aliases []string) []tagger { ret := []tagger{{ ID: p.ID, diff --git a/pkg/autotag/tag_test.go b/pkg/autotag/tag_test.go index 07a85856e..3bc9c4cca 100644 --- a/pkg/autotag/tag_test.go +++ b/pkg/autotag/tag_test.go @@ -3,8 +3,10 @@ package autotag import ( "testing" + "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/scene" "github.com/stretchr/testify/assert" ) @@ -111,11 +113,11 @@ func testTagScenes(t *testing.T, tc testTagCase) { } // if alias provided, then don't find by name - onNameQuery := mockSceneReader.On("Query", expectedSceneFilter, expectedFindFilter) + onNameQuery := mockSceneReader.On("Query", scene.QueryOptions(expectedSceneFilter, expectedFindFilter, false)) if aliasName == "" { - onNameQuery.Return(scenes, len(scenes), nil).Once() + onNameQuery.Return(mocks.SceneQueryResult(scenes, len(scenes)), nil).Once() } else { - onNameQuery.Return(nil, 0, nil).Once() + onNameQuery.Return(mocks.SceneQueryResult(nil, 
0), nil).Once() expectedAliasFilter := &models.SceneFilterType{ Organized: &organized, @@ -125,7 +127,8 @@ func testTagScenes(t *testing.T, tc testTagCase) { }, } - mockSceneReader.On("Query", expectedAliasFilter, expectedFindFilter).Return(scenes, len(scenes), nil).Once() + mockSceneReader.On("Query", scene.QueryOptions(expectedAliasFilter, expectedFindFilter, false)). + Return(mocks.SceneQueryResult(scenes, len(scenes)), nil).Once() } for i := range matchingPaths { @@ -198,11 +201,11 @@ func testTagImages(t *testing.T, tc testTagCase) { } // if alias provided, then don't find by name - onNameQuery := mockImageReader.On("Query", expectedImageFilter, expectedFindFilter) + onNameQuery := mockImageReader.On("Query", image.QueryOptions(expectedImageFilter, expectedFindFilter, false)) if aliasName == "" { - onNameQuery.Return(images, len(images), nil).Once() + onNameQuery.Return(mocks.ImageQueryResult(images, len(images)), nil).Once() } else { - onNameQuery.Return(nil, 0, nil).Once() + onNameQuery.Return(mocks.ImageQueryResult(nil, 0), nil).Once() expectedAliasFilter := &models.ImageFilterType{ Organized: &organized, @@ -212,7 +215,8 @@ func testTagImages(t *testing.T, tc testTagCase) { }, } - mockImageReader.On("Query", expectedAliasFilter, expectedFindFilter).Return(images, len(images), nil).Once() + mockImageReader.On("Query", image.QueryOptions(expectedAliasFilter, expectedFindFilter, false)). 
+ Return(mocks.ImageQueryResult(images, len(images)), nil).Once() } for i := range matchingPaths { diff --git a/pkg/autotag/tagger.go b/pkg/autotag/tagger.go index c0555d401..b64e4e507 100644 --- a/pkg/autotag/tagger.go +++ b/pkg/autotag/tagger.go @@ -15,78 +15,12 @@ package autotag import ( "fmt" - "path/filepath" - "regexp" - "strings" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" ) -const separatorChars = `.\-_ ` - -func getPathQueryRegex(name string) string { - // escape specific regex characters - name = regexp.QuoteMeta(name) - - // handle path separators - const separator = `[` + separatorChars + `]` - - ret := strings.Replace(name, " ", separator+"*", -1) - ret = `(?:^|_|[^\w\d])` + ret + `(?:$|_|[^\w\d])` - return ret -} - -func nameMatchesPath(name, path string) bool { - // escape specific regex characters - name = regexp.QuoteMeta(name) - - name = strings.ToLower(name) - path = strings.ToLower(path) - - // handle path separators - const separator = `[` + separatorChars + `]` - - reStr := strings.Replace(name, " ", separator+"*", -1) - reStr = `(?:^|_|[^\w\d])` + reStr + `(?:$|_|[^\w\d])` - - re := regexp.MustCompile(reStr) - return re.MatchString(path) -} - -func getPathWords(path string) []string { - retStr := path - - // remove the extension - ext := filepath.Ext(retStr) - if ext != "" { - retStr = strings.TrimSuffix(retStr, ext) - } - - // handle path separators - const separator = `(?:_|[^\w\d])+` - re := regexp.MustCompile(separator) - retStr = re.ReplaceAllString(retStr, " ") - - words := strings.Split(retStr, " ") - - // remove any single letter words - var ret []string - for _, w := range words { - if len(w) > 1 { - // #1450 - we need to open up the criteria for matching so that we - // can match where path has no space between subject names - - // ie name = "foo bar" - path = "foobar" - // we post-match afterwards, so we can afford to be a little loose - // with the query - 
// just use the first two characters - ret = append(ret, w[0:2]) - } - } - - return ret -} - type tagger struct { ID int Type string @@ -105,7 +39,7 @@ func (t *tagger) addLog(otherType, otherName string) { } func (t *tagger) tagPerformers(performerReader models.PerformerReader, addFunc addLinkFunc) error { - others, err := getMatchingPerformers(t.Path, performerReader) + others, err := match.PathToPerformers(t.Path, performerReader) if err != nil { return err } @@ -126,7 +60,7 @@ func (t *tagger) tagPerformers(performerReader models.PerformerReader, addFunc a } func (t *tagger) tagStudios(studioReader models.StudioReader, addFunc addLinkFunc) error { - others, err := getMatchingStudios(t.Path, studioReader) + others, err := match.PathToStudios(t.Path, studioReader) if err != nil { return err } @@ -149,7 +83,7 @@ func (t *tagger) tagStudios(studioReader models.StudioReader, addFunc addLinkFun } func (t *tagger) tagTags(tagReader models.TagReader, addFunc addLinkFunc) error { - others, err := getMatchingTags(t.Path, tagReader) + others, err := match.PathToTags(t.Path, tagReader) if err != nil { return err } @@ -170,7 +104,7 @@ func (t *tagger) tagTags(tagReader models.TagReader, addFunc addLinkFunc) error } func (t *tagger) tagScenes(paths []string, sceneReader models.SceneReader, addFunc addLinkFunc) error { - others, err := getMatchingScenes(t.Name, paths, sceneReader) + others, err := match.PathToScenes(t.Name, paths, sceneReader) if err != nil { return err } @@ -191,7 +125,7 @@ func (t *tagger) tagScenes(paths []string, sceneReader models.SceneReader, addFu } func (t *tagger) tagImages(paths []string, imageReader models.ImageReader, addFunc addLinkFunc) error { - others, err := getMatchingImages(t.Name, paths, imageReader) + others, err := match.PathToImages(t.Name, paths, imageReader) if err != nil { return err } @@ -212,7 +146,7 @@ func (t *tagger) tagImages(paths []string, imageReader models.ImageReader, addFu } func (t *tagger) tagGalleries(paths []string, 
galleryReader models.GalleryReader, addFunc addLinkFunc) error { - others, err := getMatchingGalleries(t.Name, paths, galleryReader) + others, err := match.PathToGalleries(t.Name, paths, galleryReader) if err != nil { return err } diff --git a/pkg/database/custom_migrations.go b/pkg/database/custom_migrations.go index 9e343a515..340ffba55 100644 --- a/pkg/database/custom_migrations.go +++ b/pkg/database/custom_migrations.go @@ -2,6 +2,7 @@ package database import ( "database/sql" + "errors" "fmt" "strings" @@ -21,7 +22,7 @@ func createImagesChecksumIndex() error { return WithTxn(func(tx *sqlx.Tx) error { row := tx.QueryRow("SELECT 1 AS found FROM sqlite_master WHERE type = 'index' AND name = 'images_checksum_unique'") err := row.Err() - if err != nil && err != sql.ErrNoRows { + if err != nil && !errors.Is(err, sql.ErrNoRows) { return err } @@ -55,7 +56,7 @@ func createImagesChecksumIndex() error { } err = tx.Select(&result, "SELECT checksum FROM images GROUP BY checksum HAVING COUNT(1) > 1") - if err != nil && err != sql.ErrNoRows { + if err != nil && !errors.Is(err, sql.ErrNoRows) { logger.Errorf("Unable to determine non-unique image checksums: %s", err) return nil } diff --git a/pkg/database/database.go b/pkg/database/database.go index f84207b65..a23a77aa8 100644 --- a/pkg/database/database.go +++ b/pkg/database/database.go @@ -63,13 +63,13 @@ func Initialize(databasePath string) error { dbPath = databasePath if err := getDatabaseSchemaVersion(); err != nil { - return fmt.Errorf("error getting database schema version: %s", err.Error()) + return fmt.Errorf("error getting database schema version: %v", err) } if databaseSchemaVersion == 0 { // new database, just run the migrations if err := RunMigrations(); err != nil { - return fmt.Errorf("error running initial schema migrations: %s", err.Error()) + return fmt.Errorf("error running initial schema migrations: %v", err) } // RunMigrations calls Initialise. 
Just return return nil @@ -165,7 +165,7 @@ func Backup(db *sqlx.DB, backupPath string) error { var err error db, err = sqlx.Connect(sqlite3Driver, "file:"+dbPath+"?_fk=true") if err != nil { - return fmt.Errorf("Open database %s failed:%s", dbPath, err) + return fmt.Errorf("open database %s failed: %v", dbPath, err) } defer db.Close() } @@ -173,7 +173,7 @@ func Backup(db *sqlx.DB, backupPath string) error { logger.Infof("Backing up database into: %s", backupPath) _, err := db.Exec(`VACUUM INTO "` + backupPath + `"`) if err != nil { - return fmt.Errorf("vacuum failed: %s", err) + return fmt.Errorf("vacuum failed: %v", err) } return nil @@ -298,7 +298,7 @@ func registerCustomDriver() { }) if err != nil { - return fmt.Errorf("error registering natural sort collation: %s", err.Error()) + return fmt.Errorf("error registering natural sort collation: %v", err) } return nil diff --git a/pkg/database/transaction.go b/pkg/database/transaction.go index 32c1ab171..d8c23fb3b 100644 --- a/pkg/database/transaction.go +++ b/pkg/database/transaction.go @@ -22,7 +22,9 @@ func WithTxn(fn func(tx *sqlx.Tx) error) error { logger.Warnf("failure when performing transaction rollback: %v", err) } panic(p) - } else if err != nil { + } + + if err != nil { // something went wrong, rollback if err := tx.Rollback(); err != nil { logger.Warnf("failure when performing transaction rollback: %v", err) diff --git a/pkg/dlna/cds.go b/pkg/dlna/cds.go index d23660d0b..d9dc6f546 100644 --- a/pkg/dlna/cds.go +++ b/pkg/dlna/cds.go @@ -39,6 +39,7 @@ import ( "github.com/anacrolix/dms/upnpav" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/utils" ) @@ -437,7 +438,7 @@ func (me *contentDirectoryService) getVideos(sceneFilter *models.SceneFilterType Sort: &sort, } - scenes, total, err := r.Scene().Query(sceneFilter, findFilter) + scenes, total, err := scene.QueryWithCount(r.Scene(), sceneFilter, findFilter) if 
err != nil { return err } diff --git a/pkg/dlna/dms.go b/pkg/dlna/dms.go index d7c5efa85..a1ea8ceac 100644 --- a/pkg/dlna/dms.go +++ b/pkg/dlna/dms.go @@ -415,7 +415,7 @@ func (me *Server) serveIcon(w http.ResponseWriter, r *http.Request) { } var scene *models.Scene - err := me.txnManager.WithReadTxn(context.Background(), func(r models.ReaderRepository) error { + err := me.txnManager.WithReadTxn(r.Context(), func(r models.ReaderRepository) error { idInt, err := strconv.Atoi(sceneId) if err != nil { return nil @@ -434,7 +434,7 @@ func (me *Server) serveIcon(w http.ResponseWriter, r *http.Request) { me.sceneServer.ServeScreenshot(scene, w, r) } -func (me *Server) contentDirectoryInitialEvent(urls []*url.URL, sid string) { +func (me *Server) contentDirectoryInitialEvent(ctx context.Context, urls []*url.URL, sid string) { body := xmlMarshalOrPanic(upnp.PropertySet{ Properties: []upnp.Property{ { @@ -465,7 +465,7 @@ func (me *Server) contentDirectoryInitialEvent(urls []*url.URL, sid string) { body = append([]byte(``+"\n"), body...) for _, _url := range urls { bodyReader := bytes.NewReader(body) - req, err := http.NewRequest("NOTIFY", _url.String(), bodyReader) + req, err := http.NewRequestWithContext(ctx, "NOTIFY", _url.String(), bodyReader) if err != nil { logger.Errorf("Could not create a request to notify %s: %s", _url.String(), err) continue @@ -515,7 +515,8 @@ func (me *Server) contentDirectoryEventSubHandler(w http.ResponseWriter, r *http // the spec on eventing but hasn't been completed as I have nothing to // test it with. 
service := me.services["ContentDirectory"] - if r.Method == "SUBSCRIBE" && r.Header.Get("SID") == "" { + switch { + case r.Method == "SUBSCRIBE" && r.Header.Get("SID") == "": urls := upnp.ParseCallbackURLs(r.Header.Get("CALLBACK")) var timeout int fmt.Sscanf(r.Header.Get("TIMEOUT"), "Second-%d", &timeout) @@ -526,11 +527,11 @@ func (me *Server) contentDirectoryEventSubHandler(w http.ResponseWriter, r *http w.WriteHeader(http.StatusOK) go func() { time.Sleep(100 * time.Millisecond) - me.contentDirectoryInitialEvent(urls, sid) + me.contentDirectoryInitialEvent(r.Context(), urls, sid) }() - } else if r.Method == "SUBSCRIBE" { + case r.Method == "SUBSCRIBE": http.Error(w, "meh", http.StatusPreconditionFailed) - } else { + default: logger.Debugf("unhandled event method: %s", r.Method) } } @@ -554,7 +555,7 @@ func (me *Server) initMux(mux *http.ServeMux) { mux.HandleFunc(resPath, func(w http.ResponseWriter, r *http.Request) { sceneId := r.URL.Query().Get("scene") var scene *models.Scene - err := me.txnManager.WithReadTxn(context.Background(), func(r models.ReaderRepository) error { + err := me.txnManager.WithReadTxn(r.Context(), func(r models.ReaderRepository) error { sceneIdInt, err := strconv.Atoi(sceneId) if err != nil { return nil diff --git a/pkg/dlna/paging.go b/pkg/dlna/paging.go index d5485b107..6f2afda8e 100644 --- a/pkg/dlna/paging.go +++ b/pkg/dlna/paging.go @@ -6,6 +6,7 @@ import ( "strconv" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scene" ) type scenePager struct { @@ -36,7 +37,7 @@ func (p *scenePager) getPages(r models.ReaderRepository, total int) ([]interface if pages <= 10 || (page-1)%(pages/10) == 0 { thisPage := ((page - 1) * pageSize) + 1 findFilter.Page = &thisPage - scenes, _, err := r.Scene().Query(p.sceneFilter, findFilter) + scenes, err := scene.Query(r.Scene(), p.sceneFilter, findFilter) if err != nil { return nil, err } @@ -48,7 +49,7 @@ func (p *scenePager) getPages(r models.ReaderRepository, total int) 
([]interface sceneTitle = sceneTitle[0:3] } - title = title + fmt.Sprintf(" (%s...)", sceneTitle) + title += fmt.Sprintf(" (%s...)", sceneTitle) } objs = append(objs, makeStorageFolder(p.getPageID(page), title, p.parentID)) @@ -67,7 +68,7 @@ func (p *scenePager) getPageVideos(r models.ReaderRepository, page int, host str Sort: &sort, } - scenes, _, err := r.Scene().Query(p.sceneFilter, findFilter) + scenes, err := scene.Query(r.Scene(), p.sceneFilter, findFilter) if err != nil { return nil, err } diff --git a/pkg/ffmpeg/downloader.go b/pkg/ffmpeg/downloader.go index da5b2dbaf..0a8599d97 100644 --- a/pkg/ffmpeg/downloader.go +++ b/pkg/ffmpeg/downloader.go @@ -2,6 +2,7 @@ package ffmpeg import ( "archive/zip" + "context" "fmt" "io" "net/http" @@ -36,9 +37,9 @@ func GetPaths(paths []string) (string, string) { return ffmpegPath, ffprobePath } -func Download(configDirectory string) error { +func Download(ctx context.Context, configDirectory string) error { for _, url := range getFFMPEGURL() { - err := DownloadSingle(configDirectory, url) + err := DownloadSingle(ctx, configDirectory, url) if err != nil { return err } @@ -69,7 +70,7 @@ func (r *progressReader) Read(p []byte) (int, error) { return read, err } -func DownloadSingle(configDirectory, url string) error { +func DownloadSingle(ctx context.Context, configDirectory, url string) error { if url == "" { return fmt.Errorf("no ffmpeg url for this platform") } @@ -88,7 +89,12 @@ func DownloadSingle(configDirectory, url string) error { logger.Infof("Downloading %s...", url) // Make the HTTP request - resp, err := http.Get(url) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return err + } + + resp, err := http.DefaultClient.Do(req) if err != nil { return err } @@ -148,9 +154,14 @@ func getFFMPEGURL() []string { case "darwin": urls = []string{"https://evermeet.cx/ffmpeg/ffmpeg-4.3.1.zip", "https://evermeet.cx/ffmpeg/ffprobe-4.3.1.zip"} case "linux": - // TODO: get appropriate 
arch (arm,arm64,amd64) and xz untar from https://johnvansickle.com/ffmpeg/ - // or get the ffmpeg,ffprobe zip repackaged ones from https://ffbinaries.com/downloads - urls = []string{""} + switch runtime.GOARCH { + case "amd64": + urls = []string{"https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v4.2.1/ffmpeg-4.2.1-linux-64.zip", "https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v4.2.1/ffprobe-4.2.1-linux-64.zip"} + case "arm": + urls = []string{"https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v4.2.1/ffmpeg-4.2.1-linux-armhf-32.zip", "https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v4.2.1/ffprobe-4.2.1-linux-armhf-32.zip"} + case "arm64": + urls = []string{"https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v4.2.1/ffmpeg-4.2.1-linux-arm-64.zip", "https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v4.2.1/ffprobe-4.2.1-linux-arm-64.zip"} + } case "windows": urls = []string{"https://www.gyan.dev/ffmpeg/builds/ffmpeg-release-essentials.zip"} default: diff --git a/pkg/ffmpeg/encoder.go b/pkg/ffmpeg/encoder.go index ed9c6e31f..2334808ed 100644 --- a/pkg/ffmpeg/encoder.go +++ b/pkg/ffmpeg/encoder.go @@ -12,21 +12,13 @@ import ( "github.com/stashapp/stash/pkg/logger" ) -type Encoder struct { - Path string -} +type Encoder string var ( runningEncoders = make(map[string][]*os.Process) runningEncodersMutex = sync.RWMutex{} ) -func NewEncoder(ffmpegPath string) Encoder { - return Encoder{ - Path: ffmpegPath, - } -} - func registerRunningEncoder(path string, process *os.Process) { runningEncodersMutex.Lock() processes := runningEncoders[path] @@ -86,7 +78,7 @@ func KillRunningEncoders(path string) { // FFmpeg runner with progress output, used for transcodes func (e *Encoder) runTranscode(probeResult VideoFile, args []string) (string, error) { - cmd := exec.Command(e.Path, args...) + cmd := exec.Command(string(*e), args...) 
stderr, err := cmd.StderrPipe() if err != nil { @@ -141,19 +133,25 @@ func (e *Encoder) runTranscode(probeResult VideoFile, args []string) (string, er return stdoutString, nil } -func (e *Encoder) run(probeResult VideoFile, args []string) (string, error) { - cmd := exec.Command(e.Path, args...) +func (e *Encoder) run(sourcePath string, args []string, stdin io.Reader) (string, error) { + cmd := exec.Command(string(*e), args...) var stdout, stderr bytes.Buffer cmd.Stdout = &stdout cmd.Stderr = &stderr + cmd.Stdin = stdin if err := cmd.Start(); err != nil { return "", err } - registerRunningEncoder(probeResult.Path, cmd.Process) - err := waitAndDeregister(probeResult.Path, cmd) + var err error + if sourcePath != "" { + registerRunningEncoder(sourcePath, cmd.Process) + err = waitAndDeregister(sourcePath, cmd) + } else { + err = cmd.Wait() + } if err != nil { // error message should be in the stderr stream diff --git a/pkg/ffmpeg/encoder_marker.go b/pkg/ffmpeg/encoder_marker.go index 73ef8e790..540b8e6c3 100644 --- a/pkg/ffmpeg/encoder_marker.go +++ b/pkg/ffmpeg/encoder_marker.go @@ -34,7 +34,7 @@ func (e *Encoder) SceneMarkerVideo(probeResult VideoFile, options SceneMarkerOpt "-strict", "-2", options.OutputPath, } - _, err := e.run(probeResult, args) + _, err := e.run(probeResult.Path, args, nil) return err } @@ -55,6 +55,6 @@ func (e *Encoder) SceneMarkerImage(probeResult VideoFile, options SceneMarkerOpt "-an", options.OutputPath, } - _, err := e.run(probeResult, args) + _, err := e.run(probeResult.Path, args, nil) return err } diff --git a/pkg/ffmpeg/encoder_scene_preview_chunk.go b/pkg/ffmpeg/encoder_scene_preview_chunk.go index a0b53a93f..92d660b94 100644 --- a/pkg/ffmpeg/encoder_scene_preview_chunk.go +++ b/pkg/ffmpeg/encoder_scene_preview_chunk.go @@ -85,11 +85,11 @@ func (e *Encoder) ScenePreviewVideoChunk(probeResult VideoFile, options ScenePre "-strict", "-2", } - args3 := append(args, args2...) - args3 = append(args3, argsAudio...) 
- finalArgs := append(args3, options.OutputPath) + args = append(args, args2...) + args = append(args, argsAudio...) + args = append(args, options.OutputPath) - _, err := e.run(probeResult, finalArgs) + _, err := e.run(probeResult.Path, args, nil) return err } @@ -102,7 +102,7 @@ func (e *Encoder) ScenePreviewVideoChunkCombine(probeResult VideoFile, concatFil "-c", "copy", outputPath, } - _, err := e.run(probeResult, args) + _, err := e.run(probeResult.Path, args, nil) return err } @@ -122,6 +122,6 @@ func (e *Encoder) ScenePreviewVideoToImage(probeResult VideoFile, width int, vid "-an", outputPath, } - _, err := e.run(probeResult, args) + _, err := e.run(probeResult.Path, args, nil) return err } diff --git a/pkg/ffmpeg/encoder_screenshot.go b/pkg/ffmpeg/encoder_screenshot.go index bd52273c8..636092ac0 100644 --- a/pkg/ffmpeg/encoder_screenshot.go +++ b/pkg/ffmpeg/encoder_screenshot.go @@ -28,7 +28,7 @@ func (e *Encoder) Screenshot(probeResult VideoFile, options ScreenshotOptions) e "-f", "image2", options.OutputPath, } - _, err := e.run(probeResult, args) + _, err := e.run(probeResult.Path, args, nil) return err } diff --git a/pkg/ffmpeg/encoder_sprite_screenshot.go b/pkg/ffmpeg/encoder_sprite_screenshot.go index c1a87788e..cba560430 100644 --- a/pkg/ffmpeg/encoder_sprite_screenshot.go +++ b/pkg/ffmpeg/encoder_sprite_screenshot.go @@ -22,7 +22,7 @@ func (e *Encoder) SpriteScreenshot(probeResult VideoFile, options SpriteScreensh "-f", "rawvideo", "-", } - data, err := e.run(probeResult, args) + data, err := e.run(probeResult.Path, args, nil) if err != nil { return nil, err } diff --git a/pkg/ffmpeg/encoder_transcode.go b/pkg/ffmpeg/encoder_transcode.go index 235fb6959..920051b96 100644 --- a/pkg/ffmpeg/encoder_transcode.go +++ b/pkg/ffmpeg/encoder_transcode.go @@ -67,9 +67,9 @@ func (e *Encoder) Transcode(probeResult VideoFile, options TranscodeOptions) { _, _ = e.runTranscode(probeResult, args) } -//transcode the video, remove the audio -//in some videos where the 
audio codec is not supported by ffmpeg -//ffmpeg fails if you try to transcode the audio +// TranscodeVideo transcodes the video, and removes the audio. +// In some videos where the audio codec is not supported by ffmpeg, +// ffmpeg fails if you try to transcode the audio func (e *Encoder) TranscodeVideo(probeResult VideoFile, options TranscodeOptions) { scale := calculateTranscodeScale(probeResult, options.MaxTranscodeSize) args := []string{ @@ -87,7 +87,7 @@ func (e *Encoder) TranscodeVideo(probeResult VideoFile, options TranscodeOptions _, _ = e.runTranscode(probeResult, args) } -//copy the video stream as is, transcode audio +// TranscodeAudio will copy the video stream as is, and transcode audio. func (e *Encoder) TranscodeAudio(probeResult VideoFile, options TranscodeOptions) { args := []string{ "-i", probeResult.Path, @@ -99,7 +99,7 @@ func (e *Encoder) TranscodeAudio(probeResult VideoFile, options TranscodeOptions _, _ = e.runTranscode(probeResult, args) } -//copy the video stream as is, drop audio +// CopyVideo will copy the video stream as is, and drop the audio stream. func (e *Encoder) CopyVideo(probeResult VideoFile, options TranscodeOptions) { args := []string{ "-i", probeResult.Path, diff --git a/pkg/ffmpeg/ffprobe.go b/pkg/ffmpeg/ffprobe.go index ed276f14a..dfde16d8b 100644 --- a/pkg/ffmpeg/ffprobe.go +++ b/pkg/ffmpeg/ffprobe.go @@ -72,8 +72,8 @@ var validAudioForMkv = []AudioCodec{Aac, Mp3, Vorbis, Opus} var validAudioForWebm = []AudioCodec{Vorbis, Opus} var validAudioForMp4 = []AudioCodec{Aac, Mp3} -//maps user readable container strings to ffprobe's format_name -//on some formats ffprobe can't differentiate +// ContainerToFfprobe maps user readable container strings to ffprobe's format_name. 
+// On some formats ffprobe can't differentiate var ContainerToFfprobe = map[Container]string{ Mp4: Mp4Ffmpeg, M4v: M4vFfmpeg, @@ -116,7 +116,7 @@ func IsValidCodec(codecName string, supportedCodecs []string) bool { return false } -func IsValidAudio(audio AudioCodec, ValidCodecs []AudioCodec) bool { +func IsValidAudio(audio AudioCodec, validCodecs []AudioCodec) bool { // if audio codec is missing or unsupported by ffmpeg we can't do anything about it // report it as valid so that the file can at least be streamed directly if the video codec is supported @@ -124,7 +124,7 @@ func IsValidAudio(audio AudioCodec, ValidCodecs []AudioCodec) bool { return true } - for _, c := range ValidCodecs { + for _, c := range validCodecs { if c == audio { return true } @@ -155,7 +155,8 @@ func IsValidForContainer(format Container, validContainers []Container) bool { return false } -//extend stream validation check to take into account container +// IsValidCombo checks if a codec/container combination is valid. +// Returns true on validity, false otherwise func IsValidCombo(codecName string, format Container, supportedVideoCodecs []string) bool { supportMKV := IsValidCodec(Mkv, supportedVideoCodecs) supportHEVC := IsValidCodec(Hevc, supportedVideoCodecs) @@ -221,14 +222,13 @@ type VideoFile struct { AudioCodec string } +// FFProbe +type FFProbe string + // Execute exec command and bind result to struct. 
-func NewVideoFile(ffprobePath string, videoPath string, stripExt bool) (*VideoFile, error) { +func (f *FFProbe) NewVideoFile(videoPath string, stripExt bool) (*VideoFile, error) { args := []string{"-v", "quiet", "-print_format", "json", "-show_format", "-show_streams", "-show_error", videoPath} - //// Extremely slow on windows for some reason - //if runtime.GOOS != "windows" { - // args = append(args, "-count_frames") - //} - out, err := exec.Command(ffprobePath, args...).Output() + out, err := exec.Command(string(*f), args...).Output() if err != nil { return nil, fmt.Errorf("FFProbe encountered an error with <%s>.\nError JSON:\n%s\nError: %s", videoPath, string(out), err.Error()) @@ -253,9 +253,6 @@ func parse(filePath string, probeJSON *FFProbeJSON, stripExt bool) (*VideoFile, if result.JSON.Error.Code != 0 { return nil, fmt.Errorf("ffprobe error code %d: %s", result.JSON.Error.Code, result.JSON.Error.String) } - //} else if (ffprobeResult.stderr.includes("could not find codec parameters")) { - // throw new Error(`FFProbe [${filePath}] -> Could not find codec parameters`); - //} // TODO nil_or_unsupported.(video_stream) && nil_or_unsupported.(audio_stream) result.Path = filePath result.Title = probeJSON.Format.Tags.Title diff --git a/pkg/ffmpeg/image.go b/pkg/ffmpeg/image.go new file mode 100644 index 000000000..68fb90379 --- /dev/null +++ b/pkg/ffmpeg/image.go @@ -0,0 +1,40 @@ +package ffmpeg + +import ( + "bytes" + "errors" + "fmt" +) + +var ErrUnsupportedFormat = errors.New("unsupported image format") + +func (e *Encoder) ImageThumbnail(image *bytes.Buffer, format *string, maxDimensions int, path string) ([]byte, error) { + // ffmpeg spends a long sniffing image format when data is piped through stdio, so we pass the format explicitly instead + ffmpegformat := "" + if format == nil { + return nil, ErrUnsupportedFormat + } + + switch *format { + case "jpeg": + ffmpegformat = "mjpeg" + case "png": + ffmpegformat = "png_pipe" + case "webp": + ffmpegformat = 
"webp_pipe" + } + + args := []string{ + "-f", ffmpegformat, + "-i", "-", + "-vf", fmt.Sprintf("scale=%v:%v:force_original_aspect_ratio=decrease", maxDimensions, maxDimensions), + "-c:v", "mjpeg", + "-q:v", "5", + "-f", "image2pipe", + "-", + } + + data, err := e.run(path, args, image) + + return []byte(data), err +} diff --git a/pkg/ffmpeg/media_detection.go b/pkg/ffmpeg/media_detection.go index 4de7e4ba6..6d9feb59a 100644 --- a/pkg/ffmpeg/media_detection.go +++ b/pkg/ffmpeg/media_detection.go @@ -2,8 +2,9 @@ package ffmpeg import ( "bytes" - "github.com/stashapp/stash/pkg/logger" "os" + + "github.com/stashapp/stash/pkg/logger" ) // detect file format from magic file number @@ -37,11 +38,12 @@ func containsMatroskaSignature(buf, subType []byte) bool { return buf[index-3] == 0x42 && buf[index-2] == 0x82 } -//returns container as string ("" on error or no match) -//implements only mkv or webm as ffprobe can't distinguish between them -//and not all browsers support mkv -func MagicContainer(file_path string) Container { - file, err := os.Open(file_path) +// MagicContainer returns the container type of a file path. +// Returns the zero-value on errors or no-match. Implements mkv or +// webm only, as ffprobe can't distinguish between them and not all +// browsers support mkv +func MagicContainer(filePath string) Container { + file, err := os.Open(filePath) if err != nil { logger.Errorf("[magicfile] %v", err) return "" diff --git a/pkg/ffmpeg/stream.go b/pkg/ffmpeg/stream.go index f99c3c95c..1f4d4960e 100644 --- a/pkg/ffmpeg/stream.go +++ b/pkg/ffmpeg/stream.go @@ -205,7 +205,7 @@ func (e *Encoder) GetTranscodeStream(options TranscodeStreamOptions) (*Stream, e func (e *Encoder) stream(probeResult VideoFile, options TranscodeStreamOptions) (*Stream, error) { args := options.getStreamArgs() - cmd := exec.Command(e.Path, args...) + cmd := exec.Command(string(*e), args...) 
logger.Debugf("Streaming via: %s", strings.Join(cmd.Args, " ")) stdout, err := cmd.StdoutPipe() diff --git a/pkg/file/file.go b/pkg/file/file.go new file mode 100644 index 000000000..397dabd6d --- /dev/null +++ b/pkg/file/file.go @@ -0,0 +1,31 @@ +package file + +import ( + "io" + "io/fs" + "os" +) + +type fsFile struct { + path string + info fs.FileInfo +} + +func (f *fsFile) Open() (io.ReadCloser, error) { + return os.Open(f.path) +} + +func (f *fsFile) Path() string { + return f.path +} + +func (f *fsFile) FileInfo() fs.FileInfo { + return f.info +} + +func FSFile(path string, info fs.FileInfo) SourceFile { + return &fsFile{ + path: path, + info: info, + } +} diff --git a/pkg/file/hash.go b/pkg/file/hash.go new file mode 100644 index 000000000..630ffcb6f --- /dev/null +++ b/pkg/file/hash.go @@ -0,0 +1,17 @@ +package file + +import ( + "io" + + "github.com/stashapp/stash/pkg/utils" +) + +type FSHasher struct{} + +func (h *FSHasher) OSHash(src io.ReadSeeker, size int64) (string, error) { + return utils.OSHashFromReader(src, size) +} + +func (h *FSHasher) MD5(src io.Reader) (string, error) { + return utils.MD5FromReader(src) +} diff --git a/pkg/file/scan.go b/pkg/file/scan.go new file mode 100644 index 000000000..4c0ac3152 --- /dev/null +++ b/pkg/file/scan.go @@ -0,0 +1,176 @@ +package file + +import ( + "fmt" + "io" + "io/fs" + "strconv" + "time" + + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" +) + +type SourceFile interface { + Open() (io.ReadCloser, error) + Path() string + FileInfo() fs.FileInfo +} + +type FileBased interface { + File() models.File +} + +type Hasher interface { + OSHash(src io.ReadSeeker, size int64) (string, error) + MD5(src io.Reader) (string, error) +} + +type Scanned struct { + Old *models.File + New *models.File +} + +// FileUpdated returns true if both old and new files are present and not equal. 
+func (s Scanned) FileUpdated() bool { + if s.Old == nil || s.New == nil { + return false + } + + return !s.Old.Equal(*s.New) +} + +// ContentsChanged returns true if both old and new files are present and the file content is different. +func (s Scanned) ContentsChanged() bool { + if s.Old == nil || s.New == nil { + return false + } + + if s.Old.Checksum != s.New.Checksum { + return true + } + + if s.Old.OSHash != s.New.OSHash { + return true + } + + return false +} + +type Scanner struct { + Hasher Hasher + + CalculateMD5 bool + CalculateOSHash bool +} + +func (o Scanner) ScanExisting(existing FileBased, file SourceFile) (h *Scanned, err error) { + info := file.FileInfo() + h = &Scanned{} + + existingFile := existing.File() + h.Old = &existingFile + + updatedFile := existingFile + h.New = &updatedFile + + // update existing data if needed + // truncate to seconds, since we don't store beyond that in the database + updatedFile.FileModTime = info.ModTime().Truncate(time.Second) + updatedFile.Size = strconv.FormatInt(info.Size(), 10) + + modTimeChanged := !existingFile.FileModTime.Equal(updatedFile.FileModTime) + + // regenerate hash(es) if missing or file mod time changed + if _, err = o.generateHashes(&updatedFile, file, modTimeChanged); err != nil { + return nil, err + } + + // notify of changes as needed + // object exists, no further processing required + return +} + +func (o Scanner) ScanNew(file SourceFile) (*models.File, error) { + info := file.FileInfo() + sizeStr := strconv.FormatInt(info.Size(), 10) + modTime := info.ModTime() + f := models.File{ + Path: file.Path(), + Size: sizeStr, + FileModTime: modTime, + } + + if _, err := o.generateHashes(&f, file, true); err != nil { + return nil, err + } + + return &f, nil +} + +// generateHashes regenerates and sets the hashes in the provided File. +// It will not recalculate unless specified. 
+func (o Scanner) generateHashes(f *models.File, file SourceFile, regenerate bool) (changed bool, err error) { + existing := *f + + var src io.ReadCloser + if o.CalculateOSHash && (regenerate || f.OSHash == "") { + logger.Infof("Calculating oshash for %s ...", f.Path) + + src, err = file.Open() + if err != nil { + return false, err + } + defer src.Close() + + seekSrc, valid := src.(io.ReadSeeker) + if !valid { + return false, fmt.Errorf("invalid source file type: %s", file.Path()) + } + + // regenerate hash + var oshash string + oshash, err = o.Hasher.OSHash(seekSrc, file.FileInfo().Size()) + if err != nil { + return false, fmt.Errorf("error generating oshash for %s: %w", file.Path(), err) + } + + f.OSHash = oshash + + // reset reader to start of file + _, err = seekSrc.Seek(0, io.SeekStart) + if err != nil { + return false, fmt.Errorf("error seeking to start of file in %s: %w", file.Path(), err) + } + } + + // always generate if MD5 is nil + // only regenerate MD5 if: + // - OSHash was not calculated, or + // - existing OSHash is different to generated one + // or if it was different to the previous version + if o.CalculateMD5 && (f.Checksum == "" || (regenerate && (!o.CalculateOSHash || existing.OSHash != f.OSHash))) { + logger.Infof("Calculating checksum for %s...", f.Path) + + if src == nil { + src, err = file.Open() + if err != nil { + return false, err + } + defer src.Close() + } + + // regenerate checksum + var checksum string + checksum, err = o.Hasher.MD5(src) + if err != nil { + return + } + + f.Checksum = checksum + } + + changed = (o.CalculateOSHash && (f.OSHash != existing.OSHash)) || (o.CalculateMD5 && (f.Checksum != existing.Checksum)) + + return +} diff --git a/pkg/file/zip.go b/pkg/file/zip.go new file mode 100644 index 000000000..4028beea5 --- /dev/null +++ b/pkg/file/zip.go @@ -0,0 +1,64 @@ +package file + +import ( + "archive/zip" + "io" + "io/fs" + "strings" +) + +const zipSeparator = "\x00" + +type zipFile struct { + zipPath string + zf 
*zip.File +} + +func (f *zipFile) Open() (io.ReadCloser, error) { + return f.zf.Open() +} + +func (f *zipFile) Path() string { + // TODO - fix this + return ZipFilename(f.zipPath, f.zf.Name) +} + +func (f *zipFile) FileInfo() fs.FileInfo { + return f.zf.FileInfo() +} + +func ZipFile(zipPath string, zf *zip.File) SourceFile { + return &zipFile{ + zipPath: zipPath, + zf: zf, + } +} + +func ZipFilename(zipFilename, filenameInZip string) string { + return zipFilename + zipSeparator + filenameInZip +} + +// IsZipPath returns true if the path includes the zip separator byte, +// indicating it is within a zip file. +func IsZipPath(p string) bool { + return strings.Contains(p, zipSeparator) +} + +// ZipPathDisplayName converts an zip path for display. It translates the zip +// file separator character into '/', since this character is also used for +// path separators within zip files. It returns the original provided path +// if it does not contain the zip file separator character. +func ZipPathDisplayName(path string) string { + return strings.ReplaceAll(path, zipSeparator, "/") +} + +func ZipFilePath(path string) (zipFilename, filename string) { + nullIndex := strings.Index(path, zipSeparator) + if nullIndex != -1 { + zipFilename = path[0:nullIndex] + filename = path[nullIndex+1:] + } else { + filename = path + } + return +} diff --git a/pkg/gallery/export_test.go b/pkg/gallery/export_test.go index ac46871fa..379900dfd 100644 --- a/pkg/gallery/export_test.go +++ b/pkg/gallery/export_test.go @@ -25,7 +25,7 @@ const ( const ( path = "path" - zip = true + isZip = true url = "url" checksum = "checksum" title = "title" @@ -48,7 +48,7 @@ func createFullGallery(id int) models.Gallery { return models.Gallery{ ID: id, Path: models.NullString(path), - Zip: zip, + Zip: isZip, Title: models.NullString(title), Checksum: checksum, Date: models.SQLiteDate{ @@ -72,7 +72,7 @@ func createFullJSONGallery() *jsonschema.Gallery { return &jsonschema.Gallery{ Title: title, Path: path, - Zip: 
zip, + Zip: isZip, Checksum: checksum, Date: date, Details: details, @@ -107,11 +107,12 @@ func TestToJSON(t *testing.T) { gallery := s.input json, err := ToBasicJSON(&gallery) - if !s.err && err != nil { + switch { + case !s.err && err != nil: t.Errorf("[%d] unexpected error: %s", i, err.Error()) - } else if s.err && err == nil { + case s.err && err == nil: t.Errorf("[%d] expected error not returned", i) - } else { + default: assert.Equal(t, s.expected, json, "[%d]", i) } } @@ -162,11 +163,12 @@ func TestGetStudioName(t *testing.T) { gallery := s.input json, err := GetStudioName(mockStudioReader, &gallery) - if !s.err && err != nil { + switch { + case !s.err && err != nil: t.Errorf("[%d] unexpected error: %s", i, err.Error()) - } else if s.err && err == nil { + case s.err && err == nil: t.Errorf("[%d] expected error not returned", i) - } else { + default: assert.Equal(t, s.expected, json, "[%d]", i) } } diff --git a/pkg/gallery/import.go b/pkg/gallery/import.go index 7ad87cdbe..e568d5bbd 100644 --- a/pkg/gallery/import.go +++ b/pkg/gallery/import.go @@ -78,7 +78,7 @@ func (i *Importer) populateStudio() error { if i.Input.Studio != "" { studio, err := i.StudioWriter.FindByName(i.Input.Studio, false) if err != nil { - return fmt.Errorf("error finding studio by name: %s", err.Error()) + return fmt.Errorf("error finding studio by name: %v", err) } if studio == nil { @@ -147,7 +147,7 @@ func (i *Importer) populatePerformers() error { if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { createdPerformers, err := i.createPerformers(missingPerformers) if err != nil { - return fmt.Errorf("error creating gallery performers: %s", err.Error()) + return fmt.Errorf("error creating gallery performers: %v", err) } performers = append(performers, createdPerformers...) 
@@ -203,7 +203,7 @@ func (i *Importer) populateTags() error { if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { createdTags, err := i.createTags(missingTags) if err != nil { - return fmt.Errorf("error creating gallery tags: %s", err.Error()) + return fmt.Errorf("error creating gallery tags: %v", err) } tags = append(tags, createdTags...) @@ -242,7 +242,7 @@ func (i *Importer) PostImport(id int) error { } if err := i.ReaderWriter.UpdatePerformers(id, performerIDs); err != nil { - return fmt.Errorf("failed to associate performers: %s", err.Error()) + return fmt.Errorf("failed to associate performers: %v", err) } } @@ -252,7 +252,7 @@ func (i *Importer) PostImport(id int) error { tagIDs = append(tagIDs, t.ID) } if err := i.ReaderWriter.UpdateTags(id, tagIDs); err != nil { - return fmt.Errorf("failed to associate tags: %s", err.Error()) + return fmt.Errorf("failed to associate tags: %v", err) } } @@ -280,7 +280,7 @@ func (i *Importer) FindExistingID() (*int, error) { func (i *Importer) Create() (*int, error) { created, err := i.ReaderWriter.Create(i.gallery) if err != nil { - return nil, fmt.Errorf("error creating gallery: %s", err.Error()) + return nil, fmt.Errorf("error creating gallery: %v", err) } id := created.ID @@ -292,7 +292,7 @@ func (i *Importer) Update(id int) error { gallery.ID = id _, err := i.ReaderWriter.Update(gallery) if err != nil { - return fmt.Errorf("error updating existing gallery: %s", err.Error()) + return fmt.Errorf("error updating existing gallery: %v", err) } return nil diff --git a/pkg/gallery/scan.go b/pkg/gallery/scan.go new file mode 100644 index 000000000..d5b195b39 --- /dev/null +++ b/pkg/gallery/scan.go @@ -0,0 +1,225 @@ +package gallery + +import ( + "archive/zip" + "context" + "database/sql" + "fmt" + "strings" + "time" + + "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/manager/paths" + "github.com/stashapp/stash/pkg/models" + 
"github.com/stashapp/stash/pkg/plugin" + "github.com/stashapp/stash/pkg/utils" +) + +const mutexType = "gallery" + +type Scanner struct { + file.Scanner + + ImageExtensions []string + StripFileExtension bool + Ctx context.Context + CaseSensitiveFs bool + TxnManager models.TransactionManager + Paths *paths.Paths + PluginCache *plugin.Cache + MutexManager *utils.MutexManager +} + +func FileScanner(hasher file.Hasher) file.Scanner { + return file.Scanner{ + Hasher: hasher, + CalculateMD5: true, + } +} + +func (scanner *Scanner) ScanExisting(existing file.FileBased, file file.SourceFile) (retGallery *models.Gallery, scanImages bool, err error) { + scanned, err := scanner.Scanner.ScanExisting(existing, file) + if err != nil { + return nil, false, err + } + + // we don't currently store sizes for gallery files + // clear the file size so that we don't incorrectly detect a + // change + scanned.New.Size = "" + + retGallery = existing.(*models.Gallery) + + path := scanned.New.Path + + changed := false + + if scanned.ContentsChanged() { + retGallery.SetFile(*scanned.New) + changed = true + } else if scanned.FileUpdated() { + logger.Infof("Updated gallery file %s", path) + + retGallery.SetFile(*scanned.New) + changed = true + } + + if changed { + scanImages = true + logger.Infof("%s has been updated: rescanning", path) + + retGallery.UpdatedAt = models.SQLiteTimestamp{Timestamp: time.Now()} + + // we are operating on a checksum now, so grab a mutex on the checksum + done := make(chan struct{}) + scanner.MutexManager.Claim(mutexType, scanned.New.Checksum, done) + + if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { + // free the mutex once transaction is complete + defer close(done) + + // ensure no clashes of hashes + if scanned.New.Checksum != "" && scanned.Old.Checksum != scanned.New.Checksum { + dupe, _ := r.Gallery().FindByChecksum(retGallery.Checksum) + if dupe != nil { + return fmt.Errorf("MD5 for file %s is the same as that of %s", 
path, dupe.Path.String) + } + } + + retGallery, err = r.Gallery().Update(*retGallery) + return err + }); err != nil { + return nil, false, err + } + + scanner.PluginCache.ExecutePostHooks(scanner.Ctx, retGallery.ID, plugin.GalleryUpdatePost, nil, nil) + } + + return +} + +func (scanner *Scanner) ScanNew(file file.SourceFile) (retGallery *models.Gallery, scanImages bool, err error) { + scanned, err := scanner.Scanner.ScanNew(file) + if err != nil { + return nil, false, err + } + + path := file.Path() + checksum := scanned.Checksum + isNewGallery := false + isUpdatedGallery := false + var g *models.Gallery + + // grab a mutex on the checksum + done := make(chan struct{}) + scanner.MutexManager.Claim(mutexType, checksum, done) + defer close(done) + + if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { + qb := r.Gallery() + + g, _ = qb.FindByChecksum(checksum) + if g != nil { + exists, _ := utils.FileExists(g.Path.String) + if !scanner.CaseSensitiveFs { + // #1426 - if file exists but is a case-insensitive match for the + // original filename, then treat it as a move + if exists && strings.EqualFold(path, g.Path.String) { + exists = false + } + } + + if exists { + logger.Infof("%s already exists. Duplicate of %s ", path, g.Path.String) + } else { + logger.Infof("%s already exists. 
Updating path...", path) + g.Path = sql.NullString{ + String: path, + Valid: true, + } + g, err = qb.Update(*g) + if err != nil { + return err + } + + isUpdatedGallery = true + } + } else if scanner.hasImages(path) { // don't create gallery if it has no images + currentTime := time.Now() + + g = &models.Gallery{ + Zip: true, + Title: sql.NullString{ + String: utils.GetNameFromPath(path, scanner.StripFileExtension), + Valid: true, + }, + CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + } + + g.SetFile(*scanned) + + // only warn when creating the gallery + ok, err := utils.IsZipFileUncompressed(path) + if err == nil && !ok { + logger.Warnf("%s is using above store (0) level compression.", path) + } + + logger.Infof("%s doesn't exist. Creating new item...", path) + g, err = qb.Create(*g) + if err != nil { + return err + } + + scanImages = true + isNewGallery = true + } + + return nil + }); err != nil { + return nil, false, err + } + + if isNewGallery { + scanner.PluginCache.ExecutePostHooks(scanner.Ctx, g.ID, plugin.GalleryCreatePost, nil, nil) + } else if isUpdatedGallery { + scanner.PluginCache.ExecutePostHooks(scanner.Ctx, g.ID, plugin.GalleryUpdatePost, nil, nil) + } + + scanImages = isNewGallery + retGallery = g + + return +} + +func (scanner *Scanner) isImage(pathname string) bool { + return utils.MatchExtension(pathname, scanner.ImageExtensions) +} + +func (scanner *Scanner) hasImages(path string) bool { + readCloser, err := zip.OpenReader(path) + if err != nil { + logger.Warnf("Error while walking gallery zip: %v", err) + return false + } + defer readCloser.Close() + + for _, file := range readCloser.File { + if file.FileInfo().IsDir() { + continue + } + + if strings.Contains(file.Name, "__MACOSX") { + continue + } + + if !scanner.isImage(file.Name) { + continue + } + + return true + } + + return false +} diff --git a/pkg/identify/identify.go b/pkg/identify/identify.go new file mode 
100644 index 000000000..d64a36fe6 --- /dev/null +++ b/pkg/identify/identify.go @@ -0,0 +1,274 @@ +package identify + +import ( + "context" + "database/sql" + "fmt" + + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scene" + "github.com/stashapp/stash/pkg/utils" +) + +type SceneScraper interface { + ScrapeScene(sceneID int) (*models.ScrapedScene, error) +} + +type SceneUpdatePostHookExecutor interface { + ExecuteSceneUpdatePostHooks(ctx context.Context, input models.SceneUpdateInput, inputFields []string) +} + +type ScraperSource struct { + Name string + Options *models.IdentifyMetadataOptionsInput + Scraper SceneScraper + RemoteSite string +} + +type SceneIdentifier struct { + DefaultOptions *models.IdentifyMetadataOptionsInput + Sources []ScraperSource + ScreenshotSetter scene.ScreenshotSetter + SceneUpdatePostHookExecutor SceneUpdatePostHookExecutor +} + +func (t *SceneIdentifier) Identify(ctx context.Context, txnManager models.TransactionManager, scene *models.Scene) error { + result, err := t.scrapeScene(scene) + if err != nil { + return err + } + + if result == nil { + logger.Infof("Unable to identify %s", scene.Path) + return nil + } + + // results were found, modify the scene + if err := t.modifyScene(ctx, txnManager, scene, result); err != nil { + return fmt.Errorf("error modifying scene: %v", err) + } + + return nil +} + +type scrapeResult struct { + result *models.ScrapedScene + source ScraperSource +} + +func (t *SceneIdentifier) scrapeScene(scene *models.Scene) (*scrapeResult, error) { + // iterate through the input sources + for _, source := range t.Sources { + // scrape using the source + scraped, err := source.Scraper.ScrapeScene(scene.ID) + if err != nil { + return nil, fmt.Errorf("error scraping from %v: %v", source.Scraper, err) + } + + // if results were found then return + if scraped != nil { + return &scrapeResult{ + result: scraped, + source: source, + }, nil + } + } + + return 
nil, nil +} + +func (t *SceneIdentifier) getSceneUpdater(ctx context.Context, s *models.Scene, result *scrapeResult, repo models.Repository) (*scene.UpdateSet, error) { + ret := &scene.UpdateSet{ + ID: s.ID, + } + + options := []models.IdentifyMetadataOptionsInput{} + if result.source.Options != nil { + options = append(options, *result.source.Options) + } + if t.DefaultOptions != nil { + options = append(options, *t.DefaultOptions) + } + + fieldOptions := getFieldOptions(options) + + setOrganized := false + for _, o := range options { + if o.SetOrganized != nil { + setOrganized = *o.SetOrganized + break + } + } + + scraped := result.result + + rel := sceneRelationships{ + repo: repo, + scene: s, + result: result, + fieldOptions: fieldOptions, + } + + ret.Partial = getScenePartial(s, scraped, fieldOptions, setOrganized) + + studioID, err := rel.studio() + if err != nil { + return nil, fmt.Errorf("error getting studio: %w", err) + } + + if studioID != nil { + ret.Partial.StudioID = &sql.NullInt64{ + Int64: *studioID, + Valid: true, + } + } + + ignoreMale := false + for _, o := range options { + if o.IncludeMalePerformers != nil { + ignoreMale = !*o.IncludeMalePerformers + break + } + } + + ret.PerformerIDs, err = rel.performers(ignoreMale) + if err != nil { + return nil, err + } + + ret.TagIDs, err = rel.tags() + if err != nil { + return nil, err + } + + ret.StashIDs, err = rel.stashIDs() + if err != nil { + return nil, err + } + + setCoverImage := false + for _, o := range options { + if o.SetCoverImage != nil { + setCoverImage = *o.SetCoverImage + break + } + } + + if setCoverImage { + ret.CoverImage, err = rel.cover(ctx) + if err != nil { + return nil, err + } + } + + return ret, nil +} + +func (t *SceneIdentifier) modifyScene(ctx context.Context, txnManager models.TransactionManager, s *models.Scene, result *scrapeResult) error { + var updater *scene.UpdateSet + if err := txnManager.WithTxn(ctx, func(repo models.Repository) error { + var err error + updater, err 
= t.getSceneUpdater(ctx, s, result, repo) + if err != nil { + return err + } + + // don't update anything if nothing was set + if updater.IsEmpty() { + logger.Infof("Nothing to set for %s", s.Path) + return nil + } + + _, err = updater.Update(repo.Scene(), t.ScreenshotSetter) + if err != nil { + return fmt.Errorf("error updating scene: %w", err) + } + + as := "" + title := updater.Partial.Title + if title != nil { + as = fmt.Sprintf(" as %s", title.String) + } + logger.Infof("Successfully identified %s%s using %s", s.Path, as, result.source.Name) + + return nil + }); err != nil { + return err + } + + // fire post-update hooks + if !updater.IsEmpty() { + updateInput := updater.UpdateInput() + fields := utils.NotNilFields(updateInput, "json") + t.SceneUpdatePostHookExecutor.ExecuteSceneUpdatePostHooks(ctx, updateInput, fields) + } + + return nil +} + +func getFieldOptions(options []models.IdentifyMetadataOptionsInput) map[string]*models.IdentifyFieldOptionsInput { + // prefer source-specific field strategies, then the defaults + ret := make(map[string]*models.IdentifyFieldOptionsInput) + for _, oo := range options { + for _, f := range oo.FieldOptions { + if _, found := ret[f.Field]; !found { + ret[f.Field] = f + } + } + } + + return ret +} + +func getScenePartial(scene *models.Scene, scraped *models.ScrapedScene, fieldOptions map[string]*models.IdentifyFieldOptionsInput, setOrganized bool) models.ScenePartial { + partial := models.ScenePartial{ + ID: scene.ID, + } + + if scraped.Title != nil && scene.Title.String != *scraped.Title { + if shouldSetSingleValueField(fieldOptions["title"], scene.Title.String != "") { + partial.Title = models.NullStringPtr(*scraped.Title) + } + } + if scraped.Date != nil && scene.Date.String != *scraped.Date { + if shouldSetSingleValueField(fieldOptions["date"], scene.Date.Valid) { + partial.Date = &models.SQLiteDate{ + String: *scraped.Date, + Valid: true, + } + } + } + if scraped.Details != nil && scene.Details.String != 
*scraped.Details { + if shouldSetSingleValueField(fieldOptions["details"], scene.Details.String != "") { + partial.Details = models.NullStringPtr(*scraped.Details) + } + } + if scraped.URL != nil && scene.URL.String != *scraped.URL { + if shouldSetSingleValueField(fieldOptions["url"], scene.URL.String != "") { + partial.URL = models.NullStringPtr(*scraped.URL) + } + } + + if setOrganized && !scene.Organized { + // just reuse the boolean since we know it's true + partial.Organized = &setOrganized + } + + return partial +} + +func shouldSetSingleValueField(strategy *models.IdentifyFieldOptionsInput, hasExistingValue bool) bool { + // if unset then default to MERGE + fs := models.IdentifyFieldStrategyMerge + + if strategy != nil && strategy.Strategy.IsValid() { + fs = strategy.Strategy + } + + if fs == models.IdentifyFieldStrategyIgnore { + return false + } + + return !hasExistingValue || fs == models.IdentifyFieldStrategyOverwrite +} diff --git a/pkg/identify/identify_test.go b/pkg/identify/identify_test.go new file mode 100644 index 000000000..a598c04bb --- /dev/null +++ b/pkg/identify/identify_test.go @@ -0,0 +1,502 @@ +package identify + +import ( + "context" + "errors" + "reflect" + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/utils" + "github.com/stretchr/testify/mock" +) + +type mockSceneScraper struct { + errIDs []int + results map[int]*models.ScrapedScene +} + +func (s mockSceneScraper) ScrapeScene(sceneID int) (*models.ScrapedScene, error) { + if utils.IntInclude(s.errIDs, sceneID) { + return nil, errors.New("scrape scene error") + } + return s.results[sceneID], nil +} + +type mockHookExecutor struct { +} + +func (s mockHookExecutor) ExecuteSceneUpdatePostHooks(ctx context.Context, input models.SceneUpdateInput, inputFields []string) { +} + +func TestSceneIdentifier_Identify(t *testing.T) { + const ( + errID1 = iota + errID2 + missingID + found1ID + found2ID + errUpdateID 
+ ) + + var scrapedTitle = "scrapedTitle" + + defaultOptions := &models.IdentifyMetadataOptionsInput{} + sources := []ScraperSource{ + { + Scraper: mockSceneScraper{ + errIDs: []int{errID1}, + results: map[int]*models.ScrapedScene{ + found1ID: { + Title: &scrapedTitle, + }, + }, + }, + }, + { + Scraper: mockSceneScraper{ + errIDs: []int{errID2}, + results: map[int]*models.ScrapedScene{ + found2ID: { + Title: &scrapedTitle, + }, + errUpdateID: { + Title: &scrapedTitle, + }, + }, + }, + }, + } + + repo := mocks.NewTransactionManager() + repo.Scene().(*mocks.SceneReaderWriter).On("Update", mock.MatchedBy(func(partial models.ScenePartial) bool { + return partial.ID != errUpdateID + })).Return(nil, nil) + repo.Scene().(*mocks.SceneReaderWriter).On("Update", mock.MatchedBy(func(partial models.ScenePartial) bool { + return partial.ID == errUpdateID + })).Return(nil, errors.New("update error")) + + tests := []struct { + name string + sceneID int + wantErr bool + }{ + { + "error scraping", + errID1, + true, + }, + { + "error scraping from second", + errID2, + true, + }, + { + "found in first scraper", + found1ID, + false, + }, + { + "found in second scraper", + found2ID, + false, + }, + { + "not found", + missingID, + false, + }, + { + "error modifying", + errUpdateID, + true, + }, + } + + identifier := SceneIdentifier{ + DefaultOptions: defaultOptions, + Sources: sources, + SceneUpdatePostHookExecutor: mockHookExecutor{}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + scene := &models.Scene{ + ID: tt.sceneID, + } + if err := identifier.Identify(context.TODO(), repo, scene); (err != nil) != tt.wantErr { + t.Errorf("SceneIdentifier.Identify() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +func TestSceneIdentifier_modifyScene(t *testing.T) { + repo := mocks.NewTransactionManager() + tr := &SceneIdentifier{} + + type args struct { + scene *models.Scene + result *scrapeResult + } + tests := []struct { + name string + args args + wantErr 
bool + }{ + { + "empty update", + args{ + &models.Scene{}, + &scrapeResult{ + result: &models.ScrapedScene{}, + }, + }, + false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := tr.modifyScene(context.TODO(), repo, tt.args.scene, tt.args.result); (err != nil) != tt.wantErr { + t.Errorf("SceneIdentifier.modifyScene() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +func Test_getFieldOptions(t *testing.T) { + const ( + inFirst = "inFirst" + inSecond = "inSecond" + inBoth = "inBoth" + ) + + type args struct { + options []models.IdentifyMetadataOptionsInput + } + tests := []struct { + name string + args args + want map[string]*models.IdentifyFieldOptionsInput + }{ + { + "simple", + args{ + []models.IdentifyMetadataOptionsInput{ + { + FieldOptions: []*models.IdentifyFieldOptionsInput{ + { + Field: inFirst, + Strategy: models.IdentifyFieldStrategyIgnore, + }, + { + Field: inBoth, + Strategy: models.IdentifyFieldStrategyIgnore, + }, + }, + }, + { + FieldOptions: []*models.IdentifyFieldOptionsInput{ + { + Field: inSecond, + Strategy: models.IdentifyFieldStrategyMerge, + }, + { + Field: inBoth, + Strategy: models.IdentifyFieldStrategyMerge, + }, + }, + }, + }, + }, + map[string]*models.IdentifyFieldOptionsInput{ + inFirst: { + Field: inFirst, + Strategy: models.IdentifyFieldStrategyIgnore, + }, + inSecond: { + Field: inSecond, + Strategy: models.IdentifyFieldStrategyMerge, + }, + inBoth: { + Field: inBoth, + Strategy: models.IdentifyFieldStrategyIgnore, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := getFieldOptions(tt.args.options); !reflect.DeepEqual(got, tt.want) { + t.Errorf("getFieldOptions() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_getScenePartial(t *testing.T) { + var ( + originalTitle = "originalTitle" + originalDate = "originalDate" + originalDetails = "originalDetails" + originalURL = "originalURL" + ) + + var ( + scrapedTitle = "scrapedTitle" + 
scrapedDate = "scrapedDate" + scrapedDetails = "scrapedDetails" + scrapedURL = "scrapedURL" + ) + + originalScene := &models.Scene{ + Title: models.NullString(originalTitle), + Date: models.SQLiteDate{ + String: originalDate, + Valid: true, + }, + Details: models.NullString(originalDetails), + URL: models.NullString(originalURL), + } + + organisedScene := *originalScene + organisedScene.Organized = true + + emptyScene := &models.Scene{} + + postPartial := models.ScenePartial{ + Title: models.NullStringPtr(scrapedTitle), + Date: &models.SQLiteDate{ + String: scrapedDate, + Valid: true, + }, + Details: models.NullStringPtr(scrapedDetails), + URL: models.NullStringPtr(scrapedURL), + } + + scrapedScene := &models.ScrapedScene{ + Title: &scrapedTitle, + Date: &scrapedDate, + Details: &scrapedDetails, + URL: &scrapedURL, + } + + scrapedUnchangedScene := &models.ScrapedScene{ + Title: &originalTitle, + Date: &originalDate, + Details: &originalDetails, + URL: &originalURL, + } + + makeFieldOptions := func(input *models.IdentifyFieldOptionsInput) map[string]*models.IdentifyFieldOptionsInput { + return map[string]*models.IdentifyFieldOptionsInput{ + "title": input, + "date": input, + "details": input, + "url": input, + } + } + + overwriteAll := makeFieldOptions(&models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyOverwrite, + }) + ignoreAll := makeFieldOptions(&models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyIgnore, + }) + mergeAll := makeFieldOptions(&models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyMerge, + }) + + setOrganised := true + + type args struct { + scene *models.Scene + scraped *models.ScrapedScene + fieldOptions map[string]*models.IdentifyFieldOptionsInput + setOrganized bool + } + tests := []struct { + name string + args args + want models.ScenePartial + }{ + { + "overwrite all", + args{ + originalScene, + scrapedScene, + overwriteAll, + false, + }, + postPartial, + }, + { + "ignore all", 
+ args{ + originalScene, + scrapedScene, + ignoreAll, + false, + }, + models.ScenePartial{}, + }, + { + "merge (existing values)", + args{ + originalScene, + scrapedScene, + mergeAll, + false, + }, + models.ScenePartial{}, + }, + { + "merge (empty values)", + args{ + emptyScene, + scrapedScene, + mergeAll, + false, + }, + postPartial, + }, + { + "unchanged", + args{ + originalScene, + scrapedUnchangedScene, + overwriteAll, + false, + }, + models.ScenePartial{}, + }, + { + "set organized", + args{ + originalScene, + scrapedUnchangedScene, + overwriteAll, + true, + }, + models.ScenePartial{ + Organized: &setOrganised, + }, + }, + { + "set organized unchanged", + args{ + &organisedScene, + scrapedUnchangedScene, + overwriteAll, + true, + }, + models.ScenePartial{}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := getScenePartial(tt.args.scene, tt.args.scraped, tt.args.fieldOptions, tt.args.setOrganized); !reflect.DeepEqual(got, tt.want) { + t.Errorf("getScenePartial() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_shouldSetSingleValueField(t *testing.T) { + const invalid = "invalid" + + type args struct { + strategy *models.IdentifyFieldOptionsInput + hasExistingValue bool + } + tests := []struct { + name string + args args + want bool + }{ + { + "ignore", + args{ + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyIgnore, + }, + false, + }, + false, + }, + { + "merge existing", + args{ + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyMerge, + }, + true, + }, + false, + }, + { + "merge absent", + args{ + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyMerge, + }, + false, + }, + true, + }, + { + "overwrite", + args{ + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyOverwrite, + }, + true, + }, + true, + }, + { + "nil (merge) existing", + args{ + &models.IdentifyFieldOptionsInput{}, + true, + }, + false, + }, + { 
+ "nil (merge) absent", + args{ + &models.IdentifyFieldOptionsInput{}, + false, + }, + true, + }, + { + "invalid (merge) existing", + args{ + &models.IdentifyFieldOptionsInput{ + Strategy: invalid, + }, + true, + }, + false, + }, + { + "invalid (merge) absent", + args{ + &models.IdentifyFieldOptionsInput{ + Strategy: invalid, + }, + false, + }, + true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := shouldSetSingleValueField(tt.args.strategy, tt.args.hasExistingValue); got != tt.want { + t.Errorf("shouldSetSingleValueField() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/identify/performer.go b/pkg/identify/performer.go new file mode 100644 index 000000000..4d0855388 --- /dev/null +++ b/pkg/identify/performer.go @@ -0,0 +1,108 @@ +package identify + +import ( + "database/sql" + "fmt" + "strconv" + "time" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +func getPerformerID(endpoint string, r models.Repository, p *models.ScrapedPerformer, createMissing bool) (*int, error) { + if p.StoredID != nil { + // existing performer, just add it + performerID, err := strconv.Atoi(*p.StoredID) + if err != nil { + return nil, fmt.Errorf("error converting performer ID %s: %w", *p.StoredID, err) + } + + return &performerID, nil + } else if createMissing && p.Name != nil { // name is mandatory + return createMissingPerformer(endpoint, r, p) + } + + return nil, nil +} + +func createMissingPerformer(endpoint string, r models.Repository, p *models.ScrapedPerformer) (*int, error) { + created, err := r.Performer().Create(scrapedToPerformerInput(p)) + if err != nil { + return nil, fmt.Errorf("error creating performer: %w", err) + } + + if endpoint != "" && p.RemoteSiteID != nil { + if err := r.Performer().UpdateStashIDs(created.ID, []models.StashID{ + { + Endpoint: endpoint, + StashID: *p.RemoteSiteID, + }, + }); err != nil { + return nil, fmt.Errorf("error setting performer stash id: %w", err) 
+ } + } + + return &created.ID, nil +} + +func scrapedToPerformerInput(performer *models.ScrapedPerformer) models.Performer { + currentTime := time.Now() + ret := models.Performer{ + Name: sql.NullString{String: *performer.Name, Valid: true}, + Checksum: utils.MD5FromString(*performer.Name), + CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + Favorite: sql.NullBool{Bool: false, Valid: true}, + } + if performer.Birthdate != nil { + ret.Birthdate = models.SQLiteDate{String: *performer.Birthdate, Valid: true} + } + if performer.DeathDate != nil { + ret.DeathDate = models.SQLiteDate{String: *performer.DeathDate, Valid: true} + } + if performer.Gender != nil { + ret.Gender = sql.NullString{String: *performer.Gender, Valid: true} + } + if performer.Ethnicity != nil { + ret.Ethnicity = sql.NullString{String: *performer.Ethnicity, Valid: true} + } + if performer.Country != nil { + ret.Country = sql.NullString{String: *performer.Country, Valid: true} + } + if performer.EyeColor != nil { + ret.EyeColor = sql.NullString{String: *performer.EyeColor, Valid: true} + } + if performer.HairColor != nil { + ret.HairColor = sql.NullString{String: *performer.HairColor, Valid: true} + } + if performer.Height != nil { + ret.Height = sql.NullString{String: *performer.Height, Valid: true} + } + if performer.Measurements != nil { + ret.Measurements = sql.NullString{String: *performer.Measurements, Valid: true} + } + if performer.FakeTits != nil { + ret.FakeTits = sql.NullString{String: *performer.FakeTits, Valid: true} + } + if performer.CareerLength != nil { + ret.CareerLength = sql.NullString{String: *performer.CareerLength, Valid: true} + } + if performer.Tattoos != nil { + ret.Tattoos = sql.NullString{String: *performer.Tattoos, Valid: true} + } + if performer.Piercings != nil { + ret.Piercings = sql.NullString{String: *performer.Piercings, Valid: true} + } + if performer.Aliases != nil { + ret.Aliases = 
sql.NullString{String: *performer.Aliases, Valid: true} + } + if performer.Twitter != nil { + ret.Twitter = sql.NullString{String: *performer.Twitter, Valid: true} + } + if performer.Instagram != nil { + ret.Instagram = sql.NullString{String: *performer.Instagram, Valid: true} + } + + return ret +} diff --git a/pkg/identify/performer_test.go b/pkg/identify/performer_test.go new file mode 100644 index 000000000..ebe8e49fe --- /dev/null +++ b/pkg/identify/performer_test.go @@ -0,0 +1,329 @@ +package identify + +import ( + "database/sql" + "errors" + "reflect" + "strconv" + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + + "github.com/stretchr/testify/mock" +) + +func Test_getPerformerID(t *testing.T) { + const ( + emptyEndpoint = "" + endpoint = "endpoint" + ) + invalidStoredID := "invalidStoredID" + validStoredIDStr := "1" + validStoredID := 1 + name := "name" + + repo := mocks.NewTransactionManager() + repo.PerformerMock().On("Create", mock.Anything).Return(&models.Performer{ + ID: validStoredID, + }, nil) + + type args struct { + endpoint string + p *models.ScrapedPerformer + createMissing bool + } + tests := []struct { + name string + args args + want *int + wantErr bool + }{ + { + "no performer", + args{ + emptyEndpoint, + &models.ScrapedPerformer{}, + false, + }, + nil, + false, + }, + { + "invalid stored id", + args{ + emptyEndpoint, + &models.ScrapedPerformer{ + StoredID: &invalidStoredID, + }, + false, + }, + nil, + true, + }, + { + "valid stored id", + args{ + emptyEndpoint, + &models.ScrapedPerformer{ + StoredID: &validStoredIDStr, + }, + false, + }, + &validStoredID, + false, + }, + { + "nil stored not creating", + args{ + emptyEndpoint, + &models.ScrapedPerformer{ + Name: &name, + }, + false, + }, + nil, + false, + }, + { + "nil name creating", + args{ + emptyEndpoint, + &models.ScrapedPerformer{}, + true, + }, + nil, + false, + }, + { + "valid name creating", + args{ + emptyEndpoint, + 
&models.ScrapedPerformer{ + Name: &name, + }, + true, + }, + &validStoredID, + false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := getPerformerID(tt.args.endpoint, repo, tt.args.p, tt.args.createMissing) + if (err != nil) != tt.wantErr { + t.Errorf("getPerformerID() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("getPerformerID() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_createMissingPerformer(t *testing.T) { + emptyEndpoint := "" + validEndpoint := "validEndpoint" + invalidEndpoint := "invalidEndpoint" + remoteSiteID := "remoteSiteID" + validName := "validName" + invalidName := "invalidName" + performerID := 1 + + repo := mocks.NewTransactionManager() + repo.PerformerMock().On("Create", mock.MatchedBy(func(p models.Performer) bool { + return p.Name.String == validName + })).Return(&models.Performer{ + ID: performerID, + }, nil) + repo.PerformerMock().On("Create", mock.MatchedBy(func(p models.Performer) bool { + return p.Name.String == invalidName + })).Return(nil, errors.New("error creating performer")) + + repo.PerformerMock().On("UpdateStashIDs", performerID, []models.StashID{ + { + Endpoint: invalidEndpoint, + StashID: remoteSiteID, + }, + }).Return(errors.New("error updating stash ids")) + repo.PerformerMock().On("UpdateStashIDs", performerID, []models.StashID{ + { + Endpoint: validEndpoint, + StashID: remoteSiteID, + }, + }).Return(nil) + + type args struct { + endpoint string + p *models.ScrapedPerformer + } + tests := []struct { + name string + args args + want *int + wantErr bool + }{ + { + "simple", + args{ + emptyEndpoint, + &models.ScrapedPerformer{ + Name: &validName, + }, + }, + &performerID, + false, + }, + { + "error creating", + args{ + emptyEndpoint, + &models.ScrapedPerformer{ + Name: &invalidName, + }, + }, + nil, + true, + }, + { + "valid stash id", + args{ + validEndpoint, + &models.ScrapedPerformer{ + Name: &validName, + 
RemoteSiteID: &remoteSiteID, + }, + }, + &performerID, + false, + }, + { + "invalid stash id", + args{ + invalidEndpoint, + &models.ScrapedPerformer{ + Name: &validName, + RemoteSiteID: &remoteSiteID, + }, + }, + nil, + true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := createMissingPerformer(tt.args.endpoint, repo, tt.args.p) + if (err != nil) != tt.wantErr { + t.Errorf("createMissingPerformer() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("createMissingPerformer() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_scrapedToPerformerInput(t *testing.T) { + name := "name" + md5 := "b068931cc450442b63f5b3d276ea4297" + + var stringValues []string + for i := 0; i < 16; i++ { + stringValues = append(stringValues, strconv.Itoa(i)) + } + + upTo := 0 + nextVal := func() *string { + ret := stringValues[upTo] + upTo = (upTo + 1) % len(stringValues) + return &ret + } + + tests := []struct { + name string + performer *models.ScrapedPerformer + want models.Performer + }{ + { + "set all", + &models.ScrapedPerformer{ + Name: &name, + Birthdate: nextVal(), + DeathDate: nextVal(), + Gender: nextVal(), + Ethnicity: nextVal(), + Country: nextVal(), + EyeColor: nextVal(), + HairColor: nextVal(), + Height: nextVal(), + Measurements: nextVal(), + FakeTits: nextVal(), + CareerLength: nextVal(), + Tattoos: nextVal(), + Piercings: nextVal(), + Aliases: nextVal(), + Twitter: nextVal(), + Instagram: nextVal(), + }, + models.Performer{ + Name: models.NullString(name), + Checksum: md5, + Favorite: sql.NullBool{ + Bool: false, + Valid: true, + }, + Birthdate: models.SQLiteDate{ + String: *nextVal(), + Valid: true, + }, + DeathDate: models.SQLiteDate{ + String: *nextVal(), + Valid: true, + }, + Gender: models.NullString(*nextVal()), + Ethnicity: models.NullString(*nextVal()), + Country: models.NullString(*nextVal()), + EyeColor: models.NullString(*nextVal()), + HairColor: 
models.NullString(*nextVal()), + Height: models.NullString(*nextVal()), + Measurements: models.NullString(*nextVal()), + FakeTits: models.NullString(*nextVal()), + CareerLength: models.NullString(*nextVal()), + Tattoos: models.NullString(*nextVal()), + Piercings: models.NullString(*nextVal()), + Aliases: models.NullString(*nextVal()), + Twitter: models.NullString(*nextVal()), + Instagram: models.NullString(*nextVal()), + }, + }, + { + "set none", + &models.ScrapedPerformer{ + Name: &name, + }, + models.Performer{ + Name: models.NullString(name), + Checksum: md5, + Favorite: sql.NullBool{ + Bool: false, + Valid: true, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := scrapedToPerformerInput(tt.performer) + + // clear created/updated dates + got.CreatedAt = models.SQLiteTimestamp{} + got.UpdatedAt = got.CreatedAt + + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("scrapedToPerformerInput() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/identify/scene.go b/pkg/identify/scene.go new file mode 100644 index 000000000..a8b5d4cff --- /dev/null +++ b/pkg/identify/scene.go @@ -0,0 +1,251 @@ +package identify + +import ( + "bytes" + "context" + "fmt" + "strconv" + "strings" + "time" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +type sceneRelationships struct { + repo models.Repository + scene *models.Scene + result *scrapeResult + fieldOptions map[string]*models.IdentifyFieldOptionsInput +} + +func (g sceneRelationships) studio() (*int64, error) { + existingID := g.scene.StudioID + fieldStrategy := g.fieldOptions["studio"] + createMissing := fieldStrategy != nil && utils.IsTrue(fieldStrategy.CreateMissing) + + scraped := g.result.result.Studio + endpoint := g.result.source.RemoteSite + + if scraped == nil || !shouldSetSingleValueField(fieldStrategy, existingID.Valid) { + return nil, nil + } + + if scraped.StoredID != nil { + // existing studio, just set it + studioID, err 
:= strconv.ParseInt(*scraped.StoredID, 10, 64) + if err != nil { + return nil, fmt.Errorf("error converting studio ID %s: %w", *scraped.StoredID, err) + } + + // only return value if different to current + if existingID.Int64 != studioID { + return &studioID, nil + } + } else if createMissing { + return createMissingStudio(endpoint, g.repo, scraped) + } + + return nil, nil +} + +func (g sceneRelationships) performers(ignoreMale bool) ([]int, error) { + fieldStrategy := g.fieldOptions["performers"] + scraped := g.result.result.Performers + + // just check if ignored + if len(scraped) == 0 || !shouldSetSingleValueField(fieldStrategy, false) { + return nil, nil + } + + createMissing := fieldStrategy != nil && utils.IsTrue(fieldStrategy.CreateMissing) + strategy := models.IdentifyFieldStrategyMerge + if fieldStrategy != nil { + strategy = fieldStrategy.Strategy + } + + repo := g.repo + endpoint := g.result.source.RemoteSite + + var performerIDs []int + originalPerformerIDs, err := repo.Scene().GetPerformerIDs(g.scene.ID) + if err != nil { + return nil, fmt.Errorf("error getting scene performers: %w", err) + } + + if strategy == models.IdentifyFieldStrategyMerge { + // add to existing + performerIDs = originalPerformerIDs + } + + for _, p := range scraped { + if ignoreMale && p.Gender != nil && strings.EqualFold(*p.Gender, models.GenderEnumMale.String()) { + continue + } + + performerID, err := getPerformerID(endpoint, repo, p, createMissing) + if err != nil { + return nil, err + } + + if performerID != nil { + performerIDs = utils.IntAppendUnique(performerIDs, *performerID) + } + } + + // don't return if nothing was added + if utils.SliceSame(originalPerformerIDs, performerIDs) { + return nil, nil + } + + return performerIDs, nil +} + +func (g sceneRelationships) tags() ([]int, error) { + fieldStrategy := g.fieldOptions["tags"] + scraped := g.result.result.Tags + target := g.scene + r := g.repo + + // just check if ignored + if len(scraped) == 0 || 
!shouldSetSingleValueField(fieldStrategy, false) { + return nil, nil + } + + createMissing := fieldStrategy != nil && utils.IsTrue(fieldStrategy.CreateMissing) + strategy := models.IdentifyFieldStrategyMerge + if fieldStrategy != nil { + strategy = fieldStrategy.Strategy + } + + var tagIDs []int + originalTagIDs, err := r.Scene().GetTagIDs(target.ID) + if err != nil { + return nil, fmt.Errorf("error getting scene tags: %w", err) + } + + if strategy == models.IdentifyFieldStrategyMerge { + // add to existing + tagIDs = originalTagIDs + } + + for _, t := range scraped { + if t.StoredID != nil { + // existing tag, just add it + tagID, err := strconv.ParseInt(*t.StoredID, 10, 64) + if err != nil { + return nil, fmt.Errorf("error converting tag ID %s: %w", *t.StoredID, err) + } + + tagIDs = utils.IntAppendUnique(tagIDs, int(tagID)) + } else if createMissing { + now := time.Now() + created, err := r.Tag().Create(models.Tag{ + Name: t.Name, + CreatedAt: models.SQLiteTimestamp{Timestamp: now}, + UpdatedAt: models.SQLiteTimestamp{Timestamp: now}, + }) + if err != nil { + return nil, fmt.Errorf("error creating tag: %w", err) + } + + tagIDs = append(tagIDs, created.ID) + } + } + + // don't return if nothing was added + if utils.SliceSame(originalTagIDs, tagIDs) { + return nil, nil + } + + return tagIDs, nil +} + +func (g sceneRelationships) stashIDs() ([]models.StashID, error) { + remoteSiteID := g.result.result.RemoteSiteID + fieldStrategy := g.fieldOptions["stash_ids"] + target := g.scene + r := g.repo + + endpoint := g.result.source.RemoteSite + + // just check if ignored + if remoteSiteID == nil || endpoint == "" || !shouldSetSingleValueField(fieldStrategy, false) { + return nil, nil + } + + strategy := models.IdentifyFieldStrategyMerge + if fieldStrategy != nil { + strategy = fieldStrategy.Strategy + } + + var originalStashIDs []models.StashID + var stashIDs []models.StashID + stashIDPtrs, err := r.Scene().GetStashIDs(target.ID) + if err != nil { + return nil, 
fmt.Errorf("error getting scene tag: %w", err) + } + + // convert existing to non-pointer types + for _, stashID := range stashIDPtrs { + originalStashIDs = append(originalStashIDs, *stashID) + } + + if strategy == models.IdentifyFieldStrategyMerge { + // add to existing + stashIDs = originalStashIDs + } + + for i, stashID := range stashIDs { + if endpoint == stashID.Endpoint { + // if stashID is the same, then don't set + if stashID.StashID == *remoteSiteID { + return nil, nil + } + + // replace the stash id and return + stashID.StashID = *remoteSiteID + stashIDs[i] = stashID + return stashIDs, nil + } + } + + // not found, create new entry + stashIDs = append(stashIDs, models.StashID{ + StashID: *remoteSiteID, + Endpoint: endpoint, + }) + + if utils.SliceSame(originalStashIDs, stashIDs) { + return nil, nil + } + + return stashIDs, nil +} + +func (g sceneRelationships) cover(ctx context.Context) ([]byte, error) { + scraped := g.result.result.Image + r := g.repo + + if scraped == nil { + return nil, nil + } + + // always overwrite if present + existingCover, err := r.Scene().GetCover(g.scene.ID) + if err != nil { + return nil, fmt.Errorf("error getting scene cover: %w", err) + } + + data, err := utils.ProcessImageInput(ctx, *scraped) + if err != nil { + return nil, fmt.Errorf("error processing image input: %w", err) + } + + // only return if different + if !bytes.Equal(existingCover, data) { + return data, nil + } + + return nil, nil +} diff --git a/pkg/identify/scene_test.go b/pkg/identify/scene_test.go new file mode 100644 index 000000000..f0ba7da17 --- /dev/null +++ b/pkg/identify/scene_test.go @@ -0,0 +1,782 @@ +package identify + +import ( + "context" + "errors" + "reflect" + "strconv" + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/utils" + "github.com/stretchr/testify/mock" +) + +func Test_sceneRelationships_studio(t *testing.T) { + validStoredID := "1" + var 
validStoredIDInt int64 = 1 + invalidStoredID := "invalidStoredID" + createMissing := true + + defaultOptions := &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyMerge, + } + + repo := mocks.NewTransactionManager() + repo.StudioMock().On("Create", mock.Anything).Return(&models.Studio{ + ID: int(validStoredIDInt), + }, nil) + + tr := sceneRelationships{ + repo: repo, + fieldOptions: make(map[string]*models.IdentifyFieldOptionsInput), + } + + tests := []struct { + name string + scene *models.Scene + fieldOptions *models.IdentifyFieldOptionsInput + result *models.ScrapedStudio + want *int64 + wantErr bool + }{ + { + "nil studio", + &models.Scene{}, + defaultOptions, + nil, + nil, + false, + }, + { + "ignore", + &models.Scene{}, + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyIgnore, + }, + &models.ScrapedStudio{ + StoredID: &validStoredID, + }, + nil, + false, + }, + { + "invalid stored id", + &models.Scene{}, + defaultOptions, + &models.ScrapedStudio{ + StoredID: &invalidStoredID, + }, + nil, + true, + }, + { + "same stored id", + &models.Scene{ + StudioID: models.NullInt64(validStoredIDInt), + }, + defaultOptions, + &models.ScrapedStudio{ + StoredID: &validStoredID, + }, + nil, + false, + }, + { + "different stored id", + &models.Scene{}, + defaultOptions, + &models.ScrapedStudio{ + StoredID: &validStoredID, + }, + &validStoredIDInt, + false, + }, + { + "no create missing", + &models.Scene{}, + defaultOptions, + &models.ScrapedStudio{}, + nil, + false, + }, + { + "create missing", + &models.Scene{}, + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyMerge, + CreateMissing: &createMissing, + }, + &models.ScrapedStudio{}, + &validStoredIDInt, + false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tr.scene = tt.scene + tr.fieldOptions["studio"] = tt.fieldOptions + tr.result = &scrapeResult{ + result: &models.ScrapedScene{ + Studio: tt.result, + }, + } + + 
got, err := tr.studio() + if (err != nil) != tt.wantErr { + t.Errorf("sceneRelationships.studio() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("sceneRelationships.studio() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_sceneRelationships_performers(t *testing.T) { + const ( + sceneID = iota + sceneWithPerformerID + errSceneID + existingPerformerID + validStoredIDInt + ) + validStoredID := strconv.Itoa(validStoredIDInt) + invalidStoredID := "invalidStoredID" + createMissing := true + existingPerformerStr := strconv.Itoa(existingPerformerID) + validName := "validName" + female := models.GenderEnumFemale.String() + male := models.GenderEnumMale.String() + + defaultOptions := &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyMerge, + } + + repo := mocks.NewTransactionManager() + repo.SceneMock().On("GetPerformerIDs", sceneID).Return(nil, nil) + repo.SceneMock().On("GetPerformerIDs", sceneWithPerformerID).Return([]int{existingPerformerID}, nil) + repo.SceneMock().On("GetPerformerIDs", errSceneID).Return(nil, errors.New("error getting IDs")) + + tr := sceneRelationships{ + repo: repo, + fieldOptions: make(map[string]*models.IdentifyFieldOptionsInput), + } + + tests := []struct { + name string + sceneID int + fieldOptions *models.IdentifyFieldOptionsInput + scraped []*models.ScrapedPerformer + ignoreMale bool + want []int + wantErr bool + }{ + { + "ignore", + sceneID, + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyIgnore, + }, + []*models.ScrapedPerformer{ + { + StoredID: &validStoredID, + }, + }, + false, + nil, + false, + }, + { + "none", + sceneID, + defaultOptions, + []*models.ScrapedPerformer{}, + false, + nil, + false, + }, + { + "error getting ids", + errSceneID, + defaultOptions, + []*models.ScrapedPerformer{ + {}, + }, + false, + nil, + true, + }, + { + "merge existing", + sceneWithPerformerID, + defaultOptions, + 
[]*models.ScrapedPerformer{ + { + Name: &validName, + StoredID: &existingPerformerStr, + }, + }, + false, + nil, + false, + }, + { + "merge add", + sceneWithPerformerID, + defaultOptions, + []*models.ScrapedPerformer{ + { + Name: &validName, + StoredID: &validStoredID, + }, + }, + false, + []int{existingPerformerID, validStoredIDInt}, + false, + }, + { + "ignore male", + sceneID, + defaultOptions, + []*models.ScrapedPerformer{ + { + Name: &validName, + StoredID: &validStoredID, + Gender: &male, + }, + }, + true, + nil, + false, + }, + { + "overwrite", + sceneWithPerformerID, + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyOverwrite, + }, + []*models.ScrapedPerformer{ + { + Name: &validName, + StoredID: &validStoredID, + }, + }, + false, + []int{validStoredIDInt}, + false, + }, + { + "ignore male (not male)", + sceneWithPerformerID, + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyOverwrite, + }, + []*models.ScrapedPerformer{ + { + Name: &validName, + StoredID: &validStoredID, + Gender: &female, + }, + }, + true, + []int{validStoredIDInt}, + false, + }, + { + "error getting tag ID", + sceneID, + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyOverwrite, + CreateMissing: &createMissing, + }, + []*models.ScrapedPerformer{ + { + Name: &validName, + StoredID: &invalidStoredID, + }, + }, + false, + nil, + true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tr.scene = &models.Scene{ + ID: tt.sceneID, + } + tr.fieldOptions["performers"] = tt.fieldOptions + tr.result = &scrapeResult{ + result: &models.ScrapedScene{ + Performers: tt.scraped, + }, + } + + got, err := tr.performers(tt.ignoreMale) + if (err != nil) != tt.wantErr { + t.Errorf("sceneRelationships.performers() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("sceneRelationships.performers() = %v, want %v", got, tt.want) + } + }) + } +} + +func 
Test_sceneRelationships_tags(t *testing.T) { + const ( + sceneID = iota + sceneWithTagID + errSceneID + existingID + validStoredIDInt + ) + validStoredID := strconv.Itoa(validStoredIDInt) + invalidStoredID := "invalidStoredID" + createMissing := true + existingIDStr := strconv.Itoa(existingID) + validName := "validName" + invalidName := "invalidName" + + defaultOptions := &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyMerge, + } + + repo := mocks.NewTransactionManager() + repo.SceneMock().On("GetTagIDs", sceneID).Return(nil, nil) + repo.SceneMock().On("GetTagIDs", sceneWithTagID).Return([]int{existingID}, nil) + repo.SceneMock().On("GetTagIDs", errSceneID).Return(nil, errors.New("error getting IDs")) + + repo.TagMock().On("Create", mock.MatchedBy(func(p models.Tag) bool { + return p.Name == validName + })).Return(&models.Tag{ + ID: validStoredIDInt, + }, nil) + repo.TagMock().On("Create", mock.MatchedBy(func(p models.Tag) bool { + return p.Name == invalidName + })).Return(nil, errors.New("error creating tag")) + + tr := sceneRelationships{ + repo: repo, + fieldOptions: make(map[string]*models.IdentifyFieldOptionsInput), + } + + tests := []struct { + name string + sceneID int + fieldOptions *models.IdentifyFieldOptionsInput + scraped []*models.ScrapedTag + want []int + wantErr bool + }{ + { + "ignore", + sceneID, + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyIgnore, + }, + []*models.ScrapedTag{ + { + StoredID: &validStoredID, + }, + }, + nil, + false, + }, + { + "none", + sceneID, + defaultOptions, + []*models.ScrapedTag{}, + nil, + false, + }, + { + "error getting ids", + errSceneID, + defaultOptions, + []*models.ScrapedTag{ + {}, + }, + nil, + true, + }, + { + "merge existing", + sceneWithTagID, + defaultOptions, + []*models.ScrapedTag{ + { + Name: validName, + StoredID: &existingIDStr, + }, + }, + nil, + false, + }, + { + "merge add", + sceneWithTagID, + defaultOptions, + []*models.ScrapedTag{ + { + Name: 
validName, + StoredID: &validStoredID, + }, + }, + []int{existingID, validStoredIDInt}, + false, + }, + { + "overwrite", + sceneWithTagID, + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyOverwrite, + }, + []*models.ScrapedTag{ + { + Name: validName, + StoredID: &validStoredID, + }, + }, + []int{validStoredIDInt}, + false, + }, + { + "error getting tag ID", + sceneID, + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyOverwrite, + }, + []*models.ScrapedTag{ + { + Name: validName, + StoredID: &invalidStoredID, + }, + }, + nil, + true, + }, + { + "create missing", + sceneID, + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyOverwrite, + CreateMissing: &createMissing, + }, + []*models.ScrapedTag{ + { + Name: validName, + }, + }, + []int{validStoredIDInt}, + false, + }, + { + "error creating", + sceneID, + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyOverwrite, + CreateMissing: &createMissing, + }, + []*models.ScrapedTag{ + { + Name: invalidName, + }, + }, + nil, + true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tr.scene = &models.Scene{ + ID: tt.sceneID, + } + tr.fieldOptions["tags"] = tt.fieldOptions + tr.result = &scrapeResult{ + result: &models.ScrapedScene{ + Tags: tt.scraped, + }, + } + + got, err := tr.tags() + if (err != nil) != tt.wantErr { + t.Errorf("sceneRelationships.tags() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("sceneRelationships.tags() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_sceneRelationships_stashIDs(t *testing.T) { + const ( + sceneID = iota + sceneWithStashID + errSceneID + existingID + validStoredIDInt + ) + existingEndpoint := "existingEndpoint" + newEndpoint := "newEndpoint" + remoteSiteID := "remoteSiteID" + newRemoteSiteID := "newRemoteSiteID" + + defaultOptions := &models.IdentifyFieldOptionsInput{ + Strategy: 
models.IdentifyFieldStrategyMerge, + } + + repo := mocks.NewTransactionManager() + repo.SceneMock().On("GetStashIDs", sceneID).Return(nil, nil) + repo.SceneMock().On("GetStashIDs", sceneWithStashID).Return([]*models.StashID{ + { + StashID: remoteSiteID, + Endpoint: existingEndpoint, + }, + }, nil) + repo.SceneMock().On("GetStashIDs", errSceneID).Return(nil, errors.New("error getting IDs")) + + tr := sceneRelationships{ + repo: repo, + fieldOptions: make(map[string]*models.IdentifyFieldOptionsInput), + } + + tests := []struct { + name string + sceneID int + fieldOptions *models.IdentifyFieldOptionsInput + endpoint string + remoteSiteID *string + want []models.StashID + wantErr bool + }{ + { + "ignore", + sceneID, + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyIgnore, + }, + newEndpoint, + &remoteSiteID, + nil, + false, + }, + { + "no endpoint", + sceneID, + defaultOptions, + "", + &remoteSiteID, + nil, + false, + }, + { + "no site id", + sceneID, + defaultOptions, + newEndpoint, + nil, + nil, + false, + }, + { + "error getting ids", + errSceneID, + defaultOptions, + newEndpoint, + &remoteSiteID, + nil, + true, + }, + { + "merge existing", + sceneWithStashID, + defaultOptions, + existingEndpoint, + &remoteSiteID, + nil, + false, + }, + { + "merge existing new value", + sceneWithStashID, + defaultOptions, + existingEndpoint, + &newRemoteSiteID, + []models.StashID{ + { + StashID: newRemoteSiteID, + Endpoint: existingEndpoint, + }, + }, + false, + }, + { + "merge add", + sceneWithStashID, + defaultOptions, + newEndpoint, + &newRemoteSiteID, + []models.StashID{ + { + StashID: remoteSiteID, + Endpoint: existingEndpoint, + }, + { + StashID: newRemoteSiteID, + Endpoint: newEndpoint, + }, + }, + false, + }, + { + "overwrite", + sceneWithStashID, + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyOverwrite, + }, + newEndpoint, + &newRemoteSiteID, + []models.StashID{ + { + StashID: newRemoteSiteID, + Endpoint: 
newEndpoint, + }, + }, + false, + }, + { + "overwrite same", + sceneWithStashID, + &models.IdentifyFieldOptionsInput{ + Strategy: models.IdentifyFieldStrategyOverwrite, + }, + existingEndpoint, + &remoteSiteID, + nil, + false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tr.scene = &models.Scene{ + ID: tt.sceneID, + } + tr.fieldOptions["stash_ids"] = tt.fieldOptions + tr.result = &scrapeResult{ + source: ScraperSource{ + RemoteSite: tt.endpoint, + }, + result: &models.ScrapedScene{ + RemoteSiteID: tt.remoteSiteID, + }, + } + + got, err := tr.stashIDs() + if (err != nil) != tt.wantErr { + t.Errorf("sceneRelationships.stashIDs() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("sceneRelationships.stashIDs() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_sceneRelationships_cover(t *testing.T) { + const ( + sceneID = iota + sceneWithStashID + errSceneID + existingID + validStoredIDInt + ) + existingData := []byte("existingData") + newData := []byte("newData") + const base64Prefix = "data:image/png;base64," + existingDataEncoded := base64Prefix + utils.GetBase64StringFromData(existingData) + newDataEncoded := base64Prefix + utils.GetBase64StringFromData(newData) + invalidData := newDataEncoded + "!!!" 
+ + repo := mocks.NewTransactionManager() + repo.SceneMock().On("GetCover", sceneID).Return(existingData, nil) + repo.SceneMock().On("GetCover", errSceneID).Return(nil, errors.New("error getting cover")) + + tr := sceneRelationships{ + repo: repo, + fieldOptions: make(map[string]*models.IdentifyFieldOptionsInput), + } + + tests := []struct { + name string + sceneID int + image *string + want []byte + wantErr bool + }{ + { + "nil image", + sceneID, + nil, + nil, + false, + }, + { + "different image", + sceneID, + &newDataEncoded, + newData, + false, + }, + { + "same image", + sceneID, + &existingDataEncoded, + nil, + false, + }, + { + "error getting scene cover", + errSceneID, + &newDataEncoded, + nil, + true, + }, + { + "invalid data", + sceneID, + &invalidData, + nil, + true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tr.scene = &models.Scene{ + ID: tt.sceneID, + } + tr.result = &scrapeResult{ + result: &models.ScrapedScene{ + Image: tt.image, + }, + } + + got, err := tr.cover(context.TODO()) + if (err != nil) != tt.wantErr { + t.Errorf("sceneRelationships.cover() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("sceneRelationships.cover() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/identify/studio.go b/pkg/identify/studio.go new file mode 100644 index 000000000..4a4c2924b --- /dev/null +++ b/pkg/identify/studio.go @@ -0,0 +1,47 @@ +package identify + +import ( + "database/sql" + "fmt" + "time" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +func createMissingStudio(endpoint string, repo models.Repository, studio *models.ScrapedStudio) (*int64, error) { + created, err := repo.Studio().Create(scrapedToStudioInput(studio)) + if err != nil { + return nil, fmt.Errorf("error creating studio: %w", err) + } + + if endpoint != "" && studio.RemoteSiteID != nil { + if err := repo.Studio().UpdateStashIDs(created.ID, 
[]models.StashID{ + { + Endpoint: endpoint, + StashID: *studio.RemoteSiteID, + }, + }); err != nil { + return nil, fmt.Errorf("error setting studio stash id: %w", err) + } + } + + createdID := int64(created.ID) + return &createdID, nil +} + +func scrapedToStudioInput(studio *models.ScrapedStudio) models.Studio { + currentTime := time.Now() + ret := models.Studio{ + Name: sql.NullString{String: studio.Name, Valid: true}, + Checksum: utils.MD5FromString(studio.Name), + CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + } + + if studio.URL != nil { + ret.URL = sql.NullString{String: *studio.URL, Valid: true} + } + + return ret +} diff --git a/pkg/identify/studio_test.go b/pkg/identify/studio_test.go new file mode 100644 index 000000000..2ba0b840e --- /dev/null +++ b/pkg/identify/studio_test.go @@ -0,0 +1,163 @@ +package identify + +import ( + "errors" + "reflect" + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stretchr/testify/mock" +) + +func Test_createMissingStudio(t *testing.T) { + emptyEndpoint := "" + validEndpoint := "validEndpoint" + invalidEndpoint := "invalidEndpoint" + remoteSiteID := "remoteSiteID" + validName := "validName" + invalidName := "invalidName" + createdID := 1 + createdID64 := int64(createdID) + + repo := mocks.NewTransactionManager() + repo.StudioMock().On("Create", mock.MatchedBy(func(p models.Studio) bool { + return p.Name.String == validName + })).Return(&models.Studio{ + ID: createdID, + }, nil) + repo.StudioMock().On("Create", mock.MatchedBy(func(p models.Studio) bool { + return p.Name.String == invalidName + })).Return(nil, errors.New("error creating performer")) + + repo.StudioMock().On("UpdateStashIDs", createdID, []models.StashID{ + { + Endpoint: invalidEndpoint, + StashID: remoteSiteID, + }, + }).Return(errors.New("error updating stash ids")) + repo.StudioMock().On("UpdateStashIDs", createdID, 
[]models.StashID{ + { + Endpoint: validEndpoint, + StashID: remoteSiteID, + }, + }).Return(nil) + + type args struct { + endpoint string + studio *models.ScrapedStudio + } + tests := []struct { + name string + args args + want *int64 + wantErr bool + }{ + { + "simple", + args{ + emptyEndpoint, + &models.ScrapedStudio{ + Name: validName, + }, + }, + &createdID64, + false, + }, + { + "error creating", + args{ + emptyEndpoint, + &models.ScrapedStudio{ + Name: invalidName, + }, + }, + nil, + true, + }, + { + "valid stash id", + args{ + validEndpoint, + &models.ScrapedStudio{ + Name: validName, + RemoteSiteID: &remoteSiteID, + }, + }, + &createdID64, + false, + }, + { + "invalid stash id", + args{ + invalidEndpoint, + &models.ScrapedStudio{ + Name: validName, + RemoteSiteID: &remoteSiteID, + }, + }, + nil, + true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := createMissingStudio(tt.args.endpoint, repo, tt.args.studio) + if (err != nil) != tt.wantErr { + t.Errorf("createMissingStudio() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("createMissingStudio() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_scrapedToStudioInput(t *testing.T) { + const name = "name" + const md5 = "b068931cc450442b63f5b3d276ea4297" + url := "url" + + tests := []struct { + name string + studio *models.ScrapedStudio + want models.Studio + }{ + { + "set all", + &models.ScrapedStudio{ + Name: name, + URL: &url, + }, + models.Studio{ + Name: models.NullString(name), + Checksum: md5, + URL: models.NullString(url), + }, + }, + { + "set none", + &models.ScrapedStudio{ + Name: name, + }, + models.Studio{ + Name: models.NullString(name), + Checksum: md5, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := scrapedToStudioInput(tt.studio) + + // clear created/updated dates + got.CreatedAt = models.SQLiteTimestamp{} + got.UpdatedAt = got.CreatedAt + + if 
!reflect.DeepEqual(got, tt.want) { + t.Errorf("scrapedToStudioInput() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/image/export.go b/pkg/image/export.go index e02a505c4..b70bbe7f2 100644 --- a/pkg/image/export.go +++ b/pkg/image/export.go @@ -75,7 +75,7 @@ func GetStudioName(reader models.StudioReader, image *models.Image) (string, err // func GetGalleryChecksum(reader models.GalleryReader, image *models.Image) (string, error) { // gallery, err := reader.FindByImageID(image.ID) // if err != nil { -// return "", fmt.Errorf("error getting image gallery: %s", err.Error()) +// return "", fmt.Errorf("error getting image gallery: %v", err) // } // if gallery != nil { diff --git a/pkg/image/export_test.go b/pkg/image/export_test.go index 1e0a7d411..f47672b58 100644 --- a/pkg/image/export_test.go +++ b/pkg/image/export_test.go @@ -50,7 +50,7 @@ const ( const ( studioName = "studioName" - //galleryChecksum = "galleryChecksum" + // galleryChecksum = "galleryChecksum" ) var ( @@ -165,11 +165,12 @@ func TestGetStudioName(t *testing.T) { image := s.input json, err := GetStudioName(mockStudioReader, &image) - if !s.err && err != nil { + switch { + case !s.err && err != nil: t.Errorf("[%d] unexpected error: %s", i, err.Error()) - } else if s.err && err == nil { + case s.err && err == nil: t.Errorf("[%d] expected error not returned", i) - } else { + default: assert.Equal(t, s.expected, json, "[%d]", i) } } diff --git a/pkg/image/image.go b/pkg/image/image.go index 486c65ef4..392c9124a 100644 --- a/pkg/image/image.go +++ b/pkg/image/image.go @@ -12,14 +12,13 @@ import ( "strings" "time" + "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" _ "golang.org/x/image/webp" ) -const zipSeparator = "\x00" - func GetSourceImage(i *models.Image) (image.Image, error) { f, err := openSourceImage(i.Path) if err != nil { @@ -67,17 +66,6 @@ func FileExists(path string) bool { 
return true } -func ZipFilename(zipFilename, filenameInZip string) string { - return zipFilename + zipSeparator + filenameInZip -} - -// IsZipPath returns true if the path includes the zip separator byte, -// indicating it is within a zip file. -// TODO - this should be moved to utils -func IsZipPath(p string) bool { - return strings.Contains(p, zipSeparator) -} - type imageReadCloser struct { src io.ReadCloser zrc *zip.ReadCloser @@ -102,7 +90,7 @@ func (i *imageReadCloser) Close() error { func openSourceImage(path string) (io.ReadCloser, error) { // may need to read from a zip file - zipFilename, filename := getFilePath(path) + zipFilename, filename := file.ZipFilePath(path) if zipFilename != "" { r, err := zip.OpenReader(zipFilename) if err != nil { @@ -134,17 +122,6 @@ func openSourceImage(path string) (io.ReadCloser, error) { return os.Open(filename) } -func getFilePath(path string) (zipFilename, filename string) { - nullIndex := strings.Index(path, zipSeparator) - if nullIndex != -1 { - zipFilename = path[0:nullIndex] - filename = path[nullIndex+1:] - } else { - filename = path - } - return -} - // GetFileDetails returns a pointer to an Image object with the // width, height and size populated. func GetFileDetails(path string) (*models.Image, error) { @@ -203,7 +180,7 @@ func GetFileModTime(path string) (time.Time, error) { func stat(path string) (os.FileInfo, error) { // may need to read from a zip file - zipFilename, filename := getFilePath(path) + zipFilename, filename := file.ZipFilePath(path) if zipFilename != "" { r, err := zip.OpenReader(zipFilename) if err != nil { @@ -224,16 +201,8 @@ func stat(path string) (os.FileInfo, error) { return os.Stat(filename) } -// PathDisplayName converts an image path for display. It translates the zip -// file separator character into '/', since this character is also used for -// path separators within zip files. It returns the original provided path -// if it does not contain the zip file separator character. 
-func PathDisplayName(path string) string { - return strings.Replace(path, zipSeparator, "/", -1) -} - func Serve(w http.ResponseWriter, r *http.Request, path string) { - zipFilename, _ := getFilePath(path) + zipFilename, _ := file.ZipFilePath(path) w.Header().Add("Cache-Control", "max-age=604800000") // 1 Week if zipFilename == "" { http.ServeFile(w, r, path) @@ -259,7 +228,7 @@ func Serve(w http.ResponseWriter, r *http.Request, path string) { } func IsCover(img *models.Image) bool { - _, fn := getFilePath(img.Path) + _, fn := file.ZipFilePath(img.Path) return strings.HasSuffix(fn, "cover.jpg") } @@ -268,13 +237,13 @@ func GetTitle(s *models.Image) string { return s.Title.String } - _, fn := getFilePath(s.Path) + _, fn := file.ZipFilePath(s.Path) return filepath.Base(fn) } // GetFilename gets the base name of the image file // If stripExt is set the file extension is omitted from the name func GetFilename(s *models.Image, stripExt bool) string { - _, fn := getFilePath(s.Path) + _, fn := file.ZipFilePath(s.Path) return utils.GetNameFromPath(fn, stripExt) } diff --git a/pkg/image/import.go b/pkg/image/import.go index 7b0640595..b6ebe5b05 100644 --- a/pkg/image/import.go +++ b/pkg/image/import.go @@ -86,7 +86,7 @@ func (i *Importer) populateStudio() error { if i.Input.Studio != "" { studio, err := i.StudioWriter.FindByName(i.Input.Studio, false) if err != nil { - return fmt.Errorf("error finding studio by name: %s", err.Error()) + return fmt.Errorf("error finding studio by name: %v", err) } if studio == nil { @@ -131,7 +131,7 @@ func (i *Importer) populateGalleries() error { for _, checksum := range i.Input.Galleries { gallery, err := i.GalleryWriter.FindByChecksum(checksum) if err != nil { - return fmt.Errorf("error finding gallery: %s", err.Error()) + return fmt.Errorf("error finding gallery: %v", err) } if gallery == nil { @@ -179,7 +179,7 @@ func (i *Importer) populatePerformers() error { if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { 
createdPerformers, err := i.createPerformers(missingPerformers) if err != nil { - return fmt.Errorf("error creating image performers: %s", err.Error()) + return fmt.Errorf("error creating image performers: %v", err) } performers = append(performers, createdPerformers...) @@ -232,7 +232,7 @@ func (i *Importer) PostImport(id int) error { } if err := i.ReaderWriter.UpdateGalleries(id, galleryIDs); err != nil { - return fmt.Errorf("failed to associate galleries: %s", err.Error()) + return fmt.Errorf("failed to associate galleries: %v", err) } } @@ -243,7 +243,7 @@ func (i *Importer) PostImport(id int) error { } if err := i.ReaderWriter.UpdatePerformers(id, performerIDs); err != nil { - return fmt.Errorf("failed to associate performers: %s", err.Error()) + return fmt.Errorf("failed to associate performers: %v", err) } } @@ -253,7 +253,7 @@ func (i *Importer) PostImport(id int) error { tagIDs = append(tagIDs, t.ID) } if err := i.ReaderWriter.UpdateTags(id, tagIDs); err != nil { - return fmt.Errorf("failed to associate tags: %s", err.Error()) + return fmt.Errorf("failed to associate tags: %v", err) } } @@ -284,7 +284,7 @@ func (i *Importer) FindExistingID() (*int, error) { func (i *Importer) Create() (*int, error) { created, err := i.ReaderWriter.Create(i.image) if err != nil { - return nil, fmt.Errorf("error creating image: %s", err.Error()) + return nil, fmt.Errorf("error creating image: %v", err) } id := created.ID @@ -298,7 +298,7 @@ func (i *Importer) Update(id int) error { i.ID = id _, err := i.ReaderWriter.UpdateFull(image) if err != nil { - return fmt.Errorf("error updating existing image: %s", err.Error()) + return fmt.Errorf("error updating existing image: %v", err) } return nil @@ -327,7 +327,7 @@ func importTags(tagWriter models.TagReaderWriter, names []string, missingRefBeha if missingRefBehaviour == models.ImportMissingRefEnumCreate { createdTags, err := createTags(tagWriter, missingTags) if err != nil { - return nil, fmt.Errorf("error creating tags: %s", 
err.Error()) + return nil, fmt.Errorf("error creating tags: %v", err) } tags = append(tags, createdTags...) diff --git a/pkg/image/query.go b/pkg/image/query.go index 7b2dac990..1ce2130cf 100644 --- a/pkg/image/query.go +++ b/pkg/image/query.go @@ -6,6 +6,36 @@ import ( "github.com/stashapp/stash/pkg/models" ) +type Queryer interface { + Query(options models.ImageQueryOptions) (*models.ImageQueryResult, error) +} + +// QueryOptions returns a ImageQueryResult populated with the provided filters. +func QueryOptions(imageFilter *models.ImageFilterType, findFilter *models.FindFilterType, count bool) models.ImageQueryOptions { + return models.ImageQueryOptions{ + QueryOptions: models.QueryOptions{ + FindFilter: findFilter, + Count: count, + }, + ImageFilter: imageFilter, + } +} + +// Query queries for images using the provided filters. +func Query(qb Queryer, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) ([]*models.Image, error) { + result, err := qb.Query(QueryOptions(imageFilter, findFilter, false)) + if err != nil { + return nil, err + } + + images, err := result.Resolve() + if err != nil { + return nil, err + } + + return images, nil +} + func CountByPerformerID(r models.ImageReader, id int) (int, error) { filter := &models.ImageFilterType{ Performers: &models.MultiCriterionInput{ diff --git a/pkg/image/scan.go b/pkg/image/scan.go new file mode 100644 index 000000000..15424f29a --- /dev/null +++ b/pkg/image/scan.go @@ -0,0 +1,192 @@ +package image + +import ( + "context" + "fmt" + "os" + "strings" + "time" + + "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/manager/paths" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/plugin" + "github.com/stashapp/stash/pkg/utils" +) + +const mutexType = "image" + +type Scanner struct { + file.Scanner + + StripFileExtension bool + + Ctx context.Context + CaseSensitiveFs bool + TxnManager models.TransactionManager + 
Paths *paths.Paths + PluginCache *plugin.Cache + MutexManager *utils.MutexManager +} + +func FileScanner(hasher file.Hasher) file.Scanner { + return file.Scanner{ + Hasher: hasher, + CalculateMD5: true, + } +} + +func (scanner *Scanner) ScanExisting(existing file.FileBased, file file.SourceFile) (retImage *models.Image, err error) { + scanned, err := scanner.Scanner.ScanExisting(existing, file) + if err != nil { + return nil, err + } + + i := existing.(*models.Image) + + path := scanned.New.Path + oldChecksum := i.Checksum + changed := false + + if scanned.ContentsChanged() { + logger.Infof("%s has been updated: rescanning", path) + + // regenerate the file details as well + if err := SetFileDetails(i); err != nil { + return nil, err + } + + changed = true + } else if scanned.FileUpdated() { + logger.Infof("Updated image file %s", path) + + changed = true + } + + if changed { + i.SetFile(*scanned.New) + i.UpdatedAt = models.SQLiteTimestamp{Timestamp: time.Now()} + + // we are operating on a checksum now, so grab a mutex on the checksum + done := make(chan struct{}) + scanner.MutexManager.Claim(mutexType, scanned.New.Checksum, done) + + if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { + // free the mutex once transaction is complete + defer close(done) + var err error + + // ensure no clashes of hashes + if scanned.New.Checksum != "" && scanned.Old.Checksum != scanned.New.Checksum { + dupe, _ := r.Image().FindByChecksum(i.Checksum) + if dupe != nil { + return fmt.Errorf("MD5 for file %s is the same as that of %s", path, dupe.Path) + } + } + + retImage, err = r.Image().UpdateFull(*i) + return err + }); err != nil { + return nil, err + } + + // remove the old thumbnail if the checksum changed - we'll regenerate it + if oldChecksum != scanned.New.Checksum { + // remove cache dir of gallery + err = os.Remove(scanner.Paths.Generated.GetThumbnailPath(oldChecksum, models.DefaultGthumbWidth)) + if err != nil { + logger.Errorf("Error 
deleting thumbnail image: %s", err) + } + } + + scanner.PluginCache.ExecutePostHooks(scanner.Ctx, retImage.ID, plugin.ImageUpdatePost, nil, nil) + } + + return +} + +func (scanner *Scanner) ScanNew(f file.SourceFile) (retImage *models.Image, err error) { + scanned, err := scanner.Scanner.ScanNew(f) + if err != nil { + return nil, err + } + + path := f.Path() + checksum := scanned.Checksum + + // grab a mutex on the checksum + done := make(chan struct{}) + scanner.MutexManager.Claim(mutexType, checksum, done) + defer close(done) + + // check for image by checksum + var existingImage *models.Image + if err := scanner.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + var err error + existingImage, err = r.Image().FindByChecksum(checksum) + return err + }); err != nil { + return nil, err + } + + pathDisplayName := file.ZipPathDisplayName(path) + + if existingImage != nil { + exists := FileExists(existingImage.Path) + if !scanner.CaseSensitiveFs { + // #1426 - if file exists but is a case-insensitive match for the + // original filename, then treat it as a move + if exists && strings.EqualFold(path, existingImage.Path) { + exists = false + } + } + + if exists { + logger.Infof("%s already exists. Duplicate of %s ", pathDisplayName, file.ZipPathDisplayName(existingImage.Path)) + return nil, nil + } else { + logger.Infof("%s already exists. Updating path...", pathDisplayName) + imagePartial := models.ImagePartial{ + ID: existingImage.ID, + Path: &path, + } + + if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { + retImage, err = r.Image().Update(imagePartial) + return err + }); err != nil { + return nil, err + } + + scanner.PluginCache.ExecutePostHooks(scanner.Ctx, existingImage.ID, plugin.ImageUpdatePost, nil, nil) + } + } else { + logger.Infof("%s doesn't exist. 
Creating new item...", pathDisplayName) + currentTime := time.Now() + newImage := models.Image{ + CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + } + newImage.SetFile(*scanned) + newImage.Title.String = GetFilename(&newImage, scanner.StripFileExtension) + newImage.Title.Valid = true + + if err := SetFileDetails(&newImage); err != nil { + logger.Error(err.Error()) + return nil, err + } + + if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { + var err error + retImage, err = r.Image().Create(newImage) + return err + }); err != nil { + return nil, err + } + + scanner.PluginCache.ExecutePostHooks(scanner.Ctx, retImage.ID, plugin.ImageCreatePost, nil, nil) + } + + return +} diff --git a/pkg/image/thumbnail.go b/pkg/image/thumbnail.go index bb4cac743..ea03d7357 100644 --- a/pkg/image/thumbnail.go +++ b/pkg/image/thumbnail.go @@ -3,22 +3,22 @@ package image import ( "bytes" "errors" - "fmt" "os/exec" "runtime" - "strings" "sync" - "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/models" ) var vipsPath string var once sync.Once +var ErrUnsupportedFormat = errors.New("unsupported image format") + type ThumbnailEncoder struct { - FFMPEGPath string - VipsPath string + ffmpeg ffmpeg.Encoder + vips *vipsEncoder } func GetVipsPath() string { @@ -28,11 +28,18 @@ func GetVipsPath() string { return vipsPath } -func NewThumbnailEncoder(ffmpegPath string) ThumbnailEncoder { - return ThumbnailEncoder{ - FFMPEGPath: ffmpegPath, - VipsPath: GetVipsPath(), +func NewThumbnailEncoder(ffmpegEncoder ffmpeg.Encoder) ThumbnailEncoder { + ret := ThumbnailEncoder{ + ffmpeg: ffmpegEncoder, } + + vipsPath := GetVipsPath() + if vipsPath != "" { + vipsEncoder := vipsEncoder(vipsPath) + ret.vips = &vipsEncoder + } + + return ret } // GetThumbnail returns the thumbnail image of the provided image resized to @@ -60,72 +67,9 @@ func (e 
*ThumbnailEncoder) GetThumbnail(img *models.Image, maxSize int) ([]byte, } // vips has issues loading files from stdin on Windows - if e.VipsPath != "" && runtime.GOOS != "windows" { - return e.getVipsThumbnail(buf, maxSize) + if e.vips != nil && runtime.GOOS != "windows" { + return e.vips.ImageThumbnail(buf, maxSize) } else { - return e.getFFMPEGThumbnail(buf, format, maxSize, img.Path) + return e.ffmpeg.ImageThumbnail(buf, format, maxSize, img.Path) } } - -func (e *ThumbnailEncoder) getVipsThumbnail(image *bytes.Buffer, maxSize int) ([]byte, error) { - args := []string{ - "thumbnail_source", - "[descriptor=0]", - ".jpg[Q=70,strip]", - fmt.Sprint(maxSize), - "--size", "down", - } - data, err := e.run(e.VipsPath, args, image) - - return []byte(data), err -} - -func (e *ThumbnailEncoder) getFFMPEGThumbnail(image *bytes.Buffer, format *string, maxDimensions int, path string) ([]byte, error) { - // ffmpeg spends a long sniffing image format when data is piped through stdio, so we pass the format explicitly instead - ffmpegformat := "" - if format != nil && *format == "jpeg" { - ffmpegformat = "mjpeg" - } else if format != nil && *format == "png" { - ffmpegformat = "png_pipe" - } else if format != nil && *format == "webp" { - ffmpegformat = "webp_pipe" - } else { - return nil, errors.New("unsupported image format") - } - - args := []string{ - "-f", ffmpegformat, - "-i", "-", - "-vf", fmt.Sprintf("scale=%v:%v:force_original_aspect_ratio=decrease", maxDimensions, maxDimensions), - "-c:v", "mjpeg", - "-q:v", "5", - "-f", "image2pipe", - "-", - } - data, err := e.run(e.FFMPEGPath, args, image) - - return []byte(data), err -} - -func (e *ThumbnailEncoder) run(path string, args []string, stdin *bytes.Buffer) (string, error) { - cmd := exec.Command(path, args...) 
- - var stdout, stderr bytes.Buffer - cmd.Stdout = &stdout - cmd.Stderr = &stderr - cmd.Stdin = stdin - - if err := cmd.Start(); err != nil { - return "", err - } - - err := cmd.Wait() - - if err != nil { - // error message should be in the stderr stream - logger.Errorf("image encoder error when running command <%s>: %s", strings.Join(cmd.Args, " "), stderr.String()) - return stdout.String(), err - } - - return stdout.String(), nil -} diff --git a/pkg/image/vips.go b/pkg/image/vips.go new file mode 100644 index 000000000..061afa5f8 --- /dev/null +++ b/pkg/image/vips.go @@ -0,0 +1,48 @@ +package image + +import ( + "bytes" + "fmt" + "os/exec" + "strings" + + "github.com/stashapp/stash/pkg/logger" +) + +type vipsEncoder string + +func (e *vipsEncoder) ImageThumbnail(image *bytes.Buffer, maxSize int) ([]byte, error) { + args := []string{ + "thumbnail_source", + "[descriptor=0]", + ".jpg[Q=70,strip]", + fmt.Sprint(maxSize), + "--size", "down", + } + data, err := e.run(args, image) + + return []byte(data), err +} + +func (e *vipsEncoder) run(args []string, stdin *bytes.Buffer) (string, error) { + cmd := exec.Command(string(*e), args...) + + var stdout, stderr bytes.Buffer + cmd.Stdout = &stdout + cmd.Stderr = &stderr + cmd.Stdin = stdin + + if err := cmd.Start(); err != nil { + return "", err + } + + err := cmd.Wait() + + if err != nil { + // error message should be in the stderr stream + logger.Errorf("image encoder error when running command <%s>: %s", strings.Join(cmd.Args, " "), stderr.String()) + return stdout.String(), err + } + + return stdout.String(), nil +} diff --git a/pkg/job/manager.go b/pkg/job/manager.go index 233818483..ed4dda133 100644 --- a/pkg/job/manager.go +++ b/pkg/job/manager.go @@ -9,7 +9,7 @@ import ( ) const maxGraveyardSize = 10 -const defaultThrottleLimit = time.Second +const defaultThrottleLimit = 100 * time.Millisecond // Manager maintains a queue of jobs. Jobs are executed one at a time. 
type Manager struct { diff --git a/pkg/job/progress.go b/pkg/job/progress.go index 0e62d5ce5..3bd6c3f08 100644 --- a/pkg/job/progress.go +++ b/pkg/job/progress.go @@ -61,11 +61,12 @@ func (p *Progress) SetProcessed(processed int) { } func (p *Progress) calculatePercent() { - if p.total <= 0 { + switch { + case p.total <= 0: p.percent = ProgressIndefinite - } else if p.processed < 0 { + case p.processed < 0: p.percent = 0 - } else { + default: p.percent = float64(p.processed) / float64(p.total) if p.percent > 1 { p.percent = 1 @@ -92,18 +93,33 @@ func (p *Progress) SetPercent(percent float64) { p.updated() } -// Increment increments the number of processed work units, if this does not -// exceed the total units. This is used to calculate the percentage. +// Increment increments the number of processed work units. This is used to calculate the percentage. +// If total is set already, then the number of processed work units will not exceed the total. func (p *Progress) Increment() { p.mutex.Lock() defer p.mutex.Unlock() - if p.processed < p.total { + if p.total <= 0 || p.processed < p.total { p.processed++ p.calculatePercent() } } +// AddProcessed increments the number of processed work units by the provided +// amount. This is used to calculate the percentage. 
+func (p *Progress) AddProcessed(v int) { + p.mutex.Lock() + defer p.mutex.Unlock() + + newVal := v + if newVal > p.total { + newVal = p.total + } + + p.processed = newVal + p.calculatePercent() +} + func (p *Progress) addTask(t *task) { p.mutex.Lock() defer p.mutex.Unlock() diff --git a/pkg/logger/hook.go b/pkg/logger/hook.go new file mode 100644 index 000000000..b0e0417dc --- /dev/null +++ b/pkg/logger/hook.go @@ -0,0 +1,25 @@ +package logger + +import ( + "io" + + "github.com/sirupsen/logrus" +) + +type fileLogHook struct { + Writer io.Writer + Formatter logrus.Formatter +} + +func (hook *fileLogHook) Fire(entry *logrus.Entry) error { + line, err := hook.Formatter.Format(entry) + if err != nil { + return err + } + _, err = hook.Writer.Write(line) + return err +} + +func (hook *fileLogHook) Levels() []logrus.Level { + return logrus.AllLevels +} diff --git a/pkg/logger/logger.go b/pkg/logger/logger.go index 0e2a3db0b..0b4a4225b 100644 --- a/pkg/logger/logger.go +++ b/pkg/logger/logger.go @@ -2,7 +2,6 @@ package logger import ( "fmt" - "io" "os" "sync" "time" @@ -35,6 +34,13 @@ func Init(logFile string, logOut bool, logLevel string) { customFormatter.FullTimestamp = true logger.SetFormatter(customFormatter) + // #1837 - trigger the console to use color-mode since it won't be + // otherwise triggered until the first log entry + // this is covers the situation where the logger is only logging to file + // and therefore does not trigger the console color-mode - resulting in + // the access log colouring not being applied + _, _ = customFormatter.Format(logrus.NewEntry(logger)) + if logFile != "" { var err error file, err = os.OpenFile(logFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644) @@ -44,11 +50,22 @@ func Init(logFile string, logOut bool, logLevel string) { } } - if file != nil && logOut { - mw := io.MultiWriter(os.Stderr, file) - logger.Out = mw - } else if file != nil { - logger.Out = file + if file != nil { + if logOut { + // log to file separately disabling 
colours + fileFormatter := new(logrus.TextFormatter) + fileFormatter.TimestampFormat = customFormatter.TimestampFormat + fileFormatter.FullTimestamp = customFormatter.FullTimestamp + logger.AddHook(&fileLogHook{ + Writer: file, + Formatter: fileFormatter, + }) + } else { + // logging to file only + // turn off the colouring for the file + customFormatter.ForceColors = false + logger.Out = file + } } // otherwise, output to StdErr @@ -63,13 +80,14 @@ func SetLogLevel(level string) { func logLevelFromString(level string) logrus.Level { ret := logrus.InfoLevel - if level == "Debug" { + switch level { + case "Debug": ret = logrus.DebugLevel - } else if level == "Warning" { + case "Warning": ret = logrus.WarnLevel - } else if level == "Error" { + case "Error": ret = logrus.ErrorLevel - } else if level == "Trace" { + case "Trace": ret = logrus.TraceLevel } @@ -277,7 +295,3 @@ func Fatal(args ...interface{}) { func Fatalf(format string, args ...interface{}) { logger.Fatalf(format, args...) } - -//func WithRequest(req *http.Request) *logrus.Entry { -// return logger.WithFields(RequestFields(req)) -//} diff --git a/pkg/logger/plugin.go b/pkg/logger/plugin.go index 67e6b03e9..7b2541c1b 100644 --- a/pkg/logger/plugin.go +++ b/pkg/logger/plugin.go @@ -160,16 +160,14 @@ func (log *PluginLogger) HandleStderrLine(line string) { p, err := strconv.ParseFloat(ll, 64) if err != nil { Errorf("Error parsing progress value '%s': %s", ll, err.Error()) - } else { - // only pass progress through if channel present - if log.ProgressChan != nil { - // don't block on this - select { - case log.ProgressChan <- p: - default: - } + } else if log.ProgressChan != nil { // only pass progress through if channel present + // don't block on this + select { + case log.ProgressChan <- p: + default: } } + } } diff --git a/pkg/manager/checksum.go b/pkg/manager/checksum.go index bc41ddfe1..5a36bca43 100644 --- a/pkg/manager/checksum.go +++ b/pkg/manager/checksum.go @@ -9,12 +9,12 @@ import ( 
"github.com/stashapp/stash/pkg/models" ) -func setInitialMD5Config(txnManager models.TransactionManager) { +func setInitialMD5Config(ctx context.Context, txnManager models.TransactionManager) { // if there are no scene files in the database, then default the // VideoFileNamingAlgorithm config setting to oshash and calculateMD5 to // false, otherwise set them to true for backwards compatibility purposes var count int - if err := txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + if err := txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error { var err error count, err = r.Scene().Count() return err diff --git a/pkg/manager/config/config.go b/pkg/manager/config/config.go index 92235394b..b3c76b532 100644 --- a/pkg/manager/config/config.go +++ b/pkg/manager/config/config.go @@ -1,7 +1,6 @@ package config import ( - "errors" "fmt" "os" "path/filepath" @@ -10,7 +9,7 @@ import ( "strings" "sync" - //"github.com/sasha-s/go-deadlock" // if you have deadlock issues + // "github.com/sasha-s/go-deadlock" // if you have deadlock issues "golang.org/x/crypto/bcrypt" @@ -22,143 +21,173 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -const Stash = "stash" -const Cache = "cache" -const Generated = "generated" -const Metadata = "metadata" -const Downloads = "downloads" -const ApiKey = "api_key" -const Username = "username" -const Password = "password" -const MaxSessionAge = "max_session_age" +const ( + Stash = "stash" + Cache = "cache" + Generated = "generated" + Metadata = "metadata" + Downloads = "downloads" + ApiKey = "api_key" + Username = "username" + Password = "password" + MaxSessionAge = "max_session_age" -const DefaultMaxSessionAge = 60 * 60 * 1 // 1 hours + DefaultMaxSessionAge = 60 * 60 * 1 // 1 hours -const Database = "database" + Database = "database" -const Exclude = "exclude" -const ImageExclude = "image_exclude" + Exclude = "exclude" + ImageExclude = "image_exclude" -const VideoExtensions = "video_extensions" + 
VideoExtensions = "video_extensions" + ImageExtensions = "image_extensions" + GalleryExtensions = "gallery_extensions" + CreateGalleriesFromFolders = "create_galleries_from_folders" -var defaultVideoExtensions = []string{"m4v", "mp4", "mov", "wmv", "avi", "mpg", "mpeg", "rmvb", "rm", "flv", "asf", "mkv", "webm"} + // CalculateMD5 is the config key used to determine if MD5 should be calculated + // for video files. + CalculateMD5 = "calculate_md5" -const ImageExtensions = "image_extensions" + // VideoFileNamingAlgorithm is the config key used to determine what hash + // should be used when generating and using generated files for scenes. + VideoFileNamingAlgorithm = "video_file_naming_algorithm" -var defaultImageExtensions = []string{"png", "jpg", "jpeg", "gif", "webp"} + MaxTranscodeSize = "max_transcode_size" + MaxStreamingTranscodeSize = "max_streaming_transcode_size" -const GalleryExtensions = "gallery_extensions" + ParallelTasks = "parallel_tasks" + parallelTasksDefault = 1 -var defaultGalleryExtensions = []string{"zip", "cbz"} + PreviewPreset = "preview_preset" -const CreateGalleriesFromFolders = "create_galleries_from_folders" + PreviewAudio = "preview_audio" + previewAudioDefault = true -// CalculateMD5 is the config key used to determine if MD5 should be calculated -// for video files. -const CalculateMD5 = "calculate_md5" + PreviewSegmentDuration = "preview_segment_duration" + previewSegmentDurationDefault = 0.75 -// VideoFileNamingAlgorithm is the config key used to determine what hash -// should be used when generating and using generated files for scenes. 
-const VideoFileNamingAlgorithm = "video_file_naming_algorithm" + PreviewSegments = "preview_segments" + previewSegmentsDefault = 12 -const MaxTranscodeSize = "max_transcode_size" -const MaxStreamingTranscodeSize = "max_streaming_transcode_size" + PreviewExcludeStart = "preview_exclude_start" + previewExcludeStartDefault = "0" -const ParallelTasks = "parallel_tasks" -const parallelTasksDefault = 1 + PreviewExcludeEnd = "preview_exclude_end" + previewExcludeEndDefault = "0" -const PreviewPreset = "preview_preset" + WriteImageThumbnails = "write_image_thumbnails" + writeImageThumbnailsDefault = true -const PreviewAudio = "preview_audio" -const previewAudioDefault = true + Host = "host" + hostDefault = "0.0.0.0" -const PreviewSegmentDuration = "preview_segment_duration" -const previewSegmentDurationDefault = 0.75 + Port = "port" + portDefault = 9999 -const PreviewSegments = "preview_segments" -const previewSegmentsDefault = 12 + ExternalHost = "external_host" -const PreviewExcludeStart = "preview_exclude_start" -const previewExcludeStartDefault = "0" + // key used to sign JWT tokens + JWTSignKey = "jwt_secret_key" -const PreviewExcludeEnd = "preview_exclude_end" -const previewExcludeEndDefault = "0" + // key used for session store + SessionStoreKey = "session_store_key" -const WriteImageThumbnails = "write_image_thumbnails" -const writeImageThumbnailsDefault = true + // scraping options + ScrapersPath = "scrapers_path" + ScraperUserAgent = "scraper_user_agent" + ScraperCertCheck = "scraper_cert_check" + ScraperCDPPath = "scraper_cdp_path" + ScraperExcludeTagPatterns = "scraper_exclude_tag_patterns" -const Host = "host" -const Port = "port" -const ExternalHost = "external_host" + // stash-box options + StashBoxes = "stash_boxes" -// key used to sign JWT tokens -const JWTSignKey = "jwt_secret_key" + // plugin options + PluginsPath = "plugins_path" -// key used for session store -const SessionStoreKey = "session_store_key" + // i18n + Language = "language" -// scraping 
options -const ScrapersPath = "scrapers_path" -const ScraperUserAgent = "scraper_user_agent" -const ScraperCertCheck = "scraper_cert_check" -const ScraperCDPPath = "scraper_cdp_path" -const ScraperExcludeTagPatterns = "scraper_exclude_tag_patterns" + // served directories + // this should be manually configured only + CustomServedFolders = "custom_served_folders" -// stash-box options -const StashBoxes = "stash_boxes" + // UI directory. Overrides to serve the UI from a specific location + // rather than use the embedded UI. + CustomUILocation = "custom_ui_location" -// plugin options -const PluginsPath = "plugins_path" + // Interface options + MenuItems = "menu_items" -// i18n -const Language = "language" + SoundOnPreview = "sound_on_preview" -// served directories -// this should be manually configured only -const CustomServedFolders = "custom_served_folders" + WallShowTitle = "wall_show_title" + defaultWallShowTitle = true -// UI directory. Overrides to serve the UI from a specific location -// rather than use the embedded UI. 
-const CustomUILocation = "custom_ui_location" + CustomPerformerImageLocation = "custom_performer_image_location" + MaximumLoopDuration = "maximum_loop_duration" + AutostartVideo = "autostart_video" + AutostartVideoOnPlaySelected = "autostart_video_on_play_selected" + ContinuePlaylistDefault = "continue_playlist_default" + ShowStudioAsText = "show_studio_as_text" + CSSEnabled = "cssEnabled" -// Interface options -const MenuItems = "menu_items" + WallPlayback = "wall_playback" + defaultWallPlayback = "video" -var defaultMenuItems = []string{"scenes", "images", "movies", "markers", "galleries", "performers", "studios", "tags"} + SlideshowDelay = "slideshow_delay" + defaultSlideshowDelay = 5000 -const SoundOnPreview = "sound_on_preview" -const WallShowTitle = "wall_show_title" -const CustomPerformerImageLocation = "custom_performer_image_location" -const MaximumLoopDuration = "maximum_loop_duration" -const AutostartVideo = "autostart_video" -const ShowStudioAsText = "show_studio_as_text" -const CSSEnabled = "cssEnabled" -const WallPlayback = "wall_playback" -const SlideshowDelay = "slideshow_delay" -const HandyKey = "handy_key" -const FunscriptOffset = "funscript_offset" + DisableDropdownCreatePerformer = "disable_dropdown_create.performer" + DisableDropdownCreateStudio = "disable_dropdown_create.studio" + DisableDropdownCreateTag = "disable_dropdown_create.tag" -// Security -const TrustedProxies = "trusted_proxies" -const dangerousAllowPublicWithoutAuth = "dangerous_allow_public_without_auth" -const dangerousAllowPublicWithoutAuthDefault = "false" -const SecurityTripwireAccessedFromPublicInternet = "security_tripwire_accessed_from_public_internet" -const securityTripwireAccessedFromPublicInternetDefault = "" + HandyKey = "handy_key" + FunscriptOffset = "funscript_offset" -// DLNA options -const DLNAServerName = "dlna.server_name" -const DLNADefaultEnabled = "dlna.default_enabled" -const DLNADefaultIPWhitelist = "dlna.default_whitelist" -const DLNAInterfaces = 
"dlna.interfaces" + // Security + TrustedProxies = "trusted_proxies" + dangerousAllowPublicWithoutAuth = "dangerous_allow_public_without_auth" + dangerousAllowPublicWithoutAuthDefault = "false" + SecurityTripwireAccessedFromPublicInternet = "security_tripwire_accessed_from_public_internet" + securityTripwireAccessedFromPublicInternetDefault = "" -// Logging options -const LogFile = "logFile" -const LogOut = "logOut" -const LogLevel = "logLevel" -const LogAccess = "logAccess" + // DLNA options + DLNAServerName = "dlna.server_name" + DLNADefaultEnabled = "dlna.default_enabled" + DLNADefaultIPWhitelist = "dlna.default_whitelist" + DLNAInterfaces = "dlna.interfaces" -// File upload options -const MaxUploadSize = "max_upload_size" + // Logging options + LogFile = "logFile" + LogOut = "logOut" + defaultLogOut = true + LogLevel = "logLevel" + defaultLogLevel = "Info" + LogAccess = "logAccess" + defaultLogAccess = true + + DefaultIdentifySettings = "defaults.identify_task" + + DeleteFileDefault = "defaults.delete_file" + DeleteGeneratedDefault = "defaults.delete_generated" + deleteGeneratedDefaultDefault = true + + // Desktop Integration Options + NoBrowser = "noBrowser" + NoBrowserDefault = false + + // File upload options + MaxUploadSize = "max_upload_size" +) + +// slice default values +var ( + defaultVideoExtensions = []string{"m4v", "mp4", "mov", "wmv", "avi", "mpg", "mpeg", "rmvb", "rm", "flv", "asf", "mkv", "webm"} + defaultImageExtensions = []string{"png", "jpg", "jpeg", "gif", "webp"} + defaultGalleryExtensions = []string{"zip", "cbz"} + defaultMenuItems = []string{"scenes", "images", "movies", "markers", "galleries", "performers", "studios", "tags"} +) type MissingConfigError struct { missingFields []string @@ -168,24 +197,34 @@ func (e MissingConfigError) Error() string { return fmt.Sprintf("missing the following mandatory settings: %s", strings.Join(e.missingFields, ", ")) } +// StashBoxError represents configuration errors of Stash-Box +type StashBoxError 
struct { + msg string +} + +func (s *StashBoxError) Error() string { + // "Stash-box" is a proper noun and is therefore capitcalized + return "Stash-box: " + s.msg +} + type Instance struct { + // main instance - backed by config file + main *viper.Viper + + // override instance - populated from flags/environment + // not written to config file + overrides *viper.Viper + cpuProfilePath string isNewSystem bool certFile string keyFile string sync.RWMutex - //deadlock.RWMutex // for deadlock testing/issues + // deadlock.RWMutex // for deadlock testing/issues } var instance *Instance -func GetInstance() *Instance { - if instance == nil { - instance = &Instance{} - } - return instance -} - func (i *Instance) IsNewSystem() bool { return i.isNewSystem } @@ -193,7 +232,7 @@ func (i *Instance) IsNewSystem() bool { func (i *Instance) SetConfigFile(fn string) { i.Lock() defer i.Unlock() - viper.SetConfigFile(fn) + i.main.SetConfigFile(fn) } func (i *Instance) InitTLS() { @@ -223,10 +262,14 @@ func (i *Instance) GetCPUProfilePath() string { return i.cpuProfilePath } +func (i *Instance) GetNoBrowser() bool { + return i.getBool(NoBrowser) +} + func (i *Instance) Set(key string, value interface{}) { i.Lock() defer i.Unlock() - viper.Set(key, value) + i.main.Set(key, value) } func (i *Instance) SetPassword(value string) { @@ -241,14 +284,20 @@ func (i *Instance) SetPassword(value string) { func (i *Instance) Write() error { i.Lock() defer i.Unlock() - return viper.WriteConfig() + return i.main.WriteConfig() +} + +// FileEnvSet returns true if the configuration file environment parameter +// is set. +func FileEnvSet() bool { + return os.Getenv("STASH_CONFIG_FILE") != "" } // GetConfigFile returns the full path to the used configuration file. 
func (i *Instance) GetConfigFile() string { i.RLock() defer i.RUnlock() - return viper.ConfigFileUsed() + return i.main.ConfigFileUsed() } // GetConfigPath returns the path of the directory containing the used @@ -263,13 +312,94 @@ func (i *Instance) GetDefaultDatabaseFilePath() string { return filepath.Join(i.GetConfigPath(), "stash-go.sqlite") } +// viper returns the viper instance that should be used to get the provided +// key. Returns the overrides instance if the key exists there, otherwise it +// returns the main instance. Assumes read lock held. +func (i *Instance) viper(key string) *viper.Viper { + v := i.main + if i.overrides.IsSet(key) { + v = i.overrides + } + + return v +} + +func (i *Instance) HasOverride(key string) bool { + i.RLock() + defer i.RUnlock() + + return i.overrides.IsSet(key) +} + +// These functions wrap the equivalent viper functions, checking the override +// instance first, then the main instance. + +func (i *Instance) unmarshalKey(key string, rawVal interface{}) error { + i.RLock() + defer i.RUnlock() + + return i.viper(key).UnmarshalKey(key, rawVal) +} + +func (i *Instance) getStringSlice(key string) []string { + i.RLock() + defer i.RUnlock() + + return i.viper(key).GetStringSlice(key) +} + +func (i *Instance) getString(key string) string { + i.RLock() + defer i.RUnlock() + + return i.viper(key).GetString(key) +} + +func (i *Instance) getBool(key string) bool { + i.RLock() + defer i.RUnlock() + + return i.viper(key).GetBool(key) +} + +func (i *Instance) getInt(key string) int { + i.RLock() + defer i.RUnlock() + + return i.viper(key).GetInt(key) +} + +func (i *Instance) getFloat64(key string) float64 { + i.RLock() + defer i.RUnlock() + + return i.viper(key).GetFloat64(key) +} + +func (i *Instance) getStringMapString(key string) map[string]string { + i.RLock() + defer i.RUnlock() + + return i.viper(key).GetStringMapString(key) +} + +// GetStathPaths returns the configured stash library paths. 
+// Works opposite to the usual case - it will return the override +// value only if the main value is not set. func (i *Instance) GetStashPaths() []*models.StashConfig { i.RLock() defer i.RUnlock() + var ret []*models.StashConfig - if err := viper.UnmarshalKey(Stash, &ret); err != nil || len(ret) == 0 { + + v := i.main + if !v.IsSet(Stash) { + v = i.overrides + } + + if err := v.UnmarshalKey(Stash, &ret); err != nil || len(ret) == 0 { // fallback to legacy format - ss := viper.GetStringSlice(Stash) + ss := v.GetStringSlice(Stash) ret = nil for _, path := range ss { toAdd := &models.StashConfig{ @@ -282,72 +412,47 @@ func (i *Instance) GetStashPaths() []*models.StashConfig { return ret } -func (i *Instance) GetConfigFilePath() string { - i.RLock() - defer i.RUnlock() - return viper.ConfigFileUsed() -} - func (i *Instance) GetCachePath() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(Cache) + return i.getString(Cache) } func (i *Instance) GetGeneratedPath() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(Generated) + return i.getString(Generated) } func (i *Instance) GetMetadataPath() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(Metadata) + return i.getString(Metadata) } func (i *Instance) GetDatabasePath() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(Database) + return i.getString(Database) } func (i *Instance) GetJWTSignKey() []byte { - i.RLock() - defer i.RUnlock() - return []byte(viper.GetString(JWTSignKey)) + return []byte(i.getString(JWTSignKey)) } func (i *Instance) GetSessionStoreKey() []byte { - i.RLock() - defer i.RUnlock() - return []byte(viper.GetString(SessionStoreKey)) + return []byte(i.getString(SessionStoreKey)) } func (i *Instance) GetDefaultScrapersPath() string { // default to the same directory as the config file - fn := filepath.Join(i.GetConfigPath(), "scrapers") return fn } func (i *Instance) GetExcludes() []string { - i.RLock() - defer i.RUnlock() - return 
viper.GetStringSlice(Exclude) + return i.getStringSlice(Exclude) } func (i *Instance) GetImageExcludes() []string { - i.RLock() - defer i.RUnlock() - return viper.GetStringSlice(ImageExclude) + return i.getStringSlice(ImageExclude) } func (i *Instance) GetVideoExtensions() []string { - i.RLock() - defer i.RUnlock() - ret := viper.GetStringSlice(VideoExtensions) + ret := i.getStringSlice(VideoExtensions) if ret == nil { ret = defaultVideoExtensions } @@ -355,9 +460,7 @@ func (i *Instance) GetVideoExtensions() []string { } func (i *Instance) GetImageExtensions() []string { - i.RLock() - defer i.RUnlock() - ret := viper.GetStringSlice(ImageExtensions) + ret := i.getStringSlice(ImageExtensions) if ret == nil { ret = defaultImageExtensions } @@ -365,9 +468,7 @@ func (i *Instance) GetImageExtensions() []string { } func (i *Instance) GetGalleryExtensions() []string { - i.RLock() - defer i.RUnlock() - ret := viper.GetStringSlice(GalleryExtensions) + ret := i.getStringSlice(GalleryExtensions) if ret == nil { ret = defaultGalleryExtensions } @@ -375,15 +476,11 @@ func (i *Instance) GetGalleryExtensions() []string { } func (i *Instance) GetCreateGalleriesFromFolders() bool { - i.RLock() - defer i.RUnlock() - return viper.GetBool(CreateGalleriesFromFolders) + return i.getBool(CreateGalleriesFromFolders) } func (i *Instance) GetLanguage() string { - i.RLock() - defer i.RUnlock() - ret := viper.GetString(Language) + ret := i.getString(Language) // default to English if ret == "" { @@ -396,17 +493,13 @@ func (i *Instance) GetLanguage() string { // IsCalculateMD5 returns true if MD5 checksums should be generated for // scene video files. func (i *Instance) IsCalculateMD5() bool { - i.RLock() - defer i.RUnlock() - return viper.GetBool(CalculateMD5) + return i.getBool(CalculateMD5) } // GetVideoFileNamingAlgorithm returns what hash algorithm should be used for // naming generated scene video files. 
func (i *Instance) GetVideoFileNamingAlgorithm() models.HashAlgorithm { - i.RLock() - defer i.RUnlock() - ret := viper.GetString(VideoFileNamingAlgorithm) + ret := i.getString(VideoFileNamingAlgorithm) // default to oshash if ret == "" { @@ -417,54 +510,41 @@ func (i *Instance) GetVideoFileNamingAlgorithm() models.HashAlgorithm { } func (i *Instance) GetScrapersPath() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(ScrapersPath) + return i.getString(ScrapersPath) } func (i *Instance) GetScraperUserAgent() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(ScraperUserAgent) + return i.getString(ScraperUserAgent) } // GetScraperCDPPath gets the path to the Chrome executable or remote address // to an instance of Chrome. func (i *Instance) GetScraperCDPPath() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(ScraperCDPPath) + return i.getString(ScraperCDPPath) } // GetScraperCertCheck returns true if the scraper should check for insecure // certificates when fetching an image or a page. 
func (i *Instance) GetScraperCertCheck() bool { + ret := true i.RLock() defer i.RUnlock() - ret := true - if viper.IsSet(ScraperCertCheck) { - ret = viper.GetBool(ScraperCertCheck) + + v := i.viper(ScraperCertCheck) + if v.IsSet(ScraperCertCheck) { + ret = v.GetBool(ScraperCertCheck) } return ret } func (i *Instance) GetScraperExcludeTagPatterns() []string { - i.RLock() - defer i.RUnlock() - var ret []string - if viper.IsSet(ScraperExcludeTagPatterns) { - ret = viper.GetStringSlice(ScraperExcludeTagPatterns) - } - - return ret + return i.getStringSlice(ScraperExcludeTagPatterns) } -func (i *Instance) GetStashBoxes() []*models.StashBox { - i.RLock() - defer i.RUnlock() - var boxes []*models.StashBox - if err := viper.UnmarshalKey(StashBoxes, &boxes); err != nil { +func (i *Instance) GetStashBoxes() models.StashBoxes { + var boxes models.StashBoxes + if err := i.unmarshalKey(StashBoxes, &boxes); err != nil { logger.Warnf("error in unmarshalkey: %v", err) } @@ -479,49 +559,45 @@ func (i *Instance) GetDefaultPluginsPath() string { } func (i *Instance) GetPluginsPath() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(PluginsPath) + return i.getString(PluginsPath) } func (i *Instance) GetHost() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(Host) + ret := i.getString(Host) + if ret == "" { + ret = hostDefault + } + + return ret } func (i *Instance) GetPort() int { - i.RLock() - defer i.RUnlock() - return viper.GetInt(Port) + ret := i.getInt(Port) + if ret == 0 { + ret = portDefault + } + + return ret } func (i *Instance) GetExternalHost() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(ExternalHost) + return i.getString(ExternalHost) } // GetPreviewSegmentDuration returns the duration of a single segment in a // scene preview file, in seconds. 
func (i *Instance) GetPreviewSegmentDuration() float64 { - i.RLock() - defer i.RUnlock() - return viper.GetFloat64(PreviewSegmentDuration) + return i.getFloat64(PreviewSegmentDuration) } // GetParallelTasks returns the number of parallel tasks that should be started // by scan or generate task. func (i *Instance) GetParallelTasks() int { - i.RLock() - defer i.RUnlock() - return viper.GetInt(ParallelTasks) + return i.getInt(ParallelTasks) } func (i *Instance) GetParallelTasksWithAutoDetection() int { - i.RLock() - defer i.RUnlock() - parallelTasks := viper.GetInt(ParallelTasks) + parallelTasks := i.getInt(ParallelTasks) if parallelTasks <= 0 { parallelTasks = (runtime.NumCPU() / 4) + 1 } @@ -529,16 +605,12 @@ func (i *Instance) GetParallelTasksWithAutoDetection() int { } func (i *Instance) GetPreviewAudio() bool { - i.RLock() - defer i.RUnlock() - return viper.GetBool(PreviewAudio) + return i.getBool(PreviewAudio) } // GetPreviewSegments returns the amount of segments in a scene preview file. func (i *Instance) GetPreviewSegments() int { - i.RLock() - defer i.RUnlock() - return viper.GetInt(PreviewSegments) + return i.getInt(PreviewSegments) } // GetPreviewExcludeStart returns the configuration setting string for @@ -548,9 +620,7 @@ func (i *Instance) GetPreviewSegments() int { // in the preview. If the value is suffixed with a '%' character (for example // '2%'), then it is interpreted as a proportion of the total video duration. func (i *Instance) GetPreviewExcludeStart() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(PreviewExcludeStart) + return i.getString(PreviewExcludeStart) } // GetPreviewExcludeEnd returns the configuration setting string for @@ -559,17 +629,13 @@ func (i *Instance) GetPreviewExcludeStart() string { // when generating previews. If the value is suffixed with a '%' character, // then it is interpreted as a proportion of the total video duration. 
func (i *Instance) GetPreviewExcludeEnd() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(PreviewExcludeEnd) + return i.getString(PreviewExcludeEnd) } // GetPreviewPreset returns the preset when generating previews. Defaults to // Slow. func (i *Instance) GetPreviewPreset() models.PreviewPreset { - i.RLock() - defer i.RUnlock() - ret := viper.GetString(PreviewPreset) + ret := i.getString(PreviewPreset) // default to slow if ret == "" { @@ -580,9 +646,7 @@ func (i *Instance) GetPreviewPreset() models.PreviewPreset { } func (i *Instance) GetMaxTranscodeSize() models.StreamingResolutionEnum { - i.RLock() - defer i.RUnlock() - ret := viper.GetString(MaxTranscodeSize) + ret := i.getString(MaxTranscodeSize) // default to original if ret == "" { @@ -593,9 +657,7 @@ func (i *Instance) GetMaxTranscodeSize() models.StreamingResolutionEnum { } func (i *Instance) GetMaxStreamingTranscodeSize() models.StreamingResolutionEnum { - i.RLock() - defer i.RUnlock() - ret := viper.GetString(MaxStreamingTranscodeSize) + ret := i.getString(MaxStreamingTranscodeSize) // default to original if ret == "" { @@ -608,48 +670,32 @@ func (i *Instance) GetMaxStreamingTranscodeSize() models.StreamingResolutionEnum // IsWriteImageThumbnails returns true if image thumbnails should be written // to disk after generating on the fly. 
func (i *Instance) IsWriteImageThumbnails() bool { - i.RLock() - defer i.RUnlock() - return viper.GetBool(WriteImageThumbnails) + return i.getBool(WriteImageThumbnails) } func (i *Instance) GetAPIKey() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(ApiKey) + return i.getString(ApiKey) } func (i *Instance) GetUsername() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(Username) + return i.getString(Username) } func (i *Instance) GetPasswordHash() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(Password) + return i.getString(Password) } func (i *Instance) GetCredentials() (string, string) { if i.HasCredentials() { - i.RLock() - defer i.RUnlock() - return viper.GetString(Username), viper.GetString(Password) + return i.getString(Username), i.getString(Password) } return "", "" } func (i *Instance) HasCredentials() bool { - i.RLock() - defer i.RUnlock() - if !viper.IsSet(Username) || !viper.IsSet(Password) { - return false - } - - username := viper.GetString(Username) - pwHash := viper.GetString(Password) + username := i.getString(Username) + pwHash := i.getString(Password) return username != "" && pwHash != "" } @@ -673,125 +719,152 @@ func (i *Instance) ValidateCredentials(username string, password string) bool { return username == authUser && err == nil } +var stashBoxRe = regexp.MustCompile("^http.*graphql$") + func (i *Instance) ValidateStashBoxes(boxes []*models.StashBoxInput) error { isMulti := len(boxes) > 1 - re, err := regexp.Compile("^http.*graphql$") - if err != nil { - return errors.New("failure to generate regular expression") - } - for _, box := range boxes { + // Validate each stash-box configuration field, return on error if box.APIKey == "" { - //lint:ignore ST1005 Stash-box is a name - return errors.New("Stash-box API Key cannot be blank") - } else if box.Endpoint == "" { - //lint:ignore ST1005 Stash-box is a name - return errors.New("Stash-box Endpoint cannot be blank") - } else if 
!re.Match([]byte(box.Endpoint)) { - //lint:ignore ST1005 Stash-box is a name - return errors.New("Stash-box Endpoint is invalid") - } else if isMulti && box.Name == "" { - //lint:ignore ST1005 Stash-box is a name - return errors.New("Stash-box Name cannot be blank") + return &StashBoxError{msg: "API Key cannot be blank"} + } + + if box.Endpoint == "" { + return &StashBoxError{msg: "endpoint cannot be blank"} + } + + if !stashBoxRe.Match([]byte(box.Endpoint)) { + return &StashBoxError{msg: "endpoint is invalid"} + } + + if isMulti && box.Name == "" { + return &StashBoxError{msg: "name cannot be blank"} } } + return nil } // GetMaxSessionAge gets the maximum age for session cookies, in seconds. // Session cookie expiry times are refreshed every request. func (i *Instance) GetMaxSessionAge() int { - i.Lock() - defer i.Unlock() - viper.SetDefault(MaxSessionAge, DefaultMaxSessionAge) - return viper.GetInt(MaxSessionAge) + i.RLock() + defer i.RUnlock() + + ret := DefaultMaxSessionAge + v := i.viper(MaxSessionAge) + if v.IsSet(MaxSessionAge) { + ret = v.GetInt(MaxSessionAge) + } + + return ret } // GetCustomServedFolders gets the map of custom paths to their applicable // filesystem locations func (i *Instance) GetCustomServedFolders() URLMap { - i.RLock() - defer i.RUnlock() - return viper.GetStringMapString(CustomServedFolders) + return i.getStringMapString(CustomServedFolders) } func (i *Instance) GetCustomUILocation() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(CustomUILocation) + return i.getString(CustomUILocation) } // Interface options func (i *Instance) GetMenuItems() []string { i.RLock() defer i.RUnlock() - if viper.IsSet(MenuItems) { - return viper.GetStringSlice(MenuItems) + v := i.viper(MenuItems) + if v.IsSet(MenuItems) { + return v.GetStringSlice(MenuItems) } return defaultMenuItems } func (i *Instance) GetSoundOnPreview() bool { - i.RLock() - defer i.RUnlock() - return viper.GetBool(SoundOnPreview) + return i.getBool(SoundOnPreview) } 
func (i *Instance) GetWallShowTitle() bool { - i.Lock() - defer i.Unlock() - viper.SetDefault(WallShowTitle, true) - return viper.GetBool(WallShowTitle) + i.RLock() + defer i.RUnlock() + + ret := defaultWallShowTitle + v := i.viper(WallShowTitle) + if v.IsSet(WallShowTitle) { + ret = v.GetBool(WallShowTitle) + } + return ret } func (i *Instance) GetCustomPerformerImageLocation() string { - i.Lock() - defer i.Unlock() - viper.SetDefault(CustomPerformerImageLocation, "") - return viper.GetString(CustomPerformerImageLocation) + return i.getString(CustomPerformerImageLocation) } func (i *Instance) GetWallPlayback() string { - i.Lock() - defer i.Unlock() - viper.SetDefault(WallPlayback, "video") - return viper.GetString(WallPlayback) + i.RLock() + defer i.RUnlock() + + ret := defaultWallPlayback + v := i.viper(WallPlayback) + if v.IsSet(WallPlayback) { + ret = v.GetString(WallPlayback) + } + + return ret } func (i *Instance) GetMaximumLoopDuration() int { - i.Lock() - defer i.Unlock() - viper.SetDefault(MaximumLoopDuration, 0) - return viper.GetInt(MaximumLoopDuration) + return i.getInt(MaximumLoopDuration) } func (i *Instance) GetAutostartVideo() bool { + return i.getBool(AutostartVideo) +} + +func (i *Instance) GetAutostartVideoOnPlaySelected() bool { i.Lock() defer i.Unlock() - viper.SetDefault(AutostartVideo, false) - return viper.GetBool(AutostartVideo) + viper.SetDefault(AutostartVideoOnPlaySelected, true) + return viper.GetBool(AutostartVideoOnPlaySelected) +} + +func (i *Instance) GetContinuePlaylistDefault() bool { + i.Lock() + defer i.Unlock() + viper.SetDefault(ContinuePlaylistDefault, false) + return viper.GetBool(ContinuePlaylistDefault) } func (i *Instance) GetShowStudioAsText() bool { - i.Lock() - defer i.Unlock() - viper.SetDefault(ShowStudioAsText, false) - return viper.GetBool(ShowStudioAsText) + return i.getBool(ShowStudioAsText) } func (i *Instance) GetSlideshowDelay() int { - i.Lock() - defer i.Unlock() - viper.SetDefault(SlideshowDelay, 5000) - 
return viper.GetInt(SlideshowDelay) + i.RLock() + defer i.RUnlock() + + ret := defaultSlideshowDelay + v := i.viper(SlideshowDelay) + if v.IsSet(SlideshowDelay) { + ret = v.GetInt(SlideshowDelay) + } + + return ret +} + +func (i *Instance) GetDisableDropdownCreate() *models.ConfigDisableDropdownCreate { + return &models.ConfigDisableDropdownCreate{ + Performer: i.getBool(DisableDropdownCreatePerformer), + Studio: i.getBool(DisableDropdownCreateStudio), + Tag: i.getBool(DisableDropdownCreateTag), + } } func (i *Instance) GetCSSPath() string { - i.RLock() - defer i.RUnlock() // use custom.css in the same directory as the config file - configFileUsed := viper.ConfigFileUsed() + configFileUsed := i.GetConfigFile() configDir := filepath.Dir(configFileUsed) fn := filepath.Join(configDir, "custom.css") @@ -817,9 +890,9 @@ func (i *Instance) GetCSS() string { } func (i *Instance) SetCSS(css string) { - i.RLock() - defer i.RUnlock() fn := i.GetCSSPath() + i.Lock() + defer i.Unlock() buf := []byte(css) @@ -829,84 +902,99 @@ func (i *Instance) SetCSS(css string) { } func (i *Instance) GetCSSEnabled() bool { - i.RLock() - defer i.RUnlock() - return viper.GetBool(CSSEnabled) + return i.getBool(CSSEnabled) } func (i *Instance) GetHandyKey() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(HandyKey) + return i.getString(HandyKey) } func (i *Instance) GetFunscriptOffset() int { - viper.SetDefault(FunscriptOffset, 0) - return viper.GetInt(FunscriptOffset) + return i.getInt(FunscriptOffset) +} + +func (i *Instance) GetDeleteFileDefault() bool { + return i.getBool(DeleteFileDefault) +} + +func (i *Instance) GetDeleteGeneratedDefault() bool { + i.RLock() + defer i.RUnlock() + ret := deleteGeneratedDefaultDefault + + v := i.viper(DeleteGeneratedDefault) + if v.IsSet(DeleteGeneratedDefault) { + ret = v.GetBool(DeleteGeneratedDefault) + } + + return ret +} + +// GetDefaultIdentifySettings returns the default Identify task settings. 
+// Returns nil if the settings could not be unmarshalled, or if it +// has not been set. +func (i *Instance) GetDefaultIdentifySettings() *models.IdentifyMetadataTaskOptions { + i.RLock() + defer i.RUnlock() + v := i.viper(DefaultIdentifySettings) + + if v.IsSet(DefaultIdentifySettings) { + var ret models.IdentifyMetadataTaskOptions + if err := v.UnmarshalKey(DefaultIdentifySettings, &ret); err != nil { + return nil + } + return &ret + } + + return nil } // GetTrustedProxies returns a comma separated list of ip addresses that should allow proxying. // When empty, allow from any private network func (i *Instance) GetTrustedProxies() []string { - i.RLock() - defer i.RUnlock() - return viper.GetStringSlice(TrustedProxies) + return i.getStringSlice(TrustedProxies) } // GetDangerousAllowPublicWithoutAuth determines if the security feature is enabled. // See https://github.com/stashapp/stash/wiki/Authentication-Required-When-Accessing-Stash-From-the-Internet func (i *Instance) GetDangerousAllowPublicWithoutAuth() bool { - i.RLock() - defer i.RUnlock() - return viper.GetBool(dangerousAllowPublicWithoutAuth) + return i.getBool(dangerousAllowPublicWithoutAuth) } // GetSecurityTripwireAccessedFromPublicInternet returns a public IP address if stash // has been accessed from the public internet, with no auth enabled, and // DangerousAllowPublicWithoutAuth disabled. Returns an empty string otherwise. func (i *Instance) GetSecurityTripwireAccessedFromPublicInternet() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(SecurityTripwireAccessedFromPublicInternet) + return i.getString(SecurityTripwireAccessedFromPublicInternet) } // GetDLNAServerName returns the visible name of the DLNA server. If empty, // "stash" will be used. func (i *Instance) GetDLNAServerName() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(DLNAServerName) + return i.getString(DLNAServerName) } // GetDLNADefaultEnabled returns true if the DLNA is enabled by default. 
func (i *Instance) GetDLNADefaultEnabled() bool { - i.RLock() - defer i.RUnlock() - return viper.GetBool(DLNADefaultEnabled) + return i.getBool(DLNADefaultEnabled) } // GetDLNADefaultIPWhitelist returns a list of IP addresses/wildcards that // are allowed to use the DLNA service. func (i *Instance) GetDLNADefaultIPWhitelist() []string { - i.RLock() - defer i.RUnlock() - return viper.GetStringSlice(DLNADefaultIPWhitelist) + return i.getStringSlice(DLNADefaultIPWhitelist) } // GetDLNAInterfaces returns a list of interface names to expose DLNA on. If // empty, runs on all interfaces. func (i *Instance) GetDLNAInterfaces() []string { - i.RLock() - defer i.RUnlock() - return viper.GetStringSlice(DLNAInterfaces) + return i.getStringSlice(DLNAInterfaces) } // GetLogFile returns the filename of the file to output logs to. // An empty string means that file logging will be disabled. func (i *Instance) GetLogFile() string { - i.RLock() - defer i.RUnlock() - return viper.GetString(LogFile) + return i.getString(LogFile) } // GetLogOut returns true if logging should be output to the terminal @@ -915,9 +1003,12 @@ func (i *Instance) GetLogFile() string { func (i *Instance) GetLogOut() bool { i.RLock() defer i.RUnlock() - ret := true - if viper.IsSet(LogOut) { - ret = viper.GetBool(LogOut) + + ret := defaultLogOut + v := i.viper(LogOut) + + if v.IsSet(LogOut) { + ret = v.GetBool(LogOut) } return ret @@ -926,13 +1017,9 @@ func (i *Instance) GetLogOut() bool { // GetLogLevel returns the lowest log level to write to the log. 
// Should be one of "Debug", "Info", "Warning", "Error" func (i *Instance) GetLogLevel() string { - i.RLock() - defer i.RUnlock() - const defaultValue = "Info" - - value := viper.GetString(LogLevel) + value := i.getString(LogLevel) if value != "Debug" && value != "Info" && value != "Warning" && value != "Error" && value != "Trace" { - value = defaultValue + value = defaultLogLevel } return value @@ -943,9 +1030,11 @@ func (i *Instance) GetLogLevel() string { func (i *Instance) GetLogAccess() bool { i.RLock() defer i.RUnlock() - ret := true - if viper.IsSet(LogAccess) { - ret = viper.GetBool(LogAccess) + ret := defaultLogAccess + + v := i.viper(LogAccess) + if v.IsSet(LogAccess) { + ret = v.GetBool(LogAccess) } return ret @@ -956,8 +1045,10 @@ func (i *Instance) GetMaxUploadSize() int64 { i.RLock() defer i.RUnlock() ret := int64(1024) - if viper.IsSet(MaxUploadSize) { - ret = viper.GetInt64(MaxUploadSize) + + v := i.viper(MaxUploadSize) + if v.IsSet(MaxUploadSize) { + ret = v.GetInt64(MaxUploadSize) } return ret << 20 } @@ -981,7 +1072,7 @@ func (i *Instance) Validate() error { var missingFields []string for _, p := range mandatoryPaths { - if !viper.IsSet(p) || viper.GetString(p) == "" { + if !i.viper(p).IsSet(p) || i.viper(p).GetString(p) == "" { missingFields = append(missingFields, p) } } @@ -998,8 +1089,8 @@ func (i *Instance) Validate() error { func (i *Instance) SetChecksumDefaultValues(defaultAlgorithm models.HashAlgorithm, usingMD5 bool) { i.Lock() defer i.Unlock() - viper.SetDefault(VideoFileNamingAlgorithm, defaultAlgorithm) - viper.SetDefault(CalculateMD5, usingMD5) + i.main.SetDefault(VideoFileNamingAlgorithm, defaultAlgorithm) + i.main.SetDefault(CalculateMD5, usingMD5) } func (i *Instance) setDefaultValues(write bool) error { @@ -1010,29 +1101,60 @@ func (i *Instance) setDefaultValues(write bool) error { i.Lock() defer i.Unlock() - viper.SetDefault(ParallelTasks, parallelTasksDefault) - viper.SetDefault(PreviewSegmentDuration, 
previewSegmentDurationDefault) - viper.SetDefault(PreviewSegments, previewSegmentsDefault) - viper.SetDefault(PreviewExcludeStart, previewExcludeStartDefault) - viper.SetDefault(PreviewExcludeEnd, previewExcludeEndDefault) - viper.SetDefault(PreviewAudio, previewAudioDefault) - viper.SetDefault(SoundOnPreview, false) - viper.SetDefault(WriteImageThumbnails, writeImageThumbnailsDefault) + // set the default host and port so that these are written to the config + // file + i.main.SetDefault(Host, hostDefault) + i.main.SetDefault(Port, portDefault) - viper.SetDefault(Database, defaultDatabaseFilePath) + i.main.SetDefault(ParallelTasks, parallelTasksDefault) + i.main.SetDefault(PreviewSegmentDuration, previewSegmentDurationDefault) + i.main.SetDefault(PreviewSegments, previewSegmentsDefault) + i.main.SetDefault(PreviewExcludeStart, previewExcludeStartDefault) + i.main.SetDefault(PreviewExcludeEnd, previewExcludeEndDefault) + i.main.SetDefault(PreviewAudio, previewAudioDefault) + i.main.SetDefault(SoundOnPreview, false) - viper.SetDefault(dangerousAllowPublicWithoutAuth, dangerousAllowPublicWithoutAuthDefault) - viper.SetDefault(SecurityTripwireAccessedFromPublicInternet, securityTripwireAccessedFromPublicInternetDefault) + i.main.SetDefault(WriteImageThumbnails, writeImageThumbnailsDefault) + + i.main.SetDefault(Database, defaultDatabaseFilePath) + + i.main.SetDefault(dangerousAllowPublicWithoutAuth, dangerousAllowPublicWithoutAuthDefault) + i.main.SetDefault(SecurityTripwireAccessedFromPublicInternet, securityTripwireAccessedFromPublicInternetDefault) // Set generated to the metadata path for backwards compat - viper.SetDefault(Generated, viper.GetString(Metadata)) + i.main.SetDefault(Generated, i.main.GetString(Metadata)) + + i.main.SetDefault(NoBrowser, NoBrowserDefault) // Set default scrapers and plugins paths - viper.SetDefault(ScrapersPath, defaultScrapersPath) - viper.SetDefault(PluginsPath, defaultPluginsPath) + i.main.SetDefault(ScrapersPath, 
defaultScrapersPath) + i.main.SetDefault(PluginsPath, defaultPluginsPath) if write { - return viper.WriteConfig() + return i.main.WriteConfig() + } + + return nil +} + +// setExistingSystemDefaults sets config options that are new and unset in an existing install, +// but should have a separate default than for brand-new systems, to maintain behavior. +func (i *Instance) setExistingSystemDefaults() error { + i.Lock() + defer i.Unlock() + if !i.isNewSystem { + configDirtied := false + + // Existing systems as of the introduction of auto-browser open should retain existing + // behavior and not start the browser automatically. + if !i.main.InConfig(NoBrowser) { + configDirtied = true + i.main.Set(NoBrowser, true) + } + + if configDirtied { + return i.main.WriteConfig() + } } return nil diff --git a/pkg/manager/config/config_concurrency_test.go b/pkg/manager/config/config_concurrency_test.go index cc6b56d67..4b940cb2e 100644 --- a/pkg/manager/config/config_concurrency_test.go +++ b/pkg/manager/config/config_concurrency_test.go @@ -10,7 +10,6 @@ func TestConcurrentConfigAccess(t *testing.T) { i := GetInstance() const workers = 8 - //const loops = 1000 const loops = 200 var wg sync.WaitGroup for k := 0; k < workers; k++ { @@ -22,12 +21,15 @@ func TestConcurrentConfigAccess(t *testing.T) { } i.HasCredentials() + i.ValidateCredentials("", "") i.GetCPUProfilePath() i.GetConfigFile() i.GetConfigPath() i.GetDefaultDatabaseFilePath() i.GetStashPaths() - i.GetConfigFilePath() + _ = i.ValidateStashBoxes(nil) + _ = i.Validate() + _ = i.ActivatePublicAccessTripwire("") i.Set(Cache, i.GetCachePath()) i.Set(Generated, i.GetGeneratedPath()) i.Set(Metadata, i.GetMetadataPath()) @@ -93,6 +95,19 @@ func TestConcurrentConfigAccess(t *testing.T) { i.Set(LogLevel, i.GetLogLevel()) i.Set(LogAccess, i.GetLogAccess()) i.Set(MaxUploadSize, i.GetMaxUploadSize()) + i.Set(FunscriptOffset, i.GetFunscriptOffset()) + i.Set(DefaultIdentifySettings, i.GetDefaultIdentifySettings()) + 
i.Set(DeleteGeneratedDefault, i.GetDeleteGeneratedDefault()) + i.Set(DeleteFileDefault, i.GetDeleteFileDefault()) + i.Set(TrustedProxies, i.GetTrustedProxies()) + i.Set(dangerousAllowPublicWithoutAuth, i.GetDangerousAllowPublicWithoutAuth()) + i.Set(SecurityTripwireAccessedFromPublicInternet, i.GetSecurityTripwireAccessedFromPublicInternet()) + i.Set(DisableDropdownCreatePerformer, i.GetDisableDropdownCreate().Performer) + i.Set(DisableDropdownCreateStudio, i.GetDisableDropdownCreate().Studio) + i.Set(DisableDropdownCreateTag, i.GetDisableDropdownCreate().Tag) + i.SetChecksumDefaultValues(i.GetVideoFileNamingAlgorithm(), i.IsCalculateMD5()) + i.Set(AutostartVideoOnPlaySelected, i.GetAutostartVideoOnPlaySelected()) + i.Set(ContinuePlaylistDefault, i.GetContinuePlaylistDefault()) } wg.Done() }(k) diff --git a/pkg/manager/config/init.go b/pkg/manager/config/init.go index 7117a18af..0b2ddb6b8 100644 --- a/pkg/manager/config/init.go +++ b/pkg/manager/config/init.go @@ -1,9 +1,11 @@ package config import ( + "errors" "fmt" "net" "os" + "path/filepath" "sync" "github.com/spf13/pflag" @@ -13,25 +15,40 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -var once sync.Once +var ( + initOnce sync.Once + instanceOnce sync.Once +) type flagStruct struct { configFilePath string cpuProfilePath string + nobrowser bool +} + +func GetInstance() *Instance { + instanceOnce.Do(func() { + instance = &Instance{ + main: viper.New(), + overrides: viper.New(), + } + }) + return instance } func Initialize() (*Instance, error) { var err error - once.Do(func() { + initOnce.Do(func() { flags := initFlags() - instance = &Instance{ - cpuProfilePath: flags.cpuProfilePath, - } + overrides := makeOverrideConfig() - if err = initConfig(flags); err != nil { + _ = GetInstance() + instance.overrides = overrides + instance.cpuProfilePath = flags.cpuProfilePath + + if err = initConfig(instance, flags); err != nil { return } - initEnvs() if instance.isNewSystem { if instance.Validate() == nil { @@ -41,19 
+58,23 @@ func Initialize() (*Instance, error) { } if !instance.isNewSystem { - err = instance.SetInitialConfig() + err = instance.setExistingSystemDefaults() + if err == nil { + err = instance.SetInitialConfig() + } } }) return instance, err } -func initConfig(flags flagStruct) error { +func initConfig(instance *Instance, flags flagStruct) error { + v := instance.main // The config file is called config. Leave off the file extension. - viper.SetConfigName("config") + v.SetConfigName("config") - viper.AddConfigPath(".") // Look for config in the working directory - viper.AddConfigPath("$HOME/.stash") // Look for the config in the home directory + v.AddConfigPath(".") // Look for config in the working directory + v.AddConfigPath(filepath.FromSlash("$HOME/.stash")) // Look for the config in the home directory configFile := "" envConfigFile := os.Getenv("STASH_CONFIG_FILE") @@ -65,7 +86,7 @@ func initConfig(flags flagStruct) error { } if configFile != "" { - viper.SetConfigFile(configFile) + v.SetConfigFile(configFile) // if file does not exist, assume it is a new system if exists, _ := utils.FileExists(configFile); !exists { @@ -83,9 +104,10 @@ func initConfig(flags flagStruct) error { } } - err := viper.ReadInConfig() // Find and read the config file + err := v.ReadInConfig() // Find and read the config file // if not found, assume its a new system - if _, isMissing := err.(viper.ConfigFileNotFoundError); isMissing { + var notFoundErr viper.ConfigFileNotFoundError + if errors.As(err, ¬FoundErr) { instance.isNewSystem = true return nil } else if err != nil { @@ -102,32 +124,38 @@ func initFlags() flagStruct { pflag.Int("port", 9999, "port to serve from") pflag.StringVarP(&flags.configFilePath, "config", "c", "", "config file to use") pflag.StringVar(&flags.cpuProfilePath, "cpuprofile", "", "write cpu profile to file") + pflag.BoolVar(&flags.nobrowser, "nobrowser", false, "Don't open a browser window after launch") pflag.Parse() - if err := 
viper.BindPFlags(pflag.CommandLine); err != nil { - logger.Infof("failed to bind flags: %s", err.Error()) - } return flags } -func initEnvs() { - viper.SetEnvPrefix("stash") // will be uppercased automatically - bindEnv("host") // STASH_HOST - bindEnv("port") // STASH_PORT - bindEnv("external_host") // STASH_EXTERNAL_HOST - bindEnv("generated") // STASH_GENERATED - bindEnv("metadata") // STASH_METADATA - bindEnv("cache") // STASH_CACHE - - // only set stash config flag if not already set - if instance.GetStashPaths() == nil { - bindEnv("stash") // STASH_STASH - } +func initEnvs(viper *viper.Viper) { + viper.SetEnvPrefix("stash") // will be uppercased automatically + bindEnv(viper, "host") // STASH_HOST + bindEnv(viper, "port") // STASH_PORT + bindEnv(viper, "external_host") // STASH_EXTERNAL_HOST + bindEnv(viper, "generated") // STASH_GENERATED + bindEnv(viper, "metadata") // STASH_METADATA + bindEnv(viper, "cache") // STASH_CACHE + bindEnv(viper, "stash") // STASH_STASH } -func bindEnv(key string) { +func bindEnv(viper *viper.Viper, key string) { if err := viper.BindEnv(key); err != nil { panic(fmt.Sprintf("unable to set environment key (%v): %v", key, err)) } } + +func makeOverrideConfig() *viper.Viper { + viper := viper.New() + + if err := viper.BindPFlags(pflag.CommandLine); err != nil { + logger.Infof("failed to bind flags: %s", err.Error()) + } + + initEnvs(viper) + + return viper +} diff --git a/pkg/manager/downloads.go b/pkg/manager/downloads.go index 4706298f2..675a08525 100644 --- a/pkg/manager/downloads.go +++ b/pkg/manager/downloads.go @@ -44,7 +44,7 @@ func (s *DownloadStore) RegisterFile(fp string, contentType string, keep bool) s for generate && a < attempts { hash = utils.GenerateRandomKey(keyLength) _, generate = s.m[hash] - a = a + 1 + a++ } s.m[hash] = &storeFile{ diff --git a/pkg/manager/exclude_files.go b/pkg/manager/exclude_files.go index acc390a8e..b80b2f911 100644 --- a/pkg/manager/exclude_files.go +++ b/pkg/manager/exclude_files.go @@ -1,7 
+1,6 @@ package manager import ( - "path/filepath" "regexp" "strings" @@ -23,14 +22,13 @@ func excludeFiles(files []string, patterns []string) ([]string, int) { return files, 0 } - for i := 0; i < len(files); i++ { - if matchFileSimple(files[i], fileRegexps) { - logger.Infof("File matched pattern. Excluding:\"%s\"", files[i]) + for _, f := range files { + if matchFileSimple(f, fileRegexps) { + logger.Infof("File matched pattern. Excluding:\"%s\"", f) exclCount++ } else { - - //if pattern doesn't match add file to list - results = append(results, files[i]) + // if pattern doesn't match add file to list + results = append(results, f) } } logger.Infof("Excluded %d file(s) from scan", exclCount) @@ -86,14 +84,3 @@ func matchFileSimple(file string, regExps []*regexp.Regexp) bool { } return false } - -func matchExtension(path string, extensions []string) bool { - ext := filepath.Ext(path) - for _, e := range extensions { - if strings.EqualFold(ext, "."+e) { - return true - } - } - - return false -} diff --git a/pkg/manager/exclude_files_test.go b/pkg/manager/exclude_files_test.go index 978c9455e..df8a0a140 100644 --- a/pkg/manager/exclude_files_test.go +++ b/pkg/manager/exclude_files_test.go @@ -2,8 +2,9 @@ package manager import ( "fmt" - "github.com/stashapp/stash/pkg/logger" "testing" + + "github.com/stashapp/stash/pkg/logger" ) var excludeTestFilenames = []string{ @@ -31,16 +32,16 @@ var excludeTests = []struct { testPattern []string expected int }{ - {[]string{"sample\\.mp4$", "trash", "\\.[\\d]{3}\\.webm$"}, 6}, //generic - {[]string{"no_match\\.mp4"}, 0}, //no match - {[]string{"^/stash/videos/exclude/", "/videos/xcl/"}, 3}, //linux - {[]string{"/\\.[[:word:]]+/"}, 1}, //linux hidden dirs (handbrake unraid issue?) 
- {[]string{"c:\\\\stash\\\\videos\\\\exclude"}, 1}, //windows - {[]string{"\\/[/invalid"}, 0}, //invalid pattern - {[]string{"\\/[/invalid", "sample\\.[[:alnum:]]+$"}, 3}, //invalid pattern but continue - {[]string{"^\\\\\\\\network"}, 4}, //windows net share - {[]string{"\\\\private\\\\"}, 1}, //windows net share - {[]string{"\\\\private\\\\", "sample\\.mp4"}, 3}, //windows net share + {[]string{"sample\\.mp4$", "trash", "\\.[\\d]{3}\\.webm$"}, 6}, // generic + {[]string{"no_match\\.mp4"}, 0}, // no match + {[]string{"^/stash/videos/exclude/", "/videos/xcl/"}, 3}, // linux + {[]string{"/\\.[[:word:]]+/"}, 1}, // linux hidden dirs (handbrake unraid issue?) + {[]string{"c:\\\\stash\\\\videos\\\\exclude"}, 1}, // windows + {[]string{"\\/[/invalid"}, 0}, // invalid pattern + {[]string{"\\/[/invalid", "sample\\.[[:alnum:]]+$"}, 3}, // invalid pattern but continue + {[]string{"^\\\\\\\\network"}, 4}, // windows net share + {[]string{"\\\\private\\\\"}, 1}, // windows net share + {[]string{"\\\\private\\\\", "sample\\.mp4"}, 3}, // windows net share } func TestExcludeFiles(t *testing.T) { diff --git a/pkg/manager/filename_parser.go b/pkg/manager/filename_parser.go index 9dbfecd81..41428696f 100644 --- a/pkg/manager/filename_parser.go +++ b/pkg/manager/filename_parser.go @@ -3,13 +3,15 @@ package manager import ( "database/sql" "errors" - "github.com/stashapp/stash/pkg/studio" "path/filepath" "regexp" "strconv" "strings" "time" + "github.com/stashapp/stash/pkg/scene" + "github.com/stashapp/stash/pkg/studio" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/tag" ) @@ -81,8 +83,6 @@ func initParserFields() { ret["title"] = newParserField("title", ".*", true) ret["ext"] = newParserField("ext", ".*$", false) - //I = new ParserField("i", undefined, "Matches any ignored word", false); - ret["d"] = newParserField("d", `(?:\.|-|_)`, false) ret["rating"] = newParserField("rating", `\d`, true) ret["performer"] = newParserField("performer", ".*", true) @@ 
-466,7 +466,7 @@ func (p *SceneFilenameParser) Parse(repo models.ReaderRepository) ([]*models.Sce p.Filter.Q = nil - scenes, total, err := repo.Scene().Query(sceneFilter, p.Filter) + scenes, total, err := scene.QueryWithCount(repo.Scene(), sceneFilter, p.Filter) if err != nil { return nil, 0, err } diff --git a/pkg/manager/generator.go b/pkg/manager/generator.go index 4deec7bf6..c7aed9716 100644 --- a/pkg/manager/generator.go +++ b/pkg/manager/generator.go @@ -71,7 +71,7 @@ func (g *GeneratorInfo) calculateFrameRate(videoStream *ffmpeg.FFProbeStream) er args = append(args, "/dev/null") } - command := exec.Command(instance.FFMPEGPath, args...) + command := exec.Command(string(instance.FFMPEG), args...) var stdErrBuffer bytes.Buffer command.Stderr = &stdErrBuffer // Frames go to stderr rather than stdout if err := command.Run(); err == nil { diff --git a/pkg/manager/generator_phash.go b/pkg/manager/generator_phash.go index 5ea390452..367dc8a0c 100644 --- a/pkg/manager/generator_phash.go +++ b/pkg/manager/generator_phash.go @@ -42,7 +42,7 @@ func NewPhashGenerator(videoFile ffmpeg.VideoFile, checksum string) (*PhashGener } func (g *PhashGenerator) Generate() (*uint64, error) { - encoder := ffmpeg.NewEncoder(instance.FFMPEGPath) + encoder := instance.FFMPEG sprite, err := g.generateSprite(&encoder) if err != nil { diff --git a/pkg/manager/generator_preview.go b/pkg/manager/generator_preview.go index 899e91df9..56ad6725c 100644 --- a/pkg/manager/generator_preview.go +++ b/pkg/manager/generator_preview.go @@ -57,7 +57,7 @@ func (g *PreviewGenerator) Generate() error { return err } - encoder := ffmpeg.NewEncoder(instance.FFMPEGPath) + encoder := instance.FFMPEG if g.GenerateVideo { if err := g.generateVideo(&encoder, false); err != nil { logger.Warnf("[generator] failed generating scene preview, trying fallback") diff --git a/pkg/manager/generator_sprite.go b/pkg/manager/generator_sprite.go index 2aebb827e..764df5033 100644 --- a/pkg/manager/generator_sprite.go +++ 
b/pkg/manager/generator_sprite.go @@ -53,7 +53,7 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageO } func (g *SpriteGenerator) Generate() error { - encoder := ffmpeg.NewEncoder(instance.FFMPEGPath) + encoder := instance.FFMPEG if err := g.generateSpriteImage(&encoder); err != nil { return err diff --git a/pkg/manager/import.go b/pkg/manager/import.go index d5f61bcf2..c2f2820c7 100644 --- a/pkg/manager/import.go +++ b/pkg/manager/import.go @@ -25,7 +25,7 @@ func performImport(i importer, duplicateBehaviour models.ImportDuplicateEnum) er name := i.Name() existing, err := i.FindExistingID() if err != nil { - return fmt.Errorf("error finding existing objects: %s", err.Error()) + return fmt.Errorf("error finding existing objects: %v", err) } var id int @@ -41,13 +41,13 @@ func performImport(i importer, duplicateBehaviour models.ImportDuplicateEnum) er // must be overwriting id = *existing if err := i.Update(id); err != nil { - return fmt.Errorf("error updating existing object: %s", err.Error()) + return fmt.Errorf("error updating existing object: %v", err) } } else { // creating createdID, err := i.Create() if err != nil { - return fmt.Errorf("error creating object: %s", err.Error()) + return fmt.Errorf("error creating object: %v", err) } id = *createdID diff --git a/pkg/manager/jsonschema/performer.go b/pkg/manager/jsonschema/performer.go index ec1aec4bb..6fee26a18 100644 --- a/pkg/manager/jsonschema/performer.go +++ b/pkg/manager/jsonschema/performer.go @@ -9,32 +9,33 @@ import ( ) type Performer struct { - Name string `json:"name,omitempty"` - Gender string `json:"gender,omitempty"` - URL string `json:"url,omitempty"` - Twitter string `json:"twitter,omitempty"` - Instagram string `json:"instagram,omitempty"` - Birthdate string `json:"birthdate,omitempty"` - Ethnicity string `json:"ethnicity,omitempty"` - Country string `json:"country,omitempty"` - EyeColor string `json:"eye_color,omitempty"` - Height string `json:"height,omitempty"` 
- Measurements string `json:"measurements,omitempty"` - FakeTits string `json:"fake_tits,omitempty"` - CareerLength string `json:"career_length,omitempty"` - Tattoos string `json:"tattoos,omitempty"` - Piercings string `json:"piercings,omitempty"` - Aliases string `json:"aliases,omitempty"` - Favorite bool `json:"favorite,omitempty"` - Tags []string `json:"tags,omitempty"` - Image string `json:"image,omitempty"` - CreatedAt models.JSONTime `json:"created_at,omitempty"` - UpdatedAt models.JSONTime `json:"updated_at,omitempty"` - Rating int `json:"rating,omitempty"` - Details string `json:"details,omitempty"` - DeathDate string `json:"death_date,omitempty"` - HairColor string `json:"hair_color,omitempty"` - Weight int `json:"weight,omitempty"` + Name string `json:"name,omitempty"` + Gender string `json:"gender,omitempty"` + URL string `json:"url,omitempty"` + Twitter string `json:"twitter,omitempty"` + Instagram string `json:"instagram,omitempty"` + Birthdate string `json:"birthdate,omitempty"` + Ethnicity string `json:"ethnicity,omitempty"` + Country string `json:"country,omitempty"` + EyeColor string `json:"eye_color,omitempty"` + Height string `json:"height,omitempty"` + Measurements string `json:"measurements,omitempty"` + FakeTits string `json:"fake_tits,omitempty"` + CareerLength string `json:"career_length,omitempty"` + Tattoos string `json:"tattoos,omitempty"` + Piercings string `json:"piercings,omitempty"` + Aliases string `json:"aliases,omitempty"` + Favorite bool `json:"favorite,omitempty"` + Tags []string `json:"tags,omitempty"` + Image string `json:"image,omitempty"` + CreatedAt models.JSONTime `json:"created_at,omitempty"` + UpdatedAt models.JSONTime `json:"updated_at,omitempty"` + Rating int `json:"rating,omitempty"` + Details string `json:"details,omitempty"` + DeathDate string `json:"death_date,omitempty"` + HairColor string `json:"hair_color,omitempty"` + Weight int `json:"weight,omitempty"` + StashIDs []models.StashID `json:"stash_ids,omitempty"` } 
func LoadPerformerFile(filePath string) (*Performer, error) { diff --git a/pkg/manager/jsonschema/scene.go b/pkg/manager/jsonschema/scene.go index b0207f33c..72ccc53e1 100644 --- a/pkg/manager/jsonschema/scene.go +++ b/pkg/manager/jsonschema/scene.go @@ -36,26 +36,27 @@ type SceneMovie struct { } type Scene struct { - Title string `json:"title,omitempty"` - Checksum string `json:"checksum,omitempty"` - OSHash string `json:"oshash,omitempty"` - Phash string `json:"phash,omitempty"` - Studio string `json:"studio,omitempty"` - URL string `json:"url,omitempty"` - Date string `json:"date,omitempty"` - Rating int `json:"rating,omitempty"` - Organized bool `json:"organized,omitempty"` - OCounter int `json:"o_counter,omitempty"` - Details string `json:"details,omitempty"` - Galleries []string `json:"galleries,omitempty"` - Performers []string `json:"performers,omitempty"` - Movies []SceneMovie `json:"movies,omitempty"` - Tags []string `json:"tags,omitempty"` - Markers []SceneMarker `json:"markers,omitempty"` - File *SceneFile `json:"file,omitempty"` - Cover string `json:"cover,omitempty"` - CreatedAt models.JSONTime `json:"created_at,omitempty"` - UpdatedAt models.JSONTime `json:"updated_at,omitempty"` + Title string `json:"title,omitempty"` + Checksum string `json:"checksum,omitempty"` + OSHash string `json:"oshash,omitempty"` + Phash string `json:"phash,omitempty"` + Studio string `json:"studio,omitempty"` + URL string `json:"url,omitempty"` + Date string `json:"date,omitempty"` + Rating int `json:"rating,omitempty"` + Organized bool `json:"organized,omitempty"` + OCounter int `json:"o_counter,omitempty"` + Details string `json:"details,omitempty"` + Galleries []string `json:"galleries,omitempty"` + Performers []string `json:"performers,omitempty"` + Movies []SceneMovie `json:"movies,omitempty"` + Tags []string `json:"tags,omitempty"` + Markers []SceneMarker `json:"markers,omitempty"` + File *SceneFile `json:"file,omitempty"` + Cover string `json:"cover,omitempty"` + 
CreatedAt models.JSONTime `json:"created_at,omitempty"` + UpdatedAt models.JSONTime `json:"updated_at,omitempty"` + StashIDs []models.StashID `json:"stash_ids,omitempty"` } func LoadSceneFile(filePath string) (*Scene, error) { diff --git a/pkg/manager/jsonschema/studio.go b/pkg/manager/jsonschema/studio.go index ee793acbc..5ecf04335 100644 --- a/pkg/manager/jsonschema/studio.go +++ b/pkg/manager/jsonschema/studio.go @@ -9,15 +9,16 @@ import ( ) type Studio struct { - Name string `json:"name,omitempty"` - URL string `json:"url,omitempty"` - ParentStudio string `json:"parent_studio,omitempty"` - Image string `json:"image,omitempty"` - CreatedAt models.JSONTime `json:"created_at,omitempty"` - UpdatedAt models.JSONTime `json:"updated_at,omitempty"` - Rating int `json:"rating,omitempty"` - Details string `json:"details,omitempty"` - Aliases []string `json:"aliases,omitempty"` + Name string `json:"name,omitempty"` + URL string `json:"url,omitempty"` + ParentStudio string `json:"parent_studio,omitempty"` + Image string `json:"image,omitempty"` + CreatedAt models.JSONTime `json:"created_at,omitempty"` + UpdatedAt models.JSONTime `json:"updated_at,omitempty"` + Rating int `json:"rating,omitempty"` + Details string `json:"details,omitempty"` + Aliases []string `json:"aliases,omitempty"` + StashIDs []models.StashID `json:"stash_ids,omitempty"` } func LoadStudioFile(filePath string) (*Studio, error) { diff --git a/pkg/manager/manager.go b/pkg/manager/manager.go index 03f0fe260..4d7a44efe 100644 --- a/pkg/manager/manager.go +++ b/pkg/manager/manager.go @@ -1,11 +1,15 @@ package manager import ( + "context" "errors" "fmt" + "io/ioutil" "os" "path/filepath" + "runtime" "runtime/pprof" + "strings" "sync" "time" @@ -29,8 +33,8 @@ type singleton struct { Paths *paths.Paths - FFMPEGPath string - FFProbePath string + FFMPEG ffmpeg.Encoder + FFProbe ffmpeg.FFProbe SessionStore *session.Store @@ -58,6 +62,7 @@ func GetInstance() *singleton { func Initialize() *singleton { once.Do(func() 
{ + ctx := context.TODO() cfg, err := config.Initialize() if err != nil { @@ -92,17 +97,15 @@ func Initialize() *singleton { if err != nil { panic(fmt.Sprintf("error initializing configuration: %s", err.Error())) - } else { - if err := instance.PostInit(); err != nil { - panic(err) - } + } else if err := instance.PostInit(ctx); err != nil { + panic(err) } initSecurity(cfg) } else { cfgFile := cfg.GetConfigFile() if cfgFile != "" { - cfgFile = cfgFile + " " + cfgFile += " " } // create temporary session store - this will be re-initialised @@ -152,6 +155,8 @@ func initProfiling(cpuProfilePath string) { } func initFFMPEG() error { + ctx := context.TODO() + // only do this if we have a config file set if instance.Config.GetConfigFile() != "" { // use same directory as config path @@ -164,7 +169,7 @@ func initFFMPEG() error { if ffmpegPath == "" || ffprobePath == "" { logger.Infof("couldn't find FFMPEG, attempting to download it") - if err := ffmpeg.Download(configDirectory); err != nil { + if err := ffmpeg.Download(ctx, configDirectory); err != nil { msg := `Unable to locate / automatically download FFMPEG Check the readme for download links. @@ -180,8 +185,8 @@ func initFFMPEG() error { } } - instance.FFMPEGPath = ffmpegPath - instance.FFProbePath = ffprobePath + instance.FFMPEG = ffmpeg.Encoder(ffmpegPath) + instance.FFProbe = ffmpeg.FFProbe(ffprobePath) } return nil @@ -195,7 +200,7 @@ func initLog() { // PostInit initialises the paths, caches and txnManager after the initial // configuration has been set. Should only be called if the configuration // is valid. 
-func (s *singleton) PostInit() error { +func (s *singleton) PostInit(ctx context.Context) error { if err := s.Config.SetInitialConfig(); err != nil { logger.Warnf("could not set initial configuration: %v", err) } @@ -235,7 +240,7 @@ func (s *singleton) PostInit() error { } if database.Ready() == nil { - s.PostMigrate() + s.PostMigrate(ctx) } return nil @@ -295,41 +300,51 @@ func setSetupDefaults(input *models.SetupInput) { } } -func (s *singleton) Setup(input models.SetupInput) error { +func (s *singleton) Setup(ctx context.Context, input models.SetupInput) error { setSetupDefaults(&input) + c := s.Config // create the config directory if it does not exist - configDir := filepath.Dir(input.ConfigLocation) - if exists, _ := utils.DirExists(configDir); !exists { - if err := os.Mkdir(configDir, 0755); err != nil { - return fmt.Errorf("abc: %s", err.Error()) + // don't do anything if config is already set in the environment + if !config.FileEnvSet() { + configDir := filepath.Dir(input.ConfigLocation) + if exists, _ := utils.DirExists(configDir); !exists { + if err := os.Mkdir(configDir, 0755); err != nil { + return fmt.Errorf("error creating config directory: %v", err) + } } + + if err := utils.Touch(input.ConfigLocation); err != nil { + return fmt.Errorf("error creating config file: %v", err) + } + + s.Config.SetConfigFile(input.ConfigLocation) } // create the generated directory if it does not exist - if exists, _ := utils.DirExists(input.GeneratedLocation); !exists { - if err := os.Mkdir(input.GeneratedLocation, 0755); err != nil { - return fmt.Errorf("error creating generated directory: %s", err.Error()) + if !c.HasOverride(config.Generated) { + if exists, _ := utils.DirExists(input.GeneratedLocation); !exists { + if err := os.Mkdir(input.GeneratedLocation, 0755); err != nil { + return fmt.Errorf("error creating generated directory: %v", err) + } } - } - if err := utils.Touch(input.ConfigLocation); err != nil { - return fmt.Errorf("error creating config file: %s", 
err.Error()) + s.Config.Set(config.Generated, input.GeneratedLocation) } - s.Config.SetConfigFile(input.ConfigLocation) - // set the configuration - s.Config.Set(config.Generated, input.GeneratedLocation) - s.Config.Set(config.Database, input.DatabaseFile) + if !c.HasOverride(config.Database) { + s.Config.Set(config.Database, input.DatabaseFile) + } + s.Config.Set(config.Stash, input.Stashes) if err := s.Config.Write(); err != nil { - return fmt.Errorf("error writing configuration file: %s", err.Error()) + return fmt.Errorf("error writing configuration file: %v", err) } // initialise the database - if err := s.PostInit(); err != nil { - return fmt.Errorf("error initializing the database: %s", err.Error()) + if err := s.PostInit(ctx); err != nil { + return fmt.Errorf("error initializing the database: %v", err) } s.Config.FinalizeSetup() @@ -342,14 +357,14 @@ func (s *singleton) Setup(input models.SetupInput) error { } func (s *singleton) validateFFMPEG() error { - if s.FFMPEGPath == "" || s.FFProbePath == "" { + if s.FFMPEG == "" || s.FFProbe == "" { return errors.New("missing ffmpeg and/or ffprobe") } return nil } -func (s *singleton) Migrate(input models.MigrateInput) error { +func (s *singleton) Migrate(ctx context.Context, input models.MigrateInput) error { // always backup so that we can roll back to the previous version if // migration fails backupPath := input.BackupPath @@ -377,7 +392,7 @@ func (s *singleton) Migrate(input models.MigrateInput) error { } // perform post-migration operations - s.PostMigrate() + s.PostMigrate(ctx) // if no backup path was provided, then delete the created backup if input.BackupPath == "" { @@ -389,6 +404,34 @@ func (s *singleton) Migrate(input models.MigrateInput) error { return nil } +func (s *singleton) IsDesktop() bool { + // check if running under root + if os.Getuid() == 0 { + return false + } + // check if started by init, e.g. 
stash is a *nix systemd service / MacOS launchd service + if os.Getppid() == 1 { + return false + } + if IsServerDockerized() { + return false + } + + return true +} + +func IsServerDockerized() bool { + if runtime.GOOS == "linux" { + _, dockerEnvErr := os.Stat("/.dockerenv") + cgroups, _ := ioutil.ReadFile("/proc/self/cgroup") + if os.IsExist(dockerEnvErr) || strings.Contains(string(cgroups), "docker") { + return true + } + } + + return false +} + func (s *singleton) GetSystemStatus() *models.SystemStatus { status := models.SystemStatusEnumOk dbSchema := int(database.Version()) diff --git a/pkg/manager/manager_tasks.go b/pkg/manager/manager_tasks.go index a626dffad..9713bfd42 100644 --- a/pkg/manager/manager_tasks.go +++ b/pkg/manager/manager_tasks.go @@ -6,9 +6,6 @@ import ( "fmt" "strconv" "sync" - "time" - - "github.com/remeh/sizedwaitgroup" "github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/logger" @@ -19,17 +16,17 @@ import ( func isGallery(pathname string) bool { gExt := config.GetInstance().GetGalleryExtensions() - return matchExtension(pathname, gExt) + return utils.MatchExtension(pathname, gExt) } func isVideo(pathname string) bool { vidExt := config.GetInstance().GetVideoExtensions() - return matchExtension(pathname, vidExt) + return utils.MatchExtension(pathname, vidExt) } func isImage(pathname string) bool { imgExt := config.GetInstance().GetImageExtensions() - return matchExtension(pathname, imgExt) + return utils.MatchExtension(pathname, imgExt) } func getScanPaths(inputPaths []string) []*models.StashConfig { @@ -90,7 +87,7 @@ func (s *singleton) Import(ctx context.Context) (int, error) { MissingRefBehaviour: models.ImportMissingRefEnumFail, fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(), } - task.Start() + task.Start(ctx) }) return s.JobManager.Add(ctx, "Importing...", j), nil @@ -122,7 +119,7 @@ func (s *singleton) RunSingleTask(ctx context.Context, t Task) int { wg.Add(1) j := job.MakeJobExec(func(ctx context.Context, 
progress *job.Progress) { - t.Start() + t.Start(ctx) wg.Done() }) @@ -165,224 +162,10 @@ func (s *singleton) Generate(ctx context.Context, input models.GenerateMetadataI logger.Warnf("could not generate temporary directory: %v", err) } - sceneIDs, err := utils.StringSliceToIntSlice(input.SceneIDs) - if err != nil { - logger.Error(err.Error()) + j := &GenerateJob{ + txnManager: s.TxnManager, + input: input, } - markerIDs, err := utils.StringSliceToIntSlice(input.MarkerIDs) - if err != nil { - logger.Error(err.Error()) - } - - // TODO - formalise this - j := job.MakeJobExec(func(ctx context.Context, progress *job.Progress) { - var scenes []*models.Scene - var err error - var markers []*models.SceneMarker - - if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { - qb := r.Scene() - if len(sceneIDs) > 0 { - scenes, err = qb.FindMany(sceneIDs) - } else { - scenes, err = qb.All() - } - - if err != nil { - return err - } - - if len(markerIDs) > 0 { - markers, err = r.SceneMarker().FindMany(markerIDs) - if err != nil { - return err - } - } - - return nil - }); err != nil { - logger.Error(err.Error()) - return - } - - config := config.GetInstance() - parallelTasks := config.GetParallelTasksWithAutoDetection() - - logger.Infof("Generate started with %d parallel tasks", parallelTasks) - wg := sizedwaitgroup.New(parallelTasks) - - lenScenes := len(scenes) - total := lenScenes + len(markers) - progress.SetTotal(total) - - if job.IsCancelled(ctx) { - logger.Info("Stopping due to user request") - return - } - - // TODO - consider removing this. Even though we're only waiting a maximum of - // 90 seconds for this, it is all for a simple log message, and probably not worth - // waiting for - var totalsNeeded *totalsGenerate - progress.ExecuteTask("Calculating content to generate...", func() { - totalsNeeded = s.neededGenerate(scenes, input) - - if totalsNeeded == nil { - logger.Infof("Taking too long to count content. 
Skipping...") - logger.Infof("Generating content") - } else { - logger.Infof("Generating %d sprites %d previews %d image previews %d markers %d transcodes %d phashes", totalsNeeded.sprites, totalsNeeded.previews, totalsNeeded.imagePreviews, totalsNeeded.markers, totalsNeeded.transcodes, totalsNeeded.phashes) - } - }) - - fileNamingAlgo := config.GetVideoFileNamingAlgorithm() - - overwrite := false - if input.Overwrite != nil { - overwrite = *input.Overwrite - } - - generatePreviewOptions := input.PreviewOptions - if generatePreviewOptions == nil { - generatePreviewOptions = &models.GeneratePreviewOptionsInput{} - } - setGeneratePreviewOptionsInput(generatePreviewOptions) - - // Start measuring how long the generate has taken. (consider moving this up) - start := time.Now() - if err = instance.Paths.Generated.EnsureTmpDir(); err != nil { - logger.Warnf("could not create temporary directory: %v", err) - } - - for _, scene := range scenes { - progress.Increment() - if job.IsCancelled(ctx) { - logger.Info("Stopping due to user request") - wg.Wait() - if err := instance.Paths.Generated.EmptyTmpDir(); err != nil { - logger.Warnf("failure emptying temporary directory: %v", err) - } - return - } - - if scene == nil { - logger.Errorf("nil scene, skipping generate") - continue - } - - if utils.IsTrue(input.Sprites) { - task := GenerateSpriteTask{ - Scene: *scene, - Overwrite: overwrite, - fileNamingAlgorithm: fileNamingAlgo, - } - wg.Add() - go progress.ExecuteTask(fmt.Sprintf("Generating sprites for %s", scene.Path), func() { - task.Start() - wg.Done() - }) - } - - if utils.IsTrue(input.Previews) { - task := GeneratePreviewTask{ - Scene: *scene, - ImagePreview: utils.IsTrue(input.ImagePreviews), - Options: *generatePreviewOptions, - Overwrite: overwrite, - fileNamingAlgorithm: fileNamingAlgo, - } - wg.Add() - go progress.ExecuteTask(fmt.Sprintf("Generating preview for %s", scene.Path), func() { - task.Start() - wg.Done() - }) - } - - if utils.IsTrue(input.Markers) { - 
wg.Add() - task := GenerateMarkersTask{ - TxnManager: s.TxnManager, - Scene: scene, - Overwrite: overwrite, - fileNamingAlgorithm: fileNamingAlgo, - ImagePreview: utils.IsTrue(input.MarkerImagePreviews), - Screenshot: utils.IsTrue(input.MarkerScreenshots), - } - go progress.ExecuteTask(fmt.Sprintf("Generating markers for %s", scene.Path), func() { - task.Start() - wg.Done() - }) - } - - if utils.IsTrue(input.Transcodes) { - wg.Add() - task := GenerateTranscodeTask{ - Scene: *scene, - Overwrite: overwrite, - fileNamingAlgorithm: fileNamingAlgo, - } - go progress.ExecuteTask(fmt.Sprintf("Generating transcode for %s", scene.Path), func() { - task.Start() - wg.Done() - }) - } - - if utils.IsTrue(input.Phashes) { - task := GeneratePhashTask{ - Scene: *scene, - fileNamingAlgorithm: fileNamingAlgo, - txnManager: s.TxnManager, - Overwrite: overwrite, - } - wg.Add() - go progress.ExecuteTask(fmt.Sprintf("Generating phash for %s", scene.Path), func() { - task.Start() - wg.Done() - }) - } - } - - wg.Wait() - - for _, marker := range markers { - progress.Increment() - if job.IsCancelled(ctx) { - logger.Info("Stopping due to user request") - wg.Wait() - if err := instance.Paths.Generated.EmptyTmpDir(); err != nil { - logger.Warnf("failure emptying temporary directory: %v", err) - } - elapsed := time.Since(start) - logger.Info(fmt.Sprintf("Generate finished (%s)", elapsed)) - return - } - - if marker == nil { - logger.Errorf("nil marker, skipping generate") - continue - } - - wg.Add() - task := GenerateMarkersTask{ - TxnManager: s.TxnManager, - Marker: marker, - Overwrite: overwrite, - fileNamingAlgorithm: fileNamingAlgo, - } - go progress.ExecuteTask(fmt.Sprintf("Generating marker preview for marker ID %d", marker.ID), func() { - task.Start() - wg.Done() - }) - } - - wg.Wait() - - if err = instance.Paths.Generated.EmptyTmpDir(); err != nil { - logger.Warnf("failure emptying temporary directory: %v", err) - } - elapsed := time.Since(start) - logger.Info(fmt.Sprintf("Generate 
finished (%s)", elapsed)) - }) return s.JobManager.Add(ctx, "Generating...", j), nil } @@ -425,7 +208,7 @@ func (s *singleton) generateScreenshot(ctx context.Context, sceneId string, at * fileNamingAlgorithm: config.GetInstance().GetVideoFileNamingAlgorithm(), } - task.Start() + task.Start(ctx) logger.Infof("Generate screenshot finished") }) @@ -443,136 +226,13 @@ func (s *singleton) AutoTag(ctx context.Context, input models.AutoTagMetadataInp } func (s *singleton) Clean(ctx context.Context, input models.CleanMetadataInput) int { - j := job.MakeJobExec(func(ctx context.Context, progress *job.Progress) { - var scenes []*models.Scene - var images []*models.Image - var galleries []*models.Gallery + j := cleanJob{ + txnManager: s.TxnManager, + input: input, + scanSubs: s.scanSubs, + } - if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { - qb := r.Scene() - iqb := r.Image() - gqb := r.Gallery() - - logger.Infof("Starting cleaning of tracked files") - if input.DryRun { - logger.Infof("Running in Dry Mode") - } - var err error - - scenes, err = qb.All() - - if err != nil { - return errors.New("failed to fetch list of scenes for cleaning") - } - - images, err = iqb.All() - if err != nil { - return errors.New("failed to fetch list of images for cleaning") - } - - galleries, err = gqb.All() - if err != nil { - return errors.New("failed to fetch list of galleries for cleaning") - } - - return nil - }); err != nil { - logger.Error(err.Error()) - return - } - - if job.IsCancelled(ctx) { - logger.Info("Stopping due to user request") - return - } - - var wg sync.WaitGroup - total := len(scenes) + len(images) + len(galleries) - progress.SetTotal(total) - fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm() - for _, scene := range scenes { - progress.Increment() - if job.IsCancelled(ctx) { - logger.Info("Stopping due to user request") - return - } - - if scene == nil { - logger.Errorf("nil scene, skipping Clean") - continue - 
} - - wg.Add(1) - - task := CleanTask{ - ctx: ctx, - TxnManager: s.TxnManager, - Scene: scene, - fileNamingAlgorithm: fileNamingAlgo, - } - go progress.ExecuteTask(fmt.Sprintf("Assessing scene %s for clean", scene.Path), func() { - task.Start(&wg, input.DryRun) - }) - - wg.Wait() - } - - for _, img := range images { - progress.Increment() - if job.IsCancelled(ctx) { - logger.Info("Stopping due to user request") - return - } - - if img == nil { - logger.Errorf("nil image, skipping Clean") - continue - } - - wg.Add(1) - - task := CleanTask{ - ctx: ctx, - TxnManager: s.TxnManager, - Image: img, - } - go progress.ExecuteTask(fmt.Sprintf("Assessing image %s for clean", img.Path), func() { - task.Start(&wg, input.DryRun) - }) - wg.Wait() - } - - for _, gallery := range galleries { - progress.Increment() - if job.IsCancelled(ctx) { - logger.Info("Stopping due to user request") - return - } - - if gallery == nil { - logger.Errorf("nil gallery, skipping Clean") - continue - } - - wg.Add(1) - - task := CleanTask{ - ctx: ctx, - TxnManager: s.TxnManager, - Gallery: gallery, - } - go progress.ExecuteTask(fmt.Sprintf("Assessing gallery %s for clean", gallery.GetTitle()), func() { - task.Start(&wg, input.DryRun) - }) - wg.Wait() - } - - logger.Info("Finished Cleaning") - - s.scanSubs.notify() - }) - - return s.JobManager.Add(ctx, "Cleaning...", j) + return s.JobManager.Add(ctx, "Cleaning...", &j) } func (s *singleton) MigrateHash(ctx context.Context) int { @@ -623,109 +283,6 @@ func (s *singleton) MigrateHash(ctx context.Context) int { return s.JobManager.Add(ctx, "Migrating scene hashes...", j) } -type totalsGenerate struct { - sprites int64 - previews int64 - imagePreviews int64 - markers int64 - transcodes int64 - phashes int64 -} - -func (s *singleton) neededGenerate(scenes []*models.Scene, input models.GenerateMetadataInput) *totalsGenerate { - - var totals totalsGenerate - const timeout = 90 * time.Second - - // create a control channel through which to signal the counting 
loop when the timeout is reached - chTimeout := make(chan struct{}) - - //run the timeout function in a separate thread - go func() { - time.Sleep(timeout) - chTimeout <- struct{}{} - }() - - fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm() - overwrite := false - if input.Overwrite != nil { - overwrite = *input.Overwrite - } - - logger.Infof("Counting content to generate...") - for _, scene := range scenes { - if scene != nil { - if utils.IsTrue(input.Sprites) { - task := GenerateSpriteTask{ - Scene: *scene, - fileNamingAlgorithm: fileNamingAlgo, - } - - if overwrite || task.required() { - totals.sprites++ - } - } - - if utils.IsTrue(input.Previews) { - task := GeneratePreviewTask{ - Scene: *scene, - ImagePreview: utils.IsTrue(input.ImagePreviews), - fileNamingAlgorithm: fileNamingAlgo, - } - - sceneHash := scene.GetHash(task.fileNamingAlgorithm) - if overwrite || !task.doesVideoPreviewExist(sceneHash) { - totals.previews++ - } - - if utils.IsTrue(input.ImagePreviews) && (overwrite || !task.doesImagePreviewExist(sceneHash)) { - totals.imagePreviews++ - } - } - - if utils.IsTrue(input.Markers) { - task := GenerateMarkersTask{ - TxnManager: s.TxnManager, - Scene: scene, - Overwrite: overwrite, - fileNamingAlgorithm: fileNamingAlgo, - } - totals.markers += int64(task.isMarkerNeeded()) - } - - if utils.IsTrue(input.Transcodes) { - task := GenerateTranscodeTask{ - Scene: *scene, - Overwrite: overwrite, - fileNamingAlgorithm: fileNamingAlgo, - } - if task.isTranscodeNeeded() { - totals.transcodes++ - } - } - - if utils.IsTrue(input.Phashes) { - task := GeneratePhashTask{ - Scene: *scene, - fileNamingAlgorithm: fileNamingAlgo, - } - - if task.shouldGenerate() { - totals.phashes++ - } - } - } - //check for timeout - select { - case <-chTimeout: - return nil - default: - } - - } - return &totals -} - func (s *singleton) StashBoxBatchPerformerTag(ctx context.Context, input models.StashBoxBatchPerformerTagInput) int { j := job.MakeJobExec(func(ctx 
context.Context, progress *job.Progress) { logger.Infof("Initiating stash-box batch performer tag") @@ -739,7 +296,13 @@ func (s *singleton) StashBoxBatchPerformerTag(ctx context.Context, input models. var tasks []StashBoxPerformerTagTask - if len(input.PerformerIds) > 0 { + // The gocritic linter wants to turn this ifElseChain into a switch. + // however, such a switch would contain quite large blocks for each section + // and would arguably be hard to read. + // + // This is why we mark this section nolint. In principle, we should look to + // rewrite the section at some point, to avoid the linter warning. + if len(input.PerformerIds) > 0 { //nolint:gocritic if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { performerQuery := r.Performer() @@ -775,7 +338,11 @@ func (s *singleton) StashBoxBatchPerformerTag(ctx context.Context, input models. }) } } - } else { + } else { //nolint:gocritic + // The gocritic linter wants to fold this if-block into the else on the line above. + // However, this doesn't really help with readability of the current section. Mark it + // as nolint for now. In the future we'd like to rewrite this code by factoring some of + // this into separate functions. if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { performerQuery := r.Performer() var performers []*models.Performer @@ -786,7 +353,7 @@ func (s *singleton) StashBoxBatchPerformerTag(ctx context.Context, input models. 
performers, err = performerQuery.FindByStashIDStatus(false, box.Endpoint) } if err != nil { - return fmt.Errorf("error querying performers: %s", err.Error()) + return fmt.Errorf("error querying performers: %v", err) } for _, performer := range performers { diff --git a/pkg/manager/post_migrate.go b/pkg/manager/post_migrate.go index 400ae39a0..eaf397708 100644 --- a/pkg/manager/post_migrate.go +++ b/pkg/manager/post_migrate.go @@ -1,6 +1,8 @@ package manager +import "context" + // PostMigrate is executed after migrations have been executed. -func (s *singleton) PostMigrate() { - setInitialMD5Config(s.TxnManager) +func (s *singleton) PostMigrate(ctx context.Context) { + setInitialMD5Config(ctx, s.TxnManager) } diff --git a/pkg/manager/scene.go b/pkg/manager/scene.go index 58ce2e65d..ffbb98c27 100644 --- a/pkg/manager/scene.go +++ b/pkg/manager/scene.go @@ -192,9 +192,10 @@ func GetSceneFileContainer(scene *models.Scene) (ffmpeg.Container, error) { container = ffmpeg.Container(scene.Format.String) } else { // container isn't in the DB // shouldn't happen, fallback to ffprobe - tmpVideoFile, err := ffmpeg.NewVideoFile(GetInstance().FFProbePath, scene.Path, false) + ffprobe := GetInstance().FFProbe + tmpVideoFile, err := ffprobe.NewVideoFile(scene.Path, false) if err != nil { - return ffmpeg.Container(""), fmt.Errorf("error reading video file: %s", err.Error()) + return ffmpeg.Container(""), fmt.Errorf("error reading video file: %v", err) } container = ffmpeg.MatchContainer(tmpVideoFile.Container, scene.Path) diff --git a/pkg/manager/scene_screenshot.go b/pkg/manager/scene_screenshot.go deleted file mode 100644 index 0e410ae5d..000000000 --- a/pkg/manager/scene_screenshot.go +++ /dev/null @@ -1,61 +0,0 @@ -package manager - -import ( - "bytes" - "image" - "image/jpeg" - "os" - - "github.com/disintegration/imaging" - - // needed to decode other image formats - _ "image/gif" - _ "image/png" -) - -func writeImage(path string, imageData []byte) error { - f, err := 
os.Create(path) - if err != nil { - return err - } - defer f.Close() - - _, err = f.Write(imageData) - return err -} - -func writeThumbnail(path string, thumbnail image.Image) error { - f, err := os.Create(path) - if err != nil { - return err - } - defer f.Close() - - return jpeg.Encode(f, thumbnail, nil) -} - -func SetSceneScreenshot(checksum string, imageData []byte) error { - thumbPath := instance.Paths.Scene.GetThumbnailScreenshotPath(checksum) - normalPath := instance.Paths.Scene.GetScreenshotPath(checksum) - - img, _, err := image.Decode(bytes.NewReader(imageData)) - if err != nil { - return err - } - - // resize to 320 width maintaining aspect ratio, for the thumbnail - const width = 320 - origWidth := img.Bounds().Max.X - origHeight := img.Bounds().Max.Y - height := width / origWidth * origHeight - - thumbnail := imaging.Resize(img, width, height, imaging.Lanczos) - err = writeThumbnail(thumbPath, thumbnail) - if err != nil { - return err - } - - err = writeImage(normalPath, imageData) - - return err -} diff --git a/pkg/manager/screenshot.go b/pkg/manager/screenshot.go index 739162491..9167d1d81 100644 --- a/pkg/manager/screenshot.go +++ b/pkg/manager/screenshot.go @@ -6,7 +6,7 @@ import ( ) func makeScreenshot(probeResult ffmpeg.VideoFile, outputPath string, quality int, width int, time float64) { - encoder := ffmpeg.NewEncoder(instance.FFMPEGPath) + encoder := instance.FFMPEG options := ffmpeg.ScreenshotOptions{ OutputPath: outputPath, Quality: quality, diff --git a/pkg/manager/studio.go b/pkg/manager/studio.go index 9602f50b6..3b0d81ceb 100644 --- a/pkg/manager/studio.go +++ b/pkg/manager/studio.go @@ -24,7 +24,7 @@ func ValidateModifyStudio(studio models.StudioPartial, qb models.StudioReader) e currentStudio, err := qb.Find(int(currentParentID.Int64)) if err != nil { - return fmt.Errorf("error finding parent studio: %s", err.Error()) + return fmt.Errorf("error finding parent studio: %v", err) } currentParentID = currentStudio.ParentID diff --git 
a/pkg/manager/task.go b/pkg/manager/task.go index 9906948c8..ec2676563 100644 --- a/pkg/manager/task.go +++ b/pkg/manager/task.go @@ -1,6 +1,8 @@ package manager +import "context" + type Task interface { - Start() + Start(context.Context) GetDescription() string } diff --git a/pkg/manager/task_autotag.go b/pkg/manager/task_autotag.go index 60b86771b..421eff709 100644 --- a/pkg/manager/task_autotag.go +++ b/pkg/manager/task_autotag.go @@ -9,9 +9,11 @@ import ( "sync" "github.com/stashapp/stash/pkg/autotag" + "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scene" ) type autoTagJob struct { @@ -73,19 +75,19 @@ func (j *autoTagJob) autoTagSpecific(ctx context.Context, progress *job.Progress if performerCount == 1 && performerIds[0] == wildcard { performerCount, err = performerQuery.Count() if err != nil { - return fmt.Errorf("error getting performer count: %s", err.Error()) + return fmt.Errorf("error getting performer count: %v", err) } } if studioCount == 1 && studioIds[0] == wildcard { studioCount, err = studioQuery.Count() if err != nil { - return fmt.Errorf("error getting studio count: %s", err.Error()) + return fmt.Errorf("error getting studio count: %v", err) } } if tagCount == 1 && tagIds[0] == wildcard { tagCount, err = tagQuery.Count() if err != nil { - return fmt.Errorf("error getting tag count: %s", err.Error()) + return fmt.Errorf("error getting tag count: %v", err) } } @@ -122,7 +124,7 @@ func (j *autoTagJob) autoTagPerformers(ctx context.Context, progress *job.Progre var err error performers, err = performerQuery.All() if err != nil { - return fmt.Errorf("error querying performers: %s", err.Error()) + return fmt.Errorf("error querying performers: %v", err) } } else { performerIdInt, err := strconv.Atoi(performerId) @@ -188,7 +190,7 @@ func (j *autoTagJob) autoTagStudios(ctx context.Context, progress *job.Progress, var 
err error studios, err = studioQuery.All() if err != nil { - return fmt.Errorf("error querying studios: %s", err.Error()) + return fmt.Errorf("error querying studios: %v", err) } } else { studioIdInt, err := strconv.Atoi(studioId) @@ -259,7 +261,7 @@ func (j *autoTagJob) autoTagTags(ctx context.Context, progress *job.Progress, pa var err error tags, err = tagQuery.All() if err != nil { - return fmt.Errorf("error querying tags: %s", err.Error()) + return fmt.Errorf("error querying tags: %v", err) } } else { tagIdInt, err := strconv.Atoi(tagId) @@ -324,28 +326,7 @@ type autoTagFilesTask struct { } func (t *autoTagFilesTask) makeSceneFilter() *models.SceneFilterType { - ret := &models.SceneFilterType{} - or := ret - sep := string(filepath.Separator) - - for _, p := range t.paths { - if !strings.HasSuffix(p, sep) { - p = p + sep - } - - if ret.Path == nil { - or = ret - } else { - newOr := &models.SceneFilterType{} - or.Or = newOr - or = newOr - } - - or.Path = &models.StringCriterionInput{ - Modifier: models.CriterionModifierEquals, - Value: p + "%", - } - } + ret := scene.FilterFromPaths(t.paths) organized := false ret.Organized = &organized @@ -360,7 +341,7 @@ func (t *autoTagFilesTask) makeImageFilter() *models.ImageFilterType { for _, p := range t.paths { if !strings.HasSuffix(p, sep) { - p = p + sep + p += sep } if ret.Path == nil { @@ -397,7 +378,7 @@ func (t *autoTagFilesTask) makeGalleryFilter() *models.GalleryFilterType { for _, p := range t.paths { if !strings.HasSuffix(p, sep) { - p = p + sep + p += sep } if ret.Path == nil { @@ -426,16 +407,32 @@ func (t *autoTagFilesTask) getCount(r models.ReaderRepository) (int, error) { PerPage: &pp, } - _, sceneCount, err := r.Scene().Query(t.makeSceneFilter(), findFilter) + sceneResults, err := r.Scene().Query(models.SceneQueryOptions{ + QueryOptions: models.QueryOptions{ + FindFilter: findFilter, + Count: true, + }, + SceneFilter: t.makeSceneFilter(), + }) if err != nil { return 0, err } - _, imageCount, err := 
r.Image().Query(t.makeImageFilter(), findFilter) + sceneCount := sceneResults.Count + + imageResults, err := r.Image().Query(models.ImageQueryOptions{ + QueryOptions: models.QueryOptions{ + FindFilter: findFilter, + Count: true, + }, + ImageFilter: t.makeImageFilter(), + }) if err != nil { return 0, err } + imageCount := imageResults.Count + _, galleryCount, err := r.Gallery().Query(t.makeGalleryFilter(), findFilter) if err != nil { return 0, err @@ -444,14 +441,6 @@ func (t *autoTagFilesTask) getCount(r models.ReaderRepository) (int, error) { return sceneCount + imageCount + galleryCount, nil } -func (t *autoTagFilesTask) batchFindFilter(batchSize int) *models.FindFilterType { - page := 1 - return &models.FindFilterType{ - PerPage: &batchSize, - Page: &page, - } -} - func (t *autoTagFilesTask) processScenes(r models.ReaderRepository) error { if job.IsCancelled(t.ctx) { return nil @@ -459,12 +448,12 @@ func (t *autoTagFilesTask) processScenes(r models.ReaderRepository) error { batchSize := 1000 - findFilter := t.batchFindFilter(batchSize) + findFilter := models.BatchFindFilter(batchSize) sceneFilter := t.makeSceneFilter() more := true for more { - scenes, _, err := r.Scene().Query(sceneFilter, findFilter) + scenes, err := scene.Query(r.Scene(), sceneFilter, findFilter) if err != nil { return err } @@ -507,12 +496,12 @@ func (t *autoTagFilesTask) processImages(r models.ReaderRepository) error { batchSize := 1000 - findFilter := t.batchFindFilter(batchSize) + findFilter := models.BatchFindFilter(batchSize) imageFilter := t.makeImageFilter() more := true for more { - images, _, err := r.Image().Query(imageFilter, findFilter) + images, err := image.Query(r.Image(), imageFilter, findFilter) if err != nil { return err } @@ -555,7 +544,7 @@ func (t *autoTagFilesTask) processGalleries(r models.ReaderRepository) error { batchSize := 1000 - findFilter := t.batchFindFilter(batchSize) + findFilter := models.BatchFindFilter(batchSize) galleryFilter := t.makeGalleryFilter() more 
:= true diff --git a/pkg/manager/task_clean.go b/pkg/manager/task_clean.go index 804e039b2..0d5c17036 100644 --- a/pkg/manager/task_clean.go +++ b/pkg/manager/task_clean.go @@ -2,147 +2,377 @@ package manager import ( "context" + "fmt" "os" "path/filepath" - "sync" "github.com/stashapp/stash/pkg/image" + "github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin" + "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/utils" ) -type CleanTask struct { - ctx context.Context - TxnManager models.TransactionManager - Scene *models.Scene - Gallery *models.Gallery - Image *models.Image - fileNamingAlgorithm models.HashAlgorithm +type cleanJob struct { + txnManager models.TransactionManager + input models.CleanMetadataInput + scanSubs *subscriptionManager } -func (t *CleanTask) Start(wg *sync.WaitGroup, dryRun bool) { - defer wg.Done() - - if t.Scene != nil && t.shouldCleanScene(t.Scene) && !dryRun { - t.deleteScene(t.Scene.ID) +func (j *cleanJob) Execute(ctx context.Context, progress *job.Progress) { + logger.Infof("Starting cleaning of tracked files") + if j.input.DryRun { + logger.Infof("Running in Dry Mode") } - if t.Gallery != nil && t.shouldCleanGallery(t.Gallery) && !dryRun { - t.deleteGallery(t.Gallery.ID) + if err := j.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + total, err := j.getCount(r) + if err != nil { + return fmt.Errorf("error getting count: %w", err) + } + + progress.SetTotal(total) + + if job.IsCancelled(ctx) { + return nil + } + + if err := j.processScenes(ctx, progress, r.Scene()); err != nil { + return fmt.Errorf("error cleaning scenes: %w", err) + } + if err := j.processImages(ctx, progress, r.Image()); err != nil { + return fmt.Errorf("error cleaning images: %w", err) + } + if err := j.processGalleries(ctx, progress, r.Gallery()); err != nil { + return 
fmt.Errorf("error cleaning galleries: %w", err) + } + + return nil + }); err != nil { + logger.Error(err.Error()) + return } - if t.Image != nil && t.shouldCleanImage(t.Image) && !dryRun { - t.deleteImage(t.Image.ID) + if job.IsCancelled(ctx) { + logger.Info("Stopping due to user request") + return } + + j.scanSubs.notify() + logger.Info("Finished Cleaning") } -func (t *CleanTask) shouldClean(path string) bool { +func (j *cleanJob) getCount(r models.ReaderRepository) (int, error) { + sceneCount, err := r.Scene().Count() + if err != nil { + return 0, err + } + + imageCount, err := r.Image().Count() + if err != nil { + return 0, err + } + + galleryCount, err := r.Gallery().Count() + if err != nil { + return 0, err + } + + return sceneCount + imageCount + galleryCount, nil +} + +func (j *cleanJob) processScenes(ctx context.Context, progress *job.Progress, qb models.SceneReader) error { + batchSize := 1000 + + findFilter := models.BatchFindFilter(batchSize) + sort := "path" + findFilter.Sort = &sort + + var toDelete []int + + more := true + for more { + if job.IsCancelled(ctx) { + return nil + } + + scenes, err := scene.Query(qb, nil, findFilter) + if err != nil { + return fmt.Errorf("error querying for scenes: %w", err) + } + + for _, scene := range scenes { + progress.ExecuteTask(fmt.Sprintf("Assessing scene %s for clean", scene.Path), func() { + if j.shouldCleanScene(scene) { + toDelete = append(toDelete, scene.ID) + } else { + // increment progress, no further processing + progress.Increment() + } + }) + } + + if len(scenes) != batchSize { + more = false + } else { + *findFilter.Page++ + } + } + + if j.input.DryRun && len(toDelete) > 0 { + // add progress for scenes that would've been deleted + progress.AddProcessed(len(toDelete)) + } + + fileNamingAlgorithm := instance.Config.GetVideoFileNamingAlgorithm() + + if !j.input.DryRun && len(toDelete) > 0 { + progress.ExecuteTask(fmt.Sprintf("Cleaning %d scenes", len(toDelete)), func() { + for _, sceneID := range 
toDelete { + if job.IsCancelled(ctx) { + return + } + + j.deleteScene(ctx, fileNamingAlgorithm, sceneID) + + progress.Increment() + } + }) + } + + return nil +} + +func (j *cleanJob) processGalleries(ctx context.Context, progress *job.Progress, qb models.GalleryReader) error { + batchSize := 1000 + + findFilter := models.BatchFindFilter(batchSize) + sort := "path" + findFilter.Sort = &sort + + var toDelete []int + + more := true + for more { + if job.IsCancelled(ctx) { + return nil + } + + galleries, _, err := qb.Query(nil, findFilter) + if err != nil { + return fmt.Errorf("error querying for galleries: %w", err) + } + + for _, gallery := range galleries { + progress.ExecuteTask(fmt.Sprintf("Assessing gallery %s for clean", gallery.GetTitle()), func() { + if j.shouldCleanGallery(gallery) { + toDelete = append(toDelete, gallery.ID) + } else { + // increment progress, no further processing + progress.Increment() + } + }) + } + + if len(galleries) != batchSize { + more = false + } else { + *findFilter.Page++ + } + } + + if j.input.DryRun && len(toDelete) > 0 { + // add progress for galleries that would've been deleted + progress.AddProcessed(len(toDelete)) + } + + if !j.input.DryRun && len(toDelete) > 0 { + progress.ExecuteTask(fmt.Sprintf("Cleaning %d galleries", len(toDelete)), func() { + for _, galleryID := range toDelete { + if job.IsCancelled(ctx) { + return + } + + j.deleteGallery(ctx, galleryID) + + progress.Increment() + } + }) + } + + return nil +} + +func (j *cleanJob) processImages(ctx context.Context, progress *job.Progress, qb models.ImageReader) error { + batchSize := 1000 + + findFilter := models.BatchFindFilter(batchSize) + + // performance consideration: order by path since default ordering by + // title is slow + sortBy := "path" + findFilter.Sort = &sortBy + + var toDelete []int + + more := true + for more { + if job.IsCancelled(ctx) { + return nil + } + + images, err := image.Query(qb, nil, findFilter) + if err != nil { + return fmt.Errorf("error 
querying for images: %w", err) + } + + for _, image := range images { + progress.ExecuteTask(fmt.Sprintf("Assessing image %s for clean", image.Path), func() { + if j.shouldCleanImage(image) { + toDelete = append(toDelete, image.ID) + } else { + // increment progress, no further processing + progress.Increment() + } + }) + } + + if len(images) != batchSize { + more = false + } else { + *findFilter.Page++ + } + } + + if j.input.DryRun && len(toDelete) > 0 { + // add progress for images that would've been deleted + progress.AddProcessed(len(toDelete)) + } + + if !j.input.DryRun && len(toDelete) > 0 { + progress.ExecuteTask(fmt.Sprintf("Cleaning %d images", len(toDelete)), func() { + for _, imageID := range toDelete { + if job.IsCancelled(ctx) { + return + } + + j.deleteImage(ctx, imageID) + + progress.Increment() + } + }) + } + + return nil +} + +func (j *cleanJob) shouldClean(path string) bool { // use image.FileExists for zip file checking fileExists := image.FileExists(path) // #1102 - clean anything in generated path generatedPath := config.GetInstance().GetGeneratedPath() if !fileExists || getStashFromPath(path) == nil || utils.IsPathInDir(generatedPath, path) { - logger.Infof("File not found. Cleaning: \"%s\"", path) + logger.Infof("File not found. Marking to clean: \"%s\"", path) return true } return false } -func (t *CleanTask) shouldCleanScene(s *models.Scene) bool { - if t.shouldClean(s.Path) { +func (j *cleanJob) shouldCleanScene(s *models.Scene) bool { + if j.shouldClean(s.Path) { return true } stash := getStashFromPath(s.Path) if stash.ExcludeVideo { - logger.Infof("File in stash library that excludes video. Cleaning: \"%s\"", s.Path) + logger.Infof("File in stash library that excludes video. Marking to clean: \"%s\"", s.Path) return true } config := config.GetInstance() - if !matchExtension(s.Path, config.GetVideoExtensions()) { - logger.Infof("File extension does not match video extensions. 
Cleaning: \"%s\"", s.Path) + if !utils.MatchExtension(s.Path, config.GetVideoExtensions()) { + logger.Infof("File extension does not match video extensions. Marking to clean: \"%s\"", s.Path) return true } if matchFile(s.Path, config.GetExcludes()) { - logger.Infof("File matched regex. Cleaning: \"%s\"", s.Path) + logger.Infof("File matched regex. Marking to clean: \"%s\"", s.Path) return true } return false } -func (t *CleanTask) shouldCleanGallery(g *models.Gallery) bool { +func (j *cleanJob) shouldCleanGallery(g *models.Gallery) bool { // never clean manually created galleries if !g.Zip { return false } path := g.Path.String - if t.shouldClean(path) { + if j.shouldClean(path) { return true } stash := getStashFromPath(path) if stash.ExcludeImage { - logger.Infof("File in stash library that excludes images. Cleaning: \"%s\"", path) + logger.Infof("File in stash library that excludes images. Marking to clean: \"%s\"", path) return true } config := config.GetInstance() - if !matchExtension(path, config.GetGalleryExtensions()) { - logger.Infof("File extension does not match gallery extensions. Cleaning: \"%s\"", path) + if !utils.MatchExtension(path, config.GetGalleryExtensions()) { + logger.Infof("File extension does not match gallery extensions. Marking to clean: \"%s\"", path) return true } if matchFile(path, config.GetImageExcludes()) { - logger.Infof("File matched regex. Cleaning: \"%s\"", path) + logger.Infof("File matched regex. Marking to clean: \"%s\"", path) return true } if countImagesInZip(path) == 0 { - logger.Infof("Gallery has 0 images. Cleaning: \"%s\"", path) + logger.Infof("Gallery has 0 images. 
Marking to clean: \"%s\"", path) return true } return false } -func (t *CleanTask) shouldCleanImage(s *models.Image) bool { - if t.shouldClean(s.Path) { +func (j *cleanJob) shouldCleanImage(s *models.Image) bool { + if j.shouldClean(s.Path) { return true } stash := getStashFromPath(s.Path) if stash.ExcludeImage { - logger.Infof("File in stash library that excludes images. Cleaning: \"%s\"", s.Path) + logger.Infof("File in stash library that excludes images. Marking to clean: \"%s\"", s.Path) return true } config := config.GetInstance() - if !matchExtension(s.Path, config.GetImageExtensions()) { - logger.Infof("File extension does not match image extensions. Cleaning: \"%s\"", s.Path) + if !utils.MatchExtension(s.Path, config.GetImageExtensions()) { + logger.Infof("File extension does not match image extensions. Marking to clean: \"%s\"", s.Path) return true } if matchFile(s.Path, config.GetImageExcludes()) { - logger.Infof("File matched regex. Cleaning: \"%s\"", s.Path) + logger.Infof("File matched regex. 
Marking to clean: \"%s\"", s.Path) return true } return false } -func (t *CleanTask) deleteScene(sceneID int) { +func (j *cleanJob) deleteScene(ctx context.Context, fileNamingAlgorithm models.HashAlgorithm, sceneID int) { var postCommitFunc func() var scene *models.Scene - if err := t.TxnManager.WithTxn(context.TODO(), func(repo models.Repository) error { + if err := j.txnManager.WithTxn(context.TODO(), func(repo models.Repository) error { qb := repo.Scene() var err error @@ -159,13 +389,13 @@ func (t *CleanTask) deleteScene(sceneID int) { postCommitFunc() - DeleteGeneratedSceneFiles(scene, t.fileNamingAlgorithm) + DeleteGeneratedSceneFiles(scene, fileNamingAlgorithm) - GetInstance().PluginCache.ExecutePostHooks(t.ctx, sceneID, plugin.SceneDestroyPost, nil, nil) + GetInstance().PluginCache.ExecutePostHooks(ctx, sceneID, plugin.SceneDestroyPost, nil, nil) } -func (t *CleanTask) deleteGallery(galleryID int) { - if err := t.TxnManager.WithTxn(context.TODO(), func(repo models.Repository) error { +func (j *cleanJob) deleteGallery(ctx context.Context, galleryID int) { + if err := j.txnManager.WithTxn(context.TODO(), func(repo models.Repository) error { qb := repo.Gallery() return qb.Destroy(galleryID) }); err != nil { @@ -173,26 +403,39 @@ func (t *CleanTask) deleteGallery(galleryID int) { return } - GetInstance().PluginCache.ExecutePostHooks(t.ctx, galleryID, plugin.GalleryDestroyPost, nil, nil) + GetInstance().PluginCache.ExecutePostHooks(ctx, galleryID, plugin.GalleryDestroyPost, nil, nil) } -func (t *CleanTask) deleteImage(imageID int) { +func (j *cleanJob) deleteImage(ctx context.Context, imageID int) { + var checksum string - if err := t.TxnManager.WithTxn(context.TODO(), func(repo models.Repository) error { + if err := j.txnManager.WithTxn(context.TODO(), func(repo models.Repository) error { qb := repo.Image() + image, err := qb.Find(imageID) + if err != nil { + return err + } + + if image == nil { + return fmt.Errorf("image not found: %d", imageID) + } + + 
checksum = image.Checksum + return qb.Destroy(imageID) }); err != nil { logger.Errorf("Error deleting image from database: %s", err.Error()) return } - pathErr := os.Remove(GetInstance().Paths.Generated.GetThumbnailPath(t.Image.Checksum, models.DefaultGthumbWidth)) // remove cache dir of gallery + // remove cache image + pathErr := os.Remove(GetInstance().Paths.Generated.GetThumbnailPath(checksum, models.DefaultGthumbWidth)) if pathErr != nil { logger.Errorf("Error deleting thumbnail image from cache: %s", pathErr) } - GetInstance().PluginCache.ExecutePostHooks(t.ctx, imageID, plugin.ImageDestroyPost, nil, nil) + GetInstance().PluginCache.ExecutePostHooks(ctx, imageID, plugin.ImageDestroyPost, nil, nil) } func getStashFromPath(pathToCheck string) *models.StashConfig { diff --git a/pkg/manager/task_generate.go b/pkg/manager/task_generate.go new file mode 100644 index 000000000..f4daf93f2 --- /dev/null +++ b/pkg/manager/task_generate.go @@ -0,0 +1,292 @@ +package manager + +import ( + "context" + "errors" + "fmt" + "time" + + "github.com/remeh/sizedwaitgroup" + "github.com/stashapp/stash/pkg/job" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/manager/config" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scene" + "github.com/stashapp/stash/pkg/utils" +) + +const generateQueueSize = 200000 + +type GenerateJob struct { + txnManager models.TransactionManager + input models.GenerateMetadataInput + + overwrite bool + fileNamingAlgo models.HashAlgorithm +} + +type totalsGenerate struct { + sprites int64 + previews int64 + imagePreviews int64 + markers int64 + transcodes int64 + phashes int64 + + tasks int +} + +func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) { + var scenes []*models.Scene + var err error + var markers []*models.SceneMarker + + if j.input.Overwrite != nil { + j.overwrite = *j.input.Overwrite + } + j.fileNamingAlgo = config.GetInstance().GetVideoFileNamingAlgorithm() + + config 
:= config.GetInstance() + parallelTasks := config.GetParallelTasksWithAutoDetection() + + logger.Infof("Generate started with %d parallel tasks", parallelTasks) + + queue := make(chan Task, generateQueueSize) + go func() { + defer close(queue) + + var totals totalsGenerate + sceneIDs, err := utils.StringSliceToIntSlice(j.input.SceneIDs) + if err != nil { + logger.Error(err.Error()) + } + markerIDs, err := utils.StringSliceToIntSlice(j.input.MarkerIDs) + if err != nil { + logger.Error(err.Error()) + } + + if err := j.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error { + qb := r.Scene() + if len(j.input.SceneIDs) == 0 && len(j.input.MarkerIDs) == 0 { + totals = j.queueTasks(ctx, queue) + } else { + if len(j.input.SceneIDs) > 0 { + scenes, err = qb.FindMany(sceneIDs) + for _, s := range scenes { + j.queueSceneJobs(s, queue, &totals) + } + } + + if len(j.input.MarkerIDs) > 0 { + markers, err = r.SceneMarker().FindMany(markerIDs) + if err != nil { + return err + } + for _, m := range markers { + j.queueMarkerJob(m, queue, &totals) + } + } + } + + return nil + }); err != nil { + logger.Error(err.Error()) + return + } + + logger.Infof("Generating %d sprites %d previews %d image previews %d markers %d transcodes %d phashes", totals.sprites, totals.previews, totals.imagePreviews, totals.markers, totals.transcodes, totals.phashes) + + progress.SetTotal(int(totals.tasks)) + }() + + wg := sizedwaitgroup.New(parallelTasks) + + // Start measuring how long the generate has taken. 
(consider moving this up) + start := time.Now() + if err = instance.Paths.Generated.EnsureTmpDir(); err != nil { + logger.Warnf("could not create temporary directory: %v", err) + } + + defer func() { + if err := instance.Paths.Generated.EmptyTmpDir(); err != nil { + logger.Warnf("failure emptying temporary directory: %v", err) + } + }() + + for f := range queue { + if job.IsCancelled(ctx) { + break + } + + wg.Add() + // #1879 - need to make a copy of f - otherwise there is a race condition + // where f is changed when the goroutine runs + localTask := f + go progress.ExecuteTask(localTask.GetDescription(), func() { + localTask.Start(ctx) + wg.Done() + progress.Increment() + }) + } + + wg.Wait() + + if job.IsCancelled(ctx) { + logger.Info("Stopping due to user request") + return + } + + elapsed := time.Since(start) + logger.Info(fmt.Sprintf("Generate finished (%s)", elapsed)) +} + +func (j *GenerateJob) queueTasks(ctx context.Context, queue chan<- Task) totalsGenerate { + var totals totalsGenerate + + const batchSize = 1000 + + findFilter := models.BatchFindFilter(batchSize) + + if err := j.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error { + for more := true; more; { + if job.IsCancelled(ctx) { + return context.Canceled + } + + scenes, err := scene.Query(r.Scene(), nil, findFilter) + if err != nil { + return err + } + + for _, ss := range scenes { + if job.IsCancelled(ctx) { + return context.Canceled + } + + j.queueSceneJobs(ss, queue, &totals) + } + + if len(scenes) != batchSize { + more = false + } else { + *findFilter.Page++ + } + } + + return nil + }); err != nil { + if !errors.Is(err, context.Canceled) { + logger.Errorf("Error encountered queuing files to scan: %s", err.Error()) + } + } + + return totals +} + +func (j *GenerateJob) queueSceneJobs(scene *models.Scene, queue chan<- Task, totals *totalsGenerate) { + if utils.IsTrue(j.input.Sprites) { + task := &GenerateSpriteTask{ + Scene: *scene, + Overwrite: j.overwrite, + fileNamingAlgorithm: 
j.fileNamingAlgo, + } + + if j.overwrite || task.required() { + totals.sprites++ + totals.tasks++ + queue <- task + } + } + + if utils.IsTrue(j.input.Previews) { + generatePreviewOptions := j.input.PreviewOptions + if generatePreviewOptions == nil { + generatePreviewOptions = &models.GeneratePreviewOptionsInput{} + } + setGeneratePreviewOptionsInput(generatePreviewOptions) + + task := &GeneratePreviewTask{ + Scene: *scene, + ImagePreview: utils.IsTrue(j.input.ImagePreviews), + Options: *generatePreviewOptions, + Overwrite: j.overwrite, + fileNamingAlgorithm: j.fileNamingAlgo, + } + + sceneHash := scene.GetHash(task.fileNamingAlgorithm) + addTask := false + if j.overwrite || !task.doesVideoPreviewExist(sceneHash) { + totals.previews++ + addTask = true + } + + if utils.IsTrue(j.input.ImagePreviews) && (j.overwrite || !task.doesImagePreviewExist(sceneHash)) { + totals.imagePreviews++ + addTask = true + } + + if addTask { + totals.tasks++ + queue <- task + } + } + + if utils.IsTrue(j.input.Markers) { + task := &GenerateMarkersTask{ + TxnManager: j.txnManager, + Scene: scene, + Overwrite: j.overwrite, + fileNamingAlgorithm: j.fileNamingAlgo, + ImagePreview: utils.IsTrue(j.input.MarkerImagePreviews), + Screenshot: utils.IsTrue(j.input.MarkerScreenshots), + } + + markers := task.markersNeeded() + if markers > 0 { + totals.markers += int64(markers) + totals.tasks++ + + queue <- task + } + } + + if utils.IsTrue(j.input.Transcodes) { + task := &GenerateTranscodeTask{ + Scene: *scene, + Overwrite: j.overwrite, + fileNamingAlgorithm: j.fileNamingAlgo, + } + if task.isTranscodeNeeded() { + totals.transcodes++ + totals.tasks++ + queue <- task + } + } + + if utils.IsTrue(j.input.Phashes) { + task := &GeneratePhashTask{ + Scene: *scene, + fileNamingAlgorithm: j.fileNamingAlgo, + txnManager: j.txnManager, + Overwrite: j.overwrite, + } + + if task.shouldGenerate() { + totals.phashes++ + totals.tasks++ + queue <- task + } + } +} + +func (j *GenerateJob) queueMarkerJob(marker 
*models.SceneMarker, queue chan<- Task, totals *totalsGenerate) { + task := &GenerateMarkersTask{ + TxnManager: j.txnManager, + Marker: marker, + Overwrite: j.overwrite, + fileNamingAlgorithm: j.fileNamingAlgo, + } + totals.markers++ + totals.tasks++ + queue <- task +} diff --git a/pkg/manager/task_generate_markers.go b/pkg/manager/task_generate_markers.go index a89a5e538..e9cf7c074 100644 --- a/pkg/manager/task_generate_markers.go +++ b/pkg/manager/task_generate_markers.go @@ -2,6 +2,7 @@ package manager import ( "context" + "fmt" "path/filepath" "strconv" @@ -22,7 +23,17 @@ type GenerateMarkersTask struct { Screenshot bool } -func (t *GenerateMarkersTask) Start() { +func (t *GenerateMarkersTask) GetDescription() string { + if t.Scene != nil { + return fmt.Sprintf("Generating markers for %s", t.Scene.Path) + } else if t.Marker != nil { + return fmt.Sprintf("Generating marker preview for marker ID %d", t.Marker.ID) + } + + return "Generating markers" +} + +func (t *GenerateMarkersTask) Start(ctx context.Context) { if t.Scene != nil { t.generateSceneMarkers() } @@ -43,7 +54,8 @@ func (t *GenerateMarkersTask) Start() { return } - videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false) + ffprobe := instance.FFProbe + videoFile, err := ffprobe.NewVideoFile(t.Scene.Path, false) if err != nil { logger.Errorf("error reading video file: %s", err.Error()) return @@ -68,7 +80,8 @@ func (t *GenerateMarkersTask) generateSceneMarkers() { return } - videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false) + ffprobe := instance.FFProbe + videoFile, err := ffprobe.NewVideoFile(t.Scene.Path, false) if err != nil { logger.Errorf("error reading video file: %s", err.Error()) return @@ -106,7 +119,7 @@ func (t *GenerateMarkersTask) generateMarker(videoFile *ffmpeg.VideoFile, scene Width: 640, } - encoder := ffmpeg.NewEncoder(instance.FFMPEGPath) + encoder := instance.FFMPEG if t.Overwrite || !videoExists { videoFilename := baseFilename + 
".mp4" @@ -153,7 +166,7 @@ func (t *GenerateMarkersTask) generateMarker(videoFile *ffmpeg.VideoFile, scene } } -func (t *GenerateMarkersTask) isMarkerNeeded() int { +func (t *GenerateMarkersTask) markersNeeded() int { markers := 0 var sceneMarkers []*models.SceneMarker if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { diff --git a/pkg/manager/task_generate_phash.go b/pkg/manager/task_generate_phash.go index 9d2fa172f..30b863cf7 100644 --- a/pkg/manager/task_generate_phash.go +++ b/pkg/manager/task_generate_phash.go @@ -3,8 +3,8 @@ package manager import ( "context" "database/sql" + "fmt" - "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" ) @@ -16,12 +16,17 @@ type GeneratePhashTask struct { txnManager models.TransactionManager } -func (t *GeneratePhashTask) Start() { +func (t *GeneratePhashTask) GetDescription() string { + return fmt.Sprintf("Generating phash for %s", t.Scene.Path) +} + +func (t *GeneratePhashTask) Start(ctx context.Context) { if !t.shouldGenerate() { return } - videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false) + ffprobe := instance.FFProbe + videoFile, err := ffprobe.NewVideoFile(t.Scene.Path, false) if err != nil { logger.Errorf("error reading video file: %s", err.Error()) return diff --git a/pkg/manager/task_generate_preview.go b/pkg/manager/task_generate_preview.go index 01a68f006..556c3fd68 100644 --- a/pkg/manager/task_generate_preview.go +++ b/pkg/manager/task_generate_preview.go @@ -1,7 +1,9 @@ package manager import ( - "github.com/stashapp/stash/pkg/ffmpeg" + "context" + "fmt" + "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/models" @@ -18,7 +20,11 @@ type GeneratePreviewTask struct { fileNamingAlgorithm models.HashAlgorithm } -func (t *GeneratePreviewTask) Start() { +func (t *GeneratePreviewTask) GetDescription() string { + return 
fmt.Sprintf("Generating preview for %s", t.Scene.Path) +} + +func (t *GeneratePreviewTask) Start(ctx context.Context) { videoFilename := t.videoFilename() videoChecksum := t.Scene.GetHash(t.fileNamingAlgorithm) imageFilename := t.imageFilename() @@ -27,7 +33,8 @@ func (t *GeneratePreviewTask) Start() { return } - videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false) + ffprobe := instance.FFProbe + videoFile, err := ffprobe.NewVideoFile(t.Scene.Path, false) if err != nil { logger.Errorf("error reading video file: %s", err.Error()) return diff --git a/pkg/manager/task_generate_screenshot.go b/pkg/manager/task_generate_screenshot.go index 8694c2357..baa3ab107 100644 --- a/pkg/manager/task_generate_screenshot.go +++ b/pkg/manager/task_generate_screenshot.go @@ -7,9 +7,9 @@ import ( "os" "time" - "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scene" ) type GenerateScreenshotTask struct { @@ -19,9 +19,10 @@ type GenerateScreenshotTask struct { txnManager models.TransactionManager } -func (t *GenerateScreenshotTask) Start() { +func (t *GenerateScreenshotTask) Start(ctx context.Context) { scenePath := t.Scene.Path - probeResult, err := ffmpeg.NewVideoFile(instance.FFProbePath, scenePath, false) + ffprobe := instance.FFProbe + probeResult, err := ffprobe.NewVideoFile(scenePath, false) if err != nil { logger.Error(err.Error()) @@ -66,7 +67,7 @@ func (t *GenerateScreenshotTask) Start() { UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime}, } - if err := SetSceneScreenshot(checksum, coverImageData); err != nil { + if err := scene.SetScreenshot(instance.Paths, checksum, coverImageData); err != nil { return fmt.Errorf("error writing screenshot: %v", err) } diff --git a/pkg/manager/task_generate_sprite.go b/pkg/manager/task_generate_sprite.go index 0a21b6011..d47b225f1 100644 --- a/pkg/manager/task_generate_sprite.go +++ 
b/pkg/manager/task_generate_sprite.go @@ -1,7 +1,9 @@ package manager import ( - "github.com/stashapp/stash/pkg/ffmpeg" + "context" + "fmt" + "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" @@ -13,12 +15,17 @@ type GenerateSpriteTask struct { fileNamingAlgorithm models.HashAlgorithm } -func (t *GenerateSpriteTask) Start() { +func (t *GenerateSpriteTask) GetDescription() string { + return fmt.Sprintf("Generating sprites for %s", t.Scene.Path) +} + +func (t *GenerateSpriteTask) Start(ctx context.Context) { if !t.Overwrite && !t.required() { return } - videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false) + ffprobe := instance.FFProbe + videoFile, err := ffprobe.NewVideoFile(t.Scene.Path, false) if err != nil { logger.Errorf("error reading video file: %s", err.Error()) return diff --git a/pkg/manager/task_identify.go b/pkg/manager/task_identify.go new file mode 100644 index 000000000..0e8b789bf --- /dev/null +++ b/pkg/manager/task_identify.go @@ -0,0 +1,242 @@ +package manager + +import ( + "context" + "errors" + "fmt" + "strconv" + + "github.com/stashapp/stash/pkg/identify" + "github.com/stashapp/stash/pkg/job" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scene" + "github.com/stashapp/stash/pkg/scraper" + "github.com/stashapp/stash/pkg/scraper/stashbox" + "github.com/stashapp/stash/pkg/utils" +) + +var ErrInput = errors.New("invalid request input") + +type IdentifyJob struct { + txnManager models.TransactionManager + postHookExecutor identify.SceneUpdatePostHookExecutor + input models.IdentifyMetadataInput + + stashBoxes models.StashBoxes + progress *job.Progress +} + +func CreateIdentifyJob(input models.IdentifyMetadataInput) *IdentifyJob { + return &IdentifyJob{ + txnManager: instance.TxnManager, + postHookExecutor: instance.PluginCache, + input: input, + stashBoxes: instance.Config.GetStashBoxes(), + } 
+} + +func (j *IdentifyJob) Execute(ctx context.Context, progress *job.Progress) { + j.progress = progress + + // if no sources provided - just return + if len(j.input.Sources) == 0 { + return + } + + sources, err := j.getSources() + if err != nil { + logger.Error(err) + return + } + + // if scene ids provided, use those + // otherwise, batch query for all scenes - ordering by path + if err := j.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error { + if len(j.input.SceneIDs) == 0 { + return j.identifyAllScenes(ctx, r, sources) + } + + sceneIDs, err := utils.StringSliceToIntSlice(j.input.SceneIDs) + if err != nil { + return fmt.Errorf("invalid scene IDs: %w", err) + } + + progress.SetTotal(len(sceneIDs)) + for _, id := range sceneIDs { + if job.IsCancelled(ctx) { + break + } + + // find the scene + var err error + scene, err := r.Scene().Find(id) + if err != nil { + return fmt.Errorf("error finding scene with id %d: %w", id, err) + } + + if scene == nil { + return fmt.Errorf("%w: scene with id %d", models.ErrNotFound, id) + } + + j.identifyScene(ctx, scene, sources) + } + + return nil + }); err != nil { + logger.Errorf("Error encountered while identifying scenes: %v", err) + } +} + +func (j *IdentifyJob) identifyAllScenes(ctx context.Context, r models.ReaderRepository, sources []identify.ScraperSource) error { + // exclude organised + organised := false + sceneFilter := scene.FilterFromPaths(j.input.Paths) + sceneFilter.Organized = &organised + + sort := "path" + findFilter := &models.FindFilterType{ + Sort: &sort, + } + + // get the count + pp := 0 + findFilter.PerPage = &pp + countResult, err := r.Scene().Query(models.SceneQueryOptions{ + QueryOptions: models.QueryOptions{ + FindFilter: findFilter, + Count: true, + }, + SceneFilter: sceneFilter, + }) + if err != nil { + return fmt.Errorf("error getting scene count: %w", err) + } + + j.progress.SetTotal(countResult.Count) + + return scene.BatchProcess(ctx, r.Scene(), sceneFilter, findFilter, 
func(scene *models.Scene) error { + if job.IsCancelled(ctx) { + return nil + } + + j.identifyScene(ctx, scene, sources) + return nil + }) +} + +func (j *IdentifyJob) identifyScene(ctx context.Context, s *models.Scene, sources []identify.ScraperSource) { + if job.IsCancelled(ctx) { + return + } + + var taskError error + j.progress.ExecuteTask("Identifying "+s.Path, func() { + task := identify.SceneIdentifier{ + DefaultOptions: j.input.Options, + Sources: sources, + ScreenshotSetter: &scene.PathsScreenshotSetter{ + Paths: instance.Paths, + FileNamingAlgorithm: instance.Config.GetVideoFileNamingAlgorithm(), + }, + SceneUpdatePostHookExecutor: j.postHookExecutor, + } + + taskError = task.Identify(ctx, j.txnManager, s) + }) + + if taskError != nil { + logger.Errorf("Error encountered identifying %s: %v", s.Path, taskError) + } + + j.progress.Increment() +} + +func (j *IdentifyJob) getSources() ([]identify.ScraperSource, error) { + var ret []identify.ScraperSource + for _, source := range j.input.Sources { + // get scraper source + stashBox, err := j.getStashBox(source.Source) + if err != nil { + return nil, err + } + + var src identify.ScraperSource + if stashBox != nil { + src = identify.ScraperSource{ + Name: "stash-box: " + stashBox.Endpoint, + Scraper: stashboxSource{ + stashbox.NewClient(*stashBox, j.txnManager), + stashBox.Endpoint, + }, + RemoteSite: stashBox.Endpoint, + } + } else { + scraperID := *source.Source.ScraperID + s := instance.ScraperCache.GetScraper(scraperID) + if s == nil { + return nil, fmt.Errorf("%w: scraper with id %q", models.ErrNotFound, scraperID) + } + src = identify.ScraperSource{ + Name: s.Name, + Scraper: scraperSource{ + cache: instance.ScraperCache, + scraperID: scraperID, + }, + } + } + + src.Options = source.Options + ret = append(ret, src) + } + + return ret, nil +} + +func (j *IdentifyJob) getStashBox(src *models.ScraperSourceInput) (*models.StashBox, error) { + if src.ScraperID != nil { + return nil, nil + } + + // must be 
stash-box + if src.StashBoxIndex == nil && src.StashBoxEndpoint == nil { + return nil, fmt.Errorf("%w: stash_box_index or stash_box_endpoint or scraper_id must be set", ErrInput) + } + + return j.stashBoxes.ResolveStashBox(*src) +} + +type stashboxSource struct { + *stashbox.Client + endpoint string +} + +func (s stashboxSource) ScrapeScene(sceneID int) (*models.ScrapedScene, error) { + results, err := s.FindStashBoxScenesByFingerprintsFlat([]string{strconv.Itoa(sceneID)}) + if err != nil { + return nil, fmt.Errorf("error querying stash-box using scene ID %d: %w", sceneID, err) + } + + if len(results) > 0 { + return results[0], nil + } + + return nil, nil +} + +func (s stashboxSource) String() string { + return fmt.Sprintf("stash-box %s", s.endpoint) +} + +type scraperSource struct { + cache *scraper.Cache + scraperID string +} + +func (s scraperSource) ScrapeScene(sceneID int) (*models.ScrapedScene, error) { + return s.cache.ScrapeScene(s.scraperID, sceneID) +} + +func (s scraperSource) String() string { + return fmt.Sprintf("scraper %s", s.scraperID) +} diff --git a/pkg/manager/task_import.go b/pkg/manager/task_import.go index 4d9b535ae..70b494d7c 100644 --- a/pkg/manager/task_import.go +++ b/pkg/manager/task_import.go @@ -4,6 +4,7 @@ import ( "archive/zip" "context" "database/sql" + "errors" "fmt" "io" "os" @@ -78,7 +79,7 @@ func (t *ImportTask) GetDescription() string { return "Importing..." 
} -func (t *ImportTask) Start() { +func (t *ImportTask) Start(ctx context.Context) { if t.TmpZip != "" { defer func() { err := utils.RemoveDir(t.BaseDir) @@ -125,8 +126,6 @@ func (t *ImportTask) Start() { } } - ctx := context.TODO() - t.ImportTags(ctx) t.ImportPerformers(ctx) t.ImportStudios(ctx) @@ -239,7 +238,7 @@ func (t *ImportTask) ImportStudios(ctx context.Context) { if err := t.txnManager.WithTxn(ctx, func(r models.Repository) error { return t.ImportStudio(studioJSON, pendingParent, r.Studio()) }); err != nil { - if err == studio.ErrParentStudioNotExist { + if errors.Is(err, studio.ErrParentStudioNotExist) { // add to the pending parent list so that it is created after the parent s := pendingParent[studioJSON.ParentStudio] s = append(s, studioJSON) @@ -391,7 +390,8 @@ func (t *ImportTask) ImportTags(ctx context.Context) { if err := t.txnManager.WithTxn(ctx, func(r models.Repository) error { return t.ImportTag(tagJSON, pendingParent, false, r.Tag()) }); err != nil { - if parentError, ok := err.(tag.ParentTagNotExistError); ok { + var parentError tag.ParentTagNotExistError + if errors.As(err, &parentError) { pendingParent[parentError.MissingParent()] = append(pendingParent[parentError.MissingParent()], tagJSON) continue } @@ -433,7 +433,8 @@ func (t *ImportTask) ImportTag(tagJSON *jsonschema.Tag, pendingParent map[string for _, childTagJSON := range pendingParent[tagJSON.Name] { if err := t.ImportTag(childTagJSON, pendingParent, fail, readerWriter); err != nil { - if parentError, ok := err.(tag.ParentTagNotExistError); ok { + var parentError tag.ParentTagNotExistError + if errors.As(err, &parentError) { pendingParent[parentError.MissingParent()] = append(pendingParent[parentError.MissingParent()], tagJSON) continue } diff --git a/pkg/manager/task_migrate_hash.go b/pkg/manager/task_migrate_hash.go index 3ecdb54d4..e0c7c1131 100644 --- a/pkg/manager/task_migrate_hash.go +++ b/pkg/manager/task_migrate_hash.go @@ -2,6 +2,7 @@ package manager import ( 
"github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scene" ) // MigrateHashTask renames generated files between oshash and MD5 based on the @@ -28,5 +29,5 @@ func (t *MigrateHashTask) Start() { newHash = oshash } - MigrateHash(oldHash, newHash) + scene.MigrateHash(instance.Paths, oldHash, newHash) } diff --git a/pkg/manager/task_scan.go b/pkg/manager/task_scan.go index bec812a7b..3d9af4ccd 100644 --- a/pkg/manager/task_scan.go +++ b/pkg/manager/task_scan.go @@ -1,142 +1,110 @@ package manager import ( - "archive/zip" "context" - "database/sql" "errors" "fmt" "os" "path/filepath" - "strconv" - "strings" "time" "github.com/remeh/sizedwaitgroup" - "github.com/stashapp/stash/pkg/ffmpeg" - "github.com/stashapp/stash/pkg/gallery" - "github.com/stashapp/stash/pkg/image" + "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/plugin" - "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/utils" ) +const scanQueueSize = 200000 + type ScanJob struct { txnManager models.TransactionManager input models.ScanMetadataInput subscriptions *subscriptionManager } +type scanFile struct { + path string + info os.FileInfo + caseSensitiveFs bool +} + func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) { input := j.input paths := getScanPaths(input.Paths) - var total *int - var newFiles *int - progress.ExecuteTask("Counting files to scan...", func() { - total, newFiles = j.neededScan(ctx, paths) - }) - if job.IsCancelled(ctx) { logger.Info("Stopping due to user request") return } - if total == nil || newFiles == nil { - logger.Infof("Taking too long to count content. Skipping...") - logger.Infof("Starting scan") - } else { - logger.Infof("Starting scan of %d files. 
%d New files found", *total, *newFiles) - } - start := time.Now() config := config.GetInstance() parallelTasks := config.GetParallelTasksWithAutoDetection() - logger.Infof("Scan started with %d parallel tasks", parallelTasks) - wg := sizedwaitgroup.New(parallelTasks) - if total != nil { - progress.SetTotal(*total) - } + logger.Infof("Scan started with %d parallel tasks", parallelTasks) + + fileQueue := make(chan scanFile, scanQueueSize) + go func() { + total, newFiles := j.queueFiles(ctx, paths, fileQueue, parallelTasks) + + if !job.IsCancelled(ctx) { + progress.SetTotal(total) + logger.Infof("Finished counting files. Total files to scan: %d, %d new files found", total, newFiles) + } + }() + + wg := sizedwaitgroup.New(parallelTasks) fileNamingAlgo := config.GetVideoFileNamingAlgorithm() calculateMD5 := config.IsCalculateMD5() - stoppingErr := errors.New("stopping") var err error var galleries []string - for _, sp := range paths { - csFs, er := utils.IsFsPathCaseSensitive(sp.Path) - if er != nil { - logger.Warnf("Cannot determine fs case sensitivity: %s", er.Error()) - } + mutexManager := utils.NewMutexManager() - err = walkFilesToScan(sp, func(path string, info os.FileInfo, err error) error { - if job.IsCancelled(ctx) { - return stoppingErr - } - - // #1756 - skip zero length files and directories - if info.IsDir() { - return nil - } - - if info.Size() == 0 { - logger.Infof("Skipping zero-length file: %s", path) - return nil - } - - if isGallery(path) { - galleries = append(galleries, path) - } - - if err := instance.Paths.Generated.EnsureTmpDir(); err != nil { - logger.Warnf("couldn't create temporary directory: %v", err) - } - - wg.Add() - task := ScanTask{ - TxnManager: j.txnManager, - FilePath: path, - UseFileMetadata: utils.IsTrue(input.UseFileMetadata), - StripFileExtension: utils.IsTrue(input.StripFileExtension), - fileNamingAlgorithm: fileNamingAlgo, - calculateMD5: calculateMD5, - GeneratePreview: utils.IsTrue(input.ScanGeneratePreviews), - 
GenerateImagePreview: utils.IsTrue(input.ScanGenerateImagePreviews), - GenerateSprite: utils.IsTrue(input.ScanGenerateSprites), - GeneratePhash: utils.IsTrue(input.ScanGeneratePhashes), - GenerateThumbnails: utils.IsTrue(input.ScanGenerateThumbnails), - progress: progress, - CaseSensitiveFs: csFs, - ctx: ctx, - } - - go func() { - task.Start() - wg.Done() - progress.Increment() - }() - - return nil - }) - - if err == stoppingErr { - logger.Info("Stopping due to user request") + for f := range fileQueue { + if job.IsCancelled(ctx) { break } - if err != nil { - logger.Errorf("Error encountered scanning files: %s", err.Error()) - break + if isGallery(f.path) { + galleries = append(galleries, f.path) } + + if err := instance.Paths.Generated.EnsureTmpDir(); err != nil { + logger.Warnf("couldn't create temporary directory: %v", err) + } + + wg.Add() + task := ScanTask{ + TxnManager: j.txnManager, + file: file.FSFile(f.path, f.info), + UseFileMetadata: utils.IsTrue(input.UseFileMetadata), + StripFileExtension: utils.IsTrue(input.StripFileExtension), + fileNamingAlgorithm: fileNamingAlgo, + calculateMD5: calculateMD5, + GeneratePreview: utils.IsTrue(input.ScanGeneratePreviews), + GenerateImagePreview: utils.IsTrue(input.ScanGenerateImagePreviews), + GenerateSprite: utils.IsTrue(input.ScanGenerateSprites), + GeneratePhash: utils.IsTrue(input.ScanGeneratePhashes), + GenerateThumbnails: utils.IsTrue(input.ScanGenerateThumbnails), + progress: progress, + CaseSensitiveFs: f.caseSensitiveFs, + ctx: ctx, + mutexManager: mutexManager, + } + + go func() { + task.Start(ctx) + wg.Done() + progress.Increment() + }() } wg.Wait() @@ -148,7 +116,12 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) { elapsed := time.Since(start) logger.Info(fmt.Sprintf("Scan finished (%s)", elapsed)) - if job.IsCancelled(ctx) || err != nil { + if job.IsCancelled(ctx) { + logger.Info("Stopping due to user request") + return + } + + if err != nil { return } @@ -157,7 +130,7 @@ func 
(j *ScanJob) Execute(ctx context.Context, progress *job.Progress) { wg.Add() task := ScanTask{ TxnManager: j.txnManager, - FilePath: path, + file: file.FSFile(path, nil), // hopefully info is not needed UseFileMetadata: false, } @@ -170,60 +143,103 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) { j.subscriptions.notify() } -func (j *ScanJob) neededScan(ctx context.Context, paths []*models.StashConfig) (total *int, newFiles *int) { - const timeout = 90 * time.Second +func (j *ScanJob) queueFiles(ctx context.Context, paths []*models.StashConfig, scanQueue chan<- scanFile, parallelTasks int) (total int, newFiles int) { + defer close(scanQueue) - // create a control channel through which to signal the counting loop when the timeout is reached - chTimeout := time.After(timeout) - - logger.Infof("Counting files to scan...") - - t := 0 - n := 0 - - timeoutErr := errors.New("timed out") + wg := sizedwaitgroup.New(parallelTasks) for _, sp := range paths { + csFs, er := utils.IsFsPathCaseSensitive(sp.Path) + if er != nil { + logger.Warnf("Cannot determine fs case sensitivity: %s", er.Error()) + } + err := walkFilesToScan(sp, func(path string, info os.FileInfo, err error) error { - t++ - task := ScanTask{FilePath: path, TxnManager: j.txnManager} - if !task.doesPathExist() { - n++ - } - - //check for timeout - select { - case <-chTimeout: - return timeoutErr - default: - } - // check stop if job.IsCancelled(ctx) { - return timeoutErr + return context.Canceled } + wg.Add() + + go func() { + defer wg.Done() + + // #1756 - skip zero length files and directories + if info.IsDir() { + return + } + + if info.Size() == 0 { + logger.Infof("Skipping zero-length file: %s", path) + return + } + + total++ + if !j.doesPathExist(path) { + newFiles++ + } + + scanQueue <- scanFile{ + path: path, + info: info, + caseSensitiveFs: csFs, + } + }() + return nil }) - if err == timeoutErr { - // timeout should return nil counts - return nil, nil - } + wg.Wait() - if err != 
nil { - logger.Errorf("Error encountered counting files to scan: %s", err.Error()) - return nil, nil + if err != nil && !errors.Is(err, context.Canceled) { + logger.Errorf("Error encountered queuing files to scan: %s", err.Error()) + return } } - return &t, &n + return +} + +func (j *ScanJob) doesPathExist(path string) bool { + config := config.GetInstance() + vidExt := config.GetVideoExtensions() + imgExt := config.GetImageExtensions() + gExt := config.GetGalleryExtensions() + + ret := false + txnErr := j.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + switch { + case utils.MatchExtension(path, gExt): + g, _ := r.Gallery().FindByPath(path) + if g != nil { + ret = true + } + case utils.MatchExtension(path, vidExt): + s, _ := r.Scene().FindByPath(path) + if s != nil { + ret = true + } + case utils.MatchExtension(path, imgExt): + i, _ := r.Image().FindByPath(path) + if i != nil { + ret = true + } + } + + return nil + }) + if txnErr != nil { + logger.Warnf("error checking if file exists in database: %v", txnErr) + } + + return ret } type ScanTask struct { ctx context.Context TxnManager models.TransactionManager - FilePath string + file file.SourceFile UseFileMetadata bool StripFileExtension bool calculateMD5 bool @@ -236,1127 +252,92 @@ type ScanTask struct { zipGallery *models.Gallery progress *job.Progress CaseSensitiveFs bool + + mutexManager *utils.MutexManager } -func (t *ScanTask) Start() { +func (t *ScanTask) Start(ctx context.Context) { var s *models.Scene - - t.progress.ExecuteTask("Scanning "+t.FilePath, func() { - if isGallery(t.FilePath) { - t.scanGallery() - } else if isVideo(t.FilePath) { + path := t.file.Path() + t.progress.ExecuteTask("Scanning "+path, func() { + switch { + case isGallery(path): + t.scanGallery(ctx) + case isVideo(path): s = t.scanScene() - } else if isImage(t.FilePath) { + case isImage(path): t.scanImage() } }) - if s != nil { - iwg := sizedwaitgroup.New(2) - - if t.GenerateSprite { - iwg.Add() - - go 
t.progress.ExecuteTask(fmt.Sprintf("Generating sprites for %s", t.FilePath), func() { - taskSprite := GenerateSpriteTask{ - Scene: *s, - Overwrite: false, - fileNamingAlgorithm: t.fileNamingAlgorithm, - } - taskSprite.Start() - iwg.Done() - }) - } - - if t.GeneratePhash { - iwg.Add() - - go t.progress.ExecuteTask(fmt.Sprintf("Generating phash for %s", t.FilePath), func() { - taskPhash := GeneratePhashTask{ - Scene: *s, - fileNamingAlgorithm: t.fileNamingAlgorithm, - txnManager: t.TxnManager, - } - taskPhash.Start() - iwg.Done() - }) - } - - if t.GeneratePreview { - iwg.Add() - - go t.progress.ExecuteTask(fmt.Sprintf("Generating preview for %s", t.FilePath), func() { - config := config.GetInstance() - var previewSegmentDuration = config.GetPreviewSegmentDuration() - var previewSegments = config.GetPreviewSegments() - var previewExcludeStart = config.GetPreviewExcludeStart() - var previewExcludeEnd = config.GetPreviewExcludeEnd() - var previewPresent = config.GetPreviewPreset() - - // NOTE: the reuse of this model like this is painful. 
- previewOptions := models.GeneratePreviewOptionsInput{ - PreviewSegments: &previewSegments, - PreviewSegmentDuration: &previewSegmentDuration, - PreviewExcludeStart: &previewExcludeStart, - PreviewExcludeEnd: &previewExcludeEnd, - PreviewPreset: &previewPresent, - } - - taskPreview := GeneratePreviewTask{ - Scene: *s, - ImagePreview: t.GenerateImagePreview, - Options: previewOptions, - Overwrite: false, - fileNamingAlgorithm: t.fileNamingAlgorithm, - } - taskPreview.Start() - iwg.Done() - }) - } - - iwg.Wait() - } -} - -func (t *ScanTask) scanGallery() { - var g *models.Gallery - images := 0 - scanImages := false - - if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { - var err error - g, err = r.Gallery().FindByPath(t.FilePath) - - if g != nil && err != nil { - images, err = r.Image().CountByGalleryID(g.ID) - if err != nil { - return fmt.Errorf("error getting images for zip gallery %s: %s", t.FilePath, err.Error()) - } - } - - return err - }); err != nil { - logger.Error(err.Error()) + if s == nil { return } - fileModTime, err := t.getFileModTime() - if err != nil { - logger.Error(err.Error()) - return + // Handle the case of a scene + iwg := sizedwaitgroup.New(2) + + if t.GenerateSprite { + iwg.Add() + + go t.progress.ExecuteTask(fmt.Sprintf("Generating sprites for %s", path), func() { + taskSprite := GenerateSpriteTask{ + Scene: *s, + Overwrite: false, + fileNamingAlgorithm: t.fileNamingAlgorithm, + } + taskSprite.Start(ctx) + iwg.Done() + }) } - if g != nil { - // We already have this item in the database, keep going + if t.GeneratePhash { + iwg.Add() - // if file mod time is not set, set it now - if !g.FileModTime.Valid { - // we will also need to rescan the zip contents - scanImages = true - logger.Infof("setting file modification time on %s", t.FilePath) - - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - qb := r.Gallery() - if _, err := gallery.UpdateFileModTime(qb, g.ID, 
models.NullSQLiteTimestamp{ - Timestamp: fileModTime, - Valid: true, - }); err != nil { - return err - } - - // update our copy of the gallery - var err error - g, err = qb.Find(g.ID) - return err - }); err != nil { - logger.Error(err.Error()) - return + go t.progress.ExecuteTask(fmt.Sprintf("Generating phash for %s", path), func() { + taskPhash := GeneratePhashTask{ + Scene: *s, + fileNamingAlgorithm: t.fileNamingAlgorithm, + txnManager: t.TxnManager, } - } + taskPhash.Start(ctx) + iwg.Done() + }) + } - // if the mod time of the zip file is different than that of the associated - // gallery, then recalculate the checksum - modified := t.isFileModified(fileModTime, g.FileModTime) - if modified { - scanImages = true - logger.Infof("%s has been updated: rescanning", t.FilePath) + if t.GeneratePreview { + iwg.Add() - // update the checksum and the modification time - checksum, err := t.calculateChecksum() - if err != nil { - logger.Error(err.Error()) - return + go t.progress.ExecuteTask(fmt.Sprintf("Generating preview for %s", path), func() { + config := config.GetInstance() + var previewSegmentDuration = config.GetPreviewSegmentDuration() + var previewSegments = config.GetPreviewSegments() + var previewExcludeStart = config.GetPreviewExcludeStart() + var previewExcludeEnd = config.GetPreviewExcludeEnd() + var previewPresent = config.GetPreviewPreset() + + // NOTE: the reuse of this model like this is painful. 
+ previewOptions := models.GeneratePreviewOptionsInput{ + PreviewSegments: &previewSegments, + PreviewSegmentDuration: &previewSegmentDuration, + PreviewExcludeStart: &previewExcludeStart, + PreviewExcludeEnd: &previewExcludeEnd, + PreviewPreset: &previewPresent, } - currentTime := time.Now() - galleryPartial := models.GalleryPartial{ - ID: g.ID, - Checksum: &checksum, - FileModTime: &models.NullSQLiteTimestamp{ - Timestamp: fileModTime, - Valid: true, - }, - UpdatedAt: &models.SQLiteTimestamp{Timestamp: currentTime}, + taskPreview := GeneratePreviewTask{ + Scene: *s, + ImagePreview: t.GenerateImagePreview, + Options: previewOptions, + Overwrite: false, + fileNamingAlgorithm: t.fileNamingAlgorithm, } - - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - _, err := r.Gallery().UpdatePartial(galleryPartial) - return err - }); err != nil { - logger.Error(err.Error()) - return - } - } - - // scan the zip files if the gallery has no images - scanImages = scanImages || images == 0 - } else { - checksum, err := t.calculateChecksum() - if err != nil { - logger.Error(err.Error()) - return - } - - isNewGallery := false - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - qb := r.Gallery() - g, _ = qb.FindByChecksum(checksum) - if g != nil { - exists, _ := utils.FileExists(g.Path.String) - if !t.CaseSensitiveFs { - // #1426 - if file exists but is a case-insensitive match for the - // original filename, then treat it as a move - if exists && strings.EqualFold(t.FilePath, g.Path.String) { - exists = false - } - } - - if exists { - logger.Infof("%s already exists. Duplicate of %s ", t.FilePath, g.Path.String) - } else { - logger.Infof("%s already exists. 
Updating path...", t.FilePath) - g.Path = sql.NullString{ - String: t.FilePath, - Valid: true, - } - g, err = qb.Update(*g) - if err != nil { - return err - } - - GetInstance().PluginCache.ExecutePostHooks(t.ctx, g.ID, plugin.GalleryUpdatePost, nil, nil) - } - } else { - currentTime := time.Now() - - newGallery := models.Gallery{ - Checksum: checksum, - Zip: true, - Path: sql.NullString{ - String: t.FilePath, - Valid: true, - }, - FileModTime: models.NullSQLiteTimestamp{ - Timestamp: fileModTime, - Valid: true, - }, - Title: sql.NullString{ - String: utils.GetNameFromPath(t.FilePath, t.StripFileExtension), - Valid: true, - }, - CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, - UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, - } - - // don't create gallery if it has no images - if countImagesInZip(t.FilePath) > 0 { - // only warn when creating the gallery - ok, err := utils.IsZipFileUncompressed(t.FilePath) - if err == nil && !ok { - logger.Warnf("%s is using above store (0) level compression.", t.FilePath) - } - - logger.Infof("%s doesn't exist. 
Creating new item...", t.FilePath) - g, err = qb.Create(newGallery) - if err != nil { - return err - } - scanImages = true - - isNewGallery = true - } - } - - return nil - }); err != nil { - logger.Error(err.Error()) - return - } - - if isNewGallery { - GetInstance().PluginCache.ExecutePostHooks(t.ctx, g.ID, plugin.GalleryCreatePost, nil, nil) - } + taskPreview.Start(ctx) + iwg.Done() + }) } - if g != nil { - if scanImages { - t.scanZipImages(g) - } else { - // in case thumbnails have been deleted, regenerate them - t.regenerateZipImages(g) - } - } -} - -func (t *ScanTask) getFileModTime() (time.Time, error) { - fi, err := os.Stat(t.FilePath) - if err != nil { - return time.Time{}, fmt.Errorf("error performing stat on %s: %s", t.FilePath, err.Error()) - } - - ret := fi.ModTime() - // truncate to seconds, since we don't store beyond that in the database - ret = ret.Truncate(time.Second) - - return ret, nil -} - -func (t *ScanTask) getInteractive() bool { - _, err := os.Stat(utils.GetFunscriptPath(t.FilePath)) - return err == nil - -} - -func (t *ScanTask) isFileModified(fileModTime time.Time, modTime models.NullSQLiteTimestamp) bool { - return !modTime.Timestamp.Equal(fileModTime) -} - -// associates a gallery to a scene with the same basename -func (t *ScanTask) associateGallery(wg *sizedwaitgroup.SizedWaitGroup) { - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - qb := r.Gallery() - sqb := r.Scene() - g, err := qb.FindByPath(t.FilePath) - if err != nil { - return err - } - - if g == nil { - // associate is run after scan is finished - // should only happen if gallery is a directory or an io error occurs during hashing - logger.Warnf("associate: gallery %s not found in DB", t.FilePath) - return nil - } - - basename := strings.TrimSuffix(t.FilePath, filepath.Ext(t.FilePath)) - var relatedFiles []string - vExt := config.GetInstance().GetVideoExtensions() - // make a list of media files that can be related to the gallery - for _, ext 
:= range vExt { - related := basename + "." + ext - // exclude gallery extensions from the related files - if !isGallery(related) { - relatedFiles = append(relatedFiles, related) - } - } - for _, scenePath := range relatedFiles { - scene, _ := sqb.FindByPath(scenePath) - // found related Scene - if scene != nil { - sceneGalleries, _ := sqb.FindByGalleryID(g.ID) // check if gallery is already associated to the scene - isAssoc := false - for _, sg := range sceneGalleries { - if scene.ID == sg.ID { - isAssoc = true - break - } - } - if !isAssoc { - logger.Infof("associate: Gallery %s is related to scene: %d", t.FilePath, scene.ID) - if err := sqb.UpdateGalleries(scene.ID, []int{g.ID}); err != nil { - return err - } - } - } - } - return nil - }); err != nil { - logger.Error(err.Error()) - } - wg.Done() -} - -func (t *ScanTask) scanScene() *models.Scene { - logError := func(err error) *models.Scene { - logger.Error(err.Error()) - return nil - } - - var retScene *models.Scene - var s *models.Scene - - if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { - var err error - s, err = r.Scene().FindByPath(t.FilePath) - return err - }); err != nil { - logger.Error(err.Error()) - return nil - } - - fileModTime, err := t.getFileModTime() - if err != nil { - return logError(err) - } - interactive := t.getInteractive() - - if s != nil { - // if file mod time is not set, set it now - if !s.FileModTime.Valid { - logger.Infof("setting file modification time on %s", t.FilePath) - - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - qb := r.Scene() - if _, err := scene.UpdateFileModTime(qb, s.ID, models.NullSQLiteTimestamp{ - Timestamp: fileModTime, - Valid: true, - }); err != nil { - return err - } - - // update our copy of the scene - var err error - s, err = qb.Find(s.ID) - return err - }); err != nil { - return logError(err) - } - } - - // if the mod time of the file is different than that of the associated - // 
scene, then recalculate the checksum and regenerate the thumbnail - modified := t.isFileModified(fileModTime, s.FileModTime) - config := config.GetInstance() - if modified || !s.Size.Valid { - oldHash := s.GetHash(config.GetVideoFileNamingAlgorithm()) - s, err = t.rescanScene(s, fileModTime) - if err != nil { - return logError(err) - } - - // Migrate any generated files if the hash has changed - newHash := s.GetHash(config.GetVideoFileNamingAlgorithm()) - if newHash != oldHash { - MigrateHash(oldHash, newHash) - } - } - - // We already have this item in the database - // check for thumbnails,screenshots - t.makeScreenshots(nil, s.GetHash(t.fileNamingAlgorithm)) - - // check for container - if !s.Format.Valid { - videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.FilePath, t.StripFileExtension) - if err != nil { - return logError(err) - } - container := ffmpeg.MatchContainer(videoFile.Container, t.FilePath) - logger.Infof("Adding container %s to file %s", container, t.FilePath) - - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - _, err := scene.UpdateFormat(r.Scene(), s.ID, string(container)) - return err - }); err != nil { - return logError(err) - } - } - - // check if oshash is set - if !s.OSHash.Valid { - logger.Infof("Calculating oshash for existing file %s ...", t.FilePath) - oshash, err := utils.OSHashFromFilePath(t.FilePath) - if err != nil { - return nil - } - - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - qb := r.Scene() - // check if oshash clashes with existing scene - dupe, _ := qb.FindByOSHash(oshash) - if dupe != nil { - return fmt.Errorf("OSHash for file %s is the same as that of %s", t.FilePath, dupe.Path) - } - - _, err := scene.UpdateOSHash(qb, s.ID, oshash) - return err - }); err != nil { - return logError(err) - } - } - - // check if MD5 is set, if calculateMD5 is true - if t.calculateMD5 && !s.Checksum.Valid { - checksum, err := t.calculateChecksum() - if err != 
nil { - return logError(err) - } - - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - qb := r.Scene() - // check if checksum clashes with existing scene - dupe, _ := qb.FindByChecksum(checksum) - if dupe != nil { - return fmt.Errorf("MD5 for file %s is the same as that of %s", t.FilePath, dupe.Path) - } - - _, err := scene.UpdateChecksum(qb, s.ID, checksum) - return err - }); err != nil { - return logError(err) - } - } - - if s.Interactive != interactive { - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - qb := r.Scene() - scenePartial := models.ScenePartial{ - ID: s.ID, - Interactive: &interactive, - } - _, err := qb.Update(scenePartial) - return err - }); err != nil { - return logError(err) - } - } - - return nil - } - - videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.FilePath, t.StripFileExtension) - if err != nil { - logger.Error(err.Error()) - return nil - } - container := ffmpeg.MatchContainer(videoFile.Container, t.FilePath) - - // Override title to be filename if UseFileMetadata is false - if !t.UseFileMetadata { - videoFile.SetTitleFromPath(t.StripFileExtension) - } - - var checksum string - - logger.Infof("%s not found. 
Calculating oshash...", t.FilePath) - oshash, err := utils.OSHashFromFilePath(t.FilePath) - if err != nil { - return logError(err) - } - - if t.fileNamingAlgorithm == models.HashAlgorithmMd5 || t.calculateMD5 { - checksum, err = t.calculateChecksum() - if err != nil { - return logError(err) - } - } - - // check for scene by checksum and oshash - MD5 should be - // redundant, but check both - txnErr := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { - qb := r.Scene() - if checksum != "" { - s, _ = qb.FindByChecksum(checksum) - } - - if s == nil { - s, _ = qb.FindByOSHash(oshash) - } - - return nil - }) - if txnErr != nil { - logger.Warnf("error in read transaction: %v", txnErr) - } - - sceneHash := oshash - - if t.fileNamingAlgorithm == models.HashAlgorithmMd5 { - sceneHash = checksum - } - - t.makeScreenshots(videoFile, sceneHash) - - if s != nil { - exists, _ := utils.FileExists(s.Path) - if !t.CaseSensitiveFs { - // #1426 - if file exists but is a case-insensitive match for the - // original filename, then treat it as a move - if exists && strings.EqualFold(t.FilePath, s.Path) { - exists = false - } - } - - if exists { - logger.Infof("%s already exists. Duplicate of %s", t.FilePath, s.Path) - } else { - logger.Infof("%s already exists. Updating path...", t.FilePath) - scenePartial := models.ScenePartial{ - ID: s.ID, - Path: &t.FilePath, - Interactive: &interactive, - } - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - _, err := r.Scene().Update(scenePartial) - return err - }); err != nil { - return logError(err) - } - - GetInstance().PluginCache.ExecutePostHooks(t.ctx, s.ID, plugin.SceneUpdatePost, nil, nil) - } - } else { - logger.Infof("%s doesn't exist. 
Creating new item...", t.FilePath) - currentTime := time.Now() - newScene := models.Scene{ - Checksum: sql.NullString{String: checksum, Valid: checksum != ""}, - OSHash: sql.NullString{String: oshash, Valid: oshash != ""}, - Path: t.FilePath, - Title: sql.NullString{String: videoFile.Title, Valid: true}, - Duration: sql.NullFloat64{Float64: videoFile.Duration, Valid: true}, - VideoCodec: sql.NullString{String: videoFile.VideoCodec, Valid: true}, - AudioCodec: sql.NullString{String: videoFile.AudioCodec, Valid: true}, - Format: sql.NullString{String: string(container), Valid: true}, - Width: sql.NullInt64{Int64: int64(videoFile.Width), Valid: true}, - Height: sql.NullInt64{Int64: int64(videoFile.Height), Valid: true}, - Framerate: sql.NullFloat64{Float64: videoFile.FrameRate, Valid: true}, - Bitrate: sql.NullInt64{Int64: videoFile.Bitrate, Valid: true}, - Size: sql.NullString{String: strconv.FormatInt(videoFile.Size, 10), Valid: true}, - FileModTime: models.NullSQLiteTimestamp{ - Timestamp: fileModTime, - Valid: true, - }, - CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, - UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, - Interactive: interactive, - } - - if t.UseFileMetadata { - newScene.Details = sql.NullString{String: videoFile.Comment, Valid: true} - newScene.Date = models.SQLiteDate{String: videoFile.CreationTime.Format("2006-01-02")} - } - - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - var err error - retScene, err = r.Scene().Create(newScene) - return err - }); err != nil { - return logError(err) - } - - GetInstance().PluginCache.ExecutePostHooks(t.ctx, retScene.ID, plugin.SceneCreatePost, nil, nil) - } - - return retScene -} - -func (t *ScanTask) rescanScene(s *models.Scene, fileModTime time.Time) (*models.Scene, error) { - logger.Infof("%s has been updated: rescanning", t.FilePath) - - // update the oshash/checksum and the modification time - logger.Infof("Calculating oshash for existing file %s 
...", t.FilePath) - oshash, err := utils.OSHashFromFilePath(t.FilePath) - if err != nil { - return nil, err - } - - var checksum *sql.NullString - if t.calculateMD5 { - cs, err := t.calculateChecksum() - if err != nil { - return nil, err - } - - checksum = &sql.NullString{ - String: cs, - Valid: true, - } - } - - // regenerate the file details as well - videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.FilePath, t.StripFileExtension) - if err != nil { - return nil, err - } - container := ffmpeg.MatchContainer(videoFile.Container, t.FilePath) - - currentTime := time.Now() - scenePartial := models.ScenePartial{ - ID: s.ID, - Checksum: checksum, - OSHash: &sql.NullString{ - String: oshash, - Valid: true, - }, - Duration: &sql.NullFloat64{Float64: videoFile.Duration, Valid: true}, - VideoCodec: &sql.NullString{String: videoFile.VideoCodec, Valid: true}, - AudioCodec: &sql.NullString{String: videoFile.AudioCodec, Valid: true}, - Format: &sql.NullString{String: string(container), Valid: true}, - Width: &sql.NullInt64{Int64: int64(videoFile.Width), Valid: true}, - Height: &sql.NullInt64{Int64: int64(videoFile.Height), Valid: true}, - Framerate: &sql.NullFloat64{Float64: videoFile.FrameRate, Valid: true}, - Bitrate: &sql.NullInt64{Int64: videoFile.Bitrate, Valid: true}, - Size: &sql.NullString{String: strconv.FormatInt(videoFile.Size, 10), Valid: true}, - FileModTime: &models.NullSQLiteTimestamp{ - Timestamp: fileModTime, - Valid: true, - }, - UpdatedAt: &models.SQLiteTimestamp{Timestamp: currentTime}, - } - - var ret *models.Scene - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - var err error - ret, err = r.Scene().Update(scenePartial) - return err - }); err != nil { - logger.Error(err.Error()) - return nil, err - } - - GetInstance().PluginCache.ExecutePostHooks(t.ctx, ret.ID, plugin.SceneUpdatePost, nil, nil) - - // leave the generated files as is - the scene file may have been moved - // elsewhere - - return ret, nil -} 
-func (t *ScanTask) makeScreenshots(probeResult *ffmpeg.VideoFile, checksum string) { - thumbPath := instance.Paths.Scene.GetThumbnailScreenshotPath(checksum) - normalPath := instance.Paths.Scene.GetScreenshotPath(checksum) - - thumbExists, _ := utils.FileExists(thumbPath) - normalExists, _ := utils.FileExists(normalPath) - - if thumbExists && normalExists { - return - } - - if probeResult == nil { - var err error - probeResult, err = ffmpeg.NewVideoFile(instance.FFProbePath, t.FilePath, t.StripFileExtension) - - if err != nil { - logger.Error(err.Error()) - return - } - logger.Infof("Regenerating images for %s", t.FilePath) - } - - at := float64(probeResult.Duration) * 0.2 - - if !thumbExists { - logger.Debugf("Creating thumbnail for %s", t.FilePath) - makeScreenshot(*probeResult, thumbPath, 5, 320, at) - } - - if !normalExists { - logger.Debugf("Creating screenshot for %s", t.FilePath) - makeScreenshot(*probeResult, normalPath, 2, probeResult.Width, at) - } -} - -func (t *ScanTask) scanZipImages(zipGallery *models.Gallery) { - err := walkGalleryZip(zipGallery.Path.String, func(file *zip.File) error { - // copy this task and change the filename - subTask := *t - - // filepath is the zip file and the internal file name, separated by a null byte - subTask.FilePath = image.ZipFilename(zipGallery.Path.String, file.Name) - subTask.zipGallery = zipGallery - - // run the subtask and wait for it to complete - subTask.Start() - return nil - }) - if err != nil { - logger.Warnf("failed to scan zip file images for %s: %s", zipGallery.Path.String, err.Error()) - } -} - -func (t *ScanTask) regenerateZipImages(zipGallery *models.Gallery) { - var images []*models.Image - if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { - iqb := r.Image() - - var err error - images, err = iqb.FindByGalleryID(zipGallery.ID) - return err - }); err != nil { - logger.Warnf("failed to find gallery images: %s", err.Error()) - return - } - - for _, img := range 
images { - t.generateThumbnail(img) - } -} - -func (t *ScanTask) scanImage() { - var i *models.Image - - if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { - var err error - i, err = r.Image().FindByPath(t.FilePath) - return err - }); err != nil { - logger.Error(err.Error()) - return - } - - fileModTime, err := image.GetFileModTime(t.FilePath) - if err != nil { - logger.Error(err.Error()) - return - } - - if i != nil { - // if file mod time is not set, set it now - if !i.FileModTime.Valid { - logger.Infof("setting file modification time on %s", t.FilePath) - - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - qb := r.Image() - if _, err := image.UpdateFileModTime(qb, i.ID, models.NullSQLiteTimestamp{ - Timestamp: fileModTime, - Valid: true, - }); err != nil { - return err - } - - // update our copy of the gallery - var err error - i, err = qb.Find(i.ID) - return err - }); err != nil { - logger.Error(err.Error()) - return - } - } - - // if the mod time of the file is different than that of the associated - // image, then recalculate the checksum and regenerate the thumbnail - modified := t.isFileModified(fileModTime, i.FileModTime) - if modified { - i, err = t.rescanImage(i, fileModTime) - if err != nil { - logger.Error(err.Error()) - return - } - } - - // We already have this item in the database - // check for thumbnails - t.generateThumbnail(i) - } else { - var checksum string - - logger.Infof("%s not found. 
Calculating checksum...", t.FilePath) - checksum, err = t.calculateImageChecksum() - if err != nil { - logger.Errorf("error calculating checksum for %s: %s", t.FilePath, err.Error()) - return - } - - // check for scene by checksum and oshash - MD5 should be - // redundant, but check both - if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { - var err error - i, err = r.Image().FindByChecksum(checksum) - return err - }); err != nil { - logger.Error(err.Error()) - return - } - - if i != nil { - exists := image.FileExists(i.Path) - if !t.CaseSensitiveFs { - // #1426 - if file exists but is a case-insensitive match for the - // original filename, then treat it as a move - if exists && strings.EqualFold(t.FilePath, i.Path) { - exists = false - } - } - - if exists { - logger.Infof("%s already exists. Duplicate of %s ", image.PathDisplayName(t.FilePath), image.PathDisplayName(i.Path)) - } else { - logger.Infof("%s already exists. Updating path...", image.PathDisplayName(t.FilePath)) - imagePartial := models.ImagePartial{ - ID: i.ID, - Path: &t.FilePath, - } - - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - _, err := r.Image().Update(imagePartial) - return err - }); err != nil { - logger.Error(err.Error()) - return - } - - GetInstance().PluginCache.ExecutePostHooks(t.ctx, i.ID, plugin.ImageUpdatePost, nil, nil) - } - } else { - logger.Infof("%s doesn't exist. 
Creating new item...", image.PathDisplayName(t.FilePath)) - currentTime := time.Now() - newImage := models.Image{ - Checksum: checksum, - Path: t.FilePath, - FileModTime: models.NullSQLiteTimestamp{ - Timestamp: fileModTime, - Valid: true, - }, - CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, - UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, - } - newImage.Title.String = image.GetFilename(&newImage, t.StripFileExtension) - newImage.Title.Valid = true - - if err := image.SetFileDetails(&newImage); err != nil { - logger.Error(err.Error()) - return - } - - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - var err error - i, err = r.Image().Create(newImage) - return err - }); err != nil { - logger.Error(err.Error()) - return - } - - GetInstance().PluginCache.ExecutePostHooks(t.ctx, i.ID, plugin.ImageCreatePost, nil, nil) - } - - if t.zipGallery != nil { - // associate with gallery - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - return gallery.AddImage(r.Gallery(), t.zipGallery.ID, i.ID) - }); err != nil { - logger.Error(err.Error()) - return - } - } else if config.GetInstance().GetCreateGalleriesFromFolders() { - // create gallery from folder or associate with existing gallery - logger.Infof("Associating image %s with folder gallery", i.Path) - var galleryID int - var isNewGallery bool - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - var err error - galleryID, isNewGallery, err = t.associateImageWithFolderGallery(i.ID, r.Gallery()) - return err - }); err != nil { - logger.Error(err.Error()) - return - } - - if isNewGallery { - GetInstance().PluginCache.ExecutePostHooks(t.ctx, galleryID, plugin.GalleryCreatePost, nil, nil) - } - } - } - - if i != nil { - t.generateThumbnail(i) - } -} - -func (t *ScanTask) rescanImage(i *models.Image, fileModTime time.Time) (*models.Image, error) { - logger.Infof("%s has been updated: rescanning", t.FilePath) - 
- oldChecksum := i.Checksum - - // update the checksum and the modification time - checksum, err := t.calculateImageChecksum() - if err != nil { - return nil, err - } - - // regenerate the file details as well - fileDetails, err := image.GetFileDetails(t.FilePath) - if err != nil { - return nil, err - } - - currentTime := time.Now() - imagePartial := models.ImagePartial{ - ID: i.ID, - Checksum: &checksum, - Width: &fileDetails.Width, - Height: &fileDetails.Height, - Size: &fileDetails.Size, - FileModTime: &models.NullSQLiteTimestamp{ - Timestamp: fileModTime, - Valid: true, - }, - UpdatedAt: &models.SQLiteTimestamp{Timestamp: currentTime}, - } - - var ret *models.Image - if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { - var err error - ret, err = r.Image().Update(imagePartial) - return err - }); err != nil { - return nil, err - } - - // remove the old thumbnail if the checksum changed - we'll regenerate it - if oldChecksum != checksum { - err = os.Remove(GetInstance().Paths.Generated.GetThumbnailPath(oldChecksum, models.DefaultGthumbWidth)) // remove cache dir of gallery - if err != nil { - logger.Errorf("Error deleting thumbnail image: %s", err) - } - } - - GetInstance().PluginCache.ExecutePostHooks(t.ctx, ret.ID, plugin.ImageUpdatePost, nil, nil) - - return ret, nil -} - -func (t *ScanTask) associateImageWithFolderGallery(imageID int, qb models.GalleryReaderWriter) (galleryID int, isNew bool, err error) { - // find a gallery with the path specified - path := filepath.Dir(t.FilePath) - var g *models.Gallery - g, err = qb.FindByPath(path) - if err != nil { - return - } - - if g == nil { - checksum := utils.MD5FromString(path) - - // create the gallery - currentTime := time.Now() - - newGallery := models.Gallery{ - Checksum: checksum, - Path: sql.NullString{ - String: path, - Valid: true, - }, - CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, - UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, - Title: 
sql.NullString{ - String: utils.GetNameFromPath(path, false), - Valid: true, - }, - } - - logger.Infof("Creating gallery for folder %s", path) - g, err = qb.Create(newGallery) - if err != nil { - return 0, false, err - } - - isNew = true - } - - // associate image with gallery - err = gallery.AddImage(qb, g.ID, imageID) - galleryID = g.ID - return -} - -func (t *ScanTask) generateThumbnail(i *models.Image) { - if !t.GenerateThumbnails { - return - } - - thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum, models.DefaultGthumbWidth) - exists, _ := utils.FileExists(thumbPath) - if exists { - return - } - - config, _, err := image.DecodeSourceImage(i) - if err != nil { - logger.Errorf("error reading image %s: %s", i.Path, err.Error()) - return - } - - if config.Height > models.DefaultGthumbWidth || config.Width > models.DefaultGthumbWidth { - encoder := image.NewThumbnailEncoder(instance.FFMPEGPath) - data, err := encoder.GetThumbnail(i, models.DefaultGthumbWidth) - - if err != nil { - logger.Errorf("error getting thumbnail for image %s: %s", i.Path, err.Error()) - return - } - - err = utils.WriteFile(thumbPath, data) - if err != nil { - logger.Errorf("error writing thumbnail for image %s: %s", i.Path, err) - } - } -} - -func (t *ScanTask) calculateChecksum() (string, error) { - logger.Infof("Calculating checksum for %s...", t.FilePath) - checksum, err := utils.MD5FromFilePath(t.FilePath) - if err != nil { - return "", err - } - logger.Debugf("Checksum calculated: %s", checksum) - return checksum, nil -} - -func (t *ScanTask) calculateImageChecksum() (string, error) { - logger.Infof("Calculating checksum for %s...", image.PathDisplayName(t.FilePath)) - // uses image.CalculateMD5 to read files in zips - checksum, err := image.CalculateMD5(t.FilePath) - if err != nil { - return "", err - } - logger.Debugf("Checksum calculated: %s", checksum) - return checksum, nil -} - -func (t *ScanTask) doesPathExist() bool { - config := config.GetInstance() - 
vidExt := config.GetVideoExtensions() - imgExt := config.GetImageExtensions() - gExt := config.GetGalleryExtensions() - - ret := false - txnErr := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { - if matchExtension(t.FilePath, gExt) { - gallery, _ := r.Gallery().FindByPath(t.FilePath) - if gallery != nil { - ret = true - } - } else if matchExtension(t.FilePath, vidExt) { - s, _ := r.Scene().FindByPath(t.FilePath) - if s != nil { - ret = true - } - } else if matchExtension(t.FilePath, imgExt) { - i, _ := r.Image().FindByPath(t.FilePath) - if i != nil { - ret = true - } - } - - return nil - }) - if txnErr != nil { - logger.Warnf("error while executing read transaction: %v", txnErr) - } - - return ret + iwg.Wait() } func walkFilesToScan(s *models.StashConfig, f filepath.WalkFunc) error { @@ -1368,7 +349,7 @@ func walkFilesToScan(s *models.StashConfig, f filepath.WalkFunc) error { excludeImgRegex := generateRegexps(config.GetImageExcludes()) // don't scan zip images directly - if image.IsZipPath(s.Path) { + if file.IsZipPath(s.Path) { logger.Warnf("Cannot rescan zip image %s. 
Rescan zip gallery instead.", s.Path) return nil } @@ -1397,12 +378,12 @@ func walkFilesToScan(s *models.StashConfig, f filepath.WalkFunc) error { return nil } - if !s.ExcludeVideo && matchExtension(path, vidExt) && !matchFileRegex(path, excludeVidRegex) { + if !s.ExcludeVideo && utils.MatchExtension(path, vidExt) && !matchFileRegex(path, excludeVidRegex) { return f(path, info, err) } if !s.ExcludeImage { - if (matchExtension(path, imgExt) || matchExtension(path, gExt)) && !matchFileRegex(path, excludeImgRegex) { + if (utils.MatchExtension(path, imgExt) || utils.MatchExtension(path, gExt)) && !matchFileRegex(path, excludeImgRegex) { return f(path, info, err) } } diff --git a/pkg/manager/task_scan_gallery.go b/pkg/manager/task_scan_gallery.go new file mode 100644 index 000000000..751e6a0f3 --- /dev/null +++ b/pkg/manager/task_scan_gallery.go @@ -0,0 +1,170 @@ +package manager + +import ( + "archive/zip" + "context" + "fmt" + "path/filepath" + "strings" + + "github.com/remeh/sizedwaitgroup" + "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/gallery" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/manager/config" + "github.com/stashapp/stash/pkg/models" +) + +func (t *ScanTask) scanGallery(ctx context.Context) { + var g *models.Gallery + path := t.file.Path() + images := 0 + scanImages := false + + if err := t.TxnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error { + var err error + g, err = r.Gallery().FindByPath(path) + + if g != nil && err != nil { + images, err = r.Image().CountByGalleryID(g.ID) + if err != nil { + return fmt.Errorf("error getting images for zip gallery %s: %s", path, err.Error()) + } + } + + return err + }); err != nil { + logger.Error(err.Error()) + return + } + + scanner := gallery.Scanner{ + Scanner: gallery.FileScanner(&file.FSHasher{}), + ImageExtensions: instance.Config.GetImageExtensions(), + StripFileExtension: t.StripFileExtension, + Ctx: t.ctx, + CaseSensitiveFs: 
t.CaseSensitiveFs, + TxnManager: t.TxnManager, + Paths: instance.Paths, + PluginCache: instance.PluginCache, + MutexManager: t.mutexManager, + } + + var err error + if g != nil { + g, scanImages, err = scanner.ScanExisting(g, t.file) + if err != nil { + logger.Error(err.Error()) + return + } + + // scan the zip files if the gallery has no images + scanImages = scanImages || images == 0 + } else { + g, scanImages, err = scanner.ScanNew(t.file) + if err != nil { + logger.Error(err.Error()) + } + } + + if g != nil { + if scanImages { + t.scanZipImages(g) + } else { + // in case thumbnails have been deleted, regenerate them + t.regenerateZipImages(g) + } + } +} + +// associates a gallery to a scene with the same basename +func (t *ScanTask) associateGallery(wg *sizedwaitgroup.SizedWaitGroup) { + path := t.file.Path() + if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { + qb := r.Gallery() + sqb := r.Scene() + g, err := qb.FindByPath(path) + if err != nil { + return err + } + + if g == nil { + // associate is run after scan is finished + // should only happen if gallery is a directory or an io error occurs during hashing + logger.Warnf("associate: gallery %s not found in DB", path) + return nil + } + + basename := strings.TrimSuffix(path, filepath.Ext(path)) + var relatedFiles []string + vExt := config.GetInstance().GetVideoExtensions() + // make a list of media files that can be related to the gallery + for _, ext := range vExt { + related := basename + "." 
+ ext + // exclude gallery extensions from the related files + if !isGallery(related) { + relatedFiles = append(relatedFiles, related) + } + } + for _, scenePath := range relatedFiles { + scene, _ := sqb.FindByPath(scenePath) + // found related Scene + if scene != nil { + sceneGalleries, _ := sqb.FindByGalleryID(g.ID) // check if gallery is already associated to the scene + isAssoc := false + for _, sg := range sceneGalleries { + if scene.ID == sg.ID { + isAssoc = true + break + } + } + if !isAssoc { + logger.Infof("associate: Gallery %s is related to scene: %d", path, scene.ID) + if err := sqb.UpdateGalleries(scene.ID, []int{g.ID}); err != nil { + return err + } + } + } + } + return nil + }); err != nil { + logger.Error(err.Error()) + } + wg.Done() +} + +func (t *ScanTask) scanZipImages(zipGallery *models.Gallery) { + err := walkGalleryZip(zipGallery.Path.String, func(f *zip.File) error { + // copy this task and change the filename + subTask := *t + + // filepath is the zip file and the internal file name, separated by a null byte + subTask.file = file.ZipFile(zipGallery.Path.String, f) + subTask.zipGallery = zipGallery + + // run the subtask and wait for it to complete + subTask.Start(context.TODO()) + return nil + }) + if err != nil { + logger.Warnf("failed to scan zip file images for %s: %s", zipGallery.Path.String, err.Error()) + } +} + +func (t *ScanTask) regenerateZipImages(zipGallery *models.Gallery) { + var images []*models.Image + if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + iqb := r.Image() + + var err error + images, err = iqb.FindByGalleryID(zipGallery.ID) + return err + }); err != nil { + logger.Warnf("failed to find gallery images: %s", err.Error()) + return + } + + for _, img := range images { + t.generateThumbnail(img) + } +} diff --git a/pkg/manager/task_scan_image.go b/pkg/manager/task_scan_image.go new file mode 100644 index 000000000..d48e5f5c7 --- /dev/null +++ b/pkg/manager/task_scan_image.go @@ 
-0,0 +1,166 @@ +package manager + +import ( + "context" + "database/sql" + "path/filepath" + "time" + + "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/gallery" + "github.com/stashapp/stash/pkg/image" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/manager/config" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/plugin" + "github.com/stashapp/stash/pkg/utils" +) + +func (t *ScanTask) scanImage() { + var i *models.Image + path := t.file.Path() + + if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + var err error + i, err = r.Image().FindByPath(path) + return err + }); err != nil { + logger.Error(err.Error()) + return + } + + scanner := image.Scanner{ + Scanner: image.FileScanner(&file.FSHasher{}), + StripFileExtension: t.StripFileExtension, + Ctx: t.ctx, + TxnManager: t.TxnManager, + Paths: GetInstance().Paths, + PluginCache: instance.PluginCache, + MutexManager: t.mutexManager, + } + + var err error + if i != nil { + i, err = scanner.ScanExisting(i, t.file) + if err != nil { + logger.Error(err.Error()) + return + } + } else { + i, err = scanner.ScanNew(t.file) + if err != nil { + logger.Error(err.Error()) + return + } + + if i != nil { + if t.zipGallery != nil { + // associate with gallery + if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { + return gallery.AddImage(r.Gallery(), t.zipGallery.ID, i.ID) + }); err != nil { + logger.Error(err.Error()) + return + } + } else if config.GetInstance().GetCreateGalleriesFromFolders() { + // create gallery from folder or associate with existing gallery + logger.Infof("Associating image %s with folder gallery", i.Path) + var galleryID int + var isNewGallery bool + if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { + var err error + galleryID, isNewGallery, err = t.associateImageWithFolderGallery(i.ID, r.Gallery()) + return err + }); err != nil { + 
logger.Error(err.Error()) + return + } + + if isNewGallery { + GetInstance().PluginCache.ExecutePostHooks(t.ctx, galleryID, plugin.GalleryCreatePost, nil, nil) + } + } + } + } + + if i != nil { + t.generateThumbnail(i) + } +} + +func (t *ScanTask) associateImageWithFolderGallery(imageID int, qb models.GalleryReaderWriter) (galleryID int, isNew bool, err error) { + // find a gallery with the path specified + path := filepath.Dir(t.file.Path()) + var g *models.Gallery + g, err = qb.FindByPath(path) + if err != nil { + return + } + + if g == nil { + checksum := utils.MD5FromString(path) + + // create the gallery + currentTime := time.Now() + + newGallery := models.Gallery{ + Checksum: checksum, + Path: sql.NullString{ + String: path, + Valid: true, + }, + CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + Title: sql.NullString{ + String: utils.GetNameFromPath(path, false), + Valid: true, + }, + } + + logger.Infof("Creating gallery for folder %s", path) + g, err = qb.Create(newGallery) + if err != nil { + return 0, false, err + } + + isNew = true + } + + // associate image with gallery + err = gallery.AddImage(qb, g.ID, imageID) + galleryID = g.ID + return +} + +func (t *ScanTask) generateThumbnail(i *models.Image) { + if !t.GenerateThumbnails { + return + } + + thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum, models.DefaultGthumbWidth) + exists, _ := utils.FileExists(thumbPath) + if exists { + return + } + + config, _, err := image.DecodeSourceImage(i) + if err != nil { + logger.Errorf("error reading image %s: %s", i.Path, err.Error()) + return + } + + if config.Height > models.DefaultGthumbWidth || config.Width > models.DefaultGthumbWidth { + encoder := image.NewThumbnailEncoder(instance.FFMPEG) + data, err := encoder.GetThumbnail(i, models.DefaultGthumbWidth) + + if err != nil { + logger.Errorf("error getting thumbnail for image %s: %s", i.Path, err.Error()) + return + } + 
+ err = utils.WriteFile(thumbPath, data) + if err != nil { + logger.Errorf("error writing thumbnail for image %s: %s", i.Path, err) + } + } +} diff --git a/pkg/manager/task_scan_scene.go b/pkg/manager/task_scan_scene.go new file mode 100644 index 000000000..98798efaf --- /dev/null +++ b/pkg/manager/task_scan_scene.go @@ -0,0 +1,58 @@ +package manager + +import ( + "context" + + "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scene" +) + +func (t *ScanTask) scanScene() *models.Scene { + logError := func(err error) *models.Scene { + logger.Error(err.Error()) + return nil + } + + var retScene *models.Scene + var s *models.Scene + + if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + var err error + s, err = r.Scene().FindByPath(t.file.Path()) + return err + }); err != nil { + logger.Error(err.Error()) + return nil + } + + scanner := scene.Scanner{ + Scanner: scene.FileScanner(&file.FSHasher{}, t.fileNamingAlgorithm, t.calculateMD5), + StripFileExtension: t.StripFileExtension, + FileNamingAlgorithm: t.fileNamingAlgorithm, + Ctx: t.ctx, + TxnManager: t.TxnManager, + Paths: GetInstance().Paths, + Screenshotter: &instance.FFMPEG, + VideoFileCreator: &instance.FFProbe, + PluginCache: instance.PluginCache, + MutexManager: t.mutexManager, + } + + if s != nil { + if err := scanner.ScanExisting(s, t.file); err != nil { + return logError(err) + } + + return nil + } + + var err error + retScene, err = scanner.ScanNew(t.file) + if err != nil { + return logError(err) + } + + return retScene +} diff --git a/pkg/manager/task_stash_box_tag.go b/pkg/manager/task_stash_box_tag.go index 7a7e7b8f7..6da960381 100644 --- a/pkg/manager/task_stash_box_tag.go +++ b/pkg/manager/task_stash_box_tag.go @@ -22,7 +22,7 @@ type StashBoxPerformerTagTask struct { } func (t *StashBoxPerformerTagTask) Start() { - t.stashBoxPerformerTag() + 
t.stashBoxPerformerTag(context.TODO()) } func (t *StashBoxPerformerTagTask) Description() string { @@ -36,7 +36,7 @@ func (t *StashBoxPerformerTagTask) Description() string { return fmt.Sprintf("Tagging performer %s from stash-box", name) } -func (t *StashBoxPerformerTagTask) stashBoxPerformerTag() { +func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) { var performer *models.ScrapedPerformer var err error @@ -169,7 +169,7 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag() { } if len(performer.Images) > 0 && !excluded["image"] { - image, err := utils.ReadImageFromURL(performer.Images[0]) + image, err := utils.ReadImageFromURL(ctx, performer.Images[0]) if err != nil { return err } @@ -232,7 +232,7 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag() { } if len(performer.Images) > 0 { - image, imageErr := utils.ReadImageFromURL(performer.Images[0]) + image, imageErr := utils.ReadImageFromURL(ctx, performer.Images[0]) if imageErr != nil { return imageErr } diff --git a/pkg/manager/task_transcode.go b/pkg/manager/task_transcode.go index 7c55eaba5..c78b31435 100644 --- a/pkg/manager/task_transcode.go +++ b/pkg/manager/task_transcode.go @@ -1,6 +1,9 @@ package manager import ( + "context" + "fmt" + "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/manager/config" @@ -14,19 +17,24 @@ type GenerateTranscodeTask struct { fileNamingAlgorithm models.HashAlgorithm } -func (t *GenerateTranscodeTask) Start() { +func (t *GenerateTranscodeTask) GetDescription() string { + return fmt.Sprintf("Generating transcode for %s", t.Scene.Path) +} + +func (t *GenerateTranscodeTask) Start(ctc context.Context) { hasTranscode := HasTranscode(&t.Scene, t.fileNamingAlgorithm) if !t.Overwrite && hasTranscode { return } + ffprobe := instance.FFProbe var container ffmpeg.Container if t.Scene.Format.Valid { container = ffmpeg.Container(t.Scene.Format.String) } else { // container isn't in the DB // 
shouldn't happen unless user hasn't scanned after updating to PR#384+ version - tmpVideoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false) + tmpVideoFile, err := ffprobe.NewVideoFile(t.Scene.Path, false) if err != nil { logger.Errorf("[transcode] error reading video file: %s", err.Error()) return @@ -45,7 +53,7 @@ func (t *GenerateTranscodeTask) Start() { return } - videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false) + videoFile, err := ffprobe.NewVideoFile(t.Scene.Path, false) if err != nil { logger.Errorf("[transcode] error reading video file: %s", err.Error()) return @@ -58,7 +66,7 @@ func (t *GenerateTranscodeTask) Start() { OutputPath: outputPath, MaxTranscodeSize: transcodeSize, } - encoder := ffmpeg.NewEncoder(instance.FFMPEGPath) + encoder := instance.FFMPEG if videoCodec == ffmpeg.H264 { // for non supported h264 files stream copy the video part if audioCodec == ffmpeg.MissingUnsupported { @@ -68,7 +76,7 @@ func (t *GenerateTranscodeTask) Start() { } } else { if audioCodec == ffmpeg.MissingUnsupported { - //ffmpeg fails if it trys to transcode an unsupported audio codec + // ffmpeg fails if it trys to transcode an unsupported audio codec encoder.TranscodeVideo(*videoFile, options) } else { encoder.Transcode(*videoFile, options) diff --git a/pkg/match/path.go b/pkg/match/path.go new file mode 100644 index 000000000..5596d8e36 --- /dev/null +++ b/pkg/match/path.go @@ -0,0 +1,360 @@ +package match + +import ( + "fmt" + "path/filepath" + "regexp" + "strings" + + "github.com/stashapp/stash/pkg/image" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scene" +) + +const separatorChars = `.\-_ ` + +func getPathQueryRegex(name string) string { + // escape specific regex characters + name = regexp.QuoteMeta(name) + + // handle path separators + const separator = `[` + separatorChars + `]` + + ret := strings.ReplaceAll(name, " ", separator+"*") + ret = `(?:^|_|[^\w\d])` + ret + 
`(?:$|_|[^\w\d])` + return ret +} + +func getPathWords(path string) []string { + retStr := path + + // remove the extension + ext := filepath.Ext(retStr) + if ext != "" { + retStr = strings.TrimSuffix(retStr, ext) + } + + // handle path separators + const separator = `(?:_|[^\w\d])+` + re := regexp.MustCompile(separator) + retStr = re.ReplaceAllString(retStr, " ") + + words := strings.Split(retStr, " ") + + // remove any single letter words + var ret []string + for _, w := range words { + if len(w) > 1 { + // #1450 - we need to open up the criteria for matching so that we + // can match where path has no space between subject names - + // ie name = "foo bar" - path = "foobar" + // we post-match afterwards, so we can afford to be a little loose + // with the query + // just use the first two characters + ret = append(ret, w[0:2]) + } + } + + return ret +} + +func nameMatchesPath(name, path string) bool { + // escape specific regex characters + name = regexp.QuoteMeta(name) + + name = strings.ToLower(name) + path = strings.ToLower(path) + + // handle path separators + const separator = `[` + separatorChars + `]` + + reStr := strings.ReplaceAll(name, " ", separator+"*") + reStr = `(?:^|_|[^\w\d])` + reStr + `(?:$|_|[^\w\d])` + + re := regexp.MustCompile(reStr) + return re.MatchString(path) +} + +func PathToPerformers(path string, performerReader models.PerformerReader) ([]*models.Performer, error) { + words := getPathWords(path) + performers, err := performerReader.QueryForAutoTag(words) + + if err != nil { + return nil, err + } + + var ret []*models.Performer + for _, p := range performers { + // TODO - commenting out alias handling until both sides work correctly + if nameMatchesPath(p.Name.String, path) { // || nameMatchesPath(p.Aliases.String, path) { + ret = append(ret, p) + } + } + + return ret, nil +} + +func PathToStudios(path string, reader models.StudioReader) ([]*models.Studio, error) { + words := getPathWords(path) + candidates, err := 
reader.QueryForAutoTag(words) + + if err != nil { + return nil, err + } + + var ret []*models.Studio + for _, c := range candidates { + matches := false + if nameMatchesPath(c.Name.String, path) { + matches = true + } + + if !matches { + aliases, err := reader.GetAliases(c.ID) + if err != nil { + return nil, err + } + + for _, alias := range aliases { + if nameMatchesPath(alias, path) { + matches = true + break + } + } + } + + if matches { + ret = append(ret, c) + } + } + + return ret, nil +} + +func PathToTags(path string, tagReader models.TagReader) ([]*models.Tag, error) { + words := getPathWords(path) + tags, err := tagReader.QueryForAutoTag(words) + + if err != nil { + return nil, err + } + + var ret []*models.Tag + for _, t := range tags { + matches := false + if nameMatchesPath(t.Name, path) { + matches = true + } + + if !matches { + aliases, err := tagReader.GetAliases(t.ID) + if err != nil { + return nil, err + } + for _, alias := range aliases { + if nameMatchesPath(alias, path) { + matches = true + break + } + } + } + + if matches { + ret = append(ret, t) + } + } + + return ret, nil +} + +func scenePathsFilter(paths []string) *models.SceneFilterType { + if paths == nil { + return nil + } + + sep := string(filepath.Separator) + + var ret *models.SceneFilterType + var or *models.SceneFilterType + for _, p := range paths { + newOr := &models.SceneFilterType{} + if or != nil { + or.Or = newOr + } else { + ret = newOr + } + + or = newOr + + if !strings.HasSuffix(p, sep) { + p += sep + } + + or.Path = &models.StringCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: p + "%", + } + } + + return ret +} + +func PathToScenes(name string, paths []string, sceneReader models.SceneReader) ([]*models.Scene, error) { + regex := getPathQueryRegex(name) + organized := false + filter := models.SceneFilterType{ + Path: &models.StringCriterionInput{ + Value: "(?i)" + regex, + Modifier: models.CriterionModifierMatchesRegex, + }, + Organized: &organized, + } + 
+ filter.And = scenePathsFilter(paths) + + pp := models.PerPageAll + scenes, err := scene.Query(sceneReader, &filter, &models.FindFilterType{ + PerPage: &pp, + }) + + if err != nil { + return nil, fmt.Errorf("error querying scenes with regex '%s': %s", regex, err.Error()) + } + + var ret []*models.Scene + for _, p := range scenes { + if nameMatchesPath(name, p.Path) { + ret = append(ret, p) + } + } + + return ret, nil +} + +func imagePathsFilter(paths []string) *models.ImageFilterType { + if paths == nil { + return nil + } + + sep := string(filepath.Separator) + + var ret *models.ImageFilterType + var or *models.ImageFilterType + for _, p := range paths { + newOr := &models.ImageFilterType{} + if or != nil { + or.Or = newOr + } else { + ret = newOr + } + + or = newOr + + if !strings.HasSuffix(p, sep) { + p += sep + } + + or.Path = &models.StringCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: p + "%", + } + } + + return ret +} + +func PathToImages(name string, paths []string, imageReader models.ImageReader) ([]*models.Image, error) { + regex := getPathQueryRegex(name) + organized := false + filter := models.ImageFilterType{ + Path: &models.StringCriterionInput{ + Value: "(?i)" + regex, + Modifier: models.CriterionModifierMatchesRegex, + }, + Organized: &organized, + } + + filter.And = imagePathsFilter(paths) + + pp := models.PerPageAll + images, err := image.Query(imageReader, &filter, &models.FindFilterType{ + PerPage: &pp, + }) + + if err != nil { + return nil, fmt.Errorf("error querying images with regex '%s': %s", regex, err.Error()) + } + + var ret []*models.Image + for _, p := range images { + if nameMatchesPath(name, p.Path) { + ret = append(ret, p) + } + } + + return ret, nil +} + +func galleryPathsFilter(paths []string) *models.GalleryFilterType { + if paths == nil { + return nil + } + + sep := string(filepath.Separator) + + var ret *models.GalleryFilterType + var or *models.GalleryFilterType + for _, p := range paths { + newOr := 
&models.GalleryFilterType{} + if or != nil { + or.Or = newOr + } else { + ret = newOr + } + + or = newOr + + if !strings.HasSuffix(p, sep) { + p += sep + } + + or.Path = &models.StringCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: p + "%", + } + } + + return ret +} + +func PathToGalleries(name string, paths []string, galleryReader models.GalleryReader) ([]*models.Gallery, error) { + regex := getPathQueryRegex(name) + organized := false + filter := models.GalleryFilterType{ + Path: &models.StringCriterionInput{ + Value: "(?i)" + regex, + Modifier: models.CriterionModifierMatchesRegex, + }, + Organized: &organized, + } + + filter.And = galleryPathsFilter(paths) + + pp := models.PerPageAll + gallerys, _, err := galleryReader.Query(&filter, &models.FindFilterType{ + PerPage: &pp, + }) + + if err != nil { + return nil, fmt.Errorf("error querying gallerys with regex '%s': %s", regex, err.Error()) + } + + var ret []*models.Gallery + for _, p := range gallerys { + if nameMatchesPath(name, p.Path.String) { + ret = append(ret, p) + } + } + + return ret, nil +} diff --git a/pkg/match/scraped.go b/pkg/match/scraped.go new file mode 100644 index 000000000..1e9de81e1 --- /dev/null +++ b/pkg/match/scraped.go @@ -0,0 +1,149 @@ +package match + +import ( + "strconv" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/studio" + "github.com/stashapp/stash/pkg/tag" +) + +// ScrapedPerformer matches the provided performer with the +// performers in the database and sets the ID field if one is found. 
+func ScrapedPerformer(qb models.PerformerReader, p *models.ScrapedPerformer, stashBoxEndpoint *string) error { + if p.StoredID != nil || p.Name == nil { + return nil + } + + // Check if a performer with the StashID already exists + if stashBoxEndpoint != nil && p.RemoteSiteID != nil { + performers, err := qb.FindByStashID(models.StashID{ + StashID: *p.RemoteSiteID, + Endpoint: *stashBoxEndpoint, + }) + if err != nil { + return err + } + if len(performers) > 0 { + id := strconv.Itoa(performers[0].ID) + p.StoredID = &id + return nil + } + } + + performers, err := qb.FindByNames([]string{*p.Name}, true) + + if err != nil { + return err + } + + if len(performers) != 1 { + // ignore - cannot match + return nil + } + + id := strconv.Itoa(performers[0].ID) + p.StoredID = &id + return nil +} + +// ScrapedStudio matches the provided studio with the studios +// in the database and sets the ID field if one is found. +func ScrapedStudio(qb models.StudioReader, s *models.ScrapedStudio, stashBoxEndpoint *string) error { + if s.StoredID != nil { + return nil + } + + // Check if a studio with the StashID already exists + if stashBoxEndpoint != nil && s.RemoteSiteID != nil { + studios, err := qb.FindByStashID(models.StashID{ + StashID: *s.RemoteSiteID, + Endpoint: *stashBoxEndpoint, + }) + if err != nil { + return err + } + if len(studios) > 0 { + id := strconv.Itoa(studios[0].ID) + s.StoredID = &id + return nil + } + } + + st, err := studio.ByName(qb, s.Name) + + if err != nil { + return err + } + + if st == nil { + // try matching by alias + st, err = studio.ByAlias(qb, s.Name) + if err != nil { + return err + } + } + + if st == nil { + // ignore - cannot match + return nil + } + + id := strconv.Itoa(st.ID) + s.StoredID = &id + return nil +} + +// ScrapedMovie matches the provided movie with the movies +// in the database and sets the ID field if one is found. 
+func ScrapedMovie(qb models.MovieReader, m *models.ScrapedMovie) error { + if m.StoredID != nil || m.Name == nil { + return nil + } + + movies, err := qb.FindByNames([]string{*m.Name}, true) + + if err != nil { + return err + } + + if len(movies) != 1 { + // ignore - cannot match + return nil + } + + id := strconv.Itoa(movies[0].ID) + m.StoredID = &id + return nil +} + +// ScrapedTag matches the provided tag with the tags +// in the database and sets the ID field if one is found. +func ScrapedTag(qb models.TagReader, s *models.ScrapedTag) error { + if s.StoredID != nil { + return nil + } + + t, err := tag.ByName(qb, s.Name) + + if err != nil { + return err + } + + if t == nil { + // try matching by alias + t, err = tag.ByAlias(qb, s.Name) + if err != nil { + return err + } + } + + if t == nil { + // ignore - cannot match + return nil + } + + id := strconv.Itoa(t.ID) + s.StoredID = &id + return nil +} diff --git a/pkg/models/errors.go b/pkg/models/errors.go new file mode 100644 index 000000000..54f5e1d00 --- /dev/null +++ b/pkg/models/errors.go @@ -0,0 +1,5 @@ +package models + +import "errors" + +var ErrNotFound = errors.New("not found") diff --git a/pkg/models/extension_find_filter.go b/pkg/models/extension_find_filter.go index 8dc1ed515..1a6fb15ed 100644 --- a/pkg/models/extension_find_filter.go +++ b/pkg/models/extension_find_filter.go @@ -61,3 +61,13 @@ func (ff FindFilterType) GetPageSize() int { func (ff FindFilterType) IsGetAll() bool { return ff.PerPage != nil && *ff.PerPage < 0 } + +// BatchFindFilter returns a FindFilterType suitable for batch finding +// using the provided batch size. 
+func BatchFindFilter(batchSize int) *FindFilterType { + page := 1 + return &FindFilterType{ + PerPage: &batchSize, + Page: &page, + } +} diff --git a/pkg/models/image.go b/pkg/models/image.go index c3f3c5b2e..bae3c043f 100644 --- a/pkg/models/image.go +++ b/pkg/models/image.go @@ -1,8 +1,46 @@ package models -type ImageReader interface { - Find(id int) (*Image, error) +type ImageQueryOptions struct { + QueryOptions + ImageFilter *ImageFilterType + + Megapixels bool + TotalSize bool +} + +type ImageQueryResult struct { + QueryResult + Megapixels float64 + TotalSize float64 + + finder ImageFinder + images []*Image + resolveErr error +} + +func NewImageQueryResult(finder ImageFinder) *ImageQueryResult { + return &ImageQueryResult{ + finder: finder, + } +} + +func (r *ImageQueryResult) Resolve() ([]*Image, error) { + // cache results + if r.images == nil && r.resolveErr == nil { + r.images, r.resolveErr = r.finder.FindMany(r.IDs) + } + return r.images, r.resolveErr +} + +type ImageFinder interface { + // TODO - rename to Find and remove existing method FindMany(ids []int) ([]*Image, error) +} + +type ImageReader interface { + ImageFinder + // TODO - remove this in another PR + Find(id int) (*Image, error) FindByChecksum(checksum string) (*Image, error) FindByGalleryID(galleryID int) ([]*Image, error) CountByGalleryID(galleryID int) (int, error) @@ -16,7 +54,7 @@ type ImageReader interface { // CountByStudioID(studioID int) (int, error) // CountByTagID(tagID int) (int, error) All() ([]*Image, error) - Query(imageFilter *ImageFilterType, findFilter *FindFilterType) ([]*Image, int, error) + Query(options ImageQueryOptions) (*ImageQueryResult, error) QueryCount(imageFilter *ImageFilterType, findFilter *FindFilterType) (int, error) GetGalleryIDs(imageID int) ([]int, error) GetTagIDs(imageID int) ([]int, error) diff --git a/pkg/models/mocks/ImageReaderWriter.go b/pkg/models/mocks/ImageReaderWriter.go index a8a8c4b4a..630c1c0d2 100644 --- 
a/pkg/models/mocks/ImageReaderWriter.go +++ b/pkg/models/mocks/ImageReaderWriter.go @@ -340,34 +340,27 @@ func (_m *ImageReaderWriter) IncrementOCounter(id int) (int, error) { return r0, r1 } -// Query provides a mock function with given fields: imageFilter, findFilter -func (_m *ImageReaderWriter) Query(imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) ([]*models.Image, int, error) { - ret := _m.Called(imageFilter, findFilter) +// Query provides a mock function with given fields: options +func (_m *ImageReaderWriter) Query(options models.ImageQueryOptions) (*models.ImageQueryResult, error) { + ret := _m.Called(options) - var r0 []*models.Image - if rf, ok := ret.Get(0).(func(*models.ImageFilterType, *models.FindFilterType) []*models.Image); ok { - r0 = rf(imageFilter, findFilter) + var r0 *models.ImageQueryResult + if rf, ok := ret.Get(0).(func(models.ImageQueryOptions) *models.ImageQueryResult); ok { + r0 = rf(options) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]*models.Image) + r0 = ret.Get(0).(*models.ImageQueryResult) } } - var r1 int - if rf, ok := ret.Get(1).(func(*models.ImageFilterType, *models.FindFilterType) int); ok { - r1 = rf(imageFilter, findFilter) + var r1 error + if rf, ok := ret.Get(1).(func(models.ImageQueryOptions) error); ok { + r1 = rf(options) } else { - r1 = ret.Get(1).(int) + r1 = ret.Error(1) } - var r2 error - if rf, ok := ret.Get(2).(func(*models.ImageFilterType, *models.FindFilterType) error); ok { - r2 = rf(imageFilter, findFilter) - } else { - r2 = ret.Error(2) - } - - return r0, r1, r2 + return r0, r1 } // QueryCount provides a mock function with given fields: imageFilter, findFilter diff --git a/pkg/models/mocks/PerformerReaderWriter.go b/pkg/models/mocks/PerformerReaderWriter.go index 986074405..0ccaddb33 100644 --- a/pkg/models/mocks/PerformerReaderWriter.go +++ b/pkg/models/mocks/PerformerReaderWriter.go @@ -243,6 +243,29 @@ func (_m *PerformerReaderWriter) FindBySceneID(sceneID int) 
([]*models.Performer return r0, r1 } +// FindByStashID provides a mock function with given fields: stashID +func (_m *PerformerReaderWriter) FindByStashID(stashID models.StashID) ([]*models.Performer, error) { + ret := _m.Called(stashID) + + var r0 []*models.Performer + if rf, ok := ret.Get(0).(func(models.StashID) []*models.Performer); ok { + r0 = rf(stashID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Performer) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.StashID) error); ok { + r1 = rf(stashID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByStashIDStatus provides a mock function with given fields: hasStashID, stashboxEndpoint func (_m *PerformerReaderWriter) FindByStashIDStatus(hasStashID bool, stashboxEndpoint string) ([]*models.Performer, error) { ret := _m.Called(hasStashID, stashboxEndpoint) diff --git a/pkg/models/mocks/SceneReaderWriter.go b/pkg/models/mocks/SceneReaderWriter.go index 326999518..6c9a91f77 100644 --- a/pkg/models/mocks/SceneReaderWriter.go +++ b/pkg/models/mocks/SceneReaderWriter.go @@ -641,34 +641,27 @@ func (_m *SceneReaderWriter) IncrementOCounter(id int) (int, error) { return r0, r1 } -// Query provides a mock function with given fields: sceneFilter, findFilter -func (_m *SceneReaderWriter) Query(sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, int, error) { - ret := _m.Called(sceneFilter, findFilter) +// Query provides a mock function with given fields: options +func (_m *SceneReaderWriter) Query(options models.SceneQueryOptions) (*models.SceneQueryResult, error) { + ret := _m.Called(options) - var r0 []*models.Scene - if rf, ok := ret.Get(0).(func(*models.SceneFilterType, *models.FindFilterType) []*models.Scene); ok { - r0 = rf(sceneFilter, findFilter) + var r0 *models.SceneQueryResult + if rf, ok := ret.Get(0).(func(models.SceneQueryOptions) *models.SceneQueryResult); ok { + r0 = rf(options) } else { if ret.Get(0) != nil { - r0 = 
ret.Get(0).([]*models.Scene) + r0 = ret.Get(0).(*models.SceneQueryResult) } } - var r1 int - if rf, ok := ret.Get(1).(func(*models.SceneFilterType, *models.FindFilterType) int); ok { - r1 = rf(sceneFilter, findFilter) + var r1 error + if rf, ok := ret.Get(1).(func(models.SceneQueryOptions) error); ok { + r1 = rf(options) } else { - r1 = ret.Get(1).(int) + r1 = ret.Error(1) } - var r2 error - if rf, ok := ret.Get(2).(func(*models.SceneFilterType, *models.FindFilterType) error); ok { - r2 = rf(sceneFilter, findFilter) - } else { - r2 = ret.Error(2) - } - - return r0, r1, r2 + return r0, r1 } // ResetOCounter provides a mock function with given fields: id diff --git a/pkg/models/mocks/StudioReaderWriter.go b/pkg/models/mocks/StudioReaderWriter.go index 3c7b61ab0..c433fe305 100644 --- a/pkg/models/mocks/StudioReaderWriter.go +++ b/pkg/models/mocks/StudioReaderWriter.go @@ -153,6 +153,29 @@ func (_m *StudioReaderWriter) FindByName(name string, nocase bool) (*models.Stud return r0, r1 } +// FindByStashID provides a mock function with given fields: stashID +func (_m *StudioReaderWriter) FindByStashID(stashID models.StashID) ([]*models.Studio, error) { + ret := _m.Called(stashID) + + var r0 []*models.Studio + if rf, ok := ret.Get(0).(func(models.StashID) []*models.Studio); ok { + r0 = rf(stashID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Studio) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.StashID) error); ok { + r1 = rf(stashID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindChildren provides a mock function with given fields: id func (_m *StudioReaderWriter) FindChildren(id int) ([]*models.Studio, error) { ret := _m.Called(id) diff --git a/pkg/models/mocks/TagReaderWriter.go b/pkg/models/mocks/TagReaderWriter.go index f6b257ed2..1d5e14e34 100644 --- a/pkg/models/mocks/TagReaderWriter.go +++ b/pkg/models/mocks/TagReaderWriter.go @@ -131,15 +131,15 @@ func (_m *TagReaderWriter) Find(id int) (*models.Tag, error) { 
} // FindAllAncestors provides a mock function with given fields: tagID, excludeIDs -func (_m *TagReaderWriter) FindAllAncestors(tagID int, excludeIDs []int) ([]*models.Tag, error) { +func (_m *TagReaderWriter) FindAllAncestors(tagID int, excludeIDs []int) ([]*models.TagPath, error) { ret := _m.Called(tagID, excludeIDs) - var r0 []*models.Tag - if rf, ok := ret.Get(0).(func(int, []int) []*models.Tag); ok { + var r0 []*models.TagPath + if rf, ok := ret.Get(0).(func(int, []int) []*models.TagPath); ok { r0 = rf(tagID, excludeIDs) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]*models.Tag) + r0 = ret.Get(0).([]*models.TagPath) } } @@ -154,15 +154,15 @@ func (_m *TagReaderWriter) FindAllAncestors(tagID int, excludeIDs []int) ([]*mod } // FindAllDescendants provides a mock function with given fields: tagID, excludeIDs -func (_m *TagReaderWriter) FindAllDescendants(tagID int, excludeIDs []int) ([]*models.Tag, error) { +func (_m *TagReaderWriter) FindAllDescendants(tagID int, excludeIDs []int) ([]*models.TagPath, error) { ret := _m.Called(tagID, excludeIDs) - var r0 []*models.Tag - if rf, ok := ret.Get(0).(func(int, []int) []*models.Tag); ok { + var r0 []*models.TagPath + if rf, ok := ret.Get(0).(func(int, []int) []*models.TagPath); ok { r0 = rf(tagID, excludeIDs) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]*models.Tag) + r0 = ret.Get(0).([]*models.TagPath) } } diff --git a/pkg/models/mocks/query.go b/pkg/models/mocks/query.go new file mode 100644 index 000000000..152335fc2 --- /dev/null +++ b/pkg/models/mocks/query.go @@ -0,0 +1,41 @@ +package mocks + +import "github.com/stashapp/stash/pkg/models" + +type sceneResolver struct { + scenes []*models.Scene +} + +func (s *sceneResolver) Find(id int) (*models.Scene, error) { + panic("not implemented") +} + +func (s *sceneResolver) FindMany(ids []int) ([]*models.Scene, error) { + return s.scenes, nil +} + +func SceneQueryResult(scenes []*models.Scene, count int) *models.SceneQueryResult { + ret := 
models.NewSceneQueryResult(&sceneResolver{ + scenes: scenes, + }) + + ret.Count = count + return ret +} + +type imageResolver struct { + images []*models.Image +} + +func (s *imageResolver) FindMany(ids []int) ([]*models.Image, error) { + return s.images, nil +} + +func ImageQueryResult(images []*models.Image, count int) *models.ImageQueryResult { + ret := models.NewImageQueryResult(&imageResolver{ + images: images, + }) + + ret.Count = count + return ret +} diff --git a/pkg/models/mocks/transaction.go b/pkg/models/mocks/transaction.go index da6e2e333..886fef7d6 100644 --- a/pkg/models/mocks/transaction.go +++ b/pkg/models/mocks/transaction.go @@ -7,16 +7,16 @@ import ( ) type TransactionManager struct { - gallery models.GalleryReaderWriter - image models.ImageReaderWriter - movie models.MovieReaderWriter - performer models.PerformerReaderWriter - scene models.SceneReaderWriter - sceneMarker models.SceneMarkerReaderWriter - scrapedItem models.ScrapedItemReaderWriter - studio models.StudioReaderWriter - tag models.TagReaderWriter - savedFilter models.SavedFilterReaderWriter + gallery *GalleryReaderWriter + image *ImageReaderWriter + movie *MovieReaderWriter + performer *PerformerReaderWriter + scene *SceneReaderWriter + sceneMarker *SceneMarkerReaderWriter + scrapedItem *ScrapedItemReaderWriter + studio *StudioReaderWriter + tag *TagReaderWriter + savedFilter *SavedFilterReaderWriter } func NewTransactionManager() *TransactionManager { @@ -38,90 +38,130 @@ func (t *TransactionManager) WithTxn(ctx context.Context, fn func(r models.Repos return fn(t) } -func (t *TransactionManager) Gallery() models.GalleryReaderWriter { +func (t *TransactionManager) GalleryMock() *GalleryReaderWriter { return t.gallery } -func (t *TransactionManager) Image() models.ImageReaderWriter { +func (t *TransactionManager) ImageMock() *ImageReaderWriter { return t.image } -func (t *TransactionManager) Movie() models.MovieReaderWriter { +func (t *TransactionManager) MovieMock() 
*MovieReaderWriter { return t.movie } -func (t *TransactionManager) Performer() models.PerformerReaderWriter { +func (t *TransactionManager) PerformerMock() *PerformerReaderWriter { return t.performer } -func (t *TransactionManager) SceneMarker() models.SceneMarkerReaderWriter { +func (t *TransactionManager) SceneMarkerMock() *SceneMarkerReaderWriter { return t.sceneMarker } -func (t *TransactionManager) Scene() models.SceneReaderWriter { +func (t *TransactionManager) SceneMock() *SceneReaderWriter { return t.scene } -func (t *TransactionManager) ScrapedItem() models.ScrapedItemReaderWriter { +func (t *TransactionManager) ScrapedItemMock() *ScrapedItemReaderWriter { return t.scrapedItem } -func (t *TransactionManager) Studio() models.StudioReaderWriter { +func (t *TransactionManager) StudioMock() *StudioReaderWriter { return t.studio } -func (t *TransactionManager) Tag() models.TagReaderWriter { +func (t *TransactionManager) TagMock() *TagReaderWriter { return t.tag } -func (t *TransactionManager) SavedFilter() models.SavedFilterReaderWriter { +func (t *TransactionManager) SavedFilterMock() *SavedFilterReaderWriter { return t.savedFilter } +func (t *TransactionManager) Gallery() models.GalleryReaderWriter { + return t.GalleryMock() +} + +func (t *TransactionManager) Image() models.ImageReaderWriter { + return t.ImageMock() +} + +func (t *TransactionManager) Movie() models.MovieReaderWriter { + return t.MovieMock() +} + +func (t *TransactionManager) Performer() models.PerformerReaderWriter { + return t.PerformerMock() +} + +func (t *TransactionManager) SceneMarker() models.SceneMarkerReaderWriter { + return t.SceneMarkerMock() +} + +func (t *TransactionManager) Scene() models.SceneReaderWriter { + return t.SceneMock() +} + +func (t *TransactionManager) ScrapedItem() models.ScrapedItemReaderWriter { + return t.ScrapedItemMock() +} + +func (t *TransactionManager) Studio() models.StudioReaderWriter { + return t.StudioMock() +} + +func (t *TransactionManager) Tag() 
models.TagReaderWriter { + return t.TagMock() +} + +func (t *TransactionManager) SavedFilter() models.SavedFilterReaderWriter { + return t.SavedFilterMock() +} + type ReadTransaction struct { - t *TransactionManager + *TransactionManager } func (t *TransactionManager) WithReadTxn(ctx context.Context, fn func(r models.ReaderRepository) error) error { - return fn(&ReadTransaction{t: t}) + return fn(&ReadTransaction{t}) } func (r *ReadTransaction) Gallery() models.GalleryReader { - return r.t.gallery + return r.GalleryMock() } func (r *ReadTransaction) Image() models.ImageReader { - return r.t.image + return r.ImageMock() } func (r *ReadTransaction) Movie() models.MovieReader { - return r.t.movie + return r.MovieMock() } func (r *ReadTransaction) Performer() models.PerformerReader { - return r.t.performer + return r.PerformerMock() } func (r *ReadTransaction) SceneMarker() models.SceneMarkerReader { - return r.t.sceneMarker + return r.SceneMarkerMock() } func (r *ReadTransaction) Scene() models.SceneReader { - return r.t.scene + return r.SceneMock() } func (r *ReadTransaction) ScrapedItem() models.ScrapedItemReader { - return r.t.scrapedItem + return r.ScrapedItemMock() } func (r *ReadTransaction) Studio() models.StudioReader { - return r.t.studio + return r.StudioMock() } func (r *ReadTransaction) Tag() models.TagReader { - return r.t.tag + return r.TagMock() } func (r *ReadTransaction) SavedFilter() models.SavedFilterReader { - return r.t.savedFilter + return r.SavedFilterMock() } diff --git a/pkg/models/model_file.go b/pkg/models/model_file.go new file mode 100644 index 000000000..21fd51bab --- /dev/null +++ b/pkg/models/model_file.go @@ -0,0 +1,28 @@ +package models + +import "time" + +type File struct { + Checksum string `db:"checksum" json:"checksum"` + OSHash string `db:"oshash" json:"oshash"` + Path string `db:"path" json:"path"` + Size string `db:"size" json:"size"` + FileModTime time.Time `db:"file_mod_time" json:"file_mod_time"` +} + +// GetHash returns the 
hash of the scene, based on the hash algorithm provided. If +// hash algorithm is MD5, then Checksum is returned. Otherwise, OSHash is returned. +func (s File) GetHash(hashAlgorithm HashAlgorithm) string { + switch hashAlgorithm { + case HashAlgorithmMd5: + return s.Checksum + case HashAlgorithmOshash: + return s.OSHash + default: + panic("unknown hash algorithm") + } +} + +func (s File) Equal(o File) bool { + return s.Path == o.Path && s.Checksum == o.Checksum && s.OSHash == o.OSHash && s.Size == o.Size && s.FileModTime.Equal(o.FileModTime) +} diff --git a/pkg/models/model_gallery.go b/pkg/models/model_gallery.go index 977df7663..e7b2b09b4 100644 --- a/pkg/models/model_gallery.go +++ b/pkg/models/model_gallery.go @@ -3,6 +3,7 @@ package models import ( "database/sql" "path/filepath" + "time" ) type Gallery struct { @@ -40,6 +41,40 @@ type GalleryPartial struct { UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"` } +func (s *Gallery) File() File { + ret := File{ + Path: s.Path.String, + } + + ret.Checksum = s.Checksum + + if s.FileModTime.Valid { + ret.FileModTime = s.FileModTime.Timestamp + } + + return ret +} + +func (s *Gallery) SetFile(f File) { + path := f.Path + s.Path = sql.NullString{ + String: path, + Valid: true, + } + + if f.Checksum != "" { + s.Checksum = f.Checksum + } + + zeroTime := time.Time{} + if f.FileModTime != zeroTime { + s.FileModTime = NullSQLiteTimestamp{ + Timestamp: f.FileModTime, + Valid: true, + } + } +} + // GetTitle returns the title of the scene. If the Title field is empty, // then the base filename is returned. func (s Gallery) GetTitle() string { diff --git a/pkg/models/model_image.go b/pkg/models/model_image.go index 6470e619d..4aae450ec 100644 --- a/pkg/models/model_image.go +++ b/pkg/models/model_image.go @@ -3,6 +3,8 @@ package models import ( "database/sql" "path/filepath" + "strconv" + "time" ) // Image stores the metadata for a single image. 
@@ -41,14 +43,55 @@ type ImagePartial struct { UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"` } -// GetTitle returns the title of the image. If the Title field is empty, -// then the base filename is returned. -func (s Image) GetTitle() string { - if s.Title.String != "" { - return s.Title.String +func (i *Image) File() File { + ret := File{ + Path: i.Path, } - return filepath.Base(s.Path) + ret.Checksum = i.Checksum + if i.FileModTime.Valid { + ret.FileModTime = i.FileModTime.Timestamp + } + if i.Size.Valid { + ret.Size = strconv.FormatInt(i.Size.Int64, 10) + } + + return ret +} + +func (i *Image) SetFile(f File) { + path := f.Path + i.Path = path + + if f.Checksum != "" { + i.Checksum = f.Checksum + } + zeroTime := time.Time{} + if f.FileModTime != zeroTime { + i.FileModTime = NullSQLiteTimestamp{ + Timestamp: f.FileModTime, + Valid: true, + } + } + if f.Size != "" { + size, err := strconv.ParseInt(f.Size, 10, 64) + if err == nil { + i.Size = sql.NullInt64{ + Int64: size, + Valid: true, + } + } + } +} + +// GetTitle returns the title of the image. If the Title field is empty, +// then the base filename is returned. +func (i *Image) GetTitle() string { + if i.Title.String != "" { + return i.Title.String + } + + return filepath.Base(i.Path) } // ImageFileType represents the file metadata for an image. 
diff --git a/pkg/models/model_joins.go b/pkg/models/model_joins.go index 802651e21..1eebcd2f1 100644 --- a/pkg/models/model_joins.go +++ b/pkg/models/model_joins.go @@ -12,3 +12,10 @@ type StashID struct { StashID string `db:"stash_id" json:"stash_id"` Endpoint string `db:"endpoint" json:"endpoint"` } + +func (s StashID) StashIDInput() StashIDInput { + return StashIDInput{ + Endpoint: s.Endpoint, + StashID: s.StashID, + } +} diff --git a/pkg/models/model_scene.go b/pkg/models/model_scene.go index 2adb0a274..6d1c37e3f 100644 --- a/pkg/models/model_scene.go +++ b/pkg/models/model_scene.go @@ -3,6 +3,8 @@ package models import ( "database/sql" "path/filepath" + "strconv" + "time" ) // Scene stores the metadata for a single video scene. @@ -35,6 +37,58 @@ type Scene struct { Interactive bool `db:"interactive" json:"interactive"` } +func (s *Scene) File() File { + ret := File{ + Path: s.Path, + } + + if s.Checksum.Valid { + ret.Checksum = s.Checksum.String + } + if s.OSHash.Valid { + ret.OSHash = s.OSHash.String + } + if s.FileModTime.Valid { + ret.FileModTime = s.FileModTime.Timestamp + } + if s.Size.Valid { + ret.Size = s.Size.String + } + + return ret +} + +func (s *Scene) SetFile(f File) { + path := f.Path + s.Path = path + + if f.Checksum != "" { + s.Checksum = sql.NullString{ + String: f.Checksum, + Valid: true, + } + } + if f.OSHash != "" { + s.OSHash = sql.NullString{ + String: f.OSHash, + Valid: true, + } + } + zeroTime := time.Time{} + if f.FileModTime != zeroTime { + s.FileModTime = NullSQLiteTimestamp{ + Timestamp: f.FileModTime, + Valid: true, + } + } + if f.Size != "" { + s.Size = sql.NullString{ + String: f.Size, + Valid: true, + } + } +} + // ScenePartial represents part of a Scene object. It is used to update // the database entry. Only non-nil fields will be updated. 
type ScenePartial struct { @@ -66,6 +120,60 @@ type ScenePartial struct { Interactive *bool `db:"interactive" json:"interactive"` } +// UpdateInput constructs a SceneUpdateInput using the populated fields in the ScenePartial object. +func (s ScenePartial) UpdateInput() SceneUpdateInput { + boolPtrCopy := func(v *bool) *bool { + if v == nil { + return nil + } + + vv := *v + return &vv + } + + return SceneUpdateInput{ + ID: strconv.Itoa(s.ID), + Title: nullStringPtrToStringPtr(s.Title), + Details: nullStringPtrToStringPtr(s.Details), + URL: nullStringPtrToStringPtr(s.URL), + Date: s.Date.StringPtr(), + Rating: nullInt64PtrToIntPtr(s.Rating), + Organized: boolPtrCopy(s.Organized), + StudioID: nullInt64PtrToStringPtr(s.StudioID), + } +} + +func (s *ScenePartial) SetFile(f File) { + path := f.Path + s.Path = &path + + if f.Checksum != "" { + s.Checksum = &sql.NullString{ + String: f.Checksum, + Valid: true, + } + } + if f.OSHash != "" { + s.OSHash = &sql.NullString{ + String: f.OSHash, + Valid: true, + } + } + zeroTime := time.Time{} + if f.FileModTime != zeroTime { + s.FileModTime = &NullSQLiteTimestamp{ + Timestamp: f.FileModTime, + Valid: true, + } + } + if f.Size != "" { + s.Size = &sql.NullString{ + String: f.Size, + Valid: true, + } + } +} + // GetTitle returns the title of the scene. If the Title field is empty, // then the base filename is returned. func (s Scene) GetTitle() string { @@ -79,13 +187,7 @@ func (s Scene) GetTitle() string { // GetHash returns the hash of the scene, based on the hash algorithm provided. If // hash algorithm is MD5, then Checksum is returned. Otherwise, OSHash is returned. 
func (s Scene) GetHash(hashAlgorithm HashAlgorithm) string { - if hashAlgorithm == HashAlgorithmMd5 { - return s.Checksum.String - } else if hashAlgorithm == HashAlgorithmOshash { - return s.OSHash.String - } - - panic("unknown hash algorithm") + return s.File().GetHash(hashAlgorithm) } func (s Scene) GetMinResolution() int64 { diff --git a/pkg/models/model_scene_test.go b/pkg/models/model_scene_test.go new file mode 100644 index 000000000..43216e539 --- /dev/null +++ b/pkg/models/model_scene_test.go @@ -0,0 +1,80 @@ +package models + +import ( + "database/sql" + "reflect" + "testing" +) + +func TestScenePartial_UpdateInput(t *testing.T) { + const ( + id = 1 + idStr = "1" + ) + + var ( + title = "title" + details = "details" + url = "url" + date = "2001-02-03" + rating = 4 + organized = true + studioID = 2 + studioIDStr = "2" + ) + + tests := []struct { + name string + s ScenePartial + want SceneUpdateInput + }{ + { + "full", + ScenePartial{ + ID: id, + Title: NullStringPtr(title), + Details: NullStringPtr(details), + URL: NullStringPtr(url), + Date: &SQLiteDate{ + String: date, + Valid: true, + }, + Rating: &sql.NullInt64{ + Int64: int64(rating), + Valid: true, + }, + Organized: &organized, + StudioID: &sql.NullInt64{ + Int64: int64(studioID), + Valid: true, + }, + }, + SceneUpdateInput{ + ID: idStr, + Title: &title, + Details: &details, + URL: &url, + Date: &date, + Rating: &rating, + Organized: &organized, + StudioID: &studioIDStr, + }, + }, + { + "empty", + ScenePartial{ + ID: id, + }, + SceneUpdateInput{ + ID: idStr, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := tt.s.UpdateInput(); !reflect.DeepEqual(got, tt.want) { + t.Errorf("ScenePartial.UpdateInput() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/models/model_tag.go b/pkg/models/model_tag.go index c56a49120..90fc67bd9 100644 --- a/pkg/models/model_tag.go +++ b/pkg/models/model_tag.go @@ -16,6 +16,11 @@ type TagPartial struct { UpdatedAt 
*SQLiteTimestamp `db:"updated_at" json:"updated_at"` } +type TagPath struct { + Tag + Path string `db:"path" json:"path"` +} + func NewTag(name string) *Tag { currentTime := time.Now() return &Tag{ @@ -35,6 +40,16 @@ func (t *Tags) New() interface{} { return &Tag{} } +type TagPaths []*TagPath + +func (t *TagPaths) Append(o interface{}) { + *t = append(*t, o.(*TagPath)) +} + +func (t *TagPaths) New() interface{} { + return &TagPath{} +} + // Original Tag image from: https://fontawesome.com/icons/tag?style=solid // Modified to change color and rotate // Licensed under CC Attribution 4.0: https://fontawesome.com/license diff --git a/pkg/models/performer.go b/pkg/models/performer.go index ea316be2d..04173b47e 100644 --- a/pkg/models/performer.go +++ b/pkg/models/performer.go @@ -8,6 +8,7 @@ type PerformerReader interface { FindByImageID(imageID int) ([]*Performer, error) FindByGalleryID(galleryID int) ([]*Performer, error) FindByNames(names []string, nocase bool) ([]*Performer, error) + FindByStashID(stashID StashID) ([]*Performer, error) FindByStashIDStatus(hasStashID bool, stashboxEndpoint string) ([]*Performer, error) CountByTagID(tagID int) (int, error) Count() (int, error) diff --git a/pkg/models/query.go b/pkg/models/query.go new file mode 100644 index 000000000..1b2d347b9 --- /dev/null +++ b/pkg/models/query.go @@ -0,0 +1,11 @@ +package models + +type QueryOptions struct { + FindFilter *FindFilterType + Count bool +} + +type QueryResult struct { + IDs []int + Count int +} diff --git a/pkg/models/scene.go b/pkg/models/scene.go index 60345fce9..a86f75ff0 100644 --- a/pkg/models/scene.go +++ b/pkg/models/scene.go @@ -1,8 +1,46 @@ package models -type SceneReader interface { - Find(id int) (*Scene, error) +type SceneQueryOptions struct { + QueryOptions + SceneFilter *SceneFilterType + + TotalDuration bool + TotalSize bool +} + +type SceneQueryResult struct { + QueryResult + TotalDuration float64 + TotalSize float64 + + finder SceneFinder + scenes []*Scene + 
resolveErr error +} + +func NewSceneQueryResult(finder SceneFinder) *SceneQueryResult { + return &SceneQueryResult{ + finder: finder, + } +} + +func (r *SceneQueryResult) Resolve() ([]*Scene, error) { + // cache results + if r.scenes == nil && r.resolveErr == nil { + r.scenes, r.resolveErr = r.finder.FindMany(r.IDs) + } + return r.scenes, r.resolveErr +} + +type SceneFinder interface { + // TODO - rename this to Find and remove existing method FindMany(ids []int) ([]*Scene, error) +} + +type SceneReader interface { + SceneFinder + // TODO - remove this in another PR + Find(id int) (*Scene, error) FindByChecksum(checksum string) (*Scene, error) FindByOSHash(oshash string) (*Scene, error) FindByPath(path string) (*Scene, error) @@ -23,7 +61,7 @@ type SceneReader interface { CountMissingOSHash() (int, error) Wall(q *string) ([]*Scene, error) All() ([]*Scene, error) - Query(sceneFilter *SceneFilterType, findFilter *FindFilterType) ([]*Scene, int, error) + Query(options SceneQueryOptions) (*SceneQueryResult, error) GetCover(sceneID int) ([]byte, error) GetMovies(sceneID int) ([]MoviesScenes, error) GetTagIDs(sceneID int) ([]int, error) diff --git a/pkg/models/scraped.go b/pkg/models/scraped.go index ecbddf68a..f57a8409a 100644 --- a/pkg/models/scraped.go +++ b/pkg/models/scraped.go @@ -1,5 +1,9 @@ package models +import "errors" + +var ErrScraperSource = errors.New("invalid ScraperSource") + type ScrapedItemReader interface { All() ([]*ScrapedItem, error) } diff --git a/pkg/models/sql.go b/pkg/models/sql.go index ea33f3245..f4960d84b 100644 --- a/pkg/models/sql.go +++ b/pkg/models/sql.go @@ -1,6 +1,9 @@ package models -import "database/sql" +import ( + "database/sql" + "strconv" +) func NullString(v string) sql.NullString { return sql.NullString{ @@ -9,9 +12,43 @@ func NullString(v string) sql.NullString { } } +func NullStringPtr(v string) *sql.NullString { + return &sql.NullString{ + String: v, + Valid: true, + } +} + func NullInt64(v int64) sql.NullInt64 { return 
sql.NullInt64{ Int64: v, Valid: true, } } + +func nullStringPtrToStringPtr(v *sql.NullString) *string { + if v == nil || !v.Valid { + return nil + } + + vv := v.String + return &vv +} + +func nullInt64PtrToIntPtr(v *sql.NullInt64) *int { + if v == nil || !v.Valid { + return nil + } + + vv := int(v.Int64) + return &vv +} + +func nullInt64PtrToStringPtr(v *sql.NullInt64) *string { + if v == nil || !v.Valid { + return nil + } + + vv := strconv.FormatInt(v.Int64, 10) + return &vv +} diff --git a/pkg/models/sqlite_date.go b/pkg/models/sqlite_date.go index bd9ebf8cd..e11bf462c 100644 --- a/pkg/models/sqlite_date.go +++ b/pkg/models/sqlite_date.go @@ -44,3 +44,12 @@ func (t SQLiteDate) Value() (driver.Value, error) { } return result, nil } + +func (t *SQLiteDate) StringPtr() *string { + if t == nil || !t.Valid { + return nil + } + + vv := t.String + return &vv +} diff --git a/pkg/models/stash_box.go b/pkg/models/stash_box.go new file mode 100644 index 000000000..3e981484b --- /dev/null +++ b/pkg/models/stash_box.go @@ -0,0 +1,39 @@ +package models + +import ( + "fmt" + "strings" +) + +type StashBoxes []*StashBox + +func (sb StashBoxes) ResolveStashBox(source ScraperSourceInput) (*StashBox, error) { + if source.StashBoxIndex != nil { + index := source.StashBoxIndex + if *index < 0 || *index >= len(sb) { + return nil, fmt.Errorf("%w: invalid stash_box_index: %d", ErrScraperSource, index) + } + + return sb[*index], nil + } + + if source.StashBoxEndpoint != nil { + var ret *StashBox + endpoint := *source.StashBoxEndpoint + for _, b := range sb { + if strings.EqualFold(endpoint, b.Endpoint) { + ret = b + } + } + + if ret == nil { + return nil, fmt.Errorf(`%w: stash-box with endpoint "%s"`, ErrNotFound, endpoint) + } + + return ret, nil + } + + // neither stash-box inputs were provided, so assume it is a scraper + + return nil, nil +} diff --git a/pkg/models/studio.go b/pkg/models/studio.go index 6eec0cdf2..e5d6bfb19 100644 --- a/pkg/models/studio.go +++ b/pkg/models/studio.go 
@@ -5,6 +5,7 @@ type StudioReader interface { FindMany(ids []int) ([]*Studio, error) FindChildren(id int) ([]*Studio, error) FindByName(name string, nocase bool) (*Studio, error) + FindByStashID(stashID StashID) ([]*Studio, error) Count() (int, error) All() ([]*Studio, error) // TODO - this interface is temporary until the filter schema can fully diff --git a/pkg/models/tag.go b/pkg/models/tag.go index bcb7096f0..747e7a08e 100644 --- a/pkg/models/tag.go +++ b/pkg/models/tag.go @@ -20,8 +20,8 @@ type TagReader interface { Query(tagFilter *TagFilterType, findFilter *FindFilterType) ([]*Tag, int, error) GetImage(tagID int) ([]byte, error) GetAliases(tagID int) ([]string, error) - FindAllAncestors(tagID int, excludeIDs []int) ([]*Tag, error) - FindAllDescendants(tagID int, excludeIDs []int) ([]*Tag, error) + FindAllAncestors(tagID int, excludeIDs []int) ([]*TagPath, error) + FindAllDescendants(tagID int, excludeIDs []int) ([]*TagPath, error) } type TagWriter interface { diff --git a/pkg/models/transaction.go b/pkg/models/transaction.go index c30f4cb9a..291038b0c 100644 --- a/pkg/models/transaction.go +++ b/pkg/models/transaction.go @@ -38,7 +38,9 @@ func WithTxn(txn Transaction, fn func(r Repository) error) error { logger.Warnf("error while trying to roll back transaction: %v", err) } panic(p) - } else if err != nil { + } + + if err != nil { // something went wrong, rollback if err := txn.Rollback(); err != nil { logger.Warnf("error while trying to roll back transaction: %v", err) @@ -66,7 +68,9 @@ func WithROTxn(txn ReadTransaction, fn func(r ReaderRepository) error) error { logger.Warnf("error while trying to roll back RO transaction: %v", err) } panic(p) - } else if err != nil { + } + + if err != nil { // something went wrong, rollback if err := txn.Rollback(); err != nil { logger.Warnf("error while trying to roll back RO transaction: %v", err) diff --git a/pkg/movie/export.go b/pkg/movie/export.go index 9ffd31a2f..74d3c892c 100644 --- a/pkg/movie/export.go +++ 
b/pkg/movie/export.go @@ -46,7 +46,7 @@ func ToJSON(reader models.MovieReader, studioReader models.StudioReader, movie * if movie.StudioID.Valid { studio, err := studioReader.Find(int(movie.StudioID.Int64)) if err != nil { - return nil, fmt.Errorf("error getting movie studio: %s", err.Error()) + return nil, fmt.Errorf("error getting movie studio: %v", err) } if studio != nil { @@ -56,7 +56,7 @@ func ToJSON(reader models.MovieReader, studioReader models.StudioReader, movie * frontImage, err := reader.GetFrontImage(movie.ID) if err != nil { - return nil, fmt.Errorf("error getting movie front image: %s", err.Error()) + return nil, fmt.Errorf("error getting movie front image: %v", err) } if len(frontImage) > 0 { @@ -65,7 +65,7 @@ func ToJSON(reader models.MovieReader, studioReader models.StudioReader, movie * backImage, err := reader.GetBackImage(movie.ID) if err != nil { - return nil, fmt.Errorf("error getting movie back image: %s", err.Error()) + return nil, fmt.Errorf("error getting movie back image: %v", err) } if len(backImage) > 0 { diff --git a/pkg/movie/export_test.go b/pkg/movie/export_test.go index b03de426c..d9fed193a 100644 --- a/pkg/movie/export_test.go +++ b/pkg/movie/export_test.go @@ -213,11 +213,12 @@ func TestToJSON(t *testing.T) { movie := s.movie json, err := ToJSON(mockMovieReader, mockStudioReader, &movie) - if !s.err && err != nil { + switch { + case !s.err && err != nil: t.Errorf("[%d] unexpected error: %s", i, err.Error()) - } else if s.err && err == nil { + case s.err && err == nil: t.Errorf("[%d] expected error not returned", i) - } else { + default: assert.Equal(t, s.expected, json, "[%d]", i) } } diff --git a/pkg/movie/import.go b/pkg/movie/import.go index 7108944fb..97a5c7e33 100644 --- a/pkg/movie/import.go +++ b/pkg/movie/import.go @@ -31,13 +31,13 @@ func (i *Importer) PreImport() error { if len(i.Input.FrontImage) > 0 { _, i.frontImageData, err = utils.ProcessBase64Image(i.Input.FrontImage) if err != nil { - return fmt.Errorf("invalid 
front_image: %s", err.Error()) + return fmt.Errorf("invalid front_image: %v", err) } } if len(i.Input.BackImage) > 0 { _, i.backImageData, err = utils.ProcessBase64Image(i.Input.BackImage) if err != nil { - return fmt.Errorf("invalid back_image: %s", err.Error()) + return fmt.Errorf("invalid back_image: %v", err) } } @@ -74,7 +74,7 @@ func (i *Importer) populateStudio() error { if i.Input.Studio != "" { studio, err := i.StudioWriter.FindByName(i.Input.Studio, false) if err != nil { - return fmt.Errorf("error finding studio by name: %s", err.Error()) + return fmt.Errorf("error finding studio by name: %v", err) } if studio == nil { @@ -118,7 +118,7 @@ func (i *Importer) createStudio(name string) (int, error) { func (i *Importer) PostImport(id int) error { if len(i.frontImageData) > 0 { if err := i.ReaderWriter.UpdateImages(id, i.frontImageData, i.backImageData); err != nil { - return fmt.Errorf("error setting movie images: %s", err.Error()) + return fmt.Errorf("error setting movie images: %v", err) } } @@ -147,7 +147,7 @@ func (i *Importer) FindExistingID() (*int, error) { func (i *Importer) Create() (*int, error) { created, err := i.ReaderWriter.Create(i.movie) if err != nil { - return nil, fmt.Errorf("error creating movie: %s", err.Error()) + return nil, fmt.Errorf("error creating movie: %v", err) } id := created.ID @@ -159,7 +159,7 @@ func (i *Importer) Update(id int) error { movie.ID = id _, err := i.ReaderWriter.UpdateFull(movie) if err != nil { - return fmt.Errorf("error updating existing movie: %s", err.Error()) + return fmt.Errorf("error updating existing movie: %v", err) } return nil diff --git a/pkg/performer/export.go b/pkg/performer/export.go index 555abe58d..55e3beaf0 100644 --- a/pkg/performer/export.go +++ b/pkg/performer/export.go @@ -84,13 +84,25 @@ func ToJSON(reader models.PerformerReader, performer *models.Performer) (*jsonsc image, err := reader.GetImage(performer.ID) if err != nil { - return nil, fmt.Errorf("error getting performers image: %s", 
err.Error()) + return nil, fmt.Errorf("error getting performers image: %v", err) } if len(image) > 0 { newPerformerJSON.Image = utils.GetBase64StringFromData(image) } + stashIDs, _ := reader.GetStashIDs(performer.ID) + var ret []models.StashID + for _, stashID := range stashIDs { + newJoin := models.StashID{ + StashID: stashID.StashID, + Endpoint: stashID.Endpoint, + } + ret = append(ret, newJoin) + } + + newPerformerJSON.StashIDs = ret + return &newPerformerJSON, nil } diff --git a/pkg/performer/export_test.go b/pkg/performer/export_test.go index 0ec00a93b..1851bd13f 100644 --- a/pkg/performer/export_test.go +++ b/pkg/performer/export_test.go @@ -44,6 +44,14 @@ const ( var imageBytes = []byte("imageBytes") +var stashID = models.StashID{ + StashID: "StashID", + Endpoint: "Endpoint", +} +var stashIDs = []*models.StashID{ + &stashID, +} + const image = "aW1hZ2VCeXRlcw==" var birthDate = models.SQLiteDate{ @@ -144,6 +152,9 @@ func createFullJSONPerformer(name string, image string) *jsonschema.Performer { DeathDate: deathDate.String, HairColor: hairColor, Weight: weight, + StashIDs: []models.StashID{ + stashID, + }, } } @@ -197,15 +208,19 @@ func TestToJSON(t *testing.T) { mockPerformerReader.On("GetImage", noImageID).Return(nil, nil).Once() mockPerformerReader.On("GetImage", errImageID).Return(nil, imageErr).Once() + mockPerformerReader.On("GetStashIDs", performerID).Return(stashIDs, nil).Once() + mockPerformerReader.On("GetStashIDs", noImageID).Return(nil, nil).Once() + for i, s := range scenarios { tag := s.input json, err := ToJSON(mockPerformerReader, &tag) - if !s.err && err != nil { + switch { + case !s.err && err != nil: t.Errorf("[%d] unexpected error: %s", i, err.Error()) - } else if s.err && err == nil { + case s.err && err == nil: t.Errorf("[%d] expected error not returned", i) - } else { + default: assert.Equal(t, s.expected, json, "[%d]", i) } } diff --git a/pkg/performer/import.go b/pkg/performer/import.go index db32e1286..e4589995d 100644 --- 
a/pkg/performer/import.go +++ b/pkg/performer/import.go @@ -34,7 +34,7 @@ func (i *Importer) PreImport() error { if len(i.Input.Image) > 0 { _, i.imageData, err = utils.ProcessBase64Image(i.Input.Image) if err != nil { - return fmt.Errorf("invalid image: %s", err.Error()) + return fmt.Errorf("invalid image: %v", err) } } @@ -78,7 +78,7 @@ func importTags(tagWriter models.TagReaderWriter, names []string, missingRefBeha if missingRefBehaviour == models.ImportMissingRefEnumCreate { createdTags, err := createTags(tagWriter, missingTags) if err != nil { - return nil, fmt.Errorf("error creating tags: %s", err.Error()) + return nil, fmt.Errorf("error creating tags: %v", err) } tags = append(tags, createdTags...) @@ -113,13 +113,19 @@ func (i *Importer) PostImport(id int) error { tagIDs = append(tagIDs, t.ID) } if err := i.ReaderWriter.UpdateTags(id, tagIDs); err != nil { - return fmt.Errorf("failed to associate tags: %s", err.Error()) + return fmt.Errorf("failed to associate tags: %v", err) } } if len(i.imageData) > 0 { if err := i.ReaderWriter.UpdateImage(id, i.imageData); err != nil { - return fmt.Errorf("error setting performer image: %s", err.Error()) + return fmt.Errorf("error setting performer image: %v", err) + } + } + + if len(i.Input.StashIDs) > 0 { + if err := i.ReaderWriter.UpdateStashIDs(id, i.Input.StashIDs); err != nil { + return fmt.Errorf("error setting stash id: %v", err) } } @@ -148,7 +154,7 @@ func (i *Importer) FindExistingID() (*int, error) { func (i *Importer) Create() (*int, error) { created, err := i.ReaderWriter.Create(i.performer) if err != nil { - return nil, fmt.Errorf("error creating performer: %s", err.Error()) + return nil, fmt.Errorf("error creating performer: %v", err) } id := created.ID @@ -160,7 +166,7 @@ func (i *Importer) Update(id int) error { performer.ID = id _, err := i.ReaderWriter.UpdateFull(performer) if err != nil { - return fmt.Errorf("error updating existing performer: %s", err.Error()) + return fmt.Errorf("error updating 
existing performer: %v", err) } return nil diff --git a/pkg/plugin/config.go b/pkg/plugin/config.go index a56c5520d..05501b4e2 100644 --- a/pkg/plugin/config.go +++ b/pkg/plugin/config.go @@ -173,7 +173,7 @@ func (c Config) getExecCommand(task *OperationConfig) []string { continue } - ret[i] = strings.Replace(arg, "{pluginDir}", dir, -1) + ret[i] = strings.ReplaceAll(arg, "{pluginDir}", dir) } return ret diff --git a/pkg/plugin/examples/common/graphql.go b/pkg/plugin/examples/common/graphql.go index 624c0d5d8..fc045b3b8 100644 --- a/pkg/plugin/examples/common/graphql.go +++ b/pkg/plugin/examples/common/graphql.go @@ -28,8 +28,10 @@ type TagDestroyInput struct { } type FindScenesResultType struct { - Count graphql.Int - Scenes []Scene + Count graphql.Int + DurationSeconds graphql.Float + FilesizeBytes graphql.Int + Scenes []Scene } type Tag struct { @@ -66,7 +68,7 @@ type SceneUpdateInput struct { TagIds []graphql.ID `graphql:"tag_ids" json:"tag_ids"` } -func getTagID(client *graphql.Client, create bool) (*graphql.ID, error) { +func getTagID(ctx context.Context, client *graphql.Client, create bool) (*graphql.ID, error) { log.Info("Checking if tag exists already") // see if tag exists already @@ -74,7 +76,7 @@ func getTagID(client *graphql.Client, create bool) (*graphql.ID, error) { AllTags []Tag `graphql:"allTags"` } - err := client.Query(context.Background(), &q, nil) + err := client.Query(ctx, &q, nil) if err != nil { return nil, fmt.Errorf("Error getting tags: %s\n", err.Error()) } @@ -106,7 +108,7 @@ func getTagID(client *graphql.Client, create bool) (*graphql.ID, error) { log.Info("Creating new tag") - err = client.Mutate(context.Background(), &m, vars) + err = client.Mutate(ctx, &m, vars) if err != nil { return nil, fmt.Errorf("Error mutating scene: %s\n", err.Error()) } @@ -114,7 +116,7 @@ func getTagID(client *graphql.Client, create bool) (*graphql.ID, error) { return &m.TagCreate.ID, nil } -func findRandomScene(client *graphql.Client) (*Scene, error) { 
+func findRandomScene(ctx context.Context, client *graphql.Client) (*Scene, error) { // get a random scene var q struct { FindScenes FindScenesResultType `graphql:"findScenes(filter: $c)"` @@ -132,7 +134,7 @@ func findRandomScene(client *graphql.Client) (*Scene, error) { } log.Info("Finding a random scene") - err := client.Query(context.Background(), &q, vars) + err := client.Query(ctx, &q, vars) if err != nil { return nil, fmt.Errorf("Error getting random scene: %s\n", err.Error()) } @@ -155,14 +157,14 @@ func addTagId(tagIds []graphql.ID, tagId graphql.ID) []graphql.ID { return tagIds } -func AddTag(client *graphql.Client) error { - tagID, err := getTagID(client, true) +func AddTag(ctx context.Context, client *graphql.Client) error { + tagID, err := getTagID(ctx, client, true) if err != nil { return err } - scene, err := findRandomScene(client) + scene, err := findRandomScene(ctx, client) if err != nil { return err @@ -188,16 +190,16 @@ func AddTag(client *graphql.Client) error { } log.Infof("Adding tag to scene %v", scene.ID) - err = client.Mutate(context.Background(), &m, vars) + err = client.Mutate(ctx, &m, vars) if err != nil { - return fmt.Errorf("Error mutating scene: %s", err.Error()) + return fmt.Errorf("Error mutating scene: %v", err) } return nil } -func RemoveTag(client *graphql.Client) error { - tagID, err := getTagID(client, false) +func RemoveTag(ctx context.Context, client *graphql.Client) error { + tagID, err := getTagID(ctx, client, false) if err != nil { return err @@ -223,9 +225,9 @@ func RemoveTag(client *graphql.Client) error { log.Info("Destroying tag") - err = client.Mutate(context.Background(), &m, vars) + err = client.Mutate(ctx, &m, vars) if err != nil { - return fmt.Errorf("Error destroying tag: %s", err.Error()) + return fmt.Errorf("Error destroying tag: %v", err) } return nil diff --git a/pkg/plugin/js.go b/pkg/plugin/js.go index 1c8f18a32..2ebbb6ccc 100644 --- a/pkg/plugin/js.go +++ b/pkg/plugin/js.go @@ -1,6 +1,7 @@ package plugin 
import ( + "context" "errors" "fmt" "path/filepath" @@ -84,7 +85,7 @@ func (t *jsPluginTask) Start() error { return fmt.Errorf("error adding util API: %w", err) } - if err := js.AddGQLAPI(t.vm, t.input.ServerConnection.SessionCookie, t.gqlHandler); err != nil { + if err := js.AddGQLAPI(context.TODO(), t.vm, t.input.ServerConnection.SessionCookie, t.gqlHandler); err != nil { return fmt.Errorf("error adding GraphQL API: %w", err) } @@ -97,7 +98,7 @@ func (t *jsPluginTask) Start() error { t.waitGroup.Done() if caught := recover(); caught != nil { - if caught == errStop { + if err, ok := caught.(error); ok && errors.Is(err, errStop) { // TODO - log this return } diff --git a/pkg/plugin/js/gql.go b/pkg/plugin/js/gql.go index 45f9ac9e7..9c8461177 100644 --- a/pkg/plugin/js/gql.go +++ b/pkg/plugin/js/gql.go @@ -2,6 +2,7 @@ package js import ( "bytes" + "context" "encoding/json" "fmt" "net/http" @@ -33,7 +34,7 @@ func throw(vm *otto.Otto, str string) { panic(value) } -func gqlRequestFunc(vm *otto.Otto, cookie *http.Cookie, gqlHandler http.Handler) func(call otto.FunctionCall) otto.Value { +func gqlRequestFunc(ctx context.Context, vm *otto.Otto, cookie *http.Cookie, gqlHandler http.Handler) func(call otto.FunctionCall) otto.Value { return func(call otto.FunctionCall) otto.Value { if len(call.ArgumentList) == 0 { throw(vm, "missing argument") @@ -61,7 +62,7 @@ func gqlRequestFunc(vm *otto.Otto, cookie *http.Cookie, gqlHandler http.Handler) throw(vm, err.Error()) } - r, err := http.NewRequest("POST", "/graphql", &body) + r, err := http.NewRequestWithContext(ctx, "POST", "/graphql", &body) if err != nil { throw(vm, "could not make request") } @@ -103,9 +104,9 @@ func gqlRequestFunc(vm *otto.Otto, cookie *http.Cookie, gqlHandler http.Handler) } } -func AddGQLAPI(vm *otto.Otto, cookie *http.Cookie, gqlHandler http.Handler) error { +func AddGQLAPI(ctx context.Context, vm *otto.Otto, cookie *http.Cookie, gqlHandler http.Handler) error { gql, _ := vm.Object("({})") - if err := 
gql.Set("Do", gqlRequestFunc(vm, cookie, gqlHandler)); err != nil { + if err := gql.Set("Do", gqlRequestFunc(ctx, vm, cookie, gqlHandler)); err != nil { return fmt.Errorf("unable to set GraphQL Do function: %w", err) } diff --git a/pkg/plugin/plugins.go b/pkg/plugin/plugins.go index 7322221a9..8f88e3d06 100644 --- a/pkg/plugin/plugins.go +++ b/pkg/plugin/plugins.go @@ -13,6 +13,7 @@ import ( "net/http" "os" "path/filepath" + "strconv" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/manager/config" @@ -179,6 +180,15 @@ func (c Cache) ExecutePostHooks(ctx context.Context, id int, hookType HookTrigge } } +func (c Cache) ExecuteSceneUpdatePostHooks(ctx context.Context, input models.SceneUpdateInput, inputFields []string) { + id, err := strconv.Atoi(input.ID) + if err != nil { + logger.Errorf("error converting id in SceneUpdatePostHooks: %v", err) + return + } + c.ExecutePostHooks(ctx, id, SceneUpdatePost, input, inputFields) +} + func (c Cache) executePostHooks(ctx context.Context, hookType HookTriggerEnum, hookContext common.HookContext) error { visitedPlugins := session.GetVisitedPlugins(ctx) diff --git a/pkg/plugin/raw.go b/pkg/plugin/raw.go index 03d084ccd..1fcc6ad87 100644 --- a/pkg/plugin/raw.go +++ b/pkg/plugin/raw.go @@ -29,6 +29,19 @@ type rawPluginTask struct { done chan bool } +func FindPythonExecutable() (string, error) { + _, err := exec.LookPath("python3") + + if err != nil { + _, err = exec.LookPath("python") + if err != nil { + return "", err + } + return "python", nil + } + return "python3", nil +} + func (t *rawPluginTask) Start() error { if t.started { return errors.New("task already started") @@ -39,11 +52,18 @@ func (t *rawPluginTask) Start() error { return fmt.Errorf("empty exec value in operation %s", t.operation.Name) } + if command[0] == "python" || command[0] == "python3" { + executable, err := FindPythonExecutable() + if err == nil { + command[0] = executable + } + } + cmd := exec.Command(command[0], command[1:]...) 
stdin, err := cmd.StdinPipe() if err != nil { - return fmt.Errorf("error getting plugin process stdin: %s", err.Error()) + return fmt.Errorf("error getting plugin process stdin: %v", err) } go func() { @@ -68,7 +88,7 @@ func (t *rawPluginTask) Start() error { t.waitGroup.Add(1) t.done = make(chan bool, 1) if err = cmd.Start(); err != nil { - return fmt.Errorf("error running plugin: %s", err.Error()) + return fmt.Errorf("error running plugin: %v", err) } go t.handlePluginStderr(t.plugin.Name, stderr) diff --git a/pkg/scene/export.go b/pkg/scene/export.go index 5f723cdf5..352ce32b9 100644 --- a/pkg/scene/export.go +++ b/pkg/scene/export.go @@ -58,13 +58,25 @@ func ToBasicJSON(reader models.SceneReader, scene *models.Scene) (*jsonschema.Sc cover, err := reader.GetCover(scene.ID) if err != nil { - return nil, fmt.Errorf("error getting scene cover: %s", err.Error()) + return nil, fmt.Errorf("error getting scene cover: %v", err) } if len(cover) > 0 { newSceneJSON.Cover = utils.GetBase64StringFromData(cover) } + stashIDs, _ := reader.GetStashIDs(scene.ID) + var ret []models.StashID + for _, stashID := range stashIDs { + newJoin := models.StashID{ + StashID: stashID.StashID, + Endpoint: stashID.Endpoint, + } + ret = append(ret, newJoin) + } + + newSceneJSON.StashIDs = ret + return &newSceneJSON, nil } @@ -136,7 +148,7 @@ func GetStudioName(reader models.StudioReader, scene *models.Scene) (string, err func GetTagNames(reader models.TagReader, scene *models.Scene) ([]string, error) { tags, err := reader.FindBySceneID(scene.ID) if err != nil { - return nil, fmt.Errorf("error getting scene tags: %s", err.Error()) + return nil, fmt.Errorf("error getting scene tags: %v", err) } return getTagNames(tags), nil @@ -175,7 +187,7 @@ func GetDependentTagIDs(tags models.TagReader, markerReader models.SceneMarkerRe ret = utils.IntAppendUnique(ret, smm.PrimaryTagID) smmt, err := tags.FindBySceneMarkerID(smm.ID) if err != nil { - return nil, fmt.Errorf("invalid tags for scene marker: %s", 
err.Error()) + return nil, fmt.Errorf("invalid tags for scene marker: %v", err) } for _, smmtt := range smmt { @@ -191,14 +203,14 @@ func GetDependentTagIDs(tags models.TagReader, markerReader models.SceneMarkerRe func GetSceneMoviesJSON(movieReader models.MovieReader, sceneReader models.SceneReader, scene *models.Scene) ([]jsonschema.SceneMovie, error) { sceneMovies, err := sceneReader.GetMovies(scene.ID) if err != nil { - return nil, fmt.Errorf("error getting scene movies: %s", err.Error()) + return nil, fmt.Errorf("error getting scene movies: %v", err) } var results []jsonschema.SceneMovie for _, sceneMovie := range sceneMovies { movie, err := movieReader.Find(sceneMovie.MovieID) if err != nil { - return nil, fmt.Errorf("error getting movie: %s", err.Error()) + return nil, fmt.Errorf("error getting movie: %v", err) } if movie.Name.Valid { @@ -234,7 +246,7 @@ func GetDependentMovieIDs(sceneReader models.SceneReader, scene *models.Scene) ( func GetSceneMarkersJSON(markerReader models.SceneMarkerReader, tagReader models.TagReader, scene *models.Scene) ([]jsonschema.SceneMarker, error) { sceneMarkers, err := markerReader.FindBySceneID(scene.ID) if err != nil { - return nil, fmt.Errorf("error getting scene markers: %s", err.Error()) + return nil, fmt.Errorf("error getting scene markers: %v", err) } var results []jsonschema.SceneMarker @@ -242,12 +254,12 @@ func GetSceneMarkersJSON(markerReader models.SceneMarkerReader, tagReader models for _, sceneMarker := range sceneMarkers { primaryTag, err := tagReader.Find(sceneMarker.PrimaryTagID) if err != nil { - return nil, fmt.Errorf("invalid primary tag for scene marker: %s", err.Error()) + return nil, fmt.Errorf("invalid primary tag for scene marker: %v", err) } sceneMarkerTags, err := tagReader.FindBySceneMarkerID(sceneMarker.ID) if err != nil { - return nil, fmt.Errorf("invalid tags for scene marker: %s", err.Error()) + return nil, fmt.Errorf("invalid tags for scene marker: %v", err) } sceneMarkerJSON := 
jsonschema.SceneMarker{ diff --git a/pkg/scene/export_test.go b/pkg/scene/export_test.go index b0b4f7834..15aec8f4c 100644 --- a/pkg/scene/export_test.go +++ b/pkg/scene/export_test.go @@ -85,7 +85,15 @@ var names = []string{ var imageBytes = []byte("imageBytes") -const image = "aW1hZ2VCeXRlcw==" +var stashID = models.StashID{ + StashID: "StashID", + Endpoint: "Endpoint", +} +var stashIDs = []*models.StashID{ + &stashID, +} + +const imageBase64 = "aW1hZ2VCeXRlcw==" var ( createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC) @@ -174,6 +182,9 @@ func createFullJSONScene(image string) *jsonschema.Scene { Time: updateTime, }, Cover: image, + StashIDs: []models.StashID{ + stashID, + }, } } @@ -198,7 +209,7 @@ type basicTestScenario struct { var scenarios = []basicTestScenario{ { createFullScene(sceneID), - createFullJSONScene(image), + createFullJSONScene(imageBase64), false, }, { @@ -222,15 +233,19 @@ func TestToJSON(t *testing.T) { mockSceneReader.On("GetCover", noImageID).Return(nil, nil).Once() mockSceneReader.On("GetCover", errImageID).Return(nil, imageErr).Once() + mockSceneReader.On("GetStashIDs", sceneID).Return(stashIDs, nil).Once() + mockSceneReader.On("GetStashIDs", noImageID).Return(nil, nil).Once() + for i, s := range scenarios { scene := s.input json, err := ToBasicJSON(mockSceneReader, &scene) - if !s.err && err != nil { + switch { + case !s.err && err != nil: t.Errorf("[%d] unexpected error: %s", i, err.Error()) - } else if s.err && err == nil { + case s.err && err == nil: t.Errorf("[%d] expected error not returned", i) - } else { + default: assert.Equal(t, s.expected, json, "[%d]", i) } } @@ -283,11 +298,12 @@ func TestGetStudioName(t *testing.T) { scene := s.input json, err := GetStudioName(mockStudioReader, &scene) - if !s.err && err != nil { + switch { + case !s.err && err != nil: t.Errorf("[%d] unexpected error: %s", i, err.Error()) - } else if s.err && err == nil { + case s.err && err == nil: t.Errorf("[%d] expected error not returned", i) - } 
else { + default: assert.Equal(t, s.expected, json, "[%d]", i) } } @@ -343,11 +359,12 @@ func TestGetTagNames(t *testing.T) { scene := s.input json, err := GetTagNames(mockTagReader, &scene) - if !s.err && err != nil { + switch { + case !s.err && err != nil: t.Errorf("[%d] unexpected error: %s", i, err.Error()) - } else if s.err && err == nil { + case s.err && err == nil: t.Errorf("[%d] expected error not returned", i) - } else { + default: assert.Equal(t, s.expected, json, "[%d]", i) } } @@ -435,11 +452,12 @@ func TestGetSceneMoviesJSON(t *testing.T) { scene := s.input json, err := GetSceneMoviesJSON(mockMovieReader, mockSceneReader, &scene) - if !s.err && err != nil { + switch { + case !s.err && err != nil: t.Errorf("[%d] unexpected error: %s", i, err.Error()) - } else if s.err && err == nil { + case s.err && err == nil: t.Errorf("[%d] expected error not returned", i) - } else { + default: assert.Equal(t, s.expected, json, "[%d]", i) } } @@ -617,11 +635,12 @@ func TestGetSceneMarkersJSON(t *testing.T) { scene := s.input json, err := GetSceneMarkersJSON(mockMarkerReader, mockTagReader, &scene) - if !s.err && err != nil { + switch { + case !s.err && err != nil: t.Errorf("[%d] unexpected error: %s", i, err.Error()) - } else if s.err && err == nil { + case s.err && err == nil: t.Errorf("[%d] expected error not returned", i) - } else { + default: assert.Equal(t, s.expected, json, "[%d]", i) } } diff --git a/pkg/scene/import.go b/pkg/scene/import.go index a1cad8808..abd087ea1 100644 --- a/pkg/scene/import.go +++ b/pkg/scene/import.go @@ -59,7 +59,7 @@ func (i *Importer) PreImport() error { if len(i.Input.Cover) > 0 { _, i.coverImageData, err = utils.ProcessBase64Image(i.Input.Cover) if err != nil { - return fmt.Errorf("invalid cover image: %s", err.Error()) + return fmt.Errorf("invalid cover image: %v", err) } } @@ -138,7 +138,7 @@ func (i *Importer) populateStudio() error { if i.Input.Studio != "" { studio, err := i.StudioWriter.FindByName(i.Input.Studio, false) if 
err != nil { - return fmt.Errorf("error finding studio by name: %s", err.Error()) + return fmt.Errorf("error finding studio by name: %v", err) } if studio == nil { @@ -238,7 +238,7 @@ func (i *Importer) populatePerformers() error { if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { createdPerformers, err := i.createPerformers(missingPerformers) if err != nil { - return fmt.Errorf("error creating scene performers: %s", err.Error()) + return fmt.Errorf("error creating scene performers: %v", err) } performers = append(performers, createdPerformers...) @@ -274,7 +274,7 @@ func (i *Importer) populateMovies() error { for _, inputMovie := range i.Input.Movies { movie, err := i.MovieWriter.FindByName(inputMovie.MovieName, false) if err != nil { - return fmt.Errorf("error finding scene movie: %s", err.Error()) + return fmt.Errorf("error finding scene movie: %v", err) } if movie == nil { @@ -285,7 +285,7 @@ func (i *Importer) populateMovies() error { if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { movie, err = i.createMovie(inputMovie.MovieName) if err != nil { - return fmt.Errorf("error creating scene movie: %s", err.Error()) + return fmt.Errorf("error creating scene movie: %v", err) } } @@ -341,7 +341,7 @@ func (i *Importer) populateTags() error { func (i *Importer) PostImport(id int) error { if len(i.coverImageData) > 0 { if err := i.ReaderWriter.UpdateCover(id, i.coverImageData); err != nil { - return fmt.Errorf("error setting scene images: %s", err.Error()) + return fmt.Errorf("error setting scene images: %v", err) } } @@ -352,7 +352,7 @@ func (i *Importer) PostImport(id int) error { } if err := i.ReaderWriter.UpdateGalleries(id, galleryIDs); err != nil { - return fmt.Errorf("failed to associate galleries: %s", err.Error()) + return fmt.Errorf("failed to associate galleries: %v", err) } } @@ -363,7 +363,7 @@ func (i *Importer) PostImport(id int) error { } if err := i.ReaderWriter.UpdatePerformers(id, performerIDs); err != nil { - return 
fmt.Errorf("failed to associate performers: %s", err.Error()) + return fmt.Errorf("failed to associate performers: %v", err) } } @@ -372,7 +372,7 @@ func (i *Importer) PostImport(id int) error { i.movies[index].SceneID = id } if err := i.ReaderWriter.UpdateMovies(id, i.movies); err != nil { - return fmt.Errorf("failed to associate movies: %s", err.Error()) + return fmt.Errorf("failed to associate movies: %v", err) } } @@ -382,7 +382,13 @@ func (i *Importer) PostImport(id int) error { tagIDs = append(tagIDs, t.ID) } if err := i.ReaderWriter.UpdateTags(id, tagIDs); err != nil { - return fmt.Errorf("failed to associate tags: %s", err.Error()) + return fmt.Errorf("failed to associate tags: %v", err) + } + } + + if len(i.Input.StashIDs) > 0 { + if err := i.ReaderWriter.UpdateStashIDs(id, i.Input.StashIDs); err != nil { + return fmt.Errorf("error setting stash id: %v", err) } } @@ -396,11 +402,13 @@ func (i *Importer) Name() string { func (i *Importer) FindExistingID() (*int, error) { var existing *models.Scene var err error - if i.FileNamingAlgorithm == models.HashAlgorithmMd5 { + + switch i.FileNamingAlgorithm { + case models.HashAlgorithmMd5: existing, err = i.ReaderWriter.FindByChecksum(i.Input.Checksum) - } else if i.FileNamingAlgorithm == models.HashAlgorithmOshash { + case models.HashAlgorithmOshash: existing, err = i.ReaderWriter.FindByOSHash(i.Input.OSHash) - } else { + default: panic("unknown file naming algorithm") } @@ -419,7 +427,7 @@ func (i *Importer) FindExistingID() (*int, error) { func (i *Importer) Create() (*int, error) { created, err := i.ReaderWriter.Create(i.scene) if err != nil { - return nil, fmt.Errorf("error creating scene: %s", err.Error()) + return nil, fmt.Errorf("error creating scene: %v", err) } id := created.ID @@ -433,7 +441,7 @@ func (i *Importer) Update(id int) error { i.ID = id _, err := i.ReaderWriter.UpdateFull(scene) if err != nil { - return fmt.Errorf("error updating existing scene: %s", err.Error()) + return fmt.Errorf("error 
updating existing scene: %v", err) } return nil @@ -462,7 +470,7 @@ func importTags(tagWriter models.TagReaderWriter, names []string, missingRefBeha if missingRefBehaviour == models.ImportMissingRefEnumCreate { createdTags, err := createTags(tagWriter, missingTags) if err != nil { - return nil, fmt.Errorf("error creating tags: %s", err.Error()) + return nil, fmt.Errorf("error creating tags: %v", err) } tags = append(tags, createdTags...) diff --git a/pkg/scene/import_test.go b/pkg/scene/import_test.go index 3f59c3cf3..2a8122551 100644 --- a/pkg/scene/import_test.go +++ b/pkg/scene/import_test.go @@ -75,7 +75,7 @@ func TestImporterPreImport(t *testing.T) { err := i.PreImport() assert.NotNil(t, err) - i.Input.Cover = image + i.Input.Cover = imageBase64 err = i.PreImport() assert.Nil(t, err) diff --git a/pkg/scene/marker_import.go b/pkg/scene/marker_import.go index d15563eb5..2258025fe 100644 --- a/pkg/scene/marker_import.go +++ b/pkg/scene/marker_import.go @@ -70,7 +70,7 @@ func (i *MarkerImporter) PostImport(id int) error { tagIDs = append(tagIDs, t.ID) } if err := i.ReaderWriter.UpdateTags(id, tagIDs); err != nil { - return fmt.Errorf("failed to associate tags: %s", err.Error()) + return fmt.Errorf("failed to associate tags: %v", err) } } @@ -101,7 +101,7 @@ func (i *MarkerImporter) FindExistingID() (*int, error) { func (i *MarkerImporter) Create() (*int, error) { created, err := i.ReaderWriter.Create(i.marker) if err != nil { - return nil, fmt.Errorf("error creating marker: %s", err.Error()) + return nil, fmt.Errorf("error creating marker: %v", err) } id := created.ID @@ -113,7 +113,7 @@ func (i *MarkerImporter) Update(id int) error { marker.ID = id _, err := i.ReaderWriter.Update(marker) if err != nil { - return fmt.Errorf("error updating existing marker: %s", err.Error()) + return fmt.Errorf("error updating existing marker: %v", err) } return nil diff --git a/pkg/manager/migrate_hash.go b/pkg/scene/migrate_hash.go similarity index 67% rename from 
pkg/manager/migrate_hash.go rename to pkg/scene/migrate_hash.go index f1f37d5d3..d0a7892e5 100644 --- a/pkg/manager/migrate_hash.go +++ b/pkg/scene/migrate_hash.go @@ -1,49 +1,50 @@ -package manager +package scene import ( "os" "path/filepath" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/manager/paths" "github.com/stashapp/stash/pkg/utils" ) -func MigrateHash(oldHash string, newHash string) { - oldPath := filepath.Join(instance.Paths.Generated.Markers, oldHash) - newPath := filepath.Join(instance.Paths.Generated.Markers, newHash) - migrate(oldPath, newPath) +func MigrateHash(p *paths.Paths, oldHash string, newHash string) { + oldPath := filepath.Join(p.Generated.Markers, oldHash) + newPath := filepath.Join(p.Generated.Markers, newHash) + migrateSceneFiles(oldPath, newPath) - scenePaths := GetInstance().Paths.Scene + scenePaths := p.Scene oldPath = scenePaths.GetThumbnailScreenshotPath(oldHash) newPath = scenePaths.GetThumbnailScreenshotPath(newHash) - migrate(oldPath, newPath) + migrateSceneFiles(oldPath, newPath) oldPath = scenePaths.GetScreenshotPath(oldHash) newPath = scenePaths.GetScreenshotPath(newHash) - migrate(oldPath, newPath) + migrateSceneFiles(oldPath, newPath) oldPath = scenePaths.GetStreamPreviewPath(oldHash) newPath = scenePaths.GetStreamPreviewPath(newHash) - migrate(oldPath, newPath) + migrateSceneFiles(oldPath, newPath) oldPath = scenePaths.GetStreamPreviewImagePath(oldHash) newPath = scenePaths.GetStreamPreviewImagePath(newHash) - migrate(oldPath, newPath) + migrateSceneFiles(oldPath, newPath) oldPath = scenePaths.GetTranscodePath(oldHash) newPath = scenePaths.GetTranscodePath(newHash) - migrate(oldPath, newPath) + migrateSceneFiles(oldPath, newPath) oldPath = scenePaths.GetSpriteVttFilePath(oldHash) newPath = scenePaths.GetSpriteVttFilePath(newHash) - migrate(oldPath, newPath) + migrateSceneFiles(oldPath, newPath) oldPath = scenePaths.GetSpriteImageFilePath(oldHash) newPath = scenePaths.GetSpriteImageFilePath(newHash) 
- migrate(oldPath, newPath) + migrateSceneFiles(oldPath, newPath) } -func migrate(oldName, newName string) { +func migrateSceneFiles(oldName, newName string) { oldExists, err := utils.FileExists(oldName) if err != nil && !os.IsNotExist(err) { logger.Errorf("Error checking existence of %s: %s", oldName, err.Error()) diff --git a/pkg/scene/query.go b/pkg/scene/query.go new file mode 100644 index 000000000..f560430c3 --- /dev/null +++ b/pkg/scene/query.go @@ -0,0 +1,125 @@ +package scene + +import ( + "context" + "fmt" + "path/filepath" + "strings" + + "github.com/stashapp/stash/pkg/job" + "github.com/stashapp/stash/pkg/models" +) + +type Queryer interface { + Query(options models.SceneQueryOptions) (*models.SceneQueryResult, error) +} + +// QueryOptions returns a SceneQueryOptions populated with the provided filters. +func QueryOptions(sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType, count bool) models.SceneQueryOptions { + return models.SceneQueryOptions{ + QueryOptions: models.QueryOptions{ + FindFilter: findFilter, + Count: count, + }, + SceneFilter: sceneFilter, + } +} + +// QueryWithCount queries for scenes, returning the scene objects and the total count. +func QueryWithCount(qb Queryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, int, error) { + // this was moved from the queryBuilder code + // left here so that calling functions can reference this instead + result, err := qb.Query(QueryOptions(sceneFilter, findFilter, true)) + if err != nil { + return nil, 0, err + } + + scenes, err := result.Resolve() + if err != nil { + return nil, 0, err + } + + return scenes, result.Count, nil +} + +// Query queries for scenes using the provided filters. 
+func Query(qb Queryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, error) { + result, err := qb.Query(QueryOptions(sceneFilter, findFilter, false)) + if err != nil { + return nil, err + } + + scenes, err := result.Resolve() + if err != nil { + return nil, err + } + + return scenes, nil +} + +func BatchProcess(ctx context.Context, reader models.SceneReader, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType, fn func(scene *models.Scene) error) error { + const batchSize = 1000 + + if findFilter == nil { + findFilter = &models.FindFilterType{} + } + + page := 1 + perPage := batchSize + findFilter.Page = &page + findFilter.PerPage = &perPage + + for more := true; more; { + if job.IsCancelled(ctx) { + return nil + } + + scenes, err := Query(reader, sceneFilter, findFilter) + if err != nil { + return fmt.Errorf("error querying for scenes: %w", err) + } + + for _, scene := range scenes { + if err := fn(scene); err != nil { + return err + } + } + + if len(scenes) != batchSize { + more = false + } else { + *findFilter.Page++ + } + } + + return nil +} + +// FilterFromPaths creates a SceneFilterType that filters using the provided +// paths. 
+func FilterFromPaths(paths []string) *models.SceneFilterType { + ret := &models.SceneFilterType{} + or := ret + sep := string(filepath.Separator) + + for _, p := range paths { + if !strings.HasSuffix(p, sep) { + p += sep + } + + if ret.Path == nil { + or = ret + } else { + newOr := &models.SceneFilterType{} + or.Or = newOr + or = newOr + } + + or.Path = &models.StringCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: p + "%", + } + } + + return ret +} diff --git a/pkg/scene/scan.go b/pkg/scene/scan.go new file mode 100644 index 000000000..9b3c80de2 --- /dev/null +++ b/pkg/scene/scan.go @@ -0,0 +1,335 @@ +package scene + +import ( + "context" + "database/sql" + "fmt" + "os" + "strconv" + "strings" + "time" + + "github.com/stashapp/stash/pkg/ffmpeg" + "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/manager/config" + "github.com/stashapp/stash/pkg/manager/paths" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/plugin" + "github.com/stashapp/stash/pkg/utils" +) + +const mutexType = "scene" + +type videoFileCreator interface { + NewVideoFile(path string, stripFileExtension bool) (*ffmpeg.VideoFile, error) +} + +type Scanner struct { + file.Scanner + + StripFileExtension bool + UseFileMetadata bool + FileNamingAlgorithm models.HashAlgorithm + + Ctx context.Context + CaseSensitiveFs bool + TxnManager models.TransactionManager + Paths *paths.Paths + Screenshotter screenshotter + VideoFileCreator videoFileCreator + PluginCache *plugin.Cache + MutexManager *utils.MutexManager +} + +func FileScanner(hasher file.Hasher, fileNamingAlgorithm models.HashAlgorithm, calculateMD5 bool) file.Scanner { + return file.Scanner{ + Hasher: hasher, + CalculateOSHash: true, + CalculateMD5: fileNamingAlgorithm == models.HashAlgorithmMd5 || calculateMD5, + } +} + +func (scanner *Scanner) ScanExisting(existing file.FileBased, file file.SourceFile) (err error) { + scanned, err := 
scanner.Scanner.ScanExisting(existing, file) + if err != nil { + return err + } + + s := existing.(*models.Scene) + + path := scanned.New.Path + interactive := getInteractive(path) + + config := config.GetInstance() + oldHash := s.GetHash(scanner.FileNamingAlgorithm) + changed := false + + var videoFile *ffmpeg.VideoFile + + if scanned.ContentsChanged() { + logger.Infof("%s has been updated: rescanning", path) + + s.SetFile(*scanned.New) + + videoFile, err = scanner.VideoFileCreator.NewVideoFile(path, scanner.StripFileExtension) + if err != nil { + return err + } + + videoFileToScene(s, videoFile) + changed = true + } else if scanned.FileUpdated() || s.Interactive != interactive { + logger.Infof("Updated scene file %s", path) + + // update fields as needed + s.SetFile(*scanned.New) + changed = true + } + + // check for container + if !s.Format.Valid { + if videoFile == nil { + videoFile, err = scanner.VideoFileCreator.NewVideoFile(path, scanner.StripFileExtension) + if err != nil { + return err + } + } + container := ffmpeg.MatchContainer(videoFile.Container, path) + logger.Infof("Adding container %s to file %s", container, path) + s.Format = models.NullString(string(container)) + changed = true + } + + if changed { + // we are operating on a checksum now, so grab a mutex on the checksum + done := make(chan struct{}) + if scanned.New.OSHash != "" { + scanner.MutexManager.Claim(mutexType, scanned.New.OSHash, done) + } + if scanned.New.Checksum != "" { + scanner.MutexManager.Claim(mutexType, scanned.New.Checksum, done) + } + + if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { + defer close(done) + qb := r.Scene() + + // ensure no clashes of hashes + if scanned.New.Checksum != "" && scanned.Old.Checksum != scanned.New.Checksum { + dupe, _ := qb.FindByChecksum(s.Checksum.String) + if dupe != nil { + return fmt.Errorf("MD5 for file %s is the same as that of %s", path, dupe.Path) + } + } + + if scanned.New.OSHash != "" && 
scanned.Old.OSHash != scanned.New.OSHash { + dupe, _ := qb.FindByOSHash(scanned.New.OSHash) + if dupe != nil { + return fmt.Errorf("OSHash for file %s is the same as that of %s", path, dupe.Path) + } + } + + s.UpdatedAt = models.SQLiteTimestamp{Timestamp: time.Now()} + + _, err := qb.UpdateFull(*s) + return err + }); err != nil { + return err + } + + // Migrate any generated files if the hash has changed + newHash := s.GetHash(config.GetVideoFileNamingAlgorithm()) + if newHash != oldHash { + MigrateHash(scanner.Paths, oldHash, newHash) + } + + scanner.PluginCache.ExecutePostHooks(scanner.Ctx, s.ID, plugin.SceneUpdatePost, nil, nil) + } + + // We already have this item in the database + // check for thumbnails, screenshots + scanner.makeScreenshots(path, videoFile, s.GetHash(scanner.FileNamingAlgorithm)) + + return nil +} + +func (scanner *Scanner) ScanNew(file file.SourceFile) (retScene *models.Scene, err error) { + scanned, err := scanner.Scanner.ScanNew(file) + if err != nil { + return nil, err + } + + path := file.Path() + checksum := scanned.Checksum + oshash := scanned.OSHash + + // grab a mutex on the checksum and oshash + done := make(chan struct{}) + if oshash != "" { + scanner.MutexManager.Claim(mutexType, oshash, done) + } + if checksum != "" { + scanner.MutexManager.Claim(mutexType, checksum, done) + } + + defer close(done) + + // check for scene by checksum and oshash - MD5 should be + // redundant, but check both + var s *models.Scene + if err := scanner.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + qb := r.Scene() + if checksum != "" { + s, _ = qb.FindByChecksum(checksum) + } + + if s == nil { + s, _ = qb.FindByOSHash(oshash) + } + + return nil + }); err != nil { + return nil, err + } + + sceneHash := oshash + + if scanner.FileNamingAlgorithm == models.HashAlgorithmMd5 { + sceneHash = checksum + } + + interactive := getInteractive(file.Path()) + + if s != nil { + exists, _ := utils.FileExists(s.Path) + if 
!scanner.CaseSensitiveFs { + // #1426 - if file exists but is a case-insensitive match for the + // original filename, then treat it as a move + if exists && strings.EqualFold(path, s.Path) { + exists = false + } + } + + if exists { + logger.Infof("%s already exists. Duplicate of %s", path, s.Path) + } else { + logger.Infof("%s already exists. Updating path...", path) + scenePartial := models.ScenePartial{ + ID: s.ID, + Path: &path, + Interactive: &interactive, + } + if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { + _, err := r.Scene().Update(scenePartial) + return err + }); err != nil { + return nil, err + } + + scanner.makeScreenshots(path, nil, sceneHash) + scanner.PluginCache.ExecutePostHooks(scanner.Ctx, s.ID, plugin.SceneUpdatePost, nil, nil) + } + } else { + logger.Infof("%s doesn't exist. Creating new item...", path) + currentTime := time.Now() + + videoFile, err := scanner.VideoFileCreator.NewVideoFile(path, scanner.StripFileExtension) + if err != nil { + return nil, err + } + + // Override title to be filename if UseFileMetadata is false + if !scanner.UseFileMetadata { + videoFile.SetTitleFromPath(scanner.StripFileExtension) + } + + newScene := models.Scene{ + Checksum: sql.NullString{String: checksum, Valid: checksum != ""}, + OSHash: sql.NullString{String: oshash, Valid: oshash != ""}, + Path: path, + FileModTime: models.NullSQLiteTimestamp{ + Timestamp: scanned.FileModTime, + Valid: true, + }, + Title: sql.NullString{String: videoFile.Title, Valid: true}, + CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + Interactive: interactive, + } + + videoFileToScene(&newScene, videoFile) + + if scanner.UseFileMetadata { + newScene.Details = sql.NullString{String: videoFile.Comment, Valid: true} + newScene.Date = models.SQLiteDate{String: videoFile.CreationTime.Format("2006-01-02")} + } + + if err := scanner.TxnManager.WithTxn(context.TODO(), func(r 
models.Repository) error { + var err error + retScene, err = r.Scene().Create(newScene) + return err + }); err != nil { + return nil, err + } + + scanner.makeScreenshots(path, videoFile, sceneHash) + scanner.PluginCache.ExecutePostHooks(scanner.Ctx, retScene.ID, plugin.SceneCreatePost, nil, nil) + } + + return retScene, nil +} + +func videoFileToScene(s *models.Scene, videoFile *ffmpeg.VideoFile) { + container := ffmpeg.MatchContainer(videoFile.Container, s.Path) + + s.Duration = sql.NullFloat64{Float64: videoFile.Duration, Valid: true} + s.VideoCodec = sql.NullString{String: videoFile.VideoCodec, Valid: true} + s.AudioCodec = sql.NullString{String: videoFile.AudioCodec, Valid: true} + s.Format = sql.NullString{String: string(container), Valid: true} + s.Width = sql.NullInt64{Int64: int64(videoFile.Width), Valid: true} + s.Height = sql.NullInt64{Int64: int64(videoFile.Height), Valid: true} + s.Framerate = sql.NullFloat64{Float64: videoFile.FrameRate, Valid: true} + s.Bitrate = sql.NullInt64{Int64: videoFile.Bitrate, Valid: true} + s.Size = sql.NullString{String: strconv.FormatInt(videoFile.Size, 10), Valid: true} +} + +func (scanner *Scanner) makeScreenshots(path string, probeResult *ffmpeg.VideoFile, checksum string) { + thumbPath := scanner.Paths.Scene.GetThumbnailScreenshotPath(checksum) + normalPath := scanner.Paths.Scene.GetScreenshotPath(checksum) + + thumbExists, _ := utils.FileExists(thumbPath) + normalExists, _ := utils.FileExists(normalPath) + + if thumbExists && normalExists { + return + } + + if probeResult == nil { + var err error + probeResult, err = scanner.VideoFileCreator.NewVideoFile(path, scanner.StripFileExtension) + + if err != nil { + logger.Error(err.Error()) + return + } + logger.Infof("Regenerating images for %s", path) + } + + at := float64(probeResult.Duration) * 0.2 + + if !thumbExists { + logger.Debugf("Creating thumbnail for %s", path) + makeScreenshot(scanner.Screenshotter, *probeResult, thumbPath, 5, 320, at) + } + + if !normalExists 
{ + logger.Debugf("Creating screenshot for %s", path) + makeScreenshot(scanner.Screenshotter, *probeResult, normalPath, 2, probeResult.Width, at) + } +} + +func getInteractive(path string) bool { + _, err := os.Stat(utils.GetFunscriptPath(path)) + return err == nil +} diff --git a/pkg/scene/screenshot.go b/pkg/scene/screenshot.go new file mode 100644 index 000000000..7af8ca3e4 --- /dev/null +++ b/pkg/scene/screenshot.go @@ -0,0 +1,97 @@ +package scene + +import ( + "bytes" + "image" + "image/jpeg" + "os" + + "github.com/stashapp/stash/pkg/ffmpeg" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/manager/paths" + "github.com/stashapp/stash/pkg/models" + + "github.com/disintegration/imaging" + + // needed to decode other image formats + _ "image/gif" + _ "image/png" +) + +type screenshotter interface { + Screenshot(probeResult ffmpeg.VideoFile, options ffmpeg.ScreenshotOptions) error +} + +func makeScreenshot(encoder screenshotter, probeResult ffmpeg.VideoFile, outputPath string, quality int, width int, time float64) { + options := ffmpeg.ScreenshotOptions{ + OutputPath: outputPath, + Quality: quality, + Time: time, + Width: width, + } + + if err := encoder.Screenshot(probeResult, options); err != nil { + logger.Warnf("[encoder] failure to generate screenshot: %v", err) + } +} + +type ScreenshotSetter interface { + SetScreenshot(scene *models.Scene, imageData []byte) error +} + +type PathsScreenshotSetter struct { + Paths *paths.Paths + FileNamingAlgorithm models.HashAlgorithm +} + +func (ss *PathsScreenshotSetter) SetScreenshot(scene *models.Scene, imageData []byte) error { + checksum := scene.GetHash(ss.FileNamingAlgorithm) + return SetScreenshot(ss.Paths, checksum, imageData) +} + +func writeImage(path string, imageData []byte) error { + f, err := os.Create(path) + if err != nil { + return err + } + defer f.Close() + + _, err = f.Write(imageData) + return err +} + +func writeThumbnail(path string, thumbnail image.Image) error { + f, err := 
os.Create(path) + if err != nil { + return err + } + defer f.Close() + + return jpeg.Encode(f, thumbnail, nil) +} + +func SetScreenshot(paths *paths.Paths, checksum string, imageData []byte) error { + thumbPath := paths.Scene.GetThumbnailScreenshotPath(checksum) + normalPath := paths.Scene.GetScreenshotPath(checksum) + + img, _, err := image.Decode(bytes.NewReader(imageData)) + if err != nil { + return err + } + + // resize to 320 width maintaining aspect ratio, for the thumbnail + const width = 320 + origWidth := img.Bounds().Max.X + origHeight := img.Bounds().Max.Y + height := origHeight * width / origWidth + + thumbnail := imaging.Resize(img, width, height, imaging.Lanczos) + err = writeThumbnail(thumbPath, thumbnail) + if err != nil { + return err + } + + err = writeImage(normalPath, imageData) + + return err +} diff --git a/pkg/scene/update.go b/pkg/scene/update.go index 7dec4f415..d351b13eb 100644 --- a/pkg/scene/update.go +++ b/pkg/scene/update.go @@ -2,11 +2,127 @@ package scene import ( "database/sql" + "errors" + "fmt" + "time" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) +var ErrEmptyUpdater = errors.New("no fields have been set") + +// UpdateSet is used to update a scene and its relationships. +type UpdateSet struct { + ID int + + Partial models.ScenePartial + + // in future these could be moved into a separate struct and reused + // for a Creator struct + + // Not set if nil. Set to []int{} to clear existing + PerformerIDs []int + // Not set if nil. Set to []int{} to clear existing + TagIDs []int + // Not set if nil. Set to []int{} to clear existing + StashIDs []models.StashID + // Not set if nil. Set to []byte{} to clear existing + CoverImage []byte +} + +// IsEmpty returns true if there is nothing to update. 
+func (u *UpdateSet) IsEmpty() bool { + withoutID := u.Partial + withoutID.ID = 0 + + return withoutID == models.ScenePartial{} && + u.PerformerIDs == nil && + u.TagIDs == nil && + u.StashIDs == nil && + u.CoverImage == nil +} + +// Update updates a scene by updating the fields in the Partial field, then +// updates non-nil relationships. Returns an error if there is no work to +// be done. +func (u *UpdateSet) Update(qb models.SceneWriter, screenshotSetter ScreenshotSetter) (*models.Scene, error) { + if u.IsEmpty() { + return nil, ErrEmptyUpdater + } + + partial := u.Partial + partial.ID = u.ID + partial.UpdatedAt = &models.SQLiteTimestamp{ + Timestamp: time.Now(), + } + + ret, err := qb.Update(partial) + if err != nil { + return nil, fmt.Errorf("error updating scene: %w", err) + } + + if u.PerformerIDs != nil { + if err := qb.UpdatePerformers(u.ID, u.PerformerIDs); err != nil { + return nil, fmt.Errorf("error updating scene performers: %w", err) + } + } + + if u.TagIDs != nil { + if err := qb.UpdateTags(u.ID, u.TagIDs); err != nil { + return nil, fmt.Errorf("error updating scene tags: %w", err) + } + } + + if u.StashIDs != nil { + if err := qb.UpdateStashIDs(u.ID, u.StashIDs); err != nil { + return nil, fmt.Errorf("error updating scene stash_ids: %w", err) + } + } + + if u.CoverImage != nil { + if err := qb.UpdateCover(u.ID, u.CoverImage); err != nil { + return nil, fmt.Errorf("error updating scene cover: %w", err) + } + + if err := screenshotSetter.SetScreenshot(ret, u.CoverImage); err != nil { + return nil, fmt.Errorf("error setting scene screenshot: %w", err) + } + } + + return ret, nil +} + +// UpdateInput converts the UpdateSet into SceneUpdateInput for hook firing purposes. 
+func (u UpdateSet) UpdateInput() models.SceneUpdateInput { + // ensure the partial ID is set + u.Partial.ID = u.ID + ret := u.Partial.UpdateInput() + + if u.PerformerIDs != nil { + ret.PerformerIds = utils.IntSliceToStringSlice(u.PerformerIDs) + } + + if u.TagIDs != nil { + ret.TagIds = utils.IntSliceToStringSlice(u.TagIDs) + } + + if u.StashIDs != nil { + for _, s := range u.StashIDs { + ss := s.StashIDInput() + ret.StashIds = append(ret.StashIds, &ss) + } + } + + if u.CoverImage != nil { + // convert back to base64 + data := utils.GetBase64StringFromData(u.CoverImage) + ret.CoverImage = &data + } + + return ret +} + func UpdateFormat(qb models.SceneWriter, id int, format string) (*models.Scene, error) { return qb.Update(models.ScenePartial{ ID: id, diff --git a/pkg/scene/update_test.go b/pkg/scene/update_test.go new file mode 100644 index 000000000..587258b93 --- /dev/null +++ b/pkg/scene/update_test.go @@ -0,0 +1,337 @@ +package scene + +import ( + "errors" + "strconv" + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/utils" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestUpdater_IsEmpty(t *testing.T) { + organized := true + ids := []int{1} + stashIDs := []models.StashID{ + {}, + } + cover := []byte{1} + + tests := []struct { + name string + u *UpdateSet + want bool + }{ + { + "empty", + &UpdateSet{}, + true, + }, + { + "id only", + &UpdateSet{ + Partial: models.ScenePartial{ + ID: 1, + }, + }, + true, + }, + { + "partial set", + &UpdateSet{ + Partial: models.ScenePartial{ + Organized: &organized, + }, + }, + false, + }, + { + "performer set", + &UpdateSet{ + PerformerIDs: ids, + }, + false, + }, + { + "tags set", + &UpdateSet{ + TagIDs: ids, + }, + false, + }, + { + "stash ids set", + &UpdateSet{ + StashIDs: stashIDs, + }, + false, + }, + { + "cover set", + &UpdateSet{ + CoverImage: cover, + }, + false, + }, + } + for _, tt := range 
tests { + t.Run(tt.name, func(t *testing.T) { + if got := tt.u.IsEmpty(); got != tt.want { + t.Errorf("Updater.IsEmpty() = %v, want %v", got, tt.want) + } + }) + } +} + +type mockScreenshotSetter struct{} + +func (s *mockScreenshotSetter) SetScreenshot(scene *models.Scene, imageData []byte) error { + return nil +} + +func TestUpdater_Update(t *testing.T) { + const ( + sceneID = iota + 1 + badUpdateID + badPerformersID + badTagsID + badStashIDsID + badCoverID + performerID + tagID + ) + + performerIDs := []int{performerID} + tagIDs := []int{tagID} + stashID := "stashID" + endpoint := "endpoint" + stashIDs := []models.StashID{ + { + StashID: stashID, + Endpoint: endpoint, + }, + } + + title := "title" + cover := []byte("cover") + + validScene := &models.Scene{} + + updateErr := errors.New("error updating") + + qb := mocks.SceneReaderWriter{} + qb.On("Update", mock.MatchedBy(func(s models.ScenePartial) bool { + return s.ID != badUpdateID + })).Return(validScene, nil) + qb.On("Update", mock.MatchedBy(func(s models.ScenePartial) bool { + return s.ID == badUpdateID + })).Return(nil, updateErr) + + qb.On("UpdatePerformers", sceneID, performerIDs).Return(nil).Once() + qb.On("UpdateTags", sceneID, tagIDs).Return(nil).Once() + qb.On("UpdateStashIDs", sceneID, stashIDs).Return(nil).Once() + qb.On("UpdateCover", sceneID, cover).Return(nil).Once() + + qb.On("UpdatePerformers", badPerformersID, performerIDs).Return(updateErr).Once() + qb.On("UpdateTags", badTagsID, tagIDs).Return(updateErr).Once() + qb.On("UpdateStashIDs", badStashIDsID, stashIDs).Return(updateErr).Once() + qb.On("UpdateCover", badCoverID, cover).Return(updateErr).Once() + + tests := []struct { + name string + u *UpdateSet + wantNil bool + wantErr bool + }{ + { + "empty", + &UpdateSet{ + ID: sceneID, + }, + true, + true, + }, + { + "update all", + &UpdateSet{ + ID: sceneID, + PerformerIDs: performerIDs, + TagIDs: tagIDs, + StashIDs: []models.StashID{ + { + StashID: stashID, + Endpoint: endpoint, + }, + }, + 
CoverImage: cover, + }, + false, + false, + }, + { + "update fields only", + &UpdateSet{ + ID: sceneID, + Partial: models.ScenePartial{ + Title: models.NullStringPtr(title), + }, + }, + false, + false, + }, + { + "error updating scene", + &UpdateSet{ + ID: badUpdateID, + Partial: models.ScenePartial{ + Title: models.NullStringPtr(title), + }, + }, + true, + true, + }, + { + "error updating performers", + &UpdateSet{ + ID: badPerformersID, + PerformerIDs: performerIDs, + }, + true, + true, + }, + { + "error updating tags", + &UpdateSet{ + ID: badTagsID, + TagIDs: tagIDs, + }, + true, + true, + }, + { + "error updating stash IDs", + &UpdateSet{ + ID: badStashIDsID, + StashIDs: stashIDs, + }, + true, + true, + }, + { + "error updating cover", + &UpdateSet{ + ID: badCoverID, + CoverImage: cover, + }, + true, + true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.u.Update(&qb, &mockScreenshotSetter{}) + if (err != nil) != tt.wantErr { + t.Errorf("Updater.Update() error = %v, wantErr %v", err, tt.wantErr) + return + } + if (got == nil) != tt.wantNil { + t.Errorf("Updater.Update() = %v, want %v", got, tt.wantNil) + } + }) + } + + qb.AssertExpectations(t) +} + +func TestUpdateSet_UpdateInput(t *testing.T) { + const ( + sceneID = iota + 1 + badUpdateID + badPerformersID + badTagsID + badStashIDsID + badCoverID + performerID + tagID + ) + + sceneIDStr := strconv.Itoa(sceneID) + + performerIDs := []int{performerID} + performerIDStrs := utils.IntSliceToStringSlice(performerIDs) + tagIDs := []int{tagID} + tagIDStrs := utils.IntSliceToStringSlice(tagIDs) + stashID := "stashID" + endpoint := "endpoint" + stashIDs := []models.StashID{ + { + StashID: stashID, + Endpoint: endpoint, + }, + } + stashIDInputs := []*models.StashIDInput{ + { + StashID: stashID, + Endpoint: endpoint, + }, + } + + title := "title" + cover := []byte("cover") + coverB64 := "Y292ZXI=" + + tests := []struct { + name string + u UpdateSet + want 
models.SceneUpdateInput + }{ + { + "empty", + UpdateSet{ + ID: sceneID, + }, + models.SceneUpdateInput{ + ID: sceneIDStr, + }, + }, + { + "update all", + UpdateSet{ + ID: sceneID, + PerformerIDs: performerIDs, + TagIDs: tagIDs, + StashIDs: stashIDs, + CoverImage: cover, + }, + models.SceneUpdateInput{ + ID: sceneIDStr, + PerformerIds: performerIDStrs, + TagIds: tagIDStrs, + StashIds: stashIDInputs, + CoverImage: &coverB64, + }, + }, + { + "update fields only", + UpdateSet{ + ID: sceneID, + Partial: models.ScenePartial{ + Title: models.NullStringPtr(title), + }, + }, + models.SceneUpdateInput{ + ID: sceneIDStr, + Title: &title, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := tt.u.UpdateInput() + assert.Equal(t, tt.want, got) + }) + } +} diff --git a/pkg/scraper/action.go b/pkg/scraper/action.go index 493163936..d8c08da97 100644 --- a/pkg/scraper/action.go +++ b/pkg/scraper/action.go @@ -1,6 +1,10 @@ package scraper -import "github.com/stashapp/stash/pkg/models" +import ( + "net/http" + + "github.com/stashapp/stash/pkg/models" +) type scraperAction string @@ -19,7 +23,7 @@ func (e scraperAction) IsValid() bool { return false } -type scraper interface { +type scraperActionImpl interface { scrapePerformersByName(name string) ([]*models.ScrapedPerformer, error) scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) @@ -36,16 +40,16 @@ type scraper interface { scrapeMovieByURL(url string) (*models.ScrapedMovie, error) } -func getScraper(scraper scraperTypeConfig, txnManager models.TransactionManager, config config, globalConfig GlobalConfig) scraper { +func (c config) getScraper(scraper scraperTypeConfig, client *http.Client, txnManager models.TransactionManager, globalConfig GlobalConfig) scraperActionImpl { switch scraper.Action { case scraperActionScript: - return newScriptScraper(scraper, config, 
globalConfig) + return newScriptScraper(scraper, c, globalConfig) case scraperActionStash: - return newStashScraper(scraper, txnManager, config, globalConfig) + return newStashScraper(scraper, client, txnManager, c, globalConfig) case scraperActionXPath: - return newXpathScraper(scraper, txnManager, config, globalConfig) + return newXpathScraper(scraper, client, txnManager, c, globalConfig) case scraperActionJson: - return newJsonScraper(scraper, txnManager, config, globalConfig) + return newJsonScraper(scraper, client, txnManager, c, globalConfig) } panic("unknown scraper action: " + scraper.Action) diff --git a/pkg/scraper/autotag.go b/pkg/scraper/autotag.go new file mode 100644 index 000000000..73a836224 --- /dev/null +++ b/pkg/scraper/autotag.go @@ -0,0 +1,218 @@ +package scraper + +import ( + "context" + "errors" + "fmt" + "strconv" + + "github.com/stashapp/stash/pkg/match" + "github.com/stashapp/stash/pkg/models" +) + +// autoTagScraperID is the scraper ID for the built-in AutoTag scraper +const ( + autoTagScraperID = "builtin_autotag" + autoTagScraperName = "Auto Tag" +) + +var errNotSupported = errors.New("not supported") + +type autotagScraper struct { + txnManager models.TransactionManager + globalConfig GlobalConfig +} + +func (s *autotagScraper) matchPerformers(path string, performerReader models.PerformerReader) ([]*models.ScrapedPerformer, error) { + p, err := match.PathToPerformers(path, performerReader) + if err != nil { + return nil, fmt.Errorf("error matching performers: %w", err) + } + + var ret []*models.ScrapedPerformer + for _, pp := range p { + id := strconv.Itoa(pp.ID) + + sp := &models.ScrapedPerformer{ + Name: &pp.Name.String, + StoredID: &id, + } + if pp.Gender.Valid { + sp.Gender = &pp.Gender.String + } + + ret = append(ret, sp) + } + + return ret, nil +} + +func (s *autotagScraper) matchStudio(path string, studioReader models.StudioReader) (*models.ScrapedStudio, error) { + st, err := match.PathToStudios(path, studioReader) + if err != 
nil { + return nil, fmt.Errorf("error matching studios: %w", err) + } + + if len(st) > 0 { + id := strconv.Itoa(st[0].ID) + return &models.ScrapedStudio{ + Name: st[0].Name.String, + StoredID: &id, + }, nil + } + + return nil, nil +} + +func (s *autotagScraper) matchTags(path string, tagReader models.TagReader) ([]*models.ScrapedTag, error) { + t, err := match.PathToTags(path, tagReader) + if err != nil { + return nil, fmt.Errorf("error matching tags: %w", err) + } + + var ret []*models.ScrapedTag + for _, tt := range t { + id := strconv.Itoa(tt.ID) + + st := &models.ScrapedTag{ + Name: tt.Name, + StoredID: &id, + } + + ret = append(ret, st) + } + + return ret, nil +} + +type autotagSceneScraper struct { + *autotagScraper +} + +func (c *autotagSceneScraper) scrapeByName(name string) ([]*models.ScrapedScene, error) { + return nil, errNotSupported +} + +func (c *autotagSceneScraper) scrapeByScene(scene *models.Scene) (*models.ScrapedScene, error) { + var ret *models.ScrapedScene + + // populate performers, studio and tags based on scene path + if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + path := scene.Path + performers, err := c.matchPerformers(path, r.Performer()) + if err != nil { + return err + } + studio, err := c.matchStudio(path, r.Studio()) + if err != nil { + return err + } + + tags, err := c.matchTags(path, r.Tag()) + if err != nil { + return err + } + + if len(performers) > 0 || studio != nil || len(tags) > 0 { + ret = &models.ScrapedScene{ + Performers: performers, + Studio: studio, + Tags: tags, + } + } + + return nil + }); err != nil { + return nil, err + } + + return ret, nil +} + +func (c *autotagSceneScraper) scrapeByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) { + return nil, errNotSupported +} + +func (c *autotagSceneScraper) scrapeByURL(url string) (*models.ScrapedScene, error) { + return nil, errNotSupported +} + +type autotagGalleryScraper struct { + *autotagScraper +} + 
+func (c *autotagGalleryScraper) scrapeByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) { + if !gallery.Path.Valid { + // not valid for non-path-based galleries + return nil, nil + } + + var ret *models.ScrapedGallery + + // populate performers, studio and tags based on gallery path + if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + path := gallery.Path.String + performers, err := c.matchPerformers(path, r.Performer()) + if err != nil { + return err + } + studio, err := c.matchStudio(path, r.Studio()) + if err != nil { + return err + } + + tags, err := c.matchTags(path, r.Tag()) + if err != nil { + return err + } + + if len(performers) > 0 || studio != nil || len(tags) > 0 { + ret = &models.ScrapedGallery{ + Performers: performers, + Studio: studio, + Tags: tags, + } + } + + return nil + }); err != nil { + return nil, err + } + + return ret, nil +} + +func (c *autotagGalleryScraper) scrapeByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) { + return nil, errNotSupported +} + +func (c *autotagGalleryScraper) scrapeByURL(url string) (*models.ScrapedGallery, error) { + return nil, errNotSupported +} + +func getAutoTagScraper(txnManager models.TransactionManager, globalConfig GlobalConfig) scraper { + base := autotagScraper{ + txnManager: txnManager, + globalConfig: globalConfig, + } + + supportedScrapes := []models.ScrapeType{ + models.ScrapeTypeFragment, + } + + return scraper{ + ID: autoTagScraperID, + Spec: &models.Scraper{ + ID: autoTagScraperID, + Name: autoTagScraperName, + Scene: &models.ScraperSpec{ + SupportedScrapes: supportedScrapes, + }, + Gallery: &models.ScraperSpec{ + SupportedScrapes: supportedScrapes, + }, + }, + Scene: &autotagSceneScraper{&base}, + Gallery: &autotagGalleryScraper{&base}, + } +} diff --git a/pkg/scraper/config.go b/pkg/scraper/config.go index 78d3fe4fe..ee12c7e90 100644 --- a/pkg/scraper/config.go +++ b/pkg/scraper/config.go @@ -9,8 +9,6 @@ import 
( "strings" "gopkg.in/yaml.v2" - - "github.com/stashapp/stash/pkg/models" ) type config struct { @@ -194,7 +192,7 @@ type scraperDriverOptions struct { Headers []*header `yaml:"headers"` } -func loadScraperFromYAML(id string, reader io.Reader) (*config, error) { +func loadConfigFromYAML(id string, reader io.Reader) (*config, error) { ret := &config{} parser := yaml.NewDecoder(reader) @@ -213,7 +211,7 @@ func loadScraperFromYAML(id string, reader io.Reader) (*config, error) { return ret, nil } -func loadScraperFromYAMLFile(path string) (*config, error) { +func loadConfigFromYAMLFile(path string) (*config, error) { file, err := os.Open(path) if err != nil { return nil, err @@ -224,7 +222,7 @@ func loadScraperFromYAMLFile(path string) (*config, error) { id := filepath.Base(path) id = id[:strings.LastIndex(id, ".")] - ret, err := loadScraperFromYAML(id, file) + ret, err := loadConfigFromYAML(id, file) if err != nil { return nil, err } @@ -234,78 +232,6 @@ func loadScraperFromYAMLFile(path string) (*config, error) { return ret, nil } -func (c config) toScraper() *models.Scraper { - ret := models.Scraper{ - ID: c.ID, - Name: c.Name, - } - - performer := models.ScraperSpec{} - if c.PerformerByName != nil { - performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeName) - } - if c.PerformerByFragment != nil { - performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeFragment) - } - if len(c.PerformerByURL) > 0 { - performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeURL) - for _, v := range c.PerformerByURL { - performer.Urls = append(performer.Urls, v.URL...) 
- } - } - - if len(performer.SupportedScrapes) > 0 { - ret.Performer = &performer - } - - scene := models.ScraperSpec{} - if c.SceneByFragment != nil { - scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeFragment) - } - if c.SceneByName != nil && c.SceneByQueryFragment != nil { - scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeName) - } - if len(c.SceneByURL) > 0 { - scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeURL) - for _, v := range c.SceneByURL { - scene.Urls = append(scene.Urls, v.URL...) - } - } - - if len(scene.SupportedScrapes) > 0 { - ret.Scene = &scene - } - - gallery := models.ScraperSpec{} - if c.GalleryByFragment != nil { - gallery.SupportedScrapes = append(gallery.SupportedScrapes, models.ScrapeTypeFragment) - } - if len(c.GalleryByURL) > 0 { - gallery.SupportedScrapes = append(gallery.SupportedScrapes, models.ScrapeTypeURL) - for _, v := range c.GalleryByURL { - gallery.Urls = append(gallery.Urls, v.URL...) - } - } - - if len(gallery.SupportedScrapes) > 0 { - ret.Gallery = &gallery - } - - movie := models.ScraperSpec{} - if len(c.MovieByURL) > 0 { - movie.SupportedScrapes = append(movie.SupportedScrapes, models.ScrapeTypeURL) - for _, v := range c.MovieByURL { - movie.Urls = append(movie.Urls, v.URL...) 
- } - } - - if len(movie.SupportedScrapes) > 0 { - ret.Movie = &movie - } - - return &ret -} - func (c config) supportsPerformers() bool { return c.PerformerByName != nil || c.PerformerByFragment != nil || len(c.PerformerByURL) > 0 } @@ -320,47 +246,6 @@ func (c config) matchesPerformerURL(url string) bool { return false } -func (c config) ScrapePerformerNames(name string, txnManager models.TransactionManager, globalConfig GlobalConfig) ([]*models.ScrapedPerformer, error) { - if c.PerformerByName != nil { - s := getScraper(*c.PerformerByName, txnManager, c, globalConfig) - return s.scrapePerformersByName(name) - } - - return nil, nil -} - -func (c config) ScrapePerformer(scrapedPerformer models.ScrapedPerformerInput, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedPerformer, error) { - if c.PerformerByFragment != nil { - s := getScraper(*c.PerformerByFragment, txnManager, c, globalConfig) - return s.scrapePerformerByFragment(scrapedPerformer) - } - - // try to match against URL if present - if scrapedPerformer.URL != nil && *scrapedPerformer.URL != "" { - return c.ScrapePerformerURL(*scrapedPerformer.URL, txnManager, globalConfig) - } - - return nil, nil -} - -func (c config) ScrapePerformerURL(url string, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedPerformer, error) { - for _, scraper := range c.PerformerByURL { - if scraper.matchesURL(url) { - s := getScraper(scraper.scraperTypeConfig, txnManager, c, globalConfig) - ret, err := s.scrapePerformerByURL(url) - if err != nil { - return nil, err - } - - if ret != nil { - return ret, nil - } - } - } - - return nil, nil -} - func (c config) supportsScenes() bool { return (c.SceneByName != nil && c.SceneByQueryFragment != nil) || c.SceneByFragment != nil || len(c.SceneByURL) > 0 } @@ -401,103 +286,3 @@ func (c config) matchesMovieURL(url string) bool { return false } - -func (c config) ScrapeSceneQuery(name string, txnManager models.TransactionManager, 
globalConfig GlobalConfig) ([]*models.ScrapedScene, error) { - if c.SceneByName != nil { - s := getScraper(*c.SceneByName, txnManager, c, globalConfig) - return s.scrapeScenesByName(name) - } - - return nil, nil -} - -func (c config) ScrapeSceneByScene(scene *models.Scene, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedScene, error) { - if c.SceneByFragment != nil { - s := getScraper(*c.SceneByFragment, txnManager, c, globalConfig) - return s.scrapeSceneByScene(scene) - } - - return nil, nil -} - -func (c config) ScrapeSceneByFragment(scene models.ScrapedSceneInput, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedScene, error) { - if c.SceneByQueryFragment != nil { - s := getScraper(*c.SceneByQueryFragment, txnManager, c, globalConfig) - return s.scrapeSceneByFragment(scene) - } - - return nil, nil -} - -func (c config) ScrapeSceneURL(url string, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedScene, error) { - for _, scraper := range c.SceneByURL { - if scraper.matchesURL(url) { - s := getScraper(scraper.scraperTypeConfig, txnManager, c, globalConfig) - ret, err := s.scrapeSceneByURL(url) - if err != nil { - return nil, err - } - - if ret != nil { - return ret, nil - } - } - } - - return nil, nil -} - -func (c config) ScrapeGalleryByGallery(gallery *models.Gallery, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedGallery, error) { - if c.GalleryByFragment != nil { - s := getScraper(*c.GalleryByFragment, txnManager, c, globalConfig) - return s.scrapeGalleryByGallery(gallery) - } - - return nil, nil -} - -func (c config) ScrapeGalleryByFragment(gallery models.ScrapedGalleryInput, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedGallery, error) { - if c.GalleryByFragment != nil { - // TODO - this should be galleryByQueryFragment - s := getScraper(*c.GalleryByFragment, txnManager, c, globalConfig) - return 
s.scrapeGalleryByFragment(gallery) - } - - return nil, nil -} - -func (c config) ScrapeGalleryURL(url string, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedGallery, error) { - for _, scraper := range c.GalleryByURL { - if scraper.matchesURL(url) { - s := getScraper(scraper.scraperTypeConfig, txnManager, c, globalConfig) - ret, err := s.scrapeGalleryByURL(url) - if err != nil { - return nil, err - } - - if ret != nil { - return ret, nil - } - } - } - - return nil, nil -} - -func (c config) ScrapeMovieURL(url string, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedMovie, error) { - for _, scraper := range c.MovieByURL { - if scraper.matchesURL(url) { - s := getScraper(scraper.scraperTypeConfig, txnManager, c, globalConfig) - ret, err := s.scrapeMovieByURL(url) - if err != nil { - return nil, err - } - - if ret != nil { - return ret, nil - } - } - } - - return nil, nil -} diff --git a/pkg/scraper/config_scraper.go b/pkg/scraper/config_scraper.go new file mode 100644 index 000000000..01ccd4f2b --- /dev/null +++ b/pkg/scraper/config_scraper.go @@ -0,0 +1,289 @@ +package scraper + +import ( + "net/http" + + "github.com/stashapp/stash/pkg/models" +) + +type configSceneScraper struct { + *configScraper +} + +func (c *configSceneScraper) matchesURL(url string) bool { + return c.config.matchesSceneURL(url) +} + +func (c *configSceneScraper) scrapeByName(name string) ([]*models.ScrapedScene, error) { + if c.config.SceneByName != nil { + s := c.config.getScraper(*c.config.SceneByName, c.client, c.txnManager, c.globalConfig) + return s.scrapeScenesByName(name) + } + + return nil, nil +} + +func (c *configSceneScraper) scrapeByScene(scene *models.Scene) (*models.ScrapedScene, error) { + if c.config.SceneByFragment != nil { + s := c.config.getScraper(*c.config.SceneByFragment, c.client, c.txnManager, c.globalConfig) + return s.scrapeSceneByScene(scene) + } + + return nil, nil +} + +func (c *configSceneScraper) 
scrapeByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) { + if c.config.SceneByQueryFragment != nil { + s := c.config.getScraper(*c.config.SceneByQueryFragment, c.client, c.txnManager, c.globalConfig) + return s.scrapeSceneByFragment(scene) + } + + return nil, nil +} + +func (c *configSceneScraper) scrapeByURL(url string) (*models.ScrapedScene, error) { + for _, scraper := range c.config.SceneByURL { + if scraper.matchesURL(url) { + s := c.config.getScraper(scraper.scraperTypeConfig, c.client, c.txnManager, c.globalConfig) + ret, err := s.scrapeSceneByURL(url) + if err != nil { + return nil, err + } + + if ret != nil { + return ret, nil + } + } + } + + return nil, nil +} + +type configPerformerScraper struct { + *configScraper +} + +func (c *configPerformerScraper) matchesURL(url string) bool { + return c.config.matchesPerformerURL(url) +} + +func (c *configPerformerScraper) scrapeByName(name string) ([]*models.ScrapedPerformer, error) { + if c.config.PerformerByName != nil { + s := c.config.getScraper(*c.config.PerformerByName, c.client, c.txnManager, c.globalConfig) + return s.scrapePerformersByName(name) + } + + return nil, nil +} + +func (c *configPerformerScraper) scrapeByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) { + if c.config.PerformerByFragment != nil { + s := c.config.getScraper(*c.config.PerformerByFragment, c.client, c.txnManager, c.globalConfig) + return s.scrapePerformerByFragment(scrapedPerformer) + } + + // try to match against URL if present + if scrapedPerformer.URL != nil && *scrapedPerformer.URL != "" { + return c.scrapeByURL(*scrapedPerformer.URL) + } + + return nil, nil +} + +func (c *configPerformerScraper) scrapeByURL(url string) (*models.ScrapedPerformer, error) { + for _, scraper := range c.config.PerformerByURL { + if scraper.matchesURL(url) { + s := c.config.getScraper(scraper.scraperTypeConfig, c.client, c.txnManager, c.globalConfig) + ret, err := 
s.scrapePerformerByURL(url) + if err != nil { + return nil, err + } + + if ret != nil { + return ret, nil + } + } + } + + return nil, nil +} + +type configGalleryScraper struct { + *configScraper +} + +func (c *configGalleryScraper) matchesURL(url string) bool { + return c.config.matchesGalleryURL(url) +} + +func (c *configGalleryScraper) scrapeByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) { + if c.config.GalleryByFragment != nil { + s := c.config.getScraper(*c.config.GalleryByFragment, c.client, c.txnManager, c.globalConfig) + return s.scrapeGalleryByGallery(gallery) + } + + return nil, nil +} + +func (c *configGalleryScraper) scrapeByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) { + if c.config.GalleryByFragment != nil { + // TODO - this should be galleryByQueryFragment + s := c.config.getScraper(*c.config.GalleryByFragment, c.client, c.txnManager, c.globalConfig) + return s.scrapeGalleryByFragment(gallery) + } + + return nil, nil +} + +func (c *configGalleryScraper) scrapeByURL(url string) (*models.ScrapedGallery, error) { + for _, scraper := range c.config.GalleryByURL { + if scraper.matchesURL(url) { + s := c.config.getScraper(scraper.scraperTypeConfig, c.client, c.txnManager, c.globalConfig) + ret, err := s.scrapeGalleryByURL(url) + if err != nil { + return nil, err + } + + if ret != nil { + return ret, nil + } + } + } + + return nil, nil +} + +type configMovieScraper struct { + *configScraper +} + +func (c *configMovieScraper) matchesURL(url string) bool { + return c.config.matchesMovieURL(url) +} + +func (c *configMovieScraper) scrapeByURL(url string) (*models.ScrapedMovie, error) { + for _, scraper := range c.config.MovieByURL { + if scraper.matchesURL(url) { + s := c.config.getScraper(scraper.scraperTypeConfig, c.client, c.txnManager, c.globalConfig) + ret, err := s.scrapeMovieByURL(url) + if err != nil { + return nil, err + } + + if ret != nil { + return ret, nil + } + } + } + + return nil, nil +} + 
+type configScraper struct { + config config + client *http.Client + txnManager models.TransactionManager + globalConfig GlobalConfig +} + +func createScraperFromConfig(c config, client *http.Client, txnManager models.TransactionManager, globalConfig GlobalConfig) scraper { + base := configScraper{ + client: client, + config: c, + txnManager: txnManager, + globalConfig: globalConfig, + } + + ret := scraper{ + ID: c.ID, + Spec: configScraperSpec(c), + } + + // only set fields if supported + if c.supportsPerformers() { + ret.Performer = &configPerformerScraper{&base} + } + if c.supportsGalleries() { + ret.Gallery = &configGalleryScraper{&base} + } + if c.supportsMovies() { + ret.Movie = &configMovieScraper{&base} + } + if c.supportsScenes() { + ret.Scene = &configSceneScraper{&base} + } + + return ret +} + +func configScraperSpec(c config) *models.Scraper { + ret := models.Scraper{ + ID: c.ID, + Name: c.Name, + } + + performer := models.ScraperSpec{} + if c.PerformerByName != nil { + performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeName) + } + if c.PerformerByFragment != nil { + performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeFragment) + } + if len(c.PerformerByURL) > 0 { + performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeURL) + for _, v := range c.PerformerByURL { + performer.Urls = append(performer.Urls, v.URL...) 
+ } + } + + if len(performer.SupportedScrapes) > 0 { + ret.Performer = &performer + } + + scene := models.ScraperSpec{} + if c.SceneByFragment != nil { + scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeFragment) + } + if c.SceneByName != nil && c.SceneByQueryFragment != nil { + scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeName) + } + if len(c.SceneByURL) > 0 { + scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeURL) + for _, v := range c.SceneByURL { + scene.Urls = append(scene.Urls, v.URL...) + } + } + + if len(scene.SupportedScrapes) > 0 { + ret.Scene = &scene + } + + gallery := models.ScraperSpec{} + if c.GalleryByFragment != nil { + gallery.SupportedScrapes = append(gallery.SupportedScrapes, models.ScrapeTypeFragment) + } + if len(c.GalleryByURL) > 0 { + gallery.SupportedScrapes = append(gallery.SupportedScrapes, models.ScrapeTypeURL) + for _, v := range c.GalleryByURL { + gallery.Urls = append(gallery.Urls, v.URL...) + } + } + + if len(gallery.SupportedScrapes) > 0 { + ret.Gallery = &gallery + } + + movie := models.ScraperSpec{} + if len(c.MovieByURL) > 0 { + movie.SupportedScrapes = append(movie.SupportedScrapes, models.ScrapeTypeURL) + for _, v := range c.MovieByURL { + movie.Urls = append(movie.Urls, v.URL...) 
+ } + } + + if len(movie.SupportedScrapes) > 0 { + ret.Movie = &movie + } + + return &ret +} diff --git a/pkg/scraper/cookies.go b/pkg/scraper/cookies.go index 824c64ee6..ed854d50a 100644 --- a/pkg/scraper/cookies.go +++ b/pkg/scraper/cookies.go @@ -11,42 +11,51 @@ import ( "github.com/chromedp/cdproto/cdp" "github.com/chromedp/cdproto/network" "github.com/chromedp/chromedp" + "golang.org/x/net/publicsuffix" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/utils" ) -// set cookies for the native http client -func setCookies(jar *cookiejar.Jar, scraperConfig config) { - driverOptions := scraperConfig.DriverOptions - if driverOptions != nil && !driverOptions.UseCDP { +// jar constructs a cookie jar from a configuration +func (c config) jar() (*cookiejar.Jar, error) { + opts := c.DriverOptions + jar, err := cookiejar.New(&cookiejar.Options{ + PublicSuffixList: publicsuffix.List, + }) + if err != nil { + return nil, err + } - for _, ckURL := range driverOptions.Cookies { // go through all cookies - url, err := url.Parse(ckURL.CookieURL) // CookieURL must be valid, include schema - if err != nil { - logger.Warnf("Skipping jar cookies for cookieURL %s. 
Error %s", ckURL.CookieURL, err) - } else { - var httpCookies []*http.Cookie - var httpCookie *http.Cookie + if opts == nil || opts.UseCDP { + return jar, nil + } - for _, cookie := range ckURL.Cookies { - httpCookie = &http.Cookie{ - Name: cookie.Name, - Value: getCookieValue(cookie), - Path: cookie.Path, - Domain: cookie.Domain, - } + for i, ckURL := range opts.Cookies { + url, err := url.Parse(ckURL.CookieURL) // CookieURL must be valid, include schema + if err != nil { + logger.Warnf("skipping cookie [%d] for cookieURL %s: %v", i, ckURL.CookieURL, err) + continue + } - httpCookies = append(httpCookies, httpCookie) - } - jar.SetCookies(url, httpCookies) // jar.SetCookies only sets cookies with the domain matching the URL - - if jar.Cookies(url) == nil { - logger.Warnf("Setting jar cookies for %s failed", url.String()) - } + var httpCookies []*http.Cookie + for _, cookie := range ckURL.Cookies { + c := &http.Cookie{ + Name: cookie.Name, + Value: getCookieValue(cookie), + Path: cookie.Path, + Domain: cookie.Domain, } + httpCookies = append(httpCookies, c) + } + + jar.SetCookies(url, httpCookies) + if jar.Cookies(url) == nil { + logger.Warnf("setting jar cookies for %s failed", url.String()) } } + + return jar, nil } func getCookieValue(cookie *scraperCookies) string { @@ -56,7 +65,7 @@ func getCookieValue(cookie *scraperCookies) string { return cookie.Value } -// print all cookies from the jar of the native http client +// printCookies prints all cookies from the given cookie jar func printCookies(jar *cookiejar.Jar, scraperConfig config, msg string) { driverOptions := scraperConfig.DriverOptions if driverOptions != nil && !driverOptions.UseCDP { diff --git a/pkg/scraper/freeones.go b/pkg/scraper/freeones.go index c229e874a..c50235cc4 100644 --- a/pkg/scraper/freeones.go +++ b/pkg/scraper/freeones.go @@ -1,9 +1,11 @@ package scraper import ( + "net/http" "strings" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" ) // 
FreeonesScraperID is the scraper ID for the built-in Freeones scraper @@ -45,7 +47,7 @@ xPathScrapers: - regex: \sBio\s*$ with: "" URL: //link[@rel="alternate" and @hreflang="x-default"]/@href - Twitter: //a[contains(@href,'twitter.com/')]/@href + Twitter: //a[not(starts-with(@href,'https://twitter.com/FreeOnes'))][contains(@href,'twitter.com/')]/@href Instagram: //a[contains(@href,'instagram.com/')]/@href Birthdate: selector: //span[contains(text(),'Born On')] @@ -122,13 +124,13 @@ xPathScrapers: # Last updated April 13, 2021 ` -func getFreeonesScraper() config { +func getFreeonesScraper(client *http.Client, txnManager models.TransactionManager, globalConfig GlobalConfig) scraper { yml := freeonesScraperConfig - scraper, err := loadScraperFromYAML(FreeonesScraperID, strings.NewReader(yml)) + c, err := loadConfigFromYAML(FreeonesScraperID, strings.NewReader(yml)) if err != nil { logger.Fatalf("Error loading builtin freeones scraper: %s", err.Error()) } - return *scraper + return createScraperFromConfig(*c, client, txnManager, globalConfig) } diff --git a/pkg/scraper/image.go b/pkg/scraper/image.go index e2b3b5e1e..3954cdbaf 100644 --- a/pkg/scraper/image.go +++ b/pkg/scraper/image.go @@ -1,28 +1,23 @@ package scraper import ( - "crypto/tls" + "context" "fmt" "io" "net/http" "strings" - "time" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) -// Timeout to get the image. Includes transfer time. May want to make this -// configurable at some point. 
-const imageGetTimeout = time.Second * 30 - -func setPerformerImage(p *models.ScrapedPerformer, globalConfig GlobalConfig) error { +func setPerformerImage(ctx context.Context, client *http.Client, p *models.ScrapedPerformer, globalConfig GlobalConfig) error { if p == nil || p.Image == nil || !strings.HasPrefix(*p.Image, "http") { // nothing to do return nil } - img, err := getImage(*p.Image, globalConfig) + img, err := getImage(ctx, *p.Image, client, globalConfig) if err != nil { return err } @@ -34,14 +29,14 @@ func setPerformerImage(p *models.ScrapedPerformer, globalConfig GlobalConfig) er return nil } -func setSceneImage(s *models.ScrapedScene, globalConfig GlobalConfig) error { +func setSceneImage(ctx context.Context, client *http.Client, s *models.ScrapedScene, globalConfig GlobalConfig) error { // don't try to get the image if it doesn't appear to be a URL if s == nil || s.Image == nil || !strings.HasPrefix(*s.Image, "http") { // nothing to do return nil } - img, err := getImage(*s.Image, globalConfig) + img, err := getImage(ctx, *s.Image, client, globalConfig) if err != nil { return err } @@ -51,14 +46,14 @@ func setSceneImage(s *models.ScrapedScene, globalConfig GlobalConfig) error { return nil } -func setMovieFrontImage(m *models.ScrapedMovie, globalConfig GlobalConfig) error { +func setMovieFrontImage(ctx context.Context, client *http.Client, m *models.ScrapedMovie, globalConfig GlobalConfig) error { // don't try to get the image if it doesn't appear to be a URL if m == nil || m.FrontImage == nil || !strings.HasPrefix(*m.FrontImage, "http") { // nothing to do return nil } - img, err := getImage(*m.FrontImage, globalConfig) + img, err := getImage(ctx, *m.FrontImage, client, globalConfig) if err != nil { return err } @@ -68,14 +63,14 @@ func setMovieFrontImage(m *models.ScrapedMovie, globalConfig GlobalConfig) error return nil } -func setMovieBackImage(m *models.ScrapedMovie, globalConfig GlobalConfig) error { +func setMovieBackImage(ctx context.Context, 
client *http.Client, m *models.ScrapedMovie, globalConfig GlobalConfig) error { // don't try to get the image if it doesn't appear to be a URL if m == nil || m.BackImage == nil || !strings.HasPrefix(*m.BackImage, "http") { // nothing to do return nil } - img, err := getImage(*m.BackImage, globalConfig) + img, err := getImage(ctx, *m.BackImage, client, globalConfig) if err != nil { return err } @@ -85,14 +80,8 @@ func setMovieBackImage(m *models.ScrapedMovie, globalConfig GlobalConfig) error return nil } -func getImage(url string, globalConfig GlobalConfig) (*string, error) { - client := &http.Client{ - Transport: &http.Transport{ // ignore insecure certificates - TLSClientConfig: &tls.Config{InsecureSkipVerify: !globalConfig.GetScraperCertCheck()}}, - Timeout: imageGetTimeout, - } - - req, err := http.NewRequest("GET", url, nil) +func getImage(ctx context.Context, url string, client *http.Client, globalConfig GlobalConfig) (*string, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) if err != nil { return nil, err } @@ -136,10 +125,10 @@ func getImage(url string, globalConfig GlobalConfig) (*string, error) { return &img, nil } -func getStashPerformerImage(stashURL string, performerID string, globalConfig GlobalConfig) (*string, error) { - return getImage(stashURL+"/performer/"+performerID+"/image", globalConfig) +func getStashPerformerImage(ctx context.Context, stashURL string, performerID string, client *http.Client, globalConfig GlobalConfig) (*string, error) { + return getImage(ctx, stashURL+"/performer/"+performerID+"/image", client, globalConfig) } -func getStashSceneImage(stashURL string, sceneID string, globalConfig GlobalConfig) (*string, error) { - return getImage(stashURL+"/scene/"+sceneID+"/screenshot", globalConfig) +func getStashSceneImage(ctx context.Context, stashURL string, sceneID string, client *http.Client, globalConfig GlobalConfig) (*string, error) { + return getImage(ctx, stashURL+"/scene/"+sceneID+"/screenshot", 
client, globalConfig) } diff --git a/pkg/scraper/json.go b/pkg/scraper/json.go index e99a4b698..82bf1aa0b 100644 --- a/pkg/scraper/json.go +++ b/pkg/scraper/json.go @@ -1,8 +1,10 @@ package scraper import ( + "context" "errors" "io" + "net/http" "net/url" "strings" @@ -15,13 +17,15 @@ type jsonScraper struct { scraper scraperTypeConfig config config globalConfig GlobalConfig + client *http.Client txnManager models.TransactionManager } -func newJsonScraper(scraper scraperTypeConfig, txnManager models.TransactionManager, config config, globalConfig GlobalConfig) *jsonScraper { +func newJsonScraper(scraper scraperTypeConfig, client *http.Client, txnManager models.TransactionManager, config config, globalConfig GlobalConfig) *jsonScraper { return &jsonScraper{ scraper: scraper, config: config, + client: client, globalConfig: globalConfig, txnManager: txnManager, } @@ -31,14 +35,14 @@ func (s *jsonScraper) getJsonScraper() *mappedScraper { return s.config.JsonScrapers[s.scraper.Scraper] } -func (s *jsonScraper) scrapeURL(url string) (string, *mappedScraper, error) { +func (s *jsonScraper) scrapeURL(ctx context.Context, url string) (string, *mappedScraper, error) { scraper := s.getJsonScraper() if scraper == nil { return "", nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config") } - doc, err := s.loadURL(url) + doc, err := s.loadURL(ctx, url) if err != nil { return "", nil, err @@ -47,8 +51,8 @@ func (s *jsonScraper) scrapeURL(url string) (string, *mappedScraper, error) { return doc, scraper, nil } -func (s *jsonScraper) loadURL(url string) (string, error) { - r, err := loadURL(url, s.config, s.globalConfig) +func (s *jsonScraper) loadURL(ctx context.Context, url string) (string, error) { + r, err := loadURL(ctx, url, s.client, s.config, s.globalConfig) if err != nil { return "", err } @@ -72,7 +76,7 @@ func (s *jsonScraper) loadURL(url string) (string, error) { func (s *jsonScraper) scrapePerformerByURL(url string) 
(*models.ScrapedPerformer, error) { u := replaceURL(url, s.scraper) // allow a URL Replace for performer by URL queries - doc, scraper, err := s.scrapeURL(u) + doc, scraper, err := s.scrapeURL(context.TODO(), u) if err != nil { return nil, err } @@ -83,7 +87,7 @@ func (s *jsonScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer func (s *jsonScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) { u := replaceURL(url, s.scraper) // allow a URL Replace for scene by URL queries - doc, scraper, err := s.scrapeURL(u) + doc, scraper, err := s.scrapeURL(context.TODO(), u) if err != nil { return nil, err } @@ -94,7 +98,7 @@ func (s *jsonScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) func (s *jsonScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) { u := replaceURL(url, s.scraper) // allow a URL Replace for gallery by URL queries - doc, scraper, err := s.scrapeURL(u) + doc, scraper, err := s.scrapeURL(context.TODO(), u) if err != nil { return nil, err } @@ -105,7 +109,7 @@ func (s *jsonScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, er func (s *jsonScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) { u := replaceURL(url, s.scraper) // allow a URL Replace for movie by URL queries - doc, scraper, err := s.scrapeURL(u) + doc, scraper, err := s.scrapeURL(context.TODO(), u) if err != nil { return nil, err } @@ -127,9 +131,9 @@ func (s *jsonScraper) scrapePerformersByName(name string) ([]*models.ScrapedPerf escapedName := url.QueryEscape(name) url := s.scraper.QueryURL - url = strings.Replace(url, placeholder, escapedName, -1) + url = strings.ReplaceAll(url, placeholder, escapedName) - doc, err := s.loadURL(url) + doc, err := s.loadURL(context.TODO(), url) if err != nil { return nil, err @@ -156,9 +160,9 @@ func (s *jsonScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, e escapedName := url.QueryEscape(name) url := s.scraper.QueryURL - url = strings.Replace(url, 
placeholder, escapedName, -1) + url = strings.ReplaceAll(url, placeholder, escapedName) - doc, err := s.loadURL(url) + doc, err := s.loadURL(context.TODO(), url) if err != nil { return nil, err @@ -182,7 +186,7 @@ func (s *jsonScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedSc return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config") } - doc, err := s.loadURL(url) + doc, err := s.loadURL(context.TODO(), url) if err != nil { return nil, err @@ -206,7 +210,7 @@ func (s *jsonScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*mo return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") } - doc, err := s.loadURL(url) + doc, err := s.loadURL(context.TODO(), url) if err != nil { return nil, err @@ -230,7 +234,7 @@ func (s *jsonScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.S return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config") } - doc, err := s.loadURL(url) + doc, err := s.loadURL(context.TODO(), url) if err != nil { return nil, err @@ -278,7 +282,7 @@ func (q *jsonQuery) runQuery(selector string) []string { } func (q *jsonQuery) subScrape(value string) mappedQuery { - doc, err := q.scraper.loadURL(value) + doc, err := q.scraper.loadURL(context.TODO(), value) if err != nil { logger.Warnf("Error getting URL '%s' for sub-scraper: %s", value, err.Error()) diff --git a/pkg/scraper/mapped.go b/pkg/scraper/mapped.go index 80d854341..764cfa730 100644 --- a/pkg/scraper/mapped.go +++ b/pkg/scraper/mapped.go @@ -32,7 +32,7 @@ func (s mappedConfig) applyCommon(c commonMappedConfig, src string) string { ret := src for commonKey, commonVal := range c { - ret = strings.Replace(ret, commonKey, commonVal, -1) + ret = strings.ReplaceAll(ret, commonKey, commonVal) } return ret @@ -486,7 +486,7 @@ func (p *postProcessLbToKg) Apply(value string, q mappedQuery) string { const lb_in_kg = 0.45359237 w, err := 
strconv.ParseFloat(value, 64) if err == nil { - w = w * lb_in_kg + w *= lb_in_kg value = strconv.Itoa(int(math.Round(w))) } return value @@ -576,7 +576,7 @@ type mappedScraperAttrConfig struct { postProcessActions []postProcessAction - // deprecated: use PostProcess instead + // Deprecated: use PostProcess instead ParseDate string `yaml:"parseDate"` Replace mappedRegexConfigs `yaml:"replace"` SubScraper *mappedScraperAttrConfig `yaml:"subScraper"` @@ -588,7 +588,8 @@ func (c *mappedScraperAttrConfig) UnmarshalYAML(unmarshal func(interface{}) erro // try unmarshalling into a string first if err := unmarshal(&c.Selector); err != nil { // if it's a type error then we try to unmarshall to the full object - if _, ok := err.(*yaml.TypeError); !ok { + var typeErr *yaml.TypeError + if !errors.As(err, &typeErr) { return err } diff --git a/pkg/scraper/matchers.go b/pkg/scraper/matchers.go deleted file mode 100644 index f129ec8b8..000000000 --- a/pkg/scraper/matchers.go +++ /dev/null @@ -1,109 +0,0 @@ -package scraper - -import ( - "strconv" - - "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" -) - -// MatchScrapedPerformer matches the provided performer with the -// performers in the database and sets the ID field if one is found. -func MatchScrapedPerformer(qb models.PerformerReader, p *models.ScrapedPerformer) error { - if p.Name == nil { - return nil - } - - performers, err := qb.FindByNames([]string{*p.Name}, true) - - if err != nil { - return err - } - - if len(performers) != 1 { - // ignore - cannot match - return nil - } - - id := strconv.Itoa(performers[0].ID) - p.StoredID = &id - return nil -} - -// MatchScrapedStudio matches the provided studio with the studios -// in the database and sets the ID field if one is found. 
-func MatchScrapedStudio(qb models.StudioReader, s *models.ScrapedStudio) error { - st, err := studio.ByName(qb, s.Name) - - if err != nil { - return err - } - - if st == nil { - // try matching by alias - st, err = studio.ByAlias(qb, s.Name) - if err != nil { - return err - } - } - - if st == nil { - // ignore - cannot match - return nil - } - - id := strconv.Itoa(st.ID) - s.StoredID = &id - return nil -} - -// MatchScrapedMovie matches the provided movie with the movies -// in the database and sets the ID field if one is found. -func MatchScrapedMovie(qb models.MovieReader, m *models.ScrapedMovie) error { - if m.Name == nil { - return nil - } - - movies, err := qb.FindByNames([]string{*m.Name}, true) - - if err != nil { - return err - } - - if len(movies) != 1 { - // ignore - cannot match - return nil - } - - id := strconv.Itoa(movies[0].ID) - m.StoredID = &id - return nil -} - -// MatchScrapedTag matches the provided tag with the tags -// in the database and sets the ID field if one is found. 
-func MatchScrapedTag(qb models.TagReader, s *models.ScrapedTag) error { - t, err := tag.ByName(qb, s.Name) - - if err != nil { - return err - } - - if t == nil { - // try matching by alias - t, err = tag.ByAlias(qb, s.Name) - if err != nil { - return err - } - } - - if t == nil { - // ignore - cannot match - return nil - } - - id := strconv.Itoa(t.ID) - s.StoredID = &id - return nil -} diff --git a/pkg/scraper/query_url.go b/pkg/scraper/query_url.go index b48b2b794..2826e15e4 100644 --- a/pkg/scraper/query_url.go +++ b/pkg/scraper/query_url.go @@ -69,7 +69,7 @@ func (p queryURLParameters) applyReplacements(r queryURLReplacements) { func (p queryURLParameters) constructURL(url string) string { ret := url for k, v := range p { - ret = strings.Replace(ret, "{"+k+"}", v, -1) + ret = strings.ReplaceAll(ret, "{"+k+"}", v) } return ret diff --git a/pkg/scraper/scraper.go b/pkg/scraper/scraper.go new file mode 100644 index 000000000..b842f3df4 --- /dev/null +++ b/pkg/scraper/scraper.go @@ -0,0 +1,51 @@ +package scraper + +import "github.com/stashapp/stash/pkg/models" + +type urlMatcher interface { + matchesURL(url string) bool +} + +type performerScraper interface { + scrapeByName(name string) ([]*models.ScrapedPerformer, error) + scrapeByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) + scrapeByURL(url string) (*models.ScrapedPerformer, error) +} + +type sceneScraper interface { + scrapeByName(name string) ([]*models.ScrapedScene, error) + scrapeByScene(scene *models.Scene) (*models.ScrapedScene, error) + scrapeByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) + scrapeByURL(url string) (*models.ScrapedScene, error) +} + +type galleryScraper interface { + scrapeByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) + scrapeByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) + scrapeByURL(url string) (*models.ScrapedGallery, error) +} + +type movieScraper interface { + 
scrapeByURL(url string) (*models.ScrapedMovie, error) +} + +type scraper struct { + ID string + Spec *models.Scraper + + Performer performerScraper + Scene sceneScraper + Gallery galleryScraper + Movie movieScraper +} + +func matchesURL(maybeURLMatcher interface{}, url string) bool { + if maybeURLMatcher != nil { + matcher, ok := maybeURLMatcher.(urlMatcher) + if ok { + return matcher.matchesURL(url) + } + } + + return false +} diff --git a/pkg/scraper/scrapers.go b/pkg/scraper/scrapers.go index d039a59a7..590991000 100644 --- a/pkg/scraper/scrapers.go +++ b/pkg/scraper/scrapers.go @@ -2,18 +2,39 @@ package scraper import ( "context" + "crypto/tls" "errors" + "fmt" + "net/http" "os" "path/filepath" "regexp" "strings" + "time" "github.com/stashapp/stash/pkg/logger" stash_config "github.com/stashapp/stash/pkg/manager/config" + "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) +var ErrMaxRedirects = errors.New("maximum number of HTTP redirects reached") + +const ( + // scrapeGetTimeout is the timeout for scraper HTTP requests. Includes transfer time. + // We may want to bump this at some point and use local context-timeouts if more granularity + // is needed. + scrapeGetTimeout = time.Second * 60 + + // maxIdleConnsPerHost is the maximum number of idle connections the HTTP client will + // keep on a per-host basis. + maxIdleConnsPerHost = 8 + + // maxRedirects defines the maximum number of redirects the HTTP client will follow + maxRedirects = 20 +) + // GlobalConfig contains the global scraper options. type GlobalConfig interface { GetScraperUserAgent() string @@ -32,11 +53,32 @@ func isCDPPathWS(c GlobalConfig) bool { // Cache stores scraper details. type Cache struct { - scrapers []config + client *http.Client + scrapers []scraper globalConfig GlobalConfig txnManager models.TransactionManager } +// newClient creates a scraper-local http client we use throughout the scraper subsystem. 
+func newClient(gc GlobalConfig) *http.Client { + client := &http.Client{ + Transport: &http.Transport{ // ignore insecure certificates + TLSClientConfig: &tls.Config{InsecureSkipVerify: !gc.GetScraperCertCheck()}, + MaxIdleConnsPerHost: maxIdleConnsPerHost, + }, + Timeout: scrapeGetTimeout, + // defaultCheckRedirect code with max changed from 10 to maxRedirects + CheckRedirect: func(req *http.Request, via []*http.Request) error { + if len(via) >= maxRedirects { + return fmt.Errorf("after %d redirects: %w", maxRedirects, ErrMaxRedirects) + } + return nil + }, + } + + return client +} + // NewCache returns a new Cache loading scraper configurations from the // scraper path provided in the global config object. It returns a new // instance and an error if the scraper directory could not be loaded. @@ -44,20 +86,25 @@ type Cache struct { // Scraper configurations are loaded from yml files in the provided scrapers // directory and any subdirectories. func NewCache(globalConfig GlobalConfig, txnManager models.TransactionManager) (*Cache, error) { - scrapers, err := loadScrapers(globalConfig.GetScrapersPath()) + // HTTP Client setup + client := newClient(globalConfig) + + scrapers, err := loadScrapers(globalConfig, client, txnManager) if err != nil { return nil, err } return &Cache{ + client: client, globalConfig: globalConfig, scrapers: scrapers, txnManager: txnManager, }, nil } -func loadScrapers(path string) ([]config, error) { - scrapers := make([]config, 0) +func loadScrapers(globalConfig GlobalConfig, client *http.Client, txnManager models.TransactionManager) ([]scraper, error) { + path := globalConfig.GetScrapersPath() + scrapers := make([]scraper, 0) logger.Debugf("Reading scraper configs from %s", path) scraperFiles := []string{} @@ -74,14 +121,15 @@ func loadScrapers(path string) ([]config, error) { } // add built-in freeones scraper - scrapers = append(scrapers, getFreeonesScraper()) + scrapers = append(scrapers, getFreeonesScraper(client, txnManager, 
globalConfig), getAutoTagScraper(txnManager, globalConfig)) for _, file := range scraperFiles { - scraper, err := loadScraperFromYAMLFile(file) + c, err := loadConfigFromYAMLFile(file) if err != nil { logger.Errorf("Error loading scraper %s: %s", file, err.Error()) } else { - scrapers = append(scrapers, *scraper) + scraper := createScraperFromConfig(*c, client, txnManager, globalConfig) + scrapers = append(scrapers, scraper) } } @@ -92,7 +140,7 @@ func loadScrapers(path string) ([]config, error) { // In the event of an error during loading, the cache will be left empty. func (c *Cache) ReloadScrapers() error { c.scrapers = nil - scrapers, err := loadScrapers(c.globalConfig.GetScrapersPath()) + scrapers, err := loadScrapers(c.globalConfig, c.client, c.txnManager) if err != nil { return err } @@ -114,8 +162,8 @@ func (c Cache) ListPerformerScrapers() []*models.Scraper { var ret []*models.Scraper for _, s := range c.scrapers { // filter on type - if s.supportsPerformers() { - ret = append(ret, s.toScraper()) + if s.Performer != nil { + ret = append(ret, s.Spec) } } @@ -128,8 +176,8 @@ func (c Cache) ListSceneScrapers() []*models.Scraper { var ret []*models.Scraper for _, s := range c.scrapers { // filter on type - if s.supportsScenes() { - ret = append(ret, s.toScraper()) + if s.Scene != nil { + ret = append(ret, s.Spec) } } @@ -142,8 +190,8 @@ func (c Cache) ListGalleryScrapers() []*models.Scraper { var ret []*models.Scraper for _, s := range c.scrapers { // filter on type - if s.supportsGalleries() { - ret = append(ret, s.toScraper()) + if s.Gallery != nil { + ret = append(ret, s.Spec) } } @@ -156,15 +204,25 @@ func (c Cache) ListMovieScrapers() []*models.Scraper { var ret []*models.Scraper for _, s := range c.scrapers { // filter on type - if s.supportsMovies() { - ret = append(ret, s.toScraper()) + if s.Movie != nil { + ret = append(ret, s.Spec) } } return ret } -func (c Cache) findScraper(scraperID string) *config { +// GetScraper returns the scraper matching the 
provided id. +func (c Cache) GetScraper(scraperID string) *models.Scraper { + ret := c.findScraper(scraperID) + if ret != nil { + return ret.Spec + } + + return nil +} + +func (c Cache) findScraper(scraperID string) *scraper { for _, s := range c.scrapers { if s.ID == scraperID { return &s @@ -180,8 +238,8 @@ func (c Cache) findScraper(scraperID string) *config { func (c Cache) ScrapePerformerList(scraperID string, query string) ([]*models.ScrapedPerformer, error) { // find scraper with the provided id s := c.findScraper(scraperID) - if s != nil { - return s.ScrapePerformerNames(query, c.txnManager, c.globalConfig) + if s != nil && s.Performer != nil { + return s.Performer.scrapeByName(query) } return nil, errors.New("Scraper with ID " + scraperID + " not found") @@ -192,14 +250,14 @@ func (c Cache) ScrapePerformerList(scraperID string, query string) ([]*models.Sc func (c Cache) ScrapePerformer(scraperID string, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) { // find scraper with the provided id s := c.findScraper(scraperID) - if s != nil { - ret, err := s.ScrapePerformer(scrapedPerformer, c.txnManager, c.globalConfig) + if s != nil && s.Performer != nil { + ret, err := s.Performer.scrapeByFragment(scrapedPerformer) if err != nil { return nil, err } if ret != nil { - err = c.postScrapePerformer(ret) + err = c.postScrapePerformer(context.TODO(), ret) if err != nil { return nil, err } @@ -216,14 +274,14 @@ func (c Cache) ScrapePerformer(scraperID string, scrapedPerformer models.Scraped // the URL, then nil is returned. 
func (c Cache) ScrapePerformerURL(url string) (*models.ScrapedPerformer, error) { for _, s := range c.scrapers { - if s.matchesPerformerURL(url) { - ret, err := s.ScrapePerformerURL(url, c.txnManager, c.globalConfig) + if matchesURL(s.Performer, url) { + ret, err := s.Performer.scrapeByURL(url) if err != nil { return nil, err } if ret != nil { - err = c.postScrapePerformer(ret) + err = c.postScrapePerformer(context.TODO(), ret) if err != nil { return nil, err } @@ -236,8 +294,8 @@ func (c Cache) ScrapePerformerURL(url string) (*models.ScrapedPerformer, error) return nil, nil } -func (c Cache) postScrapePerformer(ret *models.ScrapedPerformer) error { - if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { +func (c Cache) postScrapePerformer(ctx context.Context, ret *models.ScrapedPerformer) error { + if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error { tqb := r.Tag() tags, err := postProcessTags(tqb, ret.Tags) @@ -252,7 +310,7 @@ func (c Cache) postScrapePerformer(ret *models.ScrapedPerformer) error { } // post-process - set the image if applicable - if err := setPerformerImage(ret, c.globalConfig); err != nil { + if err := setPerformerImage(ctx, c.client, ret, c.globalConfig); err != nil { logger.Warnf("Could not set image using URL %s: %s", *ret.Image, err.Error()) } @@ -277,8 +335,8 @@ func (c Cache) postScrapeScenePerformer(ret *models.ScrapedPerformer) error { return nil } -func (c Cache) postScrapeScene(ret *models.ScrapedScene) error { - if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { +func (c Cache) postScrapeScene(ctx context.Context, ret *models.ScrapedScene) error { + if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error { pqb := r.Performer() mqb := r.Movie() tqb := r.Tag() @@ -289,13 +347,13 @@ func (c Cache) postScrapeScene(ret *models.ScrapedScene) error { return err } - if err := MatchScrapedPerformer(pqb, p); err != nil 
{ + if err := match.ScrapedPerformer(pqb, p, nil); err != nil { return err } } for _, p := range ret.Movies { - err := MatchScrapedMovie(mqb, p) + err := match.ScrapedMovie(mqb, p) if err != nil { return err } @@ -308,7 +366,7 @@ func (c Cache) postScrapeScene(ret *models.ScrapedScene) error { ret.Tags = tags if ret.Studio != nil { - err := MatchScrapedStudio(sqb, ret.Studio) + err := match.ScrapedStudio(sqb, ret.Studio, nil) if err != nil { return err } @@ -320,8 +378,8 @@ func (c Cache) postScrapeScene(ret *models.ScrapedScene) error { } // post-process - set the image if applicable - if err := setSceneImage(ret, c.globalConfig); err != nil { - logger.Warnf("Could not set image using URL %s: %s", *ret.Image, err.Error()) + if err := setSceneImage(ctx, c.client, ret, c.globalConfig); err != nil { + logger.Warnf("Could not set image using URL %s: %v", *ret.Image, err) } return nil @@ -334,7 +392,7 @@ func (c Cache) postScrapeGallery(ret *models.ScrapedGallery) error { sqb := r.Studio() for _, p := range ret.Performers { - err := MatchScrapedPerformer(pqb, p) + err := match.ScrapedPerformer(pqb, p, nil) if err != nil { return err } @@ -347,7 +405,7 @@ func (c Cache) postScrapeGallery(ret *models.ScrapedGallery) error { ret.Tags = tags if ret.Studio != nil { - err := MatchScrapedStudio(sqb, ret.Studio) + err := match.ScrapedStudio(sqb, ret.Studio, nil) if err != nil { return err } @@ -365,21 +423,21 @@ func (c Cache) postScrapeGallery(ret *models.ScrapedGallery) error { func (c Cache) ScrapeScene(scraperID string, sceneID int) (*models.ScrapedScene, error) { // find scraper with the provided id s := c.findScraper(scraperID) - if s != nil { + if s != nil && s.Scene != nil { // get scene from id scene, err := getScene(sceneID, c.txnManager) if err != nil { return nil, err } - ret, err := s.ScrapeSceneByScene(scene, c.txnManager, c.globalConfig) + ret, err := s.Scene.scrapeByScene(scene) if err != nil { return nil, err } if ret != nil { - err = c.postScrapeScene(ret) + 
err = c.postScrapeScene(context.TODO(), ret) if err != nil { return nil, err } @@ -397,8 +455,8 @@ func (c Cache) ScrapeScene(scraperID string, sceneID int) (*models.ScrapedScene, func (c Cache) ScrapeSceneQuery(scraperID string, query string) ([]*models.ScrapedScene, error) { // find scraper with the provided id s := c.findScraper(scraperID) - if s != nil { - return s.ScrapeSceneQuery(query, c.txnManager, c.globalConfig) + if s != nil && s.Scene != nil { + return s.Scene.scrapeByName(query) } return nil, errors.New("Scraper with ID " + scraperID + " not found") @@ -408,15 +466,15 @@ func (c Cache) ScrapeSceneQuery(scraperID string, query string) ([]*models.Scrap func (c Cache) ScrapeSceneFragment(scraperID string, scene models.ScrapedSceneInput) (*models.ScrapedScene, error) { // find scraper with the provided id s := c.findScraper(scraperID) - if s != nil { - ret, err := s.ScrapeSceneByFragment(scene, c.txnManager, c.globalConfig) + if s != nil && s.Scene != nil { + ret, err := s.Scene.scrapeByFragment(scene) if err != nil { return nil, err } if ret != nil { - err = c.postScrapeScene(ret) + err = c.postScrapeScene(context.TODO(), ret) if err != nil { return nil, err } @@ -433,14 +491,14 @@ func (c Cache) ScrapeSceneFragment(scraperID string, scene models.ScrapedSceneIn // the URL, then nil is returned. func (c Cache) ScrapeSceneURL(url string) (*models.ScrapedScene, error) { for _, s := range c.scrapers { - if s.matchesSceneURL(url) { - ret, err := s.ScrapeSceneURL(url, c.txnManager, c.globalConfig) + if matchesURL(s.Scene, url) { + ret, err := s.Scene.scrapeByURL(url) if err != nil { return nil, err } - err = c.postScrapeScene(ret) + err = c.postScrapeScene(context.TODO(), ret) if err != nil { return nil, err } @@ -455,14 +513,14 @@ func (c Cache) ScrapeSceneURL(url string) (*models.ScrapedScene, error) { // ScrapeGallery uses the scraper with the provided ID to scrape a gallery using existing data. 
func (c Cache) ScrapeGallery(scraperID string, galleryID int) (*models.ScrapedGallery, error) { s := c.findScraper(scraperID) - if s != nil { + if s != nil && s.Gallery != nil { // get gallery from id gallery, err := getGallery(galleryID, c.txnManager) if err != nil { return nil, err } - ret, err := s.ScrapeGalleryByGallery(gallery, c.txnManager, c.globalConfig) + ret, err := s.Gallery.scrapeByGallery(gallery) if err != nil { return nil, err @@ -484,8 +542,8 @@ func (c Cache) ScrapeGallery(scraperID string, galleryID int) (*models.ScrapedGa // ScrapeGalleryFragment uses the scraper with the provided ID to scrape a gallery. func (c Cache) ScrapeGalleryFragment(scraperID string, gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) { s := c.findScraper(scraperID) - if s != nil { - ret, err := s.ScrapeGalleryByFragment(gallery, c.txnManager, c.globalConfig) + if s != nil && s.Gallery != nil { + ret, err := s.Gallery.scrapeByFragment(gallery) if err != nil { return nil, err @@ -509,8 +567,8 @@ func (c Cache) ScrapeGalleryFragment(scraperID string, gallery models.ScrapedGal // the URL, then nil is returned. func (c Cache) ScrapeGalleryURL(url string) (*models.ScrapedGallery, error) { for _, s := range c.scrapers { - if s.matchesGalleryURL(url) { - ret, err := s.ScrapeGalleryURL(url, c.txnManager, c.globalConfig) + if matchesURL(s.Gallery, url) { + ret, err := s.Gallery.scrapeByURL(url) if err != nil { return nil, err @@ -533,25 +591,25 @@ func (c Cache) ScrapeGalleryURL(url string) (*models.ScrapedGallery, error) { // the URL, then nil is returned. 
func (c Cache) ScrapeMovieURL(url string) (*models.ScrapedMovie, error) { for _, s := range c.scrapers { - if s.matchesMovieURL(url) { - ret, err := s.ScrapeMovieURL(url, c.txnManager, c.globalConfig) + if s.Movie != nil && matchesURL(s.Movie, url) { + ret, err := s.Movie.scrapeByURL(url) if err != nil { return nil, err } if ret.Studio != nil { if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { - return MatchScrapedStudio(r.Studio(), ret.Studio) + return match.ScrapedStudio(r.Studio(), ret.Studio, nil) }); err != nil { return nil, err } } // post-process - set the image if applicable - if err := setMovieFrontImage(ret, c.globalConfig); err != nil { + if err := setMovieFrontImage(context.TODO(), c.client, ret, c.globalConfig); err != nil { logger.Warnf("Could not set front image using URL %s: %s", *ret.FrontImage, err.Error()) } - if err := setMovieBackImage(ret, c.globalConfig); err != nil { + if err := setMovieBackImage(context.TODO(), c.client, ret, c.globalConfig); err != nil { logger.Warnf("Could not set back image using URL %s: %s", *ret.BackImage, err.Error()) } @@ -587,7 +645,7 @@ ScrapeTag: } } - err := MatchScrapedTag(tqb, t) + err := match.ScrapedTag(tqb, t) if err != nil { return nil, err } diff --git a/pkg/scraper/stash.go b/pkg/scraper/stash.go index 7023a6d3a..50f5cc12a 100644 --- a/pkg/scraper/stash.go +++ b/pkg/scraper/stash.go @@ -4,6 +4,7 @@ import ( "context" "database/sql" "errors" + "net/http" "strconv" "github.com/jinzhu/copier" @@ -16,13 +17,15 @@ type stashScraper struct { scraper scraperTypeConfig config config globalConfig GlobalConfig + client *http.Client txnManager models.TransactionManager } -func newStashScraper(scraper scraperTypeConfig, txnManager models.TransactionManager, config config, globalConfig GlobalConfig) *stashScraper { +func newStashScraper(scraper scraperTypeConfig, client *http.Client, txnManager models.TransactionManager, config config, globalConfig GlobalConfig) *stashScraper { 
return &stashScraper{ scraper: scraper, config: config, + client: client, globalConfig: globalConfig, txnManager: txnManager, } @@ -69,7 +72,7 @@ func (s *stashScraper) scrapePerformersByName(name string) ([]*models.ScrapedPer }, } - err := client.Query(context.Background(), &q, vars) + err := client.Query(context.TODO(), &q, vars) if err != nil { return nil, err } @@ -125,7 +128,7 @@ func (s *stashScraper) scrapePerformerByFragment(scrapedPerformer models.Scraped "f": performerID, } - err := client.Query(context.Background(), &q, vars) + err := client.Query(context.TODO(), &q, vars) if err != nil { return nil, err } @@ -138,7 +141,7 @@ func (s *stashScraper) scrapePerformerByFragment(scrapedPerformer models.Scraped } // get the performer image directly - ret.Image, err = getStashPerformerImage(s.config.StashServer.URL, performerID, s.globalConfig) + ret.Image, err = getStashPerformerImage(context.TODO(), s.config.StashServer.URL, performerID, s.client, s.globalConfig) if err != nil { return nil, err } @@ -164,7 +167,7 @@ func (s *stashScraper) scrapedStashSceneToScrapedScene(scene *scrapedSceneStash) } // get the performer image directly - ret.Image, err = getStashSceneImage(s.config.StashServer.URL, scene.ID, s.globalConfig) + ret.Image, err = getStashSceneImage(context.TODO(), s.config.StashServer.URL, scene.ID, s.client, s.globalConfig) if err != nil { return nil, err } @@ -190,7 +193,7 @@ func (s *stashScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, }, } - err := client.Query(context.Background(), &q, vars) + err := client.Query(context.TODO(), &q, vars) if err != nil { return nil, err } @@ -240,7 +243,7 @@ func (s *stashScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedS } client := s.getStashClient() - if err := client.Query(context.Background(), &q, vars); err != nil { + if err := client.Query(context.TODO(), &q, vars); err != nil { return nil, err } @@ -251,7 +254,7 @@ func (s *stashScraper) scrapeSceneByScene(scene 
*models.Scene) (*models.ScrapedS } // get the performer image directly - ret.Image, err = getStashSceneImage(s.config.StashServer.URL, q.FindScene.ID, s.globalConfig) + ret.Image, err = getStashSceneImage(context.TODO(), s.config.StashServer.URL, q.FindScene.ID, s.client, s.globalConfig) if err != nil { return nil, err } @@ -293,7 +296,7 @@ func (s *stashScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models. } client := s.getStashClient() - if err := client.Query(context.Background(), &q, vars); err != nil { + if err := client.Query(context.TODO(), &q, vars); err != nil { return nil, err } diff --git a/pkg/scraper/stashbox/graphql/generated_client.go b/pkg/scraper/stashbox/graphql/generated_client.go index e3f4b45dd..8e0b31429 100644 --- a/pkg/scraper/stashbox/graphql/generated_client.go +++ b/pkg/scraper/stashbox/graphql/generated_client.go @@ -18,26 +18,27 @@ func NewClient(cli *http.Client, baseURL string, options ...client.HTTPRequestOp } type Query struct { - FindPerformer *Performer "json:\"findPerformer\" graphql:\"findPerformer\"" - QueryPerformers QueryPerformersResultType "json:\"queryPerformers\" graphql:\"queryPerformers\"" - FindStudio *Studio "json:\"findStudio\" graphql:\"findStudio\"" - QueryStudios QueryStudiosResultType "json:\"queryStudios\" graphql:\"queryStudios\"" - FindTag *Tag "json:\"findTag\" graphql:\"findTag\"" - QueryTags QueryTagsResultType "json:\"queryTags\" graphql:\"queryTags\"" - FindTagCategory *TagCategory "json:\"findTagCategory\" graphql:\"findTagCategory\"" - QueryTagCategories QueryTagCategoriesResultType "json:\"queryTagCategories\" graphql:\"queryTagCategories\"" - FindScene *Scene "json:\"findScene\" graphql:\"findScene\"" - FindSceneByFingerprint []*Scene "json:\"findSceneByFingerprint\" graphql:\"findSceneByFingerprint\"" - FindScenesByFingerprints []*Scene "json:\"findScenesByFingerprints\" graphql:\"findScenesByFingerprints\"" - QueryScenes QueryScenesResultType "json:\"queryScenes\" 
graphql:\"queryScenes\"" - FindEdit *Edit "json:\"findEdit\" graphql:\"findEdit\"" - QueryEdits QueryEditsResultType "json:\"queryEdits\" graphql:\"queryEdits\"" - FindUser *User "json:\"findUser\" graphql:\"findUser\"" - QueryUsers QueryUsersResultType "json:\"queryUsers\" graphql:\"queryUsers\"" - Me *User "json:\"me\" graphql:\"me\"" - SearchPerformer []*Performer "json:\"searchPerformer\" graphql:\"searchPerformer\"" - SearchScene []*Scene "json:\"searchScene\" graphql:\"searchScene\"" - Version Version "json:\"version\" graphql:\"version\"" + FindPerformer *Performer "json:\"findPerformer\" graphql:\"findPerformer\"" + QueryPerformers QueryPerformersResultType "json:\"queryPerformers\" graphql:\"queryPerformers\"" + FindStudio *Studio "json:\"findStudio\" graphql:\"findStudio\"" + QueryStudios QueryStudiosResultType "json:\"queryStudios\" graphql:\"queryStudios\"" + FindTag *Tag "json:\"findTag\" graphql:\"findTag\"" + QueryTags QueryTagsResultType "json:\"queryTags\" graphql:\"queryTags\"" + FindTagCategory *TagCategory "json:\"findTagCategory\" graphql:\"findTagCategory\"" + QueryTagCategories QueryTagCategoriesResultType "json:\"queryTagCategories\" graphql:\"queryTagCategories\"" + FindScene *Scene "json:\"findScene\" graphql:\"findScene\"" + FindSceneByFingerprint []*Scene "json:\"findSceneByFingerprint\" graphql:\"findSceneByFingerprint\"" + FindScenesByFingerprints []*Scene "json:\"findScenesByFingerprints\" graphql:\"findScenesByFingerprints\"" + FindScenesByFullFingerprints []*Scene "json:\"findScenesByFullFingerprints\" graphql:\"findScenesByFullFingerprints\"" + QueryScenes QueryScenesResultType "json:\"queryScenes\" graphql:\"queryScenes\"" + FindEdit *Edit "json:\"findEdit\" graphql:\"findEdit\"" + QueryEdits QueryEditsResultType "json:\"queryEdits\" graphql:\"queryEdits\"" + FindUser *User "json:\"findUser\" graphql:\"findUser\"" + QueryUsers QueryUsersResultType "json:\"queryUsers\" graphql:\"queryUsers\"" + Me *User "json:\"me\" graphql:\"me\"" 
+ SearchPerformer []*Performer "json:\"searchPerformer\" graphql:\"searchPerformer\"" + SearchScene []*Scene "json:\"searchScene\" graphql:\"searchScene\"" + Version Version "json:\"version\" graphql:\"version\"" } type Mutation struct { @@ -120,6 +121,7 @@ type PerformerFragment struct { Disambiguation *string "json:\"disambiguation\" graphql:\"disambiguation\"" Aliases []string "json:\"aliases\" graphql:\"aliases\"" Gender *GenderEnum "json:\"gender\" graphql:\"gender\"" + MergedIds []string "json:\"merged_ids\" graphql:\"merged_ids\"" Urls []*URLFragment "json:\"urls\" graphql:\"urls\"" Images []*ImageFragment "json:\"images\" graphql:\"images\"" Birthdate *FuzzyDateFragment "json:\"birthdate\" graphql:\"birthdate\"" @@ -160,8 +162,8 @@ type SceneFragment struct { type FindSceneByFingerprint struct { FindSceneByFingerprint []*SceneFragment "json:\"findSceneByFingerprint\" graphql:\"findSceneByFingerprint\"" } -type FindScenesByFingerprints struct { - FindScenesByFingerprints []*SceneFragment "json:\"findScenesByFingerprints\" graphql:\"findScenesByFingerprints\"" +type FindScenesByFullFingerprints struct { + FindScenesByFullFingerprints []*SceneFragment "json:\"findScenesByFullFingerprints\" graphql:\"findScenesByFullFingerprints\"" } type SearchScene struct { SearchScene []*SceneFragment "json:\"searchScene\" graphql:\"searchScene\"" @@ -172,6 +174,9 @@ type SearchPerformer struct { type FindPerformerByID struct { FindPerformer *PerformerFragment "json:\"findPerformer\" graphql:\"findPerformer\"" } +type FindSceneByID struct { + FindScene *SceneFragment "json:\"findScene\" graphql:\"findScene\"" +} type SubmitFingerprintPayload struct { SubmitFingerprint bool "json:\"submitFingerprint\" graphql:\"submitFingerprint\"" } @@ -181,56 +186,10 @@ const FindSceneByFingerprintQuery = `query FindSceneByFingerprint ($fingerprint: ... 
SceneFragment } } -fragment TagFragment on Tag { - name - id -} -fragment PerformerFragment on Performer { - id - name - disambiguation - aliases - gender - urls { - ... URLFragment - } - images { - ... ImageFragment - } - birthdate { - ... FuzzyDateFragment - } - ethnicity - country - eye_color - hair_color - height - measurements { - ... MeasurementsFragment - } - breast_type - career_start_year - career_end_year - tattoos { - ... BodyModificationFragment - } - piercings { - ... BodyModificationFragment - } -} fragment FuzzyDateFragment on FuzzyDate { date accuracy } -fragment BodyModificationFragment on BodyModification { - location - description -} -fragment FingerprintFragment on Fingerprint { - algorithm - hash - duration -} fragment SceneFragment on Scene { id title @@ -270,24 +229,71 @@ fragment StudioFragment on Studio { ... ImageFragment } } -fragment ImageFragment on Image { - id - url - width - height -} fragment PerformerAppearanceFragment on PerformerAppearance { as performer { ... PerformerFragment } } +fragment PerformerFragment on Performer { + id + name + disambiguation + aliases + gender + merged_ids + urls { + ... URLFragment + } + images { + ... ImageFragment + } + birthdate { + ... FuzzyDateFragment + } + ethnicity + country + eye_color + hair_color + height + measurements { + ... MeasurementsFragment + } + breast_type + career_start_year + career_end_year + tattoos { + ... BodyModificationFragment + } + piercings { + ... 
BodyModificationFragment + } +} +fragment ImageFragment on Image { + id + url + width + height +} +fragment TagFragment on Tag { + name + id +} fragment MeasurementsFragment on Measurements { band_size cup_size waist hip } +fragment BodyModificationFragment on BodyModification { + location + description +} +fragment FingerprintFragment on Fingerprint { + algorithm + hash + duration +} ` func (c *Client) FindSceneByFingerprint(ctx context.Context, fingerprint FingerprintQueryInput, httpRequestOptions ...client.HTTPRequestOption) (*FindSceneByFingerprint, error) { @@ -303,31 +309,38 @@ func (c *Client) FindSceneByFingerprint(ctx context.Context, fingerprint Fingerp return &res, nil } -const FindScenesByFingerprintsQuery = `query FindScenesByFingerprints ($fingerprints: [String!]!) { - findScenesByFingerprints(fingerprints: $fingerprints) { +const FindScenesByFullFingerprintsQuery = `query FindScenesByFullFingerprints ($fingerprints: [FingerprintQueryInput!]!) { + findScenesByFullFingerprints(fingerprints: $fingerprints) { ... SceneFragment } } +fragment ImageFragment on Image { + id + url + width + height +} +fragment StudioFragment on Studio { + name + id + urls { + ... URLFragment + } + images { + ... ImageFragment + } +} fragment TagFragment on Tag { name id } -fragment FuzzyDateFragment on FuzzyDate { - date - accuracy -} -fragment PerformerAppearanceFragment on PerformerAppearance { - as - performer { - ... PerformerFragment - } -} fragment PerformerFragment on Performer { id name disambiguation aliases gender + merged_ids urls { ... URLFragment } @@ -355,16 +368,16 @@ fragment PerformerFragment on Performer { ... 
BodyModificationFragment } } +fragment FuzzyDateFragment on FuzzyDate { + date + accuracy +} fragment MeasurementsFragment on Measurements { band_size cup_size waist hip } -fragment BodyModificationFragment on BodyModification { - location - description -} fragment SceneFragment on Scene { id title @@ -390,40 +403,34 @@ fragment SceneFragment on Scene { ... FingerprintFragment } } -fragment URLFragment on URL { - url - type -} -fragment ImageFragment on Image { - id - url - width - height -} -fragment StudioFragment on Studio { - name - id - urls { - ... URLFragment - } - images { - ... ImageFragment +fragment PerformerAppearanceFragment on PerformerAppearance { + as + performer { + ... PerformerFragment } } +fragment BodyModificationFragment on BodyModification { + location + description +} fragment FingerprintFragment on Fingerprint { algorithm hash duration } +fragment URLFragment on URL { + url + type +} ` -func (c *Client) FindScenesByFingerprints(ctx context.Context, fingerprints []string, httpRequestOptions ...client.HTTPRequestOption) (*FindScenesByFingerprints, error) { +func (c *Client) FindScenesByFullFingerprints(ctx context.Context, fingerprints []*FingerprintQueryInput, httpRequestOptions ...client.HTTPRequestOption) (*FindScenesByFullFingerprints, error) { vars := map[string]interface{}{ "fingerprints": fingerprints, } - var res FindScenesByFingerprints - if err := c.Client.Post(ctx, FindScenesByFingerprintsQuery, &res, vars, httpRequestOptions...); err != nil { + var res FindScenesByFullFingerprints + if err := c.Client.Post(ctx, FindScenesByFullFingerprintsQuery, &res, vars, httpRequestOptions...); err != nil { return nil, err } @@ -435,21 +442,6 @@ const SearchSceneQuery = `query SearchScene ($term: String!) { ... 
SceneFragment } } -fragment MeasurementsFragment on Measurements { - band_size - cup_size - waist - hip -} -fragment BodyModificationFragment on BodyModification { - location - description -} -fragment FingerprintFragment on Fingerprint { - algorithm - hash - duration -} fragment URLFragment on URL { url type @@ -468,6 +460,11 @@ fragment FuzzyDateFragment on FuzzyDate { date accuracy } +fragment FingerprintFragment on Fingerprint { + algorithm + hash + duration +} fragment SceneFragment on Scene { id title @@ -515,6 +512,7 @@ fragment PerformerFragment on Performer { disambiguation aliases gender + merged_ids urls { ... URLFragment } @@ -542,6 +540,16 @@ fragment PerformerFragment on Performer { ... BodyModificationFragment } } +fragment MeasurementsFragment on Measurements { + band_size + cup_size + waist + hip +} +fragment BodyModificationFragment on BodyModification { + location + description +} ` func (c *Client) SearchScene(ctx context.Context, term string, httpRequestOptions ...client.HTTPRequestOption) (*SearchScene, error) { @@ -562,6 +570,16 @@ const SearchPerformerQuery = `query SearchPerformer ($term: String!) { ... PerformerFragment } } +fragment URLFragment on URL { + url + type +} +fragment ImageFragment on Image { + id + url + width + height +} fragment FuzzyDateFragment on FuzzyDate { date accuracy @@ -582,6 +600,7 @@ fragment PerformerFragment on Performer { disambiguation aliases gender + merged_ids urls { ... URLFragment } @@ -609,16 +628,6 @@ fragment PerformerFragment on Performer { ... BodyModificationFragment } } -fragment URLFragment on URL { - url - type -} -fragment ImageFragment on Image { - id - url - width - height -} ` func (c *Client) SearchPerformer(ctx context.Context, term string, httpRequestOptions ...client.HTTPRequestOption) (*SearchPerformer, error) { @@ -639,26 +648,13 @@ const FindPerformerByIDQuery = `query FindPerformerByID ($id: ID!) { ... 
PerformerFragment } } -fragment FuzzyDateFragment on FuzzyDate { - date - accuracy -} -fragment MeasurementsFragment on Measurements { - band_size - cup_size - waist - hip -} -fragment BodyModificationFragment on BodyModification { - location - description -} fragment PerformerFragment on Performer { id name disambiguation aliases gender + merged_ids urls { ... URLFragment } @@ -696,6 +692,20 @@ fragment ImageFragment on Image { width height } +fragment FuzzyDateFragment on FuzzyDate { + date + accuracy +} +fragment MeasurementsFragment on Measurements { + band_size + cup_size + waist + hip +} +fragment BodyModificationFragment on BodyModification { + location + description +} ` func (c *Client) FindPerformerByID(ctx context.Context, id string, httpRequestOptions ...client.HTTPRequestOption) (*FindPerformerByID, error) { @@ -711,6 +721,134 @@ func (c *Client) FindPerformerByID(ctx context.Context, id string, httpRequestOp return &res, nil } +const FindSceneByIDQuery = `query FindSceneByID ($id: ID!) { + findScene(id: $id) { + ... SceneFragment + } +} +fragment ImageFragment on Image { + id + url + width + height +} +fragment StudioFragment on Studio { + name + id + urls { + ... URLFragment + } + images { + ... ImageFragment + } +} +fragment TagFragment on Tag { + name + id +} +fragment PerformerFragment on Performer { + id + name + disambiguation + aliases + gender + merged_ids + urls { + ... URLFragment + } + images { + ... ImageFragment + } + birthdate { + ... FuzzyDateFragment + } + ethnicity + country + eye_color + hair_color + height + measurements { + ... MeasurementsFragment + } + breast_type + career_start_year + career_end_year + tattoos { + ... BodyModificationFragment + } + piercings { + ... 
BodyModificationFragment + } +} +fragment MeasurementsFragment on Measurements { + band_size + cup_size + waist + hip +} +fragment FingerprintFragment on Fingerprint { + algorithm + hash + duration +} +fragment SceneFragment on Scene { + id + title + details + duration + date + urls { + ... URLFragment + } + images { + ... ImageFragment + } + studio { + ... StudioFragment + } + tags { + ... TagFragment + } + performers { + ... PerformerAppearanceFragment + } + fingerprints { + ... FingerprintFragment + } +} +fragment URLFragment on URL { + url + type +} +fragment BodyModificationFragment on BodyModification { + location + description +} +fragment PerformerAppearanceFragment on PerformerAppearance { + as + performer { + ... PerformerFragment + } +} +fragment FuzzyDateFragment on FuzzyDate { + date + accuracy +} +` + +func (c *Client) FindSceneByID(ctx context.Context, id string, httpRequestOptions ...client.HTTPRequestOption) (*FindSceneByID, error) { + vars := map[string]interface{}{ + "id": id, + } + + var res FindSceneByID + if err := c.Client.Post(ctx, FindSceneByIDQuery, &res, vars, httpRequestOptions...); err != nil { + return nil, err + } + + return &res, nil +} + const SubmitFingerprintQuery = `mutation SubmitFingerprint ($input: FingerprintSubmission!) 
{ submitFingerprint(input: $input) } diff --git a/pkg/scraper/stashbox/graphql/generated_models.go b/pkg/scraper/stashbox/graphql/generated_models.go index 9fa66170f..932acbe6b 100644 --- a/pkg/scraper/stashbox/graphql/generated_models.go +++ b/pkg/scraper/stashbox/graphql/generated_models.go @@ -81,6 +81,7 @@ type Edit struct { Status VoteStatusEnum `json:"status"` Applied bool `json:"applied"` Created time.Time `json:"created"` + Updated time.Time `json:"updated"` } type EditComment struct { @@ -134,9 +135,21 @@ type EyeColorCriterionInput struct { } type Fingerprint struct { - Hash string `json:"hash"` - Algorithm FingerprintAlgorithm `json:"algorithm"` - Duration int `json:"duration"` + Hash string `json:"hash"` + Algorithm FingerprintAlgorithm `json:"algorithm"` + Duration int `json:"duration"` + Submissions int `json:"submissions"` + Created time.Time `json:"created"` + Updated time.Time `json:"updated"` +} + +type FingerprintEditInput struct { + Hash string `json:"hash"` + Algorithm FingerprintAlgorithm `json:"algorithm"` + Duration int `json:"duration"` + Submissions int `json:"submissions"` + Created time.Time `json:"created"` + Updated time.Time `json:"updated"` } type FingerprintInput struct { @@ -255,6 +268,8 @@ type Performer struct { Deleted bool `json:"deleted"` Edits []*Edit `json:"edits"` SceneCount int `json:"scene_count"` + MergedIds []string `json:"merged_ids"` + Studios []*PerformerStudio `json:"studios"` } func (Performer) IsEditTarget() {} @@ -359,16 +374,16 @@ type PerformerEditInput struct { } type PerformerEditOptions struct { - // Set performer alias on scenes without alias to old name if name is changed + // Set performer alias on scenes without alias to old name if name is changed SetModifyAliases bool `json:"set_modify_aliases"` - // Set performer alias on scenes attached to merge sources to old name + // Set performer alias on scenes attached to merge sources to old name SetMergeAliases bool `json:"set_merge_aliases"` } type 
PerformerEditOptionsInput struct { - // Set performer alias on scenes without alias to old name if name is changed + // Set performer alias on scenes without alias to old name if name is changed SetModifyAliases *bool `json:"set_modify_aliases"` - // Set performer alias on scenes attached to merge sources to old name + // Set performer alias on scenes attached to merge sources to old name SetMergeAliases *bool `json:"set_merge_aliases"` } @@ -402,6 +417,11 @@ type PerformerFilterType struct { Piercings *BodyModificationCriterionInput `json:"piercings"` } +type PerformerStudio struct { + Studio *Studio `json:"studio"` + SceneCount int `json:"scene_count"` +} + type PerformerUpdateInput struct { ID string `json:"id"` Name *string `json:"name"` @@ -507,7 +527,7 @@ type SceneCreateInput struct { Performers []*PerformerAppearanceInput `json:"performers"` TagIds []string `json:"tag_ids"` ImageIds []string `json:"image_ids"` - Fingerprints []*FingerprintInput `json:"fingerprints"` + Fingerprints []*FingerprintEditInput `json:"fingerprints"` Duration *int `json:"duration"` Director *string `json:"director"` } @@ -547,7 +567,7 @@ type SceneEditDetailsInput struct { Performers []*PerformerAppearanceInput `json:"performers"` TagIds []string `json:"tag_ids"` ImageIds []string `json:"image_ids"` - Fingerprints []*FingerprintInput `json:"fingerprints"` + Fingerprints []*FingerprintEditInput `json:"fingerprints"` Duration *int `json:"duration"` Director *string `json:"director"` } @@ -578,6 +598,8 @@ type SceneFilterType struct { Performers *MultiIDCriterionInput `json:"performers"` // Filter to include scenes with performer appearing as alias Alias *StringCriterionInput `json:"alias"` + // Filter to only include scenes with these fingerprints + Fingerprints *MultiIDCriterionInput `json:"fingerprints"` } type SceneUpdateInput struct { @@ -590,7 +612,7 @@ type SceneUpdateInput struct { Performers []*PerformerAppearanceInput `json:"performers"` TagIds []string `json:"tag_ids"` 
ImageIds []string `json:"image_ids"` - Fingerprints []*FingerprintInput `json:"fingerprints"` + Fingerprints []*FingerprintEditInput `json:"fingerprints"` Duration *int `json:"duration"` Director *string `json:"director"` } diff --git a/pkg/scraper/stashbox/stash_box.go b/pkg/scraper/stashbox/stash_box.go index b6174d7ee..cd4638809 100644 --- a/pkg/scraper/stashbox/stash_box.go +++ b/pkg/scraper/stashbox/stash_box.go @@ -7,25 +7,21 @@ import ( "net/http" "strconv" "strings" - "time" "github.com/Yamashou/gqlgenc/client" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scraper" "github.com/stashapp/stash/pkg/scraper/stashbox/graphql" "github.com/stashapp/stash/pkg/utils" ) -// Timeout to get the image. Includes transfer time. May want to make this -// configurable at some point. -const imageGetTimeout = time.Second * 30 - // Client represents the client interface to a stash-box server instance. type Client struct { client *graphql.Client txnManager models.TransactionManager + box models.StashBox } // NewClient returns a new instance of a stash-box client. @@ -41,12 +37,17 @@ func NewClient(box models.StashBox, txnManager models.TransactionManager) *Clien return &Client{ client: client, txnManager: txnManager, + box: box, } } +func (c Client) getHTTPClient() *http.Client { + return c.client.Client.Client +} + // QueryStashBoxScene queries stash-box for scenes using a query string. 
-func (c Client) QueryStashBoxScene(queryStr string) ([]*models.ScrapedScene, error) { - scenes, err := c.client.SearchScene(context.TODO(), queryStr) +func (c Client) QueryStashBoxScene(ctx context.Context, queryStr string) ([]*models.ScrapedScene, error) { + scenes, err := c.client.SearchScene(ctx, queryStr) if err != nil { return nil, err } @@ -55,7 +56,7 @@ func (c Client) QueryStashBoxScene(queryStr string) ([]*models.ScrapedScene, err var ret []*models.ScrapedScene for _, s := range sceneFragments { - ss, err := sceneFragmentToScrapedScene(c.txnManager, s) + ss, err := c.sceneFragmentToScrapedScene(context.TODO(), s) if err != nil { return nil, err } @@ -69,16 +70,18 @@ func (c Client) QueryStashBoxScene(queryStr string) ([]*models.ScrapedScene, err // scene's MD5/OSHASH checksum, or PHash, and returns results in the same order // as the input slice. func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([][]*models.ScrapedScene, error) { + ctx := context.TODO() + ids, err := utils.StringSliceToIntSlice(sceneIDs) if err != nil { return nil, err } - var fingerprints []string + var fingerprints []*graphql.FingerprintQueryInput // map fingerprints to their scene index fpToScene := make(map[string][]int) - if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error { qb := r.Scene() for index, sceneID := range ids { @@ -92,18 +95,27 @@ func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([][]*models } if scene.Checksum.Valid { - fingerprints = append(fingerprints, scene.Checksum.String) + fingerprints = append(fingerprints, &graphql.FingerprintQueryInput{ + Hash: scene.Checksum.String, + Algorithm: graphql.FingerprintAlgorithmMd5, + }) fpToScene[scene.Checksum.String] = append(fpToScene[scene.Checksum.String], index) } if scene.OSHash.Valid { - fingerprints = append(fingerprints, scene.OSHash.String) + fingerprints = 
append(fingerprints, &graphql.FingerprintQueryInput{ + Hash: scene.OSHash.String, + Algorithm: graphql.FingerprintAlgorithmOshash, + }) fpToScene[scene.OSHash.String] = append(fpToScene[scene.OSHash.String], index) } if scene.Phash.Valid { phashStr := utils.PhashToString(scene.Phash.Int64) - fingerprints = append(fingerprints, phashStr) + fingerprints = append(fingerprints, &graphql.FingerprintQueryInput{ + Hash: phashStr, + Algorithm: graphql.FingerprintAlgorithmPhash, + }) fpToScene[phashStr] = append(fpToScene[phashStr], index) } } @@ -113,7 +125,7 @@ func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([][]*models return nil, err } - allScenes, err := c.findStashBoxScenesByFingerprints(fingerprints) + allScenes, err := c.findStashBoxScenesByFingerprints(ctx, fingerprints) if err != nil { return nil, err } @@ -139,14 +151,16 @@ func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([][]*models // FindStashBoxScenesByFingerprintsFlat queries stash-box for scenes using every // scene's MD5/OSHASH checksum, or PHash, and returns results a flat slice. 
func (c Client) FindStashBoxScenesByFingerprintsFlat(sceneIDs []string) ([]*models.ScrapedScene, error) { + ctx := context.TODO() + ids, err := utils.StringSliceToIntSlice(sceneIDs) if err != nil { return nil, err } - var fingerprints []string + var fingerprints []*graphql.FingerprintQueryInput - if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error { qb := r.Scene() for _, sceneID := range ids { @@ -160,16 +174,24 @@ func (c Client) FindStashBoxScenesByFingerprintsFlat(sceneIDs []string) ([]*mode } if scene.Checksum.Valid { - fingerprints = append(fingerprints, scene.Checksum.String) + fingerprints = append(fingerprints, &graphql.FingerprintQueryInput{ + Hash: scene.Checksum.String, + Algorithm: graphql.FingerprintAlgorithmMd5, + }) } if scene.OSHash.Valid { - fingerprints = append(fingerprints, scene.OSHash.String) + fingerprints = append(fingerprints, &graphql.FingerprintQueryInput{ + Hash: scene.OSHash.String, + Algorithm: graphql.FingerprintAlgorithmOshash, + }) } if scene.Phash.Valid { - phashStr := utils.PhashToString(scene.Phash.Int64) - fingerprints = append(fingerprints, phashStr) + fingerprints = append(fingerprints, &graphql.FingerprintQueryInput{ + Hash: utils.PhashToString(scene.Phash.Int64), + Algorithm: graphql.FingerprintAlgorithmPhash, + }) } } @@ -178,26 +200,26 @@ func (c Client) FindStashBoxScenesByFingerprintsFlat(sceneIDs []string) ([]*mode return nil, err } - return c.findStashBoxScenesByFingerprints(fingerprints) + return c.findStashBoxScenesByFingerprints(ctx, fingerprints) } -func (c Client) findStashBoxScenesByFingerprints(fingerprints []string) ([]*models.ScrapedScene, error) { +func (c Client) findStashBoxScenesByFingerprints(ctx context.Context, fingerprints []*graphql.FingerprintQueryInput) ([]*models.ScrapedScene, error) { var ret []*models.ScrapedScene for i := 0; i < len(fingerprints); i += 100 { end := i + 100 if 
end > len(fingerprints) { end = len(fingerprints) } - scenes, err := c.client.FindScenesByFingerprints(context.TODO(), fingerprints[i:end]) + scenes, err := c.client.FindScenesByFullFingerprints(ctx, fingerprints[i:end]) if err != nil { return nil, err } - sceneFragments := scenes.FindScenesByFingerprints + sceneFragments := scenes.FindScenesByFullFingerprints for _, s := range sceneFragments { - ss, err := sceneFragmentToScrapedScene(c.txnManager, s) + ss, err := c.sceneFragmentToScrapedScene(ctx, s) if err != nil { return nil, err } @@ -475,11 +497,12 @@ func formatCareerLength(start, end *int) *string { } var ret string - if end == nil { + switch { + case end == nil: ret = fmt.Sprintf("%d -", *start) - } else if start == nil { + case start == nil: ret = fmt.Sprintf("- %d", *end) - } else { + default: ret = fmt.Sprintf("%d - %d", *start, *end) } @@ -504,12 +527,8 @@ func formatBodyModifications(m []*graphql.BodyModificationFragment) *string { return &ret } -func fetchImage(url string) (*string, error) { - client := &http.Client{ - Timeout: imageGetTimeout, - } - - req, err := http.NewRequest("GET", url, nil) +func fetchImage(ctx context.Context, client *http.Client, url string) (*string, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) if err != nil { return nil, err } @@ -583,6 +602,10 @@ func performerFragmentToScrapedScenePerformer(p graphql.PerformerFragment) *mode sp.EyeColor = enumToStringPtr(p.EyeColor, true) } + if p.HairColor != nil { + sp.HairColor = enumToStringPtr(p.HairColor, true) + } + if p.BreastType != nil { sp.FakeTits = enumToStringPtr(p.BreastType, true) } @@ -590,8 +613,8 @@ func performerFragmentToScrapedScenePerformer(p graphql.PerformerFragment) *mode return sp } -func getFirstImage(images []*graphql.ImageFragment) *string { - ret, err := fetchImage(images[0].URL) +func getFirstImage(ctx context.Context, client *http.Client, images []*graphql.ImageFragment) *string { + ret, err := fetchImage(ctx, client, 
images[0].URL) if err != nil { logger.Warnf("Error fetching image %s: %s", images[0].URL, err.Error()) } @@ -612,7 +635,7 @@ func getFingerprints(scene *graphql.SceneFragment) []*models.StashBoxFingerprint return fingerprints } -func sceneFragmentToScrapedScene(txnManager models.TransactionManager, s *graphql.SceneFragment) (*models.ScrapedScene, error) { +func (c Client) sceneFragmentToScrapedScene(ctx context.Context, s *graphql.SceneFragment) (*models.ScrapedScene, error) { stashID := s.ID ss := &models.ScrapedScene{ Title: s.Title, @@ -629,10 +652,10 @@ func sceneFragmentToScrapedScene(txnManager models.TransactionManager, s *graphq if len(s.Images) > 0 { // TODO - #454 code sorts images by aspect ratio according to a wanted // orientation. I'm just grabbing the first for now - ss.Image = getFirstImage(s.Images) + ss.Image = getFirstImage(ctx, c.getHTTPClient(), s.Images) } - if err := txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error { pqb := r.Performer() tqb := r.Tag() @@ -644,7 +667,7 @@ func sceneFragmentToScrapedScene(txnManager models.TransactionManager, s *graphq RemoteSiteID: &studioID, } - err := scraper.MatchScrapedStudio(r.Studio(), ss.Studio) + err := match.ScrapedStudio(r.Studio(), ss.Studio, &c.box.Endpoint) if err != nil { return err } @@ -653,7 +676,7 @@ func sceneFragmentToScrapedScene(txnManager models.TransactionManager, s *graphq for _, p := range s.Performers { sp := performerFragmentToScrapedScenePerformer(p.Performer) - err := scraper.MatchScrapedPerformer(pqb, sp) + err := match.ScrapedPerformer(pqb, sp, &c.box.Endpoint) if err != nil { return err } @@ -666,7 +689,7 @@ func sceneFragmentToScrapedScene(txnManager models.TransactionManager, s *graphq Name: t.Name, } - err := scraper.MatchScrapedTag(tqb, st) + err := match.ScrapedTag(tqb, st) if err != nil { return err } diff --git a/pkg/scraper/url.go b/pkg/scraper/url.go index 
fe33c157d..b07722d3f 100644 --- a/pkg/scraper/url.go +++ b/pkg/scraper/url.go @@ -3,12 +3,10 @@ package scraper import ( "bytes" "context" - "crypto/tls" - "errors" "fmt" "io" "net/http" - "net/http/cookiejar" + "net/url" "os" "strings" "time" @@ -18,55 +16,40 @@ import ( "github.com/chromedp/chromedp" jsoniter "github.com/json-iterator/go" "golang.org/x/net/html/charset" - "golang.org/x/net/publicsuffix" "github.com/stashapp/stash/pkg/logger" ) -// Timeout for the scrape http request. Includes transfer time. May want to make this -// configurable at some point. -const scrapeGetTimeout = time.Second * 60 const scrapeDefaultSleep = time.Second * 2 -func loadURL(url string, scraperConfig config, globalConfig GlobalConfig) (io.Reader, error) { +func loadURL(ctx context.Context, loadURL string, client *http.Client, scraperConfig config, globalConfig GlobalConfig) (io.Reader, error) { driverOptions := scraperConfig.DriverOptions if driverOptions != nil && driverOptions.UseCDP { // get the page using chrome dp - return urlFromCDP(url, *driverOptions, globalConfig) + return urlFromCDP(ctx, loadURL, *driverOptions, globalConfig) } - // get the page using http.Client - options := cookiejar.Options{ - PublicSuffixList: publicsuffix.List, - } - jar, er := cookiejar.New(&options) - if er != nil { - return nil, er - } - - setCookies(jar, scraperConfig) - printCookies(jar, scraperConfig, "Jar cookies set from scraper") - - client := &http.Client{ - Transport: &http.Transport{ // ignore insecure certificates - TLSClientConfig: &tls.Config{InsecureSkipVerify: !globalConfig.GetScraperCertCheck()}, - }, - Timeout: scrapeGetTimeout, - // defaultCheckRedirect code with max changed from 10 to 20 - CheckRedirect: func(req *http.Request, via []*http.Request) error { - if len(via) >= 20 { - return errors.New("stopped after 20 redirects") - } - return nil - }, - Jar: jar, - } - - req, err := http.NewRequest("GET", url, nil) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, 
loadURL, nil) if err != nil { return nil, err } + jar, err := scraperConfig.jar() + if err != nil { + return nil, fmt.Errorf("error creating cookie jar: %w", err) + } + + u, err := url.Parse(loadURL) + if err != nil { + return nil, fmt.Errorf("error parsing url %s: %w", loadURL, err) + } + + // Fetch relevant cookies from the jar for url u and add them to the request + cookies := jar.Cookies(u) + for _, cookie := range cookies { + req.AddCookie(cookie) + } + userAgent := globalConfig.GetScraperUserAgent() if userAgent != "" { req.Header.Set("User-Agent", userAgent) @@ -98,14 +81,13 @@ func loadURL(url string, scraperConfig config, globalConfig GlobalConfig) (io.Re bodyReader := bytes.NewReader(body) printCookies(jar, scraperConfig, "Jar cookies found for scraper urls") - return charset.NewReader(bodyReader, resp.Header.Get("Content-Type")) } // func urlFromCDP uses chrome cdp and DOM to load and process the url // if remote is set as true in the scraperConfig it will try to use localhost:9222 // else it will look for google-chrome in path -func urlFromCDP(url string, driverOptions scraperDriverOptions, globalConfig GlobalConfig) (io.Reader, error) { +func urlFromCDP(ctx context.Context, url string, driverOptions scraperDriverOptions, globalConfig GlobalConfig) (io.Reader, error) { if !driverOptions.UseCDP { return nil, fmt.Errorf("url shouldn't be fetched through CDP") @@ -117,7 +99,7 @@ func urlFromCDP(url string, driverOptions scraperDriverOptions, globalConfig Glo sleepDuration = time.Duration(driverOptions.Sleep) * time.Second } - act := context.Background() + act := context.TODO() // if scraperCDPPath is a remote address, then allocate accordingly cdpPath := globalConfig.GetScraperCDPPath() @@ -130,13 +112,13 @@ func urlFromCDP(url string, driverOptions scraperDriverOptions, globalConfig Glo // if CDPPath is http(s) then we need to get the websocket URL if isCDPPathHTTP(globalConfig) { var err error - remote, err = getRemoteCDPWSAddress(remote) + remote, err = 
getRemoteCDPWSAddress(ctx, remote) if err != nil { return nil, err } } - act, cancelAct = chromedp.NewRemoteAllocator(context.Background(), remote) + act, cancelAct = chromedp.NewRemoteAllocator(act, remote) } else { // use a temporary user directory for chrome dir, err := os.MkdirTemp("", "stash-chromedp") @@ -218,8 +200,13 @@ func setCDPClicks(driverOptions scraperDriverOptions) chromedp.Tasks { } // getRemoteCDPWSAddress returns the complete remote address that is required to access the cdp instance -func getRemoteCDPWSAddress(address string) (string, error) { - resp, err := http.Get(address) +func getRemoteCDPWSAddress(ctx context.Context, url string) (string, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return "", err + } + + resp, err := http.DefaultClient.Do(req) if err != nil { return "", err } diff --git a/pkg/scraper/xpath.go b/pkg/scraper/xpath.go index 0cd955788..0f820a4cd 100644 --- a/pkg/scraper/xpath.go +++ b/pkg/scraper/xpath.go @@ -2,7 +2,9 @@ package scraper import ( "bytes" + "context" "errors" + "net/http" "net/url" "regexp" "strings" @@ -19,14 +21,16 @@ type xpathScraper struct { scraper scraperTypeConfig config config globalConfig GlobalConfig + client *http.Client txnManager models.TransactionManager } -func newXpathScraper(scraper scraperTypeConfig, txnManager models.TransactionManager, config config, globalConfig GlobalConfig) *xpathScraper { +func newXpathScraper(scraper scraperTypeConfig, client *http.Client, txnManager models.TransactionManager, config config, globalConfig GlobalConfig) *xpathScraper { return &xpathScraper{ scraper: scraper, config: config, globalConfig: globalConfig, + client: client, txnManager: txnManager, } } @@ -42,7 +46,7 @@ func (s *xpathScraper) scrapeURL(url string) (*html.Node, *mappedScraper, error) return nil, nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") } - doc, err := s.loadURL(url) + doc, err := 
s.loadURL(context.TODO(), url) if err != nil { return nil, nil, err @@ -108,9 +112,9 @@ func (s *xpathScraper) scrapePerformersByName(name string) ([]*models.ScrapedPer escapedName := url.QueryEscape(name) url := s.scraper.QueryURL - url = strings.Replace(url, placeholder, escapedName, -1) + url = strings.ReplaceAll(url, placeholder, escapedName) - doc, err := s.loadURL(url) + doc, err := s.loadURL(context.TODO(), url) if err != nil { return nil, err @@ -137,9 +141,9 @@ func (s *xpathScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, escapedName := url.QueryEscape(name) url := s.scraper.QueryURL - url = strings.Replace(url, placeholder, escapedName, -1) + url = strings.ReplaceAll(url, placeholder, escapedName) - doc, err := s.loadURL(url) + doc, err := s.loadURL(context.TODO(), url) if err != nil { return nil, err @@ -163,7 +167,7 @@ func (s *xpathScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedS return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") } - doc, err := s.loadURL(url) + doc, err := s.loadURL(context.TODO(), url) if err != nil { return nil, err @@ -187,7 +191,7 @@ func (s *xpathScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*m return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") } - doc, err := s.loadURL(url) + doc, err := s.loadURL(context.TODO(), url) if err != nil { return nil, err @@ -211,7 +215,7 @@ func (s *xpathScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models. 
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") } - doc, err := s.loadURL(url) + doc, err := s.loadURL(context.TODO(), url) if err != nil { return nil, err @@ -225,8 +229,8 @@ func (s *xpathScraper) scrapeGalleryByFragment(gallery models.ScrapedGalleryInpu return nil, errors.New("scrapeGalleryByFragment not supported for xpath scraper") } -func (s *xpathScraper) loadURL(url string) (*html.Node, error) { - r, err := loadURL(url, s.config, s.globalConfig) +func (s *xpathScraper) loadURL(ctx context.Context, url string) (*html.Node, error) { + r, err := loadURL(ctx, url, s.client, s.config, s.globalConfig) if err != nil { return nil, err } @@ -298,7 +302,7 @@ func (q *xpathQuery) nodeText(n *html.Node) string { } func (q *xpathQuery) subScrape(value string) mappedQuery { - doc, err := q.scraper.loadURL(value) + doc, err := q.scraper.loadURL(context.TODO(), value) if err != nil { logger.Warnf("Error getting URL '%s' for sub-scraper: %s", value, err.Error()) diff --git a/pkg/scraper/xpath_test.go b/pkg/scraper/xpath_test.go index 4ba98d40d..ff01741b7 100644 --- a/pkg/scraper/xpath_test.go +++ b/pkg/scraper/xpath_test.go @@ -874,7 +874,9 @@ xPathScrapers: globalConfig := mockGlobalConfig{} - performer, err := c.ScrapePerformerURL(ts.URL, nil, globalConfig) + client := &http.Client{} + s := createScraperFromConfig(*c, client, nil, globalConfig) + performer, err := s.Performer.scrapeByURL(ts.URL) if err != nil { t.Errorf("Error scraping performer: %s", err.Error()) diff --git a/pkg/session/authentication.go b/pkg/session/authentication.go index 7bdf0ea22..ff617c774 100644 --- a/pkg/session/authentication.go +++ b/pkg/session/authentication.go @@ -29,7 +29,16 @@ func CheckAllowPublicWithoutAuth(c *config.Instance, r *http.Request) error { return fmt.Errorf("error parsing remote host (%s): %w", r.RemoteAddr, err) } + // presence of scope ID in IPv6 addresses prevents parsing. 
Remove if present + scopeIDIndex := strings.Index(requestIPString, "%") + if scopeIDIndex != -1 { + requestIPString = requestIPString[0:scopeIDIndex] + } + requestIP := net.ParseIP(requestIPString) + if requestIP == nil { + return fmt.Errorf("unable to parse remote host (%s)", requestIPString) + } if r.Header.Get("X-FORWARDED-FOR") != "" { // Request was proxied @@ -70,12 +79,10 @@ func CheckAllowPublicWithoutAuth(c *config.Instance, r *http.Request) error { return UntrustedProxyError(requestIP) } } - } else { - // request was not proxied - if !isLocalIP(requestIP) { - return ExternalAccessError(requestIP) - } + } else if !isLocalIP(requestIP) { // request was not proxied + return ExternalAccessError(requestIP) } + } return nil @@ -94,7 +101,7 @@ func CheckExternalAccessTripwire(c *config.Instance) *ExternalAccessError { func isLocalIP(requestIP net.IP) bool { _, cgNatAddrSpace, _ := net.ParseCIDR("100.64.0.0/10") - return requestIP.IsPrivate() || requestIP.IsLoopback() || cgNatAddrSpace.Contains(requestIP) + return requestIP.IsPrivate() || requestIP.IsLoopback() || requestIP.IsLinkLocalUnicast() || cgNatAddrSpace.Contains(requestIP) } func isIPTrustedProxy(ip net.IP, trustedProxies []string) bool { diff --git a/pkg/session/authentication_test.go b/pkg/session/authentication_test.go new file mode 100644 index 000000000..1cf967c8f --- /dev/null +++ b/pkg/session/authentication_test.go @@ -0,0 +1,209 @@ +package session + +import ( + "errors" + "net/http" + "testing" + + "github.com/stashapp/stash/pkg/manager/config" +) + +func TestCheckAllowPublicWithoutAuth(t *testing.T) { + c := config.GetInstance() + _ = c.SetInitialMemoryConfig() + + doTest := func(caseIndex int, r *http.Request, expectedErr interface{}) { + t.Helper() + err := CheckAllowPublicWithoutAuth(c, r) + + if expectedErr == nil && err == nil { + return + } + + if expectedErr == nil { + t.Errorf("[%d]: unexpected error: %v", caseIndex, err) + return + } + + if !errors.As(err, expectedErr) { + 
t.Errorf("[%d]: expected %T, got %v (%T)", caseIndex, expectedErr, err, err) + return + } + } + + { + // direct connection tests + testCases := []struct { + address string + err error + }{ + {"192.168.1.1:8080", nil}, + {"192.168.1.1:8080", nil}, + {"100.64.0.1:8080", nil}, + {"127.0.0.1:8080", nil}, + {"[::1]:8080", nil}, + {"[fe80::c081:1c1a:ae39:d3cd%Ethernet 5]:9999", nil}, + {"193.168.1.1:8080", &ExternalAccessError{}}, + {"[2002:9fc4:ed97:e472:5170:5766:520c:c901]:9999", &ExternalAccessError{}}, + } + + // try with no X-FORWARDED-FOR and valid one + xFwdVals := []string{"", "192.168.1.1"} + + for i, xFwdVal := range xFwdVals { + header := make(http.Header) + header.Set("X-FORWARDED-FOR", xFwdVal) + + for ii, tc := range testCases { + r := &http.Request{ + RemoteAddr: tc.address, + Header: header, + } + + doTest((i*len(testCases) + ii), r, tc.err) + } + } + } + + { + // X-FORWARDED-FOR without trusted proxy + testCases := []struct { + proxyChain string + err error + }{ + {"192.168.1.1, 192.168.1.2, 100.64.0.1, 127.0.0.1", nil}, + {"192.168.1.1, 193.168.1.1", &ExternalAccessError{}}, + {"193.168.1.1, 192.168.1.1", &ExternalAccessError{}}, + } + + const remoteAddr = "192.168.1.1:8080" + + header := make(http.Header) + + for i, tc := range testCases { + header.Set("X-FORWARDED-FOR", tc.proxyChain) + r := &http.Request{ + RemoteAddr: remoteAddr, + Header: header, + } + + doTest(i, r, tc.err) + } + } + + { + // X-FORWARDED-FOR with trusted proxy + var trustedProxies = []string{"8.8.8.8", "4.4.4.4"} + c.Set(config.TrustedProxies, trustedProxies) + + testCases := []struct { + address string + proxyChain string + err error + }{ + {"192.168.1.1:8080", "192.168.1.1, 192.168.1.2, 100.64.0.1, 127.0.0.1", &UntrustedProxyError{}}, + {"8.8.8.8:8080", "192.168.1.2, 127.0.0.1", &UntrustedProxyError{}}, + {"8.8.8.8:8080", "193.168.1.1, 4.4.4.4", &ExternalAccessError{}}, + {"8.8.8.8:8080", "4.4.4.4", &ExternalAccessError{}}, + {"8.8.8.8:8080", "192.168.1.1, 4.4.4.4a", 
&UntrustedProxyError{}}, + {"8.8.8.8:8080", "192.168.1.1a, 4.4.4.4", &ExternalAccessError{}}, + {"8.8.8.8:8080", "192.168.1.1, 4.4.4.4", nil}, + {"8.8.8.8:8080", "192.168.1.1", nil}, + } + + header := make(http.Header) + + for i, tc := range testCases { + header.Set("X-FORWARDED-FOR", tc.proxyChain) + r := &http.Request{ + RemoteAddr: tc.address, + Header: header, + } + + doTest(i, r, tc.err) + } + } + + { + // test invalid request IPs + invalidIPs := []string{"192.168.1.a:9999", "192.168.1.1"} + + for _, remoteAddr := range invalidIPs { + r := &http.Request{ + RemoteAddr: remoteAddr, + } + + err := CheckAllowPublicWithoutAuth(c, r) + if errors.As(err, &UntrustedProxyError{}) || errors.As(err, &ExternalAccessError{}) { + t.Errorf("[%s]: unexpected error: %v", remoteAddr, err) + continue + } + + if err == nil { + t.Errorf("[%s]: expected error", remoteAddr) + continue + } + } + } + + { + // test overrides + r := &http.Request{ + RemoteAddr: "193.168.1.1:8080", + } + + c.Set(config.Username, "admin") + c.Set(config.Password, "admin") + + if err := CheckAllowPublicWithoutAuth(c, r); err != nil { + t.Errorf("unexpected error: %v", err) + } + + c.Set(config.Username, "") + c.Set(config.Password, "") + + // HACK - this key isn't publicly exposed + c.Set("dangerous_allow_public_without_auth", true) + + if err := CheckAllowPublicWithoutAuth(c, r); err != nil { + t.Errorf("unexpected error: %v", err) + } + } +} + +func TestCheckExternalAccessTripwire(t *testing.T) { + c := config.GetInstance() + _ = c.SetInitialMemoryConfig() + + c.Set(config.SecurityTripwireAccessedFromPublicInternet, "4.4.4.4") + + // always return nil if authentication configured or dangerous key set + c.Set(config.Username, "admin") + c.Set(config.Password, "admin") + + if err := CheckExternalAccessTripwire(c); err != nil { + t.Errorf("unexpected error %v", err) + } + + c.Set(config.Username, "") + c.Set(config.Password, "") + + // HACK - this key isn't publicly exposed + 
c.Set("dangerous_allow_public_without_auth", true) + + if err := CheckExternalAccessTripwire(c); err != nil { + t.Errorf("unexpected error %v", err) + } + + c.Set("dangerous_allow_public_without_auth", false) + + if err := CheckExternalAccessTripwire(c); err == nil { + t.Errorf("expected error %v", ExternalAccessError("4.4.4.4")) + } + + c.Set(config.SecurityTripwireAccessedFromPublicInternet, "") + + if err := CheckExternalAccessTripwire(c); err != nil { + t.Errorf("unexpected error %v", err) + } +} diff --git a/pkg/sqlite/filter.go b/pkg/sqlite/filter.go index c85b68536..9d5edeb8f 100644 --- a/pkg/sqlite/filter.go +++ b/pkg/sqlite/filter.go @@ -84,12 +84,16 @@ func (j *joins) add(newJoins ...join) { } func (j *joins) toSQL() string { + if len(*j) == 0 { + return "" + } + var ret []string for _, jj := range *j { ret = append(ret, jj.toSQL()) } - return strings.Join(ret, " ") + return " " + strings.Join(ret, " ") } type filterBuilder struct { @@ -210,10 +214,8 @@ func (f *filterBuilder) getSubFilterClause(clause, subFilterClause string) strin var op string if len(ret) > 0 { op = " " + f.subFilterOp + " " - } else { - if f.subFilterOp == notOp { - op = "NOT " - } + } else if f.subFilterOp == notOp { + op = "NOT " } ret += op + "(" + subFilterClause + ")" @@ -423,29 +425,51 @@ type joinedMultiCriterionHandlerBuilder struct { func (m *joinedMultiCriterionHandlerBuilder) handler(criterion *models.MultiCriterionInput) criterionHandlerFunc { return func(f *filterBuilder) { - if criterion != nil && len(criterion.Value) > 0 { - var args []interface{} - for _, tagID := range criterion.Value { - args = append(args, tagID) - } - + if criterion != nil { joinAlias := m.joinAs if joinAlias == "" { joinAlias = m.joinTable } + if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { + var notClause string + if criterion.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + m.addJoinTable(f) + + 
f.addWhere(utils.StrFormat("{table}.{column} IS {not} NULL", utils.StrFormatMap{ + "table": joinAlias, + "column": m.foreignFK, + "not": notClause, + })) + return + } + + if len(criterion.Value) == 0 { + return + } + + var args []interface{} + for _, tagID := range criterion.Value { + args = append(args, tagID) + } + whereClause := "" havingClause := "" - if criterion.Modifier == models.CriterionModifierIncludes { + + switch criterion.Modifier { + case models.CriterionModifierIncludes: // includes any of the provided ids m.addJoinTable(f) whereClause = fmt.Sprintf("%s.%s IN %s", joinAlias, m.foreignFK, getInBinding(len(criterion.Value))) - } else if criterion.Modifier == models.CriterionModifierIncludesAll { + case models.CriterionModifierIncludesAll: // includes all of the provided ids m.addJoinTable(f) whereClause = fmt.Sprintf("%s.%s IN %s", joinAlias, m.foreignFK, getInBinding(len(criterion.Value))) havingClause = fmt.Sprintf("count(distinct %s.%s) IS %d", joinAlias, m.foreignFK, len(criterion.Value)) - } else if criterion.Modifier == models.CriterionModifierExcludes { + case models.CriterionModifierExcludes: // excludes all of the provided ids // need to use actual join table name for this // .id NOT IN (select . from where . 
in ) @@ -471,7 +495,27 @@ type multiCriterionHandlerBuilder struct { func (m *multiCriterionHandlerBuilder) handler(criterion *models.MultiCriterionInput) criterionHandlerFunc { return func(f *filterBuilder) { - if criterion != nil && len(criterion.Value) > 0 { + if criterion != nil { + if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { + var notClause string + if criterion.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + table := m.primaryTable + if m.joinTable != "" { + table = m.joinTable + f.addJoin(table, "", fmt.Sprintf("%s.%s = %s.id", table, m.primaryFK, m.primaryTable)) + } + + f.addWhere(fmt.Sprintf("%s.%s IS %s NULL", table, m.foreignFK, notClause)) + return + } + + if len(criterion.Value) == 0 { + return + } + var args []interface{} for _, tagID := range criterion.Value { args = append(args, tagID) @@ -620,19 +664,38 @@ WHERE id in {inBinding} } func addHierarchicalConditionClauses(f *filterBuilder, criterion *models.HierarchicalMultiCriterionInput, table, idColumn string) { - if criterion.Modifier == models.CriterionModifierIncludes { + switch criterion.Modifier { + case models.CriterionModifierIncludes: f.addWhere(fmt.Sprintf("%s.%s IS NOT NULL", table, idColumn)) - } else if criterion.Modifier == models.CriterionModifierIncludesAll { + case models.CriterionModifierIncludesAll: f.addWhere(fmt.Sprintf("%s.%s IS NOT NULL", table, idColumn)) f.addHaving(fmt.Sprintf("count(distinct %s.%s) IS %d", table, idColumn, len(criterion.Value))) - } else if criterion.Modifier == models.CriterionModifierExcludes { + case models.CriterionModifierExcludes: f.addWhere(fmt.Sprintf("%s.%s IS NULL", table, idColumn)) } } func (m *hierarchicalMultiCriterionHandlerBuilder) handler(criterion *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { return func(f *filterBuilder) { - if criterion != nil && len(criterion.Value) > 0 { + if criterion != nil { + if criterion.Modifier == 
models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { + var notClause string + if criterion.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + f.addWhere(utils.StrFormat("{table}.{column} IS {not} NULL", utils.StrFormatMap{ + "table": m.primaryTable, + "column": m.foreignFK, + "not": notClause, + })) + return + } + + if len(criterion.Value) == 0 { + return + } + valuesClause := getHierarchicalValues(m.tx, criterion.Value, m.foreignTable, m.relationsTable, m.parentFK, criterion.Depth) f.addJoin("(SELECT column1 AS root_id, column2 AS item_id FROM ("+valuesClause+"))", m.derivedTable, fmt.Sprintf("%s.item_id = %s.%s", m.derivedTable, m.primaryTable, m.foreignFK)) @@ -659,10 +722,31 @@ type joinedHierarchicalMultiCriterionHandlerBuilder struct { func (m *joinedHierarchicalMultiCriterionHandlerBuilder) handler(criterion *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { return func(f *filterBuilder) { - if criterion != nil && len(criterion.Value) > 0 { + if criterion != nil { + joinAlias := m.joinAs + + if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { + var notClause string + if criterion.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + f.addJoin(m.joinTable, joinAlias, fmt.Sprintf("%s.%s = %s.id", joinAlias, m.primaryFK, m.primaryTable)) + + f.addWhere(utils.StrFormat("{table}.{column} IS {not} NULL", utils.StrFormatMap{ + "table": joinAlias, + "column": m.foreignFK, + "not": notClause, + })) + return + } + + if len(criterion.Value) == 0 { + return + } + valuesClause := getHierarchicalValues(m.tx, criterion.Value, m.foreignTable, m.relationsTable, m.parentFK, criterion.Depth) - joinAlias := m.joinAs joinTable := utils.StrFormat(`( SELECT j.*, d.column1 AS root_id, d.column2 AS item_id FROM {joinTable} AS j INNER JOIN ({valuesClause}) AS d ON j.{foreignFK} = d.column2 diff --git a/pkg/sqlite/gallery.go 
b/pkg/sqlite/gallery.go index 2564c068b..cc79e1a89 100644 --- a/pkg/sqlite/gallery.go +++ b/pkg/sqlite/gallery.go @@ -2,6 +2,7 @@ package sqlite import ( "database/sql" + "errors" "fmt" "github.com/stashapp/stash/pkg/models" @@ -75,7 +76,7 @@ func (qb *galleryQueryBuilder) Destroy(id int) error { func (qb *galleryQueryBuilder) Find(id int) (*models.Gallery, error) { var ret models.Gallery if err := qb.get(id, &ret); err != nil { - if err == sql.ErrNoRows { + if errors.Is(err, sql.ErrNoRows) { return nil, nil } return nil, err @@ -232,8 +233,7 @@ func (qb *galleryQueryBuilder) makeQuery(galleryFilter *models.GalleryFilterType } query := qb.newQuery() - - query.body = selectDistinctIDs(galleryTable) + distinctIDs(&query, galleryTable) if q := findFilter.Q; q != nil && *q != "" { searchColumns := []string{"galleries.title", "galleries.path", "galleries.checksum"} @@ -390,7 +390,24 @@ func galleryStudioCriterionHandler(qb *galleryQueryBuilder, studios *models.Hier func galleryPerformerTagsCriterionHandler(qb *galleryQueryBuilder, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { return func(f *filterBuilder) { - if tags != nil && len(tags.Value) > 0 { + if tags != nil { + if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { + var notClause string + if tags.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + f.addJoin("performers_galleries", "", "galleries.id = performers_galleries.gallery_id") + f.addJoin("performers_tags", "", "performers_galleries.performer_id = performers_tags.performer_id") + + f.addWhere(fmt.Sprintf("performers_tags.tag_id IS %s NULL", notClause)) + return + } + + if len(tags.Value) == 0 { + return + } + valuesClause := getHierarchicalValues(qb.tx, tags.Value, tagTable, "tags_relations", "", tags.Depth) f.addWith(`performer_tags AS ( @@ -417,13 +434,14 @@ func galleryAverageResolutionCriterionHandler(qb *galleryQueryBuilder, resolutio const widthHeight = 
"avg(MIN(images.width, images.height))" - if resolution.Modifier == models.CriterionModifierEquals { + switch resolution.Modifier { + case models.CriterionModifierEquals: f.addHaving(fmt.Sprintf("%s BETWEEN %d AND %d", widthHeight, min, max)) - } else if resolution.Modifier == models.CriterionModifierNotEquals { + case models.CriterionModifierNotEquals: f.addHaving(fmt.Sprintf("%s NOT BETWEEN %d AND %d", widthHeight, min, max)) - } else if resolution.Modifier == models.CriterionModifierLessThan { + case models.CriterionModifierLessThan: f.addHaving(fmt.Sprintf("%s < %d", widthHeight, min)) - } else if resolution.Modifier == models.CriterionModifierGreaterThan { + case models.CriterionModifierGreaterThan: f.addHaving(fmt.Sprintf("%s > %d", widthHeight, max)) } } diff --git a/pkg/sqlite/gallery_test.go b/pkg/sqlite/gallery_test.go index d07907156..a121e4b5c 100644 --- a/pkg/sqlite/gallery_test.go +++ b/pkg/sqlite/gallery_test.go @@ -825,6 +825,29 @@ func TestGalleryQueryPerformerTags(t *testing.T) { galleries = queryGallery(t, sqb, &galleryFilter, &findFilter) assert.Len(t, galleries, 0) + tagCriterion = models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + } + q = getGalleryStringValue(galleryIdx1WithImage, titleField) + + galleries = queryGallery(t, sqb, &galleryFilter, &findFilter) + assert.Len(t, galleries, 1) + assert.Equal(t, galleryIDs[galleryIdx1WithImage], galleries[0].ID) + + q = getGalleryStringValue(galleryIdxWithPerformerTag, titleField) + galleries = queryGallery(t, sqb, &galleryFilter, &findFilter) + assert.Len(t, galleries, 0) + + tagCriterion.Modifier = models.CriterionModifierNotNull + + galleries = queryGallery(t, sqb, &galleryFilter, &findFilter) + assert.Len(t, galleries, 1) + assert.Equal(t, galleryIDs[galleryIdxWithPerformerTag], galleries[0].ID) + + q = getGalleryStringValue(galleryIdx1WithImage, titleField) + galleries = queryGallery(t, sqb, &galleryFilter, &findFilter) + assert.Len(t, galleries, 0) + return 
nil }) } @@ -962,18 +985,24 @@ func verifyGalleriesImageCount(t *testing.T, imageCountCriterion models.IntCrite for _, gallery := range galleries { pp := 0 - _, count, err := r.Image().Query(&models.ImageFilterType{ - Galleries: &models.MultiCriterionInput{ - Value: []string{strconv.Itoa(gallery.ID)}, - Modifier: models.CriterionModifierIncludes, + result, err := r.Image().Query(models.ImageQueryOptions{ + QueryOptions: models.QueryOptions{ + FindFilter: &models.FindFilterType{ + PerPage: &pp, + }, + Count: true, + }, + ImageFilter: &models.ImageFilterType{ + Galleries: &models.MultiCriterionInput{ + Value: []string{strconv.Itoa(gallery.ID)}, + Modifier: models.CriterionModifierIncludes, + }, }, - }, &models.FindFilterType{ - PerPage: &pp, }) if err != nil { return err } - verifyInt(t, count, imageCountCriterion) + verifyInt(t, result.Count, imageCountCriterion) } return nil diff --git a/pkg/sqlite/image.go b/pkg/sqlite/image.go index 01598c51a..12121ef90 100644 --- a/pkg/sqlite/image.go +++ b/pkg/sqlite/image.go @@ -2,6 +2,7 @@ package sqlite import ( "database/sql" + "errors" "fmt" "github.com/stashapp/stash/pkg/models" @@ -145,7 +146,7 @@ func (qb *imageQueryBuilder) FindMany(ids []int) ([]*models.Image, error) { func (qb *imageQueryBuilder) find(id int) (*models.Image, error) { var ret models.Image if err := qb.get(id, &ret); err != nil { - if err == sql.ErrNoRows { + if errors.Is(err, sql.ErrNoRows) { return nil, nil } return nil, err @@ -260,8 +261,7 @@ func (qb *imageQueryBuilder) makeQuery(imageFilter *models.ImageFilterType, find } query := qb.newQuery() - - query.body = selectDistinctIDs(imageTable) + distinctIDs(&query, imageTable) if q := findFilter.Q; q != nil && *q != "" { searchColumns := []string{"images.title", "images.path", "images.checksum"} @@ -282,28 +282,65 @@ func (qb *imageQueryBuilder) makeQuery(imageFilter *models.ImageFilterType, find return &query, nil } -func (qb *imageQueryBuilder) Query(imageFilter *models.ImageFilterType, findFilter 
*models.FindFilterType) ([]*models.Image, int, error) { - query, err := qb.makeQuery(imageFilter, findFilter) +func (qb *imageQueryBuilder) Query(options models.ImageQueryOptions) (*models.ImageQueryResult, error) { + query, err := qb.makeQuery(options.ImageFilter, options.FindFilter) if err != nil { - return nil, 0, err + return nil, err } - idsResult, countResult, err := query.executeFind() + result, err := qb.queryGroupedFields(options, *query) if err != nil { - return nil, 0, err + return nil, fmt.Errorf("error querying aggregate fields: %w", err) } - var images []*models.Image - for _, id := range idsResult { - image, err := qb.Find(id) - if err != nil { - return nil, 0, err - } - - images = append(images, image) + idsResult, err := query.findIDs() + if err != nil { + return nil, fmt.Errorf("error finding IDs: %w", err) } - return images, countResult, nil + result.IDs = idsResult + return result, nil +} + +func (qb *imageQueryBuilder) queryGroupedFields(options models.ImageQueryOptions, query queryBuilder) (*models.ImageQueryResult, error) { + if !options.Count && !options.Megapixels && !options.TotalSize { + // nothing to do - return empty result + return models.NewImageQueryResult(qb), nil + } + + aggregateQuery := qb.newQuery() + + if options.Count { + aggregateQuery.addColumn("COUNT(temp.id) as total") + } + + if options.Megapixels { + query.addColumn("COALESCE(images.width, 0) * COALESCE(images.height, 0) / 1000000 as megapixels") + aggregateQuery.addColumn("COALESCE(SUM(temp.megapixels), 0) as megapixels") + } + + if options.TotalSize { + query.addColumn("COALESCE(images.size, 0) as size") + aggregateQuery.addColumn("COALESCE(SUM(temp.size), 0) as size") + } + + const includeSortPagination = false + aggregateQuery.from = fmt.Sprintf("(%s) as temp", query.toSQL(includeSortPagination)) + + out := struct { + Total int + Megapixels float64 + Size float64 + }{} + if err := qb.repository.queryStruct(aggregateQuery.toSQL(includeSortPagination), query.args, 
&out); err != nil { + return nil, err + } + + ret := models.NewImageQueryResult(qb) + ret.Count = out.Total + ret.Megapixels = out.Megapixels + ret.TotalSize = out.Size + return ret, nil } func (qb *imageQueryBuilder) QueryCount(imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) (int, error) { @@ -427,7 +464,24 @@ func imageStudioCriterionHandler(qb *imageQueryBuilder, studios *models.Hierarch func imagePerformerTagsCriterionHandler(qb *imageQueryBuilder, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { return func(f *filterBuilder) { - if tags != nil && len(tags.Value) > 0 { + if tags != nil { + if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { + var notClause string + if tags.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + f.addJoin("performers_images", "", "images.id = performers_images.image_id") + f.addJoin("performers_tags", "", "performers_images.performer_id = performers_tags.performer_id") + + f.addWhere(fmt.Sprintf("performers_tags.tag_id IS %s NULL", notClause)) + return + } + + if len(tags.Value) == 0 { + return + } + valuesClause := getHierarchicalValues(qb.tx, tags.Value, tagTable, "tags_relations", "", tags.Depth) f.addWith(`performer_tags AS ( diff --git a/pkg/sqlite/image_test.go b/pkg/sqlite/image_test.go index 59802e0d8..141fbb3d6 100644 --- a/pkg/sqlite/image_test.go +++ b/pkg/sqlite/image_test.go @@ -83,14 +83,31 @@ func TestImageQueryQ(t *testing.T) { }) } +func queryImagesWithCount(sqb models.ImageReader, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) ([]*models.Image, int, error) { + result, err := sqb.Query(models.ImageQueryOptions{ + QueryOptions: models.QueryOptions{ + FindFilter: findFilter, + Count: true, + }, + ImageFilter: imageFilter, + }) + if err != nil { + return nil, 0, err + } + + images, err := result.Resolve() + if err != nil { + return nil, 0, err + } + + return images, 
result.Count, nil +} + func imageQueryQ(t *testing.T, sqb models.ImageReader, q string, expectedImageIdx int) { filter := models.FindFilterType{ Q: &q, } - images, _, err := sqb.Query(nil, &filter) - if err != nil { - t.Errorf("Error querying image: %s", err.Error()) - } + images := queryImages(t, sqb, nil, &filter) assert.Len(t, images, 1) image := images[0] @@ -104,10 +121,7 @@ func imageQueryQ(t *testing.T, sqb models.ImageReader, q string, expectedImageId // no Q should return all results filter.Q = nil - images, _, err = sqb.Query(nil, &filter) - if err != nil { - t.Errorf("Error querying image: %s", err.Error()) - } + images = queryImages(t, sqb, nil, &filter) assert.Len(t, images, totalImages) } @@ -141,10 +155,7 @@ func verifyImagePath(t *testing.T, pathCriterion models.StringCriterionInput, ex Path: &pathCriterion, } - images, _, err := sqb.Query(&imageFilter, nil) - if err != nil { - t.Errorf("Error querying image: %s", err.Error()) - } + images := queryImages(t, sqb, &imageFilter, nil) assert.Equal(t, expected, len(images), "number of returned images") @@ -276,17 +287,17 @@ func TestImageIllegalQuery(t *testing.T) { withTxn(func(r models.Repository) error { sqb := r.Image() - _, _, err := sqb.Query(imageFilter, nil) + _, _, err := queryImagesWithCount(sqb, imageFilter, nil) assert.NotNil(err) imageFilter.Or = nil imageFilter.Not = &subFilter - _, _, err = sqb.Query(imageFilter, nil) + _, _, err = queryImagesWithCount(sqb, imageFilter, nil) assert.NotNil(err) imageFilter.And = nil imageFilter.Or = &subFilter - _, _, err = sqb.Query(imageFilter, nil) + _, _, err = queryImagesWithCount(sqb, imageFilter, nil) assert.NotNil(err) return nil @@ -325,7 +336,7 @@ func verifyImagesRating(t *testing.T, ratingCriterion models.IntCriterionInput) Rating: &ratingCriterion, } - images, _, err := sqb.Query(&imageFilter, nil) + images, _, err := queryImagesWithCount(sqb, &imageFilter, nil) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -364,7 
+375,7 @@ func verifyImagesOCounter(t *testing.T, oCounterCriterion models.IntCriterionInp OCounter: &oCounterCriterion, } - images, _, err := sqb.Query(&imageFilter, nil) + images, _, err := queryImagesWithCount(sqb, &imageFilter, nil) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -396,7 +407,7 @@ func verifyImagesResolution(t *testing.T, resolution models.ResolutionEnum) { }, } - images, _, err := sqb.Query(&imageFilter, nil) + images, _, err := queryImagesWithCount(sqb, &imageFilter, nil) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -440,7 +451,7 @@ func TestImageQueryIsMissingGalleries(t *testing.T) { Q: &q, } - images, _, err := sqb.Query(&imageFilter, &findFilter) + images, _, err := queryImagesWithCount(sqb, &imageFilter, &findFilter) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -448,7 +459,7 @@ func TestImageQueryIsMissingGalleries(t *testing.T) { assert.Len(t, images, 0) findFilter.Q = nil - images, _, err = sqb.Query(&imageFilter, &findFilter) + images, _, err = queryImagesWithCount(sqb, &imageFilter, &findFilter) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -475,7 +486,7 @@ func TestImageQueryIsMissingStudio(t *testing.T) { Q: &q, } - images, _, err := sqb.Query(&imageFilter, &findFilter) + images, _, err := queryImagesWithCount(sqb, &imageFilter, &findFilter) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -483,7 +494,7 @@ func TestImageQueryIsMissingStudio(t *testing.T) { assert.Len(t, images, 0) findFilter.Q = nil - images, _, err = sqb.Query(&imageFilter, &findFilter) + images, _, err = queryImagesWithCount(sqb, &imageFilter, &findFilter) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -510,7 +521,7 @@ func TestImageQueryIsMissingPerformers(t *testing.T) { Q: &q, } - images, _, err := sqb.Query(&imageFilter, &findFilter) + images, _, err := queryImagesWithCount(sqb, &imageFilter, &findFilter) if err != 
nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -518,7 +529,7 @@ func TestImageQueryIsMissingPerformers(t *testing.T) { assert.Len(t, images, 0) findFilter.Q = nil - images, _, err = sqb.Query(&imageFilter, &findFilter) + images, _, err = queryImagesWithCount(sqb, &imageFilter, &findFilter) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -547,7 +558,7 @@ func TestImageQueryIsMissingTags(t *testing.T) { Q: &q, } - images, _, err := sqb.Query(&imageFilter, &findFilter) + images, _, err := queryImagesWithCount(sqb, &imageFilter, &findFilter) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -555,7 +566,7 @@ func TestImageQueryIsMissingTags(t *testing.T) { assert.Len(t, images, 0) findFilter.Q = nil - images, _, err = sqb.Query(&imageFilter, &findFilter) + images, _, err = queryImagesWithCount(sqb, &imageFilter, &findFilter) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -574,7 +585,7 @@ func TestImageQueryIsMissingRating(t *testing.T) { IsMissing: &isMissing, } - images, _, err := sqb.Query(&imageFilter, nil) + images, _, err := queryImagesWithCount(sqb, &imageFilter, nil) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -604,7 +615,7 @@ func TestImageQueryGallery(t *testing.T) { Galleries: &galleryCriterion, } - images, _, err := sqb.Query(&imageFilter, nil) + images, _, err := queryImagesWithCount(sqb, &imageFilter, nil) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -624,7 +635,7 @@ func TestImageQueryGallery(t *testing.T) { Modifier: models.CriterionModifierIncludesAll, } - images, _, err = sqb.Query(&imageFilter, nil) + images, _, err = queryImagesWithCount(sqb, &imageFilter, nil) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -644,7 +655,7 @@ func TestImageQueryGallery(t *testing.T) { Q: &q, } - images, _, err = sqb.Query(&imageFilter, &findFilter) + images, _, err = queryImagesWithCount(sqb, &imageFilter, 
&findFilter) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -669,11 +680,7 @@ func TestImageQueryPerformers(t *testing.T) { Performers: &performerCriterion, } - images, _, err := sqb.Query(&imageFilter, nil) - if err != nil { - t.Errorf("Error querying image: %s", err.Error()) - } - + images := queryImages(t, sqb, &imageFilter, nil) assert.Len(t, images, 2) // ensure ids are correct @@ -689,11 +696,7 @@ func TestImageQueryPerformers(t *testing.T) { Modifier: models.CriterionModifierIncludesAll, } - images, _, err = sqb.Query(&imageFilter, nil) - if err != nil { - t.Errorf("Error querying image: %s", err.Error()) - } - + images = queryImages(t, sqb, &imageFilter, nil) assert.Len(t, images, 1) assert.Equal(t, imageIDs[imageIdxWithTwoPerformers], images[0].ID) @@ -709,10 +712,30 @@ func TestImageQueryPerformers(t *testing.T) { Q: &q, } - images, _, err = sqb.Query(&imageFilter, &findFilter) - if err != nil { - t.Errorf("Error querying image: %s", err.Error()) + images = queryImages(t, sqb, &imageFilter, &findFilter) + assert.Len(t, images, 0) + + performerCriterion = models.MultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, } + q = getImageStringValue(imageIdxWithGallery, titleField) + + images = queryImages(t, sqb, &imageFilter, &findFilter) + assert.Len(t, images, 1) + assert.Equal(t, imageIDs[imageIdxWithGallery], images[0].ID) + + q = getImageStringValue(imageIdxWithPerformerTag, titleField) + images = queryImages(t, sqb, &imageFilter, &findFilter) + assert.Len(t, images, 0) + + performerCriterion.Modifier = models.CriterionModifierNotNull + + images = queryImages(t, sqb, &imageFilter, &findFilter) + assert.Len(t, images, 1) + assert.Equal(t, imageIDs[imageIdxWithPerformerTag], images[0].ID) + + q = getImageStringValue(imageIdxWithGallery, titleField) + images = queryImages(t, sqb, &imageFilter, &findFilter) assert.Len(t, images, 0) return nil @@ -734,11 +757,7 @@ func TestImageQueryTags(t *testing.T) { Tags: &tagCriterion, } 
- images, _, err := sqb.Query(&imageFilter, nil) - if err != nil { - t.Errorf("Error querying image: %s", err.Error()) - } - + images := queryImages(t, sqb, &imageFilter, nil) assert.Len(t, images, 2) // ensure ids are correct @@ -754,11 +773,7 @@ func TestImageQueryTags(t *testing.T) { Modifier: models.CriterionModifierIncludesAll, } - images, _, err = sqb.Query(&imageFilter, nil) - if err != nil { - t.Errorf("Error querying image: %s", err.Error()) - } - + images = queryImages(t, sqb, &imageFilter, nil) assert.Len(t, images, 1) assert.Equal(t, imageIDs[imageIdxWithTwoTags], images[0].ID) @@ -774,10 +789,30 @@ func TestImageQueryTags(t *testing.T) { Q: &q, } - images, _, err = sqb.Query(&imageFilter, &findFilter) - if err != nil { - t.Errorf("Error querying image: %s", err.Error()) + images = queryImages(t, sqb, &imageFilter, &findFilter) + assert.Len(t, images, 0) + + tagCriterion = models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, } + q = getImageStringValue(imageIdxWithGallery, titleField) + + images = queryImages(t, sqb, &imageFilter, &findFilter) + assert.Len(t, images, 1) + assert.Equal(t, imageIDs[imageIdxWithGallery], images[0].ID) + + q = getImageStringValue(imageIdxWithTag, titleField) + images = queryImages(t, sqb, &imageFilter, &findFilter) + assert.Len(t, images, 0) + + tagCriterion.Modifier = models.CriterionModifierNotNull + + images = queryImages(t, sqb, &imageFilter, &findFilter) + assert.Len(t, images, 1) + assert.Equal(t, imageIDs[imageIdxWithTag], images[0].ID) + + q = getImageStringValue(imageIdxWithGallery, titleField) + images = queryImages(t, sqb, &imageFilter, &findFilter) assert.Len(t, images, 0) return nil @@ -798,7 +833,7 @@ func TestImageQueryStudio(t *testing.T) { Studios: &studioCriterion, } - images, _, err := sqb.Query(&imageFilter, nil) + images, _, err := queryImagesWithCount(sqb, &imageFilter, nil) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -820,7 +855,7 @@ func 
TestImageQueryStudio(t *testing.T) { Q: &q, } - images, _, err = sqb.Query(&imageFilter, &findFilter) + images, _, err = queryImagesWithCount(sqb, &imageFilter, &findFilter) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -892,7 +927,7 @@ func TestImageQueryStudioDepth(t *testing.T) { } func queryImages(t *testing.T, sqb models.ImageReader, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) []*models.Image { - images, _, err := sqb.Query(imageFilter, findFilter) + images, _, err := queryImagesWithCount(sqb, imageFilter, findFilter) if err != nil { t.Errorf("Error querying images: %s", err.Error()) } @@ -951,6 +986,29 @@ func TestImageQueryPerformerTags(t *testing.T) { images = queryImages(t, sqb, &imageFilter, &findFilter) assert.Len(t, images, 0) + tagCriterion = models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + } + q = getImageStringValue(imageIdxWithGallery, titleField) + + images = queryImages(t, sqb, &imageFilter, &findFilter) + assert.Len(t, images, 1) + assert.Equal(t, imageIDs[imageIdxWithGallery], images[0].ID) + + q = getImageStringValue(imageIdxWithPerformerTag, titleField) + images = queryImages(t, sqb, &imageFilter, &findFilter) + assert.Len(t, images, 0) + + tagCriterion.Modifier = models.CriterionModifierNotNull + + images = queryImages(t, sqb, &imageFilter, &findFilter) + assert.Len(t, images, 1) + assert.Equal(t, imageIDs[imageIdxWithPerformerTag], images[0].ID) + + q = getImageStringValue(imageIdxWithGallery, titleField) + images = queryImages(t, sqb, &imageFilter, &findFilter) + assert.Len(t, images, 0) + return nil }) } @@ -1047,7 +1105,7 @@ func TestImageQuerySorting(t *testing.T) { } sqb := r.Image() - images, _, err := sqb.Query(nil, &findFilter) + images, _, err := queryImagesWithCount(sqb, nil, &findFilter) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -1062,7 +1120,7 @@ func TestImageQuerySorting(t *testing.T) { // sort in descending 
order direction = models.SortDirectionEnumDesc - images, _, err = sqb.Query(nil, &findFilter) + images, _, err = queryImagesWithCount(sqb, nil, &findFilter) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -1084,7 +1142,7 @@ func TestImageQueryPagination(t *testing.T) { } sqb := r.Image() - images, _, err := sqb.Query(nil, &findFilter) + images, _, err := queryImagesWithCount(sqb, nil, &findFilter) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -1095,7 +1153,7 @@ func TestImageQueryPagination(t *testing.T) { page := 2 findFilter.Page = &page - images, _, err = sqb.Query(nil, &findFilter) + images, _, err = queryImagesWithCount(sqb, nil, &findFilter) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } @@ -1107,7 +1165,7 @@ func TestImageQueryPagination(t *testing.T) { perPage = 2 page = 1 - images, _, err = sqb.Query(nil, &findFilter) + images, _, err = queryImagesWithCount(sqb, nil, &findFilter) if err != nil { t.Errorf("Error querying image: %s", err.Error()) } diff --git a/pkg/sqlite/movies.go b/pkg/sqlite/movies.go index 40340ac13..c954db942 100644 --- a/pkg/sqlite/movies.go +++ b/pkg/sqlite/movies.go @@ -2,6 +2,7 @@ package sqlite import ( "database/sql" + "errors" "fmt" "github.com/stashapp/stash/pkg/models" @@ -58,7 +59,7 @@ func (qb *movieQueryBuilder) Destroy(id int) error { func (qb *movieQueryBuilder) Find(id int) (*models.Movie, error) { var ret models.Movie if err := qb.get(id, &ret); err != nil { - if err == sql.ErrNoRows { + if errors.Is(err, sql.ErrNoRows) { return nil, nil } return nil, err @@ -140,8 +141,7 @@ func (qb *movieQueryBuilder) Query(movieFilter *models.MovieFilterType, findFilt } query := qb.newQuery() - - query.body = selectDistinctIDs("movies") + distinctIDs(&query, movieTable) if q := findFilter.Q; q != nil && *q != "" { searchColumns := []string{"movies.name"} @@ -209,7 +209,24 @@ func movieStudioCriterionHandler(qb *movieQueryBuilder, studios *models.Hierarch func 
moviePerformersCriterionHandler(qb *movieQueryBuilder, performers *models.MultiCriterionInput) criterionHandlerFunc { return func(f *filterBuilder) { - if performers != nil && len(performers.Value) > 0 { + if performers != nil { + if performers.Modifier == models.CriterionModifierIsNull || performers.Modifier == models.CriterionModifierNotNull { + var notClause string + if performers.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + f.addJoin("movies_scenes", "", "movies.id = movies_scenes.movie_id") + f.addJoin("performers_scenes", "", "movies_scenes.scene_id = performers_scenes.scene_id") + + f.addWhere(fmt.Sprintf("performers_scenes.performer_id IS %s NULL", notClause)) + return + } + + if len(performers.Value) == 0 { + return + } + var args []interface{} for _, arg := range performers.Value { args = append(args, arg) @@ -224,12 +241,13 @@ func moviePerformersCriterionHandler(qb *movieQueryBuilder, performers *models.M )`, args...) f.addJoin("movies_performers", "", "movies.id = movies_performers.movie_id") - if performers.Modifier == models.CriterionModifierIncludes { + switch performers.Modifier { + case models.CriterionModifierIncludes: f.addWhere("movies_performers.performer_id IS NOT NULL") - } else if performers.Modifier == models.CriterionModifierIncludesAll { + case models.CriterionModifierIncludesAll: f.addWhere("movies_performers.performer_id IS NOT NULL") f.addHaving("COUNT(DISTINCT movies_performers.performer_id) = ?", len(performers.Value)) - } else if performers.Modifier == models.CriterionModifierExcludes { + case models.CriterionModifierExcludes: f.addWhere("movies_performers.performer_id IS NULL") } } diff --git a/pkg/sqlite/performer.go b/pkg/sqlite/performer.go index c8b3f86de..d33d63539 100644 --- a/pkg/sqlite/performer.go +++ b/pkg/sqlite/performer.go @@ -2,6 +2,7 @@ package sqlite import ( "database/sql" + "errors" "fmt" "strings" @@ -84,7 +85,7 @@ func (qb *performerQueryBuilder) Destroy(id int) error { func (qb 
*performerQueryBuilder) Find(id int) (*models.Performer, error) { var ret models.Performer if err := qb.get(id, &ret); err != nil { - if err == sql.ErrNoRows { + if errors.Is(err, sql.ErrNoRows) { return nil, nil } return nil, err @@ -182,11 +183,15 @@ func (qb *performerQueryBuilder) QueryForAutoTag(words []string) ([]*models.Perf var whereClauses []string var args []interface{} + whereClauses = append(whereClauses, "name regexp ?") + args = append(args, "^[\\w][.\\-_ ]") + for _, w := range words { whereClauses = append(whereClauses, "name like ?") args = append(args, w+"%") - whereClauses = append(whereClauses, "aliases like ?") - args = append(args, w+"%") + // TODO - commented out until alias matching works both ways + // whereClauses = append(whereClauses, "aliases like ?") + // args = append(args, w+"%") } where := strings.Join(whereClauses, " OR ") @@ -298,10 +303,8 @@ func (qb *performerQueryBuilder) Query(performerFilter *models.PerformerFilterTy findFilter = &models.FindFilterType{} } - tableName := "performers" query := qb.newQuery() - - query.body = selectDistinctIDs(tableName) + distinctIDs(&query, performerTable) if q := findFilter.Q; q != nil && *q != "" { searchColumns := []string{"performers.name", "performers.aliases"} @@ -436,18 +439,6 @@ func performerGalleryCountCriterionHandler(qb *performerQueryBuilder, count *mod func performerStudiosCriterionHandler(qb *performerQueryBuilder, studios *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { return func(f *filterBuilder) { if studios != nil { - var clauseCondition string - - if studios.Modifier == models.CriterionModifierIncludes { - // return performers who appear in scenes/images/galleries with any of the given studios - clauseCondition = "NOT" - } else if studios.Modifier == models.CriterionModifierExcludes { - // exclude performers who appear in scenes/images/galleries with any of the given studios - clauseCondition = "" - } else { - return - } - formatMaps := 
[]utils.StrFormatMap{ { "primaryTable": sceneTable, @@ -466,6 +457,41 @@ func performerStudiosCriterionHandler(qb *performerQueryBuilder, studios *models }, } + if studios.Modifier == models.CriterionModifierIsNull || studios.Modifier == models.CriterionModifierNotNull { + var notClause string + if studios.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + var conditions []string + for _, c := range formatMaps { + f.addJoin(c["joinTable"].(string), "", fmt.Sprintf("%s.performer_id = performers.id", c["joinTable"])) + f.addJoin(c["primaryTable"].(string), "", fmt.Sprintf("%s.%s = %s.id", c["joinTable"], c["primaryFK"], c["primaryTable"])) + + conditions = append(conditions, fmt.Sprintf("%s.studio_id IS NULL", c["primaryTable"])) + } + + f.addWhere(fmt.Sprintf("%s (%s)", notClause, strings.Join(conditions, " AND "))) + return + } + + if len(studios.Value) == 0 { + return + } + + var clauseCondition string + + switch studios.Modifier { + case models.CriterionModifierIncludes: + // return performers who appear in scenes/images/galleries with any of the given studios + clauseCondition = "NOT" + case models.CriterionModifierExcludes: + // exclude performers who appear in scenes/images/galleries with any of the given studios + clauseCondition = "" + default: + return + } + const derivedPerformerStudioTable = "performer_studio" valuesClause := getHierarchicalValues(qb.tx, studios.Value, studioTable, "", "parent_id", studios.Depth) f.addWith("studio(root_id, item_id) AS (" + valuesClause + ")") @@ -479,7 +505,7 @@ func performerStudiosCriterionHandler(qb *performerQueryBuilder, studios *models unions = append(unions, utils.StrFormat(templStr, c)) } - f.addWith(fmt.Sprintf("%s AS (%s)", "performer_studio", strings.Join(unions, " UNION "))) + f.addWith(fmt.Sprintf("%s AS (%s)", derivedPerformerStudioTable, strings.Join(unions, " UNION "))) f.addJoin(derivedPerformerStudioTable, "", fmt.Sprintf("performers.id = %s.performer_id", 
derivedPerformerStudioTable)) f.addWhere(fmt.Sprintf("%s.performer_id IS %s NULL", derivedPerformerStudioTable, clauseCondition)) @@ -584,6 +610,16 @@ func (qb *performerQueryBuilder) UpdateStashIDs(performerID int, stashIDs []mode return qb.stashIDRepository().replace(performerID, stashIDs) } +func (qb *performerQueryBuilder) FindByStashID(stashID models.StashID) ([]*models.Performer, error) { + query := selectAll("performers") + ` + LEFT JOIN performer_stash_ids on performer_stash_ids.performer_id = performers.id + WHERE performer_stash_ids.stash_id = ? + AND performer_stash_ids.endpoint = ? + ` + args := []interface{}{stashID.StashID, stashID.Endpoint} + return qb.queryPerformers(query, args) +} + func (qb *performerQueryBuilder) FindByStashIDStatus(hasStashID bool, stashboxEndpoint string) ([]*models.Performer, error) { query := selectAll("performers") + ` LEFT JOIN performer_stash_ids on performer_stash_ids.performer_id = performers.id diff --git a/pkg/sqlite/performer_test.go b/pkg/sqlite/performer_test.go index 829016d57..9bd8e05f0 100644 --- a/pkg/sqlite/performer_test.go +++ b/pkg/sqlite/performer_test.go @@ -665,18 +665,24 @@ func verifyPerformersImageCount(t *testing.T, imageCountCriterion models.IntCrit for _, performer := range performers { pp := 0 - _, count, err := r.Image().Query(&models.ImageFilterType{ - Performers: &models.MultiCriterionInput{ - Value: []string{strconv.Itoa(performer.ID)}, - Modifier: models.CriterionModifierIncludes, + result, err := r.Image().Query(models.ImageQueryOptions{ + QueryOptions: models.QueryOptions{ + FindFilter: &models.FindFilterType{ + PerPage: &pp, + }, + Count: true, + }, + ImageFilter: &models.ImageFilterType{ + Performers: &models.MultiCriterionInput{ + Value: []string{strconv.Itoa(performer.ID)}, + Modifier: models.CriterionModifierIncludes, + }, }, - }, &models.FindFilterType{ - PerPage: &pp, }) if err != nil { return err } - verifyInt(t, count, imageCountCriterion) + verifyInt(t, result.Count, 
imageCountCriterion) } return nil @@ -781,6 +787,34 @@ func TestPerformerQueryStudio(t *testing.T) { assert.Len(t, performers, 0) } + // test NULL/not NULL + q := getPerformerStringValue(performerIdx1WithImage, "Name") + performerFilter := &models.PerformerFilterType{ + Studios: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + } + findFilter := &models.FindFilterType{ + Q: &q, + } + + performers := queryPerformers(t, sqb, performerFilter, findFilter) + assert.Len(t, performers, 1) + assert.Equal(t, imageIDs[performerIdx1WithImage], performers[0].ID) + + q = getPerformerStringValue(performerIdxWithSceneStudio, "Name") + performers = queryPerformers(t, sqb, performerFilter, findFilter) + assert.Len(t, performers, 0) + + performerFilter.Studios.Modifier = models.CriterionModifierNotNull + performers = queryPerformers(t, sqb, performerFilter, findFilter) + assert.Len(t, performers, 1) + assert.Equal(t, imageIDs[performerIdxWithSceneStudio], performers[0].ID) + + q = getPerformerStringValue(performerIdx1WithImage, "Name") + performers = queryPerformers(t, sqb, performerFilter, findFilter) + assert.Len(t, performers, 0) + return nil }) } diff --git a/pkg/sqlite/query.go b/pkg/sqlite/query.go index 59b200641..7a0d24878 100644 --- a/pkg/sqlite/query.go +++ b/pkg/sqlite/query.go @@ -1,13 +1,15 @@ package sqlite import ( + "fmt" "strings" ) type queryBuilder struct { repository *repository - body string + columns []string + from string joins joins whereClauses []string @@ -21,13 +23,45 @@ type queryBuilder struct { err error } +func (qb queryBuilder) body() string { + return fmt.Sprintf("SELECT %s FROM %s%s", strings.Join(qb.columns, ", "), qb.from, qb.joins.toSQL()) +} + +func (qb *queryBuilder) addColumn(column string) { + qb.columns = append(qb.columns, column) +} + +func (qb queryBuilder) toSQL(includeSortPagination bool) string { + body := qb.body() + + withClause := "" + if len(qb.withClauses) > 0 { + var recursive string + if 
qb.recursiveWith { + recursive = " RECURSIVE " + } + withClause = "WITH " + recursive + strings.Join(qb.withClauses, ", ") + " " + } + + body = withClause + qb.repository.buildQueryBody(body, qb.whereClauses, qb.havingClauses) + if includeSortPagination { + body += qb.sortAndPagination + } + + return body +} + +func (qb queryBuilder) findIDs() ([]int, error) { + const includeSortPagination = true + return qb.repository.runIdsQuery(qb.toSQL(includeSortPagination), qb.args) +} + func (qb queryBuilder) executeFind() ([]int, int, error) { if qb.err != nil { return nil, 0, qb.err } - body := qb.body - body += qb.joins.toSQL() + body := qb.body() return qb.repository.executeFindQuery(body, qb.args, qb.sortAndPagination, qb.whereClauses, qb.havingClauses, qb.withClauses, qb.recursiveWith) } @@ -37,8 +71,7 @@ func (qb queryBuilder) executeCount() (int, error) { return 0, qb.err } - body := qb.body - body += qb.joins.toSQL() + body := qb.body() withClause := "" if len(qb.withClauses) > 0 { diff --git a/pkg/sqlite/repository.go b/pkg/sqlite/repository.go index b6b107151..160cbbc88 100644 --- a/pkg/sqlite/repository.go +++ b/pkg/sqlite/repository.go @@ -2,6 +2,7 @@ package sqlite import ( "database/sql" + "errors" "fmt" "reflect" "strings" @@ -32,7 +33,7 @@ func (r *repository) get(id int, dest interface{}) error { func (r *repository) getAll(id int, f func(rows *sqlx.Rows) error) error { stmt := fmt.Sprintf("SELECT * FROM %s WHERE %s = ?", r.tableName, r.idColumn) - return r.queryFunc(stmt, []interface{}{id}, f) + return r.queryFunc(stmt, []interface{}{id}, false, f) } func (r *repository) insert(obj interface{}) (sql.Result, error) { @@ -132,7 +133,7 @@ func (r *repository) runCountQuery(query string, args []interface{}) (int, error }{0} // Perform query and fetch result - if err := r.tx.Get(&result, query, args...); err != nil && err != sql.ErrNoRows { + if err := r.tx.Get(&result, query, args...); err != nil && !errors.Is(err, sql.ErrNoRows) { return 0, err } @@ -144,7 
+145,7 @@ func (r *repository) runIdsQuery(query string, args []interface{}) ([]int, error Int int `db:"id"` } - if err := r.tx.Select(&result, query, args...); err != nil && err != sql.ErrNoRows { + if err := r.tx.Select(&result, query, args...); err != nil && !errors.Is(err, sql.ErrNoRows) { return []int{}, err } @@ -162,17 +163,17 @@ func (r *repository) runSumQuery(query string, args []interface{}) (float64, err }{0} // Perform query and fetch result - if err := r.tx.Get(&result, query, args...); err != nil && err != sql.ErrNoRows { + if err := r.tx.Get(&result, query, args...); err != nil && !errors.Is(err, sql.ErrNoRows) { return 0, err } return result.Float64, nil } -func (r *repository) queryFunc(query string, args []interface{}, f func(rows *sqlx.Rows) error) error { +func (r *repository) queryFunc(query string, args []interface{}, single bool, f func(rows *sqlx.Rows) error) error { rows, err := r.tx.Queryx(query, args...) - if err != nil && err != sql.ErrNoRows { + if err != nil && !errors.Is(err, sql.ErrNoRows) { return err } defer rows.Close() @@ -181,6 +182,9 @@ func (r *repository) queryFunc(query string, args []interface{}, f func(rows *sq if err := f(rows); err != nil { return err } + if single { + break + } } if err := rows.Err(); err != nil { @@ -191,32 +195,29 @@ func (r *repository) queryFunc(query string, args []interface{}, f func(rows *sq } func (r *repository) query(query string, args []interface{}, out objectList) error { - rows, err := r.tx.Queryx(query, args...) 
- - if err != nil && err != sql.ErrNoRows { - return err - } - defer rows.Close() - - for rows.Next() { + return r.queryFunc(query, args, false, func(rows *sqlx.Rows) error { object := out.New() if err := rows.StructScan(object); err != nil { return err } out.Append(object) - } + return nil + }) +} - if err := rows.Err(); err != nil { - return err - } - - return nil +func (r *repository) queryStruct(query string, args []interface{}, out interface{}) error { + return r.queryFunc(query, args, true, func(rows *sqlx.Rows) error { + if err := rows.StructScan(out); err != nil { + return err + } + return nil + }) } func (r *repository) querySimple(query string, args []interface{}, out interface{}) error { rows, err := r.tx.Queryx(query, args...) - if err != nil && err != sql.ErrNoRows { + if err != nil && !errors.Is(err, sql.ErrNoRows) { return err } defer rows.Close() @@ -360,7 +361,7 @@ type stringRepository struct { func (r *stringRepository) get(id int) ([]string, error) { query := fmt.Sprintf("SELECT %s from %s WHERE %s = ?", r.stringColumn, r.tableName, r.idColumn) var ret []string - err := r.queryFunc(query, []interface{}{id}, func(rows *sqlx.Rows) error { + err := r.queryFunc(query, []interface{}{id}, false, func(rows *sqlx.Rows) error { var out string if err := rows.Scan(&out); err != nil { return err @@ -431,7 +432,7 @@ func listKeys(i interface{}, addPrefix bool) string { var query []string v := reflect.ValueOf(i) for i := 0; i < v.NumField(); i++ { - //get key for struct tag + // Get key for struct tag rawKey := v.Type().Field(i).Tag.Get("db") key := strings.Split(rawKey, ",")[0] if key == "id" { @@ -449,7 +450,7 @@ func updateSet(i interface{}, partial bool) string { var query []string v := reflect.ValueOf(i) for i := 0; i < v.NumField(); i++ { - //get key for struct tag + // Get key for struct tag rawKey := v.Type().Field(i).Tag.Get("db") key := strings.Split(rawKey, ",")[0] if key == "id" { diff --git a/pkg/sqlite/saved_filter.go 
b/pkg/sqlite/saved_filter.go index c4bbf0f8e..8630a14a7 100644 --- a/pkg/sqlite/saved_filter.go +++ b/pkg/sqlite/saved_filter.go @@ -2,6 +2,7 @@ package sqlite import ( "database/sql" + "errors" "fmt" "github.com/stashapp/stash/pkg/models" @@ -72,7 +73,7 @@ func (qb *savedFilterQueryBuilder) Destroy(id int) error { func (qb *savedFilterQueryBuilder) Find(id int) (*models.SavedFilter, error) { var ret models.SavedFilter if err := qb.get(id, &ret); err != nil { - if err == sql.ErrNoRows { + if errors.Is(err, sql.ErrNoRows) { return nil, nil } return nil, err diff --git a/pkg/sqlite/scene.go b/pkg/sqlite/scene.go index 5aa0d4722..1edf73d11 100644 --- a/pkg/sqlite/scene.go +++ b/pkg/sqlite/scene.go @@ -2,6 +2,7 @@ package sqlite import ( "database/sql" + "errors" "fmt" "strconv" "strings" @@ -215,7 +216,7 @@ func (qb *sceneQueryBuilder) FindMany(ids []int) ([]*models.Scene, error) { func (qb *sceneQueryBuilder) find(id int) (*models.Scene, error) { var ret models.Scene if err := qb.get(id, &ret); err != nil { - if err == sql.ErrNoRows { + if errors.Is(err, sql.ErrNoRows) { return nil, nil } return nil, err @@ -394,7 +395,10 @@ func (qb *sceneQueryBuilder) makeFilter(sceneFilter *models.SceneFilterType) *fi return query } -func (qb *sceneQueryBuilder) Query(sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, int, error) { +func (qb *sceneQueryBuilder) Query(options models.SceneQueryOptions) (*models.SceneQueryResult, error) { + sceneFilter := options.SceneFilter + findFilter := options.FindFilter + if sceneFilter == nil { sceneFilter = &models.SceneFilterType{} } @@ -403,8 +407,7 @@ func (qb *sceneQueryBuilder) Query(sceneFilter *models.SceneFilterType, findFilt } query := qb.newQuery() - - query.body = selectDistinctIDs(sceneTable) + distinctIDs(&query, sceneTable) if q := findFilter.Q; q != nil && *q != "" { query.join("scene_markers", "", "scene_markers.scene_id = scenes.id") @@ -415,7 +418,7 @@ func (qb *sceneQueryBuilder) 
Query(sceneFilter *models.SceneFilterType, findFilt } if err := qb.validateFilter(sceneFilter); err != nil { - return nil, 0, err + return nil, err } filter := qb.makeFilter(sceneFilter) @@ -424,21 +427,59 @@ func (qb *sceneQueryBuilder) Query(sceneFilter *models.SceneFilterType, findFilt qb.setSceneSort(&query, findFilter) query.sortAndPagination += getPagination(findFilter) - idsResult, countResult, err := query.executeFind() + result, err := qb.queryGroupedFields(options, query) if err != nil { - return nil, 0, err + return nil, fmt.Errorf("error querying aggregate fields: %w", err) } - var scenes []*models.Scene - for _, id := range idsResult { - scene, err := qb.Find(id) - if err != nil { - return nil, 0, err - } - scenes = append(scenes, scene) + idsResult, err := query.findIDs() + if err != nil { + return nil, fmt.Errorf("error finding IDs: %w", err) } - return scenes, countResult, nil + result.IDs = idsResult + return result, nil +} + +func (qb *sceneQueryBuilder) queryGroupedFields(options models.SceneQueryOptions, query queryBuilder) (*models.SceneQueryResult, error) { + if !options.Count && !options.TotalDuration && !options.TotalSize { + // nothing to do - return empty result + return models.NewSceneQueryResult(qb), nil + } + + aggregateQuery := qb.newQuery() + + if options.Count { + aggregateQuery.addColumn("COUNT(temp.id) as total") + } + + if options.TotalDuration { + query.addColumn("COALESCE(scenes.duration, 0) as duration") + aggregateQuery.addColumn("COALESCE(SUM(temp.duration), 0) as duration") + } + + if options.TotalSize { + query.addColumn("COALESCE(scenes.size, 0) as size") + aggregateQuery.addColumn("COALESCE(SUM(temp.size), 0) as size") + } + + const includeSortPagination = false + aggregateQuery.from = fmt.Sprintf("(%s) as temp", query.toSQL(includeSortPagination)) + + out := struct { + Total int + Duration float64 + Size float64 + }{} + if err := qb.repository.queryStruct(aggregateQuery.toSQL(includeSortPagination), query.args, &out); 
err != nil { + return nil, err + } + + ret := models.NewSceneQueryResult(qb) + ret.Count = out.Total + ret.TotalDuration = out.Duration + ret.TotalSize = out.Size + return ret, nil } func phashCriterionHandler(phashFilter *models.StringCriterionInput) criterionHandlerFunc { @@ -481,13 +522,14 @@ func resolutionCriterionHandler(resolution *models.ResolutionCriterionInput, hei widthHeight := fmt.Sprintf("MIN(%s, %s)", widthColumn, heightColumn) - if resolution.Modifier == models.CriterionModifierEquals { + switch resolution.Modifier { + case models.CriterionModifierEquals: f.addWhere(fmt.Sprintf("%s BETWEEN %d AND %d", widthHeight, min, max)) - } else if resolution.Modifier == models.CriterionModifierNotEquals { + case models.CriterionModifierNotEquals: f.addWhere(fmt.Sprintf("%s NOT BETWEEN %d AND %d", widthHeight, min, max)) - } else if resolution.Modifier == models.CriterionModifierLessThan { + case models.CriterionModifierLessThan: f.addWhere(fmt.Sprintf("%s < %d", widthHeight, min)) - } else if resolution.Modifier == models.CriterionModifierGreaterThan { + case models.CriterionModifierGreaterThan: f.addWhere(fmt.Sprintf("%s > %d", widthHeight, max)) } } @@ -626,7 +668,24 @@ func sceneMoviesCriterionHandler(qb *sceneQueryBuilder, movies *models.MultiCrit func scenePerformerTagsCriterionHandler(qb *sceneQueryBuilder, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { return func(f *filterBuilder) { - if tags != nil && len(tags.Value) > 0 { + if tags != nil { + if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { + var notClause string + if tags.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + f.addJoin("performers_scenes", "", "scenes.id = performers_scenes.scene_id") + f.addJoin("performers_tags", "", "performers_scenes.performer_id = performers_tags.performer_id") + + f.addWhere(fmt.Sprintf("performers_tags.tag_id IS %s NULL", notClause)) + return + } + + if 
len(tags.Value) == 0 { + return + } + valuesClause := getHierarchicalValues(qb.tx, tags.Value, tagTable, "tags_relations", "", tags.Depth) f.addWith(`performer_tags AS ( @@ -846,7 +905,7 @@ func (qb *sceneQueryBuilder) FindDuplicates(distance int) ([][]*models.Scene, er } else { var hashes []*utils.Phash - if err := qb.queryFunc(findAllPhashesQuery, nil, func(rows *sqlx.Rows) error { + if err := qb.queryFunc(findAllPhashesQuery, nil, false, func(rows *sqlx.Rows) error { phash := utils.Phash{ Bucket: -1, } diff --git a/pkg/sqlite/scene_marker.go b/pkg/sqlite/scene_marker.go index 7872b21c5..02e610631 100644 --- a/pkg/sqlite/scene_marker.go +++ b/pkg/sqlite/scene_marker.go @@ -2,6 +2,7 @@ package sqlite import ( "database/sql" + "errors" "fmt" "github.com/stashapp/stash/pkg/database" @@ -105,13 +106,13 @@ func (qb *sceneMarkerQueryBuilder) CountByTagID(tagID int) (int, error) { func (qb *sceneMarkerQueryBuilder) GetMarkerStrings(q *string, sort *string) ([]*models.MarkerStringsResultType, error) { query := "SELECT count(*) as `count`, scene_markers.id as id, scene_markers.title as title FROM scene_markers" if q != nil { - query = query + " WHERE title LIKE '%" + *q + "%'" + query += " WHERE title LIKE '%" + *q + "%'" } - query = query + " GROUP BY title" + query += " GROUP BY title" if sort != nil && *sort == "count" { - query = query + " ORDER BY `count` DESC" + query += " ORDER BY `count` DESC" } else { - query = query + " ORDER BY title ASC" + query += " ORDER BY title ASC" } var args []interface{} return qb.queryMarkerStringsResultType(query, args) @@ -146,8 +147,7 @@ func (qb *sceneMarkerQueryBuilder) Query(sceneMarkerFilter *models.SceneMarkerFi } query := qb.newQuery() - - query.body = selectDistinctIDs("scene_markers") + distinctIDs(&query, sceneMarkerTable) if q := findFilter.Q; q != nil && *q != "" { searchColumns := []string{"scene_markers.title", "scenes.title"} @@ -191,7 +191,22 @@ func sceneMarkerTagIDCriterionHandler(qb *sceneMarkerQueryBuilder, tagID 
*string func sceneMarkerTagsCriterionHandler(qb *sceneMarkerQueryBuilder, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { return func(f *filterBuilder) { - if tags != nil && len(tags.Value) > 0 { + if tags != nil { + if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { + var notClause string + if tags.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + f.addJoin("scene_markers_tags", "", "scene_markers.id = scene_markers_tags.scene_marker_id") + + f.addWhere(fmt.Sprintf("%s scene_markers_tags.tag_id IS NULL", notClause)) + return + } + + if len(tags.Value) == 0 { + return + } valuesClause := getHierarchicalValues(qb.tx, tags.Value, tagTable, "tags_relations", "", tags.Depth) f.addWith(`marker_tags AS ( @@ -211,7 +226,23 @@ INNER JOIN (` + valuesClause + `) t ON t.column2 = m.primary_tag_id func sceneMarkerSceneTagsCriterionHandler(qb *sceneMarkerQueryBuilder, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { return func(f *filterBuilder) { - if tags != nil && len(tags.Value) > 0 { + if tags != nil { + if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { + var notClause string + if tags.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + f.addJoin("scenes_tags", "", "scene_markers.scene_id = scenes_tags.scene_id") + + f.addWhere(fmt.Sprintf("scenes_tags.tag_id IS %s NULL", notClause)) + return + } + + if len(tags.Value) == 0 { + return + } + valuesClause := getHierarchicalValues(qb.tx, tags.Value, tagTable, "tags_relations", "", tags.Depth) f.addWith(`scene_tags AS ( @@ -271,7 +302,7 @@ func (qb *sceneMarkerQueryBuilder) querySceneMarkers(query string, args []interf func (qb *sceneMarkerQueryBuilder) queryMarkerStringsResultType(query string, args []interface{}) ([]*models.MarkerStringsResultType, error) { rows, err := database.DB.Queryx(query, args...) 
- if err != nil && err != sql.ErrNoRows { + if err != nil && !errors.Is(err, sql.ErrNoRows) { return nil, err } defer rows.Close() diff --git a/pkg/sqlite/scene_marker_test.go b/pkg/sqlite/scene_marker_test.go index d50c181de..2fa0d7501 100644 --- a/pkg/sqlite/scene_marker_test.go +++ b/pkg/sqlite/scene_marker_test.go @@ -14,15 +14,17 @@ func TestMarkerFindBySceneID(t *testing.T) { withTxn(func(r models.Repository) error { mqb := r.SceneMarker() - sceneID := sceneIDs[sceneIdxWithMarker] + sceneID := sceneIDs[sceneIdxWithMarkers] markers, err := mqb.FindBySceneID(sceneID) if err != nil { t.Errorf("Error finding markers: %s", err.Error()) } - assert.Len(t, markers, 1) - assert.Equal(t, markerIDs[markerIdxWithScene], markers[0].ID) + assert.Greater(t, len(markers), 0) + for _, marker := range markers { + assert.Equal(t, sceneIDs[sceneIdxWithMarkers], int(marker.SceneID.Int64)) + } markers, err = mqb.FindBySceneID(0) @@ -40,15 +42,15 @@ func TestMarkerCountByTagID(t *testing.T) { withTxn(func(r models.Repository) error { mqb := r.SceneMarker() - markerCount, err := mqb.CountByTagID(tagIDs[tagIdxWithPrimaryMarker]) + markerCount, err := mqb.CountByTagID(tagIDs[tagIdxWithPrimaryMarkers]) if err != nil { t.Errorf("error calling CountByTagID: %s", err.Error()) } - assert.Equal(t, 1, markerCount) + assert.Equal(t, 3, markerCount) - markerCount, err = mqb.CountByTagID(tagIDs[tagIdxWithMarker]) + markerCount, err = mqb.CountByTagID(tagIDs[tagIdxWithMarkers]) if err != nil { t.Errorf("error calling CountByTagID: %s", err.Error()) @@ -83,6 +85,128 @@ func TestMarkerQuerySortBySceneUpdated(t *testing.T) { }) } +func TestMarkerQueryTags(t *testing.T) { + type test struct { + name string + markerFilter *models.SceneMarkerFilterType + findFilter *models.FindFilterType + } + + withTxn(func(r models.Repository) error { + testTags := func(m *models.SceneMarker, markerFilter *models.SceneMarkerFilterType) { + tagIDs, err := r.SceneMarker().GetTagIDs(m.ID) + if err != nil { + 
t.Errorf("error getting marker tag ids: %v", err) + } + if markerFilter.Tags.Modifier == models.CriterionModifierIsNull && len(tagIDs) > 0 { + t.Errorf("expected marker %d to have no tags - found %d", m.ID, len(tagIDs)) + } + if markerFilter.Tags.Modifier == models.CriterionModifierNotNull && len(tagIDs) == 0 { + t.Errorf("expected marker %d to have tags - found 0", m.ID) + } + } + + cases := []test{ + { + "is null", + &models.SceneMarkerFilterType{ + Tags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + }, + nil, + }, + { + "not null", + &models.SceneMarkerFilterType{ + Tags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + }, + nil, + }, + } + + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + markers := queryMarkers(t, r.SceneMarker(), tc.markerFilter, tc.findFilter) + assert.Greater(t, len(markers), 0) + for _, m := range markers { + testTags(m, tc.markerFilter) + } + }) + } + + return nil + }) +} + +func TestMarkerQuerySceneTags(t *testing.T) { + type test struct { + name string + markerFilter *models.SceneMarkerFilterType + findFilter *models.FindFilterType + } + + withTxn(func(r models.Repository) error { + testTags := func(m *models.SceneMarker, markerFilter *models.SceneMarkerFilterType) { + tagIDs, err := r.Scene().GetTagIDs(int(m.SceneID.Int64)) + if err != nil { + t.Errorf("error getting marker tag ids: %v", err) + } + if markerFilter.SceneTags.Modifier == models.CriterionModifierIsNull && len(tagIDs) > 0 { + t.Errorf("expected marker %d to have no scene tags - found %d", m.ID, len(tagIDs)) + } + if markerFilter.SceneTags.Modifier == models.CriterionModifierNotNull && len(tagIDs) == 0 { + t.Errorf("expected marker %d to have scene tags - found 0", m.ID) + } + } + + cases := []test{ + { + "is null", + &models.SceneMarkerFilterType{ + SceneTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + }, + nil, + }, 
+ { + "not null", + &models.SceneMarkerFilterType{ + SceneTags: &models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + }, + nil, + }, + } + + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + markers := queryMarkers(t, r.SceneMarker(), tc.markerFilter, tc.findFilter) + assert.Greater(t, len(markers), 0) + for _, m := range markers { + testTags(m, tc.markerFilter) + } + }) + } + + return nil + }) +} + +func queryMarkers(t *testing.T, sqb models.SceneMarkerReader, markerFilter *models.SceneMarkerFilterType, findFilter *models.FindFilterType) []*models.SceneMarker { + t.Helper() + result, _, err := sqb.Query(markerFilter, findFilter) + if err != nil { + t.Errorf("Error querying markers: %v", err) + } + + return result +} + // TODO Update // TODO Destroy // TODO Find diff --git a/pkg/sqlite/scene_test.go b/pkg/sqlite/scene_test.go index 48790d1ae..0c45a2c0e 100644 --- a/pkg/sqlite/scene_test.go +++ b/pkg/sqlite/scene_test.go @@ -141,9 +141,19 @@ func TestSceneQueryQ(t *testing.T) { func queryScene(t *testing.T, sqb models.SceneReader, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) []*models.Scene { t.Helper() - scenes, _, err := sqb.Query(sceneFilter, findFilter) + result, err := sqb.Query(models.SceneQueryOptions{ + QueryOptions: models.QueryOptions{ + FindFilter: findFilter, + }, + SceneFilter: sceneFilter, + }) if err != nil { - t.Errorf("Error querying scene: %s", err.Error()) + t.Errorf("Error querying scene: %v", err) + } + + scenes, err := result.Resolve() + if err != nil { + t.Errorf("Error resolving scenes: %v", err) } return scenes @@ -346,17 +356,21 @@ func TestSceneIllegalQuery(t *testing.T) { withTxn(func(r models.Repository) error { sqb := r.Scene() - _, _, err := sqb.Query(sceneFilter, nil) + queryOptions := models.SceneQueryOptions{ + SceneFilter: sceneFilter, + } + + _, err := sqb.Query(queryOptions) assert.NotNil(err) sceneFilter.Or = nil sceneFilter.Not = &subFilter - _, 
_, err = sqb.Query(sceneFilter, nil) + _, err = sqb.Query(queryOptions) assert.NotNil(err) sceneFilter.And = nil sceneFilter.Or = &subFilter - _, _, err = sqb.Query(sceneFilter, nil) + _, err = sqb.Query(queryOptions) assert.NotNil(err) return nil @@ -761,7 +775,7 @@ func TestSceneQueryHasMarkers(t *testing.T) { HasMarkers: &hasMarkers, } - q := getSceneStringValue(sceneIdxWithMarker, titleField) + q := getSceneStringValue(sceneIdxWithMarkers, titleField) findFilter := models.FindFilterType{ Q: &q, } @@ -769,7 +783,7 @@ func TestSceneQueryHasMarkers(t *testing.T) { scenes := queryScene(t, sqb, &sceneFilter, &findFilter) assert.Len(t, scenes, 1) - assert.Equal(t, sceneIDs[sceneIdxWithMarker], scenes[0].ID) + assert.Equal(t, sceneIDs[sceneIdxWithMarkers], scenes[0].ID) hasMarkers = "false" scenes = queryScene(t, sqb, &sceneFilter, &findFilter) @@ -782,7 +796,7 @@ func TestSceneQueryHasMarkers(t *testing.T) { // ensure non of the ids equal the one with gallery for _, scene := range scenes { - assert.NotEqual(t, sceneIDs[sceneIdxWithMarker], scene.ID) + assert.NotEqual(t, sceneIDs[sceneIdxWithMarkers], scene.ID) } return nil @@ -1137,6 +1151,29 @@ func TestSceneQueryPerformerTags(t *testing.T) { scenes = queryScene(t, sqb, &sceneFilter, &findFilter) assert.Len(t, scenes, 0) + tagCriterion = models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + } + q = getSceneStringValue(sceneIdx1WithPerformer, titleField) + + scenes = queryScene(t, sqb, &sceneFilter, &findFilter) + assert.Len(t, scenes, 1) + assert.Equal(t, sceneIDs[sceneIdx1WithPerformer], scenes[0].ID) + + q = getSceneStringValue(sceneIdxWithPerformerTag, titleField) + scenes = queryScene(t, sqb, &sceneFilter, &findFilter) + assert.Len(t, scenes, 0) + + tagCriterion.Modifier = models.CriterionModifierNotNull + + scenes = queryScene(t, sqb, &sceneFilter, &findFilter) + assert.Len(t, scenes, 1) + assert.Equal(t, sceneIDs[sceneIdxWithPerformerTag], scenes[0].ID) + + q = 
getSceneStringValue(sceneIdx1WithPerformer, titleField) + scenes = queryScene(t, sqb, &sceneFilter, &findFilter) + assert.Len(t, scenes, 0) + return nil }) } diff --git a/pkg/sqlite/scraped_item.go b/pkg/sqlite/scraped_item.go index 30f772dc9..1eafc98a5 100644 --- a/pkg/sqlite/scraped_item.go +++ b/pkg/sqlite/scraped_item.go @@ -2,6 +2,7 @@ package sqlite import ( "database/sql" + "errors" "github.com/stashapp/stash/pkg/models" ) @@ -47,7 +48,7 @@ func (qb *scrapedItemQueryBuilder) Find(id int) (*models.ScrapedItem, error) { func (qb *scrapedItemQueryBuilder) find(id int) (*models.ScrapedItem, error) { var ret models.ScrapedItem if err := qb.get(id, &ret); err != nil { - if err == sql.ErrNoRows { + if errors.Is(err, sql.ErrNoRows) { return nil, nil } return nil, err diff --git a/pkg/sqlite/setup_test.go b/pkg/sqlite/setup_test.go index 116d8eabb..e1aaf4f9d 100644 --- a/pkg/sqlite/setup_test.go +++ b/pkg/sqlite/setup_test.go @@ -34,10 +34,11 @@ const ( sceneIdxWithTwoPerformers sceneIdxWithTag sceneIdxWithTwoTags + sceneIdxWithMarkerAndTag sceneIdxWithStudio sceneIdx1WithStudio sceneIdx2WithStudio - sceneIdxWithMarker + sceneIdxWithMarkers sceneIdxWithPerformerTag sceneIdxWithPerformerTwoTags sceneIdxWithSpacedName @@ -139,8 +140,9 @@ const ( tagIdxWithScene = iota tagIdx1WithScene tagIdx2WithScene - tagIdxWithPrimaryMarker - tagIdxWithMarker + tagIdx3WithScene + tagIdxWithPrimaryMarkers + tagIdxWithMarkers tagIdxWithCoverImage tagIdxWithImage tagIdx1WithImage @@ -191,6 +193,9 @@ const ( const ( markerIdxWithScene = iota + markerIdxWithTag + markerIdxWithSceneTag + totalMarkers ) const ( @@ -239,6 +244,7 @@ var ( {sceneIdxWithTag, tagIdxWithScene}, {sceneIdxWithTwoTags, tagIdx1WithScene}, {sceneIdxWithTwoTags, tagIdx2WithScene}, + {sceneIdxWithMarkerAndTag, tagIdx3WithScene}, } scenePerformerLinks = [][2]int{ @@ -269,6 +275,21 @@ var ( } ) +type markerSpec struct { + sceneIdx int + primaryTagIdx int + tagIdxs []int +} + +var ( + // indexed by marker + markerSpecs = 
[]markerSpec{ + {sceneIdxWithMarkers, tagIdxWithPrimaryMarkers, nil}, + {sceneIdxWithMarkers, tagIdxWithPrimaryMarkers, []int{tagIdxWithMarkers}}, + {sceneIdxWithMarkerAndTag, tagIdxWithPrimaryMarkers, nil}, + } +) + var ( imageGalleryLinks = [][2]int{ {imageIdxWithGallery, galleryIdxWithImage}, @@ -516,8 +537,10 @@ func populateDB() error { return fmt.Errorf("error linking tags parent: %s", err.Error()) } - if err := createMarker(r.SceneMarker(), sceneIdxWithMarker, tagIdxWithPrimaryMarker, []int{tagIdxWithMarker}); err != nil { - return fmt.Errorf("error creating scene marker: %s", err.Error()) + for _, ms := range markerSpecs { + if err := createMarker(r.SceneMarker(), ms); err != nil { + return fmt.Errorf("error creating scene marker: %s", err.Error()) + } } return nil @@ -687,6 +710,7 @@ func createGalleries(gqb models.GalleryReaderWriter, n int) error { for i := 0; i < n; i++ { gallery := models.Gallery{ Path: models.NullString(getGalleryStringValue(i, pathField)), + Title: models.NullString(getGalleryStringValue(i, titleField)), URL: getGalleryNullStringValue(i, urlField), Checksum: getGalleryStringValue(i, checksumField), Rating: getRating(i), @@ -843,7 +867,7 @@ func getTagStringValue(index int, field string) string { } func getTagSceneCount(id int) int { - if id == tagIDs[tagIdx1WithScene] || id == tagIDs[tagIdx2WithScene] || id == tagIDs[tagIdxWithScene] { + if id == tagIDs[tagIdx1WithScene] || id == tagIDs[tagIdx2WithScene] || id == tagIDs[tagIdxWithScene] || id == tagIDs[tagIdx3WithScene] { return 1 } @@ -851,7 +875,11 @@ func getTagSceneCount(id int) int { } func getTagMarkerCount(id int) int { - if id == tagIDs[tagIdxWithMarker] || id == tagIDs[tagIdxWithPrimaryMarker] { + if id == tagIDs[tagIdxWithPrimaryMarkers] { + return 3 + } + + if id == tagIDs[tagIdxWithMarkers] { return 1 } @@ -1008,28 +1036,30 @@ func createStudios(sqb models.StudioReaderWriter, n int, o int) error { return nil } -func createMarker(mqb models.SceneMarkerReaderWriter, 
sceneIdx, primaryTagIdx int, tagIdxs []int) error { +func createMarker(mqb models.SceneMarkerReaderWriter, markerSpec markerSpec) error { marker := models.SceneMarker{ - SceneID: sql.NullInt64{Int64: int64(sceneIDs[sceneIdx]), Valid: true}, - PrimaryTagID: tagIDs[primaryTagIdx], + SceneID: sql.NullInt64{Int64: int64(sceneIDs[markerSpec.sceneIdx]), Valid: true}, + PrimaryTagID: tagIDs[markerSpec.primaryTagIdx], } created, err := mqb.Create(marker) if err != nil { - return fmt.Errorf("Error creating marker %v+: %s", marker, err.Error()) + return fmt.Errorf("error creating marker %v+: %w", marker, err) } markerIDs = append(markerIDs, created.ID) - newTagIDs := []int{} + if len(markerSpec.tagIdxs) > 0 { + newTagIDs := []int{} - for _, tagIdx := range tagIdxs { - newTagIDs = append(newTagIDs, tagIDs[tagIdx]) - } + for _, tagIdx := range markerSpec.tagIdxs { + newTagIDs = append(newTagIDs, tagIDs[tagIdx]) + } - if err := mqb.UpdateTags(created.ID, newTagIDs); err != nil { - return fmt.Errorf("Error creating marker/tag join: %s", err.Error()) + if err := mqb.UpdateTags(created.ID, newTagIDs); err != nil { + return fmt.Errorf("error creating marker/tag join: %w", err) + } } return nil diff --git a/pkg/sqlite/sql.go b/pkg/sqlite/sql.go index 827bdbda2..56fe9f299 100644 --- a/pkg/sqlite/sql.go +++ b/pkg/sqlite/sql.go @@ -2,6 +2,7 @@ package sqlite import ( "database/sql" + "errors" "fmt" "math/rand" "regexp" @@ -19,9 +20,9 @@ func selectAll(tableName string) string { return "SELECT " + idColumn + " FROM " + tableName + " " } -func selectDistinctIDs(tableName string) string { - idColumn := getColumn(tableName, "id") - return "SELECT DISTINCT " + idColumn + " FROM " + tableName + " " +func distinctIDs(qb *queryBuilder, tableName string) { + qb.addColumn("DISTINCT " + getColumn(tableName, "id")) + qb.from = tableName } func getColumn(tableName string, columnName string) string { @@ -57,14 +58,15 @@ func getSort(sort string, direction string, tableName string) string { const 
randomSeedPrefix = "random_" - if strings.HasSuffix(sort, "_count") { + switch { + case strings.HasSuffix(sort, "_count"): var relationTableName = strings.TrimSuffix(sort, "_count") // TODO: pluralize? colName := getColumn(relationTableName, "id") return " ORDER BY COUNT(distinct " + colName + ") " + direction - } else if strings.Compare(sort, "filesize") == 0 { + case strings.Compare(sort, "filesize") == 0: colName := getColumn(tableName, "size") return " ORDER BY cast(" + colName + " as integer) " + direction - } else if strings.HasPrefix(sort, randomSeedPrefix) { + case strings.HasPrefix(sort, randomSeedPrefix): // seed as a parameter from the UI // turn the provided seed into a float seedStr := "0." + sort[len(randomSeedPrefix):] @@ -74,9 +76,9 @@ func getSort(sort string, direction string, tableName string) string { seed = randomSortFloat } return getRandomSort(tableName, direction, seed) - } else if strings.Compare(sort, "random") == 0 { + case strings.Compare(sort, "random") == 0: return getRandomSort(tableName, direction, randomSortFloat) - } else { + default: colName := getColumn(tableName, sort) var additional string if tableName == "scenes" { @@ -201,14 +203,15 @@ func getIntCriterionWhereClause(column string, input models.IntCriterionInput) ( func getMultiCriterionClause(primaryTable, foreignTable, joinTable, primaryFK, foreignFK string, criterion *models.MultiCriterionInput) (string, string) { whereClause := "" havingClause := "" - if criterion.Modifier == models.CriterionModifierIncludes { + switch criterion.Modifier { + case models.CriterionModifierIncludes: // includes any of the provided ids whereClause = foreignTable + ".id IN " + getInBinding(len(criterion.Value)) - } else if criterion.Modifier == models.CriterionModifierIncludesAll { + case models.CriterionModifierIncludesAll: // includes all of the provided ids whereClause = foreignTable + ".id IN " + getInBinding(len(criterion.Value)) havingClause = "count(distinct " + foreignTable + ".id) IS 
" + strconv.Itoa(len(criterion.Value)) - } else if criterion.Modifier == models.CriterionModifierExcludes { + case models.CriterionModifierExcludes: // excludes all of the provided ids if joinTable != "" { whereClause = primaryTable + ".id not in (select " + joinTable + "." + primaryFK + " from " + joinTable + " where " + joinTable + "." + foreignFK + " in " + getInBinding(len(criterion.Value)) + ")" @@ -228,7 +231,7 @@ func getCountCriterionClause(primaryTable, joinTable, primaryFK string, criterio func getImage(tx dbi, query string, args ...interface{}) ([]byte, error) { rows, err := tx.Queryx(query, args...) - if err != nil && err != sql.ErrNoRows { + if err != nil && !errors.Is(err, sql.ErrNoRows) { return nil, err } defer rows.Close() diff --git a/pkg/sqlite/studio.go b/pkg/sqlite/studio.go index 746a89b12..b0c7745a1 100644 --- a/pkg/sqlite/studio.go +++ b/pkg/sqlite/studio.go @@ -2,6 +2,7 @@ package sqlite import ( "database/sql" + "errors" "fmt" "strings" @@ -68,7 +69,7 @@ func (qb *studioQueryBuilder) Destroy(id int) error { func (qb *studioQueryBuilder) Find(id int) (*models.Studio, error) { var ret models.Studio if err := qb.get(id, &ret); err != nil { - if err == sql.ErrNoRows { + if errors.Is(err, sql.ErrNoRows) { return nil, nil } return nil, err @@ -116,6 +117,16 @@ func (qb *studioQueryBuilder) FindByName(name string, nocase bool) (*models.Stud return qb.queryStudio(query, args) } +func (qb *studioQueryBuilder) FindByStashID(stashID models.StashID) ([]*models.Studio, error) { + query := selectAll("studios") + ` + LEFT JOIN studio_stash_ids on studio_stash_ids.studio_id = studios.id + WHERE studio_stash_ids.stash_id = ? + AND studio_stash_ids.endpoint = ? 
+ ` + args := []interface{}{stashID.StashID, stashID.Endpoint} + return qb.queryStudios(query, args) +} + func (qb *studioQueryBuilder) Count() (int, error) { return qb.runCountQuery(qb.buildCountQuery("SELECT studios.id FROM studios"), nil) } @@ -133,6 +144,11 @@ func (qb *studioQueryBuilder) QueryForAutoTag(words []string) ([]*models.Studio, var whereClauses []string var args []interface{} + // always include names that begin with a single character + singleFirstCharacterRegex := "^[\\w][.\\-_ ]" + whereClauses = append(whereClauses, "studios.name regexp ? OR COALESCE(studio_aliases.alias, '') regexp ?") + args = append(args, singleFirstCharacterRegex, singleFirstCharacterRegex) + for _, w := range words { ww := w + "%" whereClauses = append(whereClauses, "studios.name like ?") @@ -222,8 +238,7 @@ func (qb *studioQueryBuilder) Query(studioFilter *models.StudioFilterType, findF } query := qb.newQuery() - - query.body = selectDistinctIDs("studios") + distinctIDs(&query, studioTable) if q := findFilter.Q; q != nil && *q != "" { query.join(studioAliasesTable, "", "studio_aliases.studio_id = studios.id") diff --git a/pkg/sqlite/studio_test.go b/pkg/sqlite/studio_test.go index 8e623e53f..037c5958a 100644 --- a/pkg/sqlite/studio_test.go +++ b/pkg/sqlite/studio_test.go @@ -507,18 +507,24 @@ func verifyStudiosImageCount(t *testing.T, imageCountCriterion models.IntCriteri for _, studio := range studios { pp := 0 - _, count, err := r.Image().Query(&models.ImageFilterType{ - Studios: &models.HierarchicalMultiCriterionInput{ - Value: []string{strconv.Itoa(studio.ID)}, - Modifier: models.CriterionModifierIncludes, + result, err := r.Image().Query(models.ImageQueryOptions{ + QueryOptions: models.QueryOptions{ + FindFilter: &models.FindFilterType{ + PerPage: &pp, + }, + Count: true, + }, + ImageFilter: &models.ImageFilterType{ + Studios: &models.HierarchicalMultiCriterionInput{ + Value: []string{strconv.Itoa(studio.ID)}, + Modifier: models.CriterionModifierIncludes, + }, }, - }, 
&models.FindFilterType{ - PerPage: &pp, }) if err != nil { return err } - verifyInt(t, count, imageCountCriterion) + verifyInt(t, result.Count, imageCountCriterion) } return nil diff --git a/pkg/sqlite/tag.go b/pkg/sqlite/tag.go index 0acf748c0..ea7042251 100644 --- a/pkg/sqlite/tag.go +++ b/pkg/sqlite/tag.go @@ -87,7 +87,7 @@ func (qb *tagQueryBuilder) Destroy(id int) error { func (qb *tagQueryBuilder) Find(id int) (*models.Tag, error) { var ret models.Tag if err := qb.get(id, &ret); err != nil { - if err == sql.ErrNoRows { + if errors.Is(err, sql.ErrNoRows) { return nil, nil } return nil, err @@ -235,6 +235,11 @@ func (qb *tagQueryBuilder) QueryForAutoTag(words []string) ([]*models.Tag, error var whereClauses []string var args []interface{} + // always include names that begin with a single character + singleFirstCharacterRegex := "^[\\w][.\\-_ ]" + whereClauses = append(whereClauses, "tags.name regexp ? OR COALESCE(tag_aliases.alias, '') regexp ?") + args = append(args, singleFirstCharacterRegex, singleFirstCharacterRegex) + for _, w := range words { ww := w + "%" whereClauses = append(whereClauses, "tags.name like ?") @@ -319,8 +324,7 @@ func (qb *tagQueryBuilder) Query(tagFilter *models.TagFilterType, findFilter *mo } query := qb.newQuery() - - query.body = selectDistinctIDs(tagTable) + distinctIDs(&query, tagTable) if q := findFilter.Q; q != nil && *q != "" { query.join(tagAliasesTable, "", "tag_aliases.tag_id = tags.id") @@ -439,7 +443,23 @@ func tagMarkerCountCriterionHandler(qb *tagQueryBuilder, markerCount *models.Int func tagParentsCriterionHandler(qb *tagQueryBuilder, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { return func(f *filterBuilder) { - if tags != nil && len(tags.Value) > 0 { + if tags != nil { + if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { + var notClause string + if tags.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + 
f.addJoin("tags_relations", "parent_relations", "tags.id = parent_relations.child_id") + + f.addWhere(fmt.Sprintf("parent_relations.parent_id IS %s NULL", notClause)) + return + } + + if len(tags.Value) == 0 { + return + } + var args []interface{} for _, val := range tags.Value { args = append(args, val) @@ -472,7 +492,23 @@ func tagParentsCriterionHandler(qb *tagQueryBuilder, tags *models.HierarchicalMu func tagChildrenCriterionHandler(qb *tagQueryBuilder, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { return func(f *filterBuilder) { - if tags != nil && len(tags.Value) > 0 { + if tags != nil { + if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { + var notClause string + if tags.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + f.addJoin("tags_relations", "child_relations", "tags.id = child_relations.parent_id") + + f.addWhere(fmt.Sprintf("child_relations.child_id IS %s NULL", notClause)) + return + } + + if len(tags.Value) == 0 { + return + } + var args []interface{} for _, val := range tags.Value { args = append(args, val) @@ -644,13 +680,13 @@ func (qb *tagQueryBuilder) Merge(source []int, destination int) error { "performers_tags": "performer_id", } - tagArgs := append(args, destination) + args = append(args, destination) for table, idColumn := range tagTables { _, err := qb.tx.Exec(`UPDATE `+table+` SET tag_id = ? WHERE tag_id IN `+inBinding+` AND NOT EXISTS(SELECT 1 FROM `+table+` o WHERE o.`+idColumn+` = `+table+`.`+idColumn+` AND o.tag_id = ?)`, - tagArgs..., + args..., ) if err != nil { return err @@ -728,26 +764,21 @@ func (qb *tagQueryBuilder) UpdateChildTags(tagID int, childIDs []int) error { return nil } -func (qb *tagQueryBuilder) FindAllAncestors(tagID int, excludeIDs []int) ([]*models.Tag, error) { +// FindAllAncestors returns a slice of TagPath objects, representing all +// ancestors of the tag with the provided id. 
+func (qb *tagQueryBuilder) FindAllAncestors(tagID int, excludeIDs []int) ([]*models.TagPath, error) { inBinding := getInBinding(len(excludeIDs) + 1) query := `WITH RECURSIVE parents AS ( - SELECT t.id AS parent_id, t.id AS child_id FROM tags t WHERE t.id = ? + SELECT t.id AS parent_id, t.id AS child_id, t.name as path FROM tags t WHERE t.id = ? UNION - SELECT tr.parent_id, tr.child_id FROM tags_relations tr INNER JOIN parents p ON p.parent_id = tr.child_id WHERE tr.parent_id NOT IN` + inBinding + ` -), -children AS ( - SELECT tr.parent_id, tr.child_id FROM tags_relations tr INNER JOIN parents p ON p.parent_id = tr.parent_id WHERE tr.child_id NOT IN` + inBinding + ` - UNION - SELECT tr.parent_id, tr.child_id FROM tags_relations tr INNER JOIN children c ON c.child_id = tr.parent_id WHERE tr.child_id NOT IN` + inBinding + ` + SELECT tr.parent_id, tr.child_id, t.name || '->' || p.path as path FROM tags_relations tr INNER JOIN parents p ON p.parent_id = tr.child_id JOIN tags t ON t.id = tr.parent_id WHERE tr.parent_id NOT IN` + inBinding + ` ) -SELECT t.* FROM tags t INNER JOIN parents p ON t.id = p.parent_id -UNION -SELECT t.* FROM tags t INNER JOIN children c ON t.id = c.child_id +SELECT t.*, p.path FROM tags t INNER JOIN parents p ON t.id = p.parent_id ` - var ret models.Tags + var ret models.TagPaths excludeArgs := []interface{}{tagID} for _, excludeID := range excludeIDs { excludeArgs = append(excludeArgs, excludeID) @@ -761,26 +792,21 @@ SELECT t.* FROM tags t INNER JOIN children c ON t.id = c.child_id return ret, nil } -func (qb *tagQueryBuilder) FindAllDescendants(tagID int, excludeIDs []int) ([]*models.Tag, error) { +// FindAllDescendants returns a slice of TagPath objects, representing all +// descendants of the tag with the provided id. 
+func (qb *tagQueryBuilder) FindAllDescendants(tagID int, excludeIDs []int) ([]*models.TagPath, error) { inBinding := getInBinding(len(excludeIDs) + 1) query := `WITH RECURSIVE children AS ( - SELECT t.id AS parent_id, t.id AS child_id FROM tags t WHERE t.id = ? + SELECT t.id AS parent_id, t.id AS child_id, t.name as path FROM tags t WHERE t.id = ? UNION - SELECT tr.parent_id, tr.child_id FROM tags_relations tr INNER JOIN children c ON c.child_id = tr.parent_id WHERE tr.child_id NOT IN` + inBinding + ` -), -parents AS ( - SELECT tr.parent_id, tr.child_id FROM tags_relations tr INNER JOIN children c ON c.child_id = tr.child_id WHERE tr.parent_id NOT IN` + inBinding + ` - UNION - SELECT tr.parent_id, tr.child_id FROM tags_relations tr INNER JOIN parents p ON p.parent_id = tr.child_id WHERE tr.parent_id NOT IN` + inBinding + ` + SELECT tr.parent_id, tr.child_id, c.path || '->' || t.name as path FROM tags_relations tr INNER JOIN children c ON c.child_id = tr.parent_id JOIN tags t ON t.id = tr.child_id WHERE tr.child_id NOT IN` + inBinding + ` ) -SELECT t.* FROM tags t INNER JOIN children c ON t.id = c.child_id -UNION -SELECT t.* FROM tags t INNER JOIN parents p ON t.id = p.parent_id +SELECT t.*, c.path FROM tags t INNER JOIN children c ON t.id = c.child_id ` - var ret models.Tags + var ret models.TagPaths excludeArgs := []interface{}{tagID} for _, excludeID := range excludeIDs { excludeArgs = append(excludeArgs, excludeID) diff --git a/pkg/sqlite/tag_test.go b/pkg/sqlite/tag_test.go index d91325280..70284019f 100644 --- a/pkg/sqlite/tag_test.go +++ b/pkg/sqlite/tag_test.go @@ -18,7 +18,7 @@ func TestMarkerFindBySceneMarkerID(t *testing.T) { withTxn(func(r models.Repository) error { tqb := r.Tag() - markerID := markerIDs[markerIdxWithScene] + markerID := markerIDs[markerIdxWithTag] tags, err := tqb.FindBySceneMarkerID(markerID) @@ -27,7 +27,7 @@ func TestMarkerFindBySceneMarkerID(t *testing.T) { } assert.Len(t, tags, 1) - assert.Equal(t, tagIDs[tagIdxWithMarker], 
tags[0].ID) + assert.Equal(t, tagIDs[tagIdxWithMarkers], tags[0].ID) tags, err = tqb.FindBySceneMarkerID(0) @@ -168,7 +168,7 @@ func TestTagQuerySort(t *testing.T) { sortBy = "scene_markers_count" tags = queryTags(t, sqb, nil, findFilter) - assert.Equal(tagIDs[tagIdxWithMarker], tags[0].ID) + assert.Equal(tagIDs[tagIdxWithMarkers], tags[0].ID) sortBy = "images_count" tags = queryTags(t, sqb, nil, findFilter) @@ -613,6 +613,7 @@ func verifyTagChildCount(t *testing.T, sceneCountCriterion models.IntCriterionIn func TestTagQueryParent(t *testing.T) { withTxn(func(r models.Repository) error { + const nameField = "Name" sqb := r.Tag() tagCriterion := models.HierarchicalMultiCriterionInput{ Value: []string{ @@ -634,7 +635,7 @@ func TestTagQueryParent(t *testing.T) { tagCriterion.Modifier = models.CriterionModifierExcludes - q := getTagStringValue(tagIdxWithParentTag, titleField) + q := getTagStringValue(tagIdxWithParentTag, nameField) findFilter := models.FindFilterType{ Q: &q, } @@ -660,12 +661,37 @@ func TestTagQueryParent(t *testing.T) { tags = queryTags(t, sqb, &tagFilter, nil) assert.Len(t, tags, 2) + tagCriterion = models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + } + q = getTagStringValue(tagIdxWithGallery, nameField) + + tags = queryTags(t, sqb, &tagFilter, &findFilter) + assert.Len(t, tags, 1) + assert.Equal(t, tagIDs[tagIdxWithGallery], tags[0].ID) + + q = getTagStringValue(tagIdxWithParentTag, nameField) + tags = queryTags(t, sqb, &tagFilter, &findFilter) + assert.Len(t, tags, 0) + + tagCriterion.Modifier = models.CriterionModifierNotNull + + tags = queryTags(t, sqb, &tagFilter, &findFilter) + assert.Len(t, tags, 1) + assert.Equal(t, tagIDs[tagIdxWithParentTag], tags[0].ID) + + q = getTagStringValue(tagIdxWithGallery, nameField) + tags = queryTags(t, sqb, &tagFilter, &findFilter) + assert.Len(t, tags, 0) + return nil }) } func TestTagQueryChild(t *testing.T) { withTxn(func(r models.Repository) error { + const nameField = 
"Name" + sqb := r.Tag() tagCriterion := models.HierarchicalMultiCriterionInput{ Value: []string{ @@ -687,7 +713,7 @@ func TestTagQueryChild(t *testing.T) { tagCriterion.Modifier = models.CriterionModifierExcludes - q := getTagStringValue(tagIdxWithChildTag, titleField) + q := getTagStringValue(tagIdxWithChildTag, nameField) findFilter := models.FindFilterType{ Q: &q, } @@ -713,6 +739,29 @@ func TestTagQueryChild(t *testing.T) { tags = queryTags(t, sqb, &tagFilter, nil) assert.Len(t, tags, 2) + tagCriterion = models.HierarchicalMultiCriterionInput{ + Modifier: models.CriterionModifierIsNull, + } + q = getTagStringValue(tagIdxWithGallery, nameField) + + tags = queryTags(t, sqb, &tagFilter, &findFilter) + assert.Len(t, tags, 1) + assert.Equal(t, tagIDs[tagIdxWithGallery], tags[0].ID) + + q = getTagStringValue(tagIdxWithChildTag, nameField) + tags = queryTags(t, sqb, &tagFilter, &findFilter) + assert.Len(t, tags, 0) + + tagCriterion.Modifier = models.CriterionModifierNotNull + + tags = queryTags(t, sqb, &tagFilter, &findFilter) + assert.Len(t, tags, 1) + assert.Equal(t, tagIDs[tagIdxWithChildTag], tags[0].ID) + + q = getTagStringValue(tagIdxWithGallery, nameField) + tags = queryTags(t, sqb, &tagFilter, &findFilter) + assert.Len(t, tags, 0) + return nil }) } @@ -842,8 +891,8 @@ func TestTagMerge(t *testing.T) { srcIdxs := []int{ tagIdx1WithScene, tagIdx2WithScene, - tagIdxWithPrimaryMarker, - tagIdxWithMarker, + tagIdxWithPrimaryMarkers, + tagIdxWithMarkers, tagIdxWithCoverImage, tagIdxWithImage, tagIdx1WithImage, @@ -893,7 +942,7 @@ func TestTagMerge(t *testing.T) { assert.Contains(sceneTagIDs, destID) // ensure marker points to new tag - marker, err := r.SceneMarker().Find(markerIDs[markerIdxWithScene]) + marker, err := r.SceneMarker().Find(markerIDs[markerIdxWithTag]) if err != nil { return err } diff --git a/pkg/sqlite/transaction.go b/pkg/sqlite/transaction.go index 0eb45d5f9..50486d01e 100644 --- a/pkg/sqlite/transaction.go +++ b/pkg/sqlite/transaction.go @@ -36,7 
+36,7 @@ func (t *transaction) Begin() error { var err error t.tx, err = database.DB.BeginTxx(t.Ctx, nil) if err != nil { - return fmt.Errorf("error starting transaction: %s", err.Error()) + return fmt.Errorf("error starting transaction: %v", err) } return nil @@ -49,7 +49,7 @@ func (t *transaction) Rollback() error { err := t.tx.Rollback() if err != nil { - return fmt.Errorf("error rolling back transaction: %s", err.Error()) + return fmt.Errorf("error rolling back transaction: %v", err) } t.tx = nil @@ -63,7 +63,7 @@ func (t *transaction) Commit() error { err := t.tx.Commit() if err != nil { - return fmt.Errorf("error committing transaction: %s", err.Error()) + return fmt.Errorf("error committing transaction: %v", err) } t.tx = nil diff --git a/pkg/studio/export.go b/pkg/studio/export.go index 46b92a07d..41f535494 100644 --- a/pkg/studio/export.go +++ b/pkg/studio/export.go @@ -30,7 +30,7 @@ func ToJSON(reader models.StudioReader, studio *models.Studio) (*jsonschema.Stud if studio.ParentID.Valid { parent, err := reader.Find(int(studio.ParentID.Int64)) if err != nil { - return nil, fmt.Errorf("error getting parent studio: %s", err.Error()) + return nil, fmt.Errorf("error getting parent studio: %v", err) } if parent != nil { @@ -44,19 +44,31 @@ func ToJSON(reader models.StudioReader, studio *models.Studio) (*jsonschema.Stud aliases, err := reader.GetAliases(studio.ID) if err != nil { - return nil, fmt.Errorf("error getting studio aliases: %s", err.Error()) + return nil, fmt.Errorf("error getting studio aliases: %v", err) } newStudioJSON.Aliases = aliases image, err := reader.GetImage(studio.ID) if err != nil { - return nil, fmt.Errorf("error getting studio image: %s", err.Error()) + return nil, fmt.Errorf("error getting studio image: %v", err) } if len(image) > 0 { newStudioJSON.Image = utils.GetBase64StringFromData(image) } + stashIDs, _ := reader.GetStashIDs(studio.ID) + var ret []models.StashID + for _, stashID := range stashIDs { + newJoin := models.StashID{ + 
StashID: stashID.StashID, + Endpoint: stashID.Endpoint, + } + ret = append(ret, newJoin) + } + + newStudioJSON.StashIDs = ret + return &newStudioJSON, nil } diff --git a/pkg/studio/export_test.go b/pkg/studio/export_test.go index e251ad52c..c18fbfa30 100644 --- a/pkg/studio/export_test.go +++ b/pkg/studio/export_test.go @@ -39,6 +39,14 @@ var parentStudio models.Studio = models.Studio{ var imageBytes = []byte("imageBytes") +var stashID = models.StashID{ + StashID: "StashID", + Endpoint: "Endpoint", +} +var stashIDs = []*models.StashID{ + &stashID, +} + const image = "aW1hZ2VCeXRlcw==" var ( @@ -95,6 +103,9 @@ func createFullJSONStudio(parentStudio, image string, aliases []string) *jsonsch Image: image, Rating: rating, Aliases: aliases, + StashIDs: []models.StashID{ + stashID, + }, } } @@ -180,15 +191,20 @@ func TestToJSON(t *testing.T) { mockStudioReader.On("GetAliases", missingParentStudioID).Return(nil, nil).Once() mockStudioReader.On("GetAliases", errAliasID).Return(nil, aliasErr).Once() + mockStudioReader.On("GetStashIDs", studioID).Return(stashIDs, nil).Once() + mockStudioReader.On("GetStashIDs", noImageID).Return(nil, nil).Once() + mockStudioReader.On("GetStashIDs", missingParentStudioID).Return(stashIDs, nil).Once() + for i, s := range scenarios { studio := s.input json, err := ToJSON(mockStudioReader, &studio) - if !s.err && err != nil { + switch { + case !s.err && err != nil: t.Errorf("[%d] unexpected error: %s", i, err.Error()) - } else if s.err && err == nil { + case s.err && err == nil: t.Errorf("[%d] expected error not returned", i) - } else { + default: assert.Equal(t, s.expected, json, "[%d]", i) } } diff --git a/pkg/studio/import.go b/pkg/studio/import.go index a3a35023d..8a9dfa644 100644 --- a/pkg/studio/import.go +++ b/pkg/studio/import.go @@ -42,7 +42,7 @@ func (i *Importer) PreImport() error { if len(i.Input.Image) > 0 { _, i.imageData, err = utils.ProcessBase64Image(i.Input.Image) if err != nil { - return fmt.Errorf("invalid image: %s", 
err.Error()) + return fmt.Errorf("invalid image: %v", err) } } @@ -53,7 +53,7 @@ func (i *Importer) populateParentStudio() error { if i.Input.ParentStudio != "" { studio, err := i.ReaderWriter.FindByName(i.Input.ParentStudio, false) if err != nil { - return fmt.Errorf("error finding studio by name: %s", err.Error()) + return fmt.Errorf("error finding studio by name: %v", err) } if studio == nil { @@ -97,12 +97,18 @@ func (i *Importer) createParentStudio(name string) (int, error) { func (i *Importer) PostImport(id int) error { if len(i.imageData) > 0 { if err := i.ReaderWriter.UpdateImage(id, i.imageData); err != nil { - return fmt.Errorf("error setting studio image: %s", err.Error()) + return fmt.Errorf("error setting studio image: %v", err) + } + } + + if len(i.Input.StashIDs) > 0 { + if err := i.ReaderWriter.UpdateStashIDs(id, i.Input.StashIDs); err != nil { + return fmt.Errorf("error setting stash id: %v", err) } } if err := i.ReaderWriter.UpdateAliases(id, i.Input.Aliases); err != nil { - return fmt.Errorf("error setting tag aliases: %s", err.Error()) + return fmt.Errorf("error setting tag aliases: %v", err) } return nil @@ -130,7 +136,7 @@ func (i *Importer) FindExistingID() (*int, error) { func (i *Importer) Create() (*int, error) { created, err := i.ReaderWriter.Create(i.studio) if err != nil { - return nil, fmt.Errorf("error creating studio: %s", err.Error()) + return nil, fmt.Errorf("error creating studio: %v", err) } id := created.ID @@ -142,7 +148,7 @@ func (i *Importer) Update(id int) error { studio.ID = id _, err := i.ReaderWriter.UpdateFull(studio) if err != nil { - return fmt.Errorf("error updating existing studio: %s", err.Error()) + return fmt.Errorf("error updating existing studio: %v", err) } return nil diff --git a/pkg/tag/export.go b/pkg/tag/export.go index 54c64990e..dde39760c 100644 --- a/pkg/tag/export.go +++ b/pkg/tag/export.go @@ -18,14 +18,14 @@ func ToJSON(reader models.TagReader, tag *models.Tag) (*jsonschema.Tag, error) { aliases, err 
:= reader.GetAliases(tag.ID) if err != nil { - return nil, fmt.Errorf("error getting tag aliases: %s", err.Error()) + return nil, fmt.Errorf("error getting tag aliases: %v", err) } newTagJSON.Aliases = aliases image, err := reader.GetImage(tag.ID) if err != nil { - return nil, fmt.Errorf("error getting tag image: %s", err.Error()) + return nil, fmt.Errorf("error getting tag image: %v", err) } if len(image) > 0 { @@ -34,7 +34,7 @@ func ToJSON(reader models.TagReader, tag *models.Tag) (*jsonschema.Tag, error) { parents, err := reader.FindByChildTagID(tag.ID) if err != nil { - return nil, fmt.Errorf("error getting parents: %s", err.Error()) + return nil, fmt.Errorf("error getting parents: %v", err) } newTagJSON.Parents = GetNames(parents) diff --git a/pkg/tag/export_test.go b/pkg/tag/export_test.go index 2057ccad3..e37008ab4 100644 --- a/pkg/tag/export_test.go +++ b/pkg/tag/export_test.go @@ -130,11 +130,12 @@ func TestToJSON(t *testing.T) { tag := s.tag json, err := ToJSON(mockTagReader, &tag) - if !s.err && err != nil { + switch { + case !s.err && err != nil: t.Errorf("[%d] unexpected error: %s", i, err.Error()) - } else if s.err && err == nil { + case s.err && err == nil: t.Errorf("[%d] expected error not returned", i) - } else { + default: assert.Equal(t, s.expected, json, "[%d]", i) } } diff --git a/pkg/tag/import.go b/pkg/tag/import.go index 8fe0410d2..3fd793d0e 100644 --- a/pkg/tag/import.go +++ b/pkg/tag/import.go @@ -40,7 +40,7 @@ func (i *Importer) PreImport() error { if len(i.Input.Image) > 0 { _, i.imageData, err = utils.ProcessBase64Image(i.Input.Image) if err != nil { - return fmt.Errorf("invalid image: %s", err.Error()) + return fmt.Errorf("invalid image: %v", err) } } @@ -50,12 +50,12 @@ func (i *Importer) PreImport() error { func (i *Importer) PostImport(id int) error { if len(i.imageData) > 0 { if err := i.ReaderWriter.UpdateImage(id, i.imageData); err != nil { - return fmt.Errorf("error setting tag image: %s", err.Error()) + return fmt.Errorf("error 
setting tag image: %v", err) } } if err := i.ReaderWriter.UpdateAliases(id, i.Input.Aliases); err != nil { - return fmt.Errorf("error setting tag aliases: %s", err.Error()) + return fmt.Errorf("error setting tag aliases: %v", err) } parents, err := i.getParents() @@ -64,7 +64,7 @@ func (i *Importer) PostImport(id int) error { } if err := i.ReaderWriter.UpdateParentTags(id, parents); err != nil { - return fmt.Errorf("error setting parents: %s", err.Error()) + return fmt.Errorf("error setting parents: %v", err) } return nil @@ -92,7 +92,7 @@ func (i *Importer) FindExistingID() (*int, error) { func (i *Importer) Create() (*int, error) { created, err := i.ReaderWriter.Create(i.tag) if err != nil { - return nil, fmt.Errorf("error creating tag: %s", err.Error()) + return nil, fmt.Errorf("error creating tag: %v", err) } id := created.ID @@ -104,7 +104,7 @@ func (i *Importer) Update(id int) error { tag.ID = id _, err := i.ReaderWriter.UpdateFull(tag) if err != nil { - return fmt.Errorf("error updating existing tag: %s", err.Error()) + return fmt.Errorf("error updating existing tag: %v", err) } return nil @@ -115,7 +115,7 @@ func (i *Importer) getParents() ([]int, error) { for _, parent := range i.Input.Parents { tag, err := i.ReaderWriter.FindByName(parent, false) if err != nil { - return nil, fmt.Errorf("error finding parent by name: %s", err.Error()) + return nil, fmt.Errorf("error finding parent by name: %v", err) } if tag == nil { diff --git a/pkg/tag/update.go b/pkg/tag/update.go index d4a03f8f5..dfee55154 100644 --- a/pkg/tag/update.go +++ b/pkg/tag/update.go @@ -2,6 +2,7 @@ package tag import ( "fmt" + "github.com/stashapp/stash/pkg/models" ) @@ -23,17 +24,15 @@ func (e *NameUsedByAliasError) Error() string { } type InvalidTagHierarchyError struct { - Direction string - InvalidTag string - ApplyingTag string + Direction string + CurrentRelation string + InvalidTag string + ApplyingTag string + TagPath string } func (e *InvalidTagHierarchyError) Error() string { - if 
e.InvalidTag == e.ApplyingTag { - return fmt.Sprintf("Cannot apply tag \"%s\" as it already is a %s", e.InvalidTag, e.Direction) - } else { - return fmt.Sprintf("Cannot apply tag \"%s\" as it is linked to \"%s\" which already is a %s", e.ApplyingTag, e.InvalidTag, e.Direction) - } + return fmt.Sprintf("cannot apply tag \"%s\" as a %s of \"%s\" as it is already %s (%s)", e.InvalidTag, e.Direction, e.ApplyingTag, e.CurrentRelation, e.TagPath) } // EnsureTagNameUnique returns an error if the tag name provided @@ -77,45 +76,55 @@ func EnsureAliasesUnique(id int, aliases []string, qb models.TagReader) error { return nil } -func EnsureUniqueHierarchy(id int, parentIDs, childIDs []int, qb models.TagReader) error { - allAncestors := make(map[int]*models.Tag) - allDescendants := make(map[int]*models.Tag) - excludeIDs := []int{id} +func ValidateHierarchy(tag *models.Tag, parentIDs, childIDs []int, qb models.TagReader) error { + id := tag.ID + allAncestors := make(map[int]*models.TagPath) + allDescendants := make(map[int]*models.TagPath) - validateParent := func(testID, applyingID int) error { - if parentTag, exists := allAncestors[testID]; exists { - applyingTag, err := qb.Find(applyingID) + parentsAncestors, err := qb.FindAllAncestors(id, nil) + if err != nil { + return err + } - if err != nil { - return nil - } + for _, ancestorTag := range parentsAncestors { + allAncestors[ancestorTag.ID] = ancestorTag + } + childsDescendants, err := qb.FindAllDescendants(id, nil) + if err != nil { + return err + } + + for _, descendentTag := range childsDescendants { + allDescendants[descendentTag.ID] = descendentTag + } + + validateParent := func(testID int) error { + if parentTag, exists := allDescendants[testID]; exists { return &InvalidTagHierarchyError{ - Direction: "parent", - InvalidTag: parentTag.Name, - ApplyingTag: applyingTag.Name, + Direction: "parent", + CurrentRelation: "a descendant", + InvalidTag: parentTag.Name, + ApplyingTag: tag.Name, + TagPath: parentTag.Path, } } 
return nil } - validateChild := func(testID, applyingID int) error { - if childTag, exists := allDescendants[testID]; exists { - applyingTag, err := qb.Find(applyingID) - - if err != nil { - return nil - } - + validateChild := func(testID int) error { + if childTag, exists := allAncestors[testID]; exists { return &InvalidTagHierarchyError{ - Direction: "child", - InvalidTag: childTag.Name, - ApplyingTag: applyingTag.Name, + Direction: "child", + CurrentRelation: "an ancestor", + InvalidTag: childTag.Name, + ApplyingTag: tag.Name, + TagPath: childTag.Path, } } - return validateParent(testID, applyingID) + return nil } if parentIDs == nil { @@ -141,33 +150,15 @@ func EnsureUniqueHierarchy(id int, parentIDs, childIDs []int, qb models.TagReade } for _, parentID := range parentIDs { - parentsAncestors, err := qb.FindAllAncestors(parentID, excludeIDs) - if err != nil { + if err := validateParent(parentID); err != nil { return err } - - for _, ancestorTag := range parentsAncestors { - if err := validateParent(ancestorTag.ID, parentID); err != nil { - return err - } - - allAncestors[ancestorTag.ID] = ancestorTag - } } for _, childID := range childIDs { - childsDescendants, err := qb.FindAllDescendants(childID, excludeIDs) - if err != nil { + if err := validateChild(childID); err != nil { return err } - - for _, descendentTag := range childsDescendants { - if err := validateChild(descendentTag.ID, childID); err != nil { - return err - } - - allDescendants[descendentTag.ID] = descendentTag - } } return nil @@ -216,10 +207,5 @@ func MergeHierarchy(destination int, sources []int, qb models.TagReader) ([]int, mergedChildren = addTo(mergedChildren, children) } - err := EnsureUniqueHierarchy(destination, mergedParents, mergedChildren, qb) - if err != nil { - return nil, nil, err - } - return mergedParents, mergedChildren, nil } diff --git a/pkg/tag/update_test.go b/pkg/tag/update_test.go index d3a7f226d..f7338da23 100644 --- a/pkg/tag/update_test.go +++ b/pkg/tag/update_test.go 
@@ -29,39 +29,50 @@ var testUniqueHierarchyTags = map[int]*models.Tag{ }, } +var testUniqueHierarchyTagPaths = map[int]*models.TagPath{ + 1: { + Tag: *testUniqueHierarchyTags[1], + }, + 2: { + Tag: *testUniqueHierarchyTags[2], + }, + 3: { + Tag: *testUniqueHierarchyTags[3], + }, + 4: { + Tag: *testUniqueHierarchyTags[4], + }, +} + type testUniqueHierarchyCase struct { id int parents []*models.Tag children []*models.Tag - onFindAllAncestors map[int][]*models.Tag - onFindAllDescendants map[int][]*models.Tag + onFindAllAncestors []*models.TagPath + onFindAllDescendants []*models.TagPath expectedError string } var testUniqueHierarchyCases = []testUniqueHierarchyCase{ { - id: 1, - parents: []*models.Tag{}, - children: []*models.Tag{}, - onFindAllAncestors: map[int][]*models.Tag{ - 1: {}, - }, - onFindAllDescendants: map[int][]*models.Tag{ - 1: {}, - }, - expectedError: "", + id: 1, + parents: []*models.Tag{}, + children: []*models.Tag{}, + onFindAllAncestors: []*models.TagPath{}, + onFindAllDescendants: []*models.TagPath{}, + expectedError: "", }, { id: 1, parents: []*models.Tag{testUniqueHierarchyTags[2]}, children: []*models.Tag{testUniqueHierarchyTags[3]}, - onFindAllAncestors: map[int][]*models.Tag{ - 2: {testUniqueHierarchyTags[2]}, + onFindAllAncestors: []*models.TagPath{ + testUniqueHierarchyTagPaths[2], }, - onFindAllDescendants: map[int][]*models.Tag{ - 3: {testUniqueHierarchyTags[3]}, + onFindAllDescendants: []*models.TagPath{ + testUniqueHierarchyTagPaths[3], }, expectedError: "", }, @@ -69,11 +80,11 @@ var testUniqueHierarchyCases = []testUniqueHierarchyCase{ id: 2, parents: []*models.Tag{testUniqueHierarchyTags[3]}, children: make([]*models.Tag, 0), - onFindAllAncestors: map[int][]*models.Tag{ - 3: {testUniqueHierarchyTags[3]}, + onFindAllAncestors: []*models.TagPath{ + testUniqueHierarchyTagPaths[3], }, - onFindAllDescendants: map[int][]*models.Tag{ - 2: {testUniqueHierarchyTags[2]}, + onFindAllDescendants: []*models.TagPath{ + 
testUniqueHierarchyTagPaths[2], }, expectedError: "", }, @@ -84,24 +95,23 @@ var testUniqueHierarchyCases = []testUniqueHierarchyCase{ testUniqueHierarchyTags[4], }, children: []*models.Tag{}, - onFindAllAncestors: map[int][]*models.Tag{ - 3: {testUniqueHierarchyTags[3], testUniqueHierarchyTags[4]}, - 4: {testUniqueHierarchyTags[4]}, + onFindAllAncestors: []*models.TagPath{ + testUniqueHierarchyTagPaths[3], testUniqueHierarchyTagPaths[4], }, - onFindAllDescendants: map[int][]*models.Tag{ - 2: {testUniqueHierarchyTags[2]}, + onFindAllDescendants: []*models.TagPath{ + testUniqueHierarchyTagPaths[2], }, - expectedError: "Cannot apply tag \"four\" as it already is a parent", + expectedError: "", }, { id: 2, parents: []*models.Tag{}, children: []*models.Tag{testUniqueHierarchyTags[3]}, - onFindAllAncestors: map[int][]*models.Tag{ - 2: {testUniqueHierarchyTags[2]}, + onFindAllAncestors: []*models.TagPath{ + testUniqueHierarchyTagPaths[2], }, - onFindAllDescendants: map[int][]*models.Tag{ - 3: {testUniqueHierarchyTags[3]}, + onFindAllDescendants: []*models.TagPath{ + testUniqueHierarchyTagPaths[3], }, expectedError: "", }, @@ -112,50 +122,49 @@ var testUniqueHierarchyCases = []testUniqueHierarchyCase{ testUniqueHierarchyTags[3], testUniqueHierarchyTags[4], }, - onFindAllAncestors: map[int][]*models.Tag{ - 2: {testUniqueHierarchyTags[2]}, + onFindAllAncestors: []*models.TagPath{ + testUniqueHierarchyTagPaths[2], }, - onFindAllDescendants: map[int][]*models.Tag{ - 3: {testUniqueHierarchyTags[3], testUniqueHierarchyTags[4]}, - 4: {testUniqueHierarchyTags[4]}, + onFindAllDescendants: []*models.TagPath{ + testUniqueHierarchyTagPaths[3], testUniqueHierarchyTagPaths[4], }, - expectedError: "Cannot apply tag \"four\" as it already is a child", + expectedError: "", }, { id: 1, parents: []*models.Tag{testUniqueHierarchyTags[2]}, children: []*models.Tag{testUniqueHierarchyTags[3]}, - onFindAllAncestors: map[int][]*models.Tag{ - 2: {testUniqueHierarchyTags[2], 
testUniqueHierarchyTags[3]}, + onFindAllAncestors: []*models.TagPath{ + testUniqueHierarchyTagPaths[2], testUniqueHierarchyTagPaths[3], }, - onFindAllDescendants: map[int][]*models.Tag{ - 3: {testUniqueHierarchyTags[3]}, + onFindAllDescendants: []*models.TagPath{ + testUniqueHierarchyTagPaths[3], }, - expectedError: "Cannot apply tag \"three\" as it already is a parent", + expectedError: "cannot apply tag \"three\" as a child of \"one\" as it is already an ancestor ()", }, { id: 1, parents: []*models.Tag{testUniqueHierarchyTags[2]}, children: []*models.Tag{testUniqueHierarchyTags[3]}, - onFindAllAncestors: map[int][]*models.Tag{ - 2: {testUniqueHierarchyTags[2]}, + onFindAllAncestors: []*models.TagPath{ + testUniqueHierarchyTagPaths[2], }, - onFindAllDescendants: map[int][]*models.Tag{ - 3: {testUniqueHierarchyTags[3], testUniqueHierarchyTags[2]}, + onFindAllDescendants: []*models.TagPath{ + testUniqueHierarchyTagPaths[3], testUniqueHierarchyTagPaths[2], }, - expectedError: "Cannot apply tag \"three\" as it is linked to \"two\" which already is a parent", + expectedError: "cannot apply tag \"two\" as a parent of \"one\" as it is already a descendant ()", }, { id: 1, parents: []*models.Tag{testUniqueHierarchyTags[3]}, children: []*models.Tag{testUniqueHierarchyTags[3]}, - onFindAllAncestors: map[int][]*models.Tag{ - 3: {testUniqueHierarchyTags[3]}, + onFindAllAncestors: []*models.TagPath{ + testUniqueHierarchyTagPaths[3], }, - onFindAllDescendants: map[int][]*models.Tag{ - 3: {testUniqueHierarchyTags[3]}, + onFindAllDescendants: []*models.TagPath{ + testUniqueHierarchyTagPaths[3], }, - expectedError: "Cannot apply tag \"three\" as it already is a parent", + expectedError: "cannot apply tag \"three\" as a parent of \"one\" as it is already a descendant ()", }, { id: 1, @@ -165,54 +174,55 @@ var testUniqueHierarchyCases = []testUniqueHierarchyCase{ children: []*models.Tag{ testUniqueHierarchyTags[3], }, - onFindAllAncestors: map[int][]*models.Tag{ - 2: 
{testUniqueHierarchyTags[2]}, + onFindAllAncestors: []*models.TagPath{ + testUniqueHierarchyTagPaths[2], }, - onFindAllDescendants: map[int][]*models.Tag{ - 3: {testUniqueHierarchyTags[3], testUniqueHierarchyTags[2]}, + onFindAllDescendants: []*models.TagPath{ + testUniqueHierarchyTagPaths[3], testUniqueHierarchyTagPaths[2], }, - expectedError: "Cannot apply tag \"three\" as it is linked to \"two\" which already is a parent", + expectedError: "cannot apply tag \"two\" as a parent of \"one\" as it is already a descendant ()", }, { id: 1, parents: []*models.Tag{testUniqueHierarchyTags[2]}, children: []*models.Tag{testUniqueHierarchyTags[2]}, - onFindAllAncestors: map[int][]*models.Tag{ - 2: {testUniqueHierarchyTags[2]}, + onFindAllAncestors: []*models.TagPath{ + testUniqueHierarchyTagPaths[2], }, - onFindAllDescendants: map[int][]*models.Tag{ - 2: {testUniqueHierarchyTags[2]}, + onFindAllDescendants: []*models.TagPath{ + testUniqueHierarchyTagPaths[2], }, - expectedError: "Cannot apply tag \"two\" as it already is a parent", + expectedError: "cannot apply tag \"two\" as a parent of \"one\" as it is already a descendant ()", }, { id: 2, parents: []*models.Tag{testUniqueHierarchyTags[1]}, children: []*models.Tag{testUniqueHierarchyTags[3]}, - onFindAllAncestors: map[int][]*models.Tag{ - 1: {testUniqueHierarchyTags[1]}, + onFindAllAncestors: []*models.TagPath{ + testUniqueHierarchyTagPaths[1], }, - onFindAllDescendants: map[int][]*models.Tag{ - 3: {testUniqueHierarchyTags[3], testUniqueHierarchyTags[1]}, + onFindAllDescendants: []*models.TagPath{ + testUniqueHierarchyTagPaths[3], testUniqueHierarchyTagPaths[1], }, - expectedError: "Cannot apply tag \"three\" as it is linked to \"one\" which already is a parent", + expectedError: "cannot apply tag \"one\" as a parent of \"two\" as it is already a descendant ()", }, } -func TestEnsureUniqueHierarchy(t *testing.T) { +func TestEnsureHierarchy(t *testing.T) { for _, tc := range testUniqueHierarchyCases { - 
testEnsureUniqueHierarchy(t, tc, false, false) - testEnsureUniqueHierarchy(t, tc, true, false) - testEnsureUniqueHierarchy(t, tc, false, true) - testEnsureUniqueHierarchy(t, tc, true, true) + testEnsureHierarchy(t, tc, false, false) + testEnsureHierarchy(t, tc, true, false) + testEnsureHierarchy(t, tc, false, true) + testEnsureHierarchy(t, tc, true, true) } } -func testEnsureUniqueHierarchy(t *testing.T, tc testUniqueHierarchyCase, queryParents, queryChildren bool) { +func testEnsureHierarchy(t *testing.T, tc testUniqueHierarchyCase, queryParents, queryChildren bool) { mockTagReader := &mocks.TagReaderWriter{} var parentIDs, childIDs []int find := make(map[int]*models.Tag) + find[tc.id] = testUniqueHierarchyTags[tc.id] if tc.parents != nil { parentIDs = make([]int, 0) for _, parent := range tc.parents { @@ -243,50 +253,25 @@ func testEnsureUniqueHierarchy(t *testing.T, tc testUniqueHierarchyCase, queryPa mockTagReader.On("FindByParentTagID", tc.id).Return(tc.children, nil).Once() } - mockTagReader.On("Find", mock.AnythingOfType("int")).Return(func(tagID int) *models.Tag { - for id, tag := range find { - if id == tagID { - return tag - } - } - return nil - }, func(tagID int) error { - return nil - }).Maybe() - - mockTagReader.On("FindAllAncestors", mock.AnythingOfType("int"), []int{tc.id}).Return(func(tagID int, excludeIDs []int) []*models.Tag { - for id, tags := range tc.onFindAllAncestors { - if id == tagID { - return tags - } - } - return nil + mockTagReader.On("FindAllAncestors", mock.AnythingOfType("int"), []int(nil)).Return(func(tagID int, excludeIDs []int) []*models.TagPath { + return tc.onFindAllAncestors }, func(tagID int, excludeIDs []int) error { - for id := range tc.onFindAllAncestors { - if id == tagID { - return nil - } + if tc.onFindAllAncestors != nil { + return nil } return fmt.Errorf("undefined ancestors for: %d", tagID) }).Maybe() - mockTagReader.On("FindAllDescendants", mock.AnythingOfType("int"), []int{tc.id}).Return(func(tagID int, excludeIDs 
[]int) []*models.Tag { - for id, tags := range tc.onFindAllDescendants { - if id == tagID { - return tags - } - } - return nil + mockTagReader.On("FindAllDescendants", mock.AnythingOfType("int"), []int(nil)).Return(func(tagID int, excludeIDs []int) []*models.TagPath { + return tc.onFindAllDescendants }, func(tagID int, excludeIDs []int) error { - for id := range tc.onFindAllDescendants { - if id == tagID { - return nil - } + if tc.onFindAllDescendants != nil { + return nil } return fmt.Errorf("undefined descendants for: %d", tagID) }).Maybe() - res := EnsureUniqueHierarchy(tc.id, parentIDs, childIDs, mockTagReader) + res := ValidateHierarchy(testUniqueHierarchyTags[tc.id], parentIDs, childIDs, mockTagReader) assert := assert.New(t) diff --git a/pkg/utils/collections.go b/pkg/utils/collections.go new file mode 100644 index 000000000..06bc9f1f5 --- /dev/null +++ b/pkg/utils/collections.go @@ -0,0 +1,60 @@ +package utils + +import "reflect" + +// SliceSame returns true if the two provided lists have the same elements, +// regardless of order. Panics if either parameter is not a slice. 
+func SliceSame(a, b interface{}) bool { + v1 := reflect.ValueOf(a) + v2 := reflect.ValueOf(b) + + if (v1.IsValid() && v1.Kind() != reflect.Slice) || (v2.IsValid() && v2.Kind() != reflect.Slice) { + panic("not a slice") + } + + v1Len := 0 + v2Len := 0 + + v1Valid := v1.IsValid() + v2Valid := v2.IsValid() + + if v1Valid { + v1Len = v1.Len() + } + if v2Valid { + v2Len = v2.Len() + } + + if !v1Valid || !v2Valid { + return v1Len == v2Len + } + + if v1Len != v2Len { + return false + } + + if v1.Type() != v2.Type() { + return false + } + + visited := make(map[int]bool) + for i := 0; i < v1.Len(); i++ { + found := false + for j := 0; j < v2.Len(); j++ { + if visited[j] { + continue + } + if reflect.DeepEqual(v1.Index(i).Interface(), v2.Index(j).Interface()) { + found = true + visited[j] = true + break + } + } + + if !found { + return false + } + } + + return true +} diff --git a/pkg/utils/collections_test.go b/pkg/utils/collections_test.go new file mode 100644 index 000000000..359b9ad10 --- /dev/null +++ b/pkg/utils/collections_test.go @@ -0,0 +1,92 @@ +package utils + +import "testing" + +func TestSliceSame(t *testing.T) { + objs := []struct { + a string + b int + }{ + {"1", 2}, + {"1", 2}, + {"2", 1}, + } + + tests := []struct { + name string + a interface{} + b interface{} + want bool + }{ + {"nil values", nil, nil, true}, + {"empty", []int{}, []int{}, true}, + {"nil and empty", nil, []int{}, true}, + { + "different type", + []string{"1"}, + []int{1}, + false, + }, + { + "different length", + []int{1, 2, 3}, + []int{1, 2}, + false, + }, + { + "equal", + []int{1, 2, 3, 4, 5}, + []int{1, 2, 3, 4, 5}, + true, + }, + { + "different order", + []int{5, 4, 3, 2, 1}, + []int{1, 2, 3, 4, 5}, + true, + }, + { + "different", + []int{5, 4, 3, 2, 6}, + []int{1, 2, 3, 4, 5}, + false, + }, + { + "same with duplicates", + []int{1, 1, 2, 3, 4}, + []int{1, 2, 3, 4, 1}, + true, + }, + { + "subset", + []int{1, 1, 2, 2, 3}, + []int{1, 2, 3, 4, 5}, + false, + }, + { + "superset", + []int{1, 
2, 3, 4, 5}, + []int{1, 1, 2, 2, 3}, + false, + }, + { + "structs equal", + objs[0:1], + objs[0:1], + true, + }, + { + "structs not equal", + objs[0:2], + objs[1:3], + false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := SliceSame(tt.a, tt.b); got != tt.want { + t.Errorf("SliceSame() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/utils/file.go b/pkg/utils/file.go index 722fc1f88..474f7b8a2 100644 --- a/pkg/utils/file.go +++ b/pkg/utils/file.go @@ -4,6 +4,7 @@ import ( "archive/zip" "fmt" "io" + "io/fs" "net/http" "os" "os/user" @@ -14,6 +15,7 @@ import ( "github.com/h2non/filetype" "github.com/h2non/filetype/types" "github.com/stashapp/stash/pkg/logger" + "golang.org/x/text/collate" ) // FileType uses the filetype package to determine the given file path's type @@ -104,8 +106,22 @@ func EmptyDir(path string) error { return nil } +type dirSorter []fs.DirEntry + +func (s dirSorter) Len() int { + return len(s) +} + +func (s dirSorter) Swap(i, j int) { + s[j], s[i] = s[i], s[j] +} + +func (s dirSorter) Bytes(i int) []byte { + return []byte(s[i].Name()) +} + // ListDir will return the contents of a given directory path as a string slice -func ListDir(path string) ([]string, error) { +func ListDir(col *collate.Collator, path string) ([]string, error) { var dirPaths []string files, err := os.ReadDir(path) if err != nil { @@ -115,6 +131,11 @@ func ListDir(path string) ([]string, error) { return dirPaths, err } } + + if col != nil { + col.Sort(dirSorter(files)) + } + for _, file := range files { if !file.IsDir() { continue @@ -356,3 +377,16 @@ func FindInPaths(paths []string, baseName string) string { return "" } + +// MatchExtension returns true if the extension of the provided path +// matches any of the provided extensions. 
+func MatchExtension(path string, extensions []string) bool { + ext := filepath.Ext(path) + for _, e := range extensions { + if strings.EqualFold(ext, "."+e) { + return true + } + } + + return false +} diff --git a/pkg/utils/image.go b/pkg/utils/image.go index 75903fa6d..59435160f 100644 --- a/pkg/utils/image.go +++ b/pkg/utils/image.go @@ -1,6 +1,7 @@ package utils import ( + "context" "crypto/md5" "crypto/tls" "encoding/base64" @@ -20,7 +21,7 @@ const base64RE = `^data:.+\/(.+);base64,(.*)$` // ProcessImageInput transforms an image string either from a base64 encoded // string, or from a URL, and returns the image as a byte slice -func ProcessImageInput(imageInput string) ([]byte, error) { +func ProcessImageInput(ctx context.Context, imageInput string) ([]byte, error) { regex := regexp.MustCompile(base64RE) if regex.MatchString(imageInput) { _, d, err := ProcessBase64Image(imageInput) @@ -28,11 +29,11 @@ func ProcessImageInput(imageInput string) ([]byte, error) { } // assume input is a URL. Read it. 
- return ReadImageFromURL(imageInput) + return ReadImageFromURL(ctx, imageInput) } // ReadImageFromURL returns image data from a URL -func ReadImageFromURL(url string) ([]byte, error) { +func ReadImageFromURL(ctx context.Context, url string) ([]byte, error) { client := &http.Client{ Transport: &http.Transport{ // ignore insecure certificates TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, @@ -41,7 +42,7 @@ func ReadImageFromURL(url string) ([]byte, error) { Timeout: imageGetTimeout, } - req, err := http.NewRequest("GET", url, nil) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) if err != nil { return nil, err } @@ -105,13 +106,6 @@ func GetDataFromBase64String(encodedString string) ([]byte, error) { // GetBase64StringFromData returns the given byte slice as a base64 encoded string func GetBase64StringFromData(data []byte) string { return base64.StdEncoding.EncodeToString(data) - - // Really slow - //result = regexp.MustCompile(`(.{60})`).ReplaceAllString(result, "$1\n") - //if result[len(result)-1:] != "\n" { - // result += "\n" - //} - //return result } func ServeImage(image []byte, w http.ResponseWriter, r *http.Request) error { diff --git a/pkg/utils/int_collections.go b/pkg/utils/int_collections.go index 41daa5185..9f2d3e6bc 100644 --- a/pkg/utils/int_collections.go +++ b/pkg/utils/int_collections.go @@ -1,5 +1,7 @@ package utils +import "strconv" + // IntIndex returns the first index of the provided int value in the provided // int slice. It returns -1 if it is not found. func IntIndex(vs []int, t int) int { @@ -50,3 +52,13 @@ func IntExclude(vs []int, toExclude []int) []int { return ret } + +// IntSliceToStringSlice converts a slice of ints to a slice of strings. 
+func IntSliceToStringSlice(ss []int) []string { + ret := make([]string, len(ss)) + for i, v := range ss { + ret[i] = strconv.Itoa(v) + } + + return ret +} diff --git a/pkg/utils/mutex.go b/pkg/utils/mutex.go new file mode 100644 index 000000000..212200214 --- /dev/null +++ b/pkg/utils/mutex.go @@ -0,0 +1,64 @@ +package utils + +// MutexManager manages access to mutexes using a mutex type and key. +type MutexManager struct { + mapChan chan map[string]<-chan struct{} +} + +// NewMutexManager returns a new instance of MutexManager. +func NewMutexManager() *MutexManager { + ret := &MutexManager{ + mapChan: make(chan map[string]<-chan struct{}, 1), + } + + initial := make(map[string]<-chan struct{}) + ret.mapChan <- initial + + return ret +} + +// Claim blocks until the mutex for the mutexType and key pair is available. +// The mutex is then claimed by the calling code until the provided done +// channel is closed. +func (csm *MutexManager) Claim(mutexType string, key string, done <-chan struct{}) { + mapKey := mutexType + "_" + key + success := false + + var existing <-chan struct{} + for !success { + // grab the map + m := <-csm.mapChan + + // get the entry for the given key + newEntry := m[mapKey] + + // if its the existing entry or nil, then it's available, add our channel + if newEntry == nil || newEntry == existing { + m[mapKey] = done + success = true + } + + // return the map + csm.mapChan <- m + + // if there is an existing entry, now we can wait for it to + // finish, then repeat the process + if newEntry != nil { + existing = newEntry + <-newEntry + } + } + + // add to goroutine to remove from the map only + go func() { + <-done + + m := <-csm.mapChan + + if m[mapKey] == done { + delete(m, mapKey) + } + + csm.mapChan <- m + }() +} diff --git a/pkg/utils/mutex_test.go b/pkg/utils/mutex_test.go new file mode 100644 index 000000000..f5d03091e --- /dev/null +++ b/pkg/utils/mutex_test.go @@ -0,0 +1,50 @@ +package utils + +import ( + "sync" + "testing" +) + +// 
should be run with -race +func TestMutexManager(t *testing.T) { + m := NewMutexManager() + + map1 := make(map[string]bool) + map2 := make(map[string]bool) + map3 := make(map[string]bool) + maps := []map[string]bool{ + map1, + map2, + map3, + } + + types := []string{ + "foo", + "foo", + "bar", + } + + const key = "baz" + + const workers = 8 + const loops = 300 + var wg sync.WaitGroup + for k := 0; k < workers; k++ { + wg.Add(1) + go func(wk int) { + defer wg.Done() + for l := 0; l < loops; l++ { + func(l int) { + c := make(chan struct{}) + defer close(c) + + m.Claim(types[l%3], key, c) + + maps[l%3][key] = true + }(l) + } + }(k) + } + + wg.Wait() +} diff --git a/pkg/utils/oshash.go b/pkg/utils/oshash.go index 75f552b84..0057e2617 100644 --- a/pkg/utils/oshash.go +++ b/pkg/utils/oshash.go @@ -1,37 +1,79 @@ package utils import ( - "bytes" "encoding/binary" + "errors" "fmt" + "io" "os" ) const chunkSize int64 = 64 * 1024 -func oshash(size int64, head []byte, tail []byte) (string, error) { - // put the head and tail together - buf := append(head, tail...) 
+var ErrOsHashLen = errors.New("buffer is not a multiple of 8") - // convert bytes into uint64 - ints := make([]uint64, len(buf)/8) - reader := bytes.NewReader(buf) - err := binary.Read(reader, binary.LittleEndian, &ints) +func sumBytes(buf []byte) (uint64, error) { + if len(buf)%8 != 0 { + return 0, ErrOsHashLen + } + + sz := len(buf) / 8 + var sum uint64 + for j := 0; j < sz; j++ { + sum += binary.LittleEndian.Uint64(buf[8*j : 8*(j+1)]) + } + + return sum, nil +} + +func oshash(size int64, head []byte, tail []byte) (string, error) { + headSum, err := sumBytes(head) + if err != nil { + return "", fmt.Errorf("oshash head: %w", err) + } + tailSum, err := sumBytes(tail) + if err != nil { + return "", fmt.Errorf("oshash tail: %w", err) + } + + // Compute the sum of the head, tail and file size + result := headSum + tailSum + uint64(size) + // output as hex + return fmt.Sprintf("%016x", result), nil +} + +func OSHashFromReader(src io.ReadSeeker, fileSize int64) (string, error) { + if fileSize == 0 { + return "", nil + } + + fileChunkSize := chunkSize + if fileSize < fileChunkSize { + fileChunkSize = fileSize + } + + head := make([]byte, fileChunkSize) + tail := make([]byte, fileChunkSize) + + // read the head of the file into the start of the buffer + _, err := src.Read(head) if err != nil { return "", err } - // sum the integers - var sum uint64 - for _, v := range ints { - sum += v + // seek to the end of the file - the chunk size + _, err = src.Seek(-fileChunkSize, 2) + if err != nil { + return "", err } - // add the filesize - sum += uint64(size) + // read the tail of the file + _, err = src.Read(tail) + if err != nil { + return "", err + } - // output as hex - return fmt.Sprintf("%016x", sum), nil + return oshash(fileSize, head, tail) } // OSHashFromFilePath calculates the hash using the same algorithm that @@ -53,35 +95,5 @@ func OSHashFromFilePath(filePath string) (string, error) { fileSize := fi.Size() - if fileSize == 0 { - return "", nil - } - - fileChunkSize 
:= chunkSize - if fileSize < fileChunkSize { - fileChunkSize = fileSize - } - - head := make([]byte, fileChunkSize) - tail := make([]byte, fileChunkSize) - - // read the head of the file into the start of the buffer - _, err = f.Read(head) - if err != nil { - return "", err - } - - // seek to the end of the file - the chunk size - _, err = f.Seek(-fileChunkSize, 2) - if err != nil { - return "", err - } - - // read the tail of the file - _, err = f.Read(tail) - if err != nil { - return "", err - } - - return oshash(fileSize, head, tail) + return OSHashFromReader(f, fileSize) } diff --git a/pkg/utils/oshash_internal_test.go b/pkg/utils/oshash_internal_test.go index b4d5b9d35..d9e709444 100644 --- a/pkg/utils/oshash_internal_test.go +++ b/pkg/utils/oshash_internal_test.go @@ -1,6 +1,7 @@ package utils import ( + "math/rand" "testing" ) @@ -44,3 +45,31 @@ func TestOshashCollisions(t *testing.T) { t.Errorf("TestOshashCollisions: oshash(n, k, ... %v) =! oshash(n, k, ... %v)", buf1, buf2) } } + +func BenchmarkOsHash(b *testing.B) { + src := rand.NewSource(9999) + r := rand.New(src) + + size := int64(1234567890) + + head := make([]byte, 1024*64) + _, err := r.Read(head) + if err != nil { + b.Errorf("unable to generate head array: %v", err) + } + + tail := make([]byte, 1024*64) + _, err = r.Read(tail) + if err != nil { + b.Errorf("unable to generate tail array: %v", err) + } + + b.ResetTimer() + + for n := 0; n < b.N; n++ { + _, err := oshash(size, head, tail) + if err != nil { + b.Errorf("unexpected error: %v", err) + } + } +} diff --git a/pkg/utils/reflect.go b/pkg/utils/reflect.go new file mode 100644 index 000000000..65b0903b6 --- /dev/null +++ b/pkg/utils/reflect.go @@ -0,0 +1,30 @@ +package utils + +import "reflect" + +// NotNilFields returns the matching tag values of fields from an object that are not nil. +// Panics if the provided object is not a struct. 
+func NotNilFields(subject interface{}, tag string) []string { + value := reflect.ValueOf(subject) + structType := value.Type() + + if structType.Kind() != reflect.Struct { + panic("subject must be struct") + } + + var ret []string + + for i := 0; i < value.NumField(); i++ { + field := value.Field(i) + + kind := field.Type().Kind() + if (kind == reflect.Ptr || kind == reflect.Slice) && !field.IsNil() { + tagValue := structType.Field(i).Tag.Get(tag) + if tagValue != "" { + ret = append(ret, tagValue) + } + } + } + + return ret +} diff --git a/pkg/utils/reflect_test.go b/pkg/utils/reflect_test.go new file mode 100644 index 000000000..87757e0e1 --- /dev/null +++ b/pkg/utils/reflect_test.go @@ -0,0 +1,83 @@ +package utils + +import ( + "reflect" + "testing" +) + +func TestNotNilFields(t *testing.T) { + v := "value" + var zeroStr string + + type testObject struct { + ptrField *string `tag:"ptrField"` + noTagField *string + otherTagField *string `otherTag:"otherTagField"` + sliceField []string `tag:"sliceField"` + } + + type args struct { + subject interface{} + tag string + } + tests := []struct { + name string + args args + want []string + }{ + { + "basic", + args{ + testObject{ + ptrField: &v, + noTagField: &v, + otherTagField: &v, + sliceField: []string{v}, + }, + "tag", + }, + []string{"ptrField", "sliceField"}, + }, + { + "empty", + args{ + testObject{}, + "tag", + }, + nil, + }, + { + "zero values", + args{ + testObject{ + ptrField: &zeroStr, + noTagField: &zeroStr, + otherTagField: &zeroStr, + sliceField: []string{}, + }, + "tag", + }, + []string{"ptrField", "sliceField"}, + }, + { + "other tag", + args{ + testObject{ + ptrField: &v, + noTagField: &v, + otherTagField: &v, + sliceField: []string{v}, + }, + "otherTag", + }, + []string{"otherTagField"}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := NotNilFields(tt.args.subject, tt.args.tag); !reflect.DeepEqual(got, tt.want) { + t.Errorf("NotNilFields() = %v, want %v", got, 
tt.want) + } + }) + } +} diff --git a/pkg/utils/windows.go b/pkg/utils/windows.go index fb25f912d..cf63e1ae4 100644 --- a/pkg/utils/windows.go +++ b/pkg/utils/windows.go @@ -8,7 +8,7 @@ import ( // FixWindowsPath replaces \ with / in the given path because sometimes the \ isn't recognized as valid on windows func FixWindowsPath(str string) string { if runtime.GOOS == "windows" { - return strings.Replace(str, "\\", "/", -1) + return strings.ReplaceAll(str, "\\", "/") } return str } diff --git a/ui/v2.5/.eslintrc.json b/ui/v2.5/.eslintrc.json index 32597b3e3..ca3e8c0ce 100644 --- a/ui/v2.5/.eslintrc.json +++ b/ui/v2.5/.eslintrc.json @@ -10,18 +10,23 @@ "project": "./tsconfig.json" }, "plugins": [ - "@typescript-eslint" + "@typescript-eslint", + "jsx-a11y" ], "extends": [ "airbnb-typescript", "airbnb/hooks", + "plugin:react/recommended", + "plugin:import/recommended", "prettier", - "prettier/prettier", - "prettier/react", - "prettier/@typescript-eslint" + "prettier/prettier" ], + "settings": { + "react": { + "version": "detect" + } + }, "rules": { - "@typescript-eslint/explicit-function-return-type": "off", "@typescript-eslint/no-explicit-any": 2, "@typescript-eslint/naming-convention": [ "error", @@ -48,36 +53,17 @@ ], "import/named": "off", "import/namespace": "off", - "import/default": "off", - "import/no-named-as-default-member": "off", - "import/no-named-as-default": "off", - "import/no-cycle": "off", - "import/no-unused-modules": "off", - "import/no-deprecated": "off", "import/no-unresolved": "off", - "import/prefer-default-export": "off", - "import/no-extraneous-dependencies": "off", - "indent": "off", - "@typescript-eslint/indent": "off", + "react/display-name": "off", "react/prop-types": "off", - "react/destructuring-assignment": "off", - "react/require-default-props": "off", - "react/jsx-props-no-spreading": "off", - "react/sort-comp": "off", "react/style-prop-object": ["error", { "allow": ["FormattedNumber"] }], "spaced-comment": ["error", "always", { 
"markers": ["/"] }], - "max-classes-per-file": "off", - "no-plusplus": "off", "prefer-destructuring": ["error", {"object": true, "array": false}], - "default-case": "off", - "consistent-return": "off", "@typescript-eslint/no-use-before-define": ["error", { "functions": false, "classes": true }], - "no-underscore-dangle": "off", - "no-nested-ternary": "off", - "jsx-a11y/media-has-caption": "off" + "no-nested-ternary": "off" } } diff --git a/ui/v2.5/craco.config.js b/ui/v2.5/craco.config.js new file mode 100644 index 000000000..5309c4c3e --- /dev/null +++ b/ui/v2.5/craco.config.js @@ -0,0 +1,4 @@ +const CracoEsbuildPlugin = require('craco-esbuild'); +module.exports = { + plugins: [{ plugin: CracoEsbuildPlugin }], +}; diff --git a/ui/v2.5/package.json b/ui/v2.5/package.json index 3eb6464c7..66a0a1a16 100644 --- a/ui/v2.5/package.json +++ b/ui/v2.5/package.json @@ -6,7 +6,7 @@ "sideEffects": false, "scripts": { "start": "react-scripts start", - "build": "react-scripts build", + "build": "craco build", "test": "react-scripts test", "eject": "react-scripts eject", "build-ci": "yarn validate && yarn build", @@ -46,6 +46,7 @@ "formik": "^2.2.6", "graphql": "^15.4.0", "graphql-tag": "^2.11.0", + "hamming-distance": "^1.0.0", "i18n-iso-countries": "^6.4.0", "intersection-observer": "^0.12.0", "jimp": "^0.16.1", @@ -54,9 +55,10 @@ "mousetrap": "^1.6.5", "mousetrap-pause": "^1.0.0", "query-string": "6.13.8", - "react": "17.0.1", + "react": "17.0.2", "react-bootstrap": "1.4.3", - "react-dom": "17.0.1", + "react-dom": "17.0.2", + "react-helmet": "^6.1.0", "react-intl": "^5.10.16", "react-jw-player": "1.19.1", "react-markdown": "^5.0.3", @@ -73,6 +75,7 @@ "yup": "^0.32.9" }, "devDependencies": { + "@craco/craco": "^6.3.0", "@graphql-codegen/add": "^2.0.2", "@graphql-codegen/cli": "^1.20.0", "@graphql-codegen/time": "^2.0.2", @@ -85,19 +88,22 @@ "@types/lodash": "^4.14.168", "@types/mousetrap": "^1.6.5", "@types/node": "14.14.22", - "@types/react": "17.0.0", - "@types/react-dom": 
"^17.0.0", + "@types/react": "17.0.31", + "@types/react-dom": "^17.0.10", + "@types/react-helmet": "^6.1.3", "@types/react-router-bootstrap": "^0.24.5", "@types/react-router-dom": "5.1.7", "@types/react-router-hash-link": "^1.2.1", - "@typescript-eslint/eslint-plugin": "^4.14.0", - "@typescript-eslint/parser": "^4.14.0", - "eslint": "^7.18.0", - "eslint-config-airbnb-typescript": "^12.0.0", - "eslint-config-prettier": "^7.2.0", - "eslint-plugin-import": "^2.22.1", + "@typescript-eslint/eslint-plugin": "^4.33.0", + "@typescript-eslint/parser": "^4.33.0", + "craco-esbuild": "^0.4.2", + "eslint": "^7.32.0", + "eslint-config-airbnb": "^18.2.1", + "eslint-config-airbnb-typescript": "^14.0.1", + "eslint-config-prettier": "^8.3.0", + "eslint-plugin-import": "^2.25.2", "eslint-plugin-jsx-a11y": "^6.4.1", - "eslint-plugin-react": "^7.22.0", + "eslint-plugin-react": "^7.26.1", "eslint-plugin-react-hooks": "^4.2.0", "extract-react-intl-messages": "^4.1.1", "postcss-safe-parser": "^5.0.2", @@ -106,6 +112,6 @@ "stylelint": "^13.9.0", "stylelint-config-prettier": "^8.0.2", "stylelint-order": "^4.1.0", - "typescript": "~4.0.5" + "typescript": "~4.4.4" } } diff --git a/ui/v2.5/src/App.tsx b/ui/v2.5/src/App.tsx index c1560d1f5..8ee1c2b9b 100755 --- a/ui/v2.5/src/App.tsx +++ b/ui/v2.5/src/App.tsx @@ -1,6 +1,7 @@ import React, { useEffect } from "react"; import { Route, Switch, useRouteMatch } from "react-router-dom"; -import { IntlProvider } from "react-intl"; +import { IntlProvider, CustomFormats } from "react-intl"; +import { Helmet } from "react-helmet"; import { mergeWith } from "lodash"; import { ToastProvider } from "src/hooks/Toast"; import LightboxProvider from "src/hooks/Lightbox/context"; @@ -30,7 +31,8 @@ import Images from "./components/Images/Images"; import { Setup } from "./components/Setup/Setup"; import { Migrate } from "./components/Setup/Migrate"; import * as GQL from "./core/generated-graphql"; -import { LoadingIndicator } from "./components/Shared"; +import { 
LoadingIndicator, TITLE_SUFFIX } from "./components/Shared"; +import { ConfigurationProvider } from "./hooks/Config"; initPolyfills(); @@ -39,7 +41,7 @@ MousetrapPause(Mousetrap); // Set fontawesome/free-solid-svg as default fontawesome icons library.add(fas); -const intlFormats = { +const intlFormats: CustomFormats = { date: { long: { year: "numeric", month: "long", day: "numeric" }, }, @@ -138,12 +140,21 @@ export const App: React.FC = () => { return ( - - - {maybeRenderNavbar()} -
{renderContent()}
-
-
+ + + + + {maybeRenderNavbar()} +
{renderContent()}
+
+
+
); diff --git a/ui/v2.5/src/components/Changelog/Changelog.tsx b/ui/v2.5/src/components/Changelog/Changelog.tsx index f504f81de..423edf578 100644 --- a/ui/v2.5/src/components/Changelog/Changelog.tsx +++ b/ui/v2.5/src/components/Changelog/Changelog.tsx @@ -13,6 +13,7 @@ import V070 from "./versions/v070.md"; import V080 from "./versions/v080.md"; import V090 from "./versions/v090.md"; import V0100 from "./versions/v0100.md"; +import V0110 from "./versions/v0110.md"; import { MarkdownPage } from "../Shared/MarkdownPage"; // to avoid use of explicit any @@ -51,9 +52,9 @@ const Changelog: React.FC = () => { // after new release: // add entry to releases, using the current* fields // then update the current fields. - const currentVersion = stashVersion || "v0.10.0"; + const currentVersion = stashVersion || "v0.11.0"; const currentDate = buildDate; - const currentPage = V0100; + const currentPage = V0110; const releases: IStashRelease[] = [ { @@ -62,6 +63,11 @@ const Changelog: React.FC = () => { page: currentPage, defaultOpen: true, }, + { + version: "v0.10.0", + date: "2021-10-11", + page: V0100, + }, { version: "v0.9.0", date: "2021-09-06", diff --git a/ui/v2.5/src/components/Changelog/versions/v0110.md b/ui/v2.5/src/components/Changelog/versions/v0110.md new file mode 100644 index 000000000..a6c18fc62 --- /dev/null +++ b/ui/v2.5/src/components/Changelog/versions/v0110.md @@ -0,0 +1,43 @@ +### 💫 [Help Shape the Future of Stash!](https://forms.gle/x5nZa1zrVTJpgMHx8) +The Stash developers would greatly appreciate if you take a [short, anonymous survey](https://forms.gle/x5nZa1zrVTJpgMHx8). It would help us out a great deal to make yourself heard, let us know how you use Stash, and tell us what you'd like to see in the future. + +### ✨ New Features +* Added Identify task to automatically identify scenes from stash-box/scraper sources. See manual entry for details. 
([#1839](https://github.com/stashapp/stash/pull/1839)) +* Added support for matching scenes using perceptual hashes when querying stash-box. ([#1858](https://github.com/stashapp/stash/pull/1858)) +* Generalised Tagger view to support tagging using supported scene scrapers. ([#1812](https://github.com/stashapp/stash/pull/1812)) +* Added built-in `Auto Tag` scene scraper to match performers, studio and tags from filename - using AutoTag logic. ([#1817](https://github.com/stashapp/stash/pull/1817)) +* Add options to auto-start videos when playing from selection and continue to scene playlists. ([#1921](https://github.com/stashapp/stash/pull/1921)) +* Support is (not) null for multi-relational filter criteria. ([#1785](https://github.com/stashapp/stash/pull/1785)) +* Optionally open browser on startup (enabled by default for new systems). ([#1832](https://github.com/stashapp/stash/pull/1832)) +* Support setting defaults for Delete File and Delete Generated Files in the Interface Settings. ([#1852](https://github.com/stashapp/stash/pull/1852)) +* Added interface options to disable creating performers/studios/tags from dropdown selectors. ([#1814](https://github.com/stashapp/stash/pull/1814)) + +### 🎨 Improvements +* Added Italian 🇮🇹, French 🇫🇷, and Spanish 🇪🇸 translations ([#1875](https://github.com/stashapp/stash/pull/1875), [#1967](https://github.com/stashapp/stash/pull/1967), [#1886](https://github.com/stashapp/stash/pull/1886)) +* Added stash-id to scene scrape dialog. ([#1955](https://github.com/stashapp/stash/pull/1955)) +* Reworked main navbar and positioned at bottom for mobile devices. ([#1769](https://github.com/stashapp/stash/pull/1769)) +* Show files being deleted in the Delete dialogs. ([#1852](https://github.com/stashapp/stash/pull/1852)) +* Added specific page titles. ([#1831](https://github.com/stashapp/stash/pull/1831)) +* Show pagination at top and bottom of page. 
([#1776](https://github.com/stashapp/stash/pull/1776)) +* Include total duration/megapixels and filesize information on Scenes and Images pages. ([#1776](https://github.com/stashapp/stash/pull/1776)) +* Optimised generate process. ([#1871](https://github.com/stashapp/stash/pull/1871)) +* Added clear button to query text field. ([#1845](https://github.com/stashapp/stash/pull/1845)) +* Moved Performer rating stars from details/edit tabs to heading section of performer page. ([#1844](https://github.com/stashapp/stash/pull/1844)) +* Optimised scanning process. ([#1816](https://github.com/stashapp/stash/pull/1816)) + +### 🐛 Bug fixes +* Fix tag hierarchy not being validated during tag creation. ([#1926](https://github.com/stashapp/stash/pull/1926)) +* Fix tag hierarchy validation incorrectly failing for some hierarchies. ([#1926](https://github.com/stashapp/stash/pull/1926)) +* Fix exclusion pattern fields losing focus on keypress. ([#1952](https://github.com/stashapp/stash/pull/1952)) +* Include stash ids in import/export. ([#1916](https://github.com/stashapp/stash/pull/1916)) +* Fix tiny menu items in scrape menu when a stash-box instance has no name. ([#1889](https://github.com/stashapp/stash/pull/1889)) +* Fix creating missing entities removing the incorrect entry from the missing list in the scrape dialog. ([#1890](https://github.com/stashapp/stash/pull/1890)) +* Allow creating missing Studio during movie scrape. ([#1899](https://github.com/stashapp/stash/pull/1899)) +* Fix image files in folder galleries not being deleted when delete file option is checked. ([#1872](https://github.com/stashapp/stash/pull/1872)) +* Fix marker generation task reading video files unnecessarily. ([#1871](https://github.com/stashapp/stash/pull/1871)) +* Fix accessing Stash via IPv6 link local address causing security tripwire to be activated. ([#1841](https://github.com/stashapp/stash/pull/1841)) +* Fix Twitter value defaulting to freeones in built-in Freeones scraper. 
([#1853](https://github.com/stashapp/stash/pull/1853)) +* Fix colour codes not outputting correctly when logging to file on Windows. ([#1846](https://github.com/stashapp/stash/pull/1846)) +* Sort directory listings using case sensitive collation. ([#1823](https://github.com/stashapp/stash/pull/1823)) +* Fix auto-tag logic for names which have single-letter words. ([#1817](https://github.com/stashapp/stash/pull/1817)) +* Fix huge memory usage spike during clean task. ([#1805](https://github.com/stashapp/stash/pull/1805)) diff --git a/ui/v2.5/src/components/Dialogs/IdentifyDialog/FieldOptions.tsx b/ui/v2.5/src/components/Dialogs/IdentifyDialog/FieldOptions.tsx new file mode 100644 index 000000000..fad0ff7a0 --- /dev/null +++ b/ui/v2.5/src/components/Dialogs/IdentifyDialog/FieldOptions.tsx @@ -0,0 +1,344 @@ +import React, { useState, useEffect, useCallback } from "react"; +import { Form, Button, Table } from "react-bootstrap"; +import { Icon } from "src/components/Shared"; +import * as GQL from "src/core/generated-graphql"; +import { FormattedMessage, useIntl } from "react-intl"; +import { multiValueSceneFields, SceneField, sceneFields } from "./constants"; +import { ThreeStateBoolean } from "./ThreeStateBoolean"; + +interface IFieldOptionsEditor { + options: GQL.IdentifyFieldOptions | undefined; + field: string; + editField: () => void; + editOptions: (o?: GQL.IdentifyFieldOptions | null) => void; + editing: boolean; + allowSetDefault: boolean; + defaultOptions?: GQL.IdentifyMetadataOptionsInput; +} + +interface IFieldOptions { + field: string; + strategy: GQL.IdentifyFieldStrategy | undefined; + createMissing?: GQL.Maybe | undefined; +} + +const FieldOptionsEditor: React.FC = ({ + options, + field, + editField, + editOptions, + editing, + allowSetDefault, + defaultOptions, +}) => { + const intl = useIntl(); + + const [localOptions, setLocalOptions] = useState(); + + const resetOptions = useCallback(() => { + let toSet: IFieldOptions; + if (!options) { + // unset - 
use default values + toSet = { + field, + strategy: undefined, + createMissing: undefined, + }; + } else { + toSet = { + field, + strategy: options.strategy, + createMissing: options.createMissing, + }; + } + setLocalOptions(toSet); + }, [options, field]); + + useEffect(() => { + resetOptions(); + }, [resetOptions]); + + function renderField() { + return intl.formatMessage({ id: field }); + } + + function renderStrategy() { + if (!localOptions) { + return; + } + + const strategies = Object.entries(GQL.IdentifyFieldStrategy); + let { strategy } = localOptions; + if (strategy === undefined) { + if (!allowSetDefault) { + strategy = GQL.IdentifyFieldStrategy.Merge; + } + } + + if (!editing) { + if (strategy === undefined) { + return intl.formatMessage({ id: "actions.use_default" }); + } + + const f = strategies.find((s) => s[1] === strategy); + return intl.formatMessage({ + id: `actions.${f![0].toLowerCase()}`, + }); + } + + if (!localOptions) { + return <>; + } + + return ( + + {allowSetDefault ? ( + + setLocalOptions({ + ...localOptions, + strategy: undefined, + }) + } + disabled={!editing} + label={intl.formatMessage({ id: "actions.use_default" })} + /> + ) : undefined} + {strategies.map((f) => ( + + setLocalOptions({ + ...localOptions, + strategy: f[1], + }) + } + disabled={!editing} + label={intl.formatMessage({ + id: `actions.${f[0].toLowerCase()}`, + })} + /> + ))} + + ); + } + + function maybeRenderCreateMissing() { + if (!localOptions) { + return; + } + + if ( + multiValueSceneFields.includes(localOptions.field as SceneField) && + localOptions.strategy !== GQL.IdentifyFieldStrategy.Ignore + ) { + const value = + localOptions.createMissing === null + ? 
undefined + : localOptions.createMissing; + + if (!editing) { + if (value === undefined && allowSetDefault) { + return intl.formatMessage({ id: "actions.use_default" }); + } + if (value) { + return ; + } + + return ; + } + + const defaultVal = defaultOptions?.fieldOptions?.find( + (f) => f.field === localOptions.field + )?.createMissing; + + if (localOptions.strategy === undefined) { + return; + } + + return ( + + setLocalOptions({ ...localOptions, createMissing: v }) + } + defaultValue={defaultVal ?? undefined} + /> + ); + } + } + + function onEditOptions() { + if (!localOptions) { + return; + } + + // send null if strategy is undefined + if (localOptions.strategy === undefined) { + editOptions(null); + resetOptions(); + } else { + let { createMissing } = localOptions; + if (createMissing === undefined && !allowSetDefault) { + createMissing = false; + } + + editOptions({ + ...localOptions, + strategy: localOptions.strategy, + createMissing, + }); + } + } + + return ( + + {renderField()} + {renderStrategy()} + {maybeRenderCreateMissing()} + + {editing ? 
( + <> + + + + ) : ( + <> + + + )} + + + ); +}; + +interface IFieldOptionsList { + fieldOptions?: GQL.IdentifyFieldOptions[]; + setFieldOptions: (o: GQL.IdentifyFieldOptions[]) => void; + setEditingField: (v: boolean) => void; + allowSetDefault?: boolean; + defaultOptions?: GQL.IdentifyMetadataOptionsInput; +} + +export const FieldOptionsList: React.FC = ({ + fieldOptions, + setFieldOptions, + setEditingField, + allowSetDefault = true, + defaultOptions, +}) => { + const [localFieldOptions, setLocalFieldOptions] = useState< + GQL.IdentifyFieldOptions[] + >(); + const [editField, setEditField] = useState(); + + useEffect(() => { + if (fieldOptions) { + setLocalFieldOptions([...fieldOptions]); + } else { + setLocalFieldOptions([]); + } + }, [fieldOptions]); + + function handleEditOptions(o?: GQL.IdentifyFieldOptions | null) { + if (!localFieldOptions) { + return; + } + + if (o !== undefined) { + const newOptions = [...localFieldOptions]; + const index = newOptions.findIndex( + (option) => option.field === editField + ); + if (index !== -1) { + // if null, then we're removing + if (o === null) { + newOptions.splice(index, 1); + } else { + // replace in list + newOptions.splice(index, 1, o); + } + } else if (o !== null) { + // don't add if null + newOptions.push(o); + } + + setFieldOptions(newOptions); + } + + setEditField(undefined); + setEditingField(false); + } + + function onEditField(field: string) { + setEditField(field); + setEditingField(true); + } + + if (!localFieldOptions) { + return <>; + } + + return ( + +
+ +
+ + + + + + + {/* eslint-disable-next-line jsx-a11y/control-has-associated-label */} + + + + {sceneFields.map((f) => ( + o.field === f)} + editField={() => onEditField(f)} + editOptions={handleEditOptions} + editing={f === editField} + defaultOptions={defaultOptions} + /> + ))} + +
+ + + + + + +
+
+ ); +}; diff --git a/ui/v2.5/src/components/Dialogs/IdentifyDialog/IdentifyDialog.tsx b/ui/v2.5/src/components/Dialogs/IdentifyDialog/IdentifyDialog.tsx new file mode 100644 index 000000000..f4e91b4ea --- /dev/null +++ b/ui/v2.5/src/components/Dialogs/IdentifyDialog/IdentifyDialog.tsx @@ -0,0 +1,451 @@ +import React, { useState, useEffect, useMemo } from "react"; +import { Button, Form, Spinner } from "react-bootstrap"; +import { + mutateMetadataIdentify, + useConfiguration, + useConfigureDefaults, + useListSceneScrapers, +} from "src/core/StashService"; +import { Icon, Modal } from "src/components/Shared"; +import { useToast } from "src/hooks"; +import * as GQL from "src/core/generated-graphql"; +import { FormattedMessage, useIntl } from "react-intl"; +import { withoutTypename } from "src/utils"; +import { + SCRAPER_PREFIX, + STASH_BOX_PREFIX, +} from "src/components/Tagger/constants"; +import { DirectorySelectionDialog } from "src/components/Settings/SettingsTasksPanel/DirectorySelectionDialog"; +import { Manual } from "src/components/Help/Manual"; +import { IScraperSource } from "./constants"; +import { OptionsEditor } from "./Options"; +import { SourcesEditor, SourcesList } from "./Sources"; + +const autoTagScraperID = "builtin_autotag"; + +interface IIdentifyDialogProps { + selectedIds?: string[]; + onClose: () => void; +} + +export const IdentifyDialog: React.FC = ({ + selectedIds, + onClose, +}) => { + function getDefaultOptions(): GQL.IdentifyMetadataOptionsInput { + return { + fieldOptions: [ + { + field: "title", + strategy: GQL.IdentifyFieldStrategy.Overwrite, + }, + ], + includeMalePerformers: true, + setCoverImage: true, + setOrganized: false, + }; + } + + const [configureDefaults] = useConfigureDefaults(); + + const [options, setOptions] = useState( + getDefaultOptions() + ); + const [sources, setSources] = useState([]); + const [editingSource, setEditingSource] = useState< + IScraperSource | undefined + >(); + const [paths, setPaths] = useState([]); 
+ const [showManual, setShowManual] = useState(false); + const [settingPaths, setSettingPaths] = useState(false); + const [animation, setAnimation] = useState(true); + const [editingField, setEditingField] = useState(false); + const [savingDefaults, setSavingDefaults] = useState(false); + + const intl = useIntl(); + const Toast = useToast(); + + const { data: configData, error: configError } = useConfiguration(); + const { data: scraperData, error: scraperError } = useListSceneScrapers(); + + const allSources = useMemo(() => { + if (!configData || !scraperData) return; + + const ret: IScraperSource[] = []; + + ret.push( + ...configData.configuration.general.stashBoxes.map((b, i) => { + return { + id: `${STASH_BOX_PREFIX}${i}`, + displayName: `stash-box: ${b.name}`, + stash_box_endpoint: b.endpoint, + }; + }) + ); + + const scrapers = scraperData.listSceneScrapers; + + const fragmentScrapers = scrapers.filter((s) => + s.scene?.supported_scrapes.includes(GQL.ScrapeType.Fragment) + ); + + ret.push( + ...fragmentScrapers.map((s) => { + return { + id: `${SCRAPER_PREFIX}${s.id}`, + displayName: s.name, + scraper_id: s.id, + }; + }) + ); + + return ret; + }, [configData, scraperData]); + + const selectionStatus = useMemo(() => { + if (selectedIds) { + return ( + + + . + + ); + } + const message = paths.length ? ( +
+ : +
    + {paths.map((p) => ( +
  • {p}
  • + ))} +
+
+ ) : ( + + + . + + ); + + function onClick() { + setAnimation(false); + setSettingPaths(true); + } + + return ( + +
+ {message} +
+ +
+
+
+ ); + }, [selectedIds, intl, paths]); + + useEffect(() => { + if (!configData || !allSources) return; + + const { identify: identifyDefaults } = configData.configuration.defaults; + + if (identifyDefaults) { + const mappedSources = identifyDefaults.sources + .map((s) => { + const found = allSources.find( + (ss) => + ss.scraper_id === s.source.scraper_id || + ss.stash_box_endpoint === s.source.stash_box_endpoint + ); + + if (!found) return; + + const ret: IScraperSource = { + ...found, + }; + + if (s.options) { + const sourceOptions = withoutTypename(s.options); + sourceOptions.fieldOptions = sourceOptions.fieldOptions?.map( + withoutTypename + ); + ret.options = sourceOptions; + } + + return ret; + }) + .filter((s) => s) as IScraperSource[]; + + setSources(mappedSources); + if (identifyDefaults.options) { + const defaultOptions = withoutTypename(identifyDefaults.options); + defaultOptions.fieldOptions = defaultOptions.fieldOptions?.map( + withoutTypename + ); + setOptions(defaultOptions); + } + } else { + // default to first stash-box instance only + const stashBox = allSources.find((s) => s.stash_box_endpoint); + + // add auto-tag as well + const autoTag = allSources.find( + (s) => s.id === `${SCRAPER_PREFIX}${autoTagScraperID}` + ); + + const newSources: IScraperSource[] = []; + if (stashBox) { + newSources.push(stashBox); + } + + // sanity check - this should always be true + if (autoTag) { + // don't set organised by default + const autoTagCopy = { ...autoTag }; + autoTagCopy.options = { + setOrganized: false, + }; + newSources.push(autoTagCopy); + } + + setSources(newSources); + } + }, [allSources, configData]); + + if (configError || scraperError) + return
{configError ?? scraperError}
; + if (!allSources || !configData) return
; + + function makeIdentifyInput(): GQL.IdentifyMetadataInput { + return { + sources: sources.map((s) => { + return { + source: { + scraper_id: s.scraper_id, + stash_box_endpoint: s.stash_box_endpoint, + }, + options: s.options, + }; + }), + options, + sceneIDs: selectedIds, + paths, + }; + } + + function makeDefaultIdentifyInput() { + const ret = makeIdentifyInput(); + const { sceneIDs, paths: _paths, ...withoutSpecifics } = ret; + return withoutSpecifics; + } + + async function onIdentify() { + try { + await mutateMetadataIdentify(makeIdentifyInput()); + + Toast.success({ + content: intl.formatMessage( + { id: "config.tasks.added_job_to_queue" }, + { operation_name: intl.formatMessage({ id: "actions.identify" }) } + ), + }); + } catch (e) { + Toast.error(e); + } finally { + onClose(); + } + } + + function getAvailableSources() { + // only include scrapers not already present + return !editingSource?.id === undefined + ? [] + : allSources?.filter((s) => { + return !sources.some((ss) => ss.id === s.id); + }) ?? 
[]; + } + + function onEditSource(s?: IScraperSource) { + setAnimation(false); + + // if undefined, then set a dummy source to create a new one + if (!s) { + setEditingSource(getAvailableSources()[0]); + } else { + setEditingSource(s); + } + } + + function onShowManual() { + setAnimation(false); + setShowManual(true); + } + + function isNewSource() { + return !!editingSource && !sources.includes(editingSource); + } + + function onSaveSource(s?: IScraperSource) { + if (s) { + let found = false; + const newSources = sources.map((ss) => { + if (ss.id === s.id) { + found = true; + return s; + } + return ss; + }); + + if (!found) { + newSources.push(s); + } + + setSources(newSources); + } + setEditingSource(undefined); + } + + async function setAsDefault() { + try { + setSavingDefaults(true); + await configureDefaults({ + variables: { + input: { + identify: makeDefaultIdentifyInput(), + }, + }, + }); + } catch (e) { + Toast.error(e); + } finally { + setSavingDefaults(false); + } + } + + if (editingSource) { + return ( + + ); + } + + if (settingPaths) { + return ( + { + if (p) { + setPaths(p); + } + setSettingPaths(false); + }} + /> + ); + } + + if (showManual) { + return ( + setShowManual(false)} + defaultActiveTab="Identify.md" + /> + ); + } + + return ( + onClose(), + text: intl.formatMessage({ id: "actions.cancel" }), + variant: "secondary", + }} + disabled={editingField || savingDefaults || sources.length === 0} + footerButtons={ + + } + leftFooterButtons={ + + } + > +
+ {selectionStatus} + setSources(s)} + editSource={onEditSource} + canAdd={sources.length < allSources.length} + /> + setOptions(o)} + setEditingField={(v) => setEditingField(v)} + /> + +
+ ); +}; + +export default IdentifyDialog; diff --git a/ui/v2.5/src/components/Dialogs/IdentifyDialog/Options.tsx b/ui/v2.5/src/components/Dialogs/IdentifyDialog/Options.tsx new file mode 100644 index 000000000..88655c860 --- /dev/null +++ b/ui/v2.5/src/components/Dialogs/IdentifyDialog/Options.tsx @@ -0,0 +1,117 @@ +import React from "react"; +import { Form } from "react-bootstrap"; +import * as GQL from "src/core/generated-graphql"; +import { FormattedMessage, useIntl } from "react-intl"; +import { IScraperSource } from "./constants"; +import { FieldOptionsList } from "./FieldOptions"; +import { ThreeStateBoolean } from "./ThreeStateBoolean"; + +interface IOptionsEditor { + options: GQL.IdentifyMetadataOptionsInput; + setOptions: (s: GQL.IdentifyMetadataOptionsInput) => void; + source?: IScraperSource; + defaultOptions?: GQL.IdentifyMetadataOptionsInput; + setEditingField: (v: boolean) => void; +} + +export const OptionsEditor: React.FC = ({ + options, + setOptions: setOptionsState, + source, + setEditingField, + defaultOptions, +}) => { + const intl = useIntl(); + + function setOptions(v: Partial) { + setOptionsState({ ...options, ...v }); + } + + const headingID = !source + ? "config.tasks.identify.default_options" + : "config.tasks.identify.source_options"; + const checkboxProps = { + allowUndefined: !!source, + indeterminateClassname: "text-muted", + }; + + return ( + + +
+ +
+ {!source && ( + + {intl.formatMessage({ + id: "config.tasks.identify.explicit_set_description", + })} + + )} +
+ + + setOptions({ + includeMalePerformers: v, + }) + } + label={intl.formatMessage({ + id: "config.tasks.identify.include_male_performers", + })} + defaultValue={defaultOptions?.includeMalePerformers ?? undefined} + {...checkboxProps} + /> + + setOptions({ + setCoverImage: v, + }) + } + label={intl.formatMessage({ + id: "config.tasks.identify.set_cover_images", + })} + defaultValue={defaultOptions?.setCoverImage ?? undefined} + {...checkboxProps} + /> + + setOptions({ + setOrganized: v, + }) + } + label={intl.formatMessage({ + id: "config.tasks.identify.set_organized", + })} + defaultValue={defaultOptions?.setOrganized ?? undefined} + {...checkboxProps} + /> + + + setOptions({ fieldOptions: o })} + setEditingField={setEditingField} + allowSetDefault={!!source} + defaultOptions={defaultOptions} + /> +
+ ); +}; diff --git a/ui/v2.5/src/components/Dialogs/IdentifyDialog/Sources.tsx b/ui/v2.5/src/components/Dialogs/IdentifyDialog/Sources.tsx new file mode 100644 index 000000000..81d213115 --- /dev/null +++ b/ui/v2.5/src/components/Dialogs/IdentifyDialog/Sources.tsx @@ -0,0 +1,217 @@ +import React, { useState, useEffect } from "react"; +import { Form, Button, ListGroup } from "react-bootstrap"; +import { Modal, Icon } from "src/components/Shared"; +import { FormattedMessage, useIntl } from "react-intl"; +import * as GQL from "src/core/generated-graphql"; +import { IScraperSource } from "./constants"; +import { OptionsEditor } from "./Options"; + +interface ISourceEditor { + isNew: boolean; + availableSources: IScraperSource[]; + source: IScraperSource; + saveSource: (s?: IScraperSource) => void; + defaultOptions: GQL.IdentifyMetadataOptionsInput; +} + +export const SourcesEditor: React.FC = ({ + isNew, + availableSources, + source: initialSource, + saveSource, + defaultOptions, +}) => { + const [source, setSource] = useState(initialSource); + const [editingField, setEditingField] = useState(false); + + const intl = useIntl(); + + // if id is empty, then we are adding a new source + const headerMsgId = isNew ? "actions.add" : "dialogs.edit_entity_title"; + const acceptMsgId = isNew ? 
"actions.add" : "actions.confirm"; + + function handleSourceSelect(e: React.ChangeEvent) { + const selectedSource = availableSources.find( + (s) => s.id === e.currentTarget.value + ); + if (!selectedSource) return; + + setSource({ + ...source, + id: selectedSource.id, + displayName: selectedSource.displayName, + scraper_id: selectedSource.scraper_id, + stash_box_endpoint: selectedSource.stash_box_endpoint, + }); + } + + return ( + saveSource(source), + text: intl.formatMessage({ id: acceptMsgId }), + }} + cancel={{ + onClick: () => saveSource(), + text: intl.formatMessage({ id: "actions.cancel" }), + variant: "secondary", + }} + disabled={ + (!source.scraper_id && !source.stash_box_endpoint) || editingField + } + > +
+ {isNew && ( + +
+ +
+ + {availableSources.map((i) => ( + + ))} + +
+ )} + setSource({ ...source, options: o })} + source={source} + setEditingField={(v) => setEditingField(v)} + defaultOptions={defaultOptions} + /> + +
+ ); +}; + +interface ISourcesList { + sources: IScraperSource[]; + setSources: (s: IScraperSource[]) => void; + editSource: (s?: IScraperSource) => void; + canAdd: boolean; +} + +export const SourcesList: React.FC = ({ + sources, + setSources, + editSource, + canAdd, +}) => { + const [tempSources, setTempSources] = useState(sources); + const [dragIndex, setDragIndex] = useState(); + const [mouseOverIndex, setMouseOverIndex] = useState(); + + useEffect(() => { + setTempSources([...sources]); + }, [sources]); + + function removeSource(index: number) { + const newSources = [...sources]; + newSources.splice(index, 1); + setSources(newSources); + } + + function onDragStart(event: React.DragEvent, index: number) { + event.dataTransfer.effectAllowed = "move"; + setDragIndex(index); + } + + function onDragOver(event: React.DragEvent, index?: number) { + if (dragIndex !== undefined && index !== undefined && index !== dragIndex) { + const newSources = [...tempSources]; + const moved = newSources.splice(dragIndex, 1); + newSources.splice(index, 0, moved[0]); + setTempSources(newSources); + setDragIndex(index); + } + + event.dataTransfer.dropEffect = "move"; + event.preventDefault(); + } + + function onDragOverDefault(event: React.DragEvent) { + event.dataTransfer.dropEffect = "move"; + event.preventDefault(); + } + + function onDrop() { + // assume we've already set the temp source list + // feed it up + setSources(tempSources); + setDragIndex(undefined); + setMouseOverIndex(undefined); + } + + return ( + +
+ +
+ + {tempSources.map((s, index) => ( + onDragStart(e, index)} + onDragEnter={(e) => onDragOver(e, index)} + onDrop={() => onDrop()} + > +
+
setMouseOverIndex(index)} + onMouseLeave={() => setMouseOverIndex(undefined)} + > + +
+ {s.displayName} +
+
+ + +
+
+ ))} +
+ {canAdd && ( +
+ +
+ )} +
+ ); +}; diff --git a/ui/v2.5/src/components/Dialogs/IdentifyDialog/ThreeStateBoolean.tsx b/ui/v2.5/src/components/Dialogs/IdentifyDialog/ThreeStateBoolean.tsx new file mode 100644 index 000000000..527bb5165 --- /dev/null +++ b/ui/v2.5/src/components/Dialogs/IdentifyDialog/ThreeStateBoolean.tsx @@ -0,0 +1,90 @@ +import React from "react"; +import { Form } from "react-bootstrap"; +import { useIntl } from "react-intl"; + +interface IThreeStateBoolean { + id: string; + value: boolean | undefined; + setValue: (v: boolean | undefined) => void; + allowUndefined?: boolean; + label?: React.ReactNode; + disabled?: boolean; + defaultValue?: boolean; +} + +export const ThreeStateBoolean: React.FC = ({ + id, + value, + setValue, + allowUndefined = true, + label, + disabled, + defaultValue, +}) => { + const intl = useIntl(); + + if (!allowUndefined) { + return ( + setValue(!value)} + /> + ); + } + + function getBooleanText(v: boolean) { + if (v) { + return intl.formatMessage({ id: "true" }); + } + return intl.formatMessage({ id: "false" }); + } + + function getButtonText(v: boolean | undefined) { + if (v === undefined) { + const defaultVal = + defaultValue !== undefined ? ( + + {" "} + ({getBooleanText(defaultValue)}) + + ) : ( + "" + ); + return ( + + {intl.formatMessage({ id: "actions.use_default" })} + {defaultVal} + + ); + } + + return getBooleanText(v); + } + + function renderModeButton(v: boolean | undefined) { + return ( + setValue(v)} + disabled={disabled} + label={getButtonText(v)} + /> + ); + } + + return ( + +
{label}
+ + {renderModeButton(undefined)} + {renderModeButton(false)} + {renderModeButton(true)} + +
+ ); +}; diff --git a/ui/v2.5/src/components/Dialogs/IdentifyDialog/constants.ts b/ui/v2.5/src/components/Dialogs/IdentifyDialog/constants.ts new file mode 100644 index 000000000..11c7fe6e8 --- /dev/null +++ b/ui/v2.5/src/components/Dialogs/IdentifyDialog/constants.ts @@ -0,0 +1,27 @@ +import * as GQL from "src/core/generated-graphql"; + +export interface IScraperSource { + id: string; + displayName: string; + stash_box_endpoint?: string; + scraper_id?: string; + options?: GQL.IdentifyMetadataOptionsInput; +} + +export const sceneFields = [ + "title", + "date", + "details", + "url", + "studio", + "performers", + "tags", + "stash_ids", +] as const; +export type SceneField = typeof sceneFields[number]; + +export const multiValueSceneFields: SceneField[] = [ + "studio", + "performers", + "tags", +]; diff --git a/ui/v2.5/src/components/Dialogs/IdentifyDialog/styles.scss b/ui/v2.5/src/components/Dialogs/IdentifyDialog/styles.scss new file mode 100644 index 000000000..9257d0b1d --- /dev/null +++ b/ui/v2.5/src/components/Dialogs/IdentifyDialog/styles.scss @@ -0,0 +1,45 @@ +.identify-source-editor { + .default-value { + color: #bfccd6; + } +} + +.scraper-source-list { + .list-group-item { + background-color: $textfield-bg; + padding: 0.25em; + + .drag-handle { + cursor: move; + display: inline-block; + margin: -0.25em 0.25em -0.25em -0.25em; + padding: 0.25em 0.5em 0.25em; + } + + .drag-handle:hover, + .drag-handle:active, + .drag-handle:focus, + .drag-handle:focus:active { + background-color: initial; + border-color: initial; + box-shadow: initial; + } + } +} + +.scraper-sources { + .add-scraper-source-button { + margin-right: 0.25em; + } +} + +.field-options-table td:first-child { + padding-left: 0.75rem; +} + +#selected-identify-folders { + & > div { + display: flex; + justify-content: space-between; + } +} diff --git a/ui/v2.5/src/components/Galleries/DeleteGalleriesDialog.tsx b/ui/v2.5/src/components/Galleries/DeleteGalleriesDialog.tsx index 8ff227e5c..2c671a4bb 
100644 --- a/ui/v2.5/src/components/Galleries/DeleteGalleriesDialog.tsx +++ b/ui/v2.5/src/components/Galleries/DeleteGalleriesDialog.tsx @@ -4,10 +4,11 @@ import { useGalleryDestroy } from "src/core/StashService"; import * as GQL from "src/core/generated-graphql"; import { Modal } from "src/components/Shared"; import { useToast } from "src/hooks"; -import { useIntl } from "react-intl"; +import { ConfigurationContext } from "src/hooks/Config"; +import { FormattedMessage, useIntl } from "react-intl"; interface IDeleteGalleryDialogProps { - selected: Pick[]; + selected: GQL.SlimGalleryDataFragment[]; onClose: (confirmed: boolean) => void; } @@ -31,8 +32,14 @@ export const DeleteGalleriesDialog: React.FC = ( { count: props.selected.length, singularEntity, pluralEntity } ); - const [deleteFile, setDeleteFile] = useState(false); - const [deleteGenerated, setDeleteGenerated] = useState(true); + const { configuration: config } = React.useContext(ConfigurationContext); + + const [deleteFile, setDeleteFile] = useState( + config?.defaults.deleteFile ?? false + ); + const [deleteGenerated, setDeleteGenerated] = useState( + config?.defaults.deleteGenerated ?? true + ); const Toast = useToast(); const [deleteGallery] = useGalleryDestroy(getGalleriesDeleteInput()); @@ -60,6 +67,50 @@ export const DeleteGalleriesDialog: React.FC = ( props.onClose(true); } + function maybeRenderDeleteFileAlert() { + if (!deleteFile) { + return; + } + + const fsGalleries = props.selected.filter((g) => g.path); + if (fsGalleries.length === 0) { + return; + } + + return ( +
+

+ +

+
    + {fsGalleries.slice(0, 5).map((s) => ( +
  • {s.path}
  • + ))} + {fsGalleries.length > 5 && ( + + )} +
  • + +
  • +
+
+ ); + } + return ( = ( isRunning={isDeleting} >

{message}

+ {maybeRenderDeleteFileAlert()}
setDeleteFile(!deleteFile)} /> ( - - ( - - )} - /> - - -); +const Galleries = () => { + const intl = useIntl(); + + const title_template = `${intl.formatMessage({ + id: "galleries", + })} ${TITLE_SUFFIX}`; + return ( + <> + + + ( + + )} + /> + + + + + ); +}; export default Galleries; diff --git a/ui/v2.5/src/components/Galleries/GalleryCard.tsx b/ui/v2.5/src/components/Galleries/GalleryCard.tsx index 4f9a9ade0..5a72a737c 100644 --- a/ui/v2.5/src/components/Galleries/GalleryCard.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryCard.tsx @@ -2,7 +2,6 @@ import { Button, ButtonGroup } from "react-bootstrap"; import React from "react"; import { Link } from "react-router-dom"; import * as GQL from "src/core/generated-graphql"; -import { useConfiguration } from "src/core/StashService"; import { GridCard, HoverPopover, @@ -12,6 +11,7 @@ import { } from "src/components/Shared"; import { PopoverCountButton } from "src/components/Shared/PopoverCountButton"; import { NavUtils, TextUtils } from "src/utils"; +import { ConfigurationContext } from "src/hooks/Config"; import { PerformerPopoverButton } from "../Shared/PerformerPopoverButton"; import { RatingBanner } from "../Shared/RatingBanner"; @@ -24,9 +24,8 @@ interface IProps { } export const GalleryCard: React.FC = (props) => { - const config = useConfiguration(); - const showStudioAsText = - config?.data?.configuration.interface.showStudioAsText ?? false; + const { configuration } = React.useContext(ConfigurationContext); + const showStudioAsText = configuration?.interface.showStudioAsText ?? 
false; function maybeRenderScenePopoverButton() { if (props.gallery.scenes.length === 0) return; diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx index b438d54a7..b9f2673dc 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx @@ -2,6 +2,8 @@ import { Tab, Nav, Dropdown } from "react-bootstrap"; import React, { useEffect, useState } from "react"; import { useParams, useHistory, Link } from "react-router-dom"; import { FormattedMessage, useIntl } from "react-intl"; +import { Helmet } from "react-helmet"; +import * as GQL from "src/core/generated-graphql"; import { mutateMetadataScan, useFindGallery, @@ -9,7 +11,7 @@ import { } from "src/core/StashService"; import { ErrorMessage, LoadingIndicator, Icon } from "src/components/Shared"; import { TextUtils } from "src/utils"; -import * as Mousetrap from "mousetrap"; +import Mousetrap from "mousetrap"; import { useToast } from "src/hooks"; import { OrganizedButton } from "src/components/Scenes/SceneDetails/OrganizedButton"; import { GalleryEditPanel } from "./GalleryEditPanel"; @@ -20,27 +22,26 @@ import { GalleryAddPanel } from "./GalleryAddPanel"; import { GalleryFileInfoPanel } from "./GalleryFileInfoPanel"; import { GalleryScenesPanel } from "./GalleryScenesPanel"; +interface IProps { + gallery: GQL.GalleryDataFragment; +} + interface IGalleryParams { - id?: string; tab?: string; } -export const Gallery: React.FC = () => { - const { tab = "images", id = "new" } = useParams(); +export const GalleryPage: React.FC = ({ gallery }) => { + const { tab = "images" } = useParams(); const history = useHistory(); const Toast = useToast(); const intl = useIntl(); - const isNew = id === "new"; - - const { data, error, loading } = useFindGallery(id); - const gallery = data?.findGallery; const [activeTabKey, setActiveTabKey] = useState("gallery-details-panel"); 
const activeRightTabKey = tab === "images" || tab === "add" ? tab : "images"; const setActiveRightTabKey = (newTab: string | null) => { if (tab !== newTab) { const tabParam = newTab === "images" ? "" : `/${newTab}`; - history.replace(`/galleries/${id}${tabParam}`); + history.replace(`/galleries/${gallery.id}${tabParam}`); } }; @@ -54,8 +55,8 @@ export const Gallery: React.FC = () => { await updateGallery({ variables: { input: { - id: gallery?.id ?? "", - organized: !gallery?.organized, + id: gallery.id, + organized: !gallery.organized, }, }, }); @@ -99,7 +100,7 @@ export const Gallery: React.FC = () => { if (isDeleteAlertOpen && gallery) { return ( ); @@ -118,7 +119,7 @@ export const Gallery: React.FC = () => { - {gallery?.path ? ( + {gallery.path ? ( { }; }); - if (loading) { - return ; - } - - if (error) return ; - - if (isNew) - return ( -
-
-

- -

- setIsDeleteAlertOpen(true)} - /> -
-
- ); - - if (!gallery) - return ; - return (
+ + + {gallery.title ?? TextUtils.fileNameFromPath(gallery.path ?? "")} + + {maybeRenderDeleteDialog()}
@@ -323,3 +300,17 @@ export const Gallery: React.FC = () => {
); }; + +const GalleryLoader: React.FC = () => { + const { id } = useParams<{ id?: string }>(); + const { data, loading, error } = useFindGallery(id ?? ""); + + if (loading) return ; + if (error) return ; + if (!data?.findGallery) + return ; + + return ; +}; + +export default GalleryLoader; diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryAddPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryAddPanel.tsx index a25908a3c..e256251e0 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryAddPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryAddPanel.tsx @@ -11,7 +11,7 @@ import { useIntl } from "react-intl"; import { IconProp } from "@fortawesome/fontawesome-svg-core"; interface IGalleryAddProps { - gallery: Partial; + gallery: GQL.GalleryDataFragment; } export const GalleryAddPanel: React.FC = ({ gallery }) => { @@ -20,7 +20,7 @@ export const GalleryAddPanel: React.FC = ({ gallery }) => { function filterHook(filter: ListFilterModel) { const galleryValue = { - id: gallery.id!, + id: gallery.id, label: gallery.title ?? TextUtils.fileNameFromPath(gallery.path ?? ""), }; // if galleries is already present, then we modify it, otherwise add diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryCreate.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryCreate.tsx new file mode 100644 index 000000000..e7a0407bc --- /dev/null +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryCreate.tsx @@ -0,0 +1,28 @@ +import React from "react"; +import { FormattedMessage, useIntl } from "react-intl"; +import { GalleryEditPanel } from "./GalleryEditPanel"; + +const GalleryCreate: React.FC = () => { + const intl = useIntl(); + + return ( +
+
+

+ +

+ {}} + /> +
+
+ ); +}; + +export default GalleryCreate; diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx index 878137d0f..bc2c3b1e7 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx @@ -9,23 +9,25 @@ import { RatingStars } from "src/components/Scenes/SceneDetails/RatingStars"; import { sortPerformers } from "src/core/performers"; interface IGalleryDetailProps { - gallery: Partial; + gallery: GQL.GalleryDataFragment; } -export const GalleryDetailPanel: React.FC = (props) => { +export const GalleryDetailPanel: React.FC = ({ + gallery, +}) => { function renderDetails() { - if (!props.gallery.details || props.gallery.details === "") return; + if (!gallery.details) return; return ( <>
Details
-

{props.gallery.details}

+

{gallery.details}

); } function renderTags() { - if (!props.gallery.tags || props.gallery.tags.length === 0) return; - const tags = props.gallery.tags.map((tag) => ( + if (gallery.tags.length === 0) return; + const tags = gallery.tags.map((tag) => ( )); return ( @@ -37,14 +39,13 @@ export const GalleryDetailPanel: React.FC = (props) => { } function renderPerformers() { - if (!props.gallery.performers || props.gallery.performers.length === 0) - return; - const performers = sortPerformers(props.gallery.performers); + if (gallery.performers.length === 0) return; + const performers = sortPerformers(gallery.performers); const cards = performers.map((performer) => ( )); @@ -59,9 +60,8 @@ export const GalleryDetailPanel: React.FC = (props) => { } // filename should use entire row if there is no studio - const galleryDetailsWidth = props.gallery.studio ? "col-9" : "col-12"; - const title = - props.gallery.title ?? TextUtils.fileNameFromPath(props.gallery.path ?? ""); + const galleryDetailsWidth = gallery.studio ? "col-9" : "col-12"; + const title = gallery.title ?? TextUtils.fileNameFromPath(gallery.path ?? ""); return ( <> @@ -70,29 +70,29 @@ export const GalleryDetailPanel: React.FC = (props) => {

- {props.gallery.date ? ( + {gallery.date ? (
) : undefined} - {props.gallery.rating ? ( + {gallery.rating ? (
- Rating: + Rating:
) : ( "" )}
- {props.gallery.studio && ( + {gallery.studio && (
- + {`${props.gallery.studio.name} diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx index cd13c59df..18d824e2c 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx @@ -1,6 +1,6 @@ import React, { useEffect, useState } from "react"; import { FormattedMessage, useIntl } from "react-intl"; -import { useHistory } from "react-router-dom"; +import { useHistory, Prompt } from "react-router-dom"; import { Button, Dropdown, @@ -27,10 +27,10 @@ import { StudioSelect, Icon, LoadingIndicator, + URLField, } from "src/components/Shared"; import { useToast } from "src/hooks"; import { useFormik } from "formik"; -import { Prompt } from "react-router"; import { FormUtils, TextUtils } from "src/utils"; import { RatingStars } from "src/components/Scenes/SceneDetails/RatingStars"; import { GalleryScrapeDialog } from "./GalleryScrapeDialog"; @@ -385,21 +385,6 @@ export const GalleryEditPanel: React.FC< } } - function maybeRenderScrapeButton() { - if (!formik.values.url || !urlScrapable(formik.values.url)) { - return undefined; - } - return ( - - ); - } - function renderTextField(field: string, title: string, placeholder?: string) { return ( @@ -462,15 +447,12 @@ export const GalleryEditPanel: React.FC< -
- {maybeRenderScrapeButton()} -
- diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScenesPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScenesPanel.tsx index e2d5d4814..32c4af635 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScenesPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScenesPanel.tsx @@ -11,7 +11,7 @@ export const GalleryScenesPanel: React.FC = ({ }) => (
{scenes.map((scene) => ( - + ))}
); diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScrapeDialog.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScrapeDialog.tsx index 2e34fc7af..a0b82a67d 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScrapeDialog.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScrapeDialog.tsx @@ -60,7 +60,9 @@ function renderScrapedStudioRow( } onChange={onChange} newValues={newStudio ? [newStudio] : undefined} - onCreateNew={onCreateNew} + onCreateNew={() => { + if (onCreateNew && newStudio) onCreateNew(newStudio); + }} /> ); } @@ -112,7 +114,9 @@ function renderScrapedPerformersRow( } onChange={onChange} newValues={performersCopy} - onCreateNew={onCreateNew} + onCreateNew={(i) => { + if (onCreateNew) onCreateNew(newPerformers[i]); + }} /> ); } @@ -159,7 +163,9 @@ function renderScrapedTagsRow( } newValues={newTags} onChange={onChange} - onCreateNew={onCreateNew} + onCreateNew={(i) => { + if (onCreateNew) onCreateNew(newTags[i]); + }} /> ); } diff --git a/ui/v2.5/src/components/Galleries/GalleryList.tsx b/ui/v2.5/src/components/Galleries/GalleryList.tsx index e56f728fa..cb240f13d 100644 --- a/ui/v2.5/src/components/Galleries/GalleryList.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryList.tsx @@ -88,13 +88,8 @@ export const GalleryList: React.FC = ({ filterCopy.itemsPerPage = 1; filterCopy.currentPage = index + 1; const singleResult = await queryFindGalleries(filterCopy); - if ( - singleResult && - singleResult.data && - singleResult.data.findGalleries && - singleResult.data.findGalleries.galleries.length === 1 - ) { - const { id } = singleResult!.data!.findGalleries!.galleries[0]; + if (singleResult.data.findGalleries.galleries.length === 1) { + const { id } = singleResult.data.findGalleries.galleries[0]; // navigate to the image player page history.push(`/galleries/${id}`); } diff --git a/ui/v2.5/src/components/Help/Manual.tsx b/ui/v2.5/src/components/Help/Manual.tsx index 7455f79b5..9a170abee 100644 
--- a/ui/v2.5/src/components/Help/Manual.tsx +++ b/ui/v2.5/src/components/Help/Manual.tsx @@ -8,6 +8,7 @@ import Configuration from "src/docs/en/Configuration.md"; import Interface from "src/docs/en/Interface.md"; import Galleries from "src/docs/en/Galleries.md"; import Scraping from "src/docs/en/Scraping.md"; +import ScraperDevelopment from "src/docs/en/ScraperDevelopment.md"; import Plugins from "src/docs/en/Plugins.md"; import ExternalPlugins from "src/docs/en/ExternalPlugins.md"; import EmbeddedPlugins from "src/docs/en/EmbeddedPlugins.md"; @@ -18,15 +19,18 @@ import KeyboardShortcuts from "src/docs/en/KeyboardShortcuts.md"; import Help from "src/docs/en/Help.md"; import Deduplication from "src/docs/en/Deduplication.md"; import Interactive from "src/docs/en/Interactive.md"; +import Identify from "src/docs/en/Identify.md"; import { MarkdownPage } from "../Shared/MarkdownPage"; interface IManualProps { + animation?: boolean; show: boolean; onClose: () => void; defaultActiveTab?: string; } export const Manual: React.FC = ({ + animation, show, onClose, defaultActiveTab, @@ -52,6 +56,12 @@ export const Manual: React.FC = ({ title: "Tasks", content: Tasks, }, + { + key: "Identify.md", + title: "Identify", + content: Identify, + className: "indent-1", + }, { key: "AutoTagging.md", title: "Auto Tagging", @@ -80,6 +90,12 @@ export const Manual: React.FC = ({ title: "Metadata Scraping", content: Scraping, }, + { + key: "ScraperDevelopment.md", + title: "Scraper Development", + content: ScraperDevelopment, + className: "indent-1", + }, { key: "Plugins.md", title: "Plugins", @@ -152,6 +168,7 @@ export const Manual: React.FC = ({ return ( = ( { count: props.selected.length, singularEntity, pluralEntity } ); - const [deleteFile, setDeleteFile] = useState(false); - const [deleteGenerated, setDeleteGenerated] = useState(true); + const { configuration: config } = React.useContext(ConfigurationContext); + + const [deleteFile, setDeleteFile] = useState( + 
config?.defaults.deleteFile ?? false + ); + const [deleteGenerated, setDeleteGenerated] = useState( + config?.defaults.deleteGenerated ?? true + ); const Toast = useToast(); const [deleteImage] = useImagesDestroy(getImagesDeleteInput()); @@ -60,6 +67,42 @@ export const DeleteImagesDialog: React.FC = ( props.onClose(true); } + function maybeRenderDeleteFileAlert() { + if (!deleteFile) { + return; + } + + return ( +
+

+ +

+
    + {props.selected.slice(0, 5).map((s) => ( +
  • {s.path}
  • + ))} + {props.selected.length > 5 && ( + + )} +
+
+ ); + } + return ( = ( isRunning={isDeleting} >

{message}

+ {maybeRenderDeleteFileAlert()} { return (
+ + {image.title ?? TextUtils.fileNameFromPath(image.path)} + + {maybeRenderDeleteDialog()}
diff --git a/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx b/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx index ffce99e37..55d2fe42d 100644 --- a/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx +++ b/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx @@ -14,7 +14,7 @@ import { import { useToast } from "src/hooks"; import { FormUtils } from "src/utils"; import { useFormik } from "formik"; -import { Prompt } from "react-router"; +import { Prompt } from "react-router-dom"; import { RatingStars } from "src/components/Scenes/SceneDetails/RatingStars"; interface IProps { diff --git a/ui/v2.5/src/components/Images/Images.tsx b/ui/v2.5/src/components/Images/Images.tsx index dfbd6224c..be16ed0b6 100644 --- a/ui/v2.5/src/components/Images/Images.tsx +++ b/ui/v2.5/src/components/Images/Images.tsx @@ -1,20 +1,36 @@ import React from "react"; import { Route, Switch } from "react-router-dom"; +import { useIntl } from "react-intl"; +import { Helmet } from "react-helmet"; +import { TITLE_SUFFIX } from "src/components/Shared"; import { PersistanceLevel } from "src/hooks/ListHook"; import { Image } from "./ImageDetails/Image"; import { ImageList } from "./ImageList"; -const Images = () => ( - - ( - - )} - /> - - -); +const Images: React.FC = () => { + const intl = useIntl(); + + const title_template = `${intl.formatMessage({ + id: "images", + })} ${TITLE_SUFFIX}`; + return ( + <> + + + ( + + )} + /> + + + + ); +}; export default Images; diff --git a/ui/v2.5/src/components/List/Filters/HierarchicalLabelValueFilter.tsx b/ui/v2.5/src/components/List/Filters/HierarchicalLabelValueFilter.tsx index c51f4e85b..52a3bae7e 100644 --- a/ui/v2.5/src/components/List/Filters/HierarchicalLabelValueFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/HierarchicalLabelValueFilter.tsx @@ -51,6 +51,16 @@ export const HierarchicalLabelValueFilter: React.FC onDepthChanged(criterion.value.depth !== 0 ? 
0 : -1)} diff --git a/ui/v2.5/src/components/List/ListFilter.tsx b/ui/v2.5/src/components/List/ListFilter.tsx index 1adaa37fb..1b6237aeb 100644 --- a/ui/v2.5/src/components/List/ListFilter.tsx +++ b/ui/v2.5/src/components/List/ListFilter.tsx @@ -1,5 +1,6 @@ import _, { debounce } from "lodash"; import React, { HTMLAttributes, useEffect, useRef, useState } from "react"; +import cx from "classnames"; import Mousetrap from "mousetrap"; import { SortDirectionEnum } from "src/core/generated-graphql"; import { @@ -23,6 +24,7 @@ import { FormattedMessage, useIntl } from "react-intl"; import { PersistanceLevel } from "src/hooks/ListHook"; import { SavedFilterList } from "./SavedFilterList"; +const maxPageSize = 1000; interface IListFilterProps { onFilterUpdate: (newFilter: ListFilterModel) => void; filter: ListFilterModel; @@ -44,6 +46,9 @@ export const ListFilter: React.FC = ({ }) => { const [customPageSizeShowing, setCustomPageSizeShowing] = useState(false); const [queryRef, setQueryFocus] = useFocus(); + const [queryClearShowing, setQueryClearShowing] = useState( + !!filter.searchTerm + ); const perPageSelect = useRef(null); const [perPageInput, perPageFocus] = useFocus(); @@ -86,11 +91,16 @@ export const ListFilter: React.FC = ({ setCustomPageSizeShowing(false); - const pp = parseInt(val, 10); + let pp = parseInt(val, 10); if (Number.isNaN(pp) || pp <= 0) { return; } + // don't allow page sizes over 1000 + if (pp > maxPageSize) { + pp = maxPageSize; + } + const newFilter = _.cloneDeep(filter); newFilter.itemsPerPage = pp; newFilter.currentPage = 1; @@ -99,6 +109,14 @@ export const ListFilter: React.FC = ({ function onChangeQuery(event: React.FormEvent) { searchCallback(event.currentTarget.value); + setQueryClearShowing(!!event.currentTarget.value); + } + + function onClearQuery() { + queryRef.current.value = ""; + searchCallback(""); + setQueryFocus(); + setQueryClearShowing(false); } function onChangeSortDirection() { @@ -150,7 +168,7 @@ export const ListFilter: 
React.FC = ({ const SavedFilterDropdown = React.forwardRef< HTMLDivElement, HTMLAttributes - >(({ style, className }, ref) => ( + >(({ style, className }: HTMLAttributes, ref) => (
= ({ />
)); + SavedFilterDropdown.displayName = "SavedFilterDropdown"; function render() { const currentSortBy = filterOptions.sortByOptions.find( @@ -217,7 +236,17 @@ export const ListFilter: React.FC = ({ onInput={onChangeQuery} className="query-text-field bg-secondary text-white border-secondary" /> - + = ({
- - {currentSortBy - ? intl.formatMessage({ id: currentSortBy.messageID }) - : ""} - + + + {currentSortBy + ? intl.formatMessage({ id: currentSortBy.messageID }) + : ""} + + {renderSortByOptions()} @@ -309,6 +340,7 @@ export const ListFilter: React.FC = ({ ) => { diff --git a/ui/v2.5/src/components/List/ListOperationButtons.tsx b/ui/v2.5/src/components/List/ListOperationButtons.tsx index 7b4da09e4..f16159818 100644 --- a/ui/v2.5/src/components/List/ListOperationButtons.tsx +++ b/ui/v2.5/src/components/List/ListOperationButtons.tsx @@ -100,6 +100,7 @@ export const ListOperationButtons: React.FC = ({ return ( {button.text}} + key={button.text} > - - ); + const { pathname } = location; + const newPath = newPathsList.includes(pathname) ? `${pathname}/new` : null; // set up hotkeys useEffect(() => { Mousetrap.bind("?", () => setShowManual(!showManual)); - Mousetrap.bind("g s", () => goto("/scenes")); - Mousetrap.bind("g i", () => goto("/images")); - Mousetrap.bind("g v", () => goto("/movies")); - Mousetrap.bind("g k", () => goto("/scenes/markers")); - Mousetrap.bind("g l", () => goto("/galleries")); - Mousetrap.bind("g p", () => goto("/performers")); - Mousetrap.bind("g u", () => goto("/studios")); - Mousetrap.bind("g t", () => goto("/tags")); Mousetrap.bind("g z", () => goto("/settings")); + menuItems.forEach((item) => + Mousetrap.bind(item.hotkey, () => goto(item.href)) + ); + if (newPath) { Mousetrap.bind("n", () => history.push(newPath)); } return () => { Mousetrap.unbind("?"); - Mousetrap.unbind("g s"); - Mousetrap.unbind("g v"); - Mousetrap.unbind("g k"); - Mousetrap.unbind("g l"); - Mousetrap.unbind("g p"); - Mousetrap.unbind("g u"); - Mousetrap.unbind("g t"); Mousetrap.unbind("g z"); + menuItems.forEach((item) => Mousetrap.unbind(item.hotkey)); if (newPath) { Mousetrap.unbind("n"); @@ -232,13 +227,60 @@ export const MainNavbar: React.FC = () => { function maybeRenderLogout() { if (SessionUtils.isLoggedIn()) { return ( - ); } } + const handleDismiss = 
useCallback(() => setExpanded(false), [setExpanded]); + + function renderUtilityButtons() { + return ( + <> + + + + + + + + {maybeRenderLogout()} + + ); + } + return ( <> setShowManual(false)} /> @@ -253,62 +295,54 @@ export const MainNavbar: React.FC = () => { onToggle={setExpanded} ref={navbarRef} > - setExpanded(false)} - > - - - - - - - + - + <> + + + -