diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6bbaf07bb..3aff928c2 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -8,6 +8,9 @@ on: release: types: [ published ] +env: + COMPILER_IMAGE: stashapp/compiler:4 + jobs: build: runs-on: ubuntu-20.04 @@ -17,44 +20,32 @@ jobs: - name: Checkout run: git fetch --prune --unshallow --tags - - name: Set up Go - uses: actions/setup-go@v2 - with: - go-version: 1.13 + - name: Pull compiler image + run: docker pull $COMPILER_IMAGE - - name: Set up Node - uses: actions/setup-node@v2 - with: - node-version: '12' - - name: Cache node modules uses: actions/cache@v2 env: cache-name: cache-node_modules with: path: ui/v2.5/node_modules - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} - restore-keys: | - ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/yarn.lock') }} - name: Pre-install - run: make pre-ui + run: docker run --rm --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated -w /stash $COMPILER_IMAGE /bin/bash -c "make pre-ui" - name: Generate - run: make generate + run: docker run --rm --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated -w /stash $COMPILER_IMAGE /bin/bash -c "make generate" + # TODO: Replace with `make validate` once `revive` is bundled in COMPILER_IMAGE - name: Validate - run: make ui-validate fmt-check vet it + run: docker run --rm --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated -w /stash $COMPILER_IMAGE /bin/bash -c "make ui-validate fmt-check vet it" - name: Build UI - run: make ui-only + run: docker run --rm --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated -w /stash $COMPILER_IMAGE /bin/bash -c "make ui-only" - - name: Cross Compile - run: | - docker pull stashapp/compiler:4 - ./scripts/cross-compile.sh + - name: 
Compile for all supported platforms + run: ./scripts/cross-compile.sh - name: Generate checksums run: | @@ -93,14 +84,12 @@ jobs: - name: Development Release if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }} - uses: meeDamian/github-release@2.0 + uses: marvinpinto/action-automatic-releases@v1.1.2 with: - token: "${{ secrets.GITHUB_TOKEN }}" + repo_token: "${{ secrets.GITHUB_TOKEN }}" prerelease: true - allow_override: true - tag: latest_develop - name: "${{ env.STASH_VERSION }}: Latest development build" - body: "**${{ env.RELEASE_DATE }}**\n This is always the latest committed version on the develop branch. Use as your own risk!" + automatic_release_tag: latest_develop + title: "${{ env.STASH_VERSION }}: Latest development build" files: | dist/stash-osx dist/stash-win.exe @@ -109,8 +98,7 @@ jobs: dist/stash-linux-arm32v7 dist/stash-pi CHECKSUMS_SHA1 - gzip: false - + - name: Master release if: ${{ github.event_name == 'release' && github.ref != 'refs/tags/latest_develop' }} uses: meeDamian/github-release@2.0 diff --git a/README.md b/README.md index 6d0513996..ce1322be4 100644 --- a/README.md +++ b/README.md @@ -8,36 +8,36 @@ https://stashapp.cc **Stash is a locally hosted web-based app written in Go which organizes and serves your porn.** -* It can gather information about videos in your collection from the internet, and is extensible through the use of community-built plugins. -* It supports a wide variety of both video and image formats +* It can gather information about videos in your collection from the internet, and is extensible through the use of community-built plugins for a large number of content producers. +* It supports a wide variety of both video and image formats. * You can tag videos and find them later. * It provides statistics about performers, tags, studios and other things. -You can [watch a demo video](https://vimeo.com/275537038)to see it in action (password is stashapp). 
+You can [watch an SFW demo video](https://vimeo.com/545323354) to see it in action. -For further information you can [read the in-app manual](https://github.com/stashapp/stash/tree/develop/ui/v2.5/src/docs/en). +For further information you can [read the in-app manual](ui/v2.5/src/docs/en). # Installing stash -## Docker install +## via Docker Follow [this README.md in the docker directory.](docker/production/README.md) ## Pre-Compiled Binaries -Stash supports macOS, Windows, and Linux. Download the [latest release here](https://github.com/stashapp/stash/releases). +The Stash server runs on macOS, Windows, and Linux. Download the [latest release here](https://github.com/stashapp/stash/releases). Run the executable (double click the exe on windows or run `./stash-osx` / `./stash-linux` from the terminal on macOS / Linux) and navigate to either https://localhost:9999 or http://localhost:9999 to get started. -*Note for Windows users:* Running the app might present a security prompt since the binary isn't signed yet. Just click more info and then the "run anyway" button. +*Note for Windows users:* Running the app might present a security prompt since the binary isn't yet signed. Bypass this by clicking "more info" and then the "run anyway" button. 
#### FFMPEG If stash is unable to find or download FFMPEG then download it yourself from the link for your platform: -* [macOS](https://ffmpeg.zeranoe.com/builds/macos64/static/ffmpeg-4.0-macos64-static.zip) -* [Windows](https://ffmpeg.zeranoe.com/builds/win64/static/ffmpeg-4.0-win64-static.zip) -* [Linux](https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz) +* [macOS ffmpeg](https://evermeet.cx/ffmpeg/ffmpeg-4.3.1.zip), [macOS ffprobe](https://evermeet.cx/ffmpeg/ffprobe-4.3.1.zip) +* [Windows](https://www.gyan.dev/ffmpeg/builds/ffmpeg-release-essentials.zip) +* [Linux](https://www.johnvansickle.com/ffmpeg/) The `ffmpeg(.exe)` and `ffprobe(.exe)` files should be placed in `~/.stash` on macOS / Linux or `C:\Users\YourUsername\.stash` on Windows. @@ -48,9 +48,9 @@ The `ffmpeg(.exe)` and `ffprobe(.exe)` files should be placed in `~/.stash` on m 2) Run Stash. It will prompt you for some configuration options and a directory to index (you can also do this step afterward) 3) After configuration, launch your web browser and navigate to the URL shown within the Stash app. -**Note that Stash does not currently retrieve and organize information about your entire library automatically.** You will need to help it along through the use of [scrapers](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/docs/en/Scraping.md). The Stash community has developed scrapers for many popular data sources which can be downloaded and installed from [this repository](https://github.com/stashapp/CommunityScrapers). +**Note that Stash does not currently retrieve and organize information about your entire library automatically.** You will need to help it along through the use of [scrapers](ui/v2.5/src/docs/en/Scraping.md). The Stash community has developed scrapers for many popular data sources which can be downloaded and installed from [this repository](https://github.com/stashapp/CommunityScrapers). 
-The simplest way to tag a large number of files is by using the [Tagger](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/docs/en/Tagger.md) which uses filename keywords to help identify the file and pull in scene and performer information from our database. Note that this information is not comprehensive and you may need to use the scrapers to identify some of your media. +The simplest way to tag a large number of files is by using the [Tagger](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/docs/en/Tagger.md) which uses filename keywords to help identify the file and pull in scene and performer information from our stash-box database. Note that this data source is not comprehensive and you may need to use the scrapers to identify some of your media. ## CLI @@ -60,7 +60,7 @@ For example, to run stash locally on port 80 run it like this (OSX / Linux) `sta ## SSL (HTTPS) -Stash supports HTTPS with some additional work. First you must generate a SSL certificate and key combo. Here is an example using openssl: +Stash can run over HTTPS with some additional work. First you must generate a SSL certificate and key combo. Here is an example using openssl: `openssl req -x509 -newkey rsa:4096 -sha256 -days 7300 -nodes -keyout stash.key -out stash.crt -extensions san -config <(echo "[req]"; echo distinguished_name=req; echo "[san]"; echo subjectAltName=DNS:stash.server,IP:127.0.0.1) -subj /CN=stash.server` @@ -70,31 +70,30 @@ Once you have a certificate and key file name them `stash.crt` and `stash.key` a # Customization -## Themes -There is a [directory of themes](https://github.com/stashapp/stash/wiki/Themes) on our Wiki, along with instructions on how to install them.. +## Themes and CSS Customization +There is a [directory of community-created themes](https://github.com/stashapp/stash/wiki/Themes) on our Wiki, along with instructions on how to install them. 
-## CSS Customization -You can make Stash interface fit your desired style with [Custom CSS snippets](https://github.com/stashapp/stash/wiki/Custom-CSS-snippets) and [CSS Tweaks](https://github.com/stashapp/stash/wiki/CSS-Tweaks). +You can also make Stash interface fit your desired style with [Custom CSS snippets](https://github.com/stashapp/stash/wiki/Custom-CSS-snippets) and [CSS Tweaks](https://github.com/stashapp/stash/wiki/CSS-Tweaks). -# Suppport +# Support (FAQ) -Answers to frequently asked questions can be found [on our Wiki](https://github.com/stashapp/stash/wiki/FAQ) +Answers to other Frequently Asked Questions can be found [on our Wiki](https://github.com/stashapp/stash/wiki/FAQ) For issues not addressed there, there are a few options. * Read the [Wiki](https://github.com/stashapp/stash/wiki) * Check the in-app documentation (also available [here](https://github.com/stashapp/stash/tree/develop/ui/v2.5/src/docs/en) -* Join the [Discord server](https://discord.gg/2TsNFKt). +* Join the [Discord server](https://discord.gg/2TsNFKt), where the community can offer support. -# Building From Source Code +# Compiling From Source Code -## Install +## Pre-requisites * [Go](https://golang.org/dl/) * [Revive](https://github.com/mgechev/revive) - Configurable linter * Go Install: `go get github.com/mgechev/revive` -* [Packr2](https://github.com/gobuffalo/packr/tree/v2.0.2/v2) - Static asset bundler - * Go Install: `go get github.com/gobuffalo/packr/v2/packr2@v2.0.2` +* [Packr2](https://github.com/gobuffalo/packr/) - Static asset bundler + * Go Install: `go get github.com/gobuffalo/packr/v2/packr2` * [Binary Download](https://github.com/gobuffalo/packr/releases) * [Yarn](https://yarnpkg.com/en/docs/install) - Yarn package manager * Run `yarn install --frozen-lockfile` in the `stash/ui/v2.5` folder (before running make generate for first time). @@ -141,7 +140,7 @@ NOTE: The `make` command in Windows will be `mingw32-make` with MingW. 
## Cross compiling -This project uses a modification of [this](https://github.com/bep/dockerfiles/tree/master/ci-goreleaser) docker container to create an environment +This project uses a modification of the [CI-GoReleaser](https://github.com/bep/dockerfiles/tree/master/ci-goreleaser) docker container to create an environment where the app can be cross-compiled. This process is kicked off by CI via the `scripts/cross-compile.sh` script. Run the following command to open a bash shell to the container to poke around: diff --git a/docker/build/x86_64/Dockerfile b/docker/build/x86_64/Dockerfile index 6eabf1105..3a2d8a198 100644 --- a/docker/build/x86_64/Dockerfile +++ b/docker/build/x86_64/Dockerfile @@ -53,6 +53,8 @@ FROM ubuntu:20.04 as app RUN apt-get update && apt-get -y install ca-certificates COPY --from=compiler /stash/stash /ffmpeg/ffmpeg /ffmpeg/ffprobe /usr/bin/ +ENV STASH_CONFIG_FILE=/root/.stash/config.yml + EXPOSE 9999 CMD ["stash"] diff --git a/docker/ci/x86_64/Dockerfile b/docker/ci/x86_64/Dockerfile index 833037c36..1db050163 100644 --- a/docker/ci/x86_64/Dockerfile +++ b/docker/ci/x86_64/Dockerfile @@ -12,6 +12,8 @@ FROM ubuntu:20.04 as app run apt update && apt install -y python3 python3 python-is-python3 python3-requests ffmpeg && rm -rf /var/lib/apt/lists/* COPY --from=prep /stash /usr/bin/ +ENV STASH_CONFIG_FILE=/root/.stash/config.yml + EXPOSE 9999 CMD ["stash"] diff --git a/docker/develop/x86_64/Dockerfile b/docker/develop/x86_64/Dockerfile index cca25aafd..c2efef3a1 100644 --- a/docker/develop/x86_64/Dockerfile +++ b/docker/develop/x86_64/Dockerfile @@ -20,5 +20,8 @@ RUN curl --http1.1 -o /ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/releases/f FROM ubuntu:20.04 as app RUN apt-get update && apt-get -y install ca-certificates COPY --from=prep /stash /ffmpeg/ffmpeg /ffmpeg/ffprobe /usr/bin/ + +ENV STASH_CONFIG_FILE=/root/.stash/config.yml + EXPOSE 9999 CMD ["stash"] diff --git a/docker/production/x86_64/Dockerfile 
b/docker/production/x86_64/Dockerfile index bc152161c..95a2516ed 100644 --- a/docker/production/x86_64/Dockerfile +++ b/docker/production/x86_64/Dockerfile @@ -20,5 +20,8 @@ RUN curl --http1.1 -o /ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/releases/f FROM ubuntu:20.04 as app RUN apt-get update && apt-get -y install ca-certificates COPY --from=prep /stash /ffmpeg/ffmpeg /ffmpeg/ffprobe /usr/bin/ + +ENV STASH_CONFIG_FILE=/root/.stash/config.yml + EXPOSE 9999 CMD ["stash"] diff --git a/go.mod b/go.mod index 50213e41c..d99b84bf4 100644 --- a/go.mod +++ b/go.mod @@ -6,6 +6,8 @@ require ( github.com/antchfx/htmlquery v1.2.3 github.com/chromedp/cdproto v0.0.0-20200608134039-8a80cdaf865c github.com/chromedp/chromedp v0.5.3 + github.com/corona10/goimagehash v1.0.3 + github.com/dgrijalva/jwt-go v3.2.0+incompatible github.com/disintegration/imaging v1.6.0 github.com/fvbommel/sortorder v1.0.2 github.com/go-chi/chi v4.0.2+incompatible @@ -31,7 +33,7 @@ require ( github.com/vektah/gqlparser/v2 v2.0.1 github.com/vektra/mockery/v2 v2.2.1 golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 - golang.org/x/image v0.0.0-20190802002840-cff245a6509b + golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb golang.org/x/net v0.0.0-20200822124328-c89045814202 golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd golang.org/x/tools v0.0.0-20200915031644-64986481280e // indirect diff --git a/go.sum b/go.sum index 744bb3712..d81e4867b 100644 --- a/go.sum +++ b/go.sum @@ -83,6 +83,8 @@ github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3Ee github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/corona10/goimagehash v1.0.3 
h1:NZM518aKLmoNluluhfHGxT3LGOnrojrxhGn63DR/CZA= +github.com/corona10/goimagehash v1.0.3/go.mod h1:VkvE0mLn84L4aF8vCb6mafVajEb6QYMHl2ZJLn0mOGI= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= @@ -99,6 +101,7 @@ github.com/cznic/zappy v0.0.0-20160723133515-2533cb5b45cc/go.mod h1:Y1SNZ4dRUOKX github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= github.com/dgryski/trifles v0.0.0-20190318185328-a8d75aae118c/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= @@ -538,8 +541,6 @@ github.com/mattn/go-sqlite3 v1.9.0 h1:pDRiWfl+++eC2FEFRy6jXmQlvp4Yh3z1MJKg4UeYM/ github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.10.0 h1:jbhqpg7tQe4SupckyijYiy0mJJ/pRyHvXf7JdWK860o= github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= -github.com/mattn/go-sqlite3 v1.13.0 h1:LnJI81JidiW9r7pS/hXe6cFeO5EXNq7KbfvoJLRI69c= -github.com/mattn/go-sqlite3 v1.13.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg= github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= 
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= @@ -574,6 +575,8 @@ github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007 h1:Ohgj9L0EYOgXxkDp+ github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007/go.mod h1:wKCOWMb6iNlvKiOToY2cNuaovSXvIiv1zDi9QDR7aGQ= github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo= github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM= +github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ= +github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8= github.com/nicksnyder/go-i18n v1.10.0/go.mod h1:HrK7VCrbOvQoUAQ7Vpy7i87N7JZZZ7R2xBGjv0j365Q= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= @@ -804,6 +807,8 @@ golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86h golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b h1:+qEpEAPhDZ1o0x3tHzZTQDArnOixOzGD9HUJfcg0mb4= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb h1:fqpd0EBDzlHRCjiphRR5Zo/RSWWQlWv34418dnEixWk= +golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod 
h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= diff --git a/graphql/documents/data/config.graphql b/graphql/documents/data/config.graphql index 07330d63b..e1597c0ca 100644 --- a/graphql/documents/data/config.graphql +++ b/graphql/documents/data/config.graphql @@ -2,7 +2,7 @@ fragment ConfigGeneralData on ConfigGeneralResult { stashes { path excludeVideo - excludeImage + excludeImage } databasePath generatedPath @@ -17,6 +17,7 @@ fragment ConfigGeneralData on ConfigGeneralResult { previewPreset maxTranscodeSize maxStreamingTranscodeSize + apiKey username password maxSessionAge @@ -51,6 +52,7 @@ fragment ConfigInterfaceData on ConfigInterfaceResult { css cssEnabled language + slideshowDelay } fragment ConfigData on ConfigResult { diff --git a/graphql/documents/data/gallery-slim.graphql b/graphql/documents/data/gallery-slim.graphql index 51dbc3484..c408f8deb 100644 --- a/graphql/documents/data/gallery-slim.graphql +++ b/graphql/documents/data/gallery-slim.graphql @@ -1,4 +1,4 @@ -fragment GallerySlimData on Gallery { +fragment SlimGalleryData on Gallery { id checksum path @@ -10,16 +10,31 @@ fragment GallerySlimData on Gallery { organized image_count cover { - ...SlimImageData + file { + size + width + height + } + + paths { + thumbnail + } } studio { - ...StudioData + id + name + image_path } tags { - ...TagData + id + name } performers { - ...PerformerData + id + name + gender + favorite + image_path } scenes { id diff --git a/graphql/documents/data/gallery.graphql b/graphql/documents/data/gallery.graphql index 7c7fd8e24..d1475157a 100644 --- a/graphql/documents/data/gallery.graphql +++ b/graphql/documents/data/gallery.graphql @@ -15,16 +15,16 @@ fragment GalleryData on Gallery { ...SlimImageData } studio { - ...StudioData + ...SlimStudioData } tags { - ...TagData + ...SlimTagData } performers { ...PerformerData } scenes { - ...SceneData + ...SlimSceneData } } diff --git a/graphql/documents/data/image-slim.graphql b/graphql/documents/data/image-slim.graphql index 
ce80786ee..b1c066ee2 100644 --- a/graphql/documents/data/image-slim.graphql +++ b/graphql/documents/data/image-slim.graphql @@ -38,6 +38,7 @@ fragment SlimImageData on Image { performers { id name + gender favorite image_path } diff --git a/graphql/documents/data/image.graphql b/graphql/documents/data/image.graphql index cf4d30e41..14317988e 100644 --- a/graphql/documents/data/image.graphql +++ b/graphql/documents/data/image.graphql @@ -23,11 +23,11 @@ fragment ImageData on Image { } studio { - ...StudioData + ...SlimStudioData } tags { - ...TagData + ...SlimTagData } performers { diff --git a/graphql/documents/data/movie.graphql b/graphql/documents/data/movie.graphql index ef3ab3f9f..e8e378926 100644 --- a/graphql/documents/data/movie.graphql +++ b/graphql/documents/data/movie.graphql @@ -9,7 +9,7 @@ fragment MovieData on Movie { director studio { - ...StudioData + ...SlimStudioData } synopsis diff --git a/graphql/documents/data/performer-slim.graphql b/graphql/documents/data/performer-slim.graphql index 603744d33..1420d15c4 100644 --- a/graphql/documents/data/performer-slim.graphql +++ b/graphql/documents/data/performer-slim.graphql @@ -12,4 +12,5 @@ fragment SlimPerformerData on Performer { endpoint stash_id } + rating } diff --git a/graphql/documents/data/performer.graphql b/graphql/documents/data/performer.graphql index 253412b8a..4c3033c1a 100644 --- a/graphql/documents/data/performer.graphql +++ b/graphql/documents/data/performer.graphql @@ -20,13 +20,20 @@ fragment PerformerData on Performer { favorite image_path scene_count + image_count + gallery_count tags { - ...TagData + ...SlimTagData } stash_ids { stash_id endpoint } + rating + details + death_date + hair_color + weight } diff --git a/graphql/documents/data/scene-slim.graphql b/graphql/documents/data/scene-slim.graphql index 473012d55..4aacf27e4 100644 --- a/graphql/documents/data/scene-slim.graphql +++ b/graphql/documents/data/scene-slim.graphql @@ -10,6 +10,7 @@ fragment SlimSceneData on Scene { 
o_counter organized path + phash file { size @@ -29,6 +30,7 @@ fragment SlimSceneData on Scene { webp vtt chapters_vtt + sprite } scene_markers { @@ -66,6 +68,7 @@ fragment SlimSceneData on Scene { performers { id name + gender favorite image_path } diff --git a/graphql/documents/data/scene.graphql b/graphql/documents/data/scene.graphql index 1f8061e06..83077895c 100644 --- a/graphql/documents/data/scene.graphql +++ b/graphql/documents/data/scene.graphql @@ -10,6 +10,7 @@ fragment SceneData on Scene { o_counter organized path + phash file { size @@ -36,11 +37,11 @@ fragment SceneData on Scene { } galleries { - ...GallerySlimData + ...SlimGalleryData } studio { - ...StudioData + ...SlimStudioData } movies { @@ -51,7 +52,7 @@ fragment SceneData on Scene { } tags { - ...TagData + ...SlimTagData } performers { diff --git a/graphql/documents/data/scrapers.graphql b/graphql/documents/data/scrapers.graphql index b4397cdf3..f9fa5a879 100644 --- a/graphql/documents/data/scrapers.graphql +++ b/graphql/documents/data/scrapers.graphql @@ -19,6 +19,10 @@ fragment ScrapedPerformerData on ScrapedPerformer { ...ScrapedSceneTagData } image + details + death_date + hair_color + weight } fragment ScrapedScenePerformerData on ScrapedScenePerformer { @@ -44,6 +48,10 @@ fragment ScrapedScenePerformerData on ScrapedScenePerformer { } remote_site_id images + details + death_date + hair_color + weight } fragment ScrapedMovieStudioData on ScrapedMovieStudio { @@ -189,3 +197,10 @@ fragment ScrapedStashBoxSceneData on ScrapedScene { ...ScrapedSceneMovieData } } + +fragment ScrapedStashBoxPerformerData on StashBoxPerformerQueryResult { + query + results { + ...ScrapedScenePerformerData + } +} diff --git a/graphql/documents/data/studio-slim.graphql b/graphql/documents/data/studio-slim.graphql index a247b4e34..f840ad2fb 100644 --- a/graphql/documents/data/studio-slim.graphql +++ b/graphql/documents/data/studio-slim.graphql @@ -9,4 +9,6 @@ fragment SlimStudioData on Studio { parent_studio { id } 
+ details + rating } diff --git a/graphql/documents/data/studio.graphql b/graphql/documents/data/studio.graphql index d2f60a44b..a9515c32d 100644 --- a/graphql/documents/data/studio.graphql +++ b/graphql/documents/data/studio.graphql @@ -10,6 +10,8 @@ fragment StudioData on Studio { url image_path scene_count + image_count + gallery_count } child_studios { id @@ -18,11 +20,17 @@ fragment StudioData on Studio { url image_path scene_count + image_count + gallery_count } image_path scene_count + image_count + gallery_count stash_ids { stash_id endpoint } + details + rating } diff --git a/graphql/documents/data/tag-slim.graphql b/graphql/documents/data/tag-slim.graphql new file mode 100644 index 000000000..61fd320e5 --- /dev/null +++ b/graphql/documents/data/tag-slim.graphql @@ -0,0 +1,5 @@ +fragment SlimTagData on Tag { + id + name + image_path +} diff --git a/graphql/documents/data/tag.graphql b/graphql/documents/data/tag.graphql index 3a0e84e1c..17d65b908 100644 --- a/graphql/documents/data/tag.graphql +++ b/graphql/documents/data/tag.graphql @@ -4,5 +4,7 @@ fragment TagData on Tag { image_path scene_count scene_marker_count + image_count + gallery_count performer_count } diff --git a/graphql/documents/mutations/config.graphql b/graphql/documents/mutations/config.graphql index 4e273f418..25bc7f01d 100644 --- a/graphql/documents/mutations/config.graphql +++ b/graphql/documents/mutations/config.graphql @@ -1,3 +1,11 @@ +mutation Setup($input: SetupInput!) { + setup(input: $input) +} + +mutation Migrate($input: MigrateInput!) { + migrate(input: $input) +} + mutation ConfigureGeneral($input: ConfigGeneralInput!) { configureGeneral(input: $input) { ...ConfigGeneralData @@ -8,4 +16,8 @@ mutation ConfigureInterface($input: ConfigInterfaceInput!) { configureInterface(input: $input) { ...ConfigInterfaceData } -} \ No newline at end of file +} + +mutation GenerateAPIKey($input: GenerateAPIKeyInput!) 
{ + generateAPIKey(input: $input) +} diff --git a/graphql/documents/mutations/performer.graphql b/graphql/documents/mutations/performer.graphql index e4ccf442e..0e2ad9fa3 100644 --- a/graphql/documents/mutations/performer.graphql +++ b/graphql/documents/mutations/performer.graphql @@ -1,47 +1,7 @@ mutation PerformerCreate( - $name: String!, - $url: String, - $gender: GenderEnum, - $birthdate: String, - $ethnicity: String, - $country: String, - $eye_color: String, - $height: String, - $measurements: String, - $fake_tits: String, - $career_length: String, - $tattoos: String, - $piercings: String, - $aliases: String, - $twitter: String, - $instagram: String, - $favorite: Boolean, - $tag_ids: [ID!], - $stash_ids: [StashIDInput!], - $image: String) { + $input: PerformerCreateInput!) { - performerCreate(input: { - name: $name, - url: $url, - gender: $gender, - birthdate: $birthdate, - ethnicity: $ethnicity, - country: $country, - eye_color: $eye_color, - height: $height, - measurements: $measurements, - fake_tits: $fake_tits, - career_length: $career_length, - tattoos: $tattoos, - piercings: $piercings, - aliases: $aliases, - twitter: $twitter, - instagram: $instagram, - favorite: $favorite, - tag_ids: $tag_ids, - stash_ids: $stash_ids, - image: $image - }) { + performerCreate(input: $input) { ...PerformerData } } diff --git a/graphql/documents/mutations/stash-box.graphql b/graphql/documents/mutations/stash-box.graphql index 24a9dc169..c20cdd25f 100644 --- a/graphql/documents/mutations/stash-box.graphql +++ b/graphql/documents/mutations/stash-box.graphql @@ -1,3 +1,7 @@ mutation SubmitStashBoxFingerprints($input: StashBoxFingerprintSubmissionInput!) { submitStashBoxFingerprints(input: $input) } + +mutation StashBoxBatchPerformerTag($input: StashBoxBatchPerformerTagInput!) 
{ + stashBoxBatchPerformerTag(input: $input) +} diff --git a/graphql/documents/mutations/studio.graphql b/graphql/documents/mutations/studio.graphql index d2d11d222..6d1944dc1 100644 --- a/graphql/documents/mutations/studio.graphql +++ b/graphql/documents/mutations/studio.graphql @@ -1,18 +1,10 @@ -mutation StudioCreate( - $name: String!, - $url: String, - $image: String, - $stash_ids: [StashIDInput!], - $parent_id: ID) { - - studioCreate(input: { name: $name, url: $url, image: $image, stash_ids: $stash_ids, parent_id: $parent_id }) { +mutation StudioCreate($input: StudioCreateInput!) { + studioCreate(input: $input) { ...StudioData } } -mutation StudioUpdate( - $input: StudioUpdateInput!) { - +mutation StudioUpdate($input: StudioUpdateInput!) { studioUpdate(input: $input) { ...StudioData } diff --git a/graphql/documents/queries/gallery.graphql b/graphql/documents/queries/gallery.graphql index c289d9758..bfc034de4 100644 --- a/graphql/documents/queries/gallery.graphql +++ b/graphql/documents/queries/gallery.graphql @@ -2,7 +2,7 @@ query FindGalleries($filter: FindFilterType, $gallery_filter: GalleryFilterType) findGalleries(gallery_filter: $gallery_filter, filter: $filter) { count galleries { - ...GallerySlimData + ...SlimGalleryData } } } diff --git a/graphql/documents/queries/scene.graphql b/graphql/documents/queries/scene.graphql index 87bb3fd7d..daeabbaaf 100644 --- a/graphql/documents/queries/scene.graphql +++ b/graphql/documents/queries/scene.graphql @@ -16,6 +16,12 @@ query FindScenesByPathRegex($filter: FindFilterType) { } } +query FindDuplicateScenes($distance: Int) { + findDuplicateScenes(distance: $distance) { + ...SlimSceneData + } +} + query FindScene($id: ID!, $checksum: String) { findScene(id: $id, checksum: $checksum) { ...SceneData diff --git a/graphql/documents/queries/scrapers/freeones.graphql b/graphql/documents/queries/scrapers/freeones.graphql index 27f6eb926..9f366786d 100644 --- a/graphql/documents/queries/scrapers/freeones.graphql +++ 
b/graphql/documents/queries/scrapers/freeones.graphql @@ -15,6 +15,10 @@ query ScrapeFreeones($performer_name: String!) { tattoos piercings aliases + details + death_date + hair_color + weight } } diff --git a/graphql/documents/queries/scrapers/scrapers.graphql b/graphql/documents/queries/scrapers/scrapers.graphql index bb9d99284..d5c54bac1 100644 --- a/graphql/documents/queries/scrapers/scrapers.graphql +++ b/graphql/documents/queries/scrapers/scrapers.graphql @@ -90,8 +90,14 @@ query ScrapeMovieURL($url: String!) { } } -query QueryStashBoxScene($input: StashBoxQueryInput!) { +query QueryStashBoxScene($input: StashBoxSceneQueryInput!) { queryStashBoxScene(input: $input) { ...ScrapedStashBoxSceneData } } + +query QueryStashBoxPerformer($input: StashBoxPerformerQueryInput!) { + queryStashBoxPerformer(input: $input) { + ...ScrapedStashBoxPerformerData + } +} diff --git a/graphql/documents/queries/settings/metadata.graphql b/graphql/documents/queries/settings/metadata.graphql index 376f8e4a0..05dd6d04c 100644 --- a/graphql/documents/queries/settings/metadata.graphql +++ b/graphql/documents/queries/settings/metadata.graphql @@ -5,3 +5,13 @@ query JobStatus { message } } + +query SystemStatus { + systemStatus { + databaseSchema + databasePath + appSchema + status + configPath + } +} diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql index 7286f4591..68bc12c00 100644 --- a/graphql/schema/schema.graphql +++ b/graphql/schema/schema.graphql @@ -9,6 +9,9 @@ type Query { findScenesByPathRegex(filter: FindFilterType): FindScenesResultType! + """ Returns any groups of scenes that are perceptual duplicates within the queried distance """ + findDuplicateScenes(distance: Int): [[Scene!]!]! + """Return valid stream paths""" sceneStreams(id: ID): [SceneStreamEndpoint!]! @@ -88,7 +91,8 @@ type Query { scrapeFreeonesPerformerList(query: String!): [String!]! """Query StashBox for scenes""" - queryStashBoxScene(input: StashBoxQueryInput!): [ScrapedScene!]! 
+ queryStashBoxScene(input: StashBoxSceneQueryInput!): [ScrapedScene!]! + queryStashBoxPerformer(input: StashBoxPerformerQueryInput!): [StashBoxPerformerQueryResult!]! # Plugins """List loaded plugins""" @@ -103,7 +107,7 @@ type Query { directory(path: String): Directory! # Metadata - + systemStatus: SystemStatus! jobStatus: MetadataUpdateStatus! # Get everything @@ -123,6 +127,9 @@ type Query { } type Mutation { + setup(input: SetupInput!): Boolean! + migrate(input: MigrateInput!): Boolean! + sceneUpdate(input: SceneUpdateInput!): Scene bulkSceneUpdate(input: BulkSceneUpdateInput!): [Scene!] sceneDestroy(input: SceneDestroyInput!): Boolean! @@ -190,6 +197,9 @@ type Mutation { configureGeneral(input: ConfigGeneralInput!): ConfigGeneralResult! configureInterface(input: ConfigInterfaceInput!): ConfigInterfaceResult! + """Generate and set (or clear) API key""" + generateAPIKey(input: GenerateAPIKeyInput!): String! + """Returns a link to download the result""" exportObjects(input: ExportObjectsInput!): String @@ -225,6 +235,9 @@ type Mutation { """Backup the database. Optionally returns a link to download the database file""" backupDatabase(input: BackupDatabaseInput!): String + + """Run batch performer tag task. Returns the job ID.""" + stashBoxBatchPerformerTag(input: StashBoxBatchPerformerTagInput!): String! } type Subscription { diff --git a/graphql/schema/types/config.graphql b/graphql/schema/types/config.graphql index 24b243138..fd13b7419 100644 --- a/graphql/schema/types/config.graphql +++ b/graphql/schema/types/config.graphql @@ -1,3 +1,13 @@ +input SetupInput { + """Empty to indicate $HOME/.stash/config.yml default""" + configLocation: String! + stashes: [StashConfigInput!]! + """Empty to indicate default""" + databaseFile: String! + """Empty to indicate default""" + generatedLocation: String! 
+} + enum StreamingResolutionEnum { "240p", LOW "480p", STANDARD @@ -120,6 +130,8 @@ type ConfigGeneralResult { maxTranscodeSize: StreamingResolutionEnum """Max streaming transcode size""" maxStreamingTranscodeSize: StreamingResolutionEnum + """API Key""" + apiKey: String! """Username""" username: String! """Password""" @@ -176,6 +188,8 @@ input ConfigInterfaceInput { cssEnabled: Boolean """Interface language""" language: String + """Slideshow Delay""" + slideshowDelay: Int } type ConfigInterfaceResult { @@ -198,6 +212,8 @@ type ConfigInterfaceResult { cssEnabled: Boolean """Interface language""" language: String + """Slideshow Delay""" + slideshowDelay: Int } """All configuration settings""" @@ -225,3 +241,7 @@ type StashConfig { excludeVideo: Boolean! excludeImage: Boolean! } + +input GenerateAPIKeyInput { + clear: Boolean +} diff --git a/graphql/schema/types/filters.graphql b/graphql/schema/types/filters.graphql index 750cb6c89..bd6703087 100644 --- a/graphql/schema/types/filters.graphql +++ b/graphql/schema/types/filters.graphql @@ -6,7 +6,7 @@ enum SortDirectionEnum { input FindFilterType { q: String page: Int - """use per_page = 0 to indicate all results. Defaults to 25.""" + """use per_page = -1 to indicate all results. 
Defaults to 25.""" per_page: Int sort: String direction: SortDirectionEnum @@ -47,7 +47,7 @@ input PerformerFilterType { measurements: StringCriterionInput """Filter by fake tits value""" fake_tits: StringCriterionInput - """Filter by career length""" + """Filter by career length""" career_length: StringCriterionInput """Filter by tattoos""" tattoos: StringCriterionInput @@ -61,8 +61,26 @@ input PerformerFilterType { is_missing: String """Filter to only include performers with these tags""" tags: MultiCriterionInput + """Filter by tag count""" + tag_count: IntCriterionInput + """Filter by scene count""" + scene_count: IntCriterionInput + """Filter by image count""" + image_count: IntCriterionInput + """Filter by gallery count""" + gallery_count: IntCriterionInput """Filter by StashID""" - stash_id: String + stash_id: StringCriterionInput + """Filter by rating""" + rating: IntCriterionInput + """Filter by url""" + url: StringCriterionInput + """Filter by hair color""" + hair_color: StringCriterionInput + """Filter by weight""" + weight: IntCriterionInput + """Filter by death year""" + death_year: IntCriterionInput } input SceneMarkerFilterType { @@ -80,7 +98,7 @@ input SceneFilterType { AND: SceneFilterType OR: SceneFilterType NOT: SceneFilterType - + """Filter by path""" path: StringCriterionInput """Filter by rating""" @@ -103,12 +121,18 @@ input SceneFilterType { movies: MultiCriterionInput """Filter to only include scenes with these tags""" tags: MultiCriterionInput + """Filter by tag count""" + tag_count: IntCriterionInput """Filter to only include scenes with performers with these tags""" performer_tags: MultiCriterionInput """Filter to only include scenes with these performers""" performers: MultiCriterionInput + """Filter by performer count""" + performer_count: IntCriterionInput """Filter by StashID""" - stash_id: String + stash_id: StringCriterionInput + """Filter by url""" + url: StringCriterionInput } input MovieFilterType { @@ -116,18 +140,34 @@ input 
MovieFilterType { studios: MultiCriterionInput """Filter to only include movies missing this property""" is_missing: String + """Filter by url""" + url: StringCriterionInput } input StudioFilterType { """Filter to only include studios with this parent studio""" parents: MultiCriterionInput """Filter by StashID""" - stash_id: String + stash_id: StringCriterionInput """Filter to only include studios missing this property""" is_missing: String + """Filter by rating""" + rating: IntCriterionInput + """Filter by scene count""" + scene_count: IntCriterionInput + """Filter by image count""" + image_count: IntCriterionInput + """Filter by gallery count""" + gallery_count: IntCriterionInput + """Filter by url""" + url: StringCriterionInput } input GalleryFilterType { + AND: GalleryFilterType + OR: GalleryFilterType + NOT: GalleryFilterType + """Filter by path""" path: StringCriterionInput """Filter to only include galleries missing this property""" @@ -144,12 +184,18 @@ input GalleryFilterType { studios: MultiCriterionInput """Filter to only include galleries with these tags""" tags: MultiCriterionInput + """Filter by tag count""" + tag_count: IntCriterionInput """Filter to only include galleries with performers with these tags""" performer_tags: MultiCriterionInput """Filter to only include galleries with these performers""" performers: MultiCriterionInput + """Filter by performer count""" + performer_count: IntCriterionInput """Filter by number of images in this gallery""" image_count: IntCriterionInput + """Filter by url""" + url: StringCriterionInput } input TagFilterType { @@ -177,6 +223,10 @@ input TagFilterType { } input ImageFilterType { + AND: ImageFilterType + OR: ImageFilterType + NOT: ImageFilterType + """Filter by path""" path: StringCriterionInput """Filter by rating""" @@ -193,10 +243,14 @@ input ImageFilterType { studios: MultiCriterionInput """Filter to only include images with these tags""" tags: MultiCriterionInput + """Filter by tag count""" + tag_count: 
IntCriterionInput """Filter to only include images with performers with these tags""" performer_tags: MultiCriterionInput """Filter to only include images with these performers""" performers: MultiCriterionInput + """Filter by performer count""" + performer_count: IntCriterionInput """Filter to only include images with these galleries""" galleries: MultiCriterionInput } diff --git a/graphql/schema/types/metadata.graphql b/graphql/schema/types/metadata.graphql index f83f3ad78..6c492fdeb 100644 --- a/graphql/schema/types/metadata.graphql +++ b/graphql/schema/types/metadata.graphql @@ -7,6 +7,7 @@ input GenerateMetadataInput { previewOptions: GeneratePreviewOptionsInput markers: Boolean! transcodes: Boolean! + phashes: Boolean! """scene ids to generate for""" sceneIDs: [ID!] @@ -42,6 +43,8 @@ input ScanMetadataInput { scanGenerateImagePreviews: Boolean """Generate sprites during scan""" scanGenerateSprites: Boolean + """Generate phashes during scan""" + scanGeneratePhashes: Boolean } input CleanMetadataInput { @@ -103,3 +106,21 @@ input ImportObjectsInput { input BackupDatabaseInput { download: Boolean } + +enum SystemStatusEnum { + SETUP + NEEDS_MIGRATION + OK +} + +type SystemStatus { + databaseSchema: Int + databasePath: String + configPath: String + appSchema: Int! + status: SystemStatusEnum! +} + +input MigrateInput { + backupPath: String! 
+} diff --git a/graphql/schema/types/movie.graphql b/graphql/schema/types/movie.graphql index 0b41af0c8..cc25b9e02 100644 --- a/graphql/schema/types/movie.graphql +++ b/graphql/schema/types/movie.graphql @@ -28,8 +28,9 @@ input MovieCreateInput { director: String synopsis: String url: String - """This should be base64 encoded""" + """This should be a URL or a base64 encoded data URL""" front_image: String + """This should be a URL or a base64 encoded data URL""" back_image: String } @@ -44,8 +45,9 @@ input MovieUpdateInput { director: String synopsis: String url: String - """This should be base64 encoded""" + """This should be a URL or a base64 encoded data URL""" front_image: String + """This should be a URL or a base64 encoded data URL""" back_image: String } diff --git a/graphql/schema/types/performer.graphql b/graphql/schema/types/performer.graphql index 1324f4f81..1e1fe3c03 100644 --- a/graphql/schema/types/performer.graphql +++ b/graphql/schema/types/performer.graphql @@ -31,8 +31,15 @@ type Performer { image_path: String # Resolver scene_count: Int # Resolver + image_count: Int # Resolver + gallery_count: Int # Resolver scenes: [Scene!]! stash_ids: [StashID!]! + rating: Int + details: String + death_date: String + hair_color: String + weight: Int } input PerformerCreateInput { @@ -54,9 +61,14 @@ input PerformerCreateInput { instagram: String favorite: Boolean tag_ids: [ID!] - """This should be base64 encoded""" + """This should be a URL or a base64 encoded data URL""" image: String stash_ids: [StashIDInput!] + rating: Int + details: String + death_date: String + hair_color: String + weight: Int } input PerformerUpdateInput { @@ -79,9 +91,14 @@ input PerformerUpdateInput { instagram: String favorite: Boolean tag_ids: [ID!] - """This should be base64 encoded""" + """This should be a URL or a base64 encoded data URL""" image: String stash_ids: [StashIDInput!] 
+ rating: Int + details: String + death_date: String + hair_color: String + weight: Int } input BulkPerformerUpdateInput { @@ -104,6 +121,11 @@ input BulkPerformerUpdateInput { instagram: String favorite: Boolean tag_ids: BulkUpdateIds + rating: Int + details: String + death_date: String + hair_color: String + weight: Int } input PerformerDestroyInput { diff --git a/graphql/schema/types/scene.graphql b/graphql/schema/types/scene.graphql index c72ae17ef..84d2fdf79 100644 --- a/graphql/schema/types/scene.graphql +++ b/graphql/schema/types/scene.graphql @@ -16,6 +16,7 @@ type ScenePathsType { webp: String # Resolver vtt: String # Resolver chapters_vtt: String # Resolver + sprite: String # Resolver } type SceneMovie { @@ -35,6 +36,7 @@ type Scene { organized: Boolean! o_counter: Int path: String! + phash: String file: SceneFileType! # Resolver paths: ScenePathsType! # Resolver @@ -67,7 +69,7 @@ input SceneUpdateInput { performer_ids: [ID!] movies: [SceneMovieInput!] tag_ids: [ID!] - """This should be base64 encoded""" + """This should be a URL or a base64 encoded data URL""" cover_image: String stash_ids: [StashIDInput!] 
} diff --git a/graphql/schema/types/scraped-movie.graphql b/graphql/schema/types/scraped-movie.graphql index ac221fb88..d1546dfb9 100644 --- a/graphql/schema/types/scraped-movie.graphql +++ b/graphql/schema/types/scraped-movie.graphql @@ -17,8 +17,9 @@ type ScrapedMovie { synopsis: String studio: ScrapedMovieStudio - """This should be base64 encoded""" + """This should be a base64 encoded data URL""" front_image: String + """This should be a base64 encoded data URL""" back_image: String } diff --git a/graphql/schema/types/scraped-performer.graphql b/graphql/schema/types/scraped-performer.graphql index eadc19160..db6c216a3 100644 --- a/graphql/schema/types/scraped-performer.graphql +++ b/graphql/schema/types/scraped-performer.graphql @@ -19,8 +19,12 @@ type ScrapedPerformer { # Should be ScrapedPerformerTag - but would be identical types tags: [ScrapedSceneTag!] - """This should be base64 encoded""" + """This should be a base64 encoded data URL""" image: String + details: String + death_date: String + hair_color: String + weight: String } input ScrapedPerformerInput { @@ -43,4 +47,8 @@ input ScrapedPerformerInput { # not including tags for the input # not including image for the input + details: String + death_date: String + hair_color: String + weight: String } \ No newline at end of file diff --git a/graphql/schema/types/scraper.graphql b/graphql/schema/types/scraper.graphql index 65a474600..860457bb0 100644 --- a/graphql/schema/types/scraper.graphql +++ b/graphql/schema/types/scraper.graphql @@ -49,6 +49,10 @@ type ScrapedScenePerformer { remote_site_id: String images: [String!] + details: String + death_date: String + hair_color: String + weight: String } type ScrapedSceneMovie { @@ -85,7 +89,7 @@ type ScrapedScene { url: String date: String - """This should be base64 encoded""" + """This should be a base64 encoded data URL""" image: String file: SceneFileType # Resolver @@ -111,7 +115,7 @@ type ScrapedGallery { performers: [ScrapedScenePerformer!] 
} -input StashBoxQueryInput { +input StashBoxSceneQueryInput { """Index of the configured stash-box instance to use""" stash_box_index: Int! """Instructs query by scene fingerprints""" @@ -120,8 +124,30 @@ input StashBoxQueryInput { q: String } +input StashBoxPerformerQueryInput { + """Index of the configured stash-box instance to use""" + stash_box_index: Int! + """Instructs query by scene fingerprints""" + performer_ids: [ID!] + """Query by query string""" + q: String +} + +type StashBoxPerformerQueryResult { + query: String! + results: [ScrapedScenePerformer!]! +} + type StashBoxFingerprint { algorithm: String! hash: String! duration: Int! } + +input StashBoxBatchPerformerTagInput { + endpoint: Int! + exclude_fields: [String!] + refresh: Boolean! + performer_ids: [ID!] + performer_names: [String!] +} diff --git a/graphql/schema/types/studio.graphql b/graphql/schema/types/studio.graphql index 051776e03..26d280f06 100644 --- a/graphql/schema/types/studio.graphql +++ b/graphql/schema/types/studio.graphql @@ -8,16 +8,22 @@ type Studio { image_path: String # Resolver scene_count: Int # Resolver + image_count: Int # Resolver + gallery_count: Int # Resolver stash_ids: [StashID!]! + rating: Int + details: String } input StudioCreateInput { name: String! url: String parent_id: ID - """This should be base64 encoded""" + """This should be a URL or a base64 encoded data URL""" image: String stash_ids: [StashIDInput!] + rating: Int + details: String } input StudioUpdateInput { @@ -25,9 +31,11 @@ input StudioUpdateInput { name: String url: String parent_id: ID, - """This should be base64 encoded""" + """This should be a URL or a base64 encoded data URL""" image: String stash_ids: [StashIDInput!] 
+ rating: Int + details: String } input StudioDestroyInput { diff --git a/graphql/schema/types/tag.graphql b/graphql/schema/types/tag.graphql index 2cb6765c8..1b855fd36 100644 --- a/graphql/schema/types/tag.graphql +++ b/graphql/schema/types/tag.graphql @@ -5,13 +5,15 @@ type Tag { image_path: String # Resolver scene_count: Int # Resolver scene_marker_count: Int # Resolver + image_count: Int # Resolver + gallery_count: Int # Resolver performer_count: Int } input TagCreateInput { name: String! - """This should be base64 encoded""" + """This should be a URL or a base64 encoded data URL""" image: String } @@ -19,7 +21,7 @@ input TagUpdateInput { id: ID! name: String! - """This should be base64 encoded""" + """This should be a URL or a base64 encoded data URL""" image: String } diff --git a/graphql/stash-box/query.graphql b/graphql/stash-box/query.graphql index 4424ce8db..ad1c937f5 100644 --- a/graphql/stash-box/query.graphql +++ b/graphql/stash-box/query.graphql @@ -75,6 +75,11 @@ fragment PerformerFragment on Performer { piercings { ...BodyModificationFragment } + details + death_date { + ...FuzzyDateFragment + } + weight } fragment PerformerAppearanceFragment on PerformerAppearance { @@ -134,6 +139,18 @@ query SearchScene($term: String!) { } } +query SearchPerformer($term: String!) { + searchPerformer(term: $term) { + ...PerformerFragment + } +} + +query FindPerformerByID($id: ID!) { + findPerformer(id: $id) { + ...PerformerFragment + } +} + mutation SubmitFingerprint($input: FingerprintSubmission!) 
{ submitFingerprint(input: $input) } diff --git a/main.go b/main.go index e54d5cad7..bc2a83c6f 100644 --- a/main.go +++ b/main.go @@ -3,9 +3,7 @@ package main import ( "github.com/stashapp/stash/pkg/api" - "github.com/stashapp/stash/pkg/database" "github.com/stashapp/stash/pkg/manager" - "github.com/stashapp/stash/pkg/manager/config" _ "github.com/golang-migrate/migrate/v4/database/sqlite3" _ "github.com/golang-migrate/migrate/v4/source/file" @@ -13,12 +11,6 @@ import ( func main() { manager.Initialize() - - // perform the post-migration for new databases - if database.Initialize(config.GetDatabasePath()) { - manager.GetInstance().PostMigrate() - } - api.Start() blockForever() } diff --git a/pkg/api/migrate.go b/pkg/api/migrate.go deleted file mode 100644 index 4a7bcddf8..000000000 --- a/pkg/api/migrate.go +++ /dev/null @@ -1,96 +0,0 @@ -package api - -import ( - "fmt" - "html/template" - "net/http" - "os" - - "github.com/stashapp/stash/pkg/database" - "github.com/stashapp/stash/pkg/logger" - "github.com/stashapp/stash/pkg/manager" -) - -type migrateData struct { - ExistingVersion uint - MigrateVersion uint - BackupPath string -} - -func getMigrateData() migrateData { - return migrateData{ - ExistingVersion: database.Version(), - MigrateVersion: database.AppSchemaVersion(), - BackupPath: database.DatabaseBackupPath(), - } -} - -func getMigrateHandler(w http.ResponseWriter, r *http.Request) { - if !database.NeedsMigration() { - http.Redirect(w, r, "/", 301) - return - } - - data, _ := setupUIBox.Find("migrate.html") - templ, err := template.New("Migrate").Parse(string(data)) - if err != nil { - http.Error(w, fmt.Sprintf("error: %s", err), 500) - return - } - - err = templ.Execute(w, getMigrateData()) - if err != nil { - http.Error(w, fmt.Sprintf("error: %s", err), 500) - } -} - -func doMigrateHandler(w http.ResponseWriter, r *http.Request) { - err := r.ParseForm() - if err != nil { - http.Error(w, fmt.Sprintf("error: %s", err), 500) - } - - formBackupPath := 
r.Form.Get("backuppath") - - // always backup so that we can roll back to the previous version if - // migration fails - backupPath := formBackupPath - if formBackupPath == "" { - backupPath = database.DatabaseBackupPath() - } - - // perform database backup - if err = database.Backup(database.DB, backupPath); err != nil { - http.Error(w, fmt.Sprintf("error backing up database: %s", err), 500) - return - } - - err = database.RunMigrations() - if err != nil { - errStr := fmt.Sprintf("error performing migration: %s", err) - - // roll back to the backed up version - restoreErr := database.RestoreFromBackup(backupPath) - if restoreErr != nil { - errStr = fmt.Sprintf("ERROR: unable to restore database from backup after migration failure: %s\n%s", restoreErr.Error(), errStr) - } else { - errStr = "An error occurred migrating the database to the latest schema version. The backup database file was automatically renamed to restore the database.\n" + errStr - } - - http.Error(w, errStr, 500) - return - } - - // perform post-migration operations - manager.GetInstance().PostMigrate() - - // if no backup path was provided, then delete the created backup - if formBackupPath == "" { - err = os.Remove(backupPath) - if err != nil { - logger.Warnf("error removing unwanted database backup (%s): %s", backupPath, err.Error()) - } - } - - http.Redirect(w, r, "/", 301) -} diff --git a/pkg/api/resolver_model_movie.go b/pkg/api/resolver_model_movie.go index afd82ab8a..be105eb9e 100644 --- a/pkg/api/resolver_model_movie.go +++ b/pkg/api/resolver_model_movie.go @@ -89,6 +89,24 @@ func (r *movieResolver) FrontImagePath(ctx context.Context, obj *models.Movie) ( } func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (*string, error) { + // don't return any thing if there is no back image + var img []byte + if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { + var err error + img, err = repo.Movie().GetBackImage(obj.ID) + if err != nil { + return err + 
} + + return nil + }); err != nil { + return nil, err + } + + if img == nil { + return nil, nil + } + baseURL, _ := ctx.Value(BaseURLCtxKey).(string) backimagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj).GetMovieBackImageURL() return &backimagePath, nil diff --git a/pkg/api/resolver_model_performer.go b/pkg/api/resolver_model_performer.go index cef67c22a..a5f8e4811 100644 --- a/pkg/api/resolver_model_performer.go +++ b/pkg/api/resolver_model_performer.go @@ -4,6 +4,8 @@ import ( "context" "github.com/stashapp/stash/pkg/api/urlbuilders" + "github.com/stashapp/stash/pkg/gallery" + "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" ) @@ -161,6 +163,30 @@ func (r *performerResolver) SceneCount(ctx context.Context, obj *models.Performe return &res, nil } +func (r *performerResolver) ImageCount(ctx context.Context, obj *models.Performer) (ret *int, err error) { + var res int + if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { + res, err = image.CountByPerformerID(repo.Image(), obj.ID) + return err + }); err != nil { + return nil, err + } + + return &res, nil +} + +func (r *performerResolver) GalleryCount(ctx context.Context, obj *models.Performer) (ret *int, err error) { + var res int + if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { + res, err = gallery.CountByPerformerID(repo.Gallery(), obj.ID) + return err + }); err != nil { + return nil, err + } + + return &res, nil +} + func (r *performerResolver) Scenes(ctx context.Context, obj *models.Performer) (ret []*models.Scene, err error) { if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { ret, err = repo.Scene().FindByPerformerID(obj.ID) @@ -182,3 +208,40 @@ func (r *performerResolver) StashIds(ctx context.Context, obj *models.Performer) return ret, nil } + +func (r *performerResolver) Rating(ctx context.Context, obj *models.Performer) (*int, error) { + if obj.Rating.Valid { + rating := int(obj.Rating.Int64) + return 
&rating, nil + } + return nil, nil +} + +func (r *performerResolver) Details(ctx context.Context, obj *models.Performer) (*string, error) { + if obj.Details.Valid { + return &obj.Details.String, nil + } + return nil, nil +} + +func (r *performerResolver) DeathDate(ctx context.Context, obj *models.Performer) (*string, error) { + if obj.DeathDate.Valid { + return &obj.DeathDate.String, nil + } + return nil, nil +} + +func (r *performerResolver) HairColor(ctx context.Context, obj *models.Performer) (*string, error) { + if obj.HairColor.Valid { + return &obj.HairColor.String, nil + } + return nil, nil +} + +func (r *performerResolver) Weight(ctx context.Context, obj *models.Performer) (*int, error) { + if obj.Weight.Valid { + weight := int(obj.Weight.Int64) + return &weight, nil + } + return nil, nil +} diff --git a/pkg/api/resolver_model_scene.go b/pkg/api/resolver_model_scene.go index 960c561ff..f657d8371 100644 --- a/pkg/api/resolver_model_scene.go +++ b/pkg/api/resolver_model_scene.go @@ -4,6 +4,7 @@ import ( "context" "github.com/stashapp/stash/pkg/api/urlbuilders" + "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) @@ -78,11 +79,13 @@ func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.Sc func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.ScenePathsType, error) { baseURL, _ := ctx.Value(BaseURLCtxKey).(string) builder := urlbuilders.NewSceneURLBuilder(baseURL, obj.ID) + builder.APIKey = config.GetInstance().GetAPIKey() screenshotPath := builder.GetScreenshotURL(obj.UpdatedAt.Timestamp) previewPath := builder.GetStreamPreviewURL() streamPath := builder.GetStreamURL() webpPath := builder.GetStreamPreviewImageURL() vttPath := builder.GetSpriteVTTURL() + spritePath := builder.GetSpriteURL() chaptersVttPath := builder.GetChaptersVTTURL() return &models.ScenePathsType{ Screenshot: &screenshotPath, @@ -91,6 +94,7 @@ func (r *sceneResolver) 
Paths(ctx context.Context, obj *models.Scene) (*models.S Webp: &webpPath, Vtt: &vttPath, ChaptersVtt: &chaptersVttPath, + Sprite: &spritePath, }, nil } @@ -200,3 +204,11 @@ func (r *sceneResolver) StashIds(ctx context.Context, obj *models.Scene) (ret [] return ret, nil } + +func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string, error) { + if obj.Phash.Valid { + hexval := utils.PhashToString(obj.Phash.Int64) + return &hexval, nil + } + return nil, nil +} diff --git a/pkg/api/resolver_model_studio.go b/pkg/api/resolver_model_studio.go index 1f866b004..89d2c2bea 100644 --- a/pkg/api/resolver_model_studio.go +++ b/pkg/api/resolver_model_studio.go @@ -4,6 +4,8 @@ import ( "context" "github.com/stashapp/stash/pkg/api/urlbuilders" + "github.com/stashapp/stash/pkg/gallery" + "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" ) @@ -54,6 +56,30 @@ func (r *studioResolver) SceneCount(ctx context.Context, obj *models.Studio) (re return &res, err } +func (r *studioResolver) ImageCount(ctx context.Context, obj *models.Studio) (ret *int, err error) { + var res int + if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { + res, err = image.CountByStudioID(repo.Image(), obj.ID) + return err + }); err != nil { + return nil, err + } + + return &res, nil +} + +func (r *studioResolver) GalleryCount(ctx context.Context, obj *models.Studio) (ret *int, err error) { + var res int + if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { + res, err = gallery.CountByStudioID(repo.Gallery(), obj.ID) + return err + }); err != nil { + return nil, err + } + + return &res, nil +} + func (r *studioResolver) ParentStudio(ctx context.Context, obj *models.Studio) (ret *models.Studio, err error) { if !obj.ParentID.Valid { return nil, nil @@ -90,3 +116,18 @@ func (r *studioResolver) StashIds(ctx context.Context, obj *models.Studio) (ret return ret, nil } + +func (r *studioResolver) Rating(ctx context.Context, obj 
*models.Studio) (*int, error) { + if obj.Rating.Valid { + rating := int(obj.Rating.Int64) + return &rating, nil + } + return nil, nil +} + +func (r *studioResolver) Details(ctx context.Context, obj *models.Studio) (*string, error) { + if obj.Details.Valid { + return &obj.Details.String, nil + } + return nil, nil +} diff --git a/pkg/api/resolver_model_tag.go b/pkg/api/resolver_model_tag.go index 1cbb3acf3..a4fb2cc4e 100644 --- a/pkg/api/resolver_model_tag.go +++ b/pkg/api/resolver_model_tag.go @@ -4,6 +4,8 @@ import ( "context" "github.com/stashapp/stash/pkg/api/urlbuilders" + "github.com/stashapp/stash/pkg/gallery" + "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" ) @@ -31,6 +33,30 @@ func (r *tagResolver) SceneMarkerCount(ctx context.Context, obj *models.Tag) (re return &count, err } +func (r *tagResolver) ImageCount(ctx context.Context, obj *models.Tag) (ret *int, err error) { + var res int + if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { + res, err = image.CountByTagID(repo.Image(), obj.ID) + return err + }); err != nil { + return nil, err + } + + return &res, nil +} + +func (r *tagResolver) GalleryCount(ctx context.Context, obj *models.Tag) (ret *int, err error) { + var res int + if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { + res, err = gallery.CountByTagID(repo.Gallery(), obj.ID) + return err + }); err != nil { + return nil, err + } + + return &res, nil +} + func (r *tagResolver) PerformerCount(ctx context.Context, obj *models.Tag) (ret *int, err error) { var count int if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { diff --git a/pkg/api/resolver_mutation_configure.go b/pkg/api/resolver_mutation_configure.go index 34b416094..ea4ae082c 100644 --- a/pkg/api/resolver_mutation_configure.go +++ b/pkg/api/resolver_mutation_configure.go @@ -13,15 +13,37 @@ import ( "github.com/stashapp/stash/pkg/utils" ) +func (r *mutationResolver) Setup(ctx context.Context, input 
models.SetupInput) (bool, error) { + err := manager.GetInstance().Setup(input) + return err == nil, err +} + +func (r *mutationResolver) Migrate(ctx context.Context, input models.MigrateInput) (bool, error) { + err := manager.GetInstance().Migrate(input) + return err == nil, err +} + func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.ConfigGeneralInput) (*models.ConfigGeneralResult, error) { + c := config.GetInstance() + existingPaths := c.GetStashPaths() if len(input.Stashes) > 0 { for _, s := range input.Stashes { - exists, err := utils.DirExists(s.Path) - if !exists { - return makeConfigGeneralResult(), err + // Only validate existence of new paths + isNew := true + for _, path := range existingPaths { + if path.Path == s.Path { + isNew = false + break + } + } + if isNew { + exists, err := utils.DirExists(s.Path) + if !exists { + return makeConfigGeneralResult(), err + } } } - config.Set(config.Stash, input.Stashes) + c.Set(config.Stash, input.Stashes) } if input.DatabasePath != nil { @@ -29,138 +51,140 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co if ext != ".db" && ext != ".sqlite" && ext != ".sqlite3" { return makeConfigGeneralResult(), fmt.Errorf("invalid database path, use extension db, sqlite, or sqlite3") } - config.Set(config.Database, input.DatabasePath) + c.Set(config.Database, input.DatabasePath) } if input.GeneratedPath != nil { if err := utils.EnsureDir(*input.GeneratedPath); err != nil { return makeConfigGeneralResult(), err } - config.Set(config.Generated, input.GeneratedPath) + c.Set(config.Generated, input.GeneratedPath) } if input.CachePath != nil { - if err := utils.EnsureDir(*input.CachePath); err != nil { - return makeConfigGeneralResult(), err + if *input.CachePath != "" { + if err := utils.EnsureDir(*input.CachePath); err != nil { + return makeConfigGeneralResult(), err + } } - config.Set(config.Cache, input.CachePath) + c.Set(config.Cache, input.CachePath) } if 
!input.CalculateMd5 && input.VideoFileNamingAlgorithm == models.HashAlgorithmMd5 { return makeConfigGeneralResult(), errors.New("calculateMD5 must be true if using MD5") } - if input.VideoFileNamingAlgorithm != config.GetVideoFileNamingAlgorithm() { + if input.VideoFileNamingAlgorithm != c.GetVideoFileNamingAlgorithm() { // validate changing VideoFileNamingAlgorithm if err := manager.ValidateVideoFileNamingAlgorithm(r.txnManager, input.VideoFileNamingAlgorithm); err != nil { return makeConfigGeneralResult(), err } - config.Set(config.VideoFileNamingAlgorithm, input.VideoFileNamingAlgorithm) + c.Set(config.VideoFileNamingAlgorithm, input.VideoFileNamingAlgorithm) } - config.Set(config.CalculateMD5, input.CalculateMd5) + c.Set(config.CalculateMD5, input.CalculateMd5) if input.ParallelTasks != nil { - config.Set(config.ParallelTasks, *input.ParallelTasks) + c.Set(config.ParallelTasks, *input.ParallelTasks) } if input.PreviewSegments != nil { - config.Set(config.PreviewSegments, *input.PreviewSegments) + c.Set(config.PreviewSegments, *input.PreviewSegments) } if input.PreviewSegmentDuration != nil { - config.Set(config.PreviewSegmentDuration, *input.PreviewSegmentDuration) + c.Set(config.PreviewSegmentDuration, *input.PreviewSegmentDuration) } if input.PreviewExcludeStart != nil { - config.Set(config.PreviewExcludeStart, *input.PreviewExcludeStart) + c.Set(config.PreviewExcludeStart, *input.PreviewExcludeStart) } if input.PreviewExcludeEnd != nil { - config.Set(config.PreviewExcludeEnd, *input.PreviewExcludeEnd) + c.Set(config.PreviewExcludeEnd, *input.PreviewExcludeEnd) } if input.PreviewPreset != nil { - config.Set(config.PreviewPreset, input.PreviewPreset.String()) + c.Set(config.PreviewPreset, input.PreviewPreset.String()) } if input.MaxTranscodeSize != nil { - config.Set(config.MaxTranscodeSize, input.MaxTranscodeSize.String()) + c.Set(config.MaxTranscodeSize, input.MaxTranscodeSize.String()) } if input.MaxStreamingTranscodeSize != nil { - 
config.Set(config.MaxStreamingTranscodeSize, input.MaxStreamingTranscodeSize.String()) + c.Set(config.MaxStreamingTranscodeSize, input.MaxStreamingTranscodeSize.String()) } if input.Username != nil { - config.Set(config.Username, input.Username) + c.Set(config.Username, input.Username) } if input.Password != nil { // bit of a hack - check if the passed in password is the same as the stored hash // and only set if they are different - currentPWHash := config.GetPasswordHash() + currentPWHash := c.GetPasswordHash() if *input.Password != currentPWHash { - config.SetPassword(*input.Password) + c.SetPassword(*input.Password) } } if input.MaxSessionAge != nil { - config.Set(config.MaxSessionAge, *input.MaxSessionAge) + c.Set(config.MaxSessionAge, *input.MaxSessionAge) } if input.LogFile != nil { - config.Set(config.LogFile, input.LogFile) + c.Set(config.LogFile, input.LogFile) } - config.Set(config.LogOut, input.LogOut) - config.Set(config.LogAccess, input.LogAccess) + c.Set(config.LogOut, input.LogOut) + c.Set(config.LogAccess, input.LogAccess) - if input.LogLevel != config.GetLogLevel() { - config.Set(config.LogLevel, input.LogLevel) + if input.LogLevel != c.GetLogLevel() { + c.Set(config.LogLevel, input.LogLevel) logger.SetLogLevel(input.LogLevel) } if input.Excludes != nil { - config.Set(config.Exclude, input.Excludes) + c.Set(config.Exclude, input.Excludes) } if input.ImageExcludes != nil { - config.Set(config.ImageExclude, input.ImageExcludes) + c.Set(config.ImageExclude, input.ImageExcludes) } if input.VideoExtensions != nil { - config.Set(config.VideoExtensions, input.VideoExtensions) + c.Set(config.VideoExtensions, input.VideoExtensions) } if input.ImageExtensions != nil { - config.Set(config.ImageExtensions, input.ImageExtensions) + c.Set(config.ImageExtensions, input.ImageExtensions) } if input.GalleryExtensions != nil { - config.Set(config.GalleryExtensions, input.GalleryExtensions) + c.Set(config.GalleryExtensions, input.GalleryExtensions) } - 
config.Set(config.CreateGalleriesFromFolders, input.CreateGalleriesFromFolders) + c.Set(config.CreateGalleriesFromFolders, input.CreateGalleriesFromFolders) refreshScraperCache := false if input.ScraperUserAgent != nil { - config.Set(config.ScraperUserAgent, input.ScraperUserAgent) + c.Set(config.ScraperUserAgent, input.ScraperUserAgent) refreshScraperCache = true } if input.ScraperCDPPath != nil { - config.Set(config.ScraperCDPPath, input.ScraperCDPPath) + c.Set(config.ScraperCDPPath, input.ScraperCDPPath) refreshScraperCache = true } - config.Set(config.ScraperCertCheck, input.ScraperCertCheck) + c.Set(config.ScraperCertCheck, input.ScraperCertCheck) if input.StashBoxes != nil { - if err := config.ValidateStashBoxes(input.StashBoxes); err != nil { + if err := c.ValidateStashBoxes(input.StashBoxes); err != nil { return nil, err } - config.Set(config.StashBoxes, input.StashBoxes) + c.Set(config.StashBoxes, input.StashBoxes) } - if err := config.Write(); err != nil { + if err := c.Write(); err != nil { return makeConfigGeneralResult(), err } @@ -173,36 +197,41 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co } func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.ConfigInterfaceInput) (*models.ConfigInterfaceResult, error) { + c := config.GetInstance() if input.MenuItems != nil { - config.Set(config.MenuItems, input.MenuItems) + c.Set(config.MenuItems, input.MenuItems) } if input.SoundOnPreview != nil { - config.Set(config.SoundOnPreview, *input.SoundOnPreview) + c.Set(config.SoundOnPreview, *input.SoundOnPreview) } if input.WallShowTitle != nil { - config.Set(config.WallShowTitle, *input.WallShowTitle) + c.Set(config.WallShowTitle, *input.WallShowTitle) } if input.WallPlayback != nil { - config.Set(config.WallPlayback, *input.WallPlayback) + c.Set(config.WallPlayback, *input.WallPlayback) } if input.MaximumLoopDuration != nil { - config.Set(config.MaximumLoopDuration, *input.MaximumLoopDuration) + 
c.Set(config.MaximumLoopDuration, *input.MaximumLoopDuration) } if input.AutostartVideo != nil { - config.Set(config.AutostartVideo, *input.AutostartVideo) + c.Set(config.AutostartVideo, *input.AutostartVideo) } if input.ShowStudioAsText != nil { - config.Set(config.ShowStudioAsText, *input.ShowStudioAsText) + c.Set(config.ShowStudioAsText, *input.ShowStudioAsText) } if input.Language != nil { - config.Set(config.Language, *input.Language) + c.Set(config.Language, *input.Language) + } + + if input.SlideshowDelay != nil { + c.Set(config.SlideshowDelay, *input.SlideshowDelay) } css := "" @@ -211,15 +240,38 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models. css = *input.CSS } - config.SetCSS(css) + c.SetCSS(css) if input.CSSEnabled != nil { - config.Set(config.CSSEnabled, *input.CSSEnabled) + c.Set(config.CSSEnabled, *input.CSSEnabled) } - if err := config.Write(); err != nil { + if err := c.Write(); err != nil { return makeConfigInterfaceResult(), err } return makeConfigInterfaceResult(), nil } + +func (r *mutationResolver) GenerateAPIKey(ctx context.Context, input models.GenerateAPIKeyInput) (string, error) { + c := config.GetInstance() + + var newAPIKey string + if input.Clear == nil || !*input.Clear { + username := c.GetUsername() + if username != "" { + var err error + newAPIKey, err = manager.GenerateAPIKey(username) + if err != nil { + return "", err + } + } + } + + c.Set(config.ApiKey, newAPIKey) + if err := c.Write(); err != nil { + return newAPIKey, err + } + + return newAPIKey, nil +} diff --git a/pkg/api/resolver_mutation_metadata.go b/pkg/api/resolver_mutation_metadata.go index 19ce4f279..82f678c5c 100644 --- a/pkg/api/resolver_mutation_metadata.go +++ b/pkg/api/resolver_mutation_metadata.go @@ -20,12 +20,15 @@ func (r *mutationResolver) MetadataScan(ctx context.Context, input models.ScanMe } func (r *mutationResolver) MetadataImport(ctx context.Context) (string, error) { - manager.GetInstance().Import() + if err := 
manager.GetInstance().Import(); err != nil { + return "", err + } + return "todo", nil } func (r *mutationResolver) ImportObjects(ctx context.Context, input models.ImportObjectsInput) (string, error) { - t, err := manager.CreateImportTask(config.GetVideoFileNamingAlgorithm(), input) + t, err := manager.CreateImportTask(config.GetInstance().GetVideoFileNamingAlgorithm(), input) if err != nil { return "", err } @@ -39,12 +42,15 @@ func (r *mutationResolver) ImportObjects(ctx context.Context, input models.Impor } func (r *mutationResolver) MetadataExport(ctx context.Context) (string, error) { - manager.GetInstance().Export() + if err := manager.GetInstance().Export(); err != nil { + return "", err + } + return "todo", nil } func (r *mutationResolver) ExportObjects(ctx context.Context, input models.ExportObjectsInput) (*string, error) { - t := manager.CreateExportTask(config.GetVideoFileNamingAlgorithm(), input) + t := manager.CreateExportTask(config.GetInstance().GetVideoFileNamingAlgorithm(), input) wg, err := manager.GetInstance().RunSingleTask(t) if err != nil { return nil, err diff --git a/pkg/api/resolver_mutation_performer.go b/pkg/api/resolver_mutation_performer.go index 69eb5832c..60af02780 100644 --- a/pkg/api/resolver_mutation_performer.go +++ b/pkg/api/resolver_mutation_performer.go @@ -3,10 +3,12 @@ package api import ( "context" "database/sql" + "fmt" "strconv" "time" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/utils" ) @@ -83,6 +85,30 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per } else { newPerformer.Favorite = sql.NullBool{Bool: false, Valid: true} } + if input.Rating != nil { + newPerformer.Rating = sql.NullInt64{Int64: int64(*input.Rating), Valid: true} + } else { + newPerformer.Rating = sql.NullInt64{Valid: false} + } + if input.Details != nil { + newPerformer.Details = sql.NullString{String: *input.Details, Valid: true} + } + if 
input.DeathDate != nil { + newPerformer.DeathDate = models.SQLiteDate{String: *input.DeathDate, Valid: true} + } + if input.HairColor != nil { + newPerformer.HairColor = sql.NullString{String: *input.HairColor, Valid: true} + } + if input.Weight != nil { + weight := int64(*input.Weight) + newPerformer.Weight = sql.NullInt64{Int64: weight, Valid: true} + } + + if err := performer.ValidateDeathDate(nil, input.Birthdate, input.DeathDate); err != nil { + if err != nil { + return nil, err + } + } // Start the transaction and save the performer var performer *models.Performer @@ -177,33 +203,53 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per updatedPerformer.Twitter = translator.nullString(input.Twitter, "twitter") updatedPerformer.Instagram = translator.nullString(input.Instagram, "instagram") updatedPerformer.Favorite = translator.nullBool(input.Favorite, "favorite") + updatedPerformer.Rating = translator.nullInt64(input.Rating, "rating") + updatedPerformer.Details = translator.nullString(input.Details, "details") + updatedPerformer.DeathDate = translator.sqliteDate(input.DeathDate, "death_date") + updatedPerformer.HairColor = translator.nullString(input.HairColor, "hair_color") + updatedPerformer.Weight = translator.nullInt64(input.Weight, "weight") - // Start the transaction and save the performer - var performer *models.Performer + // Start the transaction and save the p + var p *models.Performer if err := r.withTxn(ctx, func(repo models.Repository) error { qb := repo.Performer() - var err error - performer, err = qb.Update(updatedPerformer) + // need to get existing performer + existing, err := qb.Find(updatedPerformer.ID) + if err != nil { + return err + } + + if existing == nil { + return fmt.Errorf("performer with id %d not found", updatedPerformer.ID) + } + + if err := performer.ValidateDeathDate(existing, input.Birthdate, input.DeathDate); err != nil { + if err != nil { + return err + } + } + + p, err = 
qb.Update(updatedPerformer) if err != nil { return err } // Save the tags if translator.hasField("tag_ids") { - if err := r.updatePerformerTags(qb, performer.ID, input.TagIds); err != nil { + if err := r.updatePerformerTags(qb, p.ID, input.TagIds); err != nil { return err } } // update image table if len(imageData) > 0 { - if err := qb.UpdateImage(performer.ID, imageData); err != nil { + if err := qb.UpdateImage(p.ID, imageData); err != nil { return err } } else if imageIncluded { // must be unsetting - if err := qb.DestroyImage(performer.ID); err != nil { + if err := qb.DestroyImage(p.ID); err != nil { return err } } @@ -221,7 +267,7 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per return nil, err } - return performer, nil + return p, nil } func (r *mutationResolver) updatePerformerTags(qb models.PerformerReaderWriter, performerID int, tagsIDs []string) error { @@ -264,6 +310,11 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input models updatedPerformer.Twitter = translator.nullString(input.Twitter, "twitter") updatedPerformer.Instagram = translator.nullString(input.Instagram, "instagram") updatedPerformer.Favorite = translator.nullBool(input.Favorite, "favorite") + updatedPerformer.Rating = translator.nullInt64(input.Rating, "rating") + updatedPerformer.Details = translator.nullString(input.Details, "details") + updatedPerformer.DeathDate = translator.sqliteDate(input.DeathDate, "death_date") + updatedPerformer.HairColor = translator.nullString(input.HairColor, "hair_color") + updatedPerformer.Weight = translator.nullInt64(input.Weight, "weight") if translator.hasField("gender") { if input.Gender != nil { @@ -282,6 +333,20 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input models for _, performerID := range performerIDs { updatedPerformer.ID = performerID + // need to get existing performer + existing, err := qb.Find(performerID) + if err != nil { + return err + } + + if 
existing == nil { + return fmt.Errorf("performer with id %d not found", performerID) + } + + if err := performer.ValidateDeathDate(existing, input.Birthdate, input.DeathDate); err != nil { + return err + } + performer, err := qb.Update(updatedPerformer) if err != nil { return err diff --git a/pkg/api/resolver_mutation_plugin.go b/pkg/api/resolver_mutation_plugin.go index c5a83e8fc..3e65366c2 100644 --- a/pkg/api/resolver_mutation_plugin.go +++ b/pkg/api/resolver_mutation_plugin.go @@ -23,6 +23,7 @@ func (r *mutationResolver) RunPluginTask(ctx context.Context, pluginID string, t } } + config := config.GetInstance() serverConnection := common.StashServerConnection{ Scheme: "http", Port: config.GetPort(), diff --git a/pkg/api/resolver_mutation_scene.go b/pkg/api/resolver_mutation_scene.go index ba73aecb1..2a187bf1b 100644 --- a/pkg/api/resolver_mutation_scene.go +++ b/pkg/api/resolver_mutation_scene.go @@ -139,7 +139,7 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, translator // only update the cover image if provided and everything else was successful if coverImageData != nil { - err = manager.SetSceneScreenshot(scene.GetHash(config.GetVideoFileNamingAlgorithm()), coverImageData) + err = manager.SetSceneScreenshot(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()), coverImageData) if err != nil { return nil, err } @@ -384,7 +384,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD // if delete generated is true, then delete the generated files // for the scene if input.DeleteGenerated != nil && *input.DeleteGenerated { - manager.DeleteGeneratedSceneFiles(scene, config.GetVideoFileNamingAlgorithm()) + manager.DeleteGeneratedSceneFiles(scene, config.GetInstance().GetVideoFileNamingAlgorithm()) } // if delete file is true, then delete the file as well @@ -426,7 +426,7 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene f() } - fileNamingAlgo := 
config.GetVideoFileNamingAlgorithm() + fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm() for _, scene := range scenes { // if delete generated is true, then delete the generated files // for the scene @@ -586,7 +586,7 @@ func (r *mutationResolver) changeMarker(ctx context.Context, changeType int, cha // remove the marker preview if the timestamp was changed if scene != nil && existingMarker != nil && existingMarker.Seconds != changedMarker.Seconds { seconds := int(existingMarker.Seconds) - manager.DeleteSceneMarkerFiles(scene, seconds, config.GetVideoFileNamingAlgorithm()) + manager.DeleteSceneMarkerFiles(scene, seconds, config.GetInstance().GetVideoFileNamingAlgorithm()) } return sceneMarker, nil diff --git a/pkg/api/resolver_mutation_stash_box.go b/pkg/api/resolver_mutation_stash_box.go index 303b66dae..4161ec91c 100644 --- a/pkg/api/resolver_mutation_stash_box.go +++ b/pkg/api/resolver_mutation_stash_box.go @@ -4,13 +4,14 @@ import ( "context" "fmt" + "github.com/stashapp/stash/pkg/manager" "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scraper/stashbox" ) func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input models.StashBoxFingerprintSubmissionInput) (bool, error) { - boxes := config.GetStashBoxes() + boxes := config.GetInstance().GetStashBoxes() if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) { return false, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex) @@ -20,3 +21,8 @@ func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input return client.SubmitStashBoxFingerprints(input.SceneIds, boxes[input.StashBoxIndex].Endpoint) } + +func (r *mutationResolver) StashBoxBatchPerformerTag(ctx context.Context, input models.StashBoxBatchPerformerTagInput) (string, error) { + manager.GetInstance().StashBoxBatchPerformerTag(input) + return "todo", nil +} diff --git a/pkg/api/resolver_mutation_studio.go 
b/pkg/api/resolver_mutation_studio.go index 82be5d1e9..7b06485b4 100644 --- a/pkg/api/resolver_mutation_studio.go +++ b/pkg/api/resolver_mutation_studio.go @@ -42,6 +42,15 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio newStudio.ParentID = sql.NullInt64{Int64: parentID, Valid: true} } + if input.Rating != nil { + newStudio.Rating = sql.NullInt64{Int64: int64(*input.Rating), Valid: true} + } else { + newStudio.Rating = sql.NullInt64{Valid: false} + } + if input.Details != nil { + newStudio.Details = sql.NullString{String: *input.Details, Valid: true} + } + // Start the transaction and save the studio var studio *models.Studio if err := r.withTxn(ctx, func(repo models.Repository) error { @@ -109,7 +118,9 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio } updatedStudio.URL = translator.nullString(input.URL, "url") + updatedStudio.Details = translator.nullString(input.Details, "details") updatedStudio.ParentID = translator.nullInt64FromString(input.ParentID, "parent_id") + updatedStudio.Rating = translator.nullInt64(input.Rating, "rating") // Start the transaction and save the studio var studio *models.Studio diff --git a/pkg/api/resolver_query_configuration.go b/pkg/api/resolver_query_configuration.go index cd4bbaff4..1d47acb38 100644 --- a/pkg/api/resolver_query_configuration.go +++ b/pkg/api/resolver_query_configuration.go @@ -34,6 +34,7 @@ func makeConfigResult() *models.ConfigResult { } func makeConfigGeneralResult() *models.ConfigGeneralResult { + config := config.GetInstance() logFile := config.GetLogFile() maxTranscodeSize := config.GetMaxTranscodeSize() @@ -59,6 +60,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult { PreviewPreset: config.GetPreviewPreset(), MaxTranscodeSize: &maxTranscodeSize, MaxStreamingTranscodeSize: &maxStreamingTranscodeSize, + APIKey: config.GetAPIKey(), Username: config.GetUsername(), Password: config.GetPasswordHash(), MaxSessionAge: 
config.GetMaxSessionAge(), @@ -80,6 +82,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult { } func makeConfigInterfaceResult() *models.ConfigInterfaceResult { + config := config.GetInstance() menuItems := config.GetMenuItems() soundOnPreview := config.GetSoundOnPreview() wallShowTitle := config.GetWallShowTitle() @@ -90,6 +93,7 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult { css := config.GetCSS() cssEnabled := config.GetCSSEnabled() language := config.GetLanguage() + slideshowDelay := config.GetSlideshowDelay() return &models.ConfigInterfaceResult{ MenuItems: menuItems, @@ -102,5 +106,6 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult { CSS: &css, CSSEnabled: &cssEnabled, Language: &language, + SlideshowDelay: &slideshowDelay, } } diff --git a/pkg/api/resolver_query_find_scene.go b/pkg/api/resolver_query_find_scene.go index ae8eec249..be250e101 100644 --- a/pkg/api/resolver_query_find_scene.go +++ b/pkg/api/resolver_query_find_scene.go @@ -59,12 +59,25 @@ func (r *queryResolver) FindSceneByHash(ctx context.Context, input models.SceneH return scene, nil } -func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.SceneFilterType, sceneIds []int, filter *models.FindFilterType) (ret *models.FindScenesResultType, err error) { +func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.SceneFilterType, sceneIDs []int, filter *models.FindFilterType) (ret *models.FindScenesResultType, err error) { if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { - scenes, total, err := repo.Scene().Query(sceneFilter, filter) + var scenes []*models.Scene + var total int + var err error + + if len(sceneIDs) > 0 { + scenes, err = repo.Scene().FindMany(sceneIDs) + if err == nil { + total = len(scenes) + } + } else { + scenes, total, err = repo.Scene().Query(sceneFilter, filter) + } + if err != nil { return err } + ret = &models.FindScenesResultType{ Count: total, Scenes: scenes, @@ 
-138,3 +151,18 @@ func (r *queryResolver) ParseSceneFilenames(ctx context.Context, filter *models. return ret, nil } + +func (r *queryResolver) FindDuplicateScenes(ctx context.Context, distance *int) (ret [][]*models.Scene, err error) { + dist := 0 + if distance != nil { + dist = *distance + } + if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { + ret, err = repo.Scene().FindDuplicates(dist) + return err + }); err != nil { + return nil, err + } + + return ret, nil +} diff --git a/pkg/api/resolver_query_metadata.go b/pkg/api/resolver_query_metadata.go index 862d91eae..cb12d96e7 100644 --- a/pkg/api/resolver_query_metadata.go +++ b/pkg/api/resolver_query_metadata.go @@ -17,3 +17,7 @@ func (r *queryResolver) JobStatus(ctx context.Context) (*models.MetadataUpdateSt return &ret, nil } + +func (r *queryResolver) SystemStatus(ctx context.Context) (*models.SystemStatus, error) { + return manager.GetInstance().GetSystemStatus(), nil +} diff --git a/pkg/api/resolver_query_scene.go b/pkg/api/resolver_query_scene.go index 64110e70d..236913689 100644 --- a/pkg/api/resolver_query_scene.go +++ b/pkg/api/resolver_query_scene.go @@ -30,5 +30,5 @@ func (r *queryResolver) SceneStreams(ctx context.Context, id *string) ([]*models baseURL, _ := ctx.Value(BaseURLCtxKey).(string) builder := urlbuilders.NewSceneURLBuilder(baseURL, scene.ID) - return manager.GetSceneStreamPaths(scene, builder.GetStreamURL(), config.GetMaxStreamingTranscodeSize()) + return manager.GetSceneStreamPaths(scene, builder.GetStreamURL(), config.GetInstance().GetMaxStreamingTranscodeSize()) } diff --git a/pkg/api/resolver_query_scraper.go b/pkg/api/resolver_query_scraper.go index 7a197a025..301870351 100644 --- a/pkg/api/resolver_query_scraper.go +++ b/pkg/api/resolver_query_scraper.go @@ -88,8 +88,8 @@ func (r *queryResolver) ScrapeMovieURL(ctx context.Context, url string) (*models return manager.GetInstance().ScraperCache.ScrapeMovieURL(url) } -func (r *queryResolver) QueryStashBoxScene(ctx 
context.Context, input models.StashBoxQueryInput) ([]*models.ScrapedScene, error) { - boxes := config.GetStashBoxes() +func (r *queryResolver) QueryStashBoxScene(ctx context.Context, input models.StashBoxSceneQueryInput) ([]*models.ScrapedScene, error) { + boxes := config.GetInstance().GetStashBoxes() if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) { return nil, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex) @@ -107,3 +107,23 @@ func (r *queryResolver) QueryStashBoxScene(ctx context.Context, input models.Sta return nil, nil } + +func (r *queryResolver) QueryStashBoxPerformer(ctx context.Context, input models.StashBoxPerformerQueryInput) ([]*models.StashBoxPerformerQueryResult, error) { + boxes := config.GetInstance().GetStashBoxes() + + if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) { + return nil, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex) + } + + client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager) + + if len(input.PerformerIds) > 0 { + return client.FindStashBoxPerformersByNames(input.PerformerIds) + } + + if input.Q != nil { + return client.QueryStashBoxPerformer(*input.Q) + } + + return nil, nil +} diff --git a/pkg/api/routes_scene.go b/pkg/api/routes_scene.go index 328983be9..c83876e8a 100644 --- a/pkg/api/routes_scene.go +++ b/pkg/api/routes_scene.go @@ -69,7 +69,7 @@ func getSceneFileContainer(scene *models.Scene) ffmpeg.Container { func (rs sceneRoutes) StreamDirect(w http.ResponseWriter, r *http.Request) { scene := r.Context().Value(sceneKey).(*models.Scene) - fileNamingAlgo := config.GetVideoFileNamingAlgorithm() + fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm() filepath := manager.GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.GetHash(fileNamingAlgo)) manager.RegisterStream(filepath, &w) @@ -158,7 +158,7 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, vi options := 
ffmpeg.GetTranscodeStreamOptions(*videoFile, videoCodec, audioCodec) options.StartTime = startTime - options.MaxTranscodeSize = config.GetMaxStreamingTranscodeSize() + options.MaxTranscodeSize = config.GetInstance().GetMaxStreamingTranscodeSize() if requestedSize != "" { options.MaxTranscodeSize = models.StreamingResolutionEnum(requestedSize) } @@ -178,7 +178,7 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, vi func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) { scene := r.Context().Value(sceneKey).(*models.Scene) - filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.GetHash(config.GetVideoFileNamingAlgorithm())) + filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm())) // fall back to the scene image blob if the file isn't present screenshotExists, _ := utils.FileExists(filepath) @@ -196,13 +196,13 @@ func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) { func (rs sceneRoutes) Preview(w http.ResponseWriter, r *http.Request) { scene := r.Context().Value(sceneKey).(*models.Scene) - filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewPath(scene.GetHash(config.GetVideoFileNamingAlgorithm())) + filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm())) utils.ServeFileNoCache(w, r, filepath) } func (rs sceneRoutes) Webp(w http.ResponseWriter, r *http.Request) { scene := r.Context().Value(sceneKey).(*models.Scene) - filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewImagePath(scene.GetHash(config.GetVideoFileNamingAlgorithm())) + filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewImagePath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm())) http.ServeFile(w, r, filepath) } @@ -267,14 +267,14 @@ func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) { func (rs 
sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) { scene := r.Context().Value(sceneKey).(*models.Scene) w.Header().Set("Content-Type", "text/vtt") - filepath := manager.GetInstance().Paths.Scene.GetSpriteVttFilePath(scene.GetHash(config.GetVideoFileNamingAlgorithm())) + filepath := manager.GetInstance().Paths.Scene.GetSpriteVttFilePath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm())) http.ServeFile(w, r, filepath) } func (rs sceneRoutes) VttSprite(w http.ResponseWriter, r *http.Request) { scene := r.Context().Value(sceneKey).(*models.Scene) w.Header().Set("Content-Type", "image/jpeg") - filepath := manager.GetInstance().Paths.Scene.GetSpriteImageFilePath(scene.GetHash(config.GetVideoFileNamingAlgorithm())) + filepath := manager.GetInstance().Paths.Scene.GetSpriteImageFilePath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm())) http.ServeFile(w, r, filepath) } @@ -291,7 +291,7 @@ func (rs sceneRoutes) SceneMarkerStream(w http.ResponseWriter, r *http.Request) http.Error(w, http.StatusText(500), 500) return } - filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPath(scene.GetHash(config.GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds)) + filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds)) http.ServeFile(w, r, filepath) } @@ -308,7 +308,7 @@ func (rs sceneRoutes) SceneMarkerPreview(w http.ResponseWriter, r *http.Request) http.Error(w, http.StatusText(500), 500) return } - filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.GetHash(config.GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds)) + filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds)) // If the image doesn't exist, send the placeholder exists, _ := utils.FileExists(filepath) diff --git 
a/pkg/api/server.go b/pkg/api/server.go index 331e203f3..db7d51855 100644 --- a/pkg/api/server.go +++ b/pkg/api/server.go @@ -8,9 +8,7 @@ import ( "io/ioutil" "net/http" "net/url" - "os" "path" - "path/filepath" "runtime/debug" "strconv" "strings" @@ -22,7 +20,6 @@ import ( "github.com/gobuffalo/packr/v2" "github.com/gorilla/websocket" "github.com/rs/cors" - "github.com/stashapp/stash/pkg/database" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/manager" "github.com/stashapp/stash/pkg/manager/config" @@ -38,9 +35,13 @@ var githash string var uiBox *packr.Box //var legacyUiBox *packr.Box -var setupUIBox *packr.Box var loginUIBox *packr.Box +const ( + ApiKeyHeader = "ApiKey" + ApiKeyParameter = "apikey" +) + func allowUnauthenticated(r *http.Request) bool { return strings.HasPrefix(r.URL.Path, "/login") || r.URL.Path == "/css" } @@ -48,14 +49,34 @@ func allowUnauthenticated(r *http.Request) bool { func authenticateHandler() func(http.Handler) http.Handler { return func(next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + c := config.GetInstance() ctx := r.Context() // translate api key into current user, if present userID := "" + apiKey := r.Header.Get(ApiKeyHeader) var err error - // handle session - userID, err = getSessionUserID(w, r) + // try getting the api key as a query parameter + if apiKey == "" { + apiKey = r.URL.Query().Get(ApiKeyParameter) + } + + if apiKey != "" { + // match against configured API and set userID to the + // configured username. In future, we'll want to + // get the username from the key. 
+ if c.GetAPIKey() != apiKey { + w.Header().Add("WWW-Authenticate", `FormBased`) + w.WriteHeader(http.StatusUnauthorized) + return + } + + userID = c.GetUsername() + } else { + // handle session + userID, err = getSessionUserID(w, r) + } if err != nil { w.WriteHeader(http.StatusInternalServerError) @@ -64,9 +85,7 @@ func authenticateHandler() func(http.Handler) http.Handler { } // handle redirect if no user and user is required - if userID == "" && config.HasCredentials() && !allowUnauthenticated(r) { - // always allow - + if userID == "" && c.HasCredentials() && !allowUnauthenticated(r) { // if we don't have a userID, then redirect // if graphql was requested, we just return a forbidden error if r.URL.Path == "/graphql" { @@ -95,14 +114,11 @@ func authenticateHandler() func(http.Handler) http.Handler { } } -const setupEndPoint = "/setup" -const migrateEndPoint = "/migrate" const loginEndPoint = "/login" func Start() { uiBox = packr.New("UI Box", "../../ui/v2.5/build") //legacyUiBox = packr.New("UI Box", "../../ui/v1/dist/stash-frontend") - setupUIBox = packr.New("Setup UI Box", "../../ui/setup") loginUIBox = packr.New("Login UI Box", "../../ui/login") initSessionStore() @@ -110,18 +126,18 @@ func Start() { r := chi.NewRouter() + r.Use(middleware.Heartbeat("/healthz")) r.Use(authenticateHandler()) r.Use(middleware.Recoverer) - if config.GetLogAccess() { + c := config.GetInstance() + if c.GetLogAccess() { r.Use(middleware.Logger) } r.Use(middleware.DefaultCompress) r.Use(middleware.StripSlashes) r.Use(cors.AllowAll().Handler) r.Use(BaseURLMiddleware) - r.Use(ConfigCheckMiddleware) - r.Use(DatabaseCheckMiddleware) recoverFunc := handler.RecoverFunc(func(ctx context.Context, err interface{}) error { logger.Error(err) @@ -135,7 +151,7 @@ func Start() { return true }, }) - maxUploadSize := handler.UploadMaxSize(config.GetMaxUploadSize()) + maxUploadSize := handler.UploadMaxSize(c.GetMaxUploadSize()) websocketKeepAliveDuration := handler.WebsocketKeepAliveDuration(10 * 
time.Second) txnManager := manager.GetInstance().TxnManager @@ -176,12 +192,12 @@ func Start() { r.HandleFunc("/css", func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "text/css") - if !config.GetCSSEnabled() { + if !c.GetCSSEnabled() { return } // search for custom.css in current directory, then $HOME/.stash - fn := config.GetCSSPath() + fn := c.GetCSSPath() exists, _ := utils.FileExists(fn) if !exists { return @@ -190,21 +206,6 @@ func Start() { http.ServeFile(w, r, fn) }) - // Serve the migration UI - r.Get("/migrate", getMigrateHandler) - r.Post("/migrate", doMigrateHandler) - - // Serve the setup UI - r.HandleFunc("/setup*", func(w http.ResponseWriter, r *http.Request) { - ext := path.Ext(r.URL.Path) - if ext == ".html" || ext == "" { - data, _ := setupUIBox.Find("index.html") - _, _ = w.Write(data) - } else { - r.URL.Path = strings.Replace(r.URL.Path, "/setup", "", 1) - http.FileServer(setupUIBox).ServeHTTP(w, r) - } - }) r.HandleFunc("/login*", func(w http.ResponseWriter, r *http.Request) { ext := path.Ext(r.URL.Path) if ext == ".html" || ext == "" { @@ -215,62 +216,9 @@ func Start() { http.FileServer(loginUIBox).ServeHTTP(w, r) } }) - r.Post("/init", func(w http.ResponseWriter, r *http.Request) { - err := r.ParseForm() - if err != nil { - http.Error(w, fmt.Sprintf("error: %s", err), 500) - } - stash := filepath.Clean(r.Form.Get("stash")) - generated := filepath.Clean(r.Form.Get("generated")) - metadata := filepath.Clean(r.Form.Get("metadata")) - cache := filepath.Clean(r.Form.Get("cache")) - //downloads := filepath.Clean(r.Form.Get("downloads")) // TODO - downloads := filepath.Join(metadata, "downloads") - - exists, _ := utils.DirExists(stash) - if !exists || stash == "." { - http.Error(w, fmt.Sprintf("the stash path either doesn't exist, or is not a directory <%s>. Go back and try again.", stash), 500) - return - } - - exists, _ = utils.DirExists(generated) - if !exists || generated == "." 
{ - http.Error(w, fmt.Sprintf("the generated path either doesn't exist, or is not a directory <%s>. Go back and try again.", generated), 500) - return - } - - exists, _ = utils.DirExists(metadata) - if !exists || metadata == "." { - http.Error(w, fmt.Sprintf("the metadata path either doesn't exist, or is not a directory <%s> Go back and try again.", metadata), 500) - return - } - - exists, _ = utils.DirExists(cache) - if !exists || cache == "." { - http.Error(w, fmt.Sprintf("the cache path either doesn't exist, or is not a directory <%s> Go back and try again.", cache), 500) - return - } - - _ = os.Mkdir(downloads, 0755) - - // #536 - set stash as slice of strings - config.Set(config.Stash, []string{stash}) - config.Set(config.Generated, generated) - config.Set(config.Metadata, metadata) - config.Set(config.Cache, cache) - config.Set(config.Downloads, downloads) - if err := config.Write(); err != nil { - http.Error(w, fmt.Sprintf("there was an error saving the config file: %s", err), 500) - return - } - - manager.GetInstance().RefreshConfig() - - http.Redirect(w, r, "/", 301) - }) // Serve static folders - customServedFolders := config.GetCustomServedFolders() + customServedFolders := c.GetCustomServedFolders() if customServedFolders != nil { r.HandleFunc("/custom/*", func(w http.ResponseWriter, r *http.Request) { r.URL.Path = strings.Replace(r.URL.Path, "/custom", "", 1) @@ -286,9 +234,21 @@ func Start() { }) } + customUILocation := c.GetCustomUILocation() + // Serve the web app r.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) { ext := path.Ext(r.URL.Path) + + if customUILocation != "" { + if r.URL.Path == "index.html" || ext == "" { + r.URL.Path = "/" + } + + http.FileServer(http.Dir(customUILocation)).ServeHTTP(w, r) + return + } + if ext == ".html" || ext == "" { data, _ := uiBox.Find("index.html") _, _ = w.Write(data) @@ -301,13 +261,13 @@ func Start() { } }) - displayHost := config.GetHost() + displayHost := c.GetHost() if displayHost == 
"0.0.0.0" { displayHost = "localhost" } - displayAddress := displayHost + ":" + strconv.Itoa(config.GetPort()) + displayAddress := displayHost + ":" + strconv.Itoa(c.GetPort()) - address := config.GetHost() + ":" + strconv.Itoa(config.GetPort()) + address := c.GetHost() + ":" + strconv.Itoa(c.GetPort()) if tlsConfig := makeTLSConfig(); tlsConfig != nil { httpsServer := &http.Server{ Addr: address, @@ -402,7 +362,7 @@ func BaseURLMiddleware(next http.Handler) http.Handler { } baseURL := scheme + "://" + r.Host - externalHost := config.GetExternalHost() + externalHost := config.GetInstance().GetExternalHost() if externalHost != "" { baseURL = externalHost } @@ -413,34 +373,3 @@ func BaseURLMiddleware(next http.Handler) http.Handler { } return http.HandlerFunc(fn) } - -func ConfigCheckMiddleware(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - ext := path.Ext(r.URL.Path) - shouldRedirect := ext == "" && r.Method == "GET" - if !config.IsValid() && shouldRedirect { - // #539 - don't redirect if loading login page - if !strings.HasPrefix(r.URL.Path, setupEndPoint) && !strings.HasPrefix(r.URL.Path, loginEndPoint) { - http.Redirect(w, r, setupEndPoint, http.StatusFound) - return - } - } - next.ServeHTTP(w, r) - }) -} - -func DatabaseCheckMiddleware(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - ext := path.Ext(r.URL.Path) - shouldRedirect := ext == "" && r.Method == "GET" - if shouldRedirect && database.NeedsMigration() { - // #451 - don't redirect if loading login page - // #539 - or setup page - if !strings.HasPrefix(r.URL.Path, migrateEndPoint) && !strings.HasPrefix(r.URL.Path, loginEndPoint) && !strings.HasPrefix(r.URL.Path, setupEndPoint) { - http.Redirect(w, r, migrateEndPoint, http.StatusFound) - return - } - } - next.ServeHTTP(w, r) - }) -} diff --git a/pkg/api/session.go b/pkg/api/session.go index 8be4876bd..a81d37c9e 100644 --- 
a/pkg/api/session.go +++ b/pkg/api/session.go @@ -19,7 +19,7 @@ const userIDKey = "userID" const returnURLParam = "returnURL" -var sessionStore = sessions.NewCookieStore(config.GetSessionStoreKey()) +var sessionStore = sessions.NewCookieStore(config.GetInstance().GetSessionStoreKey()) type loginTemplateData struct { URL string @@ -27,7 +27,7 @@ type loginTemplateData struct { } func initSessionStore() { - sessionStore.MaxAge(config.GetMaxSessionAge()) + sessionStore.MaxAge(config.GetInstance().GetMaxSessionAge()) } func redirectToLogin(w http.ResponseWriter, returnURL string, loginError string) { @@ -45,7 +45,7 @@ func redirectToLogin(w http.ResponseWriter, returnURL string, loginError string) } func getLoginHandler(w http.ResponseWriter, r *http.Request) { - if !config.HasCredentials() { + if !config.GetInstance().HasCredentials() { http.Redirect(w, r, "/", http.StatusFound) return } @@ -66,7 +66,7 @@ func handleLogin(w http.ResponseWriter, r *http.Request) { password := r.FormValue("password") // authenticate the user - if !config.ValidateCredentials(username, password) { + if !config.GetInstance().ValidateCredentials(username, password) { // redirect back to the login page with an error redirectToLogin(w, url, "Username or password is invalid") return diff --git a/pkg/api/urlbuilders/scene.go b/pkg/api/urlbuilders/scene.go index 57c50f3cf..9a31e504f 100644 --- a/pkg/api/urlbuilders/scene.go +++ b/pkg/api/urlbuilders/scene.go @@ -1,6 +1,7 @@ package urlbuilders import ( + "fmt" "strconv" "time" ) @@ -8,6 +9,7 @@ import ( type SceneURLBuilder struct { BaseURL string SceneID string + APIKey string } func NewSceneURLBuilder(baseURL string, sceneID int) SceneURLBuilder { @@ -18,7 +20,11 @@ func NewSceneURLBuilder(baseURL string, sceneID int) SceneURLBuilder { } func (b SceneURLBuilder) GetStreamURL() string { - return b.BaseURL + "/scene/" + b.SceneID + "/stream" + var apiKeyParam string + if b.APIKey != "" { + apiKeyParam = fmt.Sprintf("?apikey=%s", b.APIKey) + } + 
return fmt.Sprintf("%s/scene/%s/stream%s", b.BaseURL, b.SceneID, apiKeyParam) } func (b SceneURLBuilder) GetStreamPreviewURL() string { @@ -33,6 +39,10 @@ func (b SceneURLBuilder) GetSpriteVTTURL() string { return b.BaseURL + "/scene/" + b.SceneID + "_thumbs.vtt" } +func (b SceneURLBuilder) GetSpriteURL() string { + return b.BaseURL + "/scene/" + b.SceneID + "_sprite.jpg" +} + func (b SceneURLBuilder) GetScreenshotURL(updateTime time.Time) string { return b.BaseURL + "/scene/" + b.SceneID + "/screenshot?" + strconv.FormatInt(updateTime.Unix(), 10) } diff --git a/pkg/autotag/gallery.go b/pkg/autotag/gallery.go new file mode 100644 index 000000000..fa3ab3a84 --- /dev/null +++ b/pkg/autotag/gallery.go @@ -0,0 +1,117 @@ +package autotag + +import ( + "fmt" + "path/filepath" + "strings" + + "github.com/stashapp/stash/pkg/gallery" + "github.com/stashapp/stash/pkg/models" +) + +func galleryPathsFilter(paths []string) *models.GalleryFilterType { + if paths == nil { + return nil + } + + sep := string(filepath.Separator) + + var ret *models.GalleryFilterType + var or *models.GalleryFilterType + for _, p := range paths { + newOr := &models.GalleryFilterType{} + if or != nil { + or.Or = newOr + } else { + ret = newOr + } + + or = newOr + + if !strings.HasSuffix(p, sep) { + p = p + sep + } + + or.Path = &models.StringCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: p + "%", + } + } + + return ret +} + +func getMatchingGalleries(name string, paths []string, galleryReader models.GalleryReader) ([]*models.Gallery, error) { + regex := getPathQueryRegex(name) + organized := false + filter := models.GalleryFilterType{ + Path: &models.StringCriterionInput{ + Value: "(?i)" + regex, + Modifier: models.CriterionModifierMatchesRegex, + }, + Organized: &organized, + } + + filter.And = galleryPathsFilter(paths) + + pp := models.PerPageAll + gallerys, _, err := galleryReader.Query(&filter, &models.FindFilterType{ + PerPage: &pp, + }) + + if err != nil { + return nil, 
fmt.Errorf("error querying gallerys with regex '%s': %s", regex, err.Error()) + } + + var ret []*models.Gallery + for _, p := range gallerys { + if nameMatchesPath(name, p.Path.String) { + ret = append(ret, p) + } + } + + return ret, nil +} + +func getGalleryFileTagger(s *models.Gallery) tagger { + return tagger{ + ID: s.ID, + Type: "gallery", + Name: s.GetTitle(), + Path: s.Path.String, + } +} + +// GalleryPerformers tags the provided gallery with performers whose name matches the gallery's path. +func GalleryPerformers(s *models.Gallery, rw models.GalleryReaderWriter, performerReader models.PerformerReader) error { + t := getGalleryFileTagger(s) + + return t.tagPerformers(performerReader, func(subjectID, otherID int) (bool, error) { + return gallery.AddPerformer(rw, subjectID, otherID) + }) +} + +// GalleryStudios tags the provided gallery with the first studio whose name matches the gallery's path. +// +// Gallerys will not be tagged if studio is already set. +func GalleryStudios(s *models.Gallery, rw models.GalleryReaderWriter, studioReader models.StudioReader) error { + if s.StudioID.Valid { + // don't modify + return nil + } + + t := getGalleryFileTagger(s) + + return t.tagStudios(studioReader, func(subjectID, otherID int) (bool, error) { + return addGalleryStudio(rw, subjectID, otherID) + }) +} + +// GalleryTags tags the provided gallery with tags whose name matches the gallery's path. 
+func GalleryTags(s *models.Gallery, rw models.GalleryReaderWriter, tagReader models.TagReader) error { + t := getGalleryFileTagger(s) + + return t.tagTags(tagReader, func(subjectID, otherID int) (bool, error) { + return gallery.AddTag(rw, subjectID, otherID) + }) +} diff --git a/pkg/autotag/gallery_test.go b/pkg/autotag/gallery_test.go new file mode 100644 index 000000000..ff47f20c1 --- /dev/null +++ b/pkg/autotag/gallery_test.go @@ -0,0 +1,145 @@ +package autotag + +import ( + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +const galleryExt = "zip" + +func TestGalleryPerformers(t *testing.T) { + const galleryID = 1 + const performerName = "performer name" + const performerID = 2 + performer := models.Performer{ + ID: performerID, + Name: models.NullString(performerName), + } + + const reversedPerformerName = "name performer" + const reversedPerformerID = 3 + reversedPerformer := models.Performer{ + ID: reversedPerformerID, + Name: models.NullString(reversedPerformerName), + } + + testTables := generateTestTable(performerName, galleryExt) + + assert := assert.New(t) + + for _, test := range testTables { + mockPerformerReader := &mocks.PerformerReaderWriter{} + mockGalleryReader := &mocks.GalleryReaderWriter{} + + mockPerformerReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once() + + if test.Matches { + mockGalleryReader.On("GetPerformerIDs", galleryID).Return(nil, nil).Once() + mockGalleryReader.On("UpdatePerformers", galleryID, []int{performerID}).Return(nil).Once() + } + + gallery := models.Gallery{ + ID: galleryID, + Path: models.NullString(test.Path), + } + err := GalleryPerformers(&gallery, mockGalleryReader, mockPerformerReader) + + assert.Nil(err) + mockPerformerReader.AssertExpectations(t) + mockGalleryReader.AssertExpectations(t) + } +} + +func 
TestGalleryStudios(t *testing.T) { + const galleryID = 1 + const studioName = "studio name" + const studioID = 2 + studio := models.Studio{ + ID: studioID, + Name: models.NullString(studioName), + } + + const reversedStudioName = "name studio" + const reversedStudioID = 3 + reversedStudio := models.Studio{ + ID: reversedStudioID, + Name: models.NullString(reversedStudioName), + } + + testTables := generateTestTable(studioName, galleryExt) + + assert := assert.New(t) + + for _, test := range testTables { + mockStudioReader := &mocks.StudioReaderWriter{} + mockGalleryReader := &mocks.GalleryReaderWriter{} + + mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once() + + if test.Matches { + mockGalleryReader.On("Find", galleryID).Return(&models.Gallery{}, nil).Once() + expectedStudioID := models.NullInt64(studioID) + mockGalleryReader.On("UpdatePartial", models.GalleryPartial{ + ID: galleryID, + StudioID: &expectedStudioID, + }).Return(nil, nil).Once() + } + + gallery := models.Gallery{ + ID: galleryID, + Path: models.NullString(test.Path), + } + err := GalleryStudios(&gallery, mockGalleryReader, mockStudioReader) + + assert.Nil(err) + mockStudioReader.AssertExpectations(t) + mockGalleryReader.AssertExpectations(t) + } +} + +func TestGalleryTags(t *testing.T) { + const galleryID = 1 + const tagName = "tag name" + const tagID = 2 + tag := models.Tag{ + ID: tagID, + Name: tagName, + } + + const reversedTagName = "name tag" + const reversedTagID = 3 + reversedTag := models.Tag{ + ID: reversedTagID, + Name: reversedTagName, + } + + testTables := generateTestTable(tagName, galleryExt) + + assert := assert.New(t) + + for _, test := range testTables { + mockTagReader := &mocks.TagReaderWriter{} + mockGalleryReader := &mocks.GalleryReaderWriter{} + + mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once() + + if test.Matches { + mockGalleryReader.On("GetTagIDs", 
galleryID).Return(nil, nil).Once() + mockGalleryReader.On("UpdateTags", galleryID, []int{tagID}).Return(nil).Once() + } + + gallery := models.Gallery{ + ID: galleryID, + Path: models.NullString(test.Path), + } + err := GalleryTags(&gallery, mockGalleryReader, mockTagReader) + + assert.Nil(err) + mockTagReader.AssertExpectations(t) + mockGalleryReader.AssertExpectations(t) + } +} diff --git a/pkg/autotag/image.go b/pkg/autotag/image.go new file mode 100644 index 000000000..ff5816c6f --- /dev/null +++ b/pkg/autotag/image.go @@ -0,0 +1,117 @@ +package autotag + +import ( + "fmt" + "path/filepath" + "strings" + + "github.com/stashapp/stash/pkg/image" + "github.com/stashapp/stash/pkg/models" +) + +func imagePathsFilter(paths []string) *models.ImageFilterType { + if paths == nil { + return nil + } + + sep := string(filepath.Separator) + + var ret *models.ImageFilterType + var or *models.ImageFilterType + for _, p := range paths { + newOr := &models.ImageFilterType{} + if or != nil { + or.Or = newOr + } else { + ret = newOr + } + + or = newOr + + if !strings.HasSuffix(p, sep) { + p = p + sep + } + + or.Path = &models.StringCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: p + "%", + } + } + + return ret +} + +func getMatchingImages(name string, paths []string, imageReader models.ImageReader) ([]*models.Image, error) { + regex := getPathQueryRegex(name) + organized := false + filter := models.ImageFilterType{ + Path: &models.StringCriterionInput{ + Value: "(?i)" + regex, + Modifier: models.CriterionModifierMatchesRegex, + }, + Organized: &organized, + } + + filter.And = imagePathsFilter(paths) + + pp := models.PerPageAll + images, _, err := imageReader.Query(&filter, &models.FindFilterType{ + PerPage: &pp, + }) + + if err != nil { + return nil, fmt.Errorf("error querying images with regex '%s': %s", regex, err.Error()) + } + + var ret []*models.Image + for _, p := range images { + if nameMatchesPath(name, p.Path) { + ret = append(ret, p) + } + } + + 
return ret, nil +} + +func getImageFileTagger(s *models.Image) tagger { + return tagger{ + ID: s.ID, + Type: "image", + Name: s.GetTitle(), + Path: s.Path, + } +} + +// ImagePerformers tags the provided image with performers whose name matches the image's path. +func ImagePerformers(s *models.Image, rw models.ImageReaderWriter, performerReader models.PerformerReader) error { + t := getImageFileTagger(s) + + return t.tagPerformers(performerReader, func(subjectID, otherID int) (bool, error) { + return image.AddPerformer(rw, subjectID, otherID) + }) +} + +// ImageStudios tags the provided image with the first studio whose name matches the image's path. +// +// Images will not be tagged if studio is already set. +func ImageStudios(s *models.Image, rw models.ImageReaderWriter, studioReader models.StudioReader) error { + if s.StudioID.Valid { + // don't modify + return nil + } + + t := getImageFileTagger(s) + + return t.tagStudios(studioReader, func(subjectID, otherID int) (bool, error) { + return addImageStudio(rw, subjectID, otherID) + }) +} + +// ImageTags tags the provided image with tags whose name matches the image's path. 
+func ImageTags(s *models.Image, rw models.ImageReaderWriter, tagReader models.TagReader) error { + t := getImageFileTagger(s) + + return t.tagTags(tagReader, func(subjectID, otherID int) (bool, error) { + return image.AddTag(rw, subjectID, otherID) + }) +} diff --git a/pkg/autotag/image_test.go b/pkg/autotag/image_test.go new file mode 100644 index 000000000..8dba6b6e2 --- /dev/null +++ b/pkg/autotag/image_test.go @@ -0,0 +1,145 @@ +package autotag + +import ( + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +const imageExt = "jpg" + +func TestImagePerformers(t *testing.T) { + const imageID = 1 + const performerName = "performer name" + const performerID = 2 + performer := models.Performer{ + ID: performerID, + Name: models.NullString(performerName), + } + + const reversedPerformerName = "name performer" + const reversedPerformerID = 3 + reversedPerformer := models.Performer{ + ID: reversedPerformerID, + Name: models.NullString(reversedPerformerName), + } + + testTables := generateTestTable(performerName, imageExt) + + assert := assert.New(t) + + for _, test := range testTables { + mockPerformerReader := &mocks.PerformerReaderWriter{} + mockImageReader := &mocks.ImageReaderWriter{} + + mockPerformerReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once() + + if test.Matches { + mockImageReader.On("GetPerformerIDs", imageID).Return(nil, nil).Once() + mockImageReader.On("UpdatePerformers", imageID, []int{performerID}).Return(nil).Once() + } + + image := models.Image{ + ID: imageID, + Path: test.Path, + } + err := ImagePerformers(&image, mockImageReader, mockPerformerReader) + + assert.Nil(err) + mockPerformerReader.AssertExpectations(t) + mockImageReader.AssertExpectations(t) + } +} + +func TestImageStudios(t *testing.T) { + const imageID = 1 + const studioName = "studio name" 
+ const studioID = 2 + studio := models.Studio{ + ID: studioID, + Name: models.NullString(studioName), + } + + const reversedStudioName = "name studio" + const reversedStudioID = 3 + reversedStudio := models.Studio{ + ID: reversedStudioID, + Name: models.NullString(reversedStudioName), + } + + testTables := generateTestTable(studioName, imageExt) + + assert := assert.New(t) + + for _, test := range testTables { + mockStudioReader := &mocks.StudioReaderWriter{} + mockImageReader := &mocks.ImageReaderWriter{} + + mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once() + + if test.Matches { + mockImageReader.On("Find", imageID).Return(&models.Image{}, nil).Once() + expectedStudioID := models.NullInt64(studioID) + mockImageReader.On("Update", models.ImagePartial{ + ID: imageID, + StudioID: &expectedStudioID, + }).Return(nil, nil).Once() + } + + image := models.Image{ + ID: imageID, + Path: test.Path, + } + err := ImageStudios(&image, mockImageReader, mockStudioReader) + + assert.Nil(err) + mockStudioReader.AssertExpectations(t) + mockImageReader.AssertExpectations(t) + } +} + +func TestImageTags(t *testing.T) { + const imageID = 1 + const tagName = "tag name" + const tagID = 2 + tag := models.Tag{ + ID: tagID, + Name: tagName, + } + + const reversedTagName = "name tag" + const reversedTagID = 3 + reversedTag := models.Tag{ + ID: reversedTagID, + Name: reversedTagName, + } + + testTables := generateTestTable(tagName, imageExt) + + assert := assert.New(t) + + for _, test := range testTables { + mockTagReader := &mocks.TagReaderWriter{} + mockImageReader := &mocks.ImageReaderWriter{} + + mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once() + + if test.Matches { + mockImageReader.On("GetTagIDs", imageID).Return(nil, nil).Once() + mockImageReader.On("UpdateTags", imageID, []int{tagID}).Return(nil).Once() + } + + image := models.Image{ + ID: imageID, + Path: 
test.Path, + } + err := ImageTags(&image, mockImageReader, mockTagReader) + + assert.Nil(err) + mockTagReader.AssertExpectations(t) + mockImageReader.AssertExpectations(t) + } +} diff --git a/pkg/autotag/integration_test.go b/pkg/autotag/integration_test.go new file mode 100644 index 000000000..6c890c359 --- /dev/null +++ b/pkg/autotag/integration_test.go @@ -0,0 +1,784 @@ +// +build integration + +package autotag + +import ( + "context" + "database/sql" + "fmt" + "io/ioutil" + "os" + "testing" + + "github.com/stashapp/stash/pkg/database" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/sqlite" + "github.com/stashapp/stash/pkg/utils" + + _ "github.com/golang-migrate/migrate/v4/database/sqlite3" + _ "github.com/golang-migrate/migrate/v4/source/file" +) + +const testName = "Foo's Bar" +const existingStudioName = "ExistingStudio" + +const existingStudioSceneName = testName + ".dontChangeStudio.mp4" +const existingStudioImageName = testName + ".dontChangeStudio.mp4" +const existingStudioGalleryName = testName + ".dontChangeStudio.mp4" + +var existingStudioID int + +func testTeardown(databaseFile string) { + err := database.DB.Close() + + if err != nil { + panic(err) + } + + err = os.Remove(databaseFile) + if err != nil { + panic(err) + } +} + +func runTests(m *testing.M) int { + // create the database file + f, err := ioutil.TempFile("", "*.sqlite") + if err != nil { + panic(fmt.Sprintf("Could not create temporary file: %s", err.Error())) + } + + f.Close() + databaseFile := f.Name() + database.Initialize(databaseFile) + + // defer close and delete the database + defer testTeardown(databaseFile) + + err = populateDB() + if err != nil { + panic(fmt.Sprintf("Could not populate database: %s", err.Error())) + } else { + // run the tests + return m.Run() + } +} + +func TestMain(m *testing.M) { + ret := runTests(m) + os.Exit(ret) +} + +func createPerformer(pqb models.PerformerWriter) error { + // create the performer + performer := models.Performer{ + 
Checksum: testName, + Name: sql.NullString{Valid: true, String: testName}, + Favorite: sql.NullBool{Valid: true, Bool: false}, + } + + _, err := pqb.Create(performer) + if err != nil { + return err + } + + return nil +} + +func createStudio(qb models.StudioWriter, name string) (*models.Studio, error) { + // create the studio + studio := models.Studio{ + Checksum: name, + Name: sql.NullString{Valid: true, String: name}, + } + + return qb.Create(studio) +} + +func createTag(qb models.TagWriter) error { + // create the studio + tag := models.Tag{ + Name: testName, + } + + _, err := qb.Create(tag) + if err != nil { + return err + } + + return nil +} + +func createScenes(sqb models.SceneReaderWriter) error { + // create the scenes + scenePatterns, falseScenePatterns := generateTestPaths(testName, sceneExt) + + for _, fn := range scenePatterns { + err := createScene(sqb, makeScene(fn, true)) + if err != nil { + return err + } + } + for _, fn := range falseScenePatterns { + err := createScene(sqb, makeScene(fn, false)) + if err != nil { + return err + } + } + + // add organized scenes + for _, fn := range scenePatterns { + s := makeScene("organized"+fn, false) + s.Organized = true + err := createScene(sqb, s) + if err != nil { + return err + } + } + + // create scene with existing studio io + studioScene := makeScene(existingStudioSceneName, true) + studioScene.StudioID = sql.NullInt64{Valid: true, Int64: int64(existingStudioID)} + err := createScene(sqb, studioScene) + if err != nil { + return err + } + + return nil +} + +func makeScene(name string, expectedResult bool) *models.Scene { + scene := &models.Scene{ + Checksum: sql.NullString{String: utils.MD5FromString(name), Valid: true}, + Path: name, + } + + // if expectedResult is true then we expect it to match, set the title accordingly + if expectedResult { + scene.Title = sql.NullString{Valid: true, String: name} + } + + return scene +} + +func createScene(sqb models.SceneWriter, scene *models.Scene) error { + _, err 
:= sqb.Create(*scene) + + if err != nil { + return fmt.Errorf("Failed to create scene with name '%s': %s", scene.Path, err.Error()) + } + + return nil +} + +func createImages(sqb models.ImageReaderWriter) error { + // create the images + imagePatterns, falseImagePatterns := generateTestPaths(testName, imageExt) + + for _, fn := range imagePatterns { + err := createImage(sqb, makeImage(fn, true)) + if err != nil { + return err + } + } + for _, fn := range falseImagePatterns { + err := createImage(sqb, makeImage(fn, false)) + if err != nil { + return err + } + } + + // add organized images + for _, fn := range imagePatterns { + s := makeImage("organized"+fn, false) + s.Organized = true + err := createImage(sqb, s) + if err != nil { + return err + } + } + + // create image with existing studio io + studioImage := makeImage(existingStudioImageName, true) + studioImage.StudioID = sql.NullInt64{Valid: true, Int64: int64(existingStudioID)} + err := createImage(sqb, studioImage) + if err != nil { + return err + } + + return nil +} + +func makeImage(name string, expectedResult bool) *models.Image { + image := &models.Image{ + Checksum: utils.MD5FromString(name), + Path: name, + } + + // if expectedResult is true then we expect it to match, set the title accordingly + if expectedResult { + image.Title = sql.NullString{Valid: true, String: name} + } + + return image +} + +func createImage(sqb models.ImageWriter, image *models.Image) error { + _, err := sqb.Create(*image) + + if err != nil { + return fmt.Errorf("Failed to create image with name '%s': %s", image.Path, err.Error()) + } + + return nil +} + +func createGalleries(sqb models.GalleryReaderWriter) error { + // create the galleries + galleryPatterns, falseGalleryPatterns := generateTestPaths(testName, galleryExt) + + for _, fn := range galleryPatterns { + err := createGallery(sqb, makeGallery(fn, true)) + if err != nil { + return err + } + } + for _, fn := range falseGalleryPatterns { + err := createGallery(sqb, 
makeGallery(fn, false)) + if err != nil { + return err + } + } + + // add organized galleries + for _, fn := range galleryPatterns { + s := makeGallery("organized"+fn, false) + s.Organized = true + err := createGallery(sqb, s) + if err != nil { + return err + } + } + + // create gallery with existing studio io + studioGallery := makeGallery(existingStudioGalleryName, true) + studioGallery.StudioID = sql.NullInt64{Valid: true, Int64: int64(existingStudioID)} + err := createGallery(sqb, studioGallery) + if err != nil { + return err + } + + return nil +} + +func makeGallery(name string, expectedResult bool) *models.Gallery { + gallery := &models.Gallery{ + Checksum: utils.MD5FromString(name), + Path: models.NullString(name), + } + + // if expectedResult is true then we expect it to match, set the title accordingly + if expectedResult { + gallery.Title = sql.NullString{Valid: true, String: name} + } + + return gallery +} + +func createGallery(sqb models.GalleryWriter, gallery *models.Gallery) error { + _, err := sqb.Create(*gallery) + + if err != nil { + return fmt.Errorf("Failed to create gallery with name '%s': %s", gallery.Path.String, err.Error()) + } + + return nil +} + +func withTxn(f func(r models.Repository) error) error { + t := sqlite.NewTransactionManager() + return t.WithTxn(context.TODO(), f) +} + +func populateDB() error { + if err := withTxn(func(r models.Repository) error { + err := createPerformer(r.Performer()) + if err != nil { + return err + } + + _, err = createStudio(r.Studio(), testName) + if err != nil { + return err + } + + // create existing studio + existingStudio, err := createStudio(r.Studio(), existingStudioName) + if err != nil { + return err + } + + existingStudioID = existingStudio.ID + + err = createTag(r.Tag()) + if err != nil { + return err + } + + err = createScenes(r.Scene()) + if err != nil { + return err + } + + err = createImages(r.Image()) + if err != nil { + return err + } + + err = createGalleries(r.Gallery()) + if err != nil 
{ + return err + } + + return nil + }); err != nil { + return err + } + + return nil +} + +func TestParsePerformerScenes(t *testing.T) { + var performers []*models.Performer + if err := withTxn(func(r models.Repository) error { + var err error + performers, err = r.Performer().All() + return err + }); err != nil { + t.Errorf("Error getting performer: %s", err) + return + } + + for _, p := range performers { + if err := withTxn(func(r models.Repository) error { + return PerformerScenes(p, nil, r.Scene()) + }); err != nil { + t.Errorf("Error auto-tagging performers: %s", err) + } + } + + // verify that scenes were tagged correctly + withTxn(func(r models.Repository) error { + pqb := r.Performer() + + scenes, err := r.Scene().All() + if err != nil { + t.Error(err.Error()) + } + + for _, scene := range scenes { + performers, err := pqb.FindBySceneID(scene.ID) + + if err != nil { + t.Errorf("Error getting scene performers: %s", err.Error()) + } + + // title is only set on scenes where we expect performer to be set + if scene.Title.String == scene.Path && len(performers) == 0 { + t.Errorf("Did not set performer '%s' for path '%s'", testName, scene.Path) + } else if scene.Title.String != scene.Path && len(performers) > 0 { + t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, scene.Path) + } + } + + return nil + }) +} + +func TestParseStudioScenes(t *testing.T) { + var studios []*models.Studio + if err := withTxn(func(r models.Repository) error { + var err error + studios, err = r.Studio().All() + return err + }); err != nil { + t.Errorf("Error getting studio: %s", err) + return + } + + for _, s := range studios { + if err := withTxn(func(r models.Repository) error { + return StudioScenes(s, nil, r.Scene()) + }); err != nil { + t.Errorf("Error auto-tagging performers: %s", err) + } + } + + // verify that scenes were tagged correctly + withTxn(func(r models.Repository) error { + scenes, err := r.Scene().All() + if err != nil { + t.Error(err.Error()) + } + + 
for _, scene := range scenes { + // check for existing studio id scene first + if scene.Path == existingStudioSceneName { + if scene.StudioID.Int64 != int64(existingStudioID) { + t.Error("Incorrectly overwrote studio ID for scene with existing studio ID") + } + } else { + // title is only set on scenes where we expect studio to be set + if scene.Title.String == scene.Path { + if !scene.StudioID.Valid { + t.Errorf("Did not set studio '%s' for path '%s'", testName, scene.Path) + } else if scene.StudioID.Int64 != int64(studios[1].ID) { + t.Errorf("Incorrect studio id %d set for path '%s'", scene.StudioID.Int64, scene.Path) + } + + } else if scene.Title.String != scene.Path && scene.StudioID.Int64 == int64(studios[1].ID) { + t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, scene.Path) + } + } + } + + return nil + }) +} + +func TestParseTagScenes(t *testing.T) { + var tags []*models.Tag + if err := withTxn(func(r models.Repository) error { + var err error + tags, err = r.Tag().All() + return err + }); err != nil { + t.Errorf("Error getting performer: %s", err) + return + } + + for _, s := range tags { + if err := withTxn(func(r models.Repository) error { + return TagScenes(s, nil, r.Scene()) + }); err != nil { + t.Errorf("Error auto-tagging performers: %s", err) + } + } + + // verify that scenes were tagged correctly + withTxn(func(r models.Repository) error { + scenes, err := r.Scene().All() + if err != nil { + t.Error(err.Error()) + } + + tqb := r.Tag() + + for _, scene := range scenes { + tags, err := tqb.FindBySceneID(scene.ID) + + if err != nil { + t.Errorf("Error getting scene tags: %s", err.Error()) + } + + // title is only set on scenes where we expect performer to be set + if scene.Title.String == scene.Path && len(tags) == 0 { + t.Errorf("Did not set tag '%s' for path '%s'", testName, scene.Path) + } else if scene.Title.String != scene.Path && len(tags) > 0 { + t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, scene.Path) + } + } + + 
return nil + }) +} + +func TestParsePerformerImages(t *testing.T) { + var performers []*models.Performer + if err := withTxn(func(r models.Repository) error { + var err error + performers, err = r.Performer().All() + return err + }); err != nil { + t.Errorf("Error getting performer: %s", err) + return + } + + for _, p := range performers { + if err := withTxn(func(r models.Repository) error { + return PerformerImages(p, nil, r.Image()) + }); err != nil { + t.Errorf("Error auto-tagging performers: %s", err) + } + } + + // verify that images were tagged correctly + withTxn(func(r models.Repository) error { + pqb := r.Performer() + + images, err := r.Image().All() + if err != nil { + t.Error(err.Error()) + } + + for _, image := range images { + performers, err := pqb.FindByImageID(image.ID) + + if err != nil { + t.Errorf("Error getting image performers: %s", err.Error()) + } + + // title is only set on images where we expect performer to be set + if image.Title.String == image.Path && len(performers) == 0 { + t.Errorf("Did not set performer '%s' for path '%s'", testName, image.Path) + } else if image.Title.String != image.Path && len(performers) > 0 { + t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, image.Path) + } + } + + return nil + }) +} + +func TestParseStudioImages(t *testing.T) { + var studios []*models.Studio + if err := withTxn(func(r models.Repository) error { + var err error + studios, err = r.Studio().All() + return err + }); err != nil { + t.Errorf("Error getting studio: %s", err) + return + } + + for _, s := range studios { + if err := withTxn(func(r models.Repository) error { + return StudioImages(s, nil, r.Image()) + }); err != nil { + t.Errorf("Error auto-tagging performers: %s", err) + } + } + + // verify that images were tagged correctly + withTxn(func(r models.Repository) error { + images, err := r.Image().All() + if err != nil { + t.Error(err.Error()) + } + + for _, image := range images { + // check for existing studio id 
image first + if image.Path == existingStudioImageName { + if image.StudioID.Int64 != int64(existingStudioID) { + t.Error("Incorrectly overwrote studio ID for image with existing studio ID") + } + } else { + // title is only set on images where we expect studio to be set + if image.Title.String == image.Path { + if !image.StudioID.Valid { + t.Errorf("Did not set studio '%s' for path '%s'", testName, image.Path) + } else if image.StudioID.Int64 != int64(studios[1].ID) { + t.Errorf("Incorrect studio id %d set for path '%s'", image.StudioID.Int64, image.Path) + } + + } else if image.Title.String != image.Path && image.StudioID.Int64 == int64(studios[1].ID) { + t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, image.Path) + } + } + } + + return nil + }) +} + +func TestParseTagImages(t *testing.T) { + var tags []*models.Tag + if err := withTxn(func(r models.Repository) error { + var err error + tags, err = r.Tag().All() + return err + }); err != nil { + t.Errorf("Error getting performer: %s", err) + return + } + + for _, s := range tags { + if err := withTxn(func(r models.Repository) error { + return TagImages(s, nil, r.Image()) + }); err != nil { + t.Errorf("Error auto-tagging performers: %s", err) + } + } + + // verify that images were tagged correctly + withTxn(func(r models.Repository) error { + images, err := r.Image().All() + if err != nil { + t.Error(err.Error()) + } + + tqb := r.Tag() + + for _, image := range images { + tags, err := tqb.FindByImageID(image.ID) + + if err != nil { + t.Errorf("Error getting image tags: %s", err.Error()) + } + + // title is only set on images where we expect performer to be set + if image.Title.String == image.Path && len(tags) == 0 { + t.Errorf("Did not set tag '%s' for path '%s'", testName, image.Path) + } else if image.Title.String != image.Path && len(tags) > 0 { + t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, image.Path) + } + } + + return nil + }) +} + +func TestParsePerformerGalleries(t 
*testing.T) { + var performers []*models.Performer + if err := withTxn(func(r models.Repository) error { + var err error + performers, err = r.Performer().All() + return err + }); err != nil { + t.Errorf("Error getting performer: %s", err) + return + } + + for _, p := range performers { + if err := withTxn(func(r models.Repository) error { + return PerformerGalleries(p, nil, r.Gallery()) + }); err != nil { + t.Errorf("Error auto-tagging performers: %s", err) + } + } + + // verify that galleries were tagged correctly + withTxn(func(r models.Repository) error { + pqb := r.Performer() + + galleries, err := r.Gallery().All() + if err != nil { + t.Error(err.Error()) + } + + for _, gallery := range galleries { + performers, err := pqb.FindByGalleryID(gallery.ID) + + if err != nil { + t.Errorf("Error getting gallery performers: %s", err.Error()) + } + + // title is only set on galleries where we expect performer to be set + if gallery.Title.String == gallery.Path.String && len(performers) == 0 { + t.Errorf("Did not set performer '%s' for path '%s'", testName, gallery.Path.String) + } else if gallery.Title.String != gallery.Path.String && len(performers) > 0 { + t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, gallery.Path.String) + } + } + + return nil + }) +} + +func TestParseStudioGalleries(t *testing.T) { + var studios []*models.Studio + if err := withTxn(func(r models.Repository) error { + var err error + studios, err = r.Studio().All() + return err + }); err != nil { + t.Errorf("Error getting studio: %s", err) + return + } + + for _, s := range studios { + if err := withTxn(func(r models.Repository) error { + return StudioGalleries(s, nil, r.Gallery()) + }); err != nil { + t.Errorf("Error auto-tagging performers: %s", err) + } + } + + // verify that galleries were tagged correctly + withTxn(func(r models.Repository) error { + galleries, err := r.Gallery().All() + if err != nil { + t.Error(err.Error()) + } + + for _, gallery := range galleries { + // 
check for existing studio id gallery first + if gallery.Path.String == existingStudioGalleryName { + if gallery.StudioID.Int64 != int64(existingStudioID) { + t.Error("Incorrectly overwrote studio ID for gallery with existing studio ID") + } + } else { + // title is only set on galleries where we expect studio to be set + if gallery.Title.String == gallery.Path.String { + if !gallery.StudioID.Valid { + t.Errorf("Did not set studio '%s' for path '%s'", testName, gallery.Path.String) + } else if gallery.StudioID.Int64 != int64(studios[1].ID) { + t.Errorf("Incorrect studio id %d set for path '%s'", gallery.StudioID.Int64, gallery.Path.String) + } + + } else if gallery.Title.String != gallery.Path.String && gallery.StudioID.Int64 == int64(studios[1].ID) { + t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, gallery.Path.String) + } + } + } + + return nil + }) +} + +func TestParseTagGalleries(t *testing.T) { + var tags []*models.Tag + if err := withTxn(func(r models.Repository) error { + var err error + tags, err = r.Tag().All() + return err + }); err != nil { + t.Errorf("Error getting performer: %s", err) + return + } + + for _, s := range tags { + if err := withTxn(func(r models.Repository) error { + return TagGalleries(s, nil, r.Gallery()) + }); err != nil { + t.Errorf("Error auto-tagging performers: %s", err) + } + } + + // verify that galleries were tagged correctly + withTxn(func(r models.Repository) error { + galleries, err := r.Gallery().All() + if err != nil { + t.Error(err.Error()) + } + + tqb := r.Tag() + + for _, gallery := range galleries { + tags, err := tqb.FindByGalleryID(gallery.ID) + + if err != nil { + t.Errorf("Error getting gallery tags: %s", err.Error()) + } + + // title is only set on galleries where we expect performer to be set + if gallery.Title.String == gallery.Path.String && len(tags) == 0 { + t.Errorf("Did not set tag '%s' for path '%s'", testName, gallery.Path.String) + } else if gallery.Title.String != gallery.Path.String && 
len(tags) > 0 { + t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, gallery.Path.String) + } + } + + return nil + }) +} diff --git a/pkg/autotag/performer.go b/pkg/autotag/performer.go new file mode 100644 index 000000000..bdbd497c3 --- /dev/null +++ b/pkg/autotag/performer.go @@ -0,0 +1,62 @@ +package autotag + +import ( + "github.com/stashapp/stash/pkg/gallery" + "github.com/stashapp/stash/pkg/image" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scene" +) + +func getMatchingPerformers(path string, performerReader models.PerformerReader) ([]*models.Performer, error) { + words := getPathWords(path) + performers, err := performerReader.QueryForAutoTag(words) + + if err != nil { + return nil, err + } + + var ret []*models.Performer + for _, p := range performers { + // TODO - commenting out alias handling until both sides work correctly + if nameMatchesPath(p.Name.String, path) { // || nameMatchesPath(p.Aliases.String, path) { + ret = append(ret, p) + } + } + + return ret, nil +} + +func getPerformerTagger(p *models.Performer) tagger { + return tagger{ + ID: p.ID, + Type: "performer", + Name: p.Name.String, + } +} + +// PerformerScenes searches for scenes whose path matches the provided performer name and tags the scene with the performer. +func PerformerScenes(p *models.Performer, paths []string, rw models.SceneReaderWriter) error { + t := getPerformerTagger(p) + + return t.tagScenes(paths, rw, func(subjectID, otherID int) (bool, error) { + return scene.AddPerformer(rw, otherID, subjectID) + }) +} + +// PerformerImages searches for images whose path matches the provided performer name and tags the image with the performer. 
+func PerformerImages(p *models.Performer, paths []string, rw models.ImageReaderWriter) error { + t := getPerformerTagger(p) + + return t.tagImages(paths, rw, func(subjectID, otherID int) (bool, error) { + return image.AddPerformer(rw, otherID, subjectID) + }) +} + +// PerformerGalleries searches for galleries whose path matches the provided performer name and tags the gallery with the performer. +func PerformerGalleries(p *models.Performer, paths []string, rw models.GalleryReaderWriter) error { + t := getPerformerTagger(p) + + return t.tagGalleries(paths, rw, func(subjectID, otherID int) (bool, error) { + return gallery.AddPerformer(rw, otherID, subjectID) + }) +} diff --git a/pkg/autotag/performer_test.go b/pkg/autotag/performer_test.go new file mode 100644 index 000000000..7d78b9304 --- /dev/null +++ b/pkg/autotag/performer_test.go @@ -0,0 +1,225 @@ +package autotag + +import ( + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stretchr/testify/assert" +) + +func TestPerformerScenes(t *testing.T) { + type test struct { + performerName string + expectedRegex string + } + + performerNames := []test{ + { + "performer name", + `(?i)(?:^|_|[^\w\d])performer[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + { + "performer + name", + `(?i)(?:^|_|[^\w\d])performer[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + } + + for _, p := range performerNames { + testPerformerScenes(t, p.performerName, p.expectedRegex) + } +} + +func testPerformerScenes(t *testing.T, performerName, expectedRegex string) { + mockSceneReader := &mocks.SceneReaderWriter{} + + const performerID = 2 + + var scenes []*models.Scene + matchingPaths, falsePaths := generateTestPaths(performerName, "mp4") + for i, p := range append(matchingPaths, falsePaths...) 
{ + scenes = append(scenes, &models.Scene{ + ID: i + 1, + Path: p, + }) + } + + performer := models.Performer{ + ID: performerID, + Name: models.NullString(performerName), + } + + organized := false + perPage := models.PerPageAll + + expectedSceneFilter := &models.SceneFilterType{ + Organized: &organized, + Path: &models.StringCriterionInput{ + Value: expectedRegex, + Modifier: models.CriterionModifierMatchesRegex, + }, + } + + expectedFindFilter := &models.FindFilterType{ + PerPage: &perPage, + } + + mockSceneReader.On("Query", expectedSceneFilter, expectedFindFilter).Return(scenes, len(scenes), nil).Once() + + for i := range matchingPaths { + sceneID := i + 1 + mockSceneReader.On("GetPerformerIDs", sceneID).Return(nil, nil).Once() + mockSceneReader.On("UpdatePerformers", sceneID, []int{performerID}).Return(nil).Once() + } + + err := PerformerScenes(&performer, nil, mockSceneReader) + + assert := assert.New(t) + + assert.Nil(err) + mockSceneReader.AssertExpectations(t) +} + +func TestPerformerImages(t *testing.T) { + type test struct { + performerName string + expectedRegex string + } + + performerNames := []test{ + { + "performer name", + `(?i)(?:^|_|[^\w\d])performer[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + { + "performer + name", + `(?i)(?:^|_|[^\w\d])performer[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + } + + for _, p := range performerNames { + testPerformerImages(t, p.performerName, p.expectedRegex) + } +} + +func testPerformerImages(t *testing.T, performerName, expectedRegex string) { + mockImageReader := &mocks.ImageReaderWriter{} + + const performerID = 2 + + var images []*models.Image + matchingPaths, falsePaths := generateTestPaths(performerName, imageExt) + for i, p := range append(matchingPaths, falsePaths...) 
{ + images = append(images, &models.Image{ + ID: i + 1, + Path: p, + }) + } + + performer := models.Performer{ + ID: performerID, + Name: models.NullString(performerName), + } + + organized := false + perPage := models.PerPageAll + + expectedImageFilter := &models.ImageFilterType{ + Organized: &organized, + Path: &models.StringCriterionInput{ + Value: expectedRegex, + Modifier: models.CriterionModifierMatchesRegex, + }, + } + + expectedFindFilter := &models.FindFilterType{ + PerPage: &perPage, + } + + mockImageReader.On("Query", expectedImageFilter, expectedFindFilter).Return(images, len(images), nil).Once() + + for i := range matchingPaths { + imageID := i + 1 + mockImageReader.On("GetPerformerIDs", imageID).Return(nil, nil).Once() + mockImageReader.On("UpdatePerformers", imageID, []int{performerID}).Return(nil).Once() + } + + err := PerformerImages(&performer, nil, mockImageReader) + + assert := assert.New(t) + + assert.Nil(err) + mockImageReader.AssertExpectations(t) +} + +func TestPerformerGalleries(t *testing.T) { + type test struct { + performerName string + expectedRegex string + } + + performerNames := []test{ + { + "performer name", + `(?i)(?:^|_|[^\w\d])performer[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + { + "performer + name", + `(?i)(?:^|_|[^\w\d])performer[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + } + + for _, p := range performerNames { + testPerformerGalleries(t, p.performerName, p.expectedRegex) + } +} + +func testPerformerGalleries(t *testing.T, performerName, expectedRegex string) { + mockGalleryReader := &mocks.GalleryReaderWriter{} + + const performerID = 2 + + var galleries []*models.Gallery + matchingPaths, falsePaths := generateTestPaths(performerName, galleryExt) + for i, p := range append(matchingPaths, falsePaths...) 
{ + galleries = append(galleries, &models.Gallery{ + ID: i + 1, + Path: models.NullString(p), + }) + } + + performer := models.Performer{ + ID: performerID, + Name: models.NullString(performerName), + } + + organized := false + perPage := models.PerPageAll + + expectedGalleryFilter := &models.GalleryFilterType{ + Organized: &organized, + Path: &models.StringCriterionInput{ + Value: expectedRegex, + Modifier: models.CriterionModifierMatchesRegex, + }, + } + + expectedFindFilter := &models.FindFilterType{ + PerPage: &perPage, + } + + mockGalleryReader.On("Query", expectedGalleryFilter, expectedFindFilter).Return(galleries, len(galleries), nil).Once() + + for i := range matchingPaths { + galleryID := i + 1 + mockGalleryReader.On("GetPerformerIDs", galleryID).Return(nil, nil).Once() + mockGalleryReader.On("UpdatePerformers", galleryID, []int{performerID}).Return(nil).Once() + } + + err := PerformerGalleries(&performer, nil, mockGalleryReader) + + assert := assert.New(t) + + assert.Nil(err) + mockGalleryReader.AssertExpectations(t) +} diff --git a/pkg/autotag/scene.go b/pkg/autotag/scene.go new file mode 100644 index 000000000..272f5a9fe --- /dev/null +++ b/pkg/autotag/scene.go @@ -0,0 +1,117 @@ +package autotag + +import ( + "fmt" + "path/filepath" + "strings" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scene" +) + +func scenePathsFilter(paths []string) *models.SceneFilterType { + if paths == nil { + return nil + } + + sep := string(filepath.Separator) + + var ret *models.SceneFilterType + var or *models.SceneFilterType + for _, p := range paths { + newOr := &models.SceneFilterType{} + if or != nil { + or.Or = newOr + } else { + ret = newOr + } + + or = newOr + + if !strings.HasSuffix(p, sep) { + p = p + sep + } + + or.Path = &models.StringCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: p + "%", + } + } + + return ret +} + +func getMatchingScenes(name string, paths []string, sceneReader models.SceneReader) 
([]*models.Scene, error) { + regex := getPathQueryRegex(name) + organized := false + filter := models.SceneFilterType{ + Path: &models.StringCriterionInput{ + Value: "(?i)" + regex, + Modifier: models.CriterionModifierMatchesRegex, + }, + Organized: &organized, + } + + filter.And = scenePathsFilter(paths) + + pp := models.PerPageAll + scenes, _, err := sceneReader.Query(&filter, &models.FindFilterType{ + PerPage: &pp, + }) + + if err != nil { + return nil, fmt.Errorf("error querying scenes with regex '%s': %s", regex, err.Error()) + } + + var ret []*models.Scene + for _, p := range scenes { + if nameMatchesPath(name, p.Path) { + ret = append(ret, p) + } + } + + return ret, nil +} + +func getSceneFileTagger(s *models.Scene) tagger { + return tagger{ + ID: s.ID, + Type: "scene", + Name: s.GetTitle(), + Path: s.Path, + } +} + +// ScenePerformers tags the provided scene with performers whose name matches the scene's path. +func ScenePerformers(s *models.Scene, rw models.SceneReaderWriter, performerReader models.PerformerReader) error { + t := getSceneFileTagger(s) + + return t.tagPerformers(performerReader, func(subjectID, otherID int) (bool, error) { + return scene.AddPerformer(rw, subjectID, otherID) + }) +} + +// SceneStudios tags the provided scene with the first studio whose name matches the scene's path. +// +// Scenes will not be tagged if studio is already set. +func SceneStudios(s *models.Scene, rw models.SceneReaderWriter, studioReader models.StudioReader) error { + if s.StudioID.Valid { + // don't modify + return nil + } + + t := getSceneFileTagger(s) + + return t.tagStudios(studioReader, func(subjectID, otherID int) (bool, error) { + return addSceneStudio(rw, subjectID, otherID) + }) +} + +// SceneTags tags the provided scene with tags whose name matches the scene's path. 
+func SceneTags(s *models.Scene, rw models.SceneReaderWriter, tagReader models.TagReader) error { + t := getSceneFileTagger(s) + + return t.tagTags(tagReader, func(subjectID, otherID int) (bool, error) { + return scene.AddTag(rw, subjectID, otherID) + }) +} diff --git a/pkg/autotag/scene_test.go b/pkg/autotag/scene_test.go new file mode 100644 index 000000000..d2326522c --- /dev/null +++ b/pkg/autotag/scene_test.go @@ -0,0 +1,278 @@ +package autotag + +import ( + "fmt" + "strings" + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +const sceneExt = "mp4" + +var testSeparators = []string{ + ".", + "-", + "_", + " ", +} + +var testEndSeparators = []string{ + "{", + "}", + "(", + ")", + ",", +} + +func generateNamePatterns(name, separator, ext string) []string { + var ret []string + ret = append(ret, fmt.Sprintf("%s%saaa.%s", name, separator, ext)) + ret = append(ret, fmt.Sprintf("aaa%s%s.%s", separator, name, ext)) + ret = append(ret, fmt.Sprintf("aaa%s%s%sbbb.%s", separator, name, separator, ext)) + ret = append(ret, fmt.Sprintf("dir/%s%saaa.%s", name, separator, ext)) + ret = append(ret, fmt.Sprintf("dir\\%s%saaa.%s", name, separator, ext)) + ret = append(ret, fmt.Sprintf("%s%saaa/dir/bbb.%s", name, separator, ext)) + ret = append(ret, fmt.Sprintf("%s%saaa\\dir\\bbb.%s", name, separator, ext)) + ret = append(ret, fmt.Sprintf("dir/%s%s/aaa.%s", name, separator, ext)) + ret = append(ret, fmt.Sprintf("dir\\%s%s\\aaa.%s", name, separator, ext)) + + return ret +} + +func generateSplitNamePatterns(name, separator, ext string) []string { + var ret []string + splitted := strings.Split(name, " ") + // only do this for names that are split into two + if len(splitted) == 2 { + ret = append(ret, fmt.Sprintf("%s%s%s.%s", splitted[0], separator, splitted[1], ext)) + } + + return ret +} + +func generateFalseNamePatterns(name string, separator, ext 
string) []string { + splitted := strings.Split(name, " ") + + var ret []string + // only do this for names that are split into two + if len(splitted) == 2 { + ret = append(ret, fmt.Sprintf("%s%saaa%s%s.%s", splitted[0], separator, separator, splitted[1], ext)) + } + + return ret +} + +func generateTestPaths(testName, ext string) (scenePatterns []string, falseScenePatterns []string) { + separators := append(testSeparators, testEndSeparators...) + + for _, separator := range separators { + scenePatterns = append(scenePatterns, generateNamePatterns(testName, separator, ext)...) + scenePatterns = append(scenePatterns, generateNamePatterns(strings.ToLower(testName), separator, ext)...) + scenePatterns = append(scenePatterns, generateNamePatterns(strings.ReplaceAll(testName, " ", ""), separator, ext)...) + falseScenePatterns = append(falseScenePatterns, generateFalseNamePatterns(testName, separator, ext)...) + } + + // add test cases for intra-name separators + for _, separator := range testSeparators { + if separator != " " { + scenePatterns = append(scenePatterns, generateNamePatterns(strings.Replace(testName, " ", separator, -1), separator, ext)...) + } + } + + // add basic false scenarios + falseScenePatterns = append(falseScenePatterns, fmt.Sprintf("aaa%s.%s", testName, ext)) + falseScenePatterns = append(falseScenePatterns, fmt.Sprintf("%saaa.%s", testName, ext)) + + // add path separator false scenarios + falseScenePatterns = append(falseScenePatterns, generateFalseNamePatterns(testName, "/", ext)...) + falseScenePatterns = append(falseScenePatterns, generateFalseNamePatterns(testName, "\\", ext)...) + + // split patterns only valid for ._- and whitespace + for _, separator := range testSeparators { + scenePatterns = append(scenePatterns, generateSplitNamePatterns(testName, separator, ext)...) 
+ } + + // false patterns for other separators + for _, separator := range testEndSeparators { + falseScenePatterns = append(falseScenePatterns, generateSplitNamePatterns(testName, separator, ext)...) + } + + return +} + +type pathTestTable struct { + Path string + Matches bool +} + +func generateTestTable(testName, ext string) []pathTestTable { + var ret []pathTestTable + + var scenePatterns []string + var falseScenePatterns []string + + separators := append(testSeparators, testEndSeparators...) + + for _, separator := range separators { + scenePatterns = append(scenePatterns, generateNamePatterns(testName, separator, ext)...) + scenePatterns = append(scenePatterns, generateNamePatterns(strings.ToLower(testName), separator, ext)...) + falseScenePatterns = append(falseScenePatterns, generateFalseNamePatterns(testName, separator, ext)...) + } + + for _, p := range scenePatterns { + t := pathTestTable{ + Path: p, + Matches: true, + } + + ret = append(ret, t) + } + + for _, p := range falseScenePatterns { + t := pathTestTable{ + Path: p, + Matches: false, + } + + ret = append(ret, t) + } + + return ret +} + +func TestScenePerformers(t *testing.T) { + const sceneID = 1 + const performerName = "performer name" + const performerID = 2 + performer := models.Performer{ + ID: performerID, + Name: models.NullString(performerName), + } + + const reversedPerformerName = "name performer" + const reversedPerformerID = 3 + reversedPerformer := models.Performer{ + ID: reversedPerformerID, + Name: models.NullString(reversedPerformerName), + } + + testTables := generateTestTable(performerName, sceneExt) + + assert := assert.New(t) + + for _, test := range testTables { + mockPerformerReader := &mocks.PerformerReaderWriter{} + mockSceneReader := &mocks.SceneReaderWriter{} + + mockPerformerReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once() + + if test.Matches { + mockSceneReader.On("GetPerformerIDs", sceneID).Return(nil, 
nil).Once() + mockSceneReader.On("UpdatePerformers", sceneID, []int{performerID}).Return(nil).Once() + } + + scene := models.Scene{ + ID: sceneID, + Path: test.Path, + } + err := ScenePerformers(&scene, mockSceneReader, mockPerformerReader) + + assert.Nil(err) + mockPerformerReader.AssertExpectations(t) + mockSceneReader.AssertExpectations(t) + } +} + +func TestSceneStudios(t *testing.T) { + const sceneID = 1 + const studioName = "studio name" + const studioID = 2 + studio := models.Studio{ + ID: studioID, + Name: models.NullString(studioName), + } + + const reversedStudioName = "name studio" + const reversedStudioID = 3 + reversedStudio := models.Studio{ + ID: reversedStudioID, + Name: models.NullString(reversedStudioName), + } + + testTables := generateTestTable(studioName, sceneExt) + + assert := assert.New(t) + + for _, test := range testTables { + mockStudioReader := &mocks.StudioReaderWriter{} + mockSceneReader := &mocks.SceneReaderWriter{} + + mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once() + + if test.Matches { + mockSceneReader.On("Find", sceneID).Return(&models.Scene{}, nil).Once() + expectedStudioID := models.NullInt64(studioID) + mockSceneReader.On("Update", models.ScenePartial{ + ID: sceneID, + StudioID: &expectedStudioID, + }).Return(nil, nil).Once() + } + + scene := models.Scene{ + ID: sceneID, + Path: test.Path, + } + err := SceneStudios(&scene, mockSceneReader, mockStudioReader) + + assert.Nil(err) + mockStudioReader.AssertExpectations(t) + mockSceneReader.AssertExpectations(t) + } +} + +func TestSceneTags(t *testing.T) { + const sceneID = 1 + const tagName = "tag name" + const tagID = 2 + tag := models.Tag{ + ID: tagID, + Name: tagName, + } + + const reversedTagName = "name tag" + const reversedTagID = 3 + reversedTag := models.Tag{ + ID: reversedTagID, + Name: reversedTagName, + } + + testTables := generateTestTable(tagName, sceneExt) + + assert := assert.New(t) + + for _, test 
:= range testTables { + mockTagReader := &mocks.TagReaderWriter{} + mockSceneReader := &mocks.SceneReaderWriter{} + + mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once() + + if test.Matches { + mockSceneReader.On("GetTagIDs", sceneID).Return(nil, nil).Once() + mockSceneReader.On("UpdateTags", sceneID, []int{tagID}).Return(nil).Once() + } + + scene := models.Scene{ + ID: sceneID, + Path: test.Path, + } + err := SceneTags(&scene, mockSceneReader, mockTagReader) + + assert.Nil(err) + mockTagReader.AssertExpectations(t) + mockSceneReader.AssertExpectations(t) + } +} diff --git a/pkg/autotag/studio.go b/pkg/autotag/studio.go new file mode 100644 index 000000000..ba6309c5a --- /dev/null +++ b/pkg/autotag/studio.go @@ -0,0 +1,132 @@ +package autotag + +import ( + "database/sql" + + "github.com/stashapp/stash/pkg/models" +) + +func getMatchingStudios(path string, reader models.StudioReader) ([]*models.Studio, error) { + words := getPathWords(path) + candidates, err := reader.QueryForAutoTag(words) + + if err != nil { + return nil, err + } + + var ret []*models.Studio + for _, c := range candidates { + if nameMatchesPath(c.Name.String, path) { + ret = append(ret, c) + } + } + + return ret, nil +} + +func addSceneStudio(sceneWriter models.SceneReaderWriter, sceneID, studioID int) (bool, error) { + // don't set if already set + scene, err := sceneWriter.Find(sceneID) + if err != nil { + return false, err + } + + if scene.StudioID.Valid { + return false, nil + } + + // set the studio id + s := sql.NullInt64{Int64: int64(studioID), Valid: true} + scenePartial := models.ScenePartial{ + ID: sceneID, + StudioID: &s, + } + + if _, err := sceneWriter.Update(scenePartial); err != nil { + return false, err + } + return true, nil +} + +func addImageStudio(imageWriter models.ImageReaderWriter, imageID, studioID int) (bool, error) { + // don't set if already set + image, err := imageWriter.Find(imageID) + if err != nil { + return 
false, err + } + + if image.StudioID.Valid { + return false, nil + } + + // set the studio id + s := sql.NullInt64{Int64: int64(studioID), Valid: true} + imagePartial := models.ImagePartial{ + ID: imageID, + StudioID: &s, + } + + if _, err := imageWriter.Update(imagePartial); err != nil { + return false, err + } + return true, nil +} + +func addGalleryStudio(galleryWriter models.GalleryReaderWriter, galleryID, studioID int) (bool, error) { + // don't set if already set + gallery, err := galleryWriter.Find(galleryID) + if err != nil { + return false, err + } + + if gallery.StudioID.Valid { + return false, nil + } + + // set the studio id + s := sql.NullInt64{Int64: int64(studioID), Valid: true} + galleryPartial := models.GalleryPartial{ + ID: galleryID, + StudioID: &s, + } + + if _, err := galleryWriter.UpdatePartial(galleryPartial); err != nil { + return false, err + } + return true, nil +} + +func getStudioTagger(p *models.Studio) tagger { + return tagger{ + ID: p.ID, + Type: "studio", + Name: p.Name.String, + } +} + +// StudioScenes searches for scenes whose path matches the provided studio name and tags the scene with the studio, if studio is not already set on the scene. +func StudioScenes(p *models.Studio, paths []string, rw models.SceneReaderWriter) error { + t := getStudioTagger(p) + + return t.tagScenes(paths, rw, func(subjectID, otherID int) (bool, error) { + return addSceneStudio(rw, otherID, subjectID) + }) +} + +// StudioImages searches for images whose path matches the provided studio name and tags the image with the studio, if studio is not already set on the image. 
+func StudioImages(p *models.Studio, paths []string, rw models.ImageReaderWriter) error { + t := getStudioTagger(p) + + return t.tagImages(paths, rw, func(subjectID, otherID int) (bool, error) { + return addImageStudio(rw, otherID, subjectID) + }) +} + +// StudioGalleries searches for galleries whose path matches the provided studio name and tags the gallery with the studio, if studio is not already set on the gallery. +func StudioGalleries(p *models.Studio, paths []string, rw models.GalleryReaderWriter) error { + t := getStudioTagger(p) + + return t.tagGalleries(paths, rw, func(subjectID, otherID int) (bool, error) { + return addGalleryStudio(rw, otherID, subjectID) + }) +} diff --git a/pkg/autotag/studio_test.go b/pkg/autotag/studio_test.go new file mode 100644 index 000000000..886ea1361 --- /dev/null +++ b/pkg/autotag/studio_test.go @@ -0,0 +1,237 @@ +package autotag + +import ( + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stretchr/testify/assert" +) + +func TestStudioScenes(t *testing.T) { + type test struct { + studioName string + expectedRegex string + } + + studioNames := []test{ + { + "studio name", + `(?i)(?:^|_|[^\w\d])studio[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + { + "studio + name", + `(?i)(?:^|_|[^\w\d])studio[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + } + + for _, p := range studioNames { + testStudioScenes(t, p.studioName, p.expectedRegex) + } +} + +func testStudioScenes(t *testing.T, studioName, expectedRegex string) { + mockSceneReader := &mocks.SceneReaderWriter{} + + const studioID = 2 + + var scenes []*models.Scene + matchingPaths, falsePaths := generateTestPaths(studioName, sceneExt) + for i, p := range append(matchingPaths, falsePaths...) 
{ + scenes = append(scenes, &models.Scene{ + ID: i + 1, + Path: p, + }) + } + + studio := models.Studio{ + ID: studioID, + Name: models.NullString(studioName), + } + + organized := false + perPage := models.PerPageAll + + expectedSceneFilter := &models.SceneFilterType{ + Organized: &organized, + Path: &models.StringCriterionInput{ + Value: expectedRegex, + Modifier: models.CriterionModifierMatchesRegex, + }, + } + + expectedFindFilter := &models.FindFilterType{ + PerPage: &perPage, + } + + mockSceneReader.On("Query", expectedSceneFilter, expectedFindFilter).Return(scenes, len(scenes), nil).Once() + + for i := range matchingPaths { + sceneID := i + 1 + mockSceneReader.On("Find", sceneID).Return(&models.Scene{}, nil).Once() + expectedStudioID := models.NullInt64(studioID) + mockSceneReader.On("Update", models.ScenePartial{ + ID: sceneID, + StudioID: &expectedStudioID, + }).Return(nil, nil).Once() + } + + err := StudioScenes(&studio, nil, mockSceneReader) + + assert := assert.New(t) + + assert.Nil(err) + mockSceneReader.AssertExpectations(t) +} + +func TestStudioImages(t *testing.T) { + type test struct { + studioName string + expectedRegex string + } + + studioNames := []test{ + { + "studio name", + `(?i)(?:^|_|[^\w\d])studio[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + { + "studio + name", + `(?i)(?:^|_|[^\w\d])studio[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + } + + for _, p := range studioNames { + testStudioImages(t, p.studioName, p.expectedRegex) + } +} + +func testStudioImages(t *testing.T, studioName, expectedRegex string) { + mockImageReader := &mocks.ImageReaderWriter{} + + const studioID = 2 + + var images []*models.Image + matchingPaths, falsePaths := generateTestPaths(studioName, imageExt) + for i, p := range append(matchingPaths, falsePaths...) 
{ + images = append(images, &models.Image{ + ID: i + 1, + Path: p, + }) + } + + studio := models.Studio{ + ID: studioID, + Name: models.NullString(studioName), + } + + organized := false + perPage := models.PerPageAll + + expectedImageFilter := &models.ImageFilterType{ + Organized: &organized, + Path: &models.StringCriterionInput{ + Value: expectedRegex, + Modifier: models.CriterionModifierMatchesRegex, + }, + } + + expectedFindFilter := &models.FindFilterType{ + PerPage: &perPage, + } + + mockImageReader.On("Query", expectedImageFilter, expectedFindFilter).Return(images, len(images), nil).Once() + + for i := range matchingPaths { + imageID := i + 1 + mockImageReader.On("Find", imageID).Return(&models.Image{}, nil).Once() + expectedStudioID := models.NullInt64(studioID) + mockImageReader.On("Update", models.ImagePartial{ + ID: imageID, + StudioID: &expectedStudioID, + }).Return(nil, nil).Once() + } + + err := StudioImages(&studio, nil, mockImageReader) + + assert := assert.New(t) + + assert.Nil(err) + mockImageReader.AssertExpectations(t) +} + +func TestStudioGalleries(t *testing.T) { + type test struct { + studioName string + expectedRegex string + } + + studioNames := []test{ + { + "studio name", + `(?i)(?:^|_|[^\w\d])studio[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + { + "studio + name", + `(?i)(?:^|_|[^\w\d])studio[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + } + + for _, p := range studioNames { + testStudioGalleries(t, p.studioName, p.expectedRegex) + } +} + +func testStudioGalleries(t *testing.T, studioName, expectedRegex string) { + mockGalleryReader := &mocks.GalleryReaderWriter{} + + const studioID = 2 + + var galleries []*models.Gallery + matchingPaths, falsePaths := generateTestPaths(studioName, galleryExt) + for i, p := range append(matchingPaths, falsePaths...) 
{ + galleries = append(galleries, &models.Gallery{ + ID: i + 1, + Path: models.NullString(p), + }) + } + + studio := models.Studio{ + ID: studioID, + Name: models.NullString(studioName), + } + + organized := false + perPage := models.PerPageAll + + expectedGalleryFilter := &models.GalleryFilterType{ + Organized: &organized, + Path: &models.StringCriterionInput{ + Value: expectedRegex, + Modifier: models.CriterionModifierMatchesRegex, + }, + } + + expectedFindFilter := &models.FindFilterType{ + PerPage: &perPage, + } + + mockGalleryReader.On("Query", expectedGalleryFilter, expectedFindFilter).Return(galleries, len(galleries), nil).Once() + + for i := range matchingPaths { + galleryID := i + 1 + mockGalleryReader.On("Find", galleryID).Return(&models.Gallery{}, nil).Once() + expectedStudioID := models.NullInt64(studioID) + mockGalleryReader.On("UpdatePartial", models.GalleryPartial{ + ID: galleryID, + StudioID: &expectedStudioID, + }).Return(nil, nil).Once() + } + + err := StudioGalleries(&studio, nil, mockGalleryReader) + + assert := assert.New(t) + + assert.Nil(err) + mockGalleryReader.AssertExpectations(t) +} diff --git a/pkg/autotag/tag.go b/pkg/autotag/tag.go new file mode 100644 index 000000000..2f8f74841 --- /dev/null +++ b/pkg/autotag/tag.go @@ -0,0 +1,61 @@ +package autotag + +import ( + "github.com/stashapp/stash/pkg/gallery" + "github.com/stashapp/stash/pkg/image" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scene" +) + +func getMatchingTags(path string, tagReader models.TagReader) ([]*models.Tag, error) { + words := getPathWords(path) + tags, err := tagReader.QueryForAutoTag(words) + + if err != nil { + return nil, err + } + + var ret []*models.Tag + for _, p := range tags { + if nameMatchesPath(p.Name, path) { + ret = append(ret, p) + } + } + + return ret, nil +} + +func getTagTagger(p *models.Tag) tagger { + return tagger{ + ID: p.ID, + Type: "tag", + Name: p.Name, + } +} + +// TagScenes searches for scenes whose path matches 
the provided tag name and tags the scene with the tag. +func TagScenes(p *models.Tag, paths []string, rw models.SceneReaderWriter) error { + t := getTagTagger(p) + + return t.tagScenes(paths, rw, func(subjectID, otherID int) (bool, error) { + return scene.AddTag(rw, otherID, subjectID) + }) +} + +// TagImages searches for images whose path matches the provided tag name and tags the image with the tag. +func TagImages(p *models.Tag, paths []string, rw models.ImageReaderWriter) error { + t := getTagTagger(p) + + return t.tagImages(paths, rw, func(subjectID, otherID int) (bool, error) { + return image.AddTag(rw, otherID, subjectID) + }) +} + +// TagGalleries searches for galleries whose path matches the provided tag name and tags the gallery with the tag. +func TagGalleries(p *models.Tag, paths []string, rw models.GalleryReaderWriter) error { + t := getTagTagger(p) + + return t.tagGalleries(paths, rw, func(subjectID, otherID int) (bool, error) { + return gallery.AddTag(rw, otherID, subjectID) + }) +} diff --git a/pkg/autotag/tag_test.go b/pkg/autotag/tag_test.go new file mode 100644 index 000000000..7e70926cb --- /dev/null +++ b/pkg/autotag/tag_test.go @@ -0,0 +1,225 @@ +package autotag + +import ( + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stretchr/testify/assert" +) + +func TestTagScenes(t *testing.T) { + type test struct { + tagName string + expectedRegex string + } + + tagNames := []test{ + { + "tag name", + `(?i)(?:^|_|[^\w\d])tag[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + { + "tag + name", + `(?i)(?:^|_|[^\w\d])tag[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + } + + for _, p := range tagNames { + testTagScenes(t, p.tagName, p.expectedRegex) + } +} + +func testTagScenes(t *testing.T, tagName, expectedRegex string) { + mockSceneReader := &mocks.SceneReaderWriter{} + + const tagID = 2 + + var scenes []*models.Scene + matchingPaths, falsePaths := generateTestPaths(tagName, "mp4") + for i, p := range 
append(matchingPaths, falsePaths...) { + scenes = append(scenes, &models.Scene{ + ID: i + 1, + Path: p, + }) + } + + tag := models.Tag{ + ID: tagID, + Name: tagName, + } + + organized := false + perPage := models.PerPageAll + + expectedSceneFilter := &models.SceneFilterType{ + Organized: &organized, + Path: &models.StringCriterionInput{ + Value: expectedRegex, + Modifier: models.CriterionModifierMatchesRegex, + }, + } + + expectedFindFilter := &models.FindFilterType{ + PerPage: &perPage, + } + + mockSceneReader.On("Query", expectedSceneFilter, expectedFindFilter).Return(scenes, len(scenes), nil).Once() + + for i := range matchingPaths { + sceneID := i + 1 + mockSceneReader.On("GetTagIDs", sceneID).Return(nil, nil).Once() + mockSceneReader.On("UpdateTags", sceneID, []int{tagID}).Return(nil).Once() + } + + err := TagScenes(&tag, nil, mockSceneReader) + + assert := assert.New(t) + + assert.Nil(err) + mockSceneReader.AssertExpectations(t) +} + +func TestTagImages(t *testing.T) { + type test struct { + tagName string + expectedRegex string + } + + tagNames := []test{ + { + "tag name", + `(?i)(?:^|_|[^\w\d])tag[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + { + "tag + name", + `(?i)(?:^|_|[^\w\d])tag[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + } + + for _, p := range tagNames { + testTagImages(t, p.tagName, p.expectedRegex) + } +} + +func testTagImages(t *testing.T, tagName, expectedRegex string) { + mockImageReader := &mocks.ImageReaderWriter{} + + const tagID = 2 + + var images []*models.Image + matchingPaths, falsePaths := generateTestPaths(tagName, "mp4") + for i, p := range append(matchingPaths, falsePaths...) 
{ + images = append(images, &models.Image{ + ID: i + 1, + Path: p, + }) + } + + tag := models.Tag{ + ID: tagID, + Name: tagName, + } + + organized := false + perPage := models.PerPageAll + + expectedImageFilter := &models.ImageFilterType{ + Organized: &organized, + Path: &models.StringCriterionInput{ + Value: expectedRegex, + Modifier: models.CriterionModifierMatchesRegex, + }, + } + + expectedFindFilter := &models.FindFilterType{ + PerPage: &perPage, + } + + mockImageReader.On("Query", expectedImageFilter, expectedFindFilter).Return(images, len(images), nil).Once() + + for i := range matchingPaths { + imageID := i + 1 + mockImageReader.On("GetTagIDs", imageID).Return(nil, nil).Once() + mockImageReader.On("UpdateTags", imageID, []int{tagID}).Return(nil).Once() + } + + err := TagImages(&tag, nil, mockImageReader) + + assert := assert.New(t) + + assert.Nil(err) + mockImageReader.AssertExpectations(t) +} + +func TestTagGalleries(t *testing.T) { + type test struct { + tagName string + expectedRegex string + } + + tagNames := []test{ + { + "tag name", + `(?i)(?:^|_|[^\w\d])tag[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + { + "tag + name", + `(?i)(?:^|_|[^\w\d])tag[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`, + }, + } + + for _, p := range tagNames { + testTagGalleries(t, p.tagName, p.expectedRegex) + } +} + +func testTagGalleries(t *testing.T, tagName, expectedRegex string) { + mockGalleryReader := &mocks.GalleryReaderWriter{} + + const tagID = 2 + + var galleries []*models.Gallery + matchingPaths, falsePaths := generateTestPaths(tagName, "mp4") + for i, p := range append(matchingPaths, falsePaths...) 
{ + galleries = append(galleries, &models.Gallery{ + ID: i + 1, + Path: models.NullString(p), + }) + } + + tag := models.Tag{ + ID: tagID, + Name: tagName, + } + + organized := false + perPage := models.PerPageAll + + expectedGalleryFilter := &models.GalleryFilterType{ + Organized: &organized, + Path: &models.StringCriterionInput{ + Value: expectedRegex, + Modifier: models.CriterionModifierMatchesRegex, + }, + } + + expectedFindFilter := &models.FindFilterType{ + PerPage: &perPage, + } + + mockGalleryReader.On("Query", expectedGalleryFilter, expectedFindFilter).Return(galleries, len(galleries), nil).Once() + + for i := range matchingPaths { + galleryID := i + 1 + mockGalleryReader.On("GetTagIDs", galleryID).Return(nil, nil).Once() + mockGalleryReader.On("UpdateTags", galleryID, []int{tagID}).Return(nil).Once() + } + + err := TagGalleries(&tag, nil, mockGalleryReader) + + assert := assert.New(t) + + assert.Nil(err) + mockGalleryReader.AssertExpectations(t) +} diff --git a/pkg/autotag/tagger.go b/pkg/autotag/tagger.go new file mode 100644 index 000000000..9d2759f6c --- /dev/null +++ b/pkg/autotag/tagger.go @@ -0,0 +1,240 @@ +// Package autotag provides methods to auto-tag scenes with performers, +// studios and tags. +// +// The autotag engine tags scenes with performers/studios/tags if the scene's +// path matches the performer/studio/tag name. A scene's path is considered +// a match if it contains the performer/studio/tag's full name, ignoring any +// '.', '-', '_' characters in the path. +// +// For example, for a performer "foo bar", the following paths would be +// considered a match: "foo bar.mp4", "foobar.mp4", "foo.bar.mp4", +// "foo-bar.mp4", "aaa.foo bar.bbb.mp4". 
+// The following would not be considered a match: +// "aafoo bar.mp4", "foo barbb.mp4", "foo/bar.mp4" +package autotag + +import ( + "fmt" + "path/filepath" + "regexp" + "strings" + + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" +) + +const separatorChars = `.\-_ ` + +// fixes #1292 +func escapePathRegex(name string) string { + ret := name + + chars := `+*?()|[]{}^$` + for _, c := range chars { + cStr := string(c) + ret = strings.ReplaceAll(ret, cStr, `\`+cStr) + } + + return ret +} + +func getPathQueryRegex(name string) string { + // escape specific regex characters + name = escapePathRegex(name) + + // handle path separators + const separator = `[` + separatorChars + `]` + + ret := strings.Replace(name, " ", separator+"*", -1) + ret = `(?:^|_|[^\w\d])` + ret + `(?:$|_|[^\w\d])` + return ret +} + +func nameMatchesPath(name, path string) bool { + // escape specific regex characters + name = escapePathRegex(name) + + name = strings.ToLower(name) + path = strings.ToLower(path) + + // handle path separators + const separator = `[` + separatorChars + `]` + + reStr := strings.Replace(name, " ", separator+"*", -1) + reStr = `(?:^|_|[^\w\d])` + reStr + `(?:$|_|[^\w\d])` + + re := regexp.MustCompile(reStr) + return re.MatchString(path) +} + +func getPathWords(path string) []string { + retStr := path + + // remove the extension + ext := filepath.Ext(retStr) + if ext != "" { + retStr = strings.TrimSuffix(retStr, ext) + } + + // handle path separators + const separator = `(?:_|[^\w\d])+` + re := regexp.MustCompile(separator) + retStr = re.ReplaceAllString(retStr, " ") + + words := strings.Split(retStr, " ") + + // remove any single letter words + var ret []string + for _, w := range words { + if len(w) > 1 { + ret = append(ret, w) + } + } + + return ret +} + +type tagger struct { + ID int + Type string + Name string + Path string +} + +type addLinkFunc func(subjectID, otherID int) (bool, error) + +func (t *tagger) addError(otherType, 
otherName string, err error) error { + return fmt.Errorf("error adding %s '%s' to %s '%s': %s", otherType, otherName, t.Type, t.Name, err.Error()) +} + +func (t *tagger) addLog(otherType, otherName string) { + logger.Infof("Added %s '%s' to %s '%s'", otherType, otherName, t.Type, t.Name) +} + +func (t *tagger) tagPerformers(performerReader models.PerformerReader, addFunc addLinkFunc) error { + others, err := getMatchingPerformers(t.Path, performerReader) + if err != nil { + return err + } + + for _, p := range others { + added, err := addFunc(t.ID, p.ID) + + if err != nil { + return t.addError("performer", p.Name.String, err) + } + + if added { + t.addLog("performer", p.Name.String) + } + } + + return nil +} + +func (t *tagger) tagStudios(studioReader models.StudioReader, addFunc addLinkFunc) error { + others, err := getMatchingStudios(t.Path, studioReader) + if err != nil { + return err + } + + // only add first studio + if len(others) > 0 { + studio := others[0] + added, err := addFunc(t.ID, studio.ID) + + if err != nil { + return t.addError("studio", studio.Name.String, err) + } + + if added { + t.addLog("studio", studio.Name.String) + } + } + + return nil +} + +func (t *tagger) tagTags(tagReader models.TagReader, addFunc addLinkFunc) error { + others, err := getMatchingTags(t.Path, tagReader) + if err != nil { + return err + } + + for _, p := range others { + added, err := addFunc(t.ID, p.ID) + + if err != nil { + return t.addError("tag", p.Name, err) + } + + if added { + t.addLog("tag", p.Name) + } + } + + return nil +} + +func (t *tagger) tagScenes(paths []string, sceneReader models.SceneReader, addFunc addLinkFunc) error { + others, err := getMatchingScenes(t.Name, paths, sceneReader) + if err != nil { + return err + } + + for _, p := range others { + added, err := addFunc(t.ID, p.ID) + + if err != nil { + return t.addError("scene", p.GetTitle(), err) + } + + if added { + t.addLog("scene", p.GetTitle()) + } + } + + return nil +} + +func (t *tagger) 
tagImages(paths []string, imageReader models.ImageReader, addFunc addLinkFunc) error { + others, err := getMatchingImages(t.Name, paths, imageReader) + if err != nil { + return err + } + + for _, p := range others { + added, err := addFunc(t.ID, p.ID) + + if err != nil { + return t.addError("image", p.GetTitle(), err) + } + + if added { + t.addLog("image", p.GetTitle()) + } + } + + return nil +} + +func (t *tagger) tagGalleries(paths []string, galleryReader models.GalleryReader, addFunc addLinkFunc) error { + others, err := getMatchingGalleries(t.Name, paths, galleryReader) + if err != nil { + return err + } + + for _, p := range others { + added, err := addFunc(t.ID, p.ID) + + if err != nil { + return t.addError("gallery", p.GetTitle(), err) + } + + if added { + t.addLog("gallery", p.GetTitle()) + } + } + + return nil +} diff --git a/pkg/database/database.go b/pkg/database/database.go index e4099b073..e3ddff607 100644 --- a/pkg/database/database.go +++ b/pkg/database/database.go @@ -23,11 +23,30 @@ import ( var DB *sqlx.DB var WriteMu *sync.Mutex var dbPath string -var appSchemaVersion uint = 19 +var appSchemaVersion uint = 22 var databaseSchemaVersion uint +var ( + // ErrMigrationNeeded indicates that a database migration is needed + // before the database can be initialized + ErrMigrationNeeded = errors.New("database migration required") + + // ErrDatabaseNotInitialized indicates that the database is not + // initialized, usually due to an incomplete configuration. + ErrDatabaseNotInitialized = errors.New("database not initialized") +) + const sqlite3Driver = "sqlite3ex" +// Ready returns an error if the database is not ready to begin transactions. +func Ready() error { + if DB == nil { + return ErrDatabaseNotInitialized + } + + return nil +} + func init() { // register custom driver with regexp function registerCustomDriver() @@ -37,20 +56,20 @@ func init() { // performs a full migration to the latest schema version. 
Otherwise, any // necessary migrations must be run separately using RunMigrations. // Returns true if the database is new. -func Initialize(databasePath string) bool { +func Initialize(databasePath string) error { dbPath = databasePath if err := getDatabaseSchemaVersion(); err != nil { - panic(err) + return fmt.Errorf("error getting database schema version: %s", err.Error()) } if databaseSchemaVersion == 0 { // new database, just run the migrations if err := RunMigrations(); err != nil { - panic(err) + return fmt.Errorf("error running initial schema migrations: %s", err.Error()) } // RunMigrations calls Initialise. Just return - return true + return nil } else { if databaseSchemaVersion > appSchemaVersion { panic(fmt.Sprintf("Database schema version %d is incompatible with required schema version %d", databaseSchemaVersion, appSchemaVersion)) @@ -59,7 +78,7 @@ func Initialize(databasePath string) bool { // if migration is needed, then don't open the connection if NeedsMigration() { logger.Warnf("Database schema version %d does not match required schema version %d.", databaseSchemaVersion, appSchemaVersion) - return false + return nil } } @@ -67,7 +86,7 @@ func Initialize(databasePath string) bool { DB = open(databasePath, disableForeignKeys) WriteMu = &sync.Mutex{} - return false + return nil } func open(databasePath string, disableForeignKeys bool) *sqlx.DB { @@ -150,6 +169,10 @@ func AppSchemaVersion() uint { return appSchemaVersion } +func DatabasePath() string { + return dbPath +} + func DatabaseBackupPath() string { return fmt.Sprintf("%s.%d.%s", dbPath, databaseSchemaVersion, time.Now().Format("20060102_150405")) } diff --git a/pkg/database/migrations/20_phash.up.sql b/pkg/database/migrations/20_phash.up.sql new file mode 100644 index 000000000..c1c889956 --- /dev/null +++ b/pkg/database/migrations/20_phash.up.sql @@ -0,0 +1 @@ +ALTER TABLE `scenes` ADD COLUMN `phash` blob; diff --git a/pkg/database/migrations/21_performers_studios_details.up.sql 
b/pkg/database/migrations/21_performers_studios_details.up.sql new file mode 100644 index 000000000..d41cf4779 --- /dev/null +++ b/pkg/database/migrations/21_performers_studios_details.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE `performers` ADD COLUMN `details` text; +ALTER TABLE `performers` ADD COLUMN `death_date` date; +ALTER TABLE `performers` ADD COLUMN `hair_color` varchar(255); +ALTER TABLE `performers` ADD COLUMN `weight` integer; +ALTER TABLE `studios` ADD COLUMN `details` text; \ No newline at end of file diff --git a/pkg/database/migrations/22_performers_studios_rating.up.sql b/pkg/database/migrations/22_performers_studios_rating.up.sql new file mode 100644 index 000000000..d87d08f65 --- /dev/null +++ b/pkg/database/migrations/22_performers_studios_rating.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE `performers` ADD COLUMN `rating` tinyint; +ALTER TABLE `studios` ADD COLUMN `rating` tinyint; diff --git a/pkg/ffmpeg/encoder.go b/pkg/ffmpeg/encoder.go index 5beb09410..9d96dadf3 100644 --- a/pkg/ffmpeg/encoder.go +++ b/pkg/ffmpeg/encoder.go @@ -1,7 +1,7 @@ package ffmpeg import ( - "fmt" + "bytes" "io/ioutil" "os" "os/exec" @@ -62,7 +62,7 @@ func KillRunningEncoders(path string) { for _, process := range processes { // assume it worked, don't check for error - fmt.Printf("Killing encoder process for file: %s", path) + logger.Infof("Killing encoder process for file: %s", path) process.Kill() // wait for the process to die before returning @@ -82,7 +82,8 @@ func KillRunningEncoders(path string) { } } -func (e *Encoder) run(probeResult VideoFile, args []string) (string, error) { +// FFmpeg runner with progress output, used for transcodes +func (e *Encoder) runTranscode(probeResult VideoFile, args []string) (string, error) { cmd := exec.Command(e.Path, args...) 
stderr, err := cmd.StderrPipe() @@ -137,3 +138,26 @@ func (e *Encoder) run(probeResult VideoFile, args []string) (string, error) { return stdoutString, nil } + +func (e *Encoder) run(probeResult VideoFile, args []string) (string, error) { + cmd := exec.Command(e.Path, args...) + + var stdout, stderr bytes.Buffer + cmd.Stdout = &stdout + cmd.Stderr = &stderr + + if err := cmd.Start(); err != nil { + return "", err + } + + registerRunningEncoder(probeResult.Path, cmd.Process) + err := waitAndDeregister(probeResult.Path, cmd) + + if err != nil { + // error message should be in the stderr stream + logger.Errorf("ffmpeg error when running command <%s>: %s", strings.Join(cmd.Args, " "), stderr.String()) + return stdout.String(), err + } + + return stdout.String(), nil +} diff --git a/pkg/ffmpeg/encoder_sprite_screenshot.go b/pkg/ffmpeg/encoder_sprite_screenshot.go new file mode 100644 index 000000000..c1a87788e --- /dev/null +++ b/pkg/ffmpeg/encoder_sprite_screenshot.go @@ -0,0 +1,38 @@ +package ffmpeg + +import ( + "fmt" + "image" + "strings" +) + +type SpriteScreenshotOptions struct { + Time float64 + Width int +} + +func (e *Encoder) SpriteScreenshot(probeResult VideoFile, options SpriteScreenshotOptions) (image.Image, error) { + args := []string{ + "-v", "error", + "-ss", fmt.Sprintf("%v", options.Time), + "-i", probeResult.Path, + "-vframes", "1", + "-vf", fmt.Sprintf("scale=%v:-1", options.Width), + "-c:v", "bmp", + "-f", "rawvideo", + "-", + } + data, err := e.run(probeResult, args) + if err != nil { + return nil, err + } + + reader := strings.NewReader(data) + + img, _, err := image.Decode(reader) + if err != nil { + return nil, err + } + + return img, err +} diff --git a/pkg/ffmpeg/encoder_transcode.go b/pkg/ffmpeg/encoder_transcode.go index 1349bac70..235fb6959 100644 --- a/pkg/ffmpeg/encoder_transcode.go +++ b/pkg/ffmpeg/encoder_transcode.go @@ -64,7 +64,7 @@ func (e *Encoder) Transcode(probeResult VideoFile, options TranscodeOptions) { "-strict", "-2", 
options.OutputPath, } - _, _ = e.run(probeResult, args) + _, _ = e.runTranscode(probeResult, args) } //transcode the video, remove the audio @@ -84,7 +84,7 @@ func (e *Encoder) TranscodeVideo(probeResult VideoFile, options TranscodeOptions "-vf", "scale=" + scale, options.OutputPath, } - _, _ = e.run(probeResult, args) + _, _ = e.runTranscode(probeResult, args) } //copy the video stream as is, transcode audio @@ -96,7 +96,7 @@ func (e *Encoder) TranscodeAudio(probeResult VideoFile, options TranscodeOptions "-strict", "-2", options.OutputPath, } - _, _ = e.run(probeResult, args) + _, _ = e.runTranscode(probeResult, args) } //copy the video stream as is, drop audio @@ -107,5 +107,5 @@ func (e *Encoder) CopyVideo(probeResult VideoFile, options TranscodeOptions) { "-c:v", "copy", options.OutputPath, } - _, _ = e.run(probeResult, args) + _, _ = e.runTranscode(probeResult, args) } diff --git a/pkg/gallery/query.go b/pkg/gallery/query.go new file mode 100644 index 000000000..6cae24321 --- /dev/null +++ b/pkg/gallery/query.go @@ -0,0 +1,40 @@ +package gallery + +import ( + "strconv" + + "github.com/stashapp/stash/pkg/models" +) + +func CountByPerformerID(r models.GalleryReader, id int) (int, error) { + filter := &models.GalleryFilterType{ + Performers: &models.MultiCriterionInput{ + Value: []string{strconv.Itoa(id)}, + Modifier: models.CriterionModifierIncludes, + }, + } + + return r.QueryCount(filter, nil) +} + +func CountByStudioID(r models.GalleryReader, id int) (int, error) { + filter := &models.GalleryFilterType{ + Studios: &models.MultiCriterionInput{ + Value: []string{strconv.Itoa(id)}, + Modifier: models.CriterionModifierIncludes, + }, + } + + return r.QueryCount(filter, nil) +} + +func CountByTagID(r models.GalleryReader, id int) (int, error) { + filter := &models.GalleryFilterType{ + Tags: &models.MultiCriterionInput{ + Value: []string{strconv.Itoa(id)}, + Modifier: models.CriterionModifierIncludes, + }, + } + + return r.QueryCount(filter, nil) +} diff --git 
a/pkg/gallery/update.go b/pkg/gallery/update.go index 6befc5b1c..9355282a1 100644 --- a/pkg/gallery/update.go +++ b/pkg/gallery/update.go @@ -21,3 +21,43 @@ func AddImage(qb models.GalleryReaderWriter, galleryID int, imageID int) error { imageIDs = utils.IntAppendUnique(imageIDs, imageID) return qb.UpdateImages(galleryID, imageIDs) } + +func AddPerformer(qb models.GalleryReaderWriter, id int, performerID int) (bool, error) { + performerIDs, err := qb.GetPerformerIDs(id) + if err != nil { + return false, err + } + + oldLen := len(performerIDs) + performerIDs = utils.IntAppendUnique(performerIDs, performerID) + + if len(performerIDs) != oldLen { + if err := qb.UpdatePerformers(id, performerIDs); err != nil { + return false, err + } + + return true, nil + } + + return false, nil +} + +func AddTag(qb models.GalleryReaderWriter, id int, tagID int) (bool, error) { + tagIDs, err := qb.GetTagIDs(id) + if err != nil { + return false, err + } + + oldLen := len(tagIDs) + tagIDs = utils.IntAppendUnique(tagIDs, tagID) + + if len(tagIDs) != oldLen { + if err := qb.UpdateTags(id, tagIDs); err != nil { + return false, err + } + + return true, nil + } + + return false, nil +} diff --git a/pkg/image/image.go b/pkg/image/image.go index d54271d66..64b766e6d 100644 --- a/pkg/image/image.go +++ b/pkg/image/image.go @@ -257,3 +257,10 @@ func GetTitle(s *models.Image) string { _, fn := getFilePath(s.Path) return filepath.Base(fn) } + +// GetFilename gets the base name of the image file +// If stripExt is set the file extension is omitted from the name +func GetFilename(s *models.Image, stripExt bool) string { + _, fn := getFilePath(s.Path) + return utils.GetNameFromPath(fn, stripExt) +} diff --git a/pkg/image/query.go b/pkg/image/query.go new file mode 100644 index 000000000..58e276632 --- /dev/null +++ b/pkg/image/query.go @@ -0,0 +1,40 @@ +package image + +import ( + "strconv" + + "github.com/stashapp/stash/pkg/models" +) + +func CountByPerformerID(r models.ImageReader, id int) (int, 
error) { + filter := &models.ImageFilterType{ + Performers: &models.MultiCriterionInput{ + Value: []string{strconv.Itoa(id)}, + Modifier: models.CriterionModifierIncludes, + }, + } + + return r.QueryCount(filter, nil) +} + +func CountByStudioID(r models.ImageReader, id int) (int, error) { + filter := &models.ImageFilterType{ + Studios: &models.MultiCriterionInput{ + Value: []string{strconv.Itoa(id)}, + Modifier: models.CriterionModifierIncludes, + }, + } + + return r.QueryCount(filter, nil) +} + +func CountByTagID(r models.ImageReader, id int) (int, error) { + filter := &models.ImageFilterType{ + Tags: &models.MultiCriterionInput{ + Value: []string{strconv.Itoa(id)}, + Modifier: models.CriterionModifierIncludes, + }, + } + + return r.QueryCount(filter, nil) +} diff --git a/pkg/image/update.go b/pkg/image/update.go index 3728d3187..95b80d697 100644 --- a/pkg/image/update.go +++ b/pkg/image/update.go @@ -1,6 +1,9 @@ package image -import "github.com/stashapp/stash/pkg/models" +import ( + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) func UpdateFileModTime(qb models.ImageWriter, id int, modTime models.NullSQLiteTimestamp) (*models.Image, error) { return qb.Update(models.ImagePartial{ @@ -8,3 +11,43 @@ func UpdateFileModTime(qb models.ImageWriter, id int, modTime models.NullSQLiteT FileModTime: &modTime, }) } + +func AddPerformer(qb models.ImageReaderWriter, id int, performerID int) (bool, error) { + performerIDs, err := qb.GetPerformerIDs(id) + if err != nil { + return false, err + } + + oldLen := len(performerIDs) + performerIDs = utils.IntAppendUnique(performerIDs, performerID) + + if len(performerIDs) != oldLen { + if err := qb.UpdatePerformers(id, performerIDs); err != nil { + return false, err + } + + return true, nil + } + + return false, nil +} + +func AddTag(qb models.ImageReaderWriter, id int, tagID int) (bool, error) { + tagIDs, err := qb.GetTagIDs(id) + if err != nil { + return false, err + } + + oldLen := len(tagIDs) + 
tagIDs = utils.IntAppendUnique(tagIDs, tagID) + + if len(tagIDs) != oldLen { + if err := qb.UpdateTags(id, tagIDs); err != nil { + return false, err + } + + return true, nil + } + + return false, nil +} diff --git a/pkg/manager/apikey.go b/pkg/manager/apikey.go new file mode 100644 index 000000000..a01a9b221 --- /dev/null +++ b/pkg/manager/apikey.go @@ -0,0 +1,55 @@ +package manager + +import ( + "errors" + "time" + + "github.com/dgrijalva/jwt-go" + "github.com/stashapp/stash/pkg/manager/config" +) + +var ErrInvalidToken = errors.New("invalid apikey") + +const APIKeySubject = "APIKey" + +type APIKeyClaims struct { + UserID string `json:"uid"` + jwt.StandardClaims +} + +func GenerateAPIKey(userID string) (string, error) { + claims := &APIKeyClaims{ + UserID: userID, + StandardClaims: jwt.StandardClaims{ + Subject: APIKeySubject, + IssuedAt: time.Now().Unix(), + }, + } + + token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) + + ss, err := token.SignedString(config.GetInstance().GetJWTSignKey()) + if err != nil { + return "", err + } + + return ss, nil +} + +// GetUserIDFromAPIKey validates the provided api key and returns the user ID +func GetUserIDFromAPIKey(apiKey string) (string, error) { + claims := &APIKeyClaims{} + token, err := jwt.ParseWithClaims(apiKey, claims, func(t *jwt.Token) (interface{}, error) { + return config.GetInstance().GetJWTSignKey(), nil + }) + + if err != nil { + return "", err + } + + if !token.Valid { + return "", ErrInvalidToken + } + + return claims.UserID, nil +} diff --git a/pkg/manager/checksum.go b/pkg/manager/checksum.go index 244549dab..a545008b6 100644 --- a/pkg/manager/checksum.go +++ b/pkg/manager/checksum.go @@ -31,9 +31,11 @@ func setInitialMD5Config(txnManager models.TransactionManager) { defaultAlgorithm = models.HashAlgorithmMd5 } + // TODO - this should use the config instance viper.SetDefault(config.VideoFileNamingAlgorithm, defaultAlgorithm) viper.SetDefault(config.CalculateMD5, usingMD5) + config := 
config.GetInstance() if err := config.Write(); err != nil { logger.Errorf("Error while writing configuration file: %s", err.Error()) } diff --git a/pkg/manager/config/config.go b/pkg/manager/config/config.go index a858d6722..440a1fded 100644 --- a/pkg/manager/config/config.go +++ b/pkg/manager/config/config.go @@ -1,8 +1,11 @@ package config import ( - "golang.org/x/crypto/bcrypt" + "fmt" "runtime" + "strings" + + "golang.org/x/crypto/bcrypt" "errors" "io/ioutil" @@ -20,6 +23,7 @@ const Cache = "cache" const Generated = "generated" const Metadata = "metadata" const Downloads = "downloads" +const ApiKey = "api_key" const Username = "username" const Password = "password" const MaxSessionAge = "max_session_age" @@ -102,6 +106,10 @@ const Language = "language" // this should be manually configured only const CustomServedFolders = "custom_served_folders" +// UI directory. Overrides to serve the UI from a specific location +// rather than use the embedded UI. +const CustomUILocation = "custom_ui_location" + // Interface options const MenuItems = "menu_items" @@ -114,6 +122,7 @@ const AutostartVideo = "autostart_video" const ShowStudioAsText = "show_studio_as_text" const CSSEnabled = "cssEnabled" const WallPlayback = "wall_playback" +const SlideshowDelay = "slideshow_delay" // Logging options const LogFile = "logFile" @@ -124,33 +133,70 @@ const LogAccess = "logAccess" // File upload options const MaxUploadSize = "max_upload_size" -func Set(key string, value interface{}) { +type MissingConfigError struct { + missingFields []string +} + +func (e MissingConfigError) Error() string { + return fmt.Sprintf("missing the following mandatory settings: %s", strings.Join(e.missingFields, ", ")) +} + +type Instance struct { + isNewSystem bool +} + +var instance *Instance + +func GetInstance() *Instance { + if instance == nil { + instance = &Instance{} + } + return instance +} + +func (i *Instance) IsNewSystem() bool { + return i.isNewSystem +} + +func (i *Instance) SetConfigFile(fn 
string) { + viper.SetConfigFile(fn) +} + +func (i *Instance) Set(key string, value interface{}) { viper.Set(key, value) } -func SetPassword(value string) { +func (i *Instance) SetPassword(value string) { // if blank, don't bother hashing; we want it to be blank if value == "" { - Set(Password, "") + i.Set(Password, "") } else { - Set(Password, hashPassword(value)) + i.Set(Password, hashPassword(value)) } } -func Write() error { +func (i *Instance) Write() error { return viper.WriteConfig() } -func GetConfigPath() string { - configFileUsed := viper.ConfigFileUsed() - return filepath.Dir(configFileUsed) -} - -func GetConfigFilePath() string { +// GetConfigFile returns the full path to the used configuration file. +func (i *Instance) GetConfigFile() string { return viper.ConfigFileUsed() } -func GetStashPaths() []*models.StashConfig { +// GetConfigPath returns the path of the directory containing the used +// configuration file. +func (i *Instance) GetConfigPath() string { + return filepath.Dir(i.GetConfigFile()) +} + +// GetDefaultDatabaseFilePath returns the default database filename, +// which is located in the same directory as the config file. 
+func (i *Instance) GetDefaultDatabaseFilePath() string { + return filepath.Join(i.GetConfigPath(), "stash-go.sqlite") +} + +func (i *Instance) GetStashPaths() []*models.StashConfig { var ret []*models.StashConfig if err := viper.UnmarshalKey(Stash, &ret); err != nil || len(ret) == 0 { // fallback to legacy format @@ -167,47 +213,51 @@ func GetStashPaths() []*models.StashConfig { return ret } -func GetCachePath() string { +func (i *Instance) GetConfigFilePath() string { + return viper.ConfigFileUsed() +} + +func (i *Instance) GetCachePath() string { return viper.GetString(Cache) } -func GetGeneratedPath() string { +func (i *Instance) GetGeneratedPath() string { return viper.GetString(Generated) } -func GetMetadataPath() string { +func (i *Instance) GetMetadataPath() string { return viper.GetString(Metadata) } -func GetDatabasePath() string { +func (i *Instance) GetDatabasePath() string { return viper.GetString(Database) } -func GetJWTSignKey() []byte { +func (i *Instance) GetJWTSignKey() []byte { return []byte(viper.GetString(JWTSignKey)) } -func GetSessionStoreKey() []byte { +func (i *Instance) GetSessionStoreKey() []byte { return []byte(viper.GetString(SessionStoreKey)) } -func GetDefaultScrapersPath() string { +func (i *Instance) GetDefaultScrapersPath() string { // default to the same directory as the config file - fn := filepath.Join(GetConfigPath(), "scrapers") + fn := filepath.Join(i.GetConfigPath(), "scrapers") return fn } -func GetExcludes() []string { +func (i *Instance) GetExcludes() []string { return viper.GetStringSlice(Exclude) } -func GetImageExcludes() []string { +func (i *Instance) GetImageExcludes() []string { return viper.GetStringSlice(ImageExclude) } -func GetVideoExtensions() []string { +func (i *Instance) GetVideoExtensions() []string { ret := viper.GetStringSlice(VideoExtensions) if ret == nil { ret = defaultVideoExtensions @@ -215,7 +265,7 @@ func GetVideoExtensions() []string { return ret } -func GetImageExtensions() []string { +func (i 
*Instance) GetImageExtensions() []string { ret := viper.GetStringSlice(ImageExtensions) if ret == nil { ret = defaultImageExtensions @@ -223,7 +273,7 @@ func GetImageExtensions() []string { return ret } -func GetGalleryExtensions() []string { +func (i *Instance) GetGalleryExtensions() []string { ret := viper.GetStringSlice(GalleryExtensions) if ret == nil { ret = defaultGalleryExtensions @@ -231,11 +281,11 @@ func GetGalleryExtensions() []string { return ret } -func GetCreateGalleriesFromFolders() bool { +func (i *Instance) GetCreateGalleriesFromFolders() bool { return viper.GetBool(CreateGalleriesFromFolders) } -func GetLanguage() string { +func (i *Instance) GetLanguage() string { ret := viper.GetString(Language) // default to English @@ -248,13 +298,13 @@ func GetLanguage() string { // IsCalculateMD5 returns true if MD5 checksums should be generated for // scene video files. -func IsCalculateMD5() bool { +func (i *Instance) IsCalculateMD5() bool { return viper.GetBool(CalculateMD5) } // GetVideoFileNamingAlgorithm returns what hash algorithm should be used for // naming generated scene video files. -func GetVideoFileNamingAlgorithm() models.HashAlgorithm { +func (i *Instance) GetVideoFileNamingAlgorithm() models.HashAlgorithm { ret := viper.GetString(VideoFileNamingAlgorithm) // default to oshash @@ -265,23 +315,23 @@ func GetVideoFileNamingAlgorithm() models.HashAlgorithm { return models.HashAlgorithm(ret) } -func GetScrapersPath() string { +func (i *Instance) GetScrapersPath() string { return viper.GetString(ScrapersPath) } -func GetScraperUserAgent() string { +func (i *Instance) GetScraperUserAgent() string { return viper.GetString(ScraperUserAgent) } // GetScraperCDPPath gets the path to the Chrome executable or remote address // to an instance of Chrome. 
-func GetScraperCDPPath() string { +func (i *Instance) GetScraperCDPPath() string { return viper.GetString(ScraperCDPPath) } // GetScraperCertCheck returns true if the scraper should check for insecure // certificates when fetching an image or a page. -func GetScraperCertCheck() bool { +func (i *Instance) GetScraperCertCheck() bool { ret := true if viper.IsSet(ScraperCertCheck) { ret = viper.GetBool(ScraperCertCheck) @@ -290,48 +340,48 @@ func GetScraperCertCheck() bool { return ret } -func GetStashBoxes() []*models.StashBox { +func (i *Instance) GetStashBoxes() []*models.StashBox { var boxes []*models.StashBox viper.UnmarshalKey(StashBoxes, &boxes) return boxes } -func GetDefaultPluginsPath() string { +func (i *Instance) GetDefaultPluginsPath() string { // default to the same directory as the config file - fn := filepath.Join(GetConfigPath(), "plugins") + fn := filepath.Join(i.GetConfigPath(), "plugins") return fn } -func GetPluginsPath() string { +func (i *Instance) GetPluginsPath() string { return viper.GetString(PluginsPath) } -func GetHost() string { +func (i *Instance) GetHost() string { return viper.GetString(Host) } -func GetPort() int { +func (i *Instance) GetPort() int { return viper.GetInt(Port) } -func GetExternalHost() string { +func (i *Instance) GetExternalHost() string { return viper.GetString(ExternalHost) } // GetPreviewSegmentDuration returns the duration of a single segment in a // scene preview file, in seconds. -func GetPreviewSegmentDuration() float64 { +func (i *Instance) GetPreviewSegmentDuration() float64 { return viper.GetFloat64(PreviewSegmentDuration) } // GetParallelTasks returns the number of parallel tasks that should be started // by scan or generate task. 
-func GetParallelTasks() int { +func (i *Instance) GetParallelTasks() int { return viper.GetInt(ParallelTasks) } -func GetParallelTasksWithAutoDetection() int { +func (i *Instance) GetParallelTasksWithAutoDetection() int { parallelTasks := viper.GetInt(ParallelTasks) if parallelTasks <= 0 { parallelTasks = (runtime.NumCPU() / 4) + 1 @@ -340,7 +390,7 @@ func GetParallelTasksWithAutoDetection() int { } // GetPreviewSegments returns the amount of segments in a scene preview file. -func GetPreviewSegments() int { +func (i *Instance) GetPreviewSegments() int { return viper.GetInt(PreviewSegments) } @@ -350,7 +400,7 @@ func GetPreviewSegments() int { // of seconds to exclude from the start of the video before it is included // in the preview. If the value is suffixed with a '%' character (for example // '2%'), then it is interpreted as a proportion of the total video duration. -func GetPreviewExcludeStart() string { +func (i *Instance) GetPreviewExcludeStart() string { return viper.GetString(PreviewExcludeStart) } @@ -359,13 +409,13 @@ func GetPreviewExcludeStart() string { // is interpreted as the amount of seconds to exclude from the end of the video // when generating previews. If the value is suffixed with a '%' character, // then it is interpreted as a proportion of the total video duration. -func GetPreviewExcludeEnd() string { +func (i *Instance) GetPreviewExcludeEnd() string { return viper.GetString(PreviewExcludeEnd) } // GetPreviewPreset returns the preset when generating previews. Defaults to // Slow. 
-func GetPreviewPreset() models.PreviewPreset { +func (i *Instance) GetPreviewPreset() models.PreviewPreset { ret := viper.GetString(PreviewPreset) // default to slow @@ -376,7 +426,7 @@ func GetPreviewPreset() models.PreviewPreset { return models.PreviewPreset(ret) } -func GetMaxTranscodeSize() models.StreamingResolutionEnum { +func (i *Instance) GetMaxTranscodeSize() models.StreamingResolutionEnum { ret := viper.GetString(MaxTranscodeSize) // default to original @@ -387,7 +437,7 @@ func GetMaxTranscodeSize() models.StreamingResolutionEnum { return models.StreamingResolutionEnum(ret) } -func GetMaxStreamingTranscodeSize() models.StreamingResolutionEnum { +func (i *Instance) GetMaxStreamingTranscodeSize() models.StreamingResolutionEnum { ret := viper.GetString(MaxStreamingTranscodeSize) // default to original @@ -398,29 +448,33 @@ func GetMaxStreamingTranscodeSize() models.StreamingResolutionEnum { return models.StreamingResolutionEnum(ret) } -func GetUsername() string { +func (i *Instance) GetAPIKey() string { + return viper.GetString(ApiKey) +} + +func (i *Instance) GetUsername() string { return viper.GetString(Username) } -func GetPasswordHash() string { +func (i *Instance) GetPasswordHash() string { return viper.GetString(Password) } -func GetCredentials() (string, string) { - if HasCredentials() { +func (i *Instance) GetCredentials() (string, string) { + if i.HasCredentials() { return viper.GetString(Username), viper.GetString(Password) } return "", "" } -func HasCredentials() bool { +func (i *Instance) HasCredentials() bool { if !viper.IsSet(Username) || !viper.IsSet(Password) { return false } - username := GetUsername() - pwHash := GetPasswordHash() + username := i.GetUsername() + pwHash := i.GetPasswordHash() return username != "" && pwHash != "" } @@ -431,20 +485,20 @@ func hashPassword(password string) string { return string(hash) } -func ValidateCredentials(username string, password string) bool { - if !HasCredentials() { +func (i *Instance) 
ValidateCredentials(username string, password string) bool { + if !i.HasCredentials() { // don't need to authenticate if no credentials saved return true } - authUser, authPWHash := GetCredentials() + authUser, authPWHash := i.GetCredentials() err := bcrypt.CompareHashAndPassword([]byte(authPWHash), []byte(password)) return username == authUser && err == nil } -func ValidateStashBoxes(boxes []*models.StashBoxInput) error { +func (i *Instance) ValidateStashBoxes(boxes []*models.StashBoxInput) error { isMulti := len(boxes) > 1 re, err := regexp.Compile("^http.*graphql$") @@ -468,56 +522,65 @@ func ValidateStashBoxes(boxes []*models.StashBoxInput) error { // GetMaxSessionAge gets the maximum age for session cookies, in seconds. // Session cookie expiry times are refreshed every request. -func GetMaxSessionAge() int { +func (i *Instance) GetMaxSessionAge() int { viper.SetDefault(MaxSessionAge, DefaultMaxSessionAge) return viper.GetInt(MaxSessionAge) } // GetCustomServedFolders gets the map of custom paths to their applicable // filesystem locations -func GetCustomServedFolders() URLMap { +func (i *Instance) GetCustomServedFolders() URLMap { return viper.GetStringMapString(CustomServedFolders) } +func (i *Instance) GetCustomUILocation() string { + return viper.GetString(CustomUILocation) +} + // Interface options -func GetMenuItems() []string { +func (i *Instance) GetMenuItems() []string { if viper.IsSet(MenuItems) { return viper.GetStringSlice(MenuItems) } return defaultMenuItems } -func GetSoundOnPreview() bool { - viper.SetDefault(SoundOnPreview, true) +func (i *Instance) GetSoundOnPreview() bool { + viper.SetDefault(SoundOnPreview, false) return viper.GetBool(SoundOnPreview) } -func GetWallShowTitle() bool { +func (i *Instance) GetWallShowTitle() bool { viper.SetDefault(WallShowTitle, true) return viper.GetBool(WallShowTitle) } -func GetWallPlayback() string { +func (i *Instance) GetWallPlayback() string { viper.SetDefault(WallPlayback, "video") return 
viper.GetString(WallPlayback) } -func GetMaximumLoopDuration() int { +func (i *Instance) GetMaximumLoopDuration() int { viper.SetDefault(MaximumLoopDuration, 0) return viper.GetInt(MaximumLoopDuration) } -func GetAutostartVideo() bool { +func (i *Instance) GetAutostartVideo() bool { viper.SetDefault(AutostartVideo, false) return viper.GetBool(AutostartVideo) } -func GetShowStudioAsText() bool { +func (i *Instance) GetShowStudioAsText() bool { viper.SetDefault(ShowStudioAsText, false) return viper.GetBool(ShowStudioAsText) } -func GetCSSPath() string { +func (i *Instance) GetSlideshowDelay() int { + viper.SetDefault(SlideshowDelay, 5000) + return viper.GetInt(SlideshowDelay) +} + +func (i *Instance) GetCSSPath() string { // use custom.css in the same directory as the config file configFileUsed := viper.ConfigFileUsed() configDir := filepath.Dir(configFileUsed) @@ -527,8 +590,8 @@ func GetCSSPath() string { return fn } -func GetCSS() string { - fn := GetCSSPath() +func (i *Instance) GetCSS() string { + fn := i.GetCSSPath() exists, _ := utils.FileExists(fn) if !exists { @@ -544,28 +607,28 @@ func GetCSS() string { return string(buf) } -func SetCSS(css string) { - fn := GetCSSPath() +func (i *Instance) SetCSS(css string) { + fn := i.GetCSSPath() buf := []byte(css) ioutil.WriteFile(fn, buf, 0777) } -func GetCSSEnabled() bool { +func (i *Instance) GetCSSEnabled() bool { return viper.GetBool(CSSEnabled) } // GetLogFile returns the filename of the file to output logs to. // An empty string means that file logging will be disabled. -func GetLogFile() string { +func (i *Instance) GetLogFile() string { return viper.GetString(LogFile) } // GetLogOut returns true if logging should be output to the terminal // in addition to writing to a log file. Logging will be output to the // terminal if file logging is disabled. Defaults to true. 
-func GetLogOut() bool { +func (i *Instance) GetLogOut() bool { ret := true if viper.IsSet(LogOut) { ret = viper.GetBool(LogOut) @@ -576,7 +639,7 @@ func GetLogOut() bool { // GetLogLevel returns the lowest log level to write to the log. // Should be one of "Debug", "Info", "Warning", "Error" -func GetLogLevel() string { +func (i *Instance) GetLogLevel() string { const defaultValue = "Info" value := viper.GetString(LogLevel) @@ -589,7 +652,7 @@ func GetLogLevel() string { // GetLogAccess returns true if http requests should be logged to the terminal. // HTTP requests are not logged to the log file. Defaults to true. -func GetLogAccess() bool { +func (i *Instance) GetLogAccess() bool { ret := true if viper.IsSet(LogAccess) { ret = viper.GetBool(LogAccess) @@ -599,7 +662,7 @@ func GetLogAccess() bool { } // Max allowed graphql upload size in megabytes -func GetMaxUploadSize() int64 { +func (i *Instance) GetMaxUploadSize() int64 { ret := int64(1024) if viper.IsSet(MaxUploadSize) { ret = viper.GetInt64(MaxUploadSize) @@ -607,37 +670,65 @@ func GetMaxUploadSize() int64 { return ret << 20 } -func IsValid() bool { - setPaths := viper.IsSet(Stash) && viper.IsSet(Cache) && viper.IsSet(Generated) && viper.IsSet(Metadata) +func (i *Instance) Validate() error { + mandatoryPaths := []string{ + Database, + Generated, + } - // TODO: check valid paths - return setPaths + var missingFields []string + + for _, p := range mandatoryPaths { + if !viper.IsSet(p) || viper.GetString(p) == "" { + missingFields = append(missingFields, p) + } + } + + if len(missingFields) > 0 { + return MissingConfigError{ + missingFields: missingFields, + } + } + + return nil } -func setDefaultValues() { +func (i *Instance) setDefaultValues() error { viper.SetDefault(ParallelTasks, parallelTasksDefault) viper.SetDefault(PreviewSegmentDuration, previewSegmentDurationDefault) viper.SetDefault(PreviewSegments, previewSegmentsDefault) viper.SetDefault(PreviewExcludeStart, previewExcludeStartDefault) 
viper.SetDefault(PreviewExcludeEnd, previewExcludeEndDefault) + + viper.SetDefault(Database, i.GetDefaultDatabaseFilePath()) + + // Set generated to the metadata path for backwards compat + viper.SetDefault(Generated, viper.GetString(Metadata)) + + // Set default scrapers and plugins paths + viper.SetDefault(ScrapersPath, i.GetDefaultScrapersPath()) + viper.SetDefault(PluginsPath, i.GetDefaultPluginsPath()) + return viper.WriteConfig() } // SetInitialConfig fills in missing required config fields -func SetInitialConfig() error { +func (i *Instance) SetInitialConfig() error { // generate some api keys const apiKeyLength = 32 - if string(GetJWTSignKey()) == "" { + if string(i.GetJWTSignKey()) == "" { signKey := utils.GenerateRandomKey(apiKeyLength) - Set(JWTSignKey, signKey) + i.Set(JWTSignKey, signKey) } - if string(GetSessionStoreKey()) == "" { + if string(i.GetSessionStoreKey()) == "" { sessionStoreKey := utils.GenerateRandomKey(apiKeyLength) - Set(SessionStoreKey, sessionStoreKey) + i.Set(SessionStoreKey, sessionStoreKey) } - setDefaultValues() - - return Write() + return i.setDefaultValues() +} + +func (i *Instance) FinalizeSetup() { + i.isNewSystem = false } diff --git a/pkg/manager/config/init.go b/pkg/manager/config/init.go new file mode 100644 index 000000000..8b6c2ff0e --- /dev/null +++ b/pkg/manager/config/init.go @@ -0,0 +1,123 @@ +package config + +import ( + "fmt" + "net" + "os" + "sync" + + "github.com/spf13/pflag" + "github.com/spf13/viper" + + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/utils" +) + +var once sync.Once + +type flagStruct struct { + configFilePath string +} + +func Initialize() (*Instance, error) { + var err error + once.Do(func() { + instance = &Instance{} + + flags := initFlags() + if err = initConfig(flags); err != nil { + return + } + + initEnvs() + + if instance.isNewSystem { + if instance.Validate() == nil { + // system has been initialised by the environment + instance.isNewSystem = false + } + } + + 
if !instance.isNewSystem { + err = instance.SetInitialConfig() + } + }) + return instance, err +} + +func initConfig(flags flagStruct) error { + // The config file is called config. Leave off the file extension. + viper.SetConfigName("config") + + viper.AddConfigPath(".") // Look for config in the working directory + viper.AddConfigPath("$HOME/.stash") // Look for the config in the home directory + + configFile := "" + envConfigFile := os.Getenv("STASH_CONFIG_FILE") + + if flags.configFilePath != "" { + configFile = flags.configFilePath + } else if envConfigFile != "" { + configFile = envConfigFile + } + + if configFile != "" { + viper.SetConfigFile(configFile) + + // if file does not exist, assume it is a new system + if exists, _ := utils.FileExists(configFile); !exists { + instance.isNewSystem = true + + // ensure we can write to the file + if err := utils.Touch(configFile); err != nil { + return fmt.Errorf(`could not write to provided config path "%s": %s`, configFile, err.Error()) + } else { + // remove the file + os.Remove(configFile) + } + + return nil + } + } + + err := viper.ReadInConfig() // Find and read the config file + // if not found, assume its a new system + if _, isMissing := err.(viper.ConfigFileNotFoundError); isMissing { + instance.isNewSystem = true + return nil + } else if err != nil { + return err + } + + return nil +} + +func initFlags() flagStruct { + flags := flagStruct{} + + pflag.IP("host", net.IPv4(0, 0, 0, 0), "ip address for the host") + pflag.Int("port", 9999, "port to serve from") + pflag.StringVarP(&flags.configFilePath, "config", "c", "", "config file to use") + + pflag.Parse() + if err := viper.BindPFlags(pflag.CommandLine); err != nil { + logger.Infof("failed to bind flags: %s", err.Error()) + } + + return flags +} + +func initEnvs() { + viper.SetEnvPrefix("stash") // will be uppercased automatically + viper.BindEnv("host") // STASH_HOST + viper.BindEnv("port") // STASH_PORT + viper.BindEnv("external_host") // 
STASH_EXTERNAL_HOST + viper.BindEnv("generated") // STASH_GENERATED + viper.BindEnv("metadata") // STASH_METADATA + viper.BindEnv("cache") // STASH_CACHE + + // only set stash config flag if not already set + if instance.GetStashPaths() == nil { + viper.BindEnv("stash") // STASH_STASH + } +} diff --git a/pkg/manager/generator_phash.go b/pkg/manager/generator_phash.go new file mode 100644 index 000000000..5ea390452 --- /dev/null +++ b/pkg/manager/generator_phash.go @@ -0,0 +1,99 @@ +package manager + +import ( + "fmt" + "image" + "image/color" + "math" + + "github.com/corona10/goimagehash" + "github.com/disintegration/imaging" + + "github.com/stashapp/stash/pkg/ffmpeg" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/utils" +) + +type PhashGenerator struct { + Info *GeneratorInfo + + VideoChecksum string + Columns int + Rows int +} + +func NewPhashGenerator(videoFile ffmpeg.VideoFile, checksum string) (*PhashGenerator, error) { + exists, err := utils.FileExists(videoFile.Path) + if !exists { + return nil, err + } + + generator, err := newGeneratorInfo(videoFile) + if err != nil { + return nil, err + } + + return &PhashGenerator{ + Info: generator, + VideoChecksum: checksum, + Columns: 5, + Rows: 5, + }, nil +} + +func (g *PhashGenerator) Generate() (*uint64, error) { + encoder := ffmpeg.NewEncoder(instance.FFMPEGPath) + + sprite, err := g.generateSprite(&encoder) + if err != nil { + return nil, err + } + + hash, err := goimagehash.PerceptionHash(sprite) + if err != nil { + return nil, err + } + hashValue := hash.GetHash() + return &hashValue, nil +} + +func (g *PhashGenerator) generateSprite(encoder *ffmpeg.Encoder) (image.Image, error) { + logger.Infof("[generator] generating phash sprite for %s", g.Info.VideoFile.Path) + + // Generate sprite image offset by 5% on each end to avoid intro/outros + chunkCount := g.Columns * g.Rows + offset := 0.05 * g.Info.VideoFile.Duration + stepSize := (0.9 * g.Info.VideoFile.Duration) / 
float64(chunkCount) + var images []image.Image + for i := 0; i < chunkCount; i++ { + time := offset + (float64(i) * stepSize) + + options := ffmpeg.SpriteScreenshotOptions{ + Time: time, + Width: 160, + } + img, err := encoder.SpriteScreenshot(g.Info.VideoFile, options) + if err != nil { + return nil, err + } + images = append(images, img) + } + + // Combine all of the thumbnails into a sprite image + if len(images) == 0 { + return nil, fmt.Errorf("images slice is empty, failed to generate phash sprite for %s", g.Info.VideoFile.Path) + } + width := images[0].Bounds().Size().X + height := images[0].Bounds().Size().Y + canvasWidth := width * g.Columns + canvasHeight := height * g.Rows + montage := imaging.New(canvasWidth, canvasHeight, color.NRGBA{}) + for index := 0; index < len(images); index++ { + x := width * (index % g.Columns) + y := height * int(math.Floor(float64(index)/float64(g.Rows))) + img := images[index] + montage = imaging.Paste(montage, img, image.Pt(x, y)) + } + + return montage, nil +} diff --git a/pkg/manager/generator_preview.go b/pkg/manager/generator_preview.go index c777aa073..c723b9e69 100644 --- a/pkg/manager/generator_preview.go +++ b/pkg/manager/generator_preview.go @@ -58,11 +58,6 @@ func (g *PreviewGenerator) Generate() error { } encoder := ffmpeg.NewEncoder(instance.FFMPEGPath) - - if err := g.generateConcatFile(); err != nil { - return err - } - if g.GenerateVideo { if err := g.generateVideo(&encoder, false); err != nil { logger.Warnf("[generator] failed generating scene preview, trying fallback") @@ -101,18 +96,32 @@ func (g *PreviewGenerator) generateVideo(encoder *ffmpeg.Encoder, fallback bool) if !g.Overwrite && outputExists { return nil } + err := g.generateConcatFile() + if err != nil { + return err + } + + var tmpFiles []string // a list of tmp files used during the preview generation + tmpFiles = append(tmpFiles, g.getConcatFilePath()) // add concat filename to tmpFiles + defer func() { removeFiles(tmpFiles) }() // remove 
tmpFiles when done stepSize, offset := g.Info.getStepSizeAndOffset() + durationSegment := g.Info.ChunkDuration + if durationSegment < 0.75 { // a very short duration can create files without a video stream + durationSegment = 0.75 // use 0.75 in that case + logger.Warnf("[generator] Segment duration (%f) too short. Using 0.75 instead.", g.Info.ChunkDuration) + } + for i := 0; i < g.Info.ChunkCount; i++ { time := offset + (float64(i) * stepSize) num := fmt.Sprintf("%.3d", i) filename := "preview_" + g.VideoChecksum + "_" + num + ".mp4" chunkOutputPath := instance.Paths.Generated.GetTmpPath(filename) - + tmpFiles = append(tmpFiles, chunkOutputPath) // add chunk filename to tmpFiles options := ffmpeg.ScenePreviewChunkOptions{ StartTime: time, - Duration: g.Info.ChunkDuration, + Duration: durationSegment, Width: 640, OutputPath: chunkOutputPath, } @@ -152,3 +161,11 @@ func (g *PreviewGenerator) generateImage(encoder *ffmpeg.Encoder) error { func (g *PreviewGenerator) getConcatFilePath() string { return instance.Paths.Generated.GetTmpPath(fmt.Sprintf("files_%s.txt", g.VideoChecksum)) } + +func removeFiles(list []string) { + for _, f := range list { + if err := os.Remove(f); err != nil { + logger.Warnf("[generator] Delete error: %s", err) + } + } +} diff --git a/pkg/manager/generator_sprite.go b/pkg/manager/generator_sprite.go index 7bbc780c4..457ad4ca5 100644 --- a/pkg/manager/generator_sprite.go +++ b/pkg/manager/generator_sprite.go @@ -8,11 +8,9 @@ import ( "math" "os" "path/filepath" - "sort" "strings" "github.com/disintegration/imaging" - "github.com/fvbommel/sortorder" "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/logger" @@ -75,29 +73,15 @@ func (g *SpriteGenerator) generateSpriteImage(encoder *ffmpeg.Encoder) error { // Create `this.chunkCount` thumbnails in the tmp directory stepSize := g.Info.VideoFile.Duration / float64(g.Info.ChunkCount) + var images []image.Image for i := 0; i < g.Info.ChunkCount; i++ { time := float64(i) * stepSize -
num := fmt.Sprintf("%.3d", i) - filename := "thumbnail_" + g.VideoChecksum + "_" + num + ".jpg" - options := ffmpeg.ScreenshotOptions{ - OutputPath: instance.Paths.Generated.GetTmpPath(filename), - Time: time, - Width: 160, + options := ffmpeg.SpriteScreenshotOptions{ + Time: time, + Width: 160, } - encoder.Screenshot(g.Info.VideoFile, options) - } - - // Combine all of the thumbnails into a sprite image - pattern := fmt.Sprintf("thumbnail_%s_.+\\.jpg$", g.VideoChecksum) - imagePaths, err := utils.MatchEntries(instance.Paths.Generated.Tmp, pattern) - if err != nil { - return err - } - sort.Sort(sortorder.Natural(imagePaths)) - var images []image.Image - for _, imagePath := range imagePaths { - img, err := imaging.Open(imagePath) + img, err := encoder.SpriteScreenshot(g.Info.VideoFile, options) if err != nil { return err } @@ -107,6 +91,7 @@ func (g *SpriteGenerator) generateSpriteImage(encoder *ffmpeg.Encoder) error { if len(images) == 0 { return fmt.Errorf("images slice is empty, failed to generate sprite images for %s", g.Info.VideoFile.Path) } + // Combine all of the thumbnails into a sprite image width := images[0].Bounds().Size().X height := images[0].Bounds().Size().Y canvasWidth := width * g.Columns diff --git a/pkg/manager/job_status.go b/pkg/manager/job_status.go index 4a6c7197a..ef4dfad62 100644 --- a/pkg/manager/job_status.go +++ b/pkg/manager/job_status.go @@ -3,16 +3,17 @@ package manager type JobStatus int const ( - Idle JobStatus = 0 - Import JobStatus = 1 - Export JobStatus = 2 - Scan JobStatus = 3 - Generate JobStatus = 4 - Clean JobStatus = 5 - Scrape JobStatus = 6 - AutoTag JobStatus = 7 - Migrate JobStatus = 8 - PluginOperation JobStatus = 9 + Idle JobStatus = 0 + Import JobStatus = 1 + Export JobStatus = 2 + Scan JobStatus = 3 + Generate JobStatus = 4 + Clean JobStatus = 5 + Scrape JobStatus = 6 + AutoTag JobStatus = 7 + Migrate JobStatus = 8 + PluginOperation JobStatus = 9 + StashBoxBatchPerformer JobStatus = 10 ) func (s JobStatus) String() 
string { @@ -37,6 +38,8 @@ func (s JobStatus) String() string { statusMessage = "Clean" case PluginOperation: statusMessage = "Plugin Operation" + case StashBoxBatchPerformer: + statusMessage = "Stash-Box Performer Batch Operation" } return statusMessage diff --git a/pkg/manager/jsonschema/performer.go b/pkg/manager/jsonschema/performer.go index a145f9bce..0ff38bff6 100644 --- a/pkg/manager/jsonschema/performer.go +++ b/pkg/manager/jsonschema/performer.go @@ -30,6 +30,11 @@ type Performer struct { Image string `json:"image,omitempty"` CreatedAt models.JSONTime `json:"created_at,omitempty"` UpdatedAt models.JSONTime `json:"updated_at,omitempty"` + Rating int `json:"rating,omitempty"` + Details string `json:"details,omitempty"` + DeathDate string `json:"death_date,omitempty"` + HairColor string `json:"hair_color,omitempty"` + Weight int `json:"weight,omitempty"` } func LoadPerformerFile(filePath string) (*Performer, error) { diff --git a/pkg/manager/jsonschema/scene.go b/pkg/manager/jsonschema/scene.go index 79c466be6..540447757 100644 --- a/pkg/manager/jsonschema/scene.go +++ b/pkg/manager/jsonschema/scene.go @@ -39,6 +39,7 @@ type Scene struct { Title string `json:"title,omitempty"` Checksum string `json:"checksum,omitempty"` OSHash string `json:"oshash,omitempty"` + Phash string `json:"phash,omitempty"` Studio string `json:"studio,omitempty"` URL string `json:"url,omitempty"` Date string `json:"date,omitempty"` diff --git a/pkg/manager/jsonschema/studio.go b/pkg/manager/jsonschema/studio.go index ed1f7dea0..82a7e740a 100644 --- a/pkg/manager/jsonschema/studio.go +++ b/pkg/manager/jsonschema/studio.go @@ -15,6 +15,8 @@ type Studio struct { Image string `json:"image,omitempty"` CreatedAt models.JSONTime `json:"created_at,omitempty"` UpdatedAt models.JSONTime `json:"updated_at,omitempty"` + Rating int `json:"rating,omitempty"` + Details string `json:"details,omitempty"` } func LoadStudioFile(filePath string) (*Studio, error) { diff --git a/pkg/manager/manager.go 
b/pkg/manager/manager.go index acd23f8bd..4aa05dcb4 100644 --- a/pkg/manager/manager.go +++ b/pkg/manager/manager.go @@ -1,11 +1,14 @@ package manager import ( - "net" + "errors" + "fmt" + "os" + "path/filepath" "sync" + "time" - "github.com/spf13/pflag" - "github.com/spf13/viper" + "github.com/stashapp/stash/pkg/database" "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/manager/config" @@ -18,6 +21,8 @@ import ( ) type singleton struct { + Config *config.Instance + Status TaskStatus Paths *paths.Paths @@ -35,12 +40,6 @@ type singleton struct { var instance *singleton var once sync.Once -type flagStruct struct { - configFilePath string -} - -var flags = flagStruct{} - func GetInstance() *singleton { Initialize() return instance @@ -48,29 +47,43 @@ func GetInstance() *singleton { func Initialize() *singleton { once.Do(func() { - _ = utils.EnsureDir(paths.GetConfigDirectory()) - initFlags() - initConfig() - initLog() - initEnvs() - instance = &singleton{ - Status: TaskStatus{Status: Idle, Progress: -1}, - Paths: paths.NewPaths(), + _ = utils.EnsureDir(paths.GetStashHomeDirectory()) + cfg, err := config.Initialize() - PluginCache: initPluginCache(), - - DownloadStore: NewDownloadStore(), - TxnManager: sqlite.NewTransactionManager(), + if err != nil { + panic(fmt.Sprintf("error initializing configuration: %s", err.Error())) } - instance.ScraperCache = instance.initScraperCache() - instance.RefreshConfig() + initLog() - // clear the downloads and tmp directories - // #1021 - only clear these directories if the generated folder is non-empty - if config.GetGeneratedPath() != "" { - utils.EmptyDir(instance.Paths.Generated.Downloads) - utils.EmptyDir(instance.Paths.Generated.Tmp) + instance = &singleton{ + Config: cfg, + Status: TaskStatus{Status: Idle, Progress: -1}, + DownloadStore: NewDownloadStore(), + + TxnManager: sqlite.NewTransactionManager(), + } + + if !cfg.IsNewSystem() { + logger.Infof("using config 
file: %s", cfg.GetConfigFile()) + + if err == nil { + err = cfg.Validate() + } + + if err != nil { + panic(fmt.Sprintf("error initializing configuration: %s", err.Error())) + } else { + if err := instance.PostInit(); err != nil { + panic(err) + } + } + } else { + cfgFile := cfg.GetConfigFile() + if cfgFile != "" { + cfgFile = cfgFile + " " + } + logger.Warnf("config file %snot found. Assuming new system...", cfgFile) } initFFMPEG() @@ -79,78 +92,8 @@ func Initialize() *singleton { return instance } -func initConfig() { - // The config file is called config. Leave off the file extension. - viper.SetConfigName("config") - - if flagConfigFileExists, _ := utils.FileExists(flags.configFilePath); flagConfigFileExists { - viper.SetConfigFile(flags.configFilePath) - } - viper.AddConfigPath(".") // Look for config in the working directory - viper.AddConfigPath("$HOME/.stash") // Look for the config in the home directory - - err := viper.ReadInConfig() // Find and read the config file - if err != nil { // Handle errors reading the config file - _ = utils.Touch(paths.GetDefaultConfigFilePath()) - if err = viper.ReadInConfig(); err != nil { - panic(err) - } - } - logger.Infof("using config file: %s", viper.ConfigFileUsed()) - - config.SetInitialConfig() - - viper.SetDefault(config.Database, paths.GetDefaultDatabaseFilePath()) - - // Set generated to the metadata path for backwards compat - viper.SetDefault(config.Generated, viper.GetString(config.Metadata)) - - // Set default scrapers and plugins paths - viper.SetDefault(config.ScrapersPath, config.GetDefaultScrapersPath()) - viper.SetDefault(config.PluginsPath, config.GetDefaultPluginsPath()) - - // Disabling config watching due to race condition issue - // See: https://github.com/spf13/viper/issues/174 - // Changes to the config outside the system will require a restart - // Watch for changes - // viper.WatchConfig() - // viper.OnConfigChange(func(e fsnotify.Event) { - // fmt.Println("Config file changed:", e.Name) - // 
instance.refreshConfig() - // }) - - //viper.Set("stash", []string{"/", "/stuff"}) - //viper.WriteConfig() -} - -func initFlags() { - pflag.IP("host", net.IPv4(0, 0, 0, 0), "ip address for the host") - pflag.Int("port", 9999, "port to serve from") - pflag.StringVarP(&flags.configFilePath, "config", "c", "", "config file to use") - - pflag.Parse() - if err := viper.BindPFlags(pflag.CommandLine); err != nil { - logger.Infof("failed to bind flags: %s", err.Error()) - } -} - -func initEnvs() { - viper.SetEnvPrefix("stash") // will be uppercased automatically - viper.BindEnv("host") // STASH_HOST - viper.BindEnv("port") // STASH_PORT - viper.BindEnv("external_host") // STASH_EXTERNAL_HOST - viper.BindEnv("generated") // STASH_GENERATED - viper.BindEnv("metadata") // STASH_METADATA - viper.BindEnv("cache") // STASH_CACHE - - // only set stash config flag if not already set - if config.GetStashPaths() == nil { - viper.BindEnv("stash") // STASH_STASH - } -} - func initFFMPEG() { - configDirectory := paths.GetConfigDirectory() + configDirectory := paths.GetStashHomeDirectory() ffmpegPath, ffprobePath := ffmpeg.GetPaths(configDirectory) if ffmpegPath == "" || ffprobePath == "" { logger.Infof("couldn't find FFMPEG, attempting to download it") @@ -174,10 +117,12 @@ The error was: %s } func initLog() { + config := config.GetInstance() logger.Init(config.GetLogFile(), config.GetLogOut(), config.GetLogLevel()) } func initPluginCache() *plugin.Cache { + config := config.GetInstance() ret, err := plugin.NewCache(config.GetPluginsPath()) if err != nil { @@ -187,14 +132,47 @@ func initPluginCache() *plugin.Cache { return ret } +// PostInit initialises the paths, caches and txnManager after the initial +// configuration has been set. Should only be called if the configuration +// is valid. 
+func (s *singleton) PostInit() error { + s.Config.SetInitialConfig() + + s.Paths = paths.NewPaths(s.Config.GetGeneratedPath()) + s.PluginCache = initPluginCache() + s.ScraperCache = instance.initScraperCache() + + s.RefreshConfig() + + // clear the downloads and tmp directories + // #1021 - only clear these directories if the generated folder is non-empty + if s.Config.GetGeneratedPath() != "" { + const deleteTimeout = 1 * time.Second + + utils.Timeout(func() { + utils.EmptyDir(instance.Paths.Generated.Downloads) + utils.EmptyDir(instance.Paths.Generated.Tmp) + }, deleteTimeout, func(done chan struct{}) { + logger.Info("Please wait. Deleting temporary files...") // print + <-done // and wait for deletion + logger.Info("Temporary files deleted.") + }) + } + + if err := database.Initialize(s.Config.GetDatabasePath()); err != nil { + return err + } + + if database.Ready() == nil { + s.PostMigrate() + } + + return nil +} + // initScraperCache initializes a new scraper cache and returns it. 
func (s *singleton) initScraperCache() *scraper.Cache { - scraperConfig := scraper.GlobalConfig{ - Path: config.GetScrapersPath(), - UserAgent: config.GetScraperUserAgent(), - CDPPath: config.GetScraperCDPPath(), - } - ret, err := scraper.NewCache(scraperConfig, s.TxnManager) + ret, err := scraper.NewCache(config.GetInstance(), s.TxnManager) if err != nil { logger.Errorf("Error reading scraper configs: %s", err.Error()) @@ -204,14 +182,14 @@ func (s *singleton) initScraperCache() *scraper.Cache { } func (s *singleton) RefreshConfig() { - s.Paths = paths.NewPaths() - if config.IsValid() { + s.Paths = paths.NewPaths(s.Config.GetGeneratedPath()) + config := s.Config + if config.Validate() == nil { utils.EnsureDir(s.Paths.Generated.Screenshots) utils.EnsureDir(s.Paths.Generated.Vtt) utils.EnsureDir(s.Paths.Generated.Markers) utils.EnsureDir(s.Paths.Generated.Transcodes) utils.EnsureDir(s.Paths.Generated.Downloads) - paths.EnsureJSONDirs(config.GetMetadataPath()) } } @@ -220,3 +198,114 @@ func (s *singleton) RefreshConfig() { func (s *singleton) RefreshScraperCache() { s.ScraperCache = s.initScraperCache() } + +func setSetupDefaults(input *models.SetupInput) { + if input.ConfigLocation == "" { + input.ConfigLocation = filepath.Join(utils.GetHomeDirectory(), ".stash", "config.yml") + } + + configDir := filepath.Dir(input.ConfigLocation) + if input.GeneratedLocation == "" { + input.GeneratedLocation = filepath.Join(configDir, "generated") + } + + if input.DatabaseFile == "" { + input.DatabaseFile = filepath.Join(configDir, "stash-go.sqlite") + } +} + +func (s *singleton) Setup(input models.SetupInput) error { + setSetupDefaults(&input) + + // create the generated directory if it does not exist + if exists, _ := utils.DirExists(input.GeneratedLocation); !exists { + if err := os.Mkdir(input.GeneratedLocation, 0755); err != nil { + return fmt.Errorf("error creating generated directory: %s", err.Error()) + } + } + + if err := utils.Touch(input.ConfigLocation); err != nil { + 
return fmt.Errorf("error creating config file: %s", err.Error()) + } + + s.Config.SetConfigFile(input.ConfigLocation) + + // set the configuration + s.Config.Set(config.Generated, input.GeneratedLocation) + s.Config.Set(config.Database, input.DatabaseFile) + s.Config.Set(config.Stash, input.Stashes) + if err := s.Config.Write(); err != nil { + return fmt.Errorf("error writing configuration file: %s", err.Error()) + } + + // initialise the database + if err := s.PostInit(); err != nil { + return fmt.Errorf("error initializing the database: %s", err.Error()) + } + + s.Config.FinalizeSetup() + + return nil +} + +func (s *singleton) Migrate(input models.MigrateInput) error { + // always backup so that we can roll back to the previous version if + // migration fails + backupPath := input.BackupPath + if backupPath == "" { + backupPath = database.DatabaseBackupPath() + } + + // perform database backup + if err := database.Backup(database.DB, backupPath); err != nil { + return fmt.Errorf("error backing up database: %s", err) + } + + if err := database.RunMigrations(); err != nil { + errStr := fmt.Sprintf("error performing migration: %s", err) + + // roll back to the backed up version + restoreErr := database.RestoreFromBackup(backupPath) + if restoreErr != nil { + errStr = fmt.Sprintf("ERROR: unable to restore database from backup after migration failure: %s\n%s", restoreErr.Error(), errStr) + } else { + errStr = "An error occurred migrating the database to the latest schema version. 
The backup database file was automatically renamed to restore the database.\n" + errStr + } + + return errors.New(errStr) + } + + // perform post-migration operations + s.PostMigrate() + + // if no backup path was provided, then delete the created backup + if input.BackupPath == "" { + if err := os.Remove(backupPath); err != nil { + logger.Warnf("error removing unwanted database backup (%s): %s", backupPath, err.Error()) + } + } + + return nil +} + +func (s *singleton) GetSystemStatus() *models.SystemStatus { + status := models.SystemStatusEnumOk + dbSchema := int(database.Version()) + dbPath := database.DatabasePath() + appSchema := int(database.AppSchemaVersion()) + configFile := s.Config.GetConfigFile() + + if s.Config.IsNewSystem() { + status = models.SystemStatusEnumSetup + } else if dbSchema < appSchema { + status = models.SystemStatusEnumNeedsMigration + } + + return &models.SystemStatus{ + DatabaseSchema: &dbSchema, + DatabasePath: &dbPath, + AppSchema: appSchema, + Status: status, + ConfigPath: &configFile, + } +} diff --git a/pkg/manager/manager_tasks.go b/pkg/manager/manager_tasks.go index 28e42022b..2455d70f7 100644 --- a/pkg/manager/manager_tasks.go +++ b/pkg/manager/manager_tasks.go @@ -11,6 +11,7 @@ import ( "github.com/remeh/sizedwaitgroup" + "github.com/stashapp/stash/pkg/autotag" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/models" @@ -18,17 +19,17 @@ import ( ) func isGallery(pathname string) bool { - gExt := config.GetGalleryExtensions() + gExt := config.GetInstance().GetGalleryExtensions() return matchExtension(pathname, gExt) } func isVideo(pathname string) bool { - vidExt := config.GetVideoExtensions() + vidExt := config.GetInstance().GetVideoExtensions() return matchExtension(pathname, vidExt) } func isImage(pathname string) bool { - imgExt := config.GetImageExtensions() + imgExt := config.GetInstance().GetImageExtensions() return matchExtension(pathname, imgExt) } @@ 
-84,7 +85,7 @@ func (t *TaskStatus) updated() { func getScanPaths(inputPaths []string) []*models.StashConfig { if len(inputPaths) == 0 { - return config.GetStashPaths() + return config.GetInstance().GetStashPaths() } var ret []*models.StashConfig @@ -181,6 +182,7 @@ func (s *singleton) Scan(input models.ScanMetadataInput) { } start := time.Now() + config := config.GetInstance() parallelTasks := config.GetParallelTasksWithAutoDetection() logger.Infof("Scan started with %d parallel tasks", parallelTasks) wg := sizedwaitgroup.New(parallelTasks) @@ -191,11 +193,12 @@ func (s *singleton) Scan(input models.ScanMetadataInput) { i := 0 stoppingErr := errors.New("stopping") + var err error var galleries []string for _, sp := range paths { - err := walkFilesToScan(sp, func(path string, info os.FileInfo, err error) error { + err = walkFilesToScan(sp, func(path string, info os.FileInfo, err error) error { if total != nil { s.Status.setProgress(i, *total) i++ @@ -222,6 +225,7 @@ func (s *singleton) Scan(input models.ScanMetadataInput) { GeneratePreview: utils.IsTrue(input.ScanGeneratePreviews), GenerateImagePreview: utils.IsTrue(input.ScanGenerateImagePreviews), GenerateSprite: utils.IsTrue(input.ScanGenerateSprites), + GeneratePhash: utils.IsTrue(input.ScanGeneratePhashes), } go task.Start(&wg) @@ -229,26 +233,25 @@ func (s *singleton) Scan(input models.ScanMetadataInput) { }) if err == stoppingErr { + logger.Info("Stopping due to user request") break } if err != nil { logger.Errorf("Error encountered scanning files: %s", err.Error()) - return + break } } - if s.Status.stopping { - logger.Info("Stopping due to user request") - return - } - wg.Wait() instance.Paths.Generated.EmptyTmpDir() - elapsed := time.Since(start) logger.Info(fmt.Sprintf("Scan finished (%s)", elapsed)) + if s.Status.stopping || err != nil { + return + } + for _, path := range galleries { wg.Add() task := ScanTask{ @@ -263,9 +266,15 @@ func (s *singleton) Scan(input models.ScanMetadataInput) { }() } -func 
(s *singleton) Import() { +func (s *singleton) Import() error { + config := config.GetInstance() + metadataPath := config.GetMetadataPath() + if metadataPath == "" { + return errors.New("metadata path must be set in config") + } + if s.Status.Status != Idle { - return + return nil } s.Status.SetStatus(Import) s.Status.indefiniteProgress() @@ -275,9 +284,10 @@ func (s *singleton) Import() { var wg sync.WaitGroup wg.Add(1) + task := ImportTask{ txnManager: s.TxnManager, - BaseDir: config.GetMetadataPath(), + BaseDir: metadataPath, Reset: true, DuplicateBehaviour: models.ImportDuplicateEnumFail, MissingRefBehaviour: models.ImportMissingRefEnumFail, @@ -286,11 +296,19 @@ func (s *singleton) Import() { go task.Start(&wg) wg.Wait() }() + + return nil } -func (s *singleton) Export() { +func (s *singleton) Export() error { + config := config.GetInstance() + metadataPath := config.GetMetadataPath() + if metadataPath == "" { + return errors.New("metadata path must be set in config") + } + if s.Status.Status != Idle { - return + return nil } s.Status.SetStatus(Export) s.Status.indefiniteProgress() @@ -308,6 +326,8 @@ func (s *singleton) Export() { go task.Start(&wg) wg.Wait() }() + + return nil } func (s *singleton) RunSingleTask(t Task) (*sync.WaitGroup, error) { @@ -331,6 +351,7 @@ func (s *singleton) RunSingleTask(t Task) (*sync.WaitGroup, error) { } func setGeneratePreviewOptionsInput(optionsInput *models.GeneratePreviewOptionsInput) { + config := config.GetInstance() if optionsInput.PreviewSegments == nil { val := config.GetPreviewSegments() optionsInput.PreviewSegments = &val @@ -408,6 +429,7 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) { return } + config := config.GetInstance() parallelTasks := config.GetParallelTasksWithAutoDetection() logger.Infof("Generate started with %d parallel tasks", parallelTasks) @@ -427,7 +449,7 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) { logger.Infof("Taking too long to count content. 
Skipping...") logger.Infof("Generating content") } else { - logger.Infof("Generating %d sprites %d previews %d image previews %d markers %d transcodes", totalsNeeded.sprites, totalsNeeded.previews, totalsNeeded.imagePreviews, totalsNeeded.markers, totalsNeeded.transcodes) + logger.Infof("Generating %d sprites %d previews %d image previews %d markers %d transcodes %d phashes", totalsNeeded.sprites, totalsNeeded.previews, totalsNeeded.imagePreviews, totalsNeeded.markers, totalsNeeded.transcodes, totalsNeeded.phashes) } fileNamingAlgo := config.GetVideoFileNamingAlgorithm() @@ -443,7 +465,7 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) { } setGeneratePreviewOptionsInput(generatePreviewOptions) - // Start measuring how long the scan has taken. (consider moving this up) + // Start measuring how long the generate has taken. (consider moving this up) start := time.Now() instance.Paths.Generated.EnsureTmpDir() @@ -451,6 +473,8 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) { s.Status.setProgress(i, total) if s.Status.stopping { logger.Info("Stopping due to user request") + wg.Wait() + instance.Paths.Generated.EmptyTmpDir() return } @@ -501,6 +525,16 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) { } go task.Start(&wg) } + + if input.Phashes { + task := GeneratePhashTask{ + Scene: *scene, + fileNamingAlgorithm: fileNamingAlgo, + txnManager: s.TxnManager, + } + wg.Add() + go task.Start(&wg) + } } wg.Wait() @@ -509,6 +543,10 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) { s.Status.setProgress(lenScenes+i, total) if s.Status.stopping { logger.Info("Stopping due to user request") + wg.Wait() + instance.Paths.Generated.EmptyTmpDir() + elapsed := time.Since(start) + logger.Info(fmt.Sprintf("Generate finished (%s)", elapsed)) return } @@ -576,7 +614,7 @@ func (s *singleton) generateScreenshot(sceneId string, at *float64) { txnManager: s.TxnManager, Scene: *scene, ScreenshotAt: at, - 
fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(), + fileNamingAlgorithm: config.GetInstance().GetVideoFileNamingAlgorithm(), } var wg sync.WaitGroup @@ -585,10 +623,19 @@ func (s *singleton) generateScreenshot(sceneId string, at *float64) { wg.Wait() - logger.Infof("Generate finished") + logger.Infof("Generate screenshot finished") }() } +func (s *singleton) isFileBasedAutoTag(input models.AutoTagMetadataInput) bool { + const wildcard = "*" + performerIds := input.Performers + studioIds := input.Studios + tagIds := input.Tags + + return (len(performerIds) == 0 || performerIds[0] == wildcard) && (len(studioIds) == 0 || studioIds[0] == wildcard) && (len(tagIds) == 0 || tagIds[0] == wildcard) +} + func (s *singleton) AutoTag(input models.AutoTagMetadataInput) { if s.Status.Status != Idle { return @@ -599,58 +646,87 @@ func (s *singleton) AutoTag(input models.AutoTagMetadataInput) { go func() { defer s.returnToIdleState() - performerIds := input.Performers - studioIds := input.Studios - tagIds := input.Tags - - // calculate work load - performerCount := len(performerIds) - studioCount := len(studioIds) - tagCount := len(tagIds) - - if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { - performerQuery := r.Performer() - studioQuery := r.Studio() - tagQuery := r.Tag() - - const wildcard = "*" - var err error - if performerCount == 1 && performerIds[0] == wildcard { - performerCount, err = performerQuery.Count() - if err != nil { - return fmt.Errorf("Error getting performer count: %s", err.Error()) - } - } - if studioCount == 1 && studioIds[0] == wildcard { - studioCount, err = studioQuery.Count() - if err != nil { - return fmt.Errorf("Error getting studio count: %s", err.Error()) - } - } - if tagCount == 1 && tagIds[0] == wildcard { - tagCount, err = tagQuery.Count() - if err != nil { - return fmt.Errorf("Error getting tag count: %s", err.Error()) - } - } - - return nil - }); err != nil { - logger.Error(err.Error()) - return 
+ if s.isFileBasedAutoTag(input) { + // doing file-based auto-tag + s.autoTagFiles(input.Paths, len(input.Performers) > 0, len(input.Studios) > 0, len(input.Tags) > 0) + } else { + // doing specific performer/studio/tag auto-tag + s.autoTagSpecific(input) } - - total := performerCount + studioCount + tagCount - s.Status.setProgress(0, total) - - s.autoTagPerformers(input.Paths, performerIds) - s.autoTagStudios(input.Paths, studioIds) - s.autoTagTags(input.Paths, tagIds) }() } +func (s *singleton) autoTagFiles(paths []string, performers, studios, tags bool) { + t := autoTagFilesTask{ + paths: paths, + performers: performers, + studios: studios, + tags: tags, + txnManager: s.TxnManager, + status: &s.Status, + } + + t.process() +} + +func (s *singleton) autoTagSpecific(input models.AutoTagMetadataInput) { + performerIds := input.Performers + studioIds := input.Studios + tagIds := input.Tags + + performerCount := len(performerIds) + studioCount := len(studioIds) + tagCount := len(tagIds) + + if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + performerQuery := r.Performer() + studioQuery := r.Studio() + tagQuery := r.Tag() + + const wildcard = "*" + var err error + if performerCount == 1 && performerIds[0] == wildcard { + performerCount, err = performerQuery.Count() + if err != nil { + return fmt.Errorf("error getting performer count: %s", err.Error()) + } + } + if studioCount == 1 && studioIds[0] == wildcard { + studioCount, err = studioQuery.Count() + if err != nil { + return fmt.Errorf("error getting studio count: %s", err.Error()) + } + } + if tagCount == 1 && tagIds[0] == wildcard { + tagCount, err = tagQuery.Count() + if err != nil { + return fmt.Errorf("error getting tag count: %s", err.Error()) + } + } + + return nil + }); err != nil { + logger.Error(err.Error()) + return + } + + total := performerCount + studioCount + tagCount + s.Status.setProgress(0, total) + + logger.Infof("Starting autotag of %d performers, %d 
studios, %d tags", performerCount, studioCount, tagCount) + + s.autoTagPerformers(input.Paths, performerIds) + s.autoTagStudios(input.Paths, studioIds) + s.autoTagTags(input.Paths, tagIds) + + logger.Info("Finished autotag") +} + func (s *singleton) autoTagPerformers(paths []string, performerIds []string) { - var wg sync.WaitGroup + if s.Status.stopping { + return + } + for _, performerId := range performerIds { var performers []*models.Performer @@ -661,46 +737,63 @@ func (s *singleton) autoTagPerformers(paths []string, performerIds []string) { var err error performers, err = performerQuery.All() if err != nil { - return fmt.Errorf("Error querying performers: %s", err.Error()) + return fmt.Errorf("error querying performers: %s", err.Error()) } } else { performerIdInt, err := strconv.Atoi(performerId) if err != nil { - return fmt.Errorf("Error parsing performer id %s: %s", performerId, err.Error()) + return fmt.Errorf("error parsing performer id %s: %s", performerId, err.Error()) } performer, err := performerQuery.Find(performerIdInt) if err != nil { - return fmt.Errorf("Error finding performer id %s: %s", performerId, err.Error()) + return fmt.Errorf("error finding performer id %s: %s", performerId, err.Error()) + } + + if performer == nil { + return fmt.Errorf("performer with id %s not found", performerId) } performers = append(performers, performer) } + for _, performer := range performers { + if s.Status.stopping { + logger.Info("Stopping due to user request") + return nil + } + + if err := s.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { + if err := autotag.PerformerScenes(performer, paths, r.Scene()); err != nil { + return err + } + if err := autotag.PerformerImages(performer, paths, r.Image()); err != nil { + return err + } + if err := autotag.PerformerGalleries(performer, paths, r.Gallery()); err != nil { + return err + } + + return nil + }); err != nil { + return fmt.Errorf("error auto-tagging performer '%s': %s", 
performer.Name.String, err.Error()) + } + + s.Status.incrementProgress() + } + return nil }); err != nil { logger.Error(err.Error()) continue } - - for _, performer := range performers { - wg.Add(1) - task := AutoTagPerformerTask{ - AutoTagTask: AutoTagTask{ - txnManager: s.TxnManager, - paths: paths, - }, - performer: performer, - } - go task.Start(&wg) - wg.Wait() - - s.Status.incrementProgress() - } } } func (s *singleton) autoTagStudios(paths []string, studioIds []string) { - var wg sync.WaitGroup + if s.Status.stopping { + return + } + for _, studioId := range studioIds { var studios []*models.Studio @@ -710,46 +803,64 @@ func (s *singleton) autoTagStudios(paths []string, studioIds []string) { var err error studios, err = studioQuery.All() if err != nil { - return fmt.Errorf("Error querying studios: %s", err.Error()) + return fmt.Errorf("error querying studios: %s", err.Error()) } } else { studioIdInt, err := strconv.Atoi(studioId) if err != nil { - return fmt.Errorf("Error parsing studio id %s: %s", studioId, err.Error()) + return fmt.Errorf("error parsing studio id %s: %s", studioId, err.Error()) } studio, err := studioQuery.Find(studioIdInt) if err != nil { - return fmt.Errorf("Error finding studio id %s: %s", studioId, err.Error()) + return fmt.Errorf("error finding studio id %s: %s", studioId, err.Error()) } + + if studio == nil { + return fmt.Errorf("studio with id %s not found", studioId) + } + studios = append(studios, studio) } + for _, studio := range studios { + if s.Status.stopping { + logger.Info("Stopping due to user request") + return nil + } + + if err := s.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { + if err := autotag.StudioScenes(studio, paths, r.Scene()); err != nil { + return err + } + if err := autotag.StudioImages(studio, paths, r.Image()); err != nil { + return err + } + if err := autotag.StudioGalleries(studio, paths, r.Gallery()); err != nil { + return err + } + + return nil + }); err != nil { + return 
fmt.Errorf("error auto-tagging studio '%s': %s", studio.Name.String, err.Error()) + } + + s.Status.incrementProgress() + } + return nil }); err != nil { logger.Error(err.Error()) continue } - - for _, studio := range studios { - wg.Add(1) - task := AutoTagStudioTask{ - AutoTagTask: AutoTagTask{ - txnManager: s.TxnManager, - paths: paths, - }, - studio: studio, - } - go task.Start(&wg) - wg.Wait() - - s.Status.incrementProgress() - } } } func (s *singleton) autoTagTags(paths []string, tagIds []string) { - var wg sync.WaitGroup + if s.Status.stopping { + return + } + for _, tagId := range tagIds { var tags []*models.Tag if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { @@ -758,41 +869,51 @@ func (s *singleton) autoTagTags(paths []string, tagIds []string) { var err error tags, err = tagQuery.All() if err != nil { - return fmt.Errorf("Error querying tags: %s", err.Error()) + return fmt.Errorf("error querying tags: %s", err.Error()) } } else { tagIdInt, err := strconv.Atoi(tagId) if err != nil { - return fmt.Errorf("Error parsing tag id %s: %s", tagId, err.Error()) + return fmt.Errorf("error parsing tag id %s: %s", tagId, err.Error()) } tag, err := tagQuery.Find(tagIdInt) if err != nil { - return fmt.Errorf("Error finding tag id %s: %s", tagId, err.Error()) + return fmt.Errorf("error finding tag id %s: %s", tagId, err.Error()) } tags = append(tags, tag) } + for _, tag := range tags { + if s.Status.stopping { + logger.Info("Stopping due to user request") + return nil + } + + if err := s.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { + if err := autotag.TagScenes(tag, paths, r.Scene()); err != nil { + return err + } + if err := autotag.TagImages(tag, paths, r.Image()); err != nil { + return err + } + if err := autotag.TagGalleries(tag, paths, r.Gallery()); err != nil { + return err + } + + return nil + }); err != nil { + return fmt.Errorf("error auto-tagging tag '%s': %s", tag.Name, err.Error()) + } + + 
s.Status.incrementProgress() + } + return nil }); err != nil { logger.Error(err.Error()) continue } - - for _, tag := range tags { - wg.Add(1) - task := AutoTagTagTask{ - AutoTagTask: AutoTagTask{ - txnManager: s.TxnManager, - paths: paths, - }, - tag: tag, - } - go task.Start(&wg) - wg.Wait() - - s.Status.incrementProgress() - } } } @@ -851,7 +972,7 @@ func (s *singleton) Clean(input models.CleanMetadataInput) { var wg sync.WaitGroup s.Status.Progress = 0 total := len(scenes) + len(images) + len(galleries) - fileNamingAlgo := config.GetVideoFileNamingAlgorithm() + fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm() for i, scene := range scenes { s.Status.setProgress(i, total) if s.Status.stopping { @@ -933,7 +1054,7 @@ func (s *singleton) MigrateHash() { go func() { defer s.returnToIdleState() - fileNamingAlgo := config.GetVideoFileNamingAlgorithm() + fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm() logger.Infof("Migrating generated files for %s naming hash", fileNamingAlgo.String()) var scenes []*models.Scene @@ -992,6 +1113,7 @@ type totalsGenerate struct { imagePreviews int64 markers int64 transcodes int64 + phashes int64 } func (s *singleton) neededGenerate(scenes []*models.Scene, input models.GenerateMetadataInput) *totalsGenerate { @@ -1008,7 +1130,7 @@ func (s *singleton) neededGenerate(scenes []*models.Scene, input models.Generate chTimeout <- struct{}{} }() - fileNamingAlgo := config.GetVideoFileNamingAlgorithm() + fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm() overwrite := false if input.Overwrite != nil { overwrite = *input.Overwrite @@ -1065,6 +1187,17 @@ func (s *singleton) neededGenerate(scenes []*models.Scene, input models.Generate totals.transcodes++ } } + + if input.Phashes { + task := GeneratePhashTask{ + Scene: *scene, + fileNamingAlgorithm: fileNamingAlgo, + } + + if task.shouldGenerate() { + totals.phashes++ + } + } } //check for timeout select { @@ -1076,3 +1209,109 @@ func (s 
*singleton) neededGenerate(scenes []*models.Scene, input models.Generate } return &totals } + +func (s *singleton) StashBoxBatchPerformerTag(input models.StashBoxBatchPerformerTagInput) { + if s.Status.Status != Idle { + return + } + s.Status.SetStatus(StashBoxBatchPerformer) + s.Status.indefiniteProgress() + + go func() { + defer s.returnToIdleState() + logger.Infof("Initiating stash-box batch performer tag") + + boxes := config.GetInstance().GetStashBoxes() + if input.Endpoint < 0 || input.Endpoint >= len(boxes) { + logger.Error(fmt.Errorf("invalid stash_box_index %d", input.Endpoint)) + return + } + box := boxes[input.Endpoint] + + var tasks []StashBoxPerformerTagTask + + if len(input.PerformerIds) > 0 { + if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + performerQuery := r.Performer() + + for _, performerID := range input.PerformerIds { + if id, err := strconv.Atoi(performerID); err == nil { + performer, err := performerQuery.Find(id) + if err == nil { + tasks = append(tasks, StashBoxPerformerTagTask{ + txnManager: s.TxnManager, + performer: performer, + refresh: input.Refresh, + box: box, + excluded_fields: input.ExcludeFields, + }) + } else { + return err + } + } + } + return nil + }); err != nil { + logger.Error(err.Error()) + } + } else if len(input.PerformerNames) > 0 { + for i := range input.PerformerNames { + if len(input.PerformerNames[i]) > 0 { + tasks = append(tasks, StashBoxPerformerTagTask{ + txnManager: s.TxnManager, + name: &input.PerformerNames[i], + refresh: input.Refresh, + box: box, + excluded_fields: input.ExcludeFields, + }) + } + } + } else { + if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + performerQuery := r.Performer() + var performers []*models.Performer + var err error + if input.Refresh { + performers, err = performerQuery.FindByStashIDStatus(true, box.Endpoint) + } else { + performers, err = performerQuery.FindByStashIDStatus(false, 
box.Endpoint) + } + if err != nil { + return fmt.Errorf("error querying performers: %s", err.Error()) + } + + for _, performer := range performers { + tasks = append(tasks, StashBoxPerformerTagTask{ + txnManager: s.TxnManager, + performer: performer, + refresh: input.Refresh, + box: box, + excluded_fields: input.ExcludeFields, + }) + } + return nil + }); err != nil { + logger.Error(err.Error()) + return + } + } + + if len(tasks) == 0 { + s.returnToIdleState() + return + } + + s.Status.setProgress(0, len(tasks)) + + logger.Infof("Starting stash-box batch operation for %d performers", len(tasks)) + + var wg sync.WaitGroup + for _, task := range tasks { + wg.Add(1) + go task.Start(&wg) + wg.Wait() + + s.Status.incrementProgress() + } + }() +} diff --git a/pkg/manager/paths/paths.go b/pkg/manager/paths/paths.go index 459c60943..0d06af2c0 100644 --- a/pkg/manager/paths/paths.go +++ b/pkg/manager/paths/paths.go @@ -13,31 +13,27 @@ type Paths struct { SceneMarkers *sceneMarkerPaths } -func NewPaths() *Paths { +func NewPaths(generatedPath string) *Paths { p := Paths{} - p.Generated = newGeneratedPaths() + p.Generated = newGeneratedPaths(generatedPath) p.Scene = newScenePaths(p) p.SceneMarkers = newSceneMarkerPaths(p) return &p } -func GetConfigDirectory() string { +func GetStashHomeDirectory() string { return filepath.Join(utils.GetHomeDirectory(), ".stash") } func GetDefaultDatabaseFilePath() string { - return filepath.Join(GetConfigDirectory(), "stash-go.sqlite") -} - -func GetDefaultConfigFilePath() string { - return filepath.Join(GetConfigDirectory(), "config.yml") + return filepath.Join(GetStashHomeDirectory(), "stash-go.sqlite") } func GetSSLKey() string { - return filepath.Join(GetConfigDirectory(), "stash.key") + return filepath.Join(GetStashHomeDirectory(), "stash.key") } func GetSSLCert() string { - return filepath.Join(GetConfigDirectory(), "stash.crt") + return filepath.Join(GetStashHomeDirectory(), "stash.crt") } diff --git 
a/pkg/manager/paths/paths_generated.go b/pkg/manager/paths/paths_generated.go index 25aef7f45..234f3918b 100644 --- a/pkg/manager/paths/paths_generated.go +++ b/pkg/manager/paths/paths_generated.go @@ -5,7 +5,6 @@ import ( "io/ioutil" "path/filepath" - "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/utils" ) @@ -22,15 +21,15 @@ type generatedPaths struct { Tmp string } -func newGeneratedPaths() *generatedPaths { +func newGeneratedPaths(path string) *generatedPaths { gp := generatedPaths{} - gp.Screenshots = filepath.Join(config.GetGeneratedPath(), "screenshots") - gp.Thumbnails = filepath.Join(config.GetGeneratedPath(), "thumbnails") - gp.Vtt = filepath.Join(config.GetGeneratedPath(), "vtt") - gp.Markers = filepath.Join(config.GetGeneratedPath(), "markers") - gp.Transcodes = filepath.Join(config.GetGeneratedPath(), "transcodes") - gp.Downloads = filepath.Join(config.GetGeneratedPath(), "download_stage") - gp.Tmp = filepath.Join(config.GetGeneratedPath(), "tmp") + gp.Screenshots = filepath.Join(path, "screenshots") + gp.Thumbnails = filepath.Join(path, "thumbnails") + gp.Vtt = filepath.Join(path, "vtt") + gp.Markers = filepath.Join(path, "markers") + gp.Transcodes = filepath.Join(path, "transcodes") + gp.Downloads = filepath.Join(path, "download_stage") + gp.Tmp = filepath.Join(path, "tmp") return &gp } diff --git a/pkg/manager/scene.go b/pkg/manager/scene.go index 52ff02e5f..e262dda18 100644 --- a/pkg/manager/scene.go +++ b/pkg/manager/scene.go @@ -54,7 +54,7 @@ func DestroySceneMarker(scene *models.Scene, sceneMarker *models.SceneMarker, qb // delete the preview for the marker return func() { seconds := int(sceneMarker.Seconds) - DeleteSceneMarkerFiles(scene, seconds, config.GetVideoFileNamingAlgorithm()) + DeleteSceneMarkerFiles(scene, seconds, config.GetInstance().GetVideoFileNamingAlgorithm()) }, nil } @@ -243,12 +243,11 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreami if scene.AudioCodec.Valid { 
audioCodec = ffmpeg.AudioCodec(scene.AudioCodec.String) } - container, err := GetSceneFileContainer(scene) - if err != nil { - return nil, err - } - if HasTranscode(scene, config.GetVideoFileNamingAlgorithm()) || ffmpeg.IsValidAudioForContainer(audioCodec, container) { + // don't care if we can't get the container + container, _ := GetSceneFileContainer(scene) + + if HasTranscode(scene, config.GetInstance().GetVideoFileNamingAlgorithm()) || ffmpeg.IsValidAudioForContainer(audioCodec, container) { label := "Direct stream" ret = append(ret, &models.SceneStreamEndpoint{ URL: directStreamURL, diff --git a/pkg/manager/task_autotag.go b/pkg/manager/task_autotag.go index cbd7cdc32..8cb3f80cf 100644 --- a/pkg/manager/task_autotag.go +++ b/pkg/manager/task_autotag.go @@ -2,156 +2,351 @@ package manager import ( "context" - "database/sql" - "fmt" "path/filepath" "strings" "sync" + "github.com/stashapp/stash/pkg/autotag" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" ) -type AutoTagTask struct { +type autoTagFilesTask struct { paths []string + performers bool + studios bool + tags bool + txnManager models.TransactionManager + status *TaskStatus } -type AutoTagPerformerTask struct { - AutoTagTask - performer *models.Performer -} - -func (t *AutoTagPerformerTask) Start(wg *sync.WaitGroup) { - defer wg.Done() - - t.autoTagPerformer() -} - -func (t *AutoTagTask) getQueryRegex(name string) string { - const separatorChars = `.\-_ ` - // handle path separators - const separator = `[` + separatorChars + `]` - - ret := strings.Replace(name, " ", separator+"*", -1) - ret = `(?:^|_|[^\w\d])` + ret + `(?:$|_|[^\w\d])` - return ret -} - -func (t *AutoTagTask) getQueryFilter(regex string) *models.SceneFilterType { - organized := false - ret := &models.SceneFilterType{ - Path: &models.StringCriterionInput{ - Modifier: models.CriterionModifierMatchesRegex, - Value: "(?i)" + regex, - }, - Organized: &organized, - } - +func 
(t *autoTagFilesTask) makeSceneFilter() *models.SceneFilterType { + ret := &models.SceneFilterType{} + or := ret sep := string(filepath.Separator) - var or *models.SceneFilterType for _, p := range t.paths { - newOr := &models.SceneFilterType{} - if or == nil { - ret.And = newOr - } else { - or.Or = newOr - } - - or = newOr - if !strings.HasSuffix(p, sep) { p = p + sep } + if ret.Path == nil { + or = ret + } else { + newOr := &models.SceneFilterType{} + or.Or = newOr + or = newOr + } + or.Path = &models.StringCriterionInput{ Modifier: models.CriterionModifierEquals, Value: p + "%", } } + organized := false + ret.Organized = &organized + return ret } -func (t *AutoTagTask) getFindFilter() *models.FindFilterType { - perPage := 0 +func (t *autoTagFilesTask) makeImageFilter() *models.ImageFilterType { + ret := &models.ImageFilterType{} + or := ret + sep := string(filepath.Separator) + + for _, p := range t.paths { + if !strings.HasSuffix(p, sep) { + p = p + sep + } + + if ret.Path == nil { + or = ret + } else { + newOr := &models.ImageFilterType{} + or.Or = newOr + or = newOr + } + + or.Path = &models.StringCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: p + "%", + } + } + + organized := false + ret.Organized = &organized + + return ret +} + +func (t *autoTagFilesTask) makeGalleryFilter() *models.GalleryFilterType { + ret := &models.GalleryFilterType{} + or := ret + sep := string(filepath.Separator) + + for _, p := range t.paths { + if !strings.HasSuffix(p, sep) { + p = p + sep + } + + if ret.Path == nil { + or = ret + } else { + newOr := &models.GalleryFilterType{} + or.Or = newOr + or = newOr + } + + or.Path = &models.StringCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: p + "%", + } + } + + organized := false + ret.Organized = &organized + + return ret +} + +func (t *autoTagFilesTask) getCount(r models.ReaderRepository) (int, error) { + pp := 0 + findFilter := &models.FindFilterType{ + PerPage: &pp, + } + + _, sceneCount, 
err := r.Scene().Query(t.makeSceneFilter(), findFilter) + if err != nil { + return 0, err + } + + _, imageCount, err := r.Image().Query(t.makeImageFilter(), findFilter) + if err != nil { + return 0, err + } + + _, galleryCount, err := r.Gallery().Query(t.makeGalleryFilter(), findFilter) + if err != nil { + return 0, err + } + + return sceneCount + imageCount + galleryCount, nil +} + +func (t *autoTagFilesTask) batchFindFilter(batchSize int) *models.FindFilterType { + page := 1 return &models.FindFilterType{ - PerPage: &perPage, + PerPage: &batchSize, + Page: &page, } } -func (t *AutoTagPerformerTask) autoTagPerformer() { - regex := t.getQueryRegex(t.performer.Name.String) +func (t *autoTagFilesTask) processScenes(r models.ReaderRepository) error { + if t.status.stopping { + return nil + } - if err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error { - qb := r.Scene() + batchSize := 1000 - scenes, _, err := qb.Query(t.getQueryFilter(regex), t.getFindFilter()) + findFilter := t.batchFindFilter(batchSize) + sceneFilter := t.makeSceneFilter() + more := true + for more { + scenes, _, err := r.Scene().Query(sceneFilter, findFilter) if err != nil { - return fmt.Errorf("Error querying scenes with regex '%s': %s", regex, err.Error()) + return err } - for _, s := range scenes { - added, err := scene.AddPerformer(qb, s.ID, t.performer.ID) - - if err != nil { - return fmt.Errorf("Error adding performer '%s' to scene '%s': %s", t.performer.Name.String, s.GetTitle(), err.Error()) + for _, ss := range scenes { + if t.status.stopping { + return nil } - if added { - logger.Infof("Added performer '%s' to scene '%s'", t.performer.Name.String, s.GetTitle()) + tt := autoTagSceneTask{ + txnManager: t.txnManager, + scene: ss, + performers: t.performers, + studios: t.studios, + tags: t.tags, } + + var wg sync.WaitGroup + wg.Add(1) + go tt.Start(&wg) + wg.Wait() + + t.status.incrementProgress() + } + + if len(scenes) != batchSize { + more = false + } else { + 
*findFilter.Page++ + } + } + + return nil +} + +func (t *autoTagFilesTask) processImages(r models.ReaderRepository) error { + if t.status.stopping { + return nil + } + + batchSize := 1000 + + findFilter := t.batchFindFilter(batchSize) + imageFilter := t.makeImageFilter() + + more := true + for more { + images, _, err := r.Image().Query(imageFilter, findFilter) + if err != nil { + return err + } + + for _, ss := range images { + if t.status.stopping { + return nil + } + + tt := autoTagImageTask{ + txnManager: t.txnManager, + image: ss, + performers: t.performers, + studios: t.studios, + tags: t.tags, + } + + var wg sync.WaitGroup + wg.Add(1) + go tt.Start(&wg) + wg.Wait() + + t.status.incrementProgress() + } + + if len(images) != batchSize { + more = false + } else { + *findFilter.Page++ + } + } + + return nil +} + +func (t *autoTagFilesTask) processGalleries(r models.ReaderRepository) error { + if t.status.stopping { + return nil + } + + batchSize := 1000 + + findFilter := t.batchFindFilter(batchSize) + galleryFilter := t.makeGalleryFilter() + + more := true + for more { + galleries, _, err := r.Gallery().Query(galleryFilter, findFilter) + if err != nil { + return err + } + + for _, ss := range galleries { + if t.status.stopping { + return nil + } + + tt := autoTagGalleryTask{ + txnManager: t.txnManager, + gallery: ss, + performers: t.performers, + studios: t.studios, + tags: t.tags, + } + + var wg sync.WaitGroup + wg.Add(1) + go tt.Start(&wg) + wg.Wait() + + t.status.incrementProgress() + } + + if len(galleries) != batchSize { + more = false + } else { + *findFilter.Page++ + } + } + + return nil +} + +func (t *autoTagFilesTask) process() { + if err := t.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + total, err := t.getCount(r) + if err != nil { + return err + } + + t.status.total = total + + logger.Infof("Starting autotag of %d files", total) + + if err := t.processScenes(r); err != nil { + return err + } + + if err := 
t.processImages(r); err != nil { + return err + } + + if err := t.processGalleries(r); err != nil { + return err + } + + if t.status.stopping { + logger.Info("Stopping due to user request") } return nil }); err != nil { logger.Error(err.Error()) } + + logger.Info("Finished autotag") } -type AutoTagStudioTask struct { - AutoTagTask - studio *models.Studio +type autoTagSceneTask struct { + txnManager models.TransactionManager + scene *models.Scene + + performers bool + studios bool + tags bool } -func (t *AutoTagStudioTask) Start(wg *sync.WaitGroup) { +func (t *autoTagSceneTask) Start(wg *sync.WaitGroup) { defer wg.Done() - - t.autoTagStudio() -} - -func (t *AutoTagStudioTask) autoTagStudio() { - regex := t.getQueryRegex(t.studio.Name.String) - if err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error { - qb := r.Scene() - scenes, _, err := qb.Query(t.getQueryFilter(regex), t.getFindFilter()) - - if err != nil { - return fmt.Errorf("Error querying scenes with regex '%s': %s", regex, err.Error()) + if t.performers { + if err := autotag.ScenePerformers(t.scene, r.Scene(), r.Performer()); err != nil { + return err + } } - - for _, s := range scenes { - // #306 - don't overwrite studio if already present - if s.StudioID.Valid { - // don't modify - continue + if t.studios { + if err := autotag.SceneStudios(t.scene, r.Scene(), r.Studio()); err != nil { + return err } - - logger.Infof("Adding studio '%s' to scene '%s'", t.studio.Name.String, s.GetTitle()) - - // set the studio id - studioID := sql.NullInt64{Int64: int64(t.studio.ID), Valid: true} - scenePartial := models.ScenePartial{ - ID: s.ID, - StudioID: &studioID, - } - - if _, err := qb.Update(scenePartial); err != nil { - return fmt.Errorf("Error adding studio to scene: %s", err.Error()) + } + if t.tags { + if err := autotag.SceneTags(t.scene, r.Scene(), r.Tag()); err != nil { + return err } } @@ -161,37 +356,65 @@ func (t *AutoTagStudioTask) autoTagStudio() { } } -type AutoTagTagTask struct { - 
AutoTagTask - tag *models.Tag +type autoTagImageTask struct { + txnManager models.TransactionManager + image *models.Image + + performers bool + studios bool + tags bool } -func (t *AutoTagTagTask) Start(wg *sync.WaitGroup) { +func (t *autoTagImageTask) Start(wg *sync.WaitGroup) { defer wg.Done() - - t.autoTagTag() -} - -func (t *AutoTagTagTask) autoTagTag() { - regex := t.getQueryRegex(t.tag.Name) - if err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error { - qb := r.Scene() - scenes, _, err := qb.Query(t.getQueryFilter(regex), t.getFindFilter()) - - if err != nil { - return fmt.Errorf("Error querying scenes with regex '%s': %s", regex, err.Error()) - } - - for _, s := range scenes { - added, err := scene.AddTag(qb, s.ID, t.tag.ID) - - if err != nil { - return fmt.Errorf("Error adding tag '%s' to scene '%s': %s", t.tag.Name, s.GetTitle(), err.Error()) + if t.performers { + if err := autotag.ImagePerformers(t.image, r.Image(), r.Performer()); err != nil { + return err } - - if added { - logger.Infof("Added tag '%s' to scene '%s'", t.tag.Name, s.GetTitle()) + } + if t.studios { + if err := autotag.ImageStudios(t.image, r.Image(), r.Studio()); err != nil { + return err + } + } + if t.tags { + if err := autotag.ImageTags(t.image, r.Image(), r.Tag()); err != nil { + return err + } + } + + return nil + }); err != nil { + logger.Error(err.Error()) + } +} + +type autoTagGalleryTask struct { + txnManager models.TransactionManager + gallery *models.Gallery + + performers bool + studios bool + tags bool +} + +func (t *autoTagGalleryTask) Start(wg *sync.WaitGroup) { + defer wg.Done() + if err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error { + if t.performers { + if err := autotag.GalleryPerformers(t.gallery, r.Gallery(), r.Performer()); err != nil { + return err + } + } + if t.studios { + if err := autotag.GalleryStudios(t.gallery, r.Gallery(), r.Studio()); err != nil { + return err + } + } + if t.tags { + if err := 
autotag.GalleryTags(t.gallery, r.Gallery(), r.Tag()); err != nil { + return err } } diff --git a/pkg/manager/task_autotag_test.go b/pkg/manager/task_autotag_test.go deleted file mode 100644 index 0eb755c4d..000000000 --- a/pkg/manager/task_autotag_test.go +++ /dev/null @@ -1,417 +0,0 @@ -// +build integration - -package manager - -import ( - "context" - "database/sql" - "fmt" - "io/ioutil" - "os" - "strings" - "sync" - "testing" - - "github.com/stashapp/stash/pkg/database" - "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/sqlite" - "github.com/stashapp/stash/pkg/utils" - - _ "github.com/golang-migrate/migrate/v4/database/sqlite3" - _ "github.com/golang-migrate/migrate/v4/source/file" -) - -const testName = "Foo's Bar" -const testExtension = ".mp4" -const existingStudioName = "ExistingStudio" - -const existingStudioSceneName = testName + ".dontChangeStudio" + testExtension - -var existingStudioID int - -var testSeparators = []string{ - ".", - "-", - "_", - " ", -} - -var testEndSeparators = []string{ - "{", - "}", - "(", - ")", - ",", -} - -func generateNamePatterns(name, separator string) []string { - var ret []string - ret = append(ret, fmt.Sprintf("%s%saaa"+testExtension, name, separator)) - ret = append(ret, fmt.Sprintf("aaa%s%s"+testExtension, separator, name)) - ret = append(ret, fmt.Sprintf("aaa%s%s%sbbb"+testExtension, separator, name, separator)) - ret = append(ret, fmt.Sprintf("dir/%s%saaa"+testExtension, name, separator)) - ret = append(ret, fmt.Sprintf("dir\\%s%saaa"+testExtension, name, separator)) - ret = append(ret, fmt.Sprintf("%s%saaa/dir/bbb"+testExtension, name, separator)) - ret = append(ret, fmt.Sprintf("%s%saaa\\dir\\bbb"+testExtension, name, separator)) - ret = append(ret, fmt.Sprintf("dir/%s%s/aaa"+testExtension, name, separator)) - ret = append(ret, fmt.Sprintf("dir\\%s%s\\aaa"+testExtension, name, separator)) - - return ret -} - -func generateFalseNamePattern(name string, separator string) string { - splitted := 
strings.Split(name, " ") - - return fmt.Sprintf("%s%saaa%s%s"+testExtension, splitted[0], separator, separator, splitted[1]) -} - -func testTeardown(databaseFile string) { - err := database.DB.Close() - - if err != nil { - panic(err) - } - - err = os.Remove(databaseFile) - if err != nil { - panic(err) - } -} - -func runTests(m *testing.M) int { - // create the database file - f, err := ioutil.TempFile("", "*.sqlite") - if err != nil { - panic(fmt.Sprintf("Could not create temporary file: %s", err.Error())) - } - - f.Close() - databaseFile := f.Name() - database.Initialize(databaseFile) - - // defer close and delete the database - defer testTeardown(databaseFile) - - err = populateDB() - if err != nil { - panic(fmt.Sprintf("Could not populate database: %s", err.Error())) - } else { - // run the tests - return m.Run() - } -} - -func TestMain(m *testing.M) { - ret := runTests(m) - os.Exit(ret) -} - -func createPerformer(pqb models.PerformerWriter) error { - // create the performer - performer := models.Performer{ - Checksum: testName, - Name: sql.NullString{Valid: true, String: testName}, - Favorite: sql.NullBool{Valid: true, Bool: false}, - } - - _, err := pqb.Create(performer) - if err != nil { - return err - } - - return nil -} - -func createStudio(qb models.StudioWriter, name string) (*models.Studio, error) { - // create the studio - studio := models.Studio{ - Checksum: name, - Name: sql.NullString{Valid: true, String: testName}, - } - - return qb.Create(studio) -} - -func createTag(qb models.TagWriter) error { - // create the studio - tag := models.Tag{ - Name: testName, - } - - _, err := qb.Create(tag) - if err != nil { - return err - } - - return nil -} - -func createScenes(sqb models.SceneReaderWriter) error { - // create the scenes - var scenePatterns []string - var falseScenePatterns []string - - separators := append(testSeparators, testEndSeparators...) 
- - for _, separator := range separators { - scenePatterns = append(scenePatterns, generateNamePatterns(testName, separator)...) - scenePatterns = append(scenePatterns, generateNamePatterns(strings.ToLower(testName), separator)...) - falseScenePatterns = append(falseScenePatterns, generateFalseNamePattern(testName, separator)) - } - - // add test cases for intra-name separators - for _, separator := range testSeparators { - if separator != " " { - scenePatterns = append(scenePatterns, generateNamePatterns(strings.Replace(testName, " ", separator, -1), separator)...) - } - } - - for _, fn := range scenePatterns { - err := createScene(sqb, makeScene(fn, true)) - if err != nil { - return err - } - } - for _, fn := range falseScenePatterns { - err := createScene(sqb, makeScene(fn, false)) - if err != nil { - return err - } - } - - // add organized scenes - for _, fn := range scenePatterns { - s := makeScene("organized"+fn, false) - s.Organized = true - err := createScene(sqb, s) - if err != nil { - return err - } - } - - // create scene with existing studio io - studioScene := makeScene(existingStudioSceneName, true) - studioScene.StudioID = sql.NullInt64{Valid: true, Int64: int64(existingStudioID)} - err := createScene(sqb, studioScene) - if err != nil { - return err - } - - return nil -} - -func makeScene(name string, expectedResult bool) *models.Scene { - scene := &models.Scene{ - Checksum: sql.NullString{String: utils.MD5FromString(name), Valid: true}, - Path: name, - } - - // if expectedResult is true then we expect it to match, set the title accordingly - if expectedResult { - scene.Title = sql.NullString{Valid: true, String: name} - } - - return scene -} - -func createScene(sqb models.SceneWriter, scene *models.Scene) error { - _, err := sqb.Create(*scene) - - if err != nil { - return fmt.Errorf("Failed to create scene with name '%s': %s", scene.Path, err.Error()) - } - - return nil -} - -func withTxn(f func(r models.Repository) error) error { - t := 
sqlite.NewTransactionManager() - return t.WithTxn(context.TODO(), f) -} - -func populateDB() error { - if err := withTxn(func(r models.Repository) error { - err := createPerformer(r.Performer()) - if err != nil { - return err - } - - _, err = createStudio(r.Studio(), testName) - if err != nil { - return err - } - - // create existing studio - existingStudio, err := createStudio(r.Studio(), existingStudioName) - if err != nil { - return err - } - - existingStudioID = existingStudio.ID - - err = createTag(r.Tag()) - if err != nil { - return err - } - - err = createScenes(r.Scene()) - if err != nil { - return err - } - - return nil - }); err != nil { - return err - } - - return nil -} - -func TestParsePerformers(t *testing.T) { - var performers []*models.Performer - if err := withTxn(func(r models.Repository) error { - var err error - performers, err = r.Performer().All() - return err - }); err != nil { - t.Errorf("Error getting performer: %s", err) - return - } - - task := AutoTagPerformerTask{ - AutoTagTask: AutoTagTask{ - txnManager: sqlite.NewTransactionManager(), - }, - performer: performers[0], - } - - var wg sync.WaitGroup - wg.Add(1) - task.Start(&wg) - - // verify that scenes were tagged correctly - withTxn(func(r models.Repository) error { - pqb := r.Performer() - - scenes, err := r.Scene().All() - if err != nil { - t.Error(err.Error()) - } - - for _, scene := range scenes { - performers, err := pqb.FindBySceneID(scene.ID) - - if err != nil { - t.Errorf("Error getting scene performers: %s", err.Error()) - } - - // title is only set on scenes where we expect performer to be set - if scene.Title.String == scene.Path && len(performers) == 0 { - t.Errorf("Did not set performer '%s' for path '%s'", testName, scene.Path) - } else if scene.Title.String != scene.Path && len(performers) > 0 { - t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, scene.Path) - } - } - - return nil - }) -} - -func TestParseStudios(t *testing.T) { - var studios 
[]*models.Studio - if err := withTxn(func(r models.Repository) error { - var err error - studios, err = r.Studio().All() - return err - }); err != nil { - t.Errorf("Error getting studio: %s", err) - return - } - - task := AutoTagStudioTask{ - AutoTagTask: AutoTagTask{ - txnManager: sqlite.NewTransactionManager(), - }, - studio: studios[0], - } - - var wg sync.WaitGroup - wg.Add(1) - task.Start(&wg) - - // verify that scenes were tagged correctly - withTxn(func(r models.Repository) error { - scenes, err := r.Scene().All() - if err != nil { - t.Error(err.Error()) - } - - for _, scene := range scenes { - // check for existing studio id scene first - if scene.Path == existingStudioSceneName { - if scene.StudioID.Int64 != int64(existingStudioID) { - t.Error("Incorrectly overwrote studio ID for scene with existing studio ID") - } - } else { - // title is only set on scenes where we expect studio to be set - if scene.Title.String == scene.Path && scene.StudioID.Int64 != int64(studios[0].ID) { - t.Errorf("Did not set studio '%s' for path '%s'", testName, scene.Path) - } else if scene.Title.String != scene.Path && scene.StudioID.Int64 == int64(studios[0].ID) { - t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, scene.Path) - } - } - } - - return nil - }) -} - -func TestParseTags(t *testing.T) { - var tags []*models.Tag - if err := withTxn(func(r models.Repository) error { - var err error - tags, err = r.Tag().All() - return err - }); err != nil { - t.Errorf("Error getting performer: %s", err) - return - } - - task := AutoTagTagTask{ - AutoTagTask: AutoTagTask{ - txnManager: sqlite.NewTransactionManager(), - }, - tag: tags[0], - } - - var wg sync.WaitGroup - wg.Add(1) - task.Start(&wg) - - // verify that scenes were tagged correctly - withTxn(func(r models.Repository) error { - scenes, err := r.Scene().All() - if err != nil { - t.Error(err.Error()) - } - - tqb := r.Tag() - - for _, scene := range scenes { - tags, err := tqb.FindBySceneID(scene.ID) - - if err != 
nil { - t.Errorf("Error getting scene tags: %s", err.Error()) - } - - // title is only set on scenes where we expect performer to be set - if scene.Title.String == scene.Path && len(tags) == 0 { - t.Errorf("Did not set tag '%s' for path '%s'", testName, scene.Path) - } else if scene.Title.String != scene.Path && len(tags) > 0 { - t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, scene.Path) - } - } - - return nil - }) -} diff --git a/pkg/manager/task_clean.go b/pkg/manager/task_clean.go index fd13277b5..a36fcebb8 100644 --- a/pkg/manager/task_clean.go +++ b/pkg/manager/task_clean.go @@ -42,7 +42,7 @@ func (t *CleanTask) shouldClean(path string) bool { fileExists := image.FileExists(path) // #1102 - clean anything in generated path - generatedPath := config.GetGeneratedPath() + generatedPath := config.GetInstance().GetGeneratedPath() if !fileExists || getStashFromPath(path) == nil || utils.IsPathInDir(generatedPath, path) { logger.Infof("File not found. Cleaning: \"%s\"", path) return true @@ -62,6 +62,7 @@ func (t *CleanTask) shouldCleanScene(s *models.Scene) bool { return true } + config := config.GetInstance() if !matchExtension(s.Path, config.GetVideoExtensions()) { logger.Infof("File extension does not match video extensions. Cleaning: \"%s\"", s.Path) return true @@ -92,6 +93,7 @@ func (t *CleanTask) shouldCleanGallery(g *models.Gallery) bool { return true } + config := config.GetInstance() if !matchExtension(path, config.GetGalleryExtensions()) { logger.Infof("File extension does not match gallery extensions. Cleaning: \"%s\"", path) return true @@ -121,6 +123,7 @@ func (t *CleanTask) shouldCleanImage(s *models.Image) bool { return true } + config := config.GetInstance() if !matchExtension(s.Path, config.GetImageExtensions()) { logger.Infof("File extension does not match image extensions. 
Cleaning: \"%s\"", s.Path) return true @@ -199,7 +202,7 @@ func (t *CleanTask) fileExists(filename string) (bool, error) { } func getStashFromPath(pathToCheck string) *models.StashConfig { - for _, s := range config.GetStashPaths() { + for _, s := range config.GetInstance().GetStashPaths() { if utils.IsPathInDir(s.Path, filepath.Dir(pathToCheck)) { return s } @@ -208,7 +211,7 @@ func getStashFromPath(pathToCheck string) *models.StashConfig { } func getStashFromDirPath(pathToCheck string) *models.StashConfig { - for _, s := range config.GetStashPaths() { + for _, s := range config.GetInstance().GetStashPaths() { if utils.IsPathInDir(s.Path, pathToCheck) { return s } diff --git a/pkg/manager/task_export.go b/pkg/manager/task_export.go index b949b9389..59bacca24 100644 --- a/pkg/manager/task_export.go +++ b/pkg/manager/task_export.go @@ -107,7 +107,7 @@ func (t *ExportTask) Start(wg *sync.WaitGroup) { startTime := time.Now() if t.full { - t.baseDir = config.GetMetadataPath() + t.baseDir = config.GetInstance().GetMetadataPath() } else { var err error t.baseDir, err = instance.Paths.Generated.TempDir("export") diff --git a/pkg/manager/task_generate_phash.go b/pkg/manager/task_generate_phash.go new file mode 100644 index 000000000..f8ef6d6be --- /dev/null +++ b/pkg/manager/task_generate_phash.go @@ -0,0 +1,62 @@ +package manager + +import ( + "github.com/remeh/sizedwaitgroup" + + "context" + "database/sql" + + "github.com/stashapp/stash/pkg/ffmpeg" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" +) + +type GeneratePhashTask struct { + Scene models.Scene + fileNamingAlgorithm models.HashAlgorithm + txnManager models.TransactionManager +} + +func (t *GeneratePhashTask) Start(wg *sizedwaitgroup.SizedWaitGroup) { + defer wg.Done() + + if !t.shouldGenerate() { + return + } + + videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false) + if err != nil { + logger.Errorf("error reading video file: %s", err.Error()) + return 
+ } + + sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm) + generator, err := NewPhashGenerator(*videoFile, sceneHash) + + if err != nil { + logger.Errorf("error creating phash generator: %s", err.Error()) + return + } + hash, err := generator.Generate() + if err != nil { + logger.Errorf("error generating phash: %s", err.Error()) + return + } + + if err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error { + qb := r.Scene() + hashValue := sql.NullInt64{Int64: int64(*hash), Valid: true} + scenePartial := models.ScenePartial{ + ID: t.Scene.ID, + Phash: &hashValue, + } + _, err := qb.Update(scenePartial) + return err + }); err != nil { + logger.Error(err.Error()) + } +} + +func (t *GeneratePhashTask) shouldGenerate() bool { + return !t.Scene.Phash.Valid +} diff --git a/pkg/manager/task_import.go b/pkg/manager/task_import.go index d5f8b720b..76a9a2954 100644 --- a/pkg/manager/task_import.go +++ b/pkg/manager/task_import.go @@ -120,7 +120,7 @@ func (t *ImportTask) Start(wg *sync.WaitGroup) { t.scraped = scraped if t.Reset { - err := database.Reset(config.GetDatabasePath()) + err := database.Reset(config.GetInstance().GetDatabasePath()) if err != nil { logger.Errorf("Error resetting database: %s", err.Error()) diff --git a/pkg/manager/task_scan.go b/pkg/manager/task_scan.go index d35432f4a..41497cb9e 100644 --- a/pkg/manager/task_scan.go +++ b/pkg/manager/task_scan.go @@ -31,6 +31,7 @@ type ScanTask struct { calculateMD5 bool fileNamingAlgorithm models.HashAlgorithm GenerateSprite bool + GeneratePhash bool GeneratePreview bool GenerateImagePreview bool zipGallery *models.Gallery @@ -55,9 +56,20 @@ func (t *ScanTask) Start(wg *sizedwaitgroup.SizedWaitGroup) { go taskSprite.Start(&iwg) } + if t.GeneratePhash { + iwg.Add() + taskPhash := GeneratePhashTask{ + Scene: *s, + fileNamingAlgorithm: t.fileNamingAlgorithm, + txnManager: t.TxnManager, + } + go taskPhash.Start(&iwg) + } + if t.GeneratePreview { iwg.Add() + config := config.GetInstance() var 
previewSegmentDuration = config.GetPreviewSegmentDuration() var previewSegments = config.GetPreviewSegments() var previewExcludeStart = config.GetPreviewExcludeStart() @@ -228,6 +240,10 @@ func (t *ScanTask) scanGallery() { Timestamp: fileModTime, Valid: true, }, + Title: sql.NullString{ + String: utils.GetNameFromPath(t.FilePath, t.StripFileExtension), + Valid: true, + }, CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, } @@ -302,7 +318,7 @@ func (t *ScanTask) associateGallery(wg *sizedwaitgroup.SizedWaitGroup) { basename := strings.TrimSuffix(t.FilePath, filepath.Ext(t.FilePath)) var relatedFiles []string - vExt := config.GetVideoExtensions() + vExt := config.GetInstance().GetVideoExtensions() // make a list of media files that can be related to the gallery for _, ext := range vExt { related := basename + "." + ext @@ -387,6 +403,7 @@ func (t *ScanTask) scanScene() *models.Scene { // if the mod time of the file is different than that of the associated // scene, then recalculate the checksum and regenerate the thumbnail modified := t.isFileModified(fileModTime, s.FileModTime) + config := config.GetInstance() if modified || !s.Size.Valid { oldHash := s.GetHash(config.GetVideoFileNamingAlgorithm()) s, err = t.rescanScene(s, fileModTime) @@ -840,6 +857,9 @@ func (t *ScanTask) scanImage() { CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, } + newImage.Title.String = image.GetFilename(&newImage, t.StripFileExtension) + newImage.Title.Valid = true + if err := image.SetFileDetails(&newImage); err != nil { logger.Error(err.Error()) return @@ -863,7 +883,7 @@ func (t *ScanTask) scanImage() { logger.Error(err.Error()) return } - } else if config.GetCreateGalleriesFromFolders() { + } else if config.GetInstance().GetCreateGalleriesFromFolders() { // create gallery from folder or associate with existing gallery logger.Infof("Associating 
image %s with folder gallery", i.Path) if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { @@ -953,6 +973,10 @@ func (t *ScanTask) associateImageWithFolderGallery(imageID int, qb models.Galler }, CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + Title: sql.NullString{ + String: utils.GetNameFromPath(path, false), + Valid: true, + }, } logger.Infof("Creating gallery for folder %s", path) @@ -1016,6 +1040,7 @@ func (t *ScanTask) calculateImageChecksum() (string, error) { } func (t *ScanTask) doesPathExist() bool { + config := config.GetInstance() vidExt := config.GetVideoExtensions() imgExt := config.GetImageExtensions() gExt := config.GetGalleryExtensions() @@ -1046,6 +1071,7 @@ func (t *ScanTask) doesPathExist() bool { } func walkFilesToScan(s *models.StashConfig, f filepath.WalkFunc) error { + config := config.GetInstance() vidExt := config.GetVideoExtensions() imgExt := config.GetImageExtensions() gExt := config.GetGalleryExtensions() diff --git a/pkg/manager/task_stash_box_tag.go b/pkg/manager/task_stash_box_tag.go new file mode 100644 index 000000000..ed049bc92 --- /dev/null +++ b/pkg/manager/task_stash_box_tag.go @@ -0,0 +1,252 @@ +package manager + +import ( + "context" + "database/sql" + "sync" + "time" + + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scraper/stashbox" + "github.com/stashapp/stash/pkg/utils" +) + +type StashBoxPerformerTagTask struct { + txnManager models.TransactionManager + box *models.StashBox + name *string + performer *models.Performer + refresh bool + excluded_fields []string +} + +func (t *StashBoxPerformerTagTask) Start(wg *sync.WaitGroup) { + defer wg.Done() + + t.stashBoxPerformerTag() +} + +func (t *StashBoxPerformerTagTask) stashBoxPerformerTag() { + var performer *models.ScrapedScenePerformer + var err error + + client := stashbox.NewClient(*t.box, t.txnManager) + 
+ if t.refresh { + var performerID string + t.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + stashids, _ := r.Performer().GetStashIDs(t.performer.ID) + for _, id := range stashids { + if id.Endpoint == t.box.Endpoint { + performerID = id.StashID + } + } + return nil + }) + if performerID != "" { + performer, err = client.FindStashBoxPerformerByID(performerID) + } + } else { + var name string + if t.name != nil { + name = *t.name + } else { + name = t.performer.Name.String + } + performer, err = client.FindStashBoxPerformerByName(name) + } + + if err != nil { + logger.Errorf("Error fetching performer data from stash-box: %s", err.Error()) + return + } + + excluded := map[string]bool{} + for _, field := range t.excluded_fields { + excluded[field] = true + } + + if performer != nil { + updatedTime := time.Now() + + if t.performer != nil { + partial := models.PerformerPartial{ + ID: t.performer.ID, + UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime}, + } + + if performer.Aliases != nil && !excluded["aliases"] { + value := getNullString(performer.Aliases) + partial.Aliases = &value + } + if performer.Birthdate != nil && *performer.Birthdate != "" && !excluded["birthdate"] { + value := getDate(performer.Birthdate) + partial.Birthdate = &value + } + if performer.CareerLength != nil && !excluded["career_length"] { + value := getNullString(performer.CareerLength) + partial.CareerLength = &value + } + if performer.Country != nil && !excluded["country"] { + value := getNullString(performer.Country) + partial.Country = &value + } + if performer.Ethnicity != nil && !excluded["ethnicity"] { + value := getNullString(performer.Ethnicity) + partial.Ethnicity = &value + } + if performer.EyeColor != nil && !excluded["eye_color"] { + value := getNullString(performer.EyeColor) + partial.EyeColor = &value + } + if performer.FakeTits != nil && !excluded["fake_tits"] { + value := getNullString(performer.FakeTits) + partial.FakeTits = &value + } + 
if performer.Gender != nil && !excluded["gender"] { + value := getNullString(performer.Gender) + partial.Gender = &value + } + if performer.Height != nil && !excluded["height"] { + value := getNullString(performer.Height) + partial.Height = &value + } + if performer.Instagram != nil && !excluded["instagram"] { + value := getNullString(performer.Instagram) + partial.Instagram = &value + } + if performer.Measurements != nil && !excluded["measurements"] { + value := getNullString(performer.Measurements) + partial.Measurements = &value + } + if !excluded["name"] { + value := sql.NullString{String: performer.Name, Valid: true} + partial.Name = &value + } + if performer.Piercings != nil && !excluded["piercings"] { + value := getNullString(performer.Piercings) + partial.Piercings = &value + } + if performer.Tattoos != nil && !excluded["tattoos"] { + value := getNullString(performer.Tattoos) + partial.Tattoos = &value + } + if performer.Twitter != nil && !excluded["twitter"] { + value := getNullString(performer.Twitter) + partial.Twitter = &value + } + if performer.URL != nil && !excluded["url"] { + value := getNullString(performer.URL) + partial.URL = &value + } + + t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error { + _, err := r.Performer().Update(partial) + + if !t.refresh { + err = r.Performer().UpdateStashIDs(t.performer.ID, []models.StashID{ + { + Endpoint: t.box.Endpoint, + StashID: *performer.RemoteSiteID, + }, + }) + if err != nil { + return err + } + } + + if len(performer.Images) > 0 && !excluded["image"] { + image, err := utils.ReadImageFromURL(performer.Images[0]) + if err != nil { + return err + } + err = r.Performer().UpdateImage(t.performer.ID, image) + } + + if err == nil { + logger.Infof("Updated performer %s", performer.Name) + } + return err + }) + } else if t.name != nil { + currentTime := time.Now() + newPerformer := models.Performer{ + Aliases: getNullString(performer.Aliases), + Birthdate: getDate(performer.Birthdate), + 
CareerLength: getNullString(performer.CareerLength), + Checksum: utils.MD5FromString(performer.Name), + Country: getNullString(performer.Country), + CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + Ethnicity: getNullString(performer.Ethnicity), + EyeColor: getNullString(performer.EyeColor), + FakeTits: getNullString(performer.FakeTits), + Favorite: sql.NullBool{Bool: false, Valid: true}, + Gender: getNullString(performer.Gender), + Height: getNullString(performer.Height), + Instagram: getNullString(performer.Instagram), + Measurements: getNullString(performer.Measurements), + Name: sql.NullString{String: performer.Name, Valid: true}, + Piercings: getNullString(performer.Piercings), + Tattoos: getNullString(performer.Tattoos), + Twitter: getNullString(performer.Twitter), + URL: getNullString(performer.URL), + UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + } + err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error { + createdPerformer, err := r.Performer().Create(newPerformer) + if err != nil { + return err + } + + err = r.Performer().UpdateStashIDs(createdPerformer.ID, []models.StashID{ + { + Endpoint: t.box.Endpoint, + StashID: *performer.RemoteSiteID, + }, + }) + if err != nil { + return err + } + + if len(performer.Images) > 0 { + image, err := utils.ReadImageFromURL(performer.Images[0]) + if err != nil { + return err + } + err = r.Performer().UpdateImage(createdPerformer.ID, image) + } + return err + }) + if err != nil { + logger.Errorf("Failed to save performer %s: %s", *t.name, err.Error()) + } else { + logger.Infof("Saved performer %s", *t.name) + } + } + } else { + var name string + if t.name != nil { + name = *t.name + } else if t.performer != nil { + name = t.performer.Name.String + } + logger.Infof("No match found for %s", name) + } +} + +func getDate(val *string) models.SQLiteDate { + if val == nil { + return models.SQLiteDate{Valid: false} + } else { + return models.SQLiteDate{String: *val, Valid: true} + 
} +} + +func getNullString(val *string) sql.NullString { + if val == nil { + return sql.NullString{Valid: false} + } else { + return sql.NullString{String: *val, Valid: true} + } +} diff --git a/pkg/manager/task_transcode.go b/pkg/manager/task_transcode.go index f5a46e58f..7b1ed33cf 100644 --- a/pkg/manager/task_transcode.go +++ b/pkg/manager/task_transcode.go @@ -57,7 +57,7 @@ func (t *GenerateTranscodeTask) Start(wg *sizedwaitgroup.SizedWaitGroup) { sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm) outputPath := instance.Paths.Generated.GetTmpPath(sceneHash + ".mp4") - transcodeSize := config.GetMaxTranscodeSize() + transcodeSize := config.GetInstance().GetMaxTranscodeSize() options := ffmpeg.TranscodeOptions{ OutputPath: outputPath, MaxTranscodeSize: transcodeSize, diff --git a/pkg/models/extension_find_filter.go b/pkg/models/extension_find_filter.go index e2d4f8d7c..8dc1ed515 100644 --- a/pkg/models/extension_find_filter.go +++ b/pkg/models/extension_find_filter.go @@ -1,5 +1,9 @@ package models +// PerPageAll is the value used for perPage to indicate all results should be +// returned. +const PerPageAll = -1 + func (ff FindFilterType) GetSort(defaultSort string) string { var sort string if ff.Sort == nil { @@ -35,17 +39,19 @@ func (ff FindFilterType) GetPage() int { func (ff FindFilterType) GetPageSize() int { const defaultPerPage = 25 - const minPerPage = 1 + const minPerPage = 0 const maxPerPage = 1000 if ff.PerPage == nil { return defaultPerPage } - if *ff.PerPage > 1000 { + if *ff.PerPage > maxPerPage { return maxPerPage - } else if *ff.PerPage < 0 { - // PerPage == 0 -> no limit + } else if *ff.PerPage < minPerPage { + // negative page sizes should return all results + // this is a sanity check in case GetPageSize is + // called with a negative page size. 
return minPerPage } @@ -53,5 +59,5 @@ func (ff FindFilterType) GetPageSize() int { } func (ff FindFilterType) IsGetAll() bool { - return ff.PerPage != nil && *ff.PerPage == 0 + return ff.PerPage != nil && *ff.PerPage < 0 } diff --git a/pkg/models/gallery.go b/pkg/models/gallery.go index 71f19a666..75fcfc896 100644 --- a/pkg/models/gallery.go +++ b/pkg/models/gallery.go @@ -11,6 +11,7 @@ type GalleryReader interface { Count() (int, error) All() ([]*Gallery, error) Query(galleryFilter *GalleryFilterType, findFilter *FindFilterType) ([]*Gallery, int, error) + QueryCount(galleryFilter *GalleryFilterType, findFilter *FindFilterType) (int, error) GetPerformerIDs(galleryID int) ([]int, error) GetTagIDs(galleryID int) ([]int, error) GetSceneIDs(galleryID int) ([]int, error) diff --git a/pkg/models/image.go b/pkg/models/image.go index d160aeba5..c3f3c5b2e 100644 --- a/pkg/models/image.go +++ b/pkg/models/image.go @@ -17,6 +17,7 @@ type ImageReader interface { // CountByTagID(tagID int) (int, error) All() ([]*Image, error) Query(imageFilter *ImageFilterType, findFilter *FindFilterType) ([]*Image, int, error) + QueryCount(imageFilter *ImageFilterType, findFilter *FindFilterType) (int, error) GetGalleryIDs(imageID int) ([]int, error) GetTagIDs(imageID int) ([]int, error) GetPerformerIDs(imageID int) ([]int, error) diff --git a/pkg/models/mocks/GalleryReaderWriter.go b/pkg/models/mocks/GalleryReaderWriter.go index 3585c3036..8bbd2e78b 100644 --- a/pkg/models/mocks/GalleryReaderWriter.go +++ b/pkg/models/mocks/GalleryReaderWriter.go @@ -376,6 +376,27 @@ func (_m *GalleryReaderWriter) Query(galleryFilter *models.GalleryFilterType, fi return r0, r1, r2 } +// QueryCount provides a mock function with given fields: galleryFilter, findFilter +func (_m *GalleryReaderWriter) QueryCount(galleryFilter *models.GalleryFilterType, findFilter *models.FindFilterType) (int, error) { + ret := _m.Called(galleryFilter, findFilter) + + var r0 int + if rf, ok := 
ret.Get(0).(func(*models.GalleryFilterType, *models.FindFilterType) int); ok { + r0 = rf(galleryFilter, findFilter) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(*models.GalleryFilterType, *models.FindFilterType) error); ok { + r1 = rf(galleryFilter, findFilter) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Update provides a mock function with given fields: updatedGallery func (_m *GalleryReaderWriter) Update(updatedGallery models.Gallery) (*models.Gallery, error) { ret := _m.Called(updatedGallery) diff --git a/pkg/models/mocks/ImageReaderWriter.go b/pkg/models/mocks/ImageReaderWriter.go index b00cacfc9..a8a8c4b4a 100644 --- a/pkg/models/mocks/ImageReaderWriter.go +++ b/pkg/models/mocks/ImageReaderWriter.go @@ -370,6 +370,27 @@ func (_m *ImageReaderWriter) Query(imageFilter *models.ImageFilterType, findFilt return r0, r1, r2 } +// QueryCount provides a mock function with given fields: imageFilter, findFilter +func (_m *ImageReaderWriter) QueryCount(imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) (int, error) { + ret := _m.Called(imageFilter, findFilter) + + var r0 int + if rf, ok := ret.Get(0).(func(*models.ImageFilterType, *models.FindFilterType) int); ok { + r0 = rf(imageFilter, findFilter) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(*models.ImageFilterType, *models.FindFilterType) error); ok { + r1 = rf(imageFilter, findFilter) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // ResetOCounter provides a mock function with given fields: id func (_m *ImageReaderWriter) ResetOCounter(id int) (int, error) { ret := _m.Called(id) diff --git a/pkg/models/mocks/PerformerReaderWriter.go b/pkg/models/mocks/PerformerReaderWriter.go index 60575ab3b..5d3c5cb6f 100644 --- a/pkg/models/mocks/PerformerReaderWriter.go +++ b/pkg/models/mocks/PerformerReaderWriter.go @@ -388,6 +388,29 @@ func (_m *PerformerReaderWriter) Query(performerFilter 
*models.PerformerFilterTy return r0, r1, r2 } +// QueryForAutoTag provides a mock function with given fields: words +func (_m *PerformerReaderWriter) QueryForAutoTag(words []string) ([]*models.Performer, error) { + ret := _m.Called(words) + + var r0 []*models.Performer + if rf, ok := ret.Get(0).(func([]string) []*models.Performer); ok { + r0 = rf(words) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Performer) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func([]string) error); ok { + r1 = rf(words) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Update provides a mock function with given fields: updatedPerformer func (_m *PerformerReaderWriter) Update(updatedPerformer models.PerformerPartial) (*models.Performer, error) { ret := _m.Called(updatedPerformer) @@ -475,3 +498,26 @@ func (_m *PerformerReaderWriter) UpdateTags(sceneID int, tagIDs []int) error { return r0 } + +// FindByStashIDStatus provides a mock function with given fields: hasStashID, stashboxEndpoint +func (_m *PerformerReaderWriter) FindByStashIDStatus(hasStashID bool, stashboxEndpoint string) ([]*models.Performer, error) { + ret := _m.Called(hasStashID, stashboxEndpoint) + + var r0 []*models.Performer + if rf, ok := ret.Get(0).(func(bool, string) []*models.Performer); ok { + r0 = rf(hasStashID, stashboxEndpoint) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Performer) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(bool, string) error); ok { + r1 = rf(hasStashID, stashboxEndpoint) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} diff --git a/pkg/models/mocks/SceneReaderWriter.go b/pkg/models/mocks/SceneReaderWriter.go index 0e5295759..796c23878 100644 --- a/pkg/models/mocks/SceneReaderWriter.go +++ b/pkg/models/mocks/SceneReaderWriter.go @@ -415,6 +415,29 @@ func (_m *SceneReaderWriter) FindByPerformerID(performerID int) ([]*models.Scene return r0, r1 } +// FindDuplicates provides a mock function with given fields: distance +func 
(_m *SceneReaderWriter) FindDuplicates(distance int) ([][]*models.Scene, error) { + ret := _m.Called(distance) + + var r0 [][]*models.Scene + if rf, ok := ret.Get(0).(func(int) [][]*models.Scene); ok { + r0 = rf(distance) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(int) error); ok { + r1 = rf(distance) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindMany provides a mock function with given fields: ids func (_m *SceneReaderWriter) FindMany(ids []int) ([]*models.Scene, error) { ret := _m.Called(ids) @@ -627,29 +650,6 @@ func (_m *SceneReaderWriter) Query(sceneFilter *models.SceneFilterType, findFilt return r0, r1, r2 } -// QueryForAutoTag provides a mock function with given fields: regex, pathPrefixes -func (_m *SceneReaderWriter) QueryForAutoTag(regex string, pathPrefixes []string) ([]*models.Scene, error) { - ret := _m.Called(regex, pathPrefixes) - - var r0 []*models.Scene - if rf, ok := ret.Get(0).(func(string, []string) []*models.Scene); ok { - r0 = rf(regex, pathPrefixes) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]*models.Scene) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(string, []string) error); ok { - r1 = rf(regex, pathPrefixes) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - // ResetOCounter provides a mock function with given fields: id func (_m *SceneReaderWriter) ResetOCounter(id int) (int, error) { ret := _m.Called(id) diff --git a/pkg/models/mocks/StudioReaderWriter.go b/pkg/models/mocks/StudioReaderWriter.go index fb9c02d7d..fbd8a1936 100644 --- a/pkg/models/mocks/StudioReaderWriter.go +++ b/pkg/models/mocks/StudioReaderWriter.go @@ -296,6 +296,29 @@ func (_m *StudioReaderWriter) Query(studioFilter *models.StudioFilterType, findF return r0, r1, r2 } +// QueryForAutoTag provides a mock function with given fields: words +func (_m *StudioReaderWriter) QueryForAutoTag(words []string) ([]*models.Studio, error) { + ret 
:= _m.Called(words) + + var r0 []*models.Studio + if rf, ok := ret.Get(0).(func([]string) []*models.Studio); ok { + r0 = rf(words) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Studio) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func([]string) error); ok { + r1 = rf(words) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Update provides a mock function with given fields: updatedStudio func (_m *StudioReaderWriter) Update(updatedStudio models.StudioPartial) (*models.Studio, error) { ret := _m.Called(updatedStudio) diff --git a/pkg/models/mocks/TagReaderWriter.go b/pkg/models/mocks/TagReaderWriter.go index 65dcd8b89..e0d765577 100644 --- a/pkg/models/mocks/TagReaderWriter.go +++ b/pkg/models/mocks/TagReaderWriter.go @@ -367,6 +367,29 @@ func (_m *TagReaderWriter) Query(tagFilter *models.TagFilterType, findFilter *mo return r0, r1, r2 } +// QueryForAutoTag provides a mock function with given fields: words +func (_m *TagReaderWriter) QueryForAutoTag(words []string) ([]*models.Tag, error) { + ret := _m.Called(words) + + var r0 []*models.Tag + if rf, ok := ret.Get(0).(func([]string) []*models.Tag); ok { + r0 = rf(words) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Tag) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func([]string) error); ok { + r1 = rf(words) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Update provides a mock function with given fields: updatedTag func (_m *TagReaderWriter) Update(updatedTag models.Tag) (*models.Tag, error) { ret := _m.Called(updatedTag) diff --git a/pkg/models/model_gallery.go b/pkg/models/model_gallery.go index 061dbf7d2..977df7663 100644 --- a/pkg/models/model_gallery.go +++ b/pkg/models/model_gallery.go @@ -2,6 +2,7 @@ package models import ( "database/sql" + "path/filepath" ) type Gallery struct { @@ -39,6 +40,20 @@ type GalleryPartial struct { UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"` } +// GetTitle returns the title of the scene. 
If the Title field is empty, +// then the base filename is returned. +func (s Gallery) GetTitle() string { + if s.Title.String != "" { + return s.Title.String + } + + if s.Path.Valid { + return filepath.Base(s.Path.String) + } + + return "" +} + const DefaultGthumbWidth int = 640 type Galleries []*Gallery diff --git a/pkg/models/model_image.go b/pkg/models/model_image.go index 47c21fcd5..6470e619d 100644 --- a/pkg/models/model_image.go +++ b/pkg/models/model_image.go @@ -2,6 +2,7 @@ package models import ( "database/sql" + "path/filepath" ) // Image stores the metadata for a single image. @@ -40,6 +41,16 @@ type ImagePartial struct { UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"` } +// GetTitle returns the title of the image. If the Title field is empty, +// then the base filename is returned. +func (s Image) GetTitle() string { + if s.Title.String != "" { + return s.Title.String + } + + return filepath.Base(s.Path) +} + // ImageFileType represents the file metadata for an image. 
type ImageFileType struct { Size *int `graphql:"size" json:"size"` diff --git a/pkg/models/model_performer.go b/pkg/models/model_performer.go index 4d6134b8a..eefce07de 100644 --- a/pkg/models/model_performer.go +++ b/pkg/models/model_performer.go @@ -29,6 +29,11 @@ type Performer struct { Favorite sql.NullBool `db:"favorite" json:"favorite"` CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"` UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"` + Rating sql.NullInt64 `db:"rating" json:"rating"` + Details sql.NullString `db:"details" json:"details"` + DeathDate SQLiteDate `db:"death_date" json:"death_date"` + HairColor sql.NullString `db:"hair_color" json:"hair_color"` + Weight sql.NullInt64 `db:"weight" json:"weight"` } type PerformerPartial struct { @@ -53,6 +58,11 @@ type PerformerPartial struct { Favorite *sql.NullBool `db:"favorite" json:"favorite"` CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"` UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"` + Rating *sql.NullInt64 `db:"rating" json:"rating"` + Details *sql.NullString `db:"details" json:"details"` + DeathDate *SQLiteDate `db:"death_date" json:"death_date"` + HairColor *sql.NullString `db:"hair_color" json:"hair_color"` + Weight *sql.NullInt64 `db:"weight" json:"weight"` } func NewPerformer(name string) *Performer { diff --git a/pkg/models/model_scene.go b/pkg/models/model_scene.go index 40bcd43e9..514ef8cbf 100644 --- a/pkg/models/model_scene.go +++ b/pkg/models/model_scene.go @@ -29,6 +29,7 @@ type Scene struct { Bitrate sql.NullInt64 `db:"bitrate" json:"bitrate"` StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"` FileModTime NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"` + Phash sql.NullInt64 `db:"phash,omitempty" json:"phash"` CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"` UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"` } @@ -58,6 +59,7 @@ type ScenePartial struct { StudioID *sql.NullInt64 
`db:"studio_id,omitempty" json:"studio_id"` MovieID *sql.NullInt64 `db:"movie_id,omitempty" json:"movie_id"` FileModTime *NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"` + Phash *sql.NullInt64 `db:"phash,omitempty" json:"phash"` CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"` UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"` } diff --git a/pkg/models/model_scraped_item.go b/pkg/models/model_scraped_item.go index e9fa33118..0c102bcbe 100644 --- a/pkg/models/model_scraped_item.go +++ b/pkg/models/model_scraped_item.go @@ -42,6 +42,10 @@ type ScrapedPerformer struct { Aliases *string `graphql:"aliases" json:"aliases"` Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"` Image *string `graphql:"image" json:"image"` + Details *string `graphql:"details" json:"details"` + DeathDate *string `graphql:"death_date" json:"death_date"` + HairColor *string `graphql:"hair_color" json:"hair_color"` + Weight *string `graphql:"weight" json:"weight"` } // this type has no Image field @@ -63,6 +67,10 @@ type ScrapedPerformerStash struct { Piercings *string `graphql:"piercings" json:"piercings"` Aliases *string `graphql:"aliases" json:"aliases"` Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"` + Details *string `graphql:"details" json:"details"` + DeathDate *string `graphql:"death_date" json:"death_date"` + HairColor *string `graphql:"hair_color" json:"hair_color"` + Weight *string `graphql:"weight" json:"weight"` } type ScrapedScene struct { @@ -128,6 +136,10 @@ type ScrapedScenePerformer struct { Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"` RemoteSiteID *string `graphql:"remote_site_id" json:"remote_site_id"` Images []string `graphql:"images" json:"images"` + Details *string `graphql:"details" json:"details"` + DeathDate *string `graphql:"death_date" json:"death_date"` + HairColor *string `graphql:"hair_color" json:"hair_color"` + Weight *string `graphql:"weight" json:"weight"` } type ScrapedSceneStudio struct { diff --git 
a/pkg/models/model_studio.go b/pkg/models/model_studio.go index 4bc687526..769acb8e2 100644 --- a/pkg/models/model_studio.go +++ b/pkg/models/model_studio.go @@ -15,6 +15,8 @@ type Studio struct { ParentID sql.NullInt64 `db:"parent_id,omitempty" json:"parent_id"` CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"` UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"` + Rating sql.NullInt64 `db:"rating" json:"rating"` + Details sql.NullString `db:"details" json:"details"` } type StudioPartial struct { @@ -25,6 +27,8 @@ type StudioPartial struct { ParentID *sql.NullInt64 `db:"parent_id,omitempty" json:"parent_id"` CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"` UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"` + Rating *sql.NullInt64 `db:"rating" json:"rating"` + Details *sql.NullString `db:"details" json:"details"` } var DefaultStudioImage = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAYAAABw4pVUAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAA3XAAAN1wFCKJt4AAAAB3RJTUUH4wgVBQsJl1CMZAAAASJJREFUeNrt3N0JwyAYhlEj3cj9R3Cm5rbkqtAP+qrnGaCYHPwJpLlaa++mmLpbAERAgAgIEAEBIiBABERAgAgIEAEBIiBABERAgAgIEAHZuVflj40x4i94zhk9vqsVvEq6AsQqMP1EjORx20OACAgQRRx7T+zzcFBxcjNDfoB4ntQqTm5Awo7MlqywZxcgYQ+RlqywJ3ozJAQCSBiEJSsQA0gYBpDAgAARECACAkRAgAgIEAERECACAmSjUv6eAOSB8m8YIGGzBUjYbAESBgMkbBkDEjZbgITBAClcxiqQvEoatreYIWEBASIgJ4Gkf11ntXH3nS9uxfGWfJ5J9hAgAgJEQAQEiIAAERAgAgJEQAQEiIAAERAgAgJEQAQEiL7qBuc6RKLHxr0CAAAAAElFTkSuQmCC" diff --git a/pkg/models/performer.go b/pkg/models/performer.go index 2c550b720..437921e00 100644 --- a/pkg/models/performer.go +++ b/pkg/models/performer.go @@ -8,9 +8,13 @@ type PerformerReader interface { FindByImageID(imageID int) ([]*Performer, error) FindByGalleryID(galleryID int) ([]*Performer, error) FindByNames(names []string, nocase bool) ([]*Performer, error) + FindByStashIDStatus(hasStashID bool, stashboxEndpoint string) ([]*Performer, error) CountByTagID(tagID int) (int, error) Count() (int, error) All() 
([]*Performer, error) + // TODO - this interface is temporary until the filter schema can fully + // support the query needed + QueryForAutoTag(words []string) ([]*Performer, error) Query(performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]*Performer, int, error) GetImage(performerID int) ([]byte, error) GetStashIDs(performerID int) ([]*StashID, error) diff --git a/pkg/models/scene.go b/pkg/models/scene.go index ef4485717..8e77b2497 100644 --- a/pkg/models/scene.go +++ b/pkg/models/scene.go @@ -8,6 +8,7 @@ type SceneReader interface { FindByPath(path string) (*Scene, error) FindByPerformerID(performerID int) ([]*Scene, error) FindByGalleryID(performerID int) ([]*Scene, error) + FindDuplicates(distance int) ([][]*Scene, error) CountByPerformerID(performerID int) (int, error) // FindByStudioID(studioID int) ([]*Scene, error) FindByMovieID(movieID int) ([]*Scene, error) diff --git a/pkg/models/studio.go b/pkg/models/studio.go index 358abf596..7aa2e87b8 100644 --- a/pkg/models/studio.go +++ b/pkg/models/studio.go @@ -7,6 +7,9 @@ type StudioReader interface { FindByName(name string, nocase bool) (*Studio, error) Count() (int, error) All() ([]*Studio, error) + // TODO - this interface is temporary until the filter schema can fully + // support the query needed + QueryForAutoTag(words []string) ([]*Studio, error) Query(studioFilter *StudioFilterType, findFilter *FindFilterType) ([]*Studio, int, error) GetImage(studioID int) ([]byte, error) HasImage(studioID int) (bool, error) diff --git a/pkg/models/tag.go b/pkg/models/tag.go index 5f03e33b5..a675bfbdf 100644 --- a/pkg/models/tag.go +++ b/pkg/models/tag.go @@ -12,6 +12,9 @@ type TagReader interface { FindByNames(names []string, nocase bool) ([]*Tag, error) Count() (int, error) All() ([]*Tag, error) + // TODO - this interface is temporary until the filter schema can fully + // support the query needed + QueryForAutoTag(words []string) ([]*Tag, error) Query(tagFilter *TagFilterType, findFilter 
*FindFilterType) ([]*Tag, int, error) GetImage(tagID int) ([]byte, error) } diff --git a/pkg/performer/export.go b/pkg/performer/export.go index a038a2560..555abe58d 100644 --- a/pkg/performer/export.go +++ b/pkg/performer/export.go @@ -66,6 +66,21 @@ func ToJSON(reader models.PerformerReader, performer *models.Performer) (*jsonsc if performer.Favorite.Valid { newPerformerJSON.Favorite = performer.Favorite.Bool } + if performer.Rating.Valid { + newPerformerJSON.Rating = int(performer.Rating.Int64) + } + if performer.Details.Valid { + newPerformerJSON.Details = performer.Details.String + } + if performer.DeathDate.Valid { + newPerformerJSON.DeathDate = utils.GetYMDFromDatabaseDate(performer.DeathDate.String) + } + if performer.HairColor.Valid { + newPerformerJSON.HairColor = performer.HairColor.String + } + if performer.Weight.Valid { + newPerformerJSON.Weight = int(performer.Weight.Int64) + } image, err := reader.GetImage(performer.ID) if err != nil { diff --git a/pkg/performer/export_test.go b/pkg/performer/export_test.go index aa880e40c..0d143b2d5 100644 --- a/pkg/performer/export_test.go +++ b/pkg/performer/export_test.go @@ -36,6 +36,10 @@ const ( piercings = "piercings" tattoos = "tattoos" twitter = "twitter" + rating = 5 + details = "details" + hairColor = "hairColor" + weight = 60 ) var imageBytes = []byte("imageBytes") @@ -46,6 +50,10 @@ var birthDate = models.SQLiteDate{ String: "2001-01-01", Valid: true, } +var deathDate = models.SQLiteDate{ + String: "2021-02-02", + Valid: true, +} var createTime time.Time = time.Date(2001, 01, 01, 0, 0, 0, 0, time.Local) var updateTime time.Time = time.Date(2002, 01, 01, 0, 0, 0, 0, time.Local) @@ -79,6 +87,14 @@ func createFullPerformer(id int, name string) *models.Performer { UpdatedAt: models.SQLiteTimestamp{ Timestamp: updateTime, }, + Rating: models.NullInt64(rating), + Details: models.NullString(details), + DeathDate: deathDate, + HairColor: models.NullString(hairColor), + Weight: sql.NullInt64{ + Int64: weight, + 
Valid: true, + }, } } @@ -119,7 +135,12 @@ func createFullJSONPerformer(name string, image string) *jsonschema.Performer { UpdatedAt: models.JSONTime{ Time: updateTime, }, - Image: image, + Rating: rating, + Image: image, + Details: details, + DeathDate: deathDate.String, + HairColor: hairColor, + Weight: weight, } } diff --git a/pkg/performer/import.go b/pkg/performer/import.go index 2131b1e57..db32e1286 100644 --- a/pkg/performer/import.go +++ b/pkg/performer/import.go @@ -224,6 +224,21 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform if performerJSON.Instagram != "" { newPerformer.Instagram = sql.NullString{String: performerJSON.Instagram, Valid: true} } + if performerJSON.Rating != 0 { + newPerformer.Rating = sql.NullInt64{Int64: int64(performerJSON.Rating), Valid: true} + } + if performerJSON.Details != "" { + newPerformer.Details = sql.NullString{String: performerJSON.Details, Valid: true} + } + if performerJSON.DeathDate != "" { + newPerformer.DeathDate = models.SQLiteDate{String: performerJSON.DeathDate, Valid: true} + } + if performerJSON.HairColor != "" { + newPerformer.HairColor = sql.NullString{String: performerJSON.HairColor, Valid: true} + } + if performerJSON.Weight != 0 { + newPerformer.Weight = sql.NullInt64{Int64: int64(performerJSON.Weight), Valid: true} + } return newPerformer } diff --git a/pkg/performer/validate.go b/pkg/performer/validate.go new file mode 100644 index 000000000..374262590 --- /dev/null +++ b/pkg/performer/validate.go @@ -0,0 +1,37 @@ +package performer + +import ( + "errors" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +func ValidateDeathDate(performer *models.Performer, birthdate *string, deathDate *string) error { + // don't validate existing values + if birthdate == nil && deathDate == nil { + return nil + } + + if performer != nil { + if birthdate == nil && performer.Birthdate.Valid { + birthdate = &performer.Birthdate.String + } + if deathDate 
== nil && performer.DeathDate.Valid { + deathDate = &performer.DeathDate.String + } + } + + if birthdate == nil || deathDate == nil || *birthdate == "" || *deathDate == "" { + return nil + } + + f, _ := utils.ParseDateStringAsTime(*birthdate) + t, _ := utils.ParseDateStringAsTime(*deathDate) + + if f.After(t) { + return errors.New("the date of death should be higher than the date of birth") + } + + return nil +} diff --git a/pkg/performer/validate_test.go b/pkg/performer/validate_test.go new file mode 100644 index 000000000..33616e184 --- /dev/null +++ b/pkg/performer/validate_test.go @@ -0,0 +1,70 @@ +package performer + +import ( + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stretchr/testify/assert" +) + +func TestValidateDeathDate(t *testing.T) { + assert := assert.New(t) + + date1 := "2001-01-01" + date2 := "2002-01-01" + date3 := "2003-01-01" + date4 := "2004-01-01" + empty := "" + + emptyPerformer := models.Performer{} + invalidPerformer := models.Performer{ + Birthdate: models.SQLiteDate{ + String: date3, + Valid: true, + }, + DeathDate: models.SQLiteDate{ + String: date2, + Valid: true, + }, + } + validPerformer := models.Performer{ + Birthdate: models.SQLiteDate{ + String: date2, + Valid: true, + }, + DeathDate: models.SQLiteDate{ + String: date3, + Valid: true, + }, + } + + // nil values should always return nil + assert.Nil(ValidateDeathDate(nil, nil, &date1)) + assert.Nil(ValidateDeathDate(nil, &date2, nil)) + assert.Nil(ValidateDeathDate(&emptyPerformer, nil, &date1)) + assert.Nil(ValidateDeathDate(&emptyPerformer, &date2, nil)) + + // empty strings should always return nil + assert.Nil(ValidateDeathDate(nil, &empty, &date1)) + assert.Nil(ValidateDeathDate(nil, &date2, &empty)) + assert.Nil(ValidateDeathDate(&emptyPerformer, &empty, &date1)) + assert.Nil(ValidateDeathDate(&emptyPerformer, &date2, &empty)) + assert.Nil(ValidateDeathDate(&validPerformer, &empty, &date1)) + assert.Nil(ValidateDeathDate(&validPerformer, &date2, 
&empty)) + + // nil inputs should return nil even if performer is invalid + assert.Nil(ValidateDeathDate(&invalidPerformer, nil, nil)) + + // invalid input values should return error + assert.NotNil(ValidateDeathDate(nil, &date2, &date1)) + assert.NotNil(ValidateDeathDate(&validPerformer, &date2, &date1)) + + // valid input values should return nil + assert.Nil(ValidateDeathDate(nil, &date1, &date2)) + + // use performer values if performer set and values available + assert.NotNil(ValidateDeathDate(&validPerformer, nil, &date1)) + assert.NotNil(ValidateDeathDate(&validPerformer, &date4, nil)) + assert.Nil(ValidateDeathDate(&validPerformer, nil, &date4)) + assert.Nil(ValidateDeathDate(&validPerformer, &date1, nil)) +} diff --git a/pkg/scene/export.go b/pkg/scene/export.go index 9fcd6d096..5f723cdf5 100644 --- a/pkg/scene/export.go +++ b/pkg/scene/export.go @@ -27,6 +27,10 @@ func ToBasicJSON(reader models.SceneReader, scene *models.Scene) (*jsonschema.Sc newSceneJSON.OSHash = scene.OSHash.String } + if scene.Phash.Valid { + newSceneJSON.Phash = utils.PhashToString(scene.Phash.Int64) + } + if scene.Title.Valid { newSceneJSON.Title = scene.Title.String } diff --git a/pkg/scene/export_test.go b/pkg/scene/export_test.go index 2d30d9672..dc3164f13 100644 --- a/pkg/scene/export_test.go +++ b/pkg/scene/export_test.go @@ -7,6 +7,7 @@ import ( "github.com/stashapp/stash/pkg/manager/jsonschema" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/utils" "github.com/stretchr/testify/assert" "testing" @@ -43,6 +44,7 @@ const ( checksum = "checksum" oshash = "oshash" title = "title" + phash = -3846826108889195 date = "2001-01-01" rating = 5 ocounter = 2 @@ -112,6 +114,7 @@ func createFullScene(id int) models.Scene { Height: models.NullInt64(height), OCounter: ocounter, OSHash: models.NullString(oshash), + Phash: models.NullInt64(phash), Rating: models.NullInt64(rating), Organized: organized, Size: 
models.NullString(size), @@ -147,6 +150,7 @@ func createFullJSONScene(image string) *jsonschema.Scene { Details: details, OCounter: ocounter, OSHash: oshash, + Phash: utils.PhashToString(phash), Rating: rating, Organized: organized, URL: url, diff --git a/pkg/scene/import.go b/pkg/scene/import.go index eee87c8a8..a1cad8808 100644 --- a/pkg/scene/import.go +++ b/pkg/scene/import.go @@ -73,6 +73,11 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene { Path: i.Path, } + if sceneJSON.Phash != "" { + hash, err := strconv.ParseUint(sceneJSON.Phash, 16, 64) + newScene.Phash = sql.NullInt64{Int64: int64(hash), Valid: err == nil} + } + if sceneJSON.Title != "" { newScene.Title = sql.NullString{String: sceneJSON.Title, Valid: true} } diff --git a/pkg/scraper/config.go b/pkg/scraper/config.go index 7d5d49ebf..4eeb97af3 100644 --- a/pkg/scraper/config.go +++ b/pkg/scraper/config.go @@ -175,11 +175,17 @@ type clickOptions struct { Sleep int `yaml:"sleep"` } +type header struct { + Key string `yaml:"Key"` + Value string `yaml:"Value"` +} + type scraperDriverOptions struct { UseCDP bool `yaml:"useCDP"` Sleep int `yaml:"sleep"` Clicks []*clickOptions `yaml:"clicks"` Cookies []*cookieOptions `yaml:"cookies"` + Headers []*header `yaml:"headers"` } func loadScraperFromYAML(id string, reader io.Reader) (*config, error) { diff --git a/pkg/scraper/freeones.go b/pkg/scraper/freeones.go index 8b72e9df3..c229e874a 100644 --- a/pkg/scraper/freeones.go +++ b/pkg/scraper/freeones.go @@ -103,7 +103,23 @@ xPathScrapers: selector: //div[contains(@class,'image-container')]//a/img/@src Gender: fixed: "Female" -# Last updated March 24, 2021 + Details: //div[@data-test="biography"] + DeathDate: + selector: //div[contains(text(),'Passed away on')] + postProcess: + - replace: + - regex: Passed away on (.+) at the age of \d+ + with: $1 + - parseDate: January 2, 2006 + HairColor: //span[text()='Hair Color']/following-sibling::span/a + Weight: + selector: 
//span[text()='Weight']/following-sibling::span/a + postProcess: + - replace: + - regex: \D+[\s\S]+ + with: "" + +# Last updated April 13, 2021 ` func getFreeonesScraper() config { diff --git a/pkg/scraper/image.go b/pkg/scraper/image.go index 08cb6725d..ab09f28da 100644 --- a/pkg/scraper/image.go +++ b/pkg/scraper/image.go @@ -8,7 +8,6 @@ import ( "strings" "time" - stashConfig "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) @@ -87,7 +86,7 @@ func setMovieBackImage(m *models.ScrapedMovie, globalConfig GlobalConfig) error func getImage(url string, globalConfig GlobalConfig) (*string, error) { client := &http.Client{ Transport: &http.Transport{ // ignore insecure certificates - TLSClientConfig: &tls.Config{InsecureSkipVerify: !stashConfig.GetScraperCertCheck()}}, + TLSClientConfig: &tls.Config{InsecureSkipVerify: !globalConfig.GetScraperCertCheck()}}, Timeout: imageGetTimeout, } @@ -96,7 +95,7 @@ func getImage(url string, globalConfig GlobalConfig) (*string, error) { return nil, err } - userAgent := globalConfig.UserAgent + userAgent := globalConfig.GetScraperUserAgent() if userAgent != "" { req.Header.Set("User-Agent", userAgent) } diff --git a/pkg/scraper/json_test.go b/pkg/scraper/json_test.go index 6145cc88b..271d83235 100644 --- a/pkg/scraper/json_test.go +++ b/pkg/scraper/json_test.go @@ -23,6 +23,9 @@ jsonScrapers: Piercings: $extras.piercings Aliases: data.aliases Image: data.image + Details: data.bio + HairColor: $extras.hair_colour + Weight: $extras.weight ` const json = ` @@ -41,7 +44,7 @@ jsonScrapers: "ethnicity": "Caucasian", "nationality": "United States", "hair_colour": "Blonde", - "weight": "126 lbs (or 57 kg)", + "weight": 57, "height": "5'6\" (or 167 cm)", "measurements": "34-26-36", "cupsize": "34C (75C)", @@ -90,4 +93,7 @@ jsonScrapers: verifyField(t, "5'6\" (or 167 cm)", scrapedPerformer.Height, "Height") verifyField(t, "None", scrapedPerformer.Tattoos, "Tattoos") 
verifyField(t, "Navel", scrapedPerformer.Piercings, "Piercings") + verifyField(t, "Some girls are so damn hot that they can get you bent out of shape, and you will not even be mad at them for doing so. Well, tawny blonde Mia Malkova can bend her body into any shape she pleases, and that’s sure to satisfy all of the horny cocks and wet pussies out there. This girl has acrobatic and contortionist abilities that could even twist a pretzel into a new knot, which can be very helpful in the ... arrow_drop_down Some girls are so damn hot that they can get you bent out of shape, and you will not even be mad at them for doing so. Well, tawny blonde Mia Malkova can bend her body into any shape she pleases, and that’s sure to satisfy all of the horny cocks and wet pussies out there. This girl has acrobatic and contortionist abilities that could even twist a pretzel into a new knot, which can be very helpful in the VR Porn movies – trust us. Ankles behind her neck and feet over her back so she can kiss her toes, turned, twisted and gyrating, she can fuck any which way she wants (and that ass!), will surely make you fall in love with this hot Virtual Reality Porn slut, as she is one of the finest of them all. Talking about perfection, maybe it’s all the acrobatic work that keeps it in such gorgeous shape? Who cares really, because you just want to take a big bite out of it and never let go. But it’s not all about the body. Mia’s also got a great smile, which might not sound kinky, but believe us, it is a smile that will heat up your innards and drop your pants. Is it her golden skin, her innocent pink lips or that heart-shaped face? There is just too much good stuff going on with Mia Malkova, which is maybe why these past few years have heaped awards upon awards on this Southern California native. Mia came to VR Bangers for her first VR Porn video, so you know she’s only going for top-notch scenes with top-game performers, men, and women. 
Better hit up that yoga studio if you ever dream of being able to bang a flexible and talented chick like lady Malkova. arrow_drop_up", scrapedPerformer.Details, "Details") + verifyField(t, "Blonde", scrapedPerformer.HairColor, "HairColor") + verifyField(t, "57", scrapedPerformer.Weight, "Weight") } diff --git a/pkg/scraper/mapped.go b/pkg/scraper/mapped.go index 6b25e4850..87a040141 100644 --- a/pkg/scraper/mapped.go +++ b/pkg/scraper/mapped.go @@ -12,6 +12,7 @@ import ( "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" "gopkg.in/yaml.v2" ) @@ -73,7 +74,9 @@ func (s mappedConfig) postProcess(q mappedQuery, attrConfig mappedScraperAttrCon result := attrConfig.concatenateResults(found) result = attrConfig.postProcess(result, q) if attrConfig.hasSplit() { - return attrConfig.splitString(result) + results := attrConfig.splitString(result) + results = attrConfig.distinctResults(results) + return results } ret = []string{result} @@ -86,6 +89,7 @@ func (s mappedConfig) postProcess(q mappedQuery, attrConfig mappedScraperAttrCon ret = append(ret, text) } + ret = attrConfig.distinctResults(ret) } return ret @@ -362,6 +366,17 @@ type postProcessParseDate string func (p *postProcessParseDate) Apply(value string, q mappedQuery) string { parseDate := string(*p) + const internalDateFormat = "2006-01-02" + + value = strings.ToLower(value) + if value == "today" || value == "yesterday" { // handle today, yesterday + dt := time.Now() + if value == "yesterday" { // subtract 1 day from now + dt = dt.AddDate(0, 0, -1) + } + return dt.Format(internalDateFormat) + } + if parseDate == "" { return value } @@ -375,7 +390,6 @@ func (p *postProcessParseDate) Apply(value string, q mappedQuery) string { } // convert it into our date format - const internalDateFormat = "2006-01-02" return parsedValue.Format(internalDateFormat) } @@ -452,12 +466,25 @@ func (p *postProcessFeetToCm) Apply(value string, q mappedQuery) string { 
return strconv.Itoa(int(math.Round(centimeters))) } +type postProcessLbToKg bool + +func (p *postProcessLbToKg) Apply(value string, q mappedQuery) string { + const lb_in_kg = 0.45359237 + w, err := strconv.ParseFloat(value, 64) + if err == nil { + w = w * lb_in_kg + value = strconv.Itoa(int(math.Round(w))) + } + return value +} + type mappedPostProcessAction struct { ParseDate string `yaml:"parseDate"` Replace mappedRegexConfigs `yaml:"replace"` SubScraper *mappedScraperAttrConfig `yaml:"subScraper"` Map map[string]string `yaml:"map"` FeetToCm bool `yaml:"feetToCm"` + LbToKg bool `yaml:"lbToKg"` } func (a mappedPostProcessAction) ToPostProcessAction() (postProcessAction, error) { @@ -501,6 +528,14 @@ func (a mappedPostProcessAction) ToPostProcessAction() (postProcessAction, error action := postProcessFeetToCm(a.FeetToCm) ret = &action } + if a.LbToKg { + if found != "" { + return nil, fmt.Errorf("post-process actions must have a single field, found %s and %s", found, "lbToKg") + } + found = "lbToKg" + action := postProcessLbToKg(a.LbToKg) + ret = &action + } if ret == nil { return nil, errors.New("invalid post-process action") @@ -608,6 +643,10 @@ func (c mappedScraperAttrConfig) concatenateResults(nodes []string) string { return strings.Join(result, separator) } +func (c mappedScraperAttrConfig) distinctResults(nodes []string) []string { + return utils.StrUnique(nodes) +} + func (c mappedScraperAttrConfig) splitString(value string) []string { separator := c.Split var res []string diff --git a/pkg/scraper/scrapers.go b/pkg/scraper/scrapers.go index 6e2ee3fd2..f82dbd223 100644 --- a/pkg/scraper/scrapers.go +++ b/pkg/scraper/scrapers.go @@ -14,21 +14,19 @@ import ( ) // GlobalConfig contains the global scraper options. -type GlobalConfig struct { - // User Agent used when scraping using http. - UserAgent string - - // Path (file or remote address) to a Chrome CDP instance. 
- CDPPath string - Path string +type GlobalConfig interface { + GetScraperUserAgent() string + GetScrapersPath() string + GetScraperCDPPath() string + GetScraperCertCheck() bool } -func (c GlobalConfig) isCDPPathHTTP() bool { - return strings.HasPrefix(c.CDPPath, "http://") || strings.HasPrefix(c.CDPPath, "https://") +func isCDPPathHTTP(c GlobalConfig) bool { + return strings.HasPrefix(c.GetScraperCDPPath(), "http://") || strings.HasPrefix(c.GetScraperCDPPath(), "https://") } -func (c GlobalConfig) isCDPPathWS() bool { - return strings.HasPrefix(c.CDPPath, "ws://") +func isCDPPathWS(c GlobalConfig) bool { + return strings.HasPrefix(c.GetScraperCDPPath(), "ws://") } // Cache stores scraper details. @@ -45,7 +43,7 @@ type Cache struct { // Scraper configurations are loaded from yml files in the provided scrapers // directory and any subdirectories. func NewCache(globalConfig GlobalConfig, txnManager models.TransactionManager) (*Cache, error) { - scrapers, err := loadScrapers(globalConfig.Path) + scrapers, err := loadScrapers(globalConfig.GetScrapersPath()) if err != nil { return nil, err } @@ -93,7 +91,7 @@ func loadScrapers(path string) ([]config, error) { // In the event of an error during loading, the cache will be left empty. func (c *Cache) ReloadScrapers() error { c.scrapers = nil - scrapers, err := loadScrapers(c.globalConfig.Path) + scrapers, err := loadScrapers(c.globalConfig.GetScrapersPath()) if err != nil { return err } @@ -102,6 +100,7 @@ func (c *Cache) ReloadScrapers() error { return nil } +// TODO - don't think this is needed // UpdateConfig updates the global config for the cache. If the scraper path // has changed, ReloadScrapers will need to be called separately. 
func (c *Cache) UpdateConfig(globalConfig GlobalConfig) { diff --git a/pkg/scraper/stashbox/graphql/generated_client.go b/pkg/scraper/stashbox/graphql/generated_client.go index 0cd062fc1..e3f4b45dd 100644 --- a/pkg/scraper/stashbox/graphql/generated_client.go +++ b/pkg/scraper/stashbox/graphql/generated_client.go @@ -18,56 +18,67 @@ func NewClient(cli *http.Client, baseURL string, options ...client.HTTPRequestOp } type Query struct { - FindPerformer *Performer "json:\"findPerformer\" graphql:\"findPerformer\"" - QueryPerformers QueryPerformersResultType "json:\"queryPerformers\" graphql:\"queryPerformers\"" - FindStudio *Studio "json:\"findStudio\" graphql:\"findStudio\"" - QueryStudios QueryStudiosResultType "json:\"queryStudios\" graphql:\"queryStudios\"" - FindTag *Tag "json:\"findTag\" graphql:\"findTag\"" - QueryTags QueryTagsResultType "json:\"queryTags\" graphql:\"queryTags\"" - FindScene *Scene "json:\"findScene\" graphql:\"findScene\"" - FindSceneByFingerprint []*Scene "json:\"findSceneByFingerprint\" graphql:\"findSceneByFingerprint\"" - FindScenesByFingerprints []*Scene "json:\"findScenesByFingerprints\" graphql:\"findScenesByFingerprints\"" - QueryScenes QueryScenesResultType "json:\"queryScenes\" graphql:\"queryScenes\"" - FindEdit *Edit "json:\"findEdit\" graphql:\"findEdit\"" - QueryEdits QueryEditsResultType "json:\"queryEdits\" graphql:\"queryEdits\"" - FindUser *User "json:\"findUser\" graphql:\"findUser\"" - QueryUsers QueryUsersResultType "json:\"queryUsers\" graphql:\"queryUsers\"" - Me *User "json:\"me\" graphql:\"me\"" - SearchPerformer []*Performer "json:\"searchPerformer\" graphql:\"searchPerformer\"" - SearchScene []*Scene "json:\"searchScene\" graphql:\"searchScene\"" - Version Version "json:\"version\" graphql:\"version\"" + FindPerformer *Performer "json:\"findPerformer\" graphql:\"findPerformer\"" + QueryPerformers QueryPerformersResultType "json:\"queryPerformers\" graphql:\"queryPerformers\"" + FindStudio *Studio "json:\"findStudio\" 
graphql:\"findStudio\"" + QueryStudios QueryStudiosResultType "json:\"queryStudios\" graphql:\"queryStudios\"" + FindTag *Tag "json:\"findTag\" graphql:\"findTag\"" + QueryTags QueryTagsResultType "json:\"queryTags\" graphql:\"queryTags\"" + FindTagCategory *TagCategory "json:\"findTagCategory\" graphql:\"findTagCategory\"" + QueryTagCategories QueryTagCategoriesResultType "json:\"queryTagCategories\" graphql:\"queryTagCategories\"" + FindScene *Scene "json:\"findScene\" graphql:\"findScene\"" + FindSceneByFingerprint []*Scene "json:\"findSceneByFingerprint\" graphql:\"findSceneByFingerprint\"" + FindScenesByFingerprints []*Scene "json:\"findScenesByFingerprints\" graphql:\"findScenesByFingerprints\"" + QueryScenes QueryScenesResultType "json:\"queryScenes\" graphql:\"queryScenes\"" + FindEdit *Edit "json:\"findEdit\" graphql:\"findEdit\"" + QueryEdits QueryEditsResultType "json:\"queryEdits\" graphql:\"queryEdits\"" + FindUser *User "json:\"findUser\" graphql:\"findUser\"" + QueryUsers QueryUsersResultType "json:\"queryUsers\" graphql:\"queryUsers\"" + Me *User "json:\"me\" graphql:\"me\"" + SearchPerformer []*Performer "json:\"searchPerformer\" graphql:\"searchPerformer\"" + SearchScene []*Scene "json:\"searchScene\" graphql:\"searchScene\"" + Version Version "json:\"version\" graphql:\"version\"" } type Mutation struct { - SceneCreate *Scene "json:\"sceneCreate\" graphql:\"sceneCreate\"" - SceneUpdate *Scene "json:\"sceneUpdate\" graphql:\"sceneUpdate\"" - SceneDestroy bool "json:\"sceneDestroy\" graphql:\"sceneDestroy\"" - PerformerCreate *Performer "json:\"performerCreate\" graphql:\"performerCreate\"" - PerformerUpdate *Performer "json:\"performerUpdate\" graphql:\"performerUpdate\"" - PerformerDestroy bool "json:\"performerDestroy\" graphql:\"performerDestroy\"" - StudioCreate *Studio "json:\"studioCreate\" graphql:\"studioCreate\"" - StudioUpdate *Studio "json:\"studioUpdate\" graphql:\"studioUpdate\"" - StudioDestroy bool "json:\"studioDestroy\" 
graphql:\"studioDestroy\"" - TagCreate *Tag "json:\"tagCreate\" graphql:\"tagCreate\"" - TagUpdate *Tag "json:\"tagUpdate\" graphql:\"tagUpdate\"" - TagDestroy bool "json:\"tagDestroy\" graphql:\"tagDestroy\"" - UserCreate *User "json:\"userCreate\" graphql:\"userCreate\"" - UserUpdate *User "json:\"userUpdate\" graphql:\"userUpdate\"" - UserDestroy bool "json:\"userDestroy\" graphql:\"userDestroy\"" - ImageCreate *Image "json:\"imageCreate\" graphql:\"imageCreate\"" - ImageUpdate *Image "json:\"imageUpdate\" graphql:\"imageUpdate\"" - ImageDestroy bool "json:\"imageDestroy\" graphql:\"imageDestroy\"" - RegenerateAPIKey string "json:\"regenerateAPIKey\" graphql:\"regenerateAPIKey\"" - ChangePassword bool "json:\"changePassword\" graphql:\"changePassword\"" - SceneEdit Edit "json:\"sceneEdit\" graphql:\"sceneEdit\"" - PerformerEdit Edit "json:\"performerEdit\" graphql:\"performerEdit\"" - StudioEdit Edit "json:\"studioEdit\" graphql:\"studioEdit\"" - TagEdit Edit "json:\"tagEdit\" graphql:\"tagEdit\"" - EditVote Edit "json:\"editVote\" graphql:\"editVote\"" - EditComment Edit "json:\"editComment\" graphql:\"editComment\"" - ApplyEdit Edit "json:\"applyEdit\" graphql:\"applyEdit\"" - CancelEdit Edit "json:\"cancelEdit\" graphql:\"cancelEdit\"" - SubmitFingerprint bool "json:\"submitFingerprint\" graphql:\"submitFingerprint\"" + SceneCreate *Scene "json:\"sceneCreate\" graphql:\"sceneCreate\"" + SceneUpdate *Scene "json:\"sceneUpdate\" graphql:\"sceneUpdate\"" + SceneDestroy bool "json:\"sceneDestroy\" graphql:\"sceneDestroy\"" + PerformerCreate *Performer "json:\"performerCreate\" graphql:\"performerCreate\"" + PerformerUpdate *Performer "json:\"performerUpdate\" graphql:\"performerUpdate\"" + PerformerDestroy bool "json:\"performerDestroy\" graphql:\"performerDestroy\"" + StudioCreate *Studio "json:\"studioCreate\" graphql:\"studioCreate\"" + StudioUpdate *Studio "json:\"studioUpdate\" graphql:\"studioUpdate\"" + StudioDestroy bool "json:\"studioDestroy\" 
graphql:\"studioDestroy\"" + TagCreate *Tag "json:\"tagCreate\" graphql:\"tagCreate\"" + TagUpdate *Tag "json:\"tagUpdate\" graphql:\"tagUpdate\"" + TagDestroy bool "json:\"tagDestroy\" graphql:\"tagDestroy\"" + UserCreate *User "json:\"userCreate\" graphql:\"userCreate\"" + UserUpdate *User "json:\"userUpdate\" graphql:\"userUpdate\"" + UserDestroy bool "json:\"userDestroy\" graphql:\"userDestroy\"" + ImageCreate *Image "json:\"imageCreate\" graphql:\"imageCreate\"" + ImageDestroy bool "json:\"imageDestroy\" graphql:\"imageDestroy\"" + NewUser *string "json:\"newUser\" graphql:\"newUser\"" + ActivateNewUser *User "json:\"activateNewUser\" graphql:\"activateNewUser\"" + GenerateInviteCode string "json:\"generateInviteCode\" graphql:\"generateInviteCode\"" + RescindInviteCode bool "json:\"rescindInviteCode\" graphql:\"rescindInviteCode\"" + GrantInvite int "json:\"grantInvite\" graphql:\"grantInvite\"" + RevokeInvite int "json:\"revokeInvite\" graphql:\"revokeInvite\"" + TagCategoryCreate *TagCategory "json:\"tagCategoryCreate\" graphql:\"tagCategoryCreate\"" + TagCategoryUpdate *TagCategory "json:\"tagCategoryUpdate\" graphql:\"tagCategoryUpdate\"" + TagCategoryDestroy bool "json:\"tagCategoryDestroy\" graphql:\"tagCategoryDestroy\"" + RegenerateAPIKey string "json:\"regenerateAPIKey\" graphql:\"regenerateAPIKey\"" + ResetPassword bool "json:\"resetPassword\" graphql:\"resetPassword\"" + ChangePassword bool "json:\"changePassword\" graphql:\"changePassword\"" + SceneEdit Edit "json:\"sceneEdit\" graphql:\"sceneEdit\"" + PerformerEdit Edit "json:\"performerEdit\" graphql:\"performerEdit\"" + StudioEdit Edit "json:\"studioEdit\" graphql:\"studioEdit\"" + TagEdit Edit "json:\"tagEdit\" graphql:\"tagEdit\"" + EditVote Edit "json:\"editVote\" graphql:\"editVote\"" + EditComment Edit "json:\"editComment\" graphql:\"editComment\"" + ApplyEdit Edit "json:\"applyEdit\" graphql:\"applyEdit\"" + CancelEdit Edit "json:\"cancelEdit\" graphql:\"cancelEdit\"" + SubmitFingerprint 
bool "json:\"submitFingerprint\" graphql:\"submitFingerprint\"" } type URLFragment struct { URL string "json:\"url\" graphql:\"url\"" @@ -76,8 +87,8 @@ type URLFragment struct { type ImageFragment struct { ID string "json:\"id\" graphql:\"id\"" URL string "json:\"url\" graphql:\"url\"" - Width *int "json:\"width\" graphql:\"width\"" - Height *int "json:\"height\" graphql:\"height\"" + Width int "json:\"width\" graphql:\"width\"" + Height int "json:\"height\" graphql:\"height\"" } type StudioFragment struct { Name string "json:\"name\" graphql:\"name\"" @@ -155,6 +166,12 @@ type FindScenesByFingerprints struct { type SearchScene struct { SearchScene []*SceneFragment "json:\"searchScene\" graphql:\"searchScene\"" } +type SearchPerformer struct { + SearchPerformer []*PerformerFragment "json:\"searchPerformer\" graphql:\"searchPerformer\"" +} +type FindPerformerByID struct { + FindPerformer *PerformerFragment "json:\"findPerformer\" graphql:\"findPerformer\"" +} type SubmitFingerprintPayload struct { SubmitFingerprint bool "json:\"submitFingerprint\" graphql:\"submitFingerprint\"" } @@ -164,40 +181,9 @@ const FindSceneByFingerprintQuery = `query FindSceneByFingerprint ($fingerprint: ... SceneFragment } } -fragment SceneFragment on Scene { +fragment TagFragment on Tag { + name id - title - details - duration - date - urls { - ... URLFragment - } - images { - ... ImageFragment - } - studio { - ... StudioFragment - } - tags { - ... TagFragment - } - performers { - ... PerformerAppearanceFragment - } - fingerprints { - ... FingerprintFragment - } -} -fragment URLFragment on URL { - url - type -} -fragment PerformerAppearanceFragment on PerformerAppearance { - as - performer { - ... 
PerformerFragment - } } fragment PerformerFragment on Performer { id @@ -236,16 +222,43 @@ fragment FuzzyDateFragment on FuzzyDate { date accuracy } +fragment BodyModificationFragment on BodyModification { + location + description +} fragment FingerprintFragment on Fingerprint { algorithm hash duration } -fragment ImageFragment on Image { +fragment SceneFragment on Scene { id + title + details + duration + date + urls { + ... URLFragment + } + images { + ... ImageFragment + } + studio { + ... StudioFragment + } + tags { + ... TagFragment + } + performers { + ... PerformerAppearanceFragment + } + fingerprints { + ... FingerprintFragment + } +} +fragment URLFragment on URL { url - width - height + type } fragment StudioFragment on Studio { name @@ -257,9 +270,17 @@ fragment StudioFragment on Studio { ... ImageFragment } } -fragment TagFragment on Tag { - name +fragment ImageFragment on Image { id + url + width + height +} +fragment PerformerAppearanceFragment on PerformerAppearance { + as + performer { + ... PerformerFragment + } } fragment MeasurementsFragment on Measurements { band_size @@ -267,10 +288,6 @@ fragment MeasurementsFragment on Measurements { waist hip } -fragment BodyModificationFragment on BodyModification { - location - description -} ` func (c *Client) FindSceneByFingerprint(ctx context.Context, fingerprint FingerprintQueryInput, httpRequestOptions ...client.HTTPRequestOption) (*FindSceneByFingerprint, error) { @@ -291,6 +308,14 @@ const FindScenesByFingerprintsQuery = `query FindScenesByFingerprints ($fingerpr ... 
SceneFragment } } +fragment TagFragment on Tag { + name + id +} +fragment FuzzyDateFragment on FuzzyDate { + date + accuracy +} fragment PerformerAppearanceFragment on PerformerAppearance { as performer { @@ -336,10 +361,9 @@ fragment MeasurementsFragment on Measurements { waist hip } -fragment FingerprintFragment on Fingerprint { - algorithm - hash - duration +fragment BodyModificationFragment on BodyModification { + location + description } fragment SceneFragment on Scene { id @@ -376,10 +400,6 @@ fragment ImageFragment on Image { width height } -fragment TagFragment on Tag { - name - id -} fragment StudioFragment on Studio { name id @@ -390,13 +410,10 @@ fragment StudioFragment on Studio { ... ImageFragment } } -fragment FuzzyDateFragment on FuzzyDate { - date - accuracy -} -fragment BodyModificationFragment on BodyModification { - location - description +fragment FingerprintFragment on Fingerprint { + algorithm + hash + duration } ` @@ -418,6 +435,21 @@ const SearchSceneQuery = `query SearchScene ($term: String!) { ... SceneFragment } } +fragment MeasurementsFragment on Measurements { + band_size + cup_size + waist + hip +} +fragment BodyModificationFragment on BodyModification { + location + description +} +fragment FingerprintFragment on Fingerprint { + algorithm + hash + duration +} fragment URLFragment on URL { url type @@ -432,49 +464,10 @@ fragment TagFragment on Tag { name id } -fragment PerformerFragment on Performer { - id - name - disambiguation - aliases - gender - urls { - ... URLFragment - } - images { - ... ImageFragment - } - birthdate { - ... FuzzyDateFragment - } - ethnicity - country - eye_color - hair_color - height - measurements { - ... MeasurementsFragment - } - breast_type - career_start_year - career_end_year - tattoos { - ... BodyModificationFragment - } - piercings { - ... 
BodyModificationFragment - } -} fragment FuzzyDateFragment on FuzzyDate { date accuracy } -fragment MeasurementsFragment on Measurements { - band_size - cup_size - waist - hip -} fragment SceneFragment on Scene { id title @@ -516,14 +509,38 @@ fragment PerformerAppearanceFragment on PerformerAppearance { ... PerformerFragment } } -fragment BodyModificationFragment on BodyModification { - location - description -} -fragment FingerprintFragment on Fingerprint { - algorithm - hash - duration +fragment PerformerFragment on Performer { + id + name + disambiguation + aliases + gender + urls { + ... URLFragment + } + images { + ... ImageFragment + } + birthdate { + ... FuzzyDateFragment + } + ethnicity + country + eye_color + hair_color + height + measurements { + ... MeasurementsFragment + } + breast_type + career_start_year + career_end_year + tattoos { + ... BodyModificationFragment + } + piercings { + ... BodyModificationFragment + } } ` @@ -540,6 +557,160 @@ func (c *Client) SearchScene(ctx context.Context, term string, httpRequestOption return &res, nil } +const SearchPerformerQuery = `query SearchPerformer ($term: String!) { + searchPerformer(term: $term) { + ... PerformerFragment + } +} +fragment FuzzyDateFragment on FuzzyDate { + date + accuracy +} +fragment MeasurementsFragment on Measurements { + band_size + cup_size + waist + hip +} +fragment BodyModificationFragment on BodyModification { + location + description +} +fragment PerformerFragment on Performer { + id + name + disambiguation + aliases + gender + urls { + ... URLFragment + } + images { + ... ImageFragment + } + birthdate { + ... FuzzyDateFragment + } + ethnicity + country + eye_color + hair_color + height + measurements { + ... MeasurementsFragment + } + breast_type + career_start_year + career_end_year + tattoos { + ... BodyModificationFragment + } + piercings { + ... 
BodyModificationFragment + } +} +fragment URLFragment on URL { + url + type +} +fragment ImageFragment on Image { + id + url + width + height +} +` + +func (c *Client) SearchPerformer(ctx context.Context, term string, httpRequestOptions ...client.HTTPRequestOption) (*SearchPerformer, error) { + vars := map[string]interface{}{ + "term": term, + } + + var res SearchPerformer + if err := c.Client.Post(ctx, SearchPerformerQuery, &res, vars, httpRequestOptions...); err != nil { + return nil, err + } + + return &res, nil +} + +const FindPerformerByIDQuery = `query FindPerformerByID ($id: ID!) { + findPerformer(id: $id) { + ... PerformerFragment + } +} +fragment FuzzyDateFragment on FuzzyDate { + date + accuracy +} +fragment MeasurementsFragment on Measurements { + band_size + cup_size + waist + hip +} +fragment BodyModificationFragment on BodyModification { + location + description +} +fragment PerformerFragment on Performer { + id + name + disambiguation + aliases + gender + urls { + ... URLFragment + } + images { + ... ImageFragment + } + birthdate { + ... FuzzyDateFragment + } + ethnicity + country + eye_color + hair_color + height + measurements { + ... MeasurementsFragment + } + breast_type + career_start_year + career_end_year + tattoos { + ... BodyModificationFragment + } + piercings { + ... BodyModificationFragment + } +} +fragment URLFragment on URL { + url + type +} +fragment ImageFragment on Image { + id + url + width + height +} +` + +func (c *Client) FindPerformerByID(ctx context.Context, id string, httpRequestOptions ...client.HTTPRequestOption) (*FindPerformerByID, error) { + vars := map[string]interface{}{ + "id": id, + } + + var res FindPerformerByID + if err := c.Client.Post(ctx, FindPerformerByIDQuery, &res, vars, httpRequestOptions...); err != nil { + return nil, err + } + + return &res, nil +} + const SubmitFingerprintQuery = `mutation SubmitFingerprint ($input: FingerprintSubmission!) 
{ submitFingerprint(input: $input) } diff --git a/pkg/scraper/stashbox/graphql/generated_models.go b/pkg/scraper/stashbox/graphql/generated_models.go index a8715092b..9fa66170f 100644 --- a/pkg/scraper/stashbox/graphql/generated_models.go +++ b/pkg/scraper/stashbox/graphql/generated_models.go @@ -7,6 +7,8 @@ import ( "io" "strconv" "time" + + "github.com/99designs/gqlgen/graphql" ) type EditDetails interface { @@ -17,6 +19,13 @@ type EditTarget interface { IsEditTarget() } +type ActivateNewUserInput struct { + Name string `json:"name"` + Email string `json:"email"` + ActivationKey string `json:"activation_key"` + Password string `json:"password"` +} + type ApplyEditInput struct { ID string `json:"id"` } @@ -58,11 +67,15 @@ type Edit struct { Target EditTarget `json:"target"` TargetType TargetTypeEnum `json:"target_type"` // Objects to merge with the target. Only applicable to merges - MergeSources []EditTarget `json:"merge_sources"` - Operation OperationEnum `json:"operation"` - Details EditDetails `json:"details"` - Comments []*EditComment `json:"comments"` - Votes []*VoteComment `json:"votes"` + MergeSources []EditTarget `json:"merge_sources"` + Operation OperationEnum `json:"operation"` + Details EditDetails `json:"details"` + // Previous state of fields being modified - null if operation is create or delete. 
+ OldDetails EditDetails `json:"old_details"` + // Entity specific options + Options *PerformerEditOptions `json:"options"` + Comments []*EditComment `json:"comments"` + Votes []*VoteComment `json:"votes"` // = Accepted - Rejected VoteCount int `json:"vote_count"` Status VoteStatusEnum `json:"status"` @@ -115,11 +128,6 @@ type EditVoteInput struct { Type VoteTypeEnum `json:"type"` } -type EthnicityCriterionInput struct { - Value *EthnicityEnum `json:"value"` - Modifier CriterionModifier `json:"modifier"` -} - type EyeColorCriterionInput struct { Value *EyeColorEnum `json:"value"` Modifier CriterionModifier `json:"modifier"` @@ -157,6 +165,11 @@ type FuzzyDateInput struct { Accuracy DateAccuracyEnum `json:"accuracy"` } +type GrantInviteInput struct { + UserID string `json:"user_id"` + Amount int `json:"amount"` +} + type HairColorCriterionInput struct { Value *HairColorEnum `json:"value"` Modifier CriterionModifier `json:"modifier"` @@ -170,12 +183,13 @@ type IDCriterionInput struct { type Image struct { ID string `json:"id"` URL string `json:"url"` - Width *int `json:"width"` - Height *int `json:"height"` + Width int `json:"width"` + Height int `json:"height"` } type ImageCreateInput struct { - URL string `json:"url"` + URL *string `json:"url"` + File *graphql.Upload `json:"file"` } type ImageDestroyInput struct { @@ -183,8 +197,8 @@ type ImageDestroyInput struct { } type ImageUpdateInput struct { - ID string `json:"id"` - URL string `json:"url"` + ID string `json:"id"` + URL *string `json:"url"` } type IntCriterionInput struct { @@ -211,6 +225,11 @@ type MultiIDCriterionInput struct { Modifier CriterionModifier `json:"modifier"` } +type NewUserInput struct { + Email string `json:"email"` + InviteKey *string `json:"invite_key"` +} + type Performer struct { ID string `json:"id"` Name string `json:"name"` @@ -234,6 +253,8 @@ type Performer struct { Piercings []*BodyModification `json:"piercings"` Images []*Image `json:"images"` Deleted bool `json:"deleted"` + Edits 
[]*Edit `json:"edits"` + SceneCount int `json:"scene_count"` } func (Performer) IsEditTarget() {} @@ -276,21 +297,25 @@ type PerformerDestroyInput struct { } type PerformerEdit struct { - Name *string `json:"name"` - Disambiguation *string `json:"disambiguation"` - AddedAliases []string `json:"added_aliases"` - RemovedAliases []string `json:"removed_aliases"` - Gender *GenderEnum `json:"gender"` - AddedUrls []*URL `json:"added_urls"` - RemovedUrls []*URL `json:"removed_urls"` - Birthdate *FuzzyDate `json:"birthdate"` - Ethnicity *EthnicityEnum `json:"ethnicity"` - Country *string `json:"country"` - EyeColor *EyeColorEnum `json:"eye_color"` - HairColor *HairColorEnum `json:"hair_color"` + Name *string `json:"name"` + Disambiguation *string `json:"disambiguation"` + AddedAliases []string `json:"added_aliases"` + RemovedAliases []string `json:"removed_aliases"` + Gender *GenderEnum `json:"gender"` + AddedUrls []*URL `json:"added_urls"` + RemovedUrls []*URL `json:"removed_urls"` + Birthdate *string `json:"birthdate"` + BirthdateAccuracy *string `json:"birthdate_accuracy"` + Ethnicity *EthnicityEnum `json:"ethnicity"` + Country *string `json:"country"` + EyeColor *EyeColorEnum `json:"eye_color"` + HairColor *HairColorEnum `json:"hair_color"` // Height in cm Height *int `json:"height"` - Measurements *Measurements `json:"measurements"` + CupSize *string `json:"cup_size"` + BandSize *int `json:"band_size"` + WaistSize *int `json:"waist_size"` + HipSize *int `json:"hip_size"` BreastType *BreastTypeEnum `json:"breast_type"` CareerStartYear *int `json:"career_start_year"` CareerEndYear *int `json:"career_end_year"` @@ -329,6 +354,22 @@ type PerformerEditInput struct { Edit *EditInput `json:"edit"` // Not required for destroy type Details *PerformerEditDetailsInput `json:"details"` + // Controls aliases modification for merges and name modifications + Options *PerformerEditOptionsInput `json:"options"` +} + +type PerformerEditOptions struct { + // Set performer alias on 
scenes without alias to old name if name is changed + SetModifyAliases bool `json:"set_modify_aliases"` + // Set performer alias on scenes attached to merge sources to old name + SetMergeAliases bool `json:"set_merge_aliases"` +} + +type PerformerEditOptionsInput struct { + // Set performer alias on scenes without alias to old name if name is changed + SetModifyAliases *bool `json:"set_modify_aliases"` + // Set performer alias on scenes attached to merge sources to old name + SetMergeAliases *bool `json:"set_merge_aliases"` } type PerformerFilterType struct { @@ -339,13 +380,13 @@ type PerformerFilterType struct { // Search aliases only - assumes like query unless quoted Alias *string `json:"alias"` Disambiguation *StringCriterionInput `json:"disambiguation"` - Gender *GenderEnum `json:"gender"` + Gender *GenderFilterEnum `json:"gender"` // Filter to search urls - assumes like query unless quoted URL *string `json:"url"` Birthdate *DateCriterionInput `json:"birthdate"` BirthYear *IntCriterionInput `json:"birth_year"` Age *IntCriterionInput `json:"age"` - Ethnicity *EthnicityCriterionInput `json:"ethnicity"` + Ethnicity *EthnicityFilterEnum `json:"ethnicity"` Country *StringCriterionInput `json:"country"` EyeColor *EyeColorCriterionInput `json:"eye_color"` HairColor *HairColorCriterionInput `json:"hair_color"` @@ -410,6 +451,11 @@ type QueryStudiosResultType struct { Studios []*Studio `json:"studios"` } +type QueryTagCategoriesResultType struct { + Count int `json:"count"` + TagCategories []*TagCategory `json:"tag_categories"` +} + type QueryTagsResultType struct { Count int `json:"count"` Tags []*Tag `json:"tags"` @@ -420,6 +466,15 @@ type QueryUsersResultType struct { Users []*User `json:"users"` } +type ResetPasswordInput struct { + Email string `json:"email"` +} + +type RevokeInviteInput struct { + UserID string `json:"user_id"` + Amount int `json:"amount"` +} + type RoleCriterionInput struct { Value []RoleEnum `json:"value"` Modifier CriterionModifier 
`json:"modifier"` @@ -515,6 +570,8 @@ type SceneFilterType struct { Date *DateCriterionInput `json:"date"` // Filter to only include scenes with this studio Studios *MultiIDCriterionInput `json:"studios"` + // Filter to only include scenes with this studio as primary or parent + ParentStudio *string `json:"parentStudio"` // Filter to only include scenes with these tags Tags *MultiIDCriterionInput `json:"tags"` // Filter to only include scenes with these performers @@ -598,9 +655,12 @@ type StudioEditInput struct { type StudioFilterType struct { // Filter to search name - assumes like query unless quoted Name *string `json:"name"` + // Filter to search studio and parent studio name - assumes like query unless quoted + Names *string `json:"names"` // Filter to search url - assumes like query unless quoted - URL *string `json:"url"` - Parent *IDCriterionInput `json:"parent"` + URL *string `json:"url"` + Parent *IDCriterionInput `json:"parent"` + HasParent *bool `json:"has_parent"` } type StudioUpdateInput struct { @@ -613,20 +673,46 @@ type StudioUpdateInput struct { } type Tag struct { - ID string `json:"id"` - Name string `json:"name"` - Description *string `json:"description"` - Aliases []string `json:"aliases"` - Deleted bool `json:"deleted"` - Edits []*Edit `json:"edits"` + ID string `json:"id"` + Name string `json:"name"` + Description *string `json:"description"` + Aliases []string `json:"aliases"` + Deleted bool `json:"deleted"` + Edits []*Edit `json:"edits"` + Category *TagCategory `json:"category"` } func (Tag) IsEditTarget() {} +type TagCategory struct { + ID string `json:"id"` + Name string `json:"name"` + Group TagGroupEnum `json:"group"` + Description *string `json:"description"` +} + +type TagCategoryCreateInput struct { + Name string `json:"name"` + Group TagGroupEnum `json:"group"` + Description *string `json:"description"` +} + +type TagCategoryDestroyInput struct { + ID string `json:"id"` +} + +type TagCategoryUpdateInput struct { + ID string 
`json:"id"` + Name *string `json:"name"` + Group *TagGroupEnum `json:"group"` + Description *string `json:"description"` +} + type TagCreateInput struct { Name string `json:"name"` Description *string `json:"description"` Aliases []string `json:"aliases"` + CategoryID *string `json:"category_id"` } type TagDestroyInput struct { @@ -638,6 +724,7 @@ type TagEdit struct { Description *string `json:"description"` AddedAliases []string `json:"added_aliases"` RemovedAliases []string `json:"removed_aliases"` + CategoryID *string `json:"category_id"` } func (TagEdit) IsEditDetails() {} @@ -646,6 +733,7 @@ type TagEditDetailsInput struct { Name *string `json:"name"` Description *string `json:"description"` Aliases []string `json:"aliases"` + CategoryID *string `json:"category_id"` } type TagEditInput struct { @@ -661,6 +749,8 @@ type TagFilterType struct { Names *string `json:"names"` // Filter to search name - assumes like query unless quoted Name *string `json:"name"` + // Filter to category ID + CategoryID *string `json:"category_id"` } type TagUpdateInput struct { @@ -668,6 +758,7 @@ type TagUpdateInput struct { Name *string `json:"name"` Description *string `json:"description"` Aliases []string `json:"aliases"` + CategoryID *string `json:"category_id"` } type URL struct { @@ -695,21 +786,26 @@ type User struct { // Votes on unsuccessful edits UnsuccessfulVotes int `json:"unsuccessful_votes"` // Calls to the API from this user over a configurable time period - APICalls int `json:"api_calls"` + APICalls int `json:"api_calls"` + InvitedBy *User `json:"invited_by"` + InviteTokens *int `json:"invite_tokens"` + ActiveInviteCodes []string `json:"active_invite_codes"` } type UserChangePasswordInput struct { // Password in plain text - ExistingPassword string `json:"existing_password"` - NewPassword string `json:"new_password"` + ExistingPassword *string `json:"existing_password"` + NewPassword string `json:"new_password"` + ResetKey *string `json:"reset_key"` } type 
UserCreateInput struct { Name string `json:"name"` // Password in plain text - Password string `json:"password"` - Roles []RoleEnum `json:"roles"` - Email string `json:"email"` + Password string `json:"password"` + Roles []RoleEnum `json:"roles"` + Email string `json:"email"` + InvitedByID *string `json:"invited_by_id"` } type UserDestroyInput struct { @@ -735,6 +831,8 @@ type UserFilterType struct { UnsuccessfulVotes *IntCriterionInput `json:"unsuccessful_votes"` // Filter by number of API calls APICalls *IntCriterionInput `json:"api_calls"` + // Filter by user that invited + InvitedBy *string `json:"invited_by"` } type UserUpdateInput struct { @@ -960,6 +1058,61 @@ func (e EthnicityEnum) MarshalGQL(w io.Writer) { fmt.Fprint(w, strconv.Quote(e.String())) } +type EthnicityFilterEnum string + +const ( + EthnicityFilterEnumUnknown EthnicityFilterEnum = "UNKNOWN" + EthnicityFilterEnumCaucasian EthnicityFilterEnum = "CAUCASIAN" + EthnicityFilterEnumBlack EthnicityFilterEnum = "BLACK" + EthnicityFilterEnumAsian EthnicityFilterEnum = "ASIAN" + EthnicityFilterEnumIndian EthnicityFilterEnum = "INDIAN" + EthnicityFilterEnumLatin EthnicityFilterEnum = "LATIN" + EthnicityFilterEnumMiddleEastern EthnicityFilterEnum = "MIDDLE_EASTERN" + EthnicityFilterEnumMixed EthnicityFilterEnum = "MIXED" + EthnicityFilterEnumOther EthnicityFilterEnum = "OTHER" +) + +var AllEthnicityFilterEnum = []EthnicityFilterEnum{ + EthnicityFilterEnumUnknown, + EthnicityFilterEnumCaucasian, + EthnicityFilterEnumBlack, + EthnicityFilterEnumAsian, + EthnicityFilterEnumIndian, + EthnicityFilterEnumLatin, + EthnicityFilterEnumMiddleEastern, + EthnicityFilterEnumMixed, + EthnicityFilterEnumOther, +} + +func (e EthnicityFilterEnum) IsValid() bool { + switch e { + case EthnicityFilterEnumUnknown, EthnicityFilterEnumCaucasian, EthnicityFilterEnumBlack, EthnicityFilterEnumAsian, EthnicityFilterEnumIndian, EthnicityFilterEnumLatin, EthnicityFilterEnumMiddleEastern, EthnicityFilterEnumMixed, 
EthnicityFilterEnumOther: + return true + } + return false +} + +func (e EthnicityFilterEnum) String() string { + return string(e) +} + +func (e *EthnicityFilterEnum) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = EthnicityFilterEnum(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid EthnicityFilterEnum", str) + } + return nil +} + +func (e EthnicityFilterEnum) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + type EyeColorEnum string const ( @@ -1014,16 +1167,18 @@ type FingerprintAlgorithm string const ( FingerprintAlgorithmMd5 FingerprintAlgorithm = "MD5" FingerprintAlgorithmOshash FingerprintAlgorithm = "OSHASH" + FingerprintAlgorithmPhash FingerprintAlgorithm = "PHASH" ) var AllFingerprintAlgorithm = []FingerprintAlgorithm{ FingerprintAlgorithmMd5, FingerprintAlgorithmOshash, + FingerprintAlgorithmPhash, } func (e FingerprintAlgorithm) IsValid() bool { switch e { - case FingerprintAlgorithmMd5, FingerprintAlgorithmOshash: + case FingerprintAlgorithmMd5, FingerprintAlgorithmOshash, FingerprintAlgorithmPhash: return true } return false @@ -1097,6 +1252,55 @@ func (e GenderEnum) MarshalGQL(w io.Writer) { fmt.Fprint(w, strconv.Quote(e.String())) } +type GenderFilterEnum string + +const ( + GenderFilterEnumUnknown GenderFilterEnum = "UNKNOWN" + GenderFilterEnumMale GenderFilterEnum = "MALE" + GenderFilterEnumFemale GenderFilterEnum = "FEMALE" + GenderFilterEnumTransgenderMale GenderFilterEnum = "TRANSGENDER_MALE" + GenderFilterEnumTransgenderFemale GenderFilterEnum = "TRANSGENDER_FEMALE" + GenderFilterEnumIntersex GenderFilterEnum = "INTERSEX" +) + +var AllGenderFilterEnum = []GenderFilterEnum{ + GenderFilterEnumUnknown, + GenderFilterEnumMale, + GenderFilterEnumFemale, + GenderFilterEnumTransgenderMale, + GenderFilterEnumTransgenderFemale, + GenderFilterEnumIntersex, +} + +func (e GenderFilterEnum) IsValid() bool { + switch e { + case 
GenderFilterEnumUnknown, GenderFilterEnumMale, GenderFilterEnumFemale, GenderFilterEnumTransgenderMale, GenderFilterEnumTransgenderFemale, GenderFilterEnumIntersex: + return true + } + return false +} + +func (e GenderFilterEnum) String() string { + return string(e) +} + +func (e *GenderFilterEnum) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = GenderFilterEnum(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid GenderFilterEnum", str) + } + return nil +} + +func (e GenderFilterEnum) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + type HairColorEnum string const ( @@ -1205,6 +1409,10 @@ const ( RoleEnumEdit RoleEnum = "EDIT" RoleEnumModify RoleEnum = "MODIFY" RoleEnumAdmin RoleEnum = "ADMIN" + // May generate invites without tokens + RoleEnumInvite RoleEnum = "INVITE" + // May grant and rescind invite tokens and rescind invite keys + RoleEnumManageInvites RoleEnum = "MANAGE_INVITES" ) var AllRoleEnum = []RoleEnum{ @@ -1213,11 +1421,13 @@ var AllRoleEnum = []RoleEnum{ RoleEnumEdit, RoleEnumModify, RoleEnumAdmin, + RoleEnumInvite, + RoleEnumManageInvites, } func (e RoleEnum) IsValid() bool { switch e { - case RoleEnumRead, RoleEnumVote, RoleEnumEdit, RoleEnumModify, RoleEnumAdmin: + case RoleEnumRead, RoleEnumVote, RoleEnumEdit, RoleEnumModify, RoleEnumAdmin, RoleEnumInvite, RoleEnumManageInvites: return true } return false @@ -1285,6 +1495,49 @@ func (e SortDirectionEnum) MarshalGQL(w io.Writer) { fmt.Fprint(w, strconv.Quote(e.String())) } +type TagGroupEnum string + +const ( + TagGroupEnumPeople TagGroupEnum = "PEOPLE" + TagGroupEnumScene TagGroupEnum = "SCENE" + TagGroupEnumAction TagGroupEnum = "ACTION" +) + +var AllTagGroupEnum = []TagGroupEnum{ + TagGroupEnumPeople, + TagGroupEnumScene, + TagGroupEnumAction, +} + +func (e TagGroupEnum) IsValid() bool { + switch e { + case TagGroupEnumPeople, TagGroupEnumScene, TagGroupEnumAction: + return true
+ } + return false +} + +func (e TagGroupEnum) String() string { + return string(e) +} + +func (e *TagGroupEnum) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = TagGroupEnum(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid TagGroupEnum", str) + } + return nil +} + +func (e TagGroupEnum) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + type TargetTypeEnum string const ( diff --git a/pkg/scraper/stashbox/stash_box.go b/pkg/scraper/stashbox/stash_box.go index 1dac41422..222462d5b 100644 --- a/pkg/scraper/stashbox/stash_box.go +++ b/pkg/scraper/stashbox/stash_box.go @@ -66,7 +66,7 @@ func (c Client) QueryStashBoxScene(queryStr string) ([]*models.ScrapedScene, err } // FindStashBoxScenesByFingerprints queries stash-box for scenes using every -// scene's MD5 checksum and/or oshash. +// scene's MD5/OSHASH checksum, or PHash func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([]*models.ScrapedScene, error) { ids, err := utils.StringSliceToIntSlice(sceneIDs) if err != nil { @@ -95,6 +95,10 @@ func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([]*models.S if scene.OSHash.Valid { fingerprints = append(fingerprints, scene.OSHash.String) } + + if scene.Phash.Valid { + fingerprints = append(fingerprints, utils.PhashToString(scene.Phash.Int64)) + } } return nil @@ -189,6 +193,18 @@ func (c Client) SubmitStashBoxFingerprints(sceneIDs []string, endpoint string) ( Fingerprint: &fingerprint, }) } + + if scene.Phash.Valid && scene.Duration.Valid { + fingerprint := graphql.FingerprintInput{ + Hash: utils.PhashToString(scene.Phash.Int64), + Algorithm: graphql.FingerprintAlgorithmPhash, + Duration: int(scene.Duration.Float64), + } + fingerprints = append(fingerprints, graphql.FingerprintSubmission{ + SceneID: sceneStashID, + Fingerprint: &fingerprint, + }) + } } } @@ -211,6 +227,92 @@ func (c Client) 
submitStashBoxFingerprints(fingerprints []graphql.FingerprintSub return true, nil } +// QueryStashBoxPerformer queries stash-box for performers using a query string. +func (c Client) QueryStashBoxPerformer(queryStr string) ([]*models.StashBoxPerformerQueryResult, error) { + performers, err := c.queryStashBoxPerformer(queryStr) + + res := []*models.StashBoxPerformerQueryResult{ + { + Query: queryStr, + Results: performers, + }, + } + return res, err +} + +func (c Client) queryStashBoxPerformer(queryStr string) ([]*models.ScrapedScenePerformer, error) { + performers, err := c.client.SearchPerformer(context.TODO(), queryStr) + if err != nil { + return nil, err + } + + performerFragments := performers.SearchPerformer + + var ret []*models.ScrapedScenePerformer + for _, fragment := range performerFragments { + performer := performerFragmentToScrapedScenePerformer(*fragment) + ret = append(ret, performer) + } + + return ret, nil +} + +// FindStashBoxPerformersByNames queries stash-box for performers by name +func (c Client) FindStashBoxPerformersByNames(performerIDs []string) ([]*models.StashBoxPerformerQueryResult, error) { + ids, err := utils.StringSliceToIntSlice(performerIDs) + if err != nil { + return nil, err + } + + var performers []*models.Performer + + if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + qb := r.Performer() + + for _, performerID := range ids { + performer, err := qb.Find(performerID) + if err != nil { + return err + } + + if performer == nil { + return fmt.Errorf("performer with id %d not found", performerID) + } + + if performer.Name.Valid { + performers = append(performers, performer) + } + } + + return nil + }); err != nil { + return nil, err + } + + return c.findStashBoxPerformersByNames(performers) +} + +func (c Client) findStashBoxPerformersByNames(performers []*models.Performer) ([]*models.StashBoxPerformerQueryResult, error) { + var ret []*models.StashBoxPerformerQueryResult + for _, performer := 
range performers { + if performer.Name.Valid { + performerResults, err := c.queryStashBoxPerformer(performer.Name.String) + if err != nil { + return nil, err + } + + result := models.StashBoxPerformerQueryResult{ + Query: strconv.Itoa(performer.ID), + Results: performerResults, + } + + ret = append(ret, &result) + } + } + + return ret, nil +} + func findURL(urls []*graphql.URLFragment, urlType string) *string { for _, u := range urls { if u.Type == urlType { @@ -222,9 +324,12 @@ func findURL(urls []*graphql.URLFragment, urlType string) *string { return nil } -func enumToStringPtr(e fmt.Stringer) *string { +func enumToStringPtr(e fmt.Stringer, titleCase bool) *string { if e != nil { ret := e.String() + if titleCase { + ret = strings.Title(strings.ToLower(ret)) + } return &ret } @@ -248,6 +353,8 @@ func formatCareerLength(start, end *int) *string { var ret string if end == nil { ret = fmt.Sprintf("%d -", *start) + } else if start == nil { + ret = fmt.Sprintf("- %d", *end) } else { ret = fmt.Sprintf("%d - %d", *start, *end) } @@ -338,19 +445,19 @@ func performerFragmentToScrapedScenePerformer(p graphql.PerformerFragment) *mode } if p.Gender != nil { - sp.Gender = enumToStringPtr(p.Gender) + sp.Gender = enumToStringPtr(p.Gender, false) } if p.Ethnicity != nil { - sp.Ethnicity = enumToStringPtr(p.Ethnicity) + sp.Ethnicity = enumToStringPtr(p.Ethnicity, true) } if p.EyeColor != nil { - sp.EyeColor = enumToStringPtr(p.EyeColor) + sp.EyeColor = enumToStringPtr(p.EyeColor, true) } if p.BreastType != nil { - sp.FakeTits = enumToStringPtr(p.BreastType) + sp.FakeTits = enumToStringPtr(p.BreastType, true) } return sp @@ -447,3 +554,29 @@ func sceneFragmentToScrapedScene(txnManager models.TransactionManager, s *graphq return ss, nil } + +func (c Client) FindStashBoxPerformerByID(id string) (*models.ScrapedScenePerformer, error) { + performer, err := c.client.FindPerformerByID(context.TODO(), id) + if err != nil { + return nil, err + } + + ret := 
performerFragmentToScrapedScenePerformer(*performer.FindPerformer) + return ret, nil +} + +func (c Client) FindStashBoxPerformerByName(name string) (*models.ScrapedScenePerformer, error) { + performers, err := c.client.SearchPerformer(context.TODO(), name) + if err != nil { + return nil, err + } + + var ret *models.ScrapedScenePerformer + for _, performer := range performers.SearchPerformer { + if strings.ToLower(performer.Name) == strings.ToLower(name) { + ret = performerFragmentToScrapedScenePerformer(*performer) + } + } + + return ret, nil +} diff --git a/pkg/scraper/url.go b/pkg/scraper/url.go index 85e1590ee..baa35b07e 100644 --- a/pkg/scraper/url.go +++ b/pkg/scraper/url.go @@ -23,7 +23,6 @@ import ( "golang.org/x/net/publicsuffix" "github.com/stashapp/stash/pkg/logger" - stashConfig "github.com/stashapp/stash/pkg/manager/config" ) // Timeout for the scrape http request. Includes transfer time. May want to make this @@ -52,7 +51,7 @@ func loadURL(url string, scraperConfig config, globalConfig GlobalConfig) (io.Re client := &http.Client{ Transport: &http.Transport{ // ignore insecure certificates - TLSClientConfig: &tls.Config{InsecureSkipVerify: !stashConfig.GetScraperCertCheck()}, + TLSClientConfig: &tls.Config{InsecureSkipVerify: !globalConfig.GetScraperCertCheck()}, }, Timeout: scrapeGetTimeout, // defaultCheckRedirect code with max changed from 10 to 20 @@ -70,17 +69,26 @@ func loadURL(url string, scraperConfig config, globalConfig GlobalConfig) (io.Re return nil, err } - userAgent := globalConfig.UserAgent + userAgent := globalConfig.GetScraperUserAgent() if userAgent != "" { req.Header.Set("User-Agent", userAgent) } + if driverOptions != nil { // setting the Headers after the UA allows us to override it from inside the scraper + for _, h := range driverOptions.Headers { + if h.Key != "" { + req.Header.Set(h.Key, h.Value) + logger.Debugf("[scraper] adding header <%s:%s>", h.Key, h.Value) + } + } + } + resp, err := client.Do(req) if err != nil { return 
nil, err } if resp.StatusCode >= 400 { - return nil, fmt.Errorf("http error %d", resp.StatusCode) + return nil, fmt.Errorf("http error %d:%s", resp.StatusCode, http.StatusText(resp.StatusCode)) } defer resp.Body.Close() @@ -114,14 +122,15 @@ func urlFromCDP(url string, driverOptions scraperDriverOptions, globalConfig Glo act := context.Background() // if scraperCDPPath is a remote address, then allocate accordingly - if globalConfig.CDPPath != "" { + cdpPath := globalConfig.GetScraperCDPPath() + if cdpPath != "" { var cancelAct context.CancelFunc - if globalConfig.isCDPPathHTTP() || globalConfig.isCDPPathWS() { - remote := globalConfig.CDPPath + if isCDPPathHTTP(globalConfig) || isCDPPathWS(globalConfig) { + remote := cdpPath // if CDPPath is http(s) then we need to get the websocket URL - if globalConfig.isCDPPathHTTP() { + if isCDPPathHTTP(globalConfig) { var err error remote, err = getRemoteCDPWSAddress(remote) if err != nil { @@ -140,7 +149,7 @@ func urlFromCDP(url string, driverOptions scraperDriverOptions, globalConfig Glo opts := append(chromedp.DefaultExecAllocatorOptions[:], chromedp.UserDataDir(dir), - chromedp.ExecPath(globalConfig.CDPPath), + chromedp.ExecPath(cdpPath), ) act, cancelAct = chromedp.NewExecAllocator(act, opts...) 
} @@ -156,10 +165,13 @@ func urlFromCDP(url string, driverOptions scraperDriverOptions, globalConfig Glo defer cancel() var res string + headers := cdpHeaders(driverOptions) + err := chromedp.Run(ctx, network.Enable(), setCDPCookies(driverOptions), printCDPCookies(driverOptions, "Cookies found"), + network.SetExtraHTTPHeaders(network.Headers(headers)), chromedp.Navigate(url), chromedp.Sleep(sleepDuration), setCDPClicks(driverOptions), @@ -241,3 +253,16 @@ func cdpNetwork(enable bool) chromedp.Action { return nil }) } + +func cdpHeaders(driverOptions scraperDriverOptions) map[string]interface{} { + headers := map[string]interface{}{} + if driverOptions.Headers != nil { + for _, h := range driverOptions.Headers { + if h.Key != "" { + headers[h.Key] = h.Value + logger.Debugf("[scraper] adding header <%s:%s>", h.Key, h.Value) + } + } + } + return headers +} diff --git a/pkg/scraper/xpath_test.go b/pkg/scraper/xpath_test.go index 275d59830..5983bd7a0 100644 --- a/pkg/scraper/xpath_test.go +++ b/pkg/scraper/xpath_test.go @@ -100,6 +100,14 @@ const htmlDoc1 = ` 5ft7 +