Merge pull request #1390 from stashapp/develop

Merge to master for 0.7
This commit is contained in:
WithoutPants 2021-05-15 16:17:08 +10:00 committed by GitHub
commit 3acb21d4e1
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
358 changed files with 23324 additions and 6539 deletions

View file

@ -8,6 +8,9 @@ on:
release:
types: [ published ]
env:
COMPILER_IMAGE: stashapp/compiler:4
jobs:
build:
runs-on: ubuntu-20.04
@ -17,44 +20,32 @@ jobs:
- name: Checkout
run: git fetch --prune --unshallow --tags
- name: Set up Go
uses: actions/setup-go@v2
with:
go-version: 1.13
- name: Pull compiler image
run: docker pull $COMPILER_IMAGE
- name: Set up Node
uses: actions/setup-node@v2
with:
node-version: '12'
- name: Cache node modules
uses: actions/cache@v2
env:
cache-name: cache-node_modules
with:
path: ui/v2.5/node_modules
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-build-${{ env.cache-name }}-
${{ runner.os }}-build-
${{ runner.os }}-
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/yarn.lock') }}
- name: Pre-install
run: make pre-ui
run: docker run --rm --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated -w /stash $COMPILER_IMAGE /bin/bash -c "make pre-ui"
- name: Generate
run: make generate
run: docker run --rm --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated -w /stash $COMPILER_IMAGE /bin/bash -c "make generate"
# TODO: Replace with `make validate` once `revive` is bundled in COMPILER_IMAGE
- name: Validate
run: make ui-validate fmt-check vet it
run: docker run --rm --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated -w /stash $COMPILER_IMAGE /bin/bash -c "make ui-validate fmt-check vet it"
- name: Build UI
run: make ui-only
run: docker run --rm --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated -w /stash $COMPILER_IMAGE /bin/bash -c "make ui-only"
- name: Cross Compile
run: |
docker pull stashapp/compiler:4
./scripts/cross-compile.sh
- name: Compile for all supported platforms
run: ./scripts/cross-compile.sh
- name: Generate checksums
run: |
@ -93,14 +84,12 @@ jobs:
- name: Development Release
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
uses: meeDamian/github-release@2.0
uses: marvinpinto/action-automatic-releases@v1.1.2
with:
token: "${{ secrets.GITHUB_TOKEN }}"
repo_token: "${{ secrets.GITHUB_TOKEN }}"
prerelease: true
allow_override: true
tag: latest_develop
name: "${{ env.STASH_VERSION }}: Latest development build"
body: "**${{ env.RELEASE_DATE }}**\n This is always the latest committed version on the develop branch. Use at your own risk!"
automatic_release_tag: latest_develop
title: "${{ env.STASH_VERSION }}: Latest development build"
files: |
dist/stash-osx
dist/stash-win.exe
@ -109,8 +98,7 @@ jobs:
dist/stash-linux-arm32v7
dist/stash-pi
CHECKSUMS_SHA1
gzip: false
- name: Master release
if: ${{ github.event_name == 'release' && github.ref != 'refs/tags/latest_develop' }}
uses: meeDamian/github-release@2.0

View file

@ -8,36 +8,36 @@ https://stashapp.cc
**Stash is a locally hosted web-based app written in Go which organizes and serves your porn.**
* It can gather information about videos in your collection from the internet, and is extensible through the use of community-built plugins.
* It supports a wide variety of both video and image formats
* It can gather information about videos in your collection from the internet, and is extensible through the use of community-built plugins for a large number of content producers.
* It supports a wide variety of both video and image formats.
* You can tag videos and find them later.
* It provides statistics about performers, tags, studios and other things.
You can [watch a demo video](https://vimeo.com/275537038) to see it in action (password is stashapp).
You can [watch an SFW demo video](https://vimeo.com/545323354) to see it in action.
For further information you can [read the in-app manual](https://github.com/stashapp/stash/tree/develop/ui/v2.5/src/docs/en).
For further information you can [read the in-app manual](ui/v2.5/src/docs/en).
# Installing stash
## Docker install
## via Docker
Follow [this README.md in the docker directory.](docker/production/README.md)
## Pre-Compiled Binaries
Stash supports macOS, Windows, and Linux. Download the [latest release here](https://github.com/stashapp/stash/releases).
The Stash server runs on macOS, Windows, and Linux. Download the [latest release here](https://github.com/stashapp/stash/releases).
Run the executable (double-click the exe on Windows or run `./stash-osx` / `./stash-linux` from the terminal on macOS / Linux) and navigate to either https://localhost:9999 or http://localhost:9999 to get started.
*Note for Windows users:* Running the app might present a security prompt since the binary isn't signed yet. Just click more info and then the "run anyway" button.
*Note for Windows users:* Running the app might present a security prompt since the binary isn't yet signed. Bypass this by clicking "more info" and then the "run anyway" button.
#### FFMPEG
If Stash is unable to find or download FFMPEG, then download it yourself from the link for your platform:
* [macOS](https://ffmpeg.zeranoe.com/builds/macos64/static/ffmpeg-4.0-macos64-static.zip)
* [Windows](https://ffmpeg.zeranoe.com/builds/win64/static/ffmpeg-4.0-win64-static.zip)
* [Linux](https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz)
* [macOS ffmpeg](https://evermeet.cx/ffmpeg/ffmpeg-4.3.1.zip), [macOS ffprobe](https://evermeet.cx/ffmpeg/ffprobe-4.3.1.zip)
* [Windows](https://www.gyan.dev/ffmpeg/builds/ffmpeg-release-essentials.zip)
* [Linux](https://www.johnvansickle.com/ffmpeg/)
The `ffmpeg(.exe)` and `ffprobe(.exe)` files should be placed in `~/.stash` on macOS / Linux or `C:\Users\YourUsername\.stash` on Windows.
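As a quick sanity check that the binaries ended up where Stash expects them, here is a minimal sketch in Go. The `~/.stash` default and the `.exe` suffix on Windows are taken from the note above; this is an illustration only, not Stash's actual lookup code.

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"runtime"
)

// Check that manually downloaded ffmpeg/ffprobe binaries sit in the default
// config directory described above. Paths and suffixes are assumptions based
// on this README, not on Stash's internal lookup logic.
func main() {
	home, err := os.UserHomeDir()
	if err != nil {
		panic(err)
	}
	suffix := ""
	if runtime.GOOS == "windows" {
		suffix = ".exe"
	}
	for _, name := range []string{"ffmpeg", "ffprobe"} {
		p := filepath.Join(home, ".stash", name+suffix)
		if _, err := os.Stat(p); err != nil {
			fmt.Printf("missing: %s (%v)\n", p, err)
		} else {
			fmt.Printf("found:   %s\n", p)
		}
	}
}
```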
@ -48,9 +48,9 @@ The `ffmpeg(.exe)` and `ffprobe(.exe)` files should be placed in `~/.stash` on m
2) Run Stash. It will prompt you for some configuration options and a directory to index (you can also do this step afterward)
3) After configuration, launch your web browser and navigate to the URL shown within the Stash app.
**Note that Stash does not currently retrieve and organize information about your entire library automatically.** You will need to help it along through the use of [scrapers](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/docs/en/Scraping.md). The Stash community has developed scrapers for many popular data sources which can be downloaded and installed from [this repository](https://github.com/stashapp/CommunityScrapers).
**Note that Stash does not currently retrieve and organize information about your entire library automatically.** You will need to help it along through the use of [scrapers](blob/develop/ui/v2.5/src/docs/en/Scraping.md). The Stash community has developed scrapers for many popular data sources which can be downloaded and installed from [this repository](https://github.com/stashapp/CommunityScrapers).
The simplest way to tag a large number of files is by using the [Tagger](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/docs/en/Tagger.md) which uses filename keywords to help identify the file and pull in scene and performer information from our database. Note that this information is not comprehensive and you may need to use the scrapers to identify some of your media.
The simplest way to tag a large number of files is by using the [Tagger](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/docs/en/Tagger.md) which uses filename keywords to help identify the file and pull in scene and performer information from our stash-box database. Note that this data source is not comprehensive and you may need to use the scrapers to identify some of your media.
## CLI
@ -60,7 +60,7 @@ For example, to run stash locally on port 80 run it like this (OSX / Linux) `sta
## SSL (HTTPS)
Stash supports HTTPS with some additional work. First you must generate an SSL certificate and key combo. Here is an example using openssl:
Stash can run over HTTPS with some additional work. First you must generate an SSL certificate and key combo. Here is an example using openssl:
`openssl req -x509 -newkey rsa:4096 -sha256 -days 7300 -nodes -keyout stash.key -out stash.crt -extensions san -config <(echo "[req]"; echo distinguished_name=req; echo "[san]"; echo subjectAltName=DNS:stash.server,IP:127.0.0.1) -subj /CN=stash.server`
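Stash itself picks up the resulting `stash.crt` and `stash.key` files as described in the following note. Purely as an illustration of how a Go server consumes such a certificate/key pair, here is a minimal sketch; it is not Stash's actual server setup (which lives in `pkg/api`).

```go
package main

import (
	"log"
	"net/http"
)

// Minimal sketch: serve HTTPS using the stash.crt / stash.key pair generated
// with the openssl command above. Illustrative only; Stash's real HTTP server
// wiring differs from this.
func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("ok"))
	})
	log.Fatal(http.ListenAndServeTLS(":9999", "stash.crt", "stash.key", mux))
}
```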
@ -70,31 +70,30 @@ Once you have a certificate and key file name them `stash.crt` and `stash.key` a
# Customization
## Themes
There is a [directory of themes](https://github.com/stashapp/stash/wiki/Themes) on our Wiki, along with instructions on how to install them.
## Themes and CSS Customization
There is a [directory of community-created themes](https://github.com/stashapp/stash/wiki/Themes) on our Wiki, along with instructions on how to install them.
## CSS Customization
You can make the Stash interface fit your desired style with [Custom CSS snippets](https://github.com/stashapp/stash/wiki/Custom-CSS-snippets) and [CSS Tweaks](https://github.com/stashapp/stash/wiki/CSS-Tweaks).
You can also make the Stash interface fit your desired style with [Custom CSS snippets](https://github.com/stashapp/stash/wiki/Custom-CSS-snippets) and [CSS Tweaks](https://github.com/stashapp/stash/wiki/CSS-Tweaks).
# Support
# Support (FAQ)
Answers to frequently asked questions can be found [on our Wiki](https://github.com/stashapp/stash/wiki/FAQ).
Answers to other Frequently Asked Questions can be found [on our Wiki](https://github.com/stashapp/stash/wiki/FAQ).
For issues not addressed there, there are a few options.
* Read the [Wiki](https://github.com/stashapp/stash/wiki)
* Check the in-app documentation (also available [here](https://github.com/stashapp/stash/tree/develop/ui/v2.5/src/docs/en))
* Join the [Discord server](https://discord.gg/2TsNFKt).
* Join the [Discord server](https://discord.gg/2TsNFKt), where the community can offer support.
# Building From Source Code
# Compiling From Source Code
## Install
## Pre-requisites
* [Go](https://golang.org/dl/)
* [Revive](https://github.com/mgechev/revive) - Configurable linter
* Go Install: `go get github.com/mgechev/revive`
* [Packr2](https://github.com/gobuffalo/packr/tree/v2.0.2/v2) - Static asset bundler
* Go Install: `go get github.com/gobuffalo/packr/v2/packr2@v2.0.2`
* [Packr2](https://github.com/gobuffalo/packr/) - Static asset bundler
* Go Install: `go get github.com/gobuffalo/packr/v2/packr2`
* [Binary Download](https://github.com/gobuffalo/packr/releases)
* [Yarn](https://yarnpkg.com/en/docs/install) - Yarn package manager
* Run `yarn install --frozen-lockfile` in the `stash/ui/v2.5` folder (before running make generate for first time).
@ -141,7 +140,7 @@ NOTE: The `make` command in Windows will be `mingw32-make` with MingW.
## Cross compiling
This project uses a modification of [this](https://github.com/bep/dockerfiles/tree/master/ci-goreleaser) docker container to create an environment
This project uses a modification of the [CI-GoReleaser](https://github.com/bep/dockerfiles/tree/master/ci-goreleaser) docker container to create an environment
where the app can be cross-compiled. This process is kicked off by CI via the `scripts/cross-compile.sh` script. Run the following
command to open a bash shell to the container to poke around:

View file

@ -53,6 +53,8 @@ FROM ubuntu:20.04 as app
RUN apt-get update && apt-get -y install ca-certificates
COPY --from=compiler /stash/stash /ffmpeg/ffmpeg /ffmpeg/ffprobe /usr/bin/
ENV STASH_CONFIG_FILE=/root/.stash/config.yml
EXPOSE 9999
CMD ["stash"]

View file

@ -12,6 +12,8 @@ FROM ubuntu:20.04 as app
run apt update && apt install -y python3 python3 python-is-python3 python3-requests ffmpeg && rm -rf /var/lib/apt/lists/*
COPY --from=prep /stash /usr/bin/
ENV STASH_CONFIG_FILE=/root/.stash/config.yml
EXPOSE 9999
CMD ["stash"]

View file

@ -20,5 +20,8 @@ RUN curl --http1.1 -o /ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/releases/f
FROM ubuntu:20.04 as app
RUN apt-get update && apt-get -y install ca-certificates
COPY --from=prep /stash /ffmpeg/ffmpeg /ffmpeg/ffprobe /usr/bin/
ENV STASH_CONFIG_FILE=/root/.stash/config.yml
EXPOSE 9999
CMD ["stash"]

View file

@ -20,5 +20,8 @@ RUN curl --http1.1 -o /ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/releases/f
FROM ubuntu:20.04 as app
RUN apt-get update && apt-get -y install ca-certificates
COPY --from=prep /stash /ffmpeg/ffmpeg /ffmpeg/ffprobe /usr/bin/
ENV STASH_CONFIG_FILE=/root/.stash/config.yml
EXPOSE 9999
CMD ["stash"]

go.mod (4 changed lines)
View file

@ -6,6 +6,8 @@ require (
github.com/antchfx/htmlquery v1.2.3
github.com/chromedp/cdproto v0.0.0-20200608134039-8a80cdaf865c
github.com/chromedp/chromedp v0.5.3
github.com/corona10/goimagehash v1.0.3
github.com/dgrijalva/jwt-go v3.2.0+incompatible
github.com/disintegration/imaging v1.6.0
github.com/fvbommel/sortorder v1.0.2
github.com/go-chi/chi v4.0.2+incompatible
@ -31,7 +33,7 @@ require (
github.com/vektah/gqlparser/v2 v2.0.1
github.com/vektra/mockery/v2 v2.2.1
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9
golang.org/x/image v0.0.0-20190802002840-cff245a6509b
golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb
golang.org/x/net v0.0.0-20200822124328-c89045814202
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd
golang.org/x/tools v0.0.0-20200915031644-64986481280e // indirect
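The new `github.com/corona10/goimagehash` and `nfnt/resize` requirements back the phash features added elsewhere in this release. A hedged sketch of what the library provides — computing perceptual hashes and comparing them by Hamming distance. Stash's actual pipeline hashes frames extracted from videos via ffmpeg, which is not shown here; the image filenames below are placeholders.

```go
package main

import (
	"fmt"
	"image"
	_ "image/jpeg"
	"os"

	"github.com/corona10/goimagehash"
)

// Illustration only: compute perceptual hashes for two still images
// (a.jpg / b.jpg are placeholder inputs) and report their Hamming distance.
func main() {
	h1, err := hashFile("a.jpg")
	if err != nil {
		panic(err)
	}
	h2, err := hashFile("b.jpg")
	if err != nil {
		panic(err)
	}
	dist, err := h1.Distance(h2)
	if err != nil {
		panic(err)
	}
	fmt.Printf("phash a=%s b=%s distance=%d\n", h1.ToString(), h2.ToString(), dist)
}

func hashFile(path string) (*goimagehash.ImageHash, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	img, _, err := image.Decode(f)
	if err != nil {
		return nil, err
	}
	return goimagehash.PerceptionHash(img)
}
```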

go.sum (9 changed lines)
View file

@ -83,6 +83,8 @@ github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3Ee
github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/corona10/goimagehash v1.0.3 h1:NZM518aKLmoNluluhfHGxT3LGOnrojrxhGn63DR/CZA=
github.com/corona10/goimagehash v1.0.3/go.mod h1:VkvE0mLn84L4aF8vCb6mafVajEb6QYMHl2ZJLn0mOGI=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM=
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
@ -99,6 +101,7 @@ github.com/cznic/zappy v0.0.0-20160723133515-2533cb5b45cc/go.mod h1:Y1SNZ4dRUOKX
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
github.com/dgryski/trifles v0.0.0-20190318185328-a8d75aae118c/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA=
@ -538,8 +541,6 @@ github.com/mattn/go-sqlite3 v1.9.0 h1:pDRiWfl+++eC2FEFRy6jXmQlvp4Yh3z1MJKg4UeYM/
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.10.0 h1:jbhqpg7tQe4SupckyijYiy0mJJ/pRyHvXf7JdWK860o=
github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.13.0 h1:LnJI81JidiW9r7pS/hXe6cFeO5EXNq7KbfvoJLRI69c=
github.com/mattn/go-sqlite3 v1.13.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg=
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
@ -574,6 +575,8 @@ github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007 h1:Ohgj9L0EYOgXxkDp+
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007/go.mod h1:wKCOWMb6iNlvKiOToY2cNuaovSXvIiv1zDi9QDR7aGQ=
github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo=
github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM=
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ=
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
github.com/nicksnyder/go-i18n v1.10.0/go.mod h1:HrK7VCrbOvQoUAQ7Vpy7i87N7JZZZ7R2xBGjv0j365Q=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
@ -804,6 +807,8 @@ golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86h
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b h1:+qEpEAPhDZ1o0x3tHzZTQDArnOixOzGD9HUJfcg0mb4=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb h1:fqpd0EBDzlHRCjiphRR5Zo/RSWWQlWv34418dnEixWk=
golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=

View file

@ -2,7 +2,7 @@ fragment ConfigGeneralData on ConfigGeneralResult {
stashes {
path
excludeVideo
excludeImage
excludeImage
}
databasePath
generatedPath
@ -17,6 +17,7 @@ fragment ConfigGeneralData on ConfigGeneralResult {
previewPreset
maxTranscodeSize
maxStreamingTranscodeSize
apiKey
username
password
maxSessionAge
@ -51,6 +52,7 @@ fragment ConfigInterfaceData on ConfigInterfaceResult {
css
cssEnabled
language
slideshowDelay
}
fragment ConfigData on ConfigResult {

View file

@ -1,4 +1,4 @@
fragment GallerySlimData on Gallery {
fragment SlimGalleryData on Gallery {
id
checksum
path
@ -10,16 +10,31 @@ fragment GallerySlimData on Gallery {
organized
image_count
cover {
...SlimImageData
file {
size
width
height
}
paths {
thumbnail
}
}
studio {
...StudioData
id
name
image_path
}
tags {
...TagData
id
name
}
performers {
...PerformerData
id
name
gender
favorite
image_path
}
scenes {
id

View file

@ -15,16 +15,16 @@ fragment GalleryData on Gallery {
...SlimImageData
}
studio {
...StudioData
...SlimStudioData
}
tags {
...TagData
...SlimTagData
}
performers {
...PerformerData
}
scenes {
...SceneData
...SlimSceneData
}
}

View file

@ -38,6 +38,7 @@ fragment SlimImageData on Image {
performers {
id
name
gender
favorite
image_path
}

View file

@ -23,11 +23,11 @@ fragment ImageData on Image {
}
studio {
...StudioData
...SlimStudioData
}
tags {
...TagData
...SlimTagData
}
performers {

View file

@ -9,7 +9,7 @@ fragment MovieData on Movie {
director
studio {
...StudioData
...SlimStudioData
}
synopsis

View file

@ -12,4 +12,5 @@ fragment SlimPerformerData on Performer {
endpoint
stash_id
}
rating
}

View file

@ -20,13 +20,20 @@ fragment PerformerData on Performer {
favorite
image_path
scene_count
image_count
gallery_count
tags {
...TagData
...SlimTagData
}
stash_ids {
stash_id
endpoint
}
rating
details
death_date
hair_color
weight
}

View file

@ -10,6 +10,7 @@ fragment SlimSceneData on Scene {
o_counter
organized
path
phash
file {
size
@ -29,6 +30,7 @@ fragment SlimSceneData on Scene {
webp
vtt
chapters_vtt
sprite
}
scene_markers {
@ -66,6 +68,7 @@ fragment SlimSceneData on Scene {
performers {
id
name
gender
favorite
image_path
}

View file

@ -10,6 +10,7 @@ fragment SceneData on Scene {
o_counter
organized
path
phash
file {
size
@ -36,11 +37,11 @@ fragment SceneData on Scene {
}
galleries {
...GallerySlimData
...SlimGalleryData
}
studio {
...StudioData
...SlimStudioData
}
movies {
@ -51,7 +52,7 @@ fragment SceneData on Scene {
}
tags {
...TagData
...SlimTagData
}
performers {

View file

@ -19,6 +19,10 @@ fragment ScrapedPerformerData on ScrapedPerformer {
...ScrapedSceneTagData
}
image
details
death_date
hair_color
weight
}
fragment ScrapedScenePerformerData on ScrapedScenePerformer {
@ -44,6 +48,10 @@ fragment ScrapedScenePerformerData on ScrapedScenePerformer {
}
remote_site_id
images
details
death_date
hair_color
weight
}
fragment ScrapedMovieStudioData on ScrapedMovieStudio {
@ -189,3 +197,10 @@ fragment ScrapedStashBoxSceneData on ScrapedScene {
...ScrapedSceneMovieData
}
}
fragment ScrapedStashBoxPerformerData on StashBoxPerformerQueryResult {
query
results {
...ScrapedScenePerformerData
}
}

View file

@ -9,4 +9,6 @@ fragment SlimStudioData on Studio {
parent_studio {
id
}
details
rating
}

View file

@ -10,6 +10,8 @@ fragment StudioData on Studio {
url
image_path
scene_count
image_count
gallery_count
}
child_studios {
id
@ -18,11 +20,17 @@ fragment StudioData on Studio {
url
image_path
scene_count
image_count
gallery_count
}
image_path
scene_count
image_count
gallery_count
stash_ids {
stash_id
endpoint
}
details
rating
}

View file

@ -0,0 +1,5 @@
fragment SlimTagData on Tag {
id
name
image_path
}

View file

@ -4,5 +4,7 @@ fragment TagData on Tag {
image_path
scene_count
scene_marker_count
image_count
gallery_count
performer_count
}

View file

@ -1,3 +1,11 @@
mutation Setup($input: SetupInput!) {
setup(input: $input)
}
mutation Migrate($input: MigrateInput!) {
migrate(input: $input)
}
mutation ConfigureGeneral($input: ConfigGeneralInput!) {
configureGeneral(input: $input) {
...ConfigGeneralData
@ -8,4 +16,8 @@ mutation ConfigureInterface($input: ConfigInterfaceInput!) {
configureInterface(input: $input) {
...ConfigInterfaceData
}
}
}
mutation GenerateAPIKey($input: GenerateAPIKeyInput!) {
generateAPIKey(input: $input)
}

View file

@ -1,47 +1,7 @@
mutation PerformerCreate(
$name: String!,
$url: String,
$gender: GenderEnum,
$birthdate: String,
$ethnicity: String,
$country: String,
$eye_color: String,
$height: String,
$measurements: String,
$fake_tits: String,
$career_length: String,
$tattoos: String,
$piercings: String,
$aliases: String,
$twitter: String,
$instagram: String,
$favorite: Boolean,
$tag_ids: [ID!],
$stash_ids: [StashIDInput!],
$image: String) {
$input: PerformerCreateInput!) {
performerCreate(input: {
name: $name,
url: $url,
gender: $gender,
birthdate: $birthdate,
ethnicity: $ethnicity,
country: $country,
eye_color: $eye_color,
height: $height,
measurements: $measurements,
fake_tits: $fake_tits,
career_length: $career_length,
tattoos: $tattoos,
piercings: $piercings,
aliases: $aliases,
twitter: $twitter,
instagram: $instagram,
favorite: $favorite,
tag_ids: $tag_ids,
stash_ids: $stash_ids,
image: $image
}) {
performerCreate(input: $input) {
...PerformerData
}
}

View file

@ -1,3 +1,7 @@
mutation SubmitStashBoxFingerprints($input: StashBoxFingerprintSubmissionInput!) {
submitStashBoxFingerprints(input: $input)
}
mutation StashBoxBatchPerformerTag($input: StashBoxBatchPerformerTagInput!) {
stashBoxBatchPerformerTag(input: $input)
}

View file

@ -1,18 +1,10 @@
mutation StudioCreate(
$name: String!,
$url: String,
$image: String,
$stash_ids: [StashIDInput!],
$parent_id: ID) {
studioCreate(input: { name: $name, url: $url, image: $image, stash_ids: $stash_ids, parent_id: $parent_id }) {
mutation StudioCreate($input: StudioCreateInput!) {
studioCreate(input: $input) {
...StudioData
}
}
mutation StudioUpdate(
$input: StudioUpdateInput!) {
mutation StudioUpdate($input: StudioUpdateInput!) {
studioUpdate(input: $input) {
...StudioData
}

View file

@ -2,7 +2,7 @@ query FindGalleries($filter: FindFilterType, $gallery_filter: GalleryFilterType)
findGalleries(gallery_filter: $gallery_filter, filter: $filter) {
count
galleries {
...GallerySlimData
...SlimGalleryData
}
}
}

View file

@ -16,6 +16,12 @@ query FindScenesByPathRegex($filter: FindFilterType) {
}
}
query FindDuplicateScenes($distance: Int) {
findDuplicateScenes(distance: $distance) {
...SlimSceneData
}
}
query FindScene($id: ID!, $checksum: String) {
findScene(id: $id, checksum: $checksum) {
...SceneData

View file

@ -15,6 +15,10 @@ query ScrapeFreeones($performer_name: String!) {
tattoos
piercings
aliases
details
death_date
hair_color
weight
}
}

View file

@ -90,8 +90,14 @@ query ScrapeMovieURL($url: String!) {
}
}
query QueryStashBoxScene($input: StashBoxQueryInput!) {
query QueryStashBoxScene($input: StashBoxSceneQueryInput!) {
queryStashBoxScene(input: $input) {
...ScrapedStashBoxSceneData
}
}
query QueryStashBoxPerformer($input: StashBoxPerformerQueryInput!) {
queryStashBoxPerformer(input: $input) {
...ScrapedStashBoxPerformerData
}
}

View file

@ -5,3 +5,13 @@ query JobStatus {
message
}
}
query SystemStatus {
systemStatus {
databaseSchema
databasePath
appSchema
status
configPath
}
}

View file

@ -9,6 +9,9 @@ type Query {
findScenesByPathRegex(filter: FindFilterType): FindScenesResultType!
""" Returns any groups of scenes that are perceptual duplicates within the queried distance """
findDuplicateScenes(distance: Int): [[Scene!]!]!
"""Return valid stream paths"""
sceneStreams(id: ID): [SceneStreamEndpoint!]!
@ -88,7 +91,8 @@ type Query {
scrapeFreeonesPerformerList(query: String!): [String!]!
"""Query StashBox for scenes"""
queryStashBoxScene(input: StashBoxQueryInput!): [ScrapedScene!]!
queryStashBoxScene(input: StashBoxSceneQueryInput!): [ScrapedScene!]!
queryStashBoxPerformer(input: StashBoxPerformerQueryInput!): [StashBoxPerformerQueryResult!]!
# Plugins
"""List loaded plugins"""
@ -103,7 +107,7 @@ type Query {
directory(path: String): Directory!
# Metadata
systemStatus: SystemStatus!
jobStatus: MetadataUpdateStatus!
# Get everything
@ -123,6 +127,9 @@ type Query {
}
type Mutation {
setup(input: SetupInput!): Boolean!
migrate(input: MigrateInput!): Boolean!
sceneUpdate(input: SceneUpdateInput!): Scene
bulkSceneUpdate(input: BulkSceneUpdateInput!): [Scene!]
sceneDestroy(input: SceneDestroyInput!): Boolean!
@ -190,6 +197,9 @@ type Mutation {
configureGeneral(input: ConfigGeneralInput!): ConfigGeneralResult!
configureInterface(input: ConfigInterfaceInput!): ConfigInterfaceResult!
"""Generate and set (or clear) API key"""
generateAPIKey(input: GenerateAPIKeyInput!): String!
"""Returns a link to download the result"""
exportObjects(input: ExportObjectsInput!): String
@ -225,6 +235,9 @@ type Mutation {
"""Backup the database. Optionally returns a link to download the database file"""
backupDatabase(input: BackupDatabaseInput!): String
"""Run batch performer tag task. Returns the job ID."""
stashBoxBatchPerformerTag(input: StashBoxBatchPerformerTagInput!): String!
}
type Subscription {

View file

@ -1,3 +1,13 @@
input SetupInput {
"""Empty to indicate $HOME/.stash/config.yml default"""
configLocation: String!
stashes: [StashConfigInput!]!
"""Empty to indicate default"""
databaseFile: String!
"""Empty to indicate default"""
generatedLocation: String!
}
enum StreamingResolutionEnum {
"240p", LOW
"480p", STANDARD
@ -120,6 +130,8 @@ type ConfigGeneralResult {
maxTranscodeSize: StreamingResolutionEnum
"""Max streaming transcode size"""
maxStreamingTranscodeSize: StreamingResolutionEnum
"""API Key"""
apiKey: String!
"""Username"""
username: String!
"""Password"""
@ -176,6 +188,8 @@ input ConfigInterfaceInput {
cssEnabled: Boolean
"""Interface language"""
language: String
"""Slideshow Delay"""
slideshowDelay: Int
}
type ConfigInterfaceResult {
@ -198,6 +212,8 @@ type ConfigInterfaceResult {
cssEnabled: Boolean
"""Interface language"""
language: String
"""Slideshow Delay"""
slideshowDelay: Int
}
"""All configuration settings"""
@ -225,3 +241,7 @@ type StashConfig {
excludeVideo: Boolean!
excludeImage: Boolean!
}
input GenerateAPIKeyInput {
clear: Boolean
}

View file

@ -6,7 +6,7 @@ enum SortDirectionEnum {
input FindFilterType {
q: String
page: Int
"""use per_page = 0 to indicate all results. Defaults to 25."""
"""use per_page = -1 to indicate all results. Defaults to 25."""
per_page: Int
sort: String
direction: SortDirectionEnum
@ -47,7 +47,7 @@ input PerformerFilterType {
measurements: StringCriterionInput
"""Filter by fake tits value"""
fake_tits: StringCriterionInput
"""Filter by career length"""
"""Filter by career length"""
career_length: StringCriterionInput
"""Filter by tattoos"""
tattoos: StringCriterionInput
@ -61,8 +61,26 @@ input PerformerFilterType {
is_missing: String
"""Filter to only include performers with these tags"""
tags: MultiCriterionInput
"""Filter by tag count"""
tag_count: IntCriterionInput
"""Filter by scene count"""
scene_count: IntCriterionInput
"""Filter by image count"""
image_count: IntCriterionInput
"""Filter by gallery count"""
gallery_count: IntCriterionInput
"""Filter by StashID"""
stash_id: String
stash_id: StringCriterionInput
"""Filter by rating"""
rating: IntCriterionInput
"""Filter by url"""
url: StringCriterionInput
"""Filter by hair color"""
hair_color: StringCriterionInput
"""Filter by weight"""
weight: IntCriterionInput
"""Filter by death year"""
death_year: IntCriterionInput
}
input SceneMarkerFilterType {
@ -80,7 +98,7 @@ input SceneFilterType {
AND: SceneFilterType
OR: SceneFilterType
NOT: SceneFilterType
"""Filter by path"""
path: StringCriterionInput
"""Filter by rating"""
@ -103,12 +121,18 @@ input SceneFilterType {
movies: MultiCriterionInput
"""Filter to only include scenes with these tags"""
tags: MultiCriterionInput
"""Filter by tag count"""
tag_count: IntCriterionInput
"""Filter to only include scenes with performers with these tags"""
performer_tags: MultiCriterionInput
"""Filter to only include scenes with these performers"""
performers: MultiCriterionInput
"""Filter by performer count"""
performer_count: IntCriterionInput
"""Filter by StashID"""
stash_id: String
stash_id: StringCriterionInput
"""Filter by url"""
url: StringCriterionInput
}
input MovieFilterType {
@ -116,18 +140,34 @@ input MovieFilterType {
studios: MultiCriterionInput
"""Filter to only include movies missing this property"""
is_missing: String
"""Filter by url"""
url: StringCriterionInput
}
input StudioFilterType {
"""Filter to only include studios with this parent studio"""
parents: MultiCriterionInput
"""Filter by StashID"""
stash_id: String
stash_id: StringCriterionInput
"""Filter to only include studios missing this property"""
is_missing: String
"""Filter by rating"""
rating: IntCriterionInput
"""Filter by scene count"""
scene_count: IntCriterionInput
"""Filter by image count"""
image_count: IntCriterionInput
"""Filter by gallery count"""
gallery_count: IntCriterionInput
"""Filter by url"""
url: StringCriterionInput
}
input GalleryFilterType {
AND: GalleryFilterType
OR: GalleryFilterType
NOT: GalleryFilterType
"""Filter by path"""
path: StringCriterionInput
"""Filter to only include galleries missing this property"""
@ -144,12 +184,18 @@ input GalleryFilterType {
studios: MultiCriterionInput
"""Filter to only include galleries with these tags"""
tags: MultiCriterionInput
"""Filter by tag count"""
tag_count: IntCriterionInput
"""Filter to only include galleries with performers with these tags"""
performer_tags: MultiCriterionInput
"""Filter to only include galleries with these performers"""
performers: MultiCriterionInput
"""Filter by performer count"""
performer_count: IntCriterionInput
"""Filter by number of images in this gallery"""
image_count: IntCriterionInput
"""Filter by url"""
url: StringCriterionInput
}
input TagFilterType {
@ -177,6 +223,10 @@ input TagFilterType {
}
input ImageFilterType {
AND: ImageFilterType
OR: ImageFilterType
NOT: ImageFilterType
"""Filter by path"""
path: StringCriterionInput
"""Filter by rating"""
@ -193,10 +243,14 @@ input ImageFilterType {
studios: MultiCriterionInput
"""Filter to only include images with these tags"""
tags: MultiCriterionInput
"""Filter by tag count"""
tag_count: IntCriterionInput
"""Filter to only include images with performers with these tags"""
performer_tags: MultiCriterionInput
"""Filter to only include images with these performers"""
performers: MultiCriterionInput
"""Filter by performer count"""
performer_count: IntCriterionInput
"""Filter to only include images with these galleries"""
galleries: MultiCriterionInput
}

View file

@ -7,6 +7,7 @@ input GenerateMetadataInput {
previewOptions: GeneratePreviewOptionsInput
markers: Boolean!
transcodes: Boolean!
phashes: Boolean!
"""scene ids to generate for"""
sceneIDs: [ID!]
@ -42,6 +43,8 @@ input ScanMetadataInput {
scanGenerateImagePreviews: Boolean
"""Generate sprites during scan"""
scanGenerateSprites: Boolean
"""Generate phashes during scan"""
scanGeneratePhashes: Boolean
}
input CleanMetadataInput {
@ -103,3 +106,21 @@ input ImportObjectsInput {
input BackupDatabaseInput {
download: Boolean
}
enum SystemStatusEnum {
SETUP
NEEDS_MIGRATION
OK
}
type SystemStatus {
databaseSchema: Int
databasePath: String
configPath: String
appSchema: Int!
status: SystemStatusEnum!
}
input MigrateInput {
backupPath: String!
}
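The new `SystemStatusEnum`, `SystemStatus`, and `MigrateInput` types define the startup handshake that replaces the old HTML migrate page removed later in this diff. A hedged sketch of how a client might branch on the status values; the enum comes from the schema above, while the follow-up actions are illustrative only.

```go
package main

import "fmt"

// SystemStatusEnum mirrors the schema enum above.
type SystemStatusEnum string

const (
	StatusSetup          SystemStatusEnum = "SETUP"
	StatusNeedsMigration SystemStatusEnum = "NEEDS_MIGRATION"
	StatusOK             SystemStatusEnum = "OK"
)

func main() {
	status := StatusNeedsMigration // in practice, returned by the systemStatus query
	switch status {
	case StatusSetup:
		fmt.Println("no config yet: call the setup mutation with a SetupInput")
	case StatusNeedsMigration:
		fmt.Println("schema is outdated: call the migrate mutation, optionally with a backupPath")
	case StatusOK:
		fmt.Println("ready: proceed with normal queries")
	}
}
```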

View file

@ -28,8 +28,9 @@ input MovieCreateInput {
director: String
synopsis: String
url: String
"""This should be base64 encoded"""
"""This should be a URL or a base64 encoded data URL"""
front_image: String
"""This should be a URL or a base64 encoded data URL"""
back_image: String
}
@ -44,8 +45,9 @@ input MovieUpdateInput {
director: String
synopsis: String
url: String
"""This should be base64 encoded"""
"""This should be a URL or a base64 encoded data URL"""
front_image: String
"""This should be a URL or a base64 encoded data URL"""
back_image: String
}

View file

@ -31,8 +31,15 @@ type Performer {
image_path: String # Resolver
scene_count: Int # Resolver
image_count: Int # Resolver
gallery_count: Int # Resolver
scenes: [Scene!]!
stash_ids: [StashID!]!
rating: Int
details: String
death_date: String
hair_color: String
weight: Int
}
input PerformerCreateInput {
@ -54,9 +61,14 @@ input PerformerCreateInput {
instagram: String
favorite: Boolean
tag_ids: [ID!]
"""This should be base64 encoded"""
"""This should be a URL or a base64 encoded data URL"""
image: String
stash_ids: [StashIDInput!]
rating: Int
details: String
death_date: String
hair_color: String
weight: Int
}
input PerformerUpdateInput {
@ -79,9 +91,14 @@ input PerformerUpdateInput {
instagram: String
favorite: Boolean
tag_ids: [ID!]
"""This should be base64 encoded"""
"""This should be a URL or a base64 encoded data URL"""
image: String
stash_ids: [StashIDInput!]
rating: Int
details: String
death_date: String
hair_color: String
weight: Int
}
input BulkPerformerUpdateInput {
@ -104,6 +121,11 @@ input BulkPerformerUpdateInput {
instagram: String
favorite: Boolean
tag_ids: BulkUpdateIds
rating: Int
details: String
death_date: String
hair_color: String
weight: Int
}
input PerformerDestroyInput {

View file

@ -16,6 +16,7 @@ type ScenePathsType {
webp: String # Resolver
vtt: String # Resolver
chapters_vtt: String # Resolver
sprite: String # Resolver
}
type SceneMovie {
@ -35,6 +36,7 @@ type Scene {
organized: Boolean!
o_counter: Int
path: String!
phash: String
file: SceneFileType! # Resolver
paths: ScenePathsType! # Resolver
@ -67,7 +69,7 @@ input SceneUpdateInput {
performer_ids: [ID!]
movies: [SceneMovieInput!]
tag_ids: [ID!]
"""This should be base64 encoded"""
"""This should be a URL or a base64 encoded data URL"""
cover_image: String
stash_ids: [StashIDInput!]
}

View file

@ -17,8 +17,9 @@ type ScrapedMovie {
synopsis: String
studio: ScrapedMovieStudio
"""This should be base64 encoded"""
"""This should be a base64 encoded data URL"""
front_image: String
"""This should be a base64 encoded data URL"""
back_image: String
}

View file

@ -19,8 +19,12 @@ type ScrapedPerformer {
# Should be ScrapedPerformerTag - but would be identical types
tags: [ScrapedSceneTag!]
"""This should be base64 encoded"""
"""This should be a base64 encoded data URL"""
image: String
details: String
death_date: String
hair_color: String
weight: String
}
input ScrapedPerformerInput {
@ -43,4 +47,8 @@ input ScrapedPerformerInput {
# not including tags for the input
# not including image for the input
details: String
death_date: String
hair_color: String
weight: String
}

View file

@ -49,6 +49,10 @@ type ScrapedScenePerformer {
remote_site_id: String
images: [String!]
details: String
death_date: String
hair_color: String
weight: String
}
type ScrapedSceneMovie {
@ -85,7 +89,7 @@ type ScrapedScene {
url: String
date: String
"""This should be base64 encoded"""
"""This should be a base64 encoded data URL"""
image: String
file: SceneFileType # Resolver
@ -111,7 +115,7 @@ type ScrapedGallery {
performers: [ScrapedScenePerformer!]
}
input StashBoxQueryInput {
input StashBoxSceneQueryInput {
"""Index of the configured stash-box instance to use"""
stash_box_index: Int!
"""Instructs query by scene fingerprints"""
@ -120,8 +124,30 @@ input StashBoxQueryInput {
q: String
}
input StashBoxPerformerQueryInput {
"""Index of the configured stash-box instance to use"""
stash_box_index: Int!
"""Instructs query by scene fingerprints"""
performer_ids: [ID!]
"""Query by query string"""
q: String
}
type StashBoxPerformerQueryResult {
query: String!
results: [ScrapedScenePerformer!]!
}
type StashBoxFingerprint {
algorithm: String!
hash: String!
duration: Int!
}
input StashBoxBatchPerformerTagInput {
endpoint: Int!
exclude_fields: [String!]
refresh: Boolean!
performer_ids: [ID!]
performer_names: [String!]
}

View file

@ -8,16 +8,22 @@ type Studio {
image_path: String # Resolver
scene_count: Int # Resolver
image_count: Int # Resolver
gallery_count: Int # Resolver
stash_ids: [StashID!]!
rating: Int
details: String
}
input StudioCreateInput {
name: String!
url: String
parent_id: ID
"""This should be base64 encoded"""
"""This should be a URL or a base64 encoded data URL"""
image: String
stash_ids: [StashIDInput!]
rating: Int
details: String
}
input StudioUpdateInput {
@ -25,9 +31,11 @@ input StudioUpdateInput {
name: String
url: String
parent_id: ID,
"""This should be base64 encoded"""
"""This should be a URL or a base64 encoded data URL"""
image: String
stash_ids: [StashIDInput!]
rating: Int
details: String
}
input StudioDestroyInput {

View file

@ -5,13 +5,15 @@ type Tag {
image_path: String # Resolver
scene_count: Int # Resolver
scene_marker_count: Int # Resolver
image_count: Int # Resolver
gallery_count: Int # Resolver
performer_count: Int
}
input TagCreateInput {
name: String!
"""This should be base64 encoded"""
"""This should be a URL or a base64 encoded data URL"""
image: String
}
@ -19,7 +21,7 @@ input TagUpdateInput {
id: ID!
name: String!
"""This should be base64 encoded"""
"""This should be a URL or a base64 encoded data URL"""
image: String
}

View file

@ -75,6 +75,11 @@ fragment PerformerFragment on Performer {
piercings {
...BodyModificationFragment
}
details
death_date {
...FuzzyDateFragment
}
weight
}
fragment PerformerAppearanceFragment on PerformerAppearance {
@ -134,6 +139,18 @@ query SearchScene($term: String!) {
}
}
query SearchPerformer($term: String!) {
searchPerformer(term: $term) {
...PerformerFragment
}
}
query FindPerformerByID($id: ID!) {
findPerformer(id: $id) {
...PerformerFragment
}
}
mutation SubmitFingerprint($input: FingerprintSubmission!) {
submitFingerprint(input: $input)
}

View file

@ -3,9 +3,7 @@ package main
import (
"github.com/stashapp/stash/pkg/api"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/manager/config"
_ "github.com/golang-migrate/migrate/v4/database/sqlite3"
_ "github.com/golang-migrate/migrate/v4/source/file"
@ -13,12 +11,6 @@ import (
func main() {
manager.Initialize()
// perform the post-migration for new databases
if database.Initialize(config.GetDatabasePath()) {
manager.GetInstance().PostMigrate()
}
api.Start()
blockForever()
}

View file

@ -1,96 +0,0 @@
package api
import (
"fmt"
"html/template"
"net/http"
"os"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager"
)
type migrateData struct {
ExistingVersion uint
MigrateVersion uint
BackupPath string
}
func getMigrateData() migrateData {
return migrateData{
ExistingVersion: database.Version(),
MigrateVersion: database.AppSchemaVersion(),
BackupPath: database.DatabaseBackupPath(),
}
}
func getMigrateHandler(w http.ResponseWriter, r *http.Request) {
if !database.NeedsMigration() {
http.Redirect(w, r, "/", 301)
return
}
data, _ := setupUIBox.Find("migrate.html")
templ, err := template.New("Migrate").Parse(string(data))
if err != nil {
http.Error(w, fmt.Sprintf("error: %s", err), 500)
return
}
err = templ.Execute(w, getMigrateData())
if err != nil {
http.Error(w, fmt.Sprintf("error: %s", err), 500)
}
}
func doMigrateHandler(w http.ResponseWriter, r *http.Request) {
err := r.ParseForm()
if err != nil {
http.Error(w, fmt.Sprintf("error: %s", err), 500)
}
formBackupPath := r.Form.Get("backuppath")
// always backup so that we can roll back to the previous version if
// migration fails
backupPath := formBackupPath
if formBackupPath == "" {
backupPath = database.DatabaseBackupPath()
}
// perform database backup
if err = database.Backup(database.DB, backupPath); err != nil {
http.Error(w, fmt.Sprintf("error backing up database: %s", err), 500)
return
}
err = database.RunMigrations()
if err != nil {
errStr := fmt.Sprintf("error performing migration: %s", err)
// roll back to the backed up version
restoreErr := database.RestoreFromBackup(backupPath)
if restoreErr != nil {
errStr = fmt.Sprintf("ERROR: unable to restore database from backup after migration failure: %s\n%s", restoreErr.Error(), errStr)
} else {
errStr = "An error occurred migrating the database to the latest schema version. The backup database file was automatically renamed to restore the database.\n" + errStr
}
http.Error(w, errStr, 500)
return
}
// perform post-migration operations
manager.GetInstance().PostMigrate()
// if no backup path was provided, then delete the created backup
if formBackupPath == "" {
err = os.Remove(backupPath)
if err != nil {
logger.Warnf("error removing unwanted database backup (%s): %s", backupPath, err.Error())
}
}
http.Redirect(w, r, "/", 301)
}

View file

@ -89,6 +89,24 @@ func (r *movieResolver) FrontImagePath(ctx context.Context, obj *models.Movie) (
}
func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (*string, error) {
// don't return anything if there is no back image
var img []byte
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
img, err = repo.Movie().GetBackImage(obj.ID)
if err != nil {
return err
}
return nil
}); err != nil {
return nil, err
}
if img == nil {
return nil, nil
}
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
backimagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj).GetMovieBackImageURL()
return &backimagePath, nil

View file

@ -4,6 +4,8 @@ import (
"context"
"github.com/stashapp/stash/pkg/api/urlbuilders"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
)
@ -161,6 +163,30 @@ func (r *performerResolver) SceneCount(ctx context.Context, obj *models.Performe
return &res, nil
}
func (r *performerResolver) ImageCount(ctx context.Context, obj *models.Performer) (ret *int, err error) {
var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
res, err = image.CountByPerformerID(repo.Image(), obj.ID)
return err
}); err != nil {
return nil, err
}
return &res, nil
}
func (r *performerResolver) GalleryCount(ctx context.Context, obj *models.Performer) (ret *int, err error) {
var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
res, err = gallery.CountByPerformerID(repo.Gallery(), obj.ID)
return err
}); err != nil {
return nil, err
}
return &res, nil
}
func (r *performerResolver) Scenes(ctx context.Context, obj *models.Performer) (ret []*models.Scene, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Scene().FindByPerformerID(obj.ID)
@ -182,3 +208,40 @@ func (r *performerResolver) StashIds(ctx context.Context, obj *models.Performer)
return ret, nil
}
func (r *performerResolver) Rating(ctx context.Context, obj *models.Performer) (*int, error) {
if obj.Rating.Valid {
rating := int(obj.Rating.Int64)
return &rating, nil
}
return nil, nil
}
func (r *performerResolver) Details(ctx context.Context, obj *models.Performer) (*string, error) {
if obj.Details.Valid {
return &obj.Details.String, nil
}
return nil, nil
}
func (r *performerResolver) DeathDate(ctx context.Context, obj *models.Performer) (*string, error) {
if obj.DeathDate.Valid {
return &obj.DeathDate.String, nil
}
return nil, nil
}
func (r *performerResolver) HairColor(ctx context.Context, obj *models.Performer) (*string, error) {
if obj.HairColor.Valid {
return &obj.HairColor.String, nil
}
return nil, nil
}
func (r *performerResolver) Weight(ctx context.Context, obj *models.Performer) (*int, error) {
if obj.Weight.Valid {
weight := int(obj.Weight.Int64)
return &weight, nil
}
return nil, nil
}
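The new count resolvers above all follow the same `withReadTxn` pattern: run a callback against a read transaction and surface its error. Below is a generic, self-contained sketch of that pattern over `database/sql`; Stash's real helper hands the callback a `models.ReaderRepository` rather than a `*sql.Tx`.

```go
package txnsketch

import (
	"context"
	"database/sql"
)

// Generic illustration of the read-transaction helper used by the resolvers
// above: begin a transaction, run the callback, then release the transaction.
// Stash's actual helper wraps its own repository types; this is a sketch.
func withReadTxn(ctx context.Context, db *sql.DB, fn func(tx *sql.Tx) error) error {
	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	// Reads never need to commit; rolling back simply releases the transaction.
	defer tx.Rollback()
	return fn(tx)
}
```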

View file

@ -4,6 +4,7 @@ import (
"context"
"github.com/stashapp/stash/pkg/api/urlbuilders"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
@ -78,11 +79,13 @@ func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.Sc
func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.ScenePathsType, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
builder := urlbuilders.NewSceneURLBuilder(baseURL, obj.ID)
builder.APIKey = config.GetInstance().GetAPIKey()
screenshotPath := builder.GetScreenshotURL(obj.UpdatedAt.Timestamp)
previewPath := builder.GetStreamPreviewURL()
streamPath := builder.GetStreamURL()
webpPath := builder.GetStreamPreviewImageURL()
vttPath := builder.GetSpriteVTTURL()
spritePath := builder.GetSpriteURL()
chaptersVttPath := builder.GetChaptersVTTURL()
return &models.ScenePathsType{
Screenshot: &screenshotPath,
@ -91,6 +94,7 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
Webp: &webpPath,
Vtt: &vttPath,
ChaptersVtt: &chaptersVttPath,
Sprite: &spritePath,
}, nil
}
@ -200,3 +204,11 @@ func (r *sceneResolver) StashIds(ctx context.Context, obj *models.Scene) (ret []
return ret, nil
}
func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.Phash.Valid {
hexval := utils.PhashToString(obj.Phash.Int64)
return &hexval, nil
}
return nil, nil
}
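The resolver exposes the stored 64-bit phash as a string via `utils.PhashToString`. A hedged guess at what that conversion could look like (a hex round-trip); the real helper in `pkg/utils` may differ.

```go
package phashsketch

import (
	"fmt"
	"strconv"
)

// Assumed behaviour of the phash string helpers: render the signed 64-bit
// hash as 16 hex digits and parse it back. Not the actual pkg/utils code.
func PhashToString(v int64) string {
	return fmt.Sprintf("%016x", uint64(v))
}

func PhashFromString(s string) (int64, error) {
	u, err := strconv.ParseUint(s, 16, 64)
	if err != nil {
		return 0, err
	}
	return int64(u), nil
}
```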

View file

@ -4,6 +4,8 @@ import (
"context"
"github.com/stashapp/stash/pkg/api/urlbuilders"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
)
@ -54,6 +56,30 @@ func (r *studioResolver) SceneCount(ctx context.Context, obj *models.Studio) (re
return &res, err
}
func (r *studioResolver) ImageCount(ctx context.Context, obj *models.Studio) (ret *int, err error) {
var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
res, err = image.CountByStudioID(repo.Image(), obj.ID)
return err
}); err != nil {
return nil, err
}
return &res, nil
}
func (r *studioResolver) GalleryCount(ctx context.Context, obj *models.Studio) (ret *int, err error) {
var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
res, err = gallery.CountByStudioID(repo.Gallery(), obj.ID)
return err
}); err != nil {
return nil, err
}
return &res, nil
}
func (r *studioResolver) ParentStudio(ctx context.Context, obj *models.Studio) (ret *models.Studio, err error) {
if !obj.ParentID.Valid {
return nil, nil
@ -90,3 +116,18 @@ func (r *studioResolver) StashIds(ctx context.Context, obj *models.Studio) (ret
return ret, nil
}
func (r *studioResolver) Rating(ctx context.Context, obj *models.Studio) (*int, error) {
if obj.Rating.Valid {
rating := int(obj.Rating.Int64)
return &rating, nil
}
return nil, nil
}
func (r *studioResolver) Details(ctx context.Context, obj *models.Studio) (*string, error) {
if obj.Details.Valid {
return &obj.Details.String, nil
}
return nil, nil
}

View file

@ -4,6 +4,8 @@ import (
"context"
"github.com/stashapp/stash/pkg/api/urlbuilders"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
)
@ -31,6 +33,30 @@ func (r *tagResolver) SceneMarkerCount(ctx context.Context, obj *models.Tag) (re
return &count, err
}
func (r *tagResolver) ImageCount(ctx context.Context, obj *models.Tag) (ret *int, err error) {
var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
res, err = image.CountByTagID(repo.Image(), obj.ID)
return err
}); err != nil {
return nil, err
}
return &res, nil
}
func (r *tagResolver) GalleryCount(ctx context.Context, obj *models.Tag) (ret *int, err error) {
var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
res, err = gallery.CountByTagID(repo.Gallery(), obj.ID)
return err
}); err != nil {
return nil, err
}
return &res, nil
}
func (r *tagResolver) PerformerCount(ctx context.Context, obj *models.Tag) (ret *int, err error) {
var count int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {

View file

@ -13,15 +13,37 @@ import (
"github.com/stashapp/stash/pkg/utils"
)
func (r *mutationResolver) Setup(ctx context.Context, input models.SetupInput) (bool, error) {
err := manager.GetInstance().Setup(input)
return err == nil, err
}
func (r *mutationResolver) Migrate(ctx context.Context, input models.MigrateInput) (bool, error) {
err := manager.GetInstance().Migrate(input)
return err == nil, err
}
func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.ConfigGeneralInput) (*models.ConfigGeneralResult, error) {
c := config.GetInstance()
existingPaths := c.GetStashPaths()
if len(input.Stashes) > 0 {
for _, s := range input.Stashes {
exists, err := utils.DirExists(s.Path)
if !exists {
return makeConfigGeneralResult(), err
// Only validate existence of new paths
isNew := true
for _, path := range existingPaths {
if path.Path == s.Path {
isNew = false
break
}
}
if isNew {
exists, err := utils.DirExists(s.Path)
if !exists {
return makeConfigGeneralResult(), err
}
}
}
config.Set(config.Stash, input.Stashes)
c.Set(config.Stash, input.Stashes)
}
if input.DatabasePath != nil {
@ -29,138 +51,140 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
if ext != ".db" && ext != ".sqlite" && ext != ".sqlite3" {
return makeConfigGeneralResult(), fmt.Errorf("invalid database path, use extension db, sqlite, or sqlite3")
}
config.Set(config.Database, input.DatabasePath)
c.Set(config.Database, input.DatabasePath)
}
if input.GeneratedPath != nil {
if err := utils.EnsureDir(*input.GeneratedPath); err != nil {
return makeConfigGeneralResult(), err
}
config.Set(config.Generated, input.GeneratedPath)
c.Set(config.Generated, input.GeneratedPath)
}
if input.CachePath != nil {
if err := utils.EnsureDir(*input.CachePath); err != nil {
return makeConfigGeneralResult(), err
if *input.CachePath != "" {
if err := utils.EnsureDir(*input.CachePath); err != nil {
return makeConfigGeneralResult(), err
}
}
config.Set(config.Cache, input.CachePath)
c.Set(config.Cache, input.CachePath)
}
if !input.CalculateMd5 && input.VideoFileNamingAlgorithm == models.HashAlgorithmMd5 {
return makeConfigGeneralResult(), errors.New("calculateMD5 must be true if using MD5")
}
if input.VideoFileNamingAlgorithm != config.GetVideoFileNamingAlgorithm() {
if input.VideoFileNamingAlgorithm != c.GetVideoFileNamingAlgorithm() {
// validate changing VideoFileNamingAlgorithm
if err := manager.ValidateVideoFileNamingAlgorithm(r.txnManager, input.VideoFileNamingAlgorithm); err != nil {
return makeConfigGeneralResult(), err
}
config.Set(config.VideoFileNamingAlgorithm, input.VideoFileNamingAlgorithm)
c.Set(config.VideoFileNamingAlgorithm, input.VideoFileNamingAlgorithm)
}
config.Set(config.CalculateMD5, input.CalculateMd5)
c.Set(config.CalculateMD5, input.CalculateMd5)
if input.ParallelTasks != nil {
config.Set(config.ParallelTasks, *input.ParallelTasks)
c.Set(config.ParallelTasks, *input.ParallelTasks)
}
if input.PreviewSegments != nil {
config.Set(config.PreviewSegments, *input.PreviewSegments)
c.Set(config.PreviewSegments, *input.PreviewSegments)
}
if input.PreviewSegmentDuration != nil {
config.Set(config.PreviewSegmentDuration, *input.PreviewSegmentDuration)
c.Set(config.PreviewSegmentDuration, *input.PreviewSegmentDuration)
}
if input.PreviewExcludeStart != nil {
config.Set(config.PreviewExcludeStart, *input.PreviewExcludeStart)
c.Set(config.PreviewExcludeStart, *input.PreviewExcludeStart)
}
if input.PreviewExcludeEnd != nil {
config.Set(config.PreviewExcludeEnd, *input.PreviewExcludeEnd)
c.Set(config.PreviewExcludeEnd, *input.PreviewExcludeEnd)
}
if input.PreviewPreset != nil {
config.Set(config.PreviewPreset, input.PreviewPreset.String())
c.Set(config.PreviewPreset, input.PreviewPreset.String())
}
if input.MaxTranscodeSize != nil {
config.Set(config.MaxTranscodeSize, input.MaxTranscodeSize.String())
c.Set(config.MaxTranscodeSize, input.MaxTranscodeSize.String())
}
if input.MaxStreamingTranscodeSize != nil {
config.Set(config.MaxStreamingTranscodeSize, input.MaxStreamingTranscodeSize.String())
c.Set(config.MaxStreamingTranscodeSize, input.MaxStreamingTranscodeSize.String())
}
if input.Username != nil {
config.Set(config.Username, input.Username)
c.Set(config.Username, input.Username)
}
if input.Password != nil {
// bit of a hack - check if the passed in password is the same as the stored hash
// and only set if they are different
currentPWHash := config.GetPasswordHash()
currentPWHash := c.GetPasswordHash()
if *input.Password != currentPWHash {
config.SetPassword(*input.Password)
c.SetPassword(*input.Password)
}
}
if input.MaxSessionAge != nil {
config.Set(config.MaxSessionAge, *input.MaxSessionAge)
c.Set(config.MaxSessionAge, *input.MaxSessionAge)
}
if input.LogFile != nil {
config.Set(config.LogFile, input.LogFile)
c.Set(config.LogFile, input.LogFile)
}
config.Set(config.LogOut, input.LogOut)
config.Set(config.LogAccess, input.LogAccess)
c.Set(config.LogOut, input.LogOut)
c.Set(config.LogAccess, input.LogAccess)
if input.LogLevel != config.GetLogLevel() {
config.Set(config.LogLevel, input.LogLevel)
if input.LogLevel != c.GetLogLevel() {
c.Set(config.LogLevel, input.LogLevel)
logger.SetLogLevel(input.LogLevel)
}
if input.Excludes != nil {
config.Set(config.Exclude, input.Excludes)
c.Set(config.Exclude, input.Excludes)
}
if input.ImageExcludes != nil {
config.Set(config.ImageExclude, input.ImageExcludes)
c.Set(config.ImageExclude, input.ImageExcludes)
}
if input.VideoExtensions != nil {
config.Set(config.VideoExtensions, input.VideoExtensions)
c.Set(config.VideoExtensions, input.VideoExtensions)
}
if input.ImageExtensions != nil {
config.Set(config.ImageExtensions, input.ImageExtensions)
c.Set(config.ImageExtensions, input.ImageExtensions)
}
if input.GalleryExtensions != nil {
config.Set(config.GalleryExtensions, input.GalleryExtensions)
c.Set(config.GalleryExtensions, input.GalleryExtensions)
}
config.Set(config.CreateGalleriesFromFolders, input.CreateGalleriesFromFolders)
c.Set(config.CreateGalleriesFromFolders, input.CreateGalleriesFromFolders)
refreshScraperCache := false
if input.ScraperUserAgent != nil {
config.Set(config.ScraperUserAgent, input.ScraperUserAgent)
c.Set(config.ScraperUserAgent, input.ScraperUserAgent)
refreshScraperCache = true
}
if input.ScraperCDPPath != nil {
config.Set(config.ScraperCDPPath, input.ScraperCDPPath)
c.Set(config.ScraperCDPPath, input.ScraperCDPPath)
refreshScraperCache = true
}
config.Set(config.ScraperCertCheck, input.ScraperCertCheck)
c.Set(config.ScraperCertCheck, input.ScraperCertCheck)
if input.StashBoxes != nil {
if err := config.ValidateStashBoxes(input.StashBoxes); err != nil {
if err := c.ValidateStashBoxes(input.StashBoxes); err != nil {
return nil, err
}
config.Set(config.StashBoxes, input.StashBoxes)
c.Set(config.StashBoxes, input.StashBoxes)
}
if err := config.Write(); err != nil {
if err := c.Write(); err != nil {
return makeConfigGeneralResult(), err
}
@ -173,36 +197,41 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
}
func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.ConfigInterfaceInput) (*models.ConfigInterfaceResult, error) {
c := config.GetInstance()
if input.MenuItems != nil {
config.Set(config.MenuItems, input.MenuItems)
c.Set(config.MenuItems, input.MenuItems)
}
if input.SoundOnPreview != nil {
config.Set(config.SoundOnPreview, *input.SoundOnPreview)
c.Set(config.SoundOnPreview, *input.SoundOnPreview)
}
if input.WallShowTitle != nil {
config.Set(config.WallShowTitle, *input.WallShowTitle)
c.Set(config.WallShowTitle, *input.WallShowTitle)
}
if input.WallPlayback != nil {
config.Set(config.WallPlayback, *input.WallPlayback)
c.Set(config.WallPlayback, *input.WallPlayback)
}
if input.MaximumLoopDuration != nil {
config.Set(config.MaximumLoopDuration, *input.MaximumLoopDuration)
c.Set(config.MaximumLoopDuration, *input.MaximumLoopDuration)
}
if input.AutostartVideo != nil {
config.Set(config.AutostartVideo, *input.AutostartVideo)
c.Set(config.AutostartVideo, *input.AutostartVideo)
}
if input.ShowStudioAsText != nil {
config.Set(config.ShowStudioAsText, *input.ShowStudioAsText)
c.Set(config.ShowStudioAsText, *input.ShowStudioAsText)
}
if input.Language != nil {
config.Set(config.Language, *input.Language)
c.Set(config.Language, *input.Language)
}
if input.SlideshowDelay != nil {
c.Set(config.SlideshowDelay, *input.SlideshowDelay)
}
css := ""
@ -211,15 +240,38 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.
css = *input.CSS
}
config.SetCSS(css)
c.SetCSS(css)
if input.CSSEnabled != nil {
config.Set(config.CSSEnabled, *input.CSSEnabled)
c.Set(config.CSSEnabled, *input.CSSEnabled)
}
if err := config.Write(); err != nil {
if err := c.Write(); err != nil {
return makeConfigInterfaceResult(), err
}
return makeConfigInterfaceResult(), nil
}
func (r *mutationResolver) GenerateAPIKey(ctx context.Context, input models.GenerateAPIKeyInput) (string, error) {
c := config.GetInstance()
var newAPIKey string
if input.Clear == nil || !*input.Clear {
username := c.GetUsername()
if username != "" {
var err error
newAPIKey, err = manager.GenerateAPIKey(username)
if err != nil {
return "", err
}
}
}
c.Set(config.ApiKey, newAPIKey)
if err := c.Write(); err != nil {
return newAPIKey, err
}
return newAPIKey, nil
}
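For reference, a minimal client-side sketch (not part of this diff) of using the key returned by the resolver above; the server address, key value and GraphQL query are assumptions, and the ApiKey header name matches the constant added to routes.go further down.
package main

import (
	"fmt"
	"net/http"
	"strings"
)

func main() {
	// hypothetical values: adjust the address and paste in the key returned by the resolver above
	const apiKey = "<generated key>"
	body := strings.NewReader(`{"query":"{ version { version } }"}`)
	req, err := http.NewRequest("POST", "http://localhost:9999/graphql", body)
	if err != nil {
		fmt.Println(err)
		return
	}
	// the same key can instead be passed as a query parameter (?apikey=...) for
	// clients that cannot set headers
	req.Header.Set("ApiKey", apiKey)
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}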

View file

@ -20,12 +20,15 @@ func (r *mutationResolver) MetadataScan(ctx context.Context, input models.ScanMe
}
func (r *mutationResolver) MetadataImport(ctx context.Context) (string, error) {
manager.GetInstance().Import()
if err := manager.GetInstance().Import(); err != nil {
return "", err
}
return "todo", nil
}
func (r *mutationResolver) ImportObjects(ctx context.Context, input models.ImportObjectsInput) (string, error) {
t, err := manager.CreateImportTask(config.GetVideoFileNamingAlgorithm(), input)
t, err := manager.CreateImportTask(config.GetInstance().GetVideoFileNamingAlgorithm(), input)
if err != nil {
return "", err
}
@ -39,12 +42,15 @@ func (r *mutationResolver) ImportObjects(ctx context.Context, input models.Impor
}
func (r *mutationResolver) MetadataExport(ctx context.Context) (string, error) {
manager.GetInstance().Export()
if err := manager.GetInstance().Export(); err != nil {
return "", err
}
return "todo", nil
}
func (r *mutationResolver) ExportObjects(ctx context.Context, input models.ExportObjectsInput) (*string, error) {
t := manager.CreateExportTask(config.GetVideoFileNamingAlgorithm(), input)
t := manager.CreateExportTask(config.GetInstance().GetVideoFileNamingAlgorithm(), input)
wg, err := manager.GetInstance().RunSingleTask(t)
if err != nil {
return nil, err

View file

@ -3,10 +3,12 @@ package api
import (
"context"
"database/sql"
"fmt"
"strconv"
"time"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/performer"
"github.com/stashapp/stash/pkg/utils"
)
@ -83,6 +85,30 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
} else {
newPerformer.Favorite = sql.NullBool{Bool: false, Valid: true}
}
if input.Rating != nil {
newPerformer.Rating = sql.NullInt64{Int64: int64(*input.Rating), Valid: true}
} else {
newPerformer.Rating = sql.NullInt64{Valid: false}
}
if input.Details != nil {
newPerformer.Details = sql.NullString{String: *input.Details, Valid: true}
}
if input.DeathDate != nil {
newPerformer.DeathDate = models.SQLiteDate{String: *input.DeathDate, Valid: true}
}
if input.HairColor != nil {
newPerformer.HairColor = sql.NullString{String: *input.HairColor, Valid: true}
}
if input.Weight != nil {
weight := int64(*input.Weight)
newPerformer.Weight = sql.NullInt64{Int64: weight, Valid: true}
}
if err := performer.ValidateDeathDate(nil, input.Birthdate, input.DeathDate); err != nil {
return nil, err
}
// Start the transaction and save the performer
var performer *models.Performer
@ -177,33 +203,53 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
updatedPerformer.Twitter = translator.nullString(input.Twitter, "twitter")
updatedPerformer.Instagram = translator.nullString(input.Instagram, "instagram")
updatedPerformer.Favorite = translator.nullBool(input.Favorite, "favorite")
updatedPerformer.Rating = translator.nullInt64(input.Rating, "rating")
updatedPerformer.Details = translator.nullString(input.Details, "details")
updatedPerformer.DeathDate = translator.sqliteDate(input.DeathDate, "death_date")
updatedPerformer.HairColor = translator.nullString(input.HairColor, "hair_color")
updatedPerformer.Weight = translator.nullInt64(input.Weight, "weight")
// Start the transaction and save the performer
var performer *models.Performer
// Start the transaction and save the performer
var p *models.Performer
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Performer()
var err error
performer, err = qb.Update(updatedPerformer)
// need to get existing performer
existing, err := qb.Find(updatedPerformer.ID)
if err != nil {
return err
}
if existing == nil {
return fmt.Errorf("performer with id %d not found", updatedPerformer.ID)
}
if err := performer.ValidateDeathDate(existing, input.Birthdate, input.DeathDate); err != nil {
return err
}
p, err = qb.Update(updatedPerformer)
if err != nil {
return err
}
// Save the tags
if translator.hasField("tag_ids") {
if err := r.updatePerformerTags(qb, performer.ID, input.TagIds); err != nil {
if err := r.updatePerformerTags(qb, p.ID, input.TagIds); err != nil {
return err
}
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdateImage(performer.ID, imageData); err != nil {
if err := qb.UpdateImage(p.ID, imageData); err != nil {
return err
}
} else if imageIncluded {
// must be unsetting
if err := qb.DestroyImage(performer.ID); err != nil {
if err := qb.DestroyImage(p.ID); err != nil {
return err
}
}
@ -221,7 +267,7 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
return nil, err
}
return performer, nil
return p, nil
}
func (r *mutationResolver) updatePerformerTags(qb models.PerformerReaderWriter, performerID int, tagsIDs []string) error {
@ -264,6 +310,11 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input models
updatedPerformer.Twitter = translator.nullString(input.Twitter, "twitter")
updatedPerformer.Instagram = translator.nullString(input.Instagram, "instagram")
updatedPerformer.Favorite = translator.nullBool(input.Favorite, "favorite")
updatedPerformer.Rating = translator.nullInt64(input.Rating, "rating")
updatedPerformer.Details = translator.nullString(input.Details, "details")
updatedPerformer.DeathDate = translator.sqliteDate(input.DeathDate, "death_date")
updatedPerformer.HairColor = translator.nullString(input.HairColor, "hair_color")
updatedPerformer.Weight = translator.nullInt64(input.Weight, "weight")
if translator.hasField("gender") {
if input.Gender != nil {
@ -282,6 +333,20 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input models
for _, performerID := range performerIDs {
updatedPerformer.ID = performerID
// need to get existing performer
existing, err := qb.Find(performerID)
if err != nil {
return err
}
if existing == nil {
return fmt.Errorf("performer with id %d not found", performerID)
}
if err := performer.ValidateDeathDate(existing, input.Birthdate, input.DeathDate); err != nil {
return err
}
performer, err := qb.Update(updatedPerformer)
if err != nil {
return err

View file

@ -23,6 +23,7 @@ func (r *mutationResolver) RunPluginTask(ctx context.Context, pluginID string, t
}
}
config := config.GetInstance()
serverConnection := common.StashServerConnection{
Scheme: "http",
Port: config.GetPort(),

View file

@ -139,7 +139,7 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, translator
// only update the cover image if provided and everything else was successful
if coverImageData != nil {
err = manager.SetSceneScreenshot(scene.GetHash(config.GetVideoFileNamingAlgorithm()), coverImageData)
err = manager.SetSceneScreenshot(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()), coverImageData)
if err != nil {
return nil, err
}
@ -384,7 +384,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
// if delete generated is true, then delete the generated files
// for the scene
if input.DeleteGenerated != nil && *input.DeleteGenerated {
manager.DeleteGeneratedSceneFiles(scene, config.GetVideoFileNamingAlgorithm())
manager.DeleteGeneratedSceneFiles(scene, config.GetInstance().GetVideoFileNamingAlgorithm())
}
// if delete file is true, then delete the file as well
@ -426,7 +426,7 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
f()
}
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm()
for _, scene := range scenes {
// if delete generated is true, then delete the generated files
// for the scene
@ -586,7 +586,7 @@ func (r *mutationResolver) changeMarker(ctx context.Context, changeType int, cha
// remove the marker preview if the timestamp was changed
if scene != nil && existingMarker != nil && existingMarker.Seconds != changedMarker.Seconds {
seconds := int(existingMarker.Seconds)
manager.DeleteSceneMarkerFiles(scene, seconds, config.GetVideoFileNamingAlgorithm())
manager.DeleteSceneMarkerFiles(scene, seconds, config.GetInstance().GetVideoFileNamingAlgorithm())
}
return sceneMarker, nil

View file

@ -4,13 +4,14 @@ import (
"context"
"fmt"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scraper/stashbox"
)
func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input models.StashBoxFingerprintSubmissionInput) (bool, error) {
boxes := config.GetStashBoxes()
boxes := config.GetInstance().GetStashBoxes()
if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) {
return false, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex)
@ -20,3 +21,8 @@ func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input
return client.SubmitStashBoxFingerprints(input.SceneIds, boxes[input.StashBoxIndex].Endpoint)
}
func (r *mutationResolver) StashBoxBatchPerformerTag(ctx context.Context, input models.StashBoxBatchPerformerTagInput) (string, error) {
manager.GetInstance().StashBoxBatchPerformerTag(input)
return "todo", nil
}

View file

@ -42,6 +42,15 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
newStudio.ParentID = sql.NullInt64{Int64: parentID, Valid: true}
}
if input.Rating != nil {
newStudio.Rating = sql.NullInt64{Int64: int64(*input.Rating), Valid: true}
} else {
newStudio.Rating = sql.NullInt64{Valid: false}
}
if input.Details != nil {
newStudio.Details = sql.NullString{String: *input.Details, Valid: true}
}
// Start the transaction and save the studio
var studio *models.Studio
if err := r.withTxn(ctx, func(repo models.Repository) error {
@ -109,7 +118,9 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
}
updatedStudio.URL = translator.nullString(input.URL, "url")
updatedStudio.Details = translator.nullString(input.Details, "details")
updatedStudio.ParentID = translator.nullInt64FromString(input.ParentID, "parent_id")
updatedStudio.Rating = translator.nullInt64(input.Rating, "rating")
// Start the transaction and save the studio
var studio *models.Studio

View file

@ -34,6 +34,7 @@ func makeConfigResult() *models.ConfigResult {
}
func makeConfigGeneralResult() *models.ConfigGeneralResult {
config := config.GetInstance()
logFile := config.GetLogFile()
maxTranscodeSize := config.GetMaxTranscodeSize()
@ -59,6 +60,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
PreviewPreset: config.GetPreviewPreset(),
MaxTranscodeSize: &maxTranscodeSize,
MaxStreamingTranscodeSize: &maxStreamingTranscodeSize,
APIKey: config.GetAPIKey(),
Username: config.GetUsername(),
Password: config.GetPasswordHash(),
MaxSessionAge: config.GetMaxSessionAge(),
@ -80,6 +82,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
}
func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
config := config.GetInstance()
menuItems := config.GetMenuItems()
soundOnPreview := config.GetSoundOnPreview()
wallShowTitle := config.GetWallShowTitle()
@ -90,6 +93,7 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
css := config.GetCSS()
cssEnabled := config.GetCSSEnabled()
language := config.GetLanguage()
slideshowDelay := config.GetSlideshowDelay()
return &models.ConfigInterfaceResult{
MenuItems: menuItems,
@ -102,5 +106,6 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
CSS: &css,
CSSEnabled: &cssEnabled,
Language: &language,
SlideshowDelay: &slideshowDelay,
}
}

View file

@ -59,12 +59,25 @@ func (r *queryResolver) FindSceneByHash(ctx context.Context, input models.SceneH
return scene, nil
}
func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.SceneFilterType, sceneIds []int, filter *models.FindFilterType) (ret *models.FindScenesResultType, err error) {
func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.SceneFilterType, sceneIDs []int, filter *models.FindFilterType) (ret *models.FindScenesResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
scenes, total, err := repo.Scene().Query(sceneFilter, filter)
var scenes []*models.Scene
var total int
var err error
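// if specific scene IDs were requested, fetch them directly instead of running the filter query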
if len(sceneIDs) > 0 {
scenes, err = repo.Scene().FindMany(sceneIDs)
if err == nil {
total = len(scenes)
}
} else {
scenes, total, err = repo.Scene().Query(sceneFilter, filter)
}
if err != nil {
return err
}
ret = &models.FindScenesResultType{
Count: total,
Scenes: scenes,
@ -138,3 +151,18 @@ func (r *queryResolver) ParseSceneFilenames(ctx context.Context, filter *models.
return ret, nil
}
func (r *queryResolver) FindDuplicateScenes(ctx context.Context, distance *int) (ret [][]*models.Scene, err error) {
dist := 0
if distance != nil {
dist = *distance
}
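// default to a distance of 0 (exact matches only) when no distance argument is provided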
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Scene().FindDuplicates(dist)
return err
}); err != nil {
return nil, err
}
return ret, nil
}

View file

@ -17,3 +17,7 @@ func (r *queryResolver) JobStatus(ctx context.Context) (*models.MetadataUpdateSt
return &ret, nil
}
func (r *queryResolver) SystemStatus(ctx context.Context) (*models.SystemStatus, error) {
return manager.GetInstance().GetSystemStatus(), nil
}

View file

@ -30,5 +30,5 @@ func (r *queryResolver) SceneStreams(ctx context.Context, id *string) ([]*models
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
builder := urlbuilders.NewSceneURLBuilder(baseURL, scene.ID)
return manager.GetSceneStreamPaths(scene, builder.GetStreamURL(), config.GetMaxStreamingTranscodeSize())
return manager.GetSceneStreamPaths(scene, builder.GetStreamURL(), config.GetInstance().GetMaxStreamingTranscodeSize())
}

View file

@ -88,8 +88,8 @@ func (r *queryResolver) ScrapeMovieURL(ctx context.Context, url string) (*models
return manager.GetInstance().ScraperCache.ScrapeMovieURL(url)
}
func (r *queryResolver) QueryStashBoxScene(ctx context.Context, input models.StashBoxQueryInput) ([]*models.ScrapedScene, error) {
boxes := config.GetStashBoxes()
func (r *queryResolver) QueryStashBoxScene(ctx context.Context, input models.StashBoxSceneQueryInput) ([]*models.ScrapedScene, error) {
boxes := config.GetInstance().GetStashBoxes()
if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) {
return nil, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex)
@ -107,3 +107,23 @@ func (r *queryResolver) QueryStashBoxScene(ctx context.Context, input models.Sta
return nil, nil
}
func (r *queryResolver) QueryStashBoxPerformer(ctx context.Context, input models.StashBoxPerformerQueryInput) ([]*models.StashBoxPerformerQueryResult, error) {
boxes := config.GetInstance().GetStashBoxes()
if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) {
return nil, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex)
}
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager)
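// prefer explicit performer IDs when provided; otherwise fall back to a free-text query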
if len(input.PerformerIds) > 0 {
return client.FindStashBoxPerformersByNames(input.PerformerIds)
}
if input.Q != nil {
return client.QueryStashBoxPerformer(*input.Q)
}
return nil, nil
}

View file

@ -69,7 +69,7 @@ func getSceneFileContainer(scene *models.Scene) ffmpeg.Container {
func (rs sceneRoutes) StreamDirect(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm()
filepath := manager.GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.GetHash(fileNamingAlgo))
manager.RegisterStream(filepath, &w)
@ -158,7 +158,7 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, vi
options := ffmpeg.GetTranscodeStreamOptions(*videoFile, videoCodec, audioCodec)
options.StartTime = startTime
options.MaxTranscodeSize = config.GetMaxStreamingTranscodeSize()
options.MaxTranscodeSize = config.GetInstance().GetMaxStreamingTranscodeSize()
if requestedSize != "" {
options.MaxTranscodeSize = models.StreamingResolutionEnum(requestedSize)
}
@ -178,7 +178,7 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, vi
func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.GetHash(config.GetVideoFileNamingAlgorithm()))
filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()))
// fall back to the scene image blob if the file isn't present
screenshotExists, _ := utils.FileExists(filepath)
@ -196,13 +196,13 @@ func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) Preview(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewPath(scene.GetHash(config.GetVideoFileNamingAlgorithm()))
filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()))
utils.ServeFileNoCache(w, r, filepath)
}
func (rs sceneRoutes) Webp(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewImagePath(scene.GetHash(config.GetVideoFileNamingAlgorithm()))
filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewImagePath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()))
http.ServeFile(w, r, filepath)
}
@ -267,14 +267,14 @@ func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
w.Header().Set("Content-Type", "text/vtt")
filepath := manager.GetInstance().Paths.Scene.GetSpriteVttFilePath(scene.GetHash(config.GetVideoFileNamingAlgorithm()))
filepath := manager.GetInstance().Paths.Scene.GetSpriteVttFilePath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()))
http.ServeFile(w, r, filepath)
}
func (rs sceneRoutes) VttSprite(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
w.Header().Set("Content-Type", "image/jpeg")
filepath := manager.GetInstance().Paths.Scene.GetSpriteImageFilePath(scene.GetHash(config.GetVideoFileNamingAlgorithm()))
filepath := manager.GetInstance().Paths.Scene.GetSpriteImageFilePath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()))
http.ServeFile(w, r, filepath)
}
@ -291,7 +291,7 @@ func (rs sceneRoutes) SceneMarkerStream(w http.ResponseWriter, r *http.Request)
http.Error(w, http.StatusText(500), 500)
return
}
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPath(scene.GetHash(config.GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds))
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds))
http.ServeFile(w, r, filepath)
}
@ -308,7 +308,7 @@ func (rs sceneRoutes) SceneMarkerPreview(w http.ResponseWriter, r *http.Request)
http.Error(w, http.StatusText(500), 500)
return
}
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.GetHash(config.GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds))
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds))
// If the image doesn't exist, send the placeholder
exists, _ := utils.FileExists(filepath)

View file

@ -8,9 +8,7 @@ import (
"io/ioutil"
"net/http"
"net/url"
"os"
"path"
"path/filepath"
"runtime/debug"
"strconv"
"strings"
@ -22,7 +20,6 @@ import (
"github.com/gobuffalo/packr/v2"
"github.com/gorilla/websocket"
"github.com/rs/cors"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/manager/config"
@ -38,9 +35,13 @@ var githash string
var uiBox *packr.Box
//var legacyUiBox *packr.Box
var setupUIBox *packr.Box
var loginUIBox *packr.Box
const (
ApiKeyHeader = "ApiKey"
ApiKeyParameter = "apikey"
)
func allowUnauthenticated(r *http.Request) bool {
return strings.HasPrefix(r.URL.Path, "/login") || r.URL.Path == "/css"
}
@ -48,14 +49,34 @@ func allowUnauthenticated(r *http.Request) bool {
func authenticateHandler() func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
c := config.GetInstance()
ctx := r.Context()
// translate api key into current user, if present
userID := ""
apiKey := r.Header.Get(ApiKeyHeader)
var err error
// handle session
userID, err = getSessionUserID(w, r)
// try getting the api key as a query parameter
if apiKey == "" {
apiKey = r.URL.Query().Get(ApiKeyParameter)
}
if apiKey != "" {
// match against the configured API key and set userID to the
// configured username. In future, we'll want to
// get the username from the key.
if c.GetAPIKey() != apiKey {
w.Header().Add("WWW-Authenticate", `FormBased`)
w.WriteHeader(http.StatusUnauthorized)
return
}
userID = c.GetUsername()
} else {
// handle session
userID, err = getSessionUserID(w, r)
}
if err != nil {
w.WriteHeader(http.StatusInternalServerError)
@ -64,9 +85,7 @@ func authenticateHandler() func(http.Handler) http.Handler {
}
// handle redirect if no user and user is required
if userID == "" && config.HasCredentials() && !allowUnauthenticated(r) {
// always allow
if userID == "" && c.HasCredentials() && !allowUnauthenticated(r) {
// if we don't have a userID, then redirect
// if graphql was requested, we just return a forbidden error
if r.URL.Path == "/graphql" {
@ -95,14 +114,11 @@ func authenticateHandler() func(http.Handler) http.Handler {
}
}
const setupEndPoint = "/setup"
const migrateEndPoint = "/migrate"
const loginEndPoint = "/login"
func Start() {
uiBox = packr.New("UI Box", "../../ui/v2.5/build")
//legacyUiBox = packr.New("UI Box", "../../ui/v1/dist/stash-frontend")
setupUIBox = packr.New("Setup UI Box", "../../ui/setup")
loginUIBox = packr.New("Login UI Box", "../../ui/login")
initSessionStore()
@ -110,18 +126,18 @@ func Start() {
r := chi.NewRouter()
r.Use(middleware.Heartbeat("/healthz"))
r.Use(authenticateHandler())
r.Use(middleware.Recoverer)
if config.GetLogAccess() {
c := config.GetInstance()
if c.GetLogAccess() {
r.Use(middleware.Logger)
}
r.Use(middleware.DefaultCompress)
r.Use(middleware.StripSlashes)
r.Use(cors.AllowAll().Handler)
r.Use(BaseURLMiddleware)
r.Use(ConfigCheckMiddleware)
r.Use(DatabaseCheckMiddleware)
recoverFunc := handler.RecoverFunc(func(ctx context.Context, err interface{}) error {
logger.Error(err)
@ -135,7 +151,7 @@ func Start() {
return true
},
})
maxUploadSize := handler.UploadMaxSize(config.GetMaxUploadSize())
maxUploadSize := handler.UploadMaxSize(c.GetMaxUploadSize())
websocketKeepAliveDuration := handler.WebsocketKeepAliveDuration(10 * time.Second)
txnManager := manager.GetInstance().TxnManager
@ -176,12 +192,12 @@ func Start() {
r.HandleFunc("/css", func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/css")
if !config.GetCSSEnabled() {
if !c.GetCSSEnabled() {
return
}
// search for custom.css in current directory, then $HOME/.stash
fn := config.GetCSSPath()
fn := c.GetCSSPath()
exists, _ := utils.FileExists(fn)
if !exists {
return
@ -190,21 +206,6 @@ func Start() {
http.ServeFile(w, r, fn)
})
// Serve the migration UI
r.Get("/migrate", getMigrateHandler)
r.Post("/migrate", doMigrateHandler)
// Serve the setup UI
r.HandleFunc("/setup*", func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path)
if ext == ".html" || ext == "" {
data, _ := setupUIBox.Find("index.html")
_, _ = w.Write(data)
} else {
r.URL.Path = strings.Replace(r.URL.Path, "/setup", "", 1)
http.FileServer(setupUIBox).ServeHTTP(w, r)
}
})
r.HandleFunc("/login*", func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path)
if ext == ".html" || ext == "" {
@ -215,62 +216,9 @@ func Start() {
http.FileServer(loginUIBox).ServeHTTP(w, r)
}
})
r.Post("/init", func(w http.ResponseWriter, r *http.Request) {
err := r.ParseForm()
if err != nil {
http.Error(w, fmt.Sprintf("error: %s", err), 500)
}
stash := filepath.Clean(r.Form.Get("stash"))
generated := filepath.Clean(r.Form.Get("generated"))
metadata := filepath.Clean(r.Form.Get("metadata"))
cache := filepath.Clean(r.Form.Get("cache"))
//downloads := filepath.Clean(r.Form.Get("downloads")) // TODO
downloads := filepath.Join(metadata, "downloads")
exists, _ := utils.DirExists(stash)
if !exists || stash == "." {
http.Error(w, fmt.Sprintf("the stash path either doesn't exist, or is not a directory <%s>. Go back and try again.", stash), 500)
return
}
exists, _ = utils.DirExists(generated)
if !exists || generated == "." {
http.Error(w, fmt.Sprintf("the generated path either doesn't exist, or is not a directory <%s>. Go back and try again.", generated), 500)
return
}
exists, _ = utils.DirExists(metadata)
if !exists || metadata == "." {
http.Error(w, fmt.Sprintf("the metadata path either doesn't exist, or is not a directory <%s> Go back and try again.", metadata), 500)
return
}
exists, _ = utils.DirExists(cache)
if !exists || cache == "." {
http.Error(w, fmt.Sprintf("the cache path either doesn't exist, or is not a directory <%s> Go back and try again.", cache), 500)
return
}
_ = os.Mkdir(downloads, 0755)
// #536 - set stash as slice of strings
config.Set(config.Stash, []string{stash})
config.Set(config.Generated, generated)
config.Set(config.Metadata, metadata)
config.Set(config.Cache, cache)
config.Set(config.Downloads, downloads)
if err := config.Write(); err != nil {
http.Error(w, fmt.Sprintf("there was an error saving the config file: %s", err), 500)
return
}
manager.GetInstance().RefreshConfig()
http.Redirect(w, r, "/", 301)
})
// Serve static folders
customServedFolders := config.GetCustomServedFolders()
customServedFolders := c.GetCustomServedFolders()
if customServedFolders != nil {
r.HandleFunc("/custom/*", func(w http.ResponseWriter, r *http.Request) {
r.URL.Path = strings.Replace(r.URL.Path, "/custom", "", 1)
@ -286,9 +234,21 @@ func Start() {
})
}
customUILocation := c.GetCustomUILocation()
// Serve the web app
r.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path)
if customUILocation != "" {
if r.URL.Path == "index.html" || ext == "" {
r.URL.Path = "/"
}
http.FileServer(http.Dir(customUILocation)).ServeHTTP(w, r)
return
}
if ext == ".html" || ext == "" {
data, _ := uiBox.Find("index.html")
_, _ = w.Write(data)
@ -301,13 +261,13 @@ func Start() {
}
})
displayHost := config.GetHost()
displayHost := c.GetHost()
if displayHost == "0.0.0.0" {
displayHost = "localhost"
}
displayAddress := displayHost + ":" + strconv.Itoa(config.GetPort())
displayAddress := displayHost + ":" + strconv.Itoa(c.GetPort())
address := config.GetHost() + ":" + strconv.Itoa(config.GetPort())
address := c.GetHost() + ":" + strconv.Itoa(c.GetPort())
if tlsConfig := makeTLSConfig(); tlsConfig != nil {
httpsServer := &http.Server{
Addr: address,
@ -402,7 +362,7 @@ func BaseURLMiddleware(next http.Handler) http.Handler {
}
baseURL := scheme + "://" + r.Host
externalHost := config.GetExternalHost()
externalHost := config.GetInstance().GetExternalHost()
if externalHost != "" {
baseURL = externalHost
}
@ -413,34 +373,3 @@ func BaseURLMiddleware(next http.Handler) http.Handler {
}
return http.HandlerFunc(fn)
}
func ConfigCheckMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path)
shouldRedirect := ext == "" && r.Method == "GET"
if !config.IsValid() && shouldRedirect {
// #539 - don't redirect if loading login page
if !strings.HasPrefix(r.URL.Path, setupEndPoint) && !strings.HasPrefix(r.URL.Path, loginEndPoint) {
http.Redirect(w, r, setupEndPoint, http.StatusFound)
return
}
}
next.ServeHTTP(w, r)
})
}
func DatabaseCheckMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path)
shouldRedirect := ext == "" && r.Method == "GET"
if shouldRedirect && database.NeedsMigration() {
// #451 - don't redirect if loading login page
// #539 - or setup page
if !strings.HasPrefix(r.URL.Path, migrateEndPoint) && !strings.HasPrefix(r.URL.Path, loginEndPoint) && !strings.HasPrefix(r.URL.Path, setupEndPoint) {
http.Redirect(w, r, migrateEndPoint, http.StatusFound)
return
}
}
next.ServeHTTP(w, r)
})
}

View file

@ -19,7 +19,7 @@ const userIDKey = "userID"
const returnURLParam = "returnURL"
var sessionStore = sessions.NewCookieStore(config.GetSessionStoreKey())
var sessionStore = sessions.NewCookieStore(config.GetInstance().GetSessionStoreKey())
type loginTemplateData struct {
URL string
@ -27,7 +27,7 @@ type loginTemplateData struct {
}
func initSessionStore() {
sessionStore.MaxAge(config.GetMaxSessionAge())
sessionStore.MaxAge(config.GetInstance().GetMaxSessionAge())
}
func redirectToLogin(w http.ResponseWriter, returnURL string, loginError string) {
@ -45,7 +45,7 @@ func redirectToLogin(w http.ResponseWriter, returnURL string, loginError string)
}
func getLoginHandler(w http.ResponseWriter, r *http.Request) {
if !config.HasCredentials() {
if !config.GetInstance().HasCredentials() {
http.Redirect(w, r, "/", http.StatusFound)
return
}
@ -66,7 +66,7 @@ func handleLogin(w http.ResponseWriter, r *http.Request) {
password := r.FormValue("password")
// authenticate the user
if !config.ValidateCredentials(username, password) {
if !config.GetInstance().ValidateCredentials(username, password) {
// redirect back to the login page with an error
redirectToLogin(w, url, "Username or password is invalid")
return

View file

@ -1,6 +1,7 @@
package urlbuilders
import (
"fmt"
"strconv"
"time"
)
@ -8,6 +9,7 @@ import (
type SceneURLBuilder struct {
BaseURL string
SceneID string
APIKey string
}
func NewSceneURLBuilder(baseURL string, sceneID int) SceneURLBuilder {
@ -18,7 +20,11 @@ func NewSceneURLBuilder(baseURL string, sceneID int) SceneURLBuilder {
}
func (b SceneURLBuilder) GetStreamURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/stream"
var apiKeyParam string
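// append the key as a query parameter so consumers that cannot set headers (e.g. external players) can still authenticate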
if b.APIKey != "" {
apiKeyParam = fmt.Sprintf("?apikey=%s", b.APIKey)
}
return fmt.Sprintf("%s/scene/%s/stream%s", b.BaseURL, b.SceneID, apiKeyParam)
}
func (b SceneURLBuilder) GetStreamPreviewURL() string {
@ -33,6 +39,10 @@ func (b SceneURLBuilder) GetSpriteVTTURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "_thumbs.vtt"
}
func (b SceneURLBuilder) GetSpriteURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "_sprite.jpg"
}
func (b SceneURLBuilder) GetScreenshotURL(updateTime time.Time) string {
return b.BaseURL + "/scene/" + b.SceneID + "/screenshot?" + strconv.FormatInt(updateTime.Unix(), 10)
}

117
pkg/autotag/gallery.go Normal file
View file

@ -0,0 +1,117 @@
package autotag
import (
"fmt"
"path/filepath"
"strings"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/models"
)
func galleryPathsFilter(paths []string) *models.GalleryFilterType {
if paths == nil {
return nil
}
sep := string(filepath.Separator)
var ret *models.GalleryFilterType
var or *models.GalleryFilterType
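// chain each path criterion onto the previous one with OR so a gallery under any of the given paths matches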
for _, p := range paths {
newOr := &models.GalleryFilterType{}
if or != nil {
or.Or = newOr
} else {
ret = newOr
}
or = newOr
if !strings.HasSuffix(p, sep) {
p = p + sep
}
or.Path = &models.StringCriterionInput{
Modifier: models.CriterionModifierEquals,
Value: p + "%",
}
}
return ret
}
func getMatchingGalleries(name string, paths []string, galleryReader models.GalleryReader) ([]*models.Gallery, error) {
regex := getPathQueryRegex(name)
organized := false
filter := models.GalleryFilterType{
Path: &models.StringCriterionInput{
Value: "(?i)" + regex,
Modifier: models.CriterionModifierMatchesRegex,
},
Organized: &organized,
}
filter.And = galleryPathsFilter(paths)
pp := models.PerPageAll
galleries, _, err := galleryReader.Query(&filter, &models.FindFilterType{
PerPage: &pp,
})
if err != nil {
return nil, fmt.Errorf("error querying gallerys with regex '%s': %s", regex, err.Error())
}
var ret []*models.Gallery
for _, p := range galleries {
if nameMatchesPath(name, p.Path.String) {
ret = append(ret, p)
}
}
return ret, nil
}
func getGalleryFileTagger(s *models.Gallery) tagger {
return tagger{
ID: s.ID,
Type: "gallery",
Name: s.GetTitle(),
Path: s.Path.String,
}
}
// GalleryPerformers tags the provided gallery with performers whose name matches the gallery's path.
func GalleryPerformers(s *models.Gallery, rw models.GalleryReaderWriter, performerReader models.PerformerReader) error {
t := getGalleryFileTagger(s)
return t.tagPerformers(performerReader, func(subjectID, otherID int) (bool, error) {
return gallery.AddPerformer(rw, subjectID, otherID)
})
}
// GalleryStudios tags the provided gallery with the first studio whose name matches the gallery's path.
//
// Galleries will not be tagged if studio is already set.
func GalleryStudios(s *models.Gallery, rw models.GalleryReaderWriter, studioReader models.StudioReader) error {
if s.StudioID.Valid {
// don't modify
return nil
}
t := getGalleryFileTagger(s)
return t.tagStudios(studioReader, func(subjectID, otherID int) (bool, error) {
return addGalleryStudio(rw, subjectID, otherID)
})
}
// GalleryTags tags the provided gallery with tags whose name matches the gallery's path.
func GalleryTags(s *models.Gallery, rw models.GalleryReaderWriter, tagReader models.TagReader) error {
t := getGalleryFileTagger(s)
return t.tagTags(tagReader, func(subjectID, otherID int) (bool, error) {
return gallery.AddTag(rw, subjectID, otherID)
})
}
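For orientation, a sketch (not part of this diff) of how these helpers could be combined for a single gallery inside one transaction; autoTagGallery is a hypothetical name, and the transaction helper mirrors the shape used in the integration test further down.
package autotag

import (
	"context"

	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/sqlite"
)

// autoTagGallery (hypothetical) runs all three gallery taggers for one gallery
// inside a single transaction.
func autoTagGallery(g *models.Gallery) error {
	txn := sqlite.NewTransactionManager()
	return txn.WithTxn(context.TODO(), func(r models.Repository) error {
		if err := GalleryPerformers(g, r.Gallery(), r.Performer()); err != nil {
			return err
		}
		if err := GalleryStudios(g, r.Gallery(), r.Studio()); err != nil {
			return err
		}
		return GalleryTags(g, r.Gallery(), r.Tag())
	})
}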

145
pkg/autotag/gallery_test.go Normal file
View file

@ -0,0 +1,145 @@
package autotag
import (
"testing"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
const galleryExt = "zip"
func TestGalleryPerformers(t *testing.T) {
const galleryID = 1
const performerName = "performer name"
const performerID = 2
performer := models.Performer{
ID: performerID,
Name: models.NullString(performerName),
}
const reversedPerformerName = "name performer"
const reversedPerformerID = 3
reversedPerformer := models.Performer{
ID: reversedPerformerID,
Name: models.NullString(reversedPerformerName),
}
testTables := generateTestTable(performerName, galleryExt)
assert := assert.New(t)
for _, test := range testTables {
mockPerformerReader := &mocks.PerformerReaderWriter{}
mockGalleryReader := &mocks.GalleryReaderWriter{}
mockPerformerReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once()
if test.Matches {
mockGalleryReader.On("GetPerformerIDs", galleryID).Return(nil, nil).Once()
mockGalleryReader.On("UpdatePerformers", galleryID, []int{performerID}).Return(nil).Once()
}
gallery := models.Gallery{
ID: galleryID,
Path: models.NullString(test.Path),
}
err := GalleryPerformers(&gallery, mockGalleryReader, mockPerformerReader)
assert.Nil(err)
mockPerformerReader.AssertExpectations(t)
mockGalleryReader.AssertExpectations(t)
}
}
func TestGalleryStudios(t *testing.T) {
const galleryID = 1
const studioName = "studio name"
const studioID = 2
studio := models.Studio{
ID: studioID,
Name: models.NullString(studioName),
}
const reversedStudioName = "name studio"
const reversedStudioID = 3
reversedStudio := models.Studio{
ID: reversedStudioID,
Name: models.NullString(reversedStudioName),
}
testTables := generateTestTable(studioName, galleryExt)
assert := assert.New(t)
for _, test := range testTables {
mockStudioReader := &mocks.StudioReaderWriter{}
mockGalleryReader := &mocks.GalleryReaderWriter{}
mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()
if test.Matches {
mockGalleryReader.On("Find", galleryID).Return(&models.Gallery{}, nil).Once()
expectedStudioID := models.NullInt64(studioID)
mockGalleryReader.On("UpdatePartial", models.GalleryPartial{
ID: galleryID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once()
}
gallery := models.Gallery{
ID: galleryID,
Path: models.NullString(test.Path),
}
err := GalleryStudios(&gallery, mockGalleryReader, mockStudioReader)
assert.Nil(err)
mockStudioReader.AssertExpectations(t)
mockGalleryReader.AssertExpectations(t)
}
}
func TestGalleryTags(t *testing.T) {
const galleryID = 1
const tagName = "tag name"
const tagID = 2
tag := models.Tag{
ID: tagID,
Name: tagName,
}
const reversedTagName = "name tag"
const reversedTagID = 3
reversedTag := models.Tag{
ID: reversedTagID,
Name: reversedTagName,
}
testTables := generateTestTable(tagName, galleryExt)
assert := assert.New(t)
for _, test := range testTables {
mockTagReader := &mocks.TagReaderWriter{}
mockGalleryReader := &mocks.GalleryReaderWriter{}
mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()
if test.Matches {
mockGalleryReader.On("GetTagIDs", galleryID).Return(nil, nil).Once()
mockGalleryReader.On("UpdateTags", galleryID, []int{tagID}).Return(nil).Once()
}
gallery := models.Gallery{
ID: galleryID,
Path: models.NullString(test.Path),
}
err := GalleryTags(&gallery, mockGalleryReader, mockTagReader)
assert.Nil(err)
mockTagReader.AssertExpectations(t)
mockGalleryReader.AssertExpectations(t)
}
}

117
pkg/autotag/image.go Normal file
View file

@ -0,0 +1,117 @@
package autotag
import (
"fmt"
"path/filepath"
"strings"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
)
func imagePathsFilter(paths []string) *models.ImageFilterType {
if paths == nil {
return nil
}
sep := string(filepath.Separator)
var ret *models.ImageFilterType
var or *models.ImageFilterType
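// chain each path criterion onto the previous one with OR so an image under any of the given paths matches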
for _, p := range paths {
newOr := &models.ImageFilterType{}
if or != nil {
or.Or = newOr
} else {
ret = newOr
}
or = newOr
if !strings.HasSuffix(p, sep) {
p = p + sep
}
or.Path = &models.StringCriterionInput{
Modifier: models.CriterionModifierEquals,
Value: p + "%",
}
}
return ret
}
func getMatchingImages(name string, paths []string, imageReader models.ImageReader) ([]*models.Image, error) {
regex := getPathQueryRegex(name)
organized := false
filter := models.ImageFilterType{
Path: &models.StringCriterionInput{
Value: "(?i)" + regex,
Modifier: models.CriterionModifierMatchesRegex,
},
Organized: &organized,
}
filter.And = imagePathsFilter(paths)
pp := models.PerPageAll
images, _, err := imageReader.Query(&filter, &models.FindFilterType{
PerPage: &pp,
})
if err != nil {
return nil, fmt.Errorf("error querying images with regex '%s': %s", regex, err.Error())
}
var ret []*models.Image
for _, p := range images {
if nameMatchesPath(name, p.Path) {
ret = append(ret, p)
}
}
return ret, nil
}
func getImageFileTagger(s *models.Image) tagger {
return tagger{
ID: s.ID,
Type: "image",
Name: s.GetTitle(),
Path: s.Path,
}
}
// ImagePerformers tags the provided image with performers whose name matches the image's path.
func ImagePerformers(s *models.Image, rw models.ImageReaderWriter, performerReader models.PerformerReader) error {
t := getImageFileTagger(s)
return t.tagPerformers(performerReader, func(subjectID, otherID int) (bool, error) {
return image.AddPerformer(rw, subjectID, otherID)
})
}
// ImageStudios tags the provided image with the first studio whose name matches the image's path.
//
// Images will not be tagged if studio is already set.
func ImageStudios(s *models.Image, rw models.ImageReaderWriter, studioReader models.StudioReader) error {
if s.StudioID.Valid {
// don't modify
return nil
}
t := getImageFileTagger(s)
return t.tagStudios(studioReader, func(subjectID, otherID int) (bool, error) {
return addImageStudio(rw, subjectID, otherID)
})
}
// ImageTags tags the provided image with tags whose name matches the image's path.
func ImageTags(s *models.Image, rw models.ImageReaderWriter, tagReader models.TagReader) error {
t := getImageFileTagger(s)
return t.tagTags(tagReader, func(subjectID, otherID int) (bool, error) {
return image.AddTag(rw, subjectID, otherID)
})
}

145
pkg/autotag/image_test.go Normal file
View file

@ -0,0 +1,145 @@
package autotag
import (
"testing"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
const imageExt = "jpg"
func TestImagePerformers(t *testing.T) {
const imageID = 1
const performerName = "performer name"
const performerID = 2
performer := models.Performer{
ID: performerID,
Name: models.NullString(performerName),
}
const reversedPerformerName = "name performer"
const reversedPerformerID = 3
reversedPerformer := models.Performer{
ID: reversedPerformerID,
Name: models.NullString(reversedPerformerName),
}
testTables := generateTestTable(performerName, imageExt)
assert := assert.New(t)
for _, test := range testTables {
mockPerformerReader := &mocks.PerformerReaderWriter{}
mockImageReader := &mocks.ImageReaderWriter{}
mockPerformerReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once()
if test.Matches {
mockImageReader.On("GetPerformerIDs", imageID).Return(nil, nil).Once()
mockImageReader.On("UpdatePerformers", imageID, []int{performerID}).Return(nil).Once()
}
image := models.Image{
ID: imageID,
Path: test.Path,
}
err := ImagePerformers(&image, mockImageReader, mockPerformerReader)
assert.Nil(err)
mockPerformerReader.AssertExpectations(t)
mockImageReader.AssertExpectations(t)
}
}
func TestImageStudios(t *testing.T) {
const imageID = 1
const studioName = "studio name"
const studioID = 2
studio := models.Studio{
ID: studioID,
Name: models.NullString(studioName),
}
const reversedStudioName = "name studio"
const reversedStudioID = 3
reversedStudio := models.Studio{
ID: reversedStudioID,
Name: models.NullString(reversedStudioName),
}
testTables := generateTestTable(studioName, imageExt)
assert := assert.New(t)
for _, test := range testTables {
mockStudioReader := &mocks.StudioReaderWriter{}
mockImageReader := &mocks.ImageReaderWriter{}
mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()
if test.Matches {
mockImageReader.On("Find", imageID).Return(&models.Image{}, nil).Once()
expectedStudioID := models.NullInt64(studioID)
mockImageReader.On("Update", models.ImagePartial{
ID: imageID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once()
}
image := models.Image{
ID: imageID,
Path: test.Path,
}
err := ImageStudios(&image, mockImageReader, mockStudioReader)
assert.Nil(err)
mockStudioReader.AssertExpectations(t)
mockImageReader.AssertExpectations(t)
}
}
func TestImageTags(t *testing.T) {
const imageID = 1
const tagName = "tag name"
const tagID = 2
tag := models.Tag{
ID: tagID,
Name: tagName,
}
const reversedTagName = "name tag"
const reversedTagID = 3
reversedTag := models.Tag{
ID: reversedTagID,
Name: reversedTagName,
}
testTables := generateTestTable(tagName, imageExt)
assert := assert.New(t)
for _, test := range testTables {
mockTagReader := &mocks.TagReaderWriter{}
mockImageReader := &mocks.ImageReaderWriter{}
mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()
if test.Matches {
mockImageReader.On("GetTagIDs", imageID).Return(nil, nil).Once()
mockImageReader.On("UpdateTags", imageID, []int{tagID}).Return(nil).Once()
}
image := models.Image{
ID: imageID,
Path: test.Path,
}
err := ImageTags(&image, mockImageReader, mockTagReader)
assert.Nil(err)
mockTagReader.AssertExpectations(t)
mockImageReader.AssertExpectations(t)
}
}

View file

@ -0,0 +1,784 @@
// +build integration
package autotag
import (
"context"
"database/sql"
"fmt"
"io/ioutil"
"os"
"testing"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sqlite"
"github.com/stashapp/stash/pkg/utils"
_ "github.com/golang-migrate/migrate/v4/database/sqlite3"
_ "github.com/golang-migrate/migrate/v4/source/file"
)
const testName = "Foo's Bar"
const existingStudioName = "ExistingStudio"
const existingStudioSceneName = testName + ".dontChangeStudio.mp4"
const existingStudioImageName = testName + ".dontChangeStudio.mp4"
const existingStudioGalleryName = testName + ".dontChangeStudio.mp4"
var existingStudioID int
func testTeardown(databaseFile string) {
err := database.DB.Close()
if err != nil {
panic(err)
}
err = os.Remove(databaseFile)
if err != nil {
panic(err)
}
}
func runTests(m *testing.M) int {
// create the database file
f, err := ioutil.TempFile("", "*.sqlite")
if err != nil {
panic(fmt.Sprintf("Could not create temporary file: %s", err.Error()))
}
f.Close()
databaseFile := f.Name()
database.Initialize(databaseFile)
// defer close and delete the database
defer testTeardown(databaseFile)
err = populateDB()
if err != nil {
panic(fmt.Sprintf("Could not populate database: %s", err.Error()))
} else {
// run the tests
return m.Run()
}
}
func TestMain(m *testing.M) {
ret := runTests(m)
os.Exit(ret)
}
func createPerformer(pqb models.PerformerWriter) error {
// create the performer
performer := models.Performer{
Checksum: testName,
Name: sql.NullString{Valid: true, String: testName},
Favorite: sql.NullBool{Valid: true, Bool: false},
}
_, err := pqb.Create(performer)
if err != nil {
return err
}
return nil
}
func createStudio(qb models.StudioWriter, name string) (*models.Studio, error) {
// create the studio
studio := models.Studio{
Checksum: name,
Name: sql.NullString{Valid: true, String: name},
}
return qb.Create(studio)
}
func createTag(qb models.TagWriter) error {
// create the tag
tag := models.Tag{
Name: testName,
}
_, err := qb.Create(tag)
if err != nil {
return err
}
return nil
}
func createScenes(sqb models.SceneReaderWriter) error {
// create the scenes
scenePatterns, falseScenePatterns := generateTestPaths(testName, sceneExt)
for _, fn := range scenePatterns {
err := createScene(sqb, makeScene(fn, true))
if err != nil {
return err
}
}
for _, fn := range falseScenePatterns {
err := createScene(sqb, makeScene(fn, false))
if err != nil {
return err
}
}
// add organized scenes
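// these should never be matched, since the auto tagger only queries unorganized objects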
for _, fn := range scenePatterns {
s := makeScene("organized"+fn, false)
s.Organized = true
err := createScene(sqb, s)
if err != nil {
return err
}
}
// create scene with existing studio id
studioScene := makeScene(existingStudioSceneName, true)
studioScene.StudioID = sql.NullInt64{Valid: true, Int64: int64(existingStudioID)}
err := createScene(sqb, studioScene)
if err != nil {
return err
}
return nil
}
func makeScene(name string, expectedResult bool) *models.Scene {
scene := &models.Scene{
Checksum: sql.NullString{String: utils.MD5FromString(name), Valid: true},
Path: name,
}
// if expectedResult is true then we expect it to match, set the title accordingly
if expectedResult {
scene.Title = sql.NullString{Valid: true, String: name}
}
return scene
}
func createScene(sqb models.SceneWriter, scene *models.Scene) error {
_, err := sqb.Create(*scene)
if err != nil {
return fmt.Errorf("Failed to create scene with name '%s': %s", scene.Path, err.Error())
}
return nil
}
func createImages(sqb models.ImageReaderWriter) error {
// create the images
imagePatterns, falseImagePatterns := generateTestPaths(testName, imageExt)
for _, fn := range imagePatterns {
err := createImage(sqb, makeImage(fn, true))
if err != nil {
return err
}
}
for _, fn := range falseImagePatterns {
err := createImage(sqb, makeImage(fn, false))
if err != nil {
return err
}
}
// add organized images
for _, fn := range imagePatterns {
s := makeImage("organized"+fn, false)
s.Organized = true
err := createImage(sqb, s)
if err != nil {
return err
}
}
// create image with existing studio id
studioImage := makeImage(existingStudioImageName, true)
studioImage.StudioID = sql.NullInt64{Valid: true, Int64: int64(existingStudioID)}
err := createImage(sqb, studioImage)
if err != nil {
return err
}
return nil
}
func makeImage(name string, expectedResult bool) *models.Image {
image := &models.Image{
Checksum: utils.MD5FromString(name),
Path: name,
}
// if expectedResult is true then we expect it to match, set the title accordingly
if expectedResult {
image.Title = sql.NullString{Valid: true, String: name}
}
return image
}
func createImage(sqb models.ImageWriter, image *models.Image) error {
_, err := sqb.Create(*image)
if err != nil {
return fmt.Errorf("Failed to create image with name '%s': %s", image.Path, err.Error())
}
return nil
}
func createGalleries(sqb models.GalleryReaderWriter) error {
// create the galleries
galleryPatterns, falseGalleryPatterns := generateTestPaths(testName, galleryExt)
for _, fn := range galleryPatterns {
err := createGallery(sqb, makeGallery(fn, true))
if err != nil {
return err
}
}
for _, fn := range falseGalleryPatterns {
err := createGallery(sqb, makeGallery(fn, false))
if err != nil {
return err
}
}
// add organized galleries
for _, fn := range galleryPatterns {
s := makeGallery("organized"+fn, false)
s.Organized = true
err := createGallery(sqb, s)
if err != nil {
return err
}
}
// create gallery with existing studio id
studioGallery := makeGallery(existingStudioGalleryName, true)
studioGallery.StudioID = sql.NullInt64{Valid: true, Int64: int64(existingStudioID)}
err := createGallery(sqb, studioGallery)
if err != nil {
return err
}
return nil
}
func makeGallery(name string, expectedResult bool) *models.Gallery {
gallery := &models.Gallery{
Checksum: utils.MD5FromString(name),
Path: models.NullString(name),
}
// if expectedResult is true then we expect it to match, set the title accordingly
if expectedResult {
gallery.Title = sql.NullString{Valid: true, String: name}
}
return gallery
}
func createGallery(sqb models.GalleryWriter, gallery *models.Gallery) error {
_, err := sqb.Create(*gallery)
if err != nil {
return fmt.Errorf("Failed to create gallery with name '%s': %s", gallery.Path.String, err.Error())
}
return nil
}
func withTxn(f func(r models.Repository) error) error {
t := sqlite.NewTransactionManager()
return t.WithTxn(context.TODO(), f)
}
func populateDB() error {
if err := withTxn(func(r models.Repository) error {
err := createPerformer(r.Performer())
if err != nil {
return err
}
_, err = createStudio(r.Studio(), testName)
if err != nil {
return err
}
// create existing studio
existingStudio, err := createStudio(r.Studio(), existingStudioName)
if err != nil {
return err
}
existingStudioID = existingStudio.ID
err = createTag(r.Tag())
if err != nil {
return err
}
err = createScenes(r.Scene())
if err != nil {
return err
}
err = createImages(r.Image())
if err != nil {
return err
}
err = createGalleries(r.Gallery())
if err != nil {
return err
}
return nil
}); err != nil {
return err
}
return nil
}
func TestParsePerformerScenes(t *testing.T) {
var performers []*models.Performer
if err := withTxn(func(r models.Repository) error {
var err error
performers, err = r.Performer().All()
return err
}); err != nil {
t.Errorf("Error getting performer: %s", err)
return
}
for _, p := range performers {
if err := withTxn(func(r models.Repository) error {
return PerformerScenes(p, nil, r.Scene())
}); err != nil {
t.Errorf("Error auto-tagging performers: %s", err)
}
}
// verify that scenes were tagged correctly
withTxn(func(r models.Repository) error {
pqb := r.Performer()
scenes, err := r.Scene().All()
if err != nil {
t.Error(err.Error())
}
for _, scene := range scenes {
performers, err := pqb.FindBySceneID(scene.ID)
if err != nil {
t.Errorf("Error getting scene performers: %s", err.Error())
}
// title is only set on scenes where we expect performer to be set
if scene.Title.String == scene.Path && len(performers) == 0 {
t.Errorf("Did not set performer '%s' for path '%s'", testName, scene.Path)
} else if scene.Title.String != scene.Path && len(performers) > 0 {
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, scene.Path)
}
}
return nil
})
}
func TestParseStudioScenes(t *testing.T) {
var studios []*models.Studio
if err := withTxn(func(r models.Repository) error {
var err error
studios, err = r.Studio().All()
return err
}); err != nil {
t.Errorf("Error getting studio: %s", err)
return
}
for _, s := range studios {
if err := withTxn(func(r models.Repository) error {
return StudioScenes(s, nil, r.Scene())
}); err != nil {
t.Errorf("Error auto-tagging performers: %s", err)
}
}
// verify that scenes were tagged correctly
withTxn(func(r models.Repository) error {
scenes, err := r.Scene().All()
if err != nil {
t.Error(err.Error())
}
for _, scene := range scenes {
// check for existing studio id scene first
if scene.Path == existingStudioSceneName {
if scene.StudioID.Int64 != int64(existingStudioID) {
t.Error("Incorrectly overwrote studio ID for scene with existing studio ID")
}
} else {
// title is only set on scenes where we expect studio to be set
if scene.Title.String == scene.Path {
if !scene.StudioID.Valid {
t.Errorf("Did not set studio '%s' for path '%s'", testName, scene.Path)
} else if scene.StudioID.Int64 != int64(studios[1].ID) {
t.Errorf("Incorrect studio id %d set for path '%s'", scene.StudioID.Int64, scene.Path)
}
} else if scene.Title.String != scene.Path && scene.StudioID.Int64 == int64(studios[1].ID) {
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, scene.Path)
}
}
}
return nil
})
}
func TestParseTagScenes(t *testing.T) {
var tags []*models.Tag
if err := withTxn(func(r models.Repository) error {
var err error
tags, err = r.Tag().All()
return err
}); err != nil {
t.Errorf("Error getting performer: %s", err)
return
}
for _, s := range tags {
if err := withTxn(func(r models.Repository) error {
return TagScenes(s, nil, r.Scene())
}); err != nil {
t.Errorf("Error auto-tagging performers: %s", err)
}
}
// verify that scenes were tagged correctly
withTxn(func(r models.Repository) error {
scenes, err := r.Scene().All()
if err != nil {
t.Error(err.Error())
}
tqb := r.Tag()
for _, scene := range scenes {
tags, err := tqb.FindBySceneID(scene.ID)
if err != nil {
t.Errorf("Error getting scene tags: %s", err.Error())
}
// title is only set on scenes where we expect tags to be set
if scene.Title.String == scene.Path && len(tags) == 0 {
t.Errorf("Did not set tag '%s' for path '%s'", testName, scene.Path)
} else if scene.Title.String != scene.Path && len(tags) > 0 {
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, scene.Path)
}
}
return nil
})
}
func TestParsePerformerImages(t *testing.T) {
var performers []*models.Performer
if err := withTxn(func(r models.Repository) error {
var err error
performers, err = r.Performer().All()
return err
}); err != nil {
t.Errorf("Error getting performer: %s", err)
return
}
for _, p := range performers {
if err := withTxn(func(r models.Repository) error {
return PerformerImages(p, nil, r.Image())
}); err != nil {
t.Errorf("Error auto-tagging performers: %s", err)
}
}
// verify that images were tagged correctly
withTxn(func(r models.Repository) error {
pqb := r.Performer()
images, err := r.Image().All()
if err != nil {
t.Error(err.Error())
}
for _, image := range images {
performers, err := pqb.FindByImageID(image.ID)
if err != nil {
t.Errorf("Error getting image performers: %s", err.Error())
}
// title is only set on images where we expect performer to be set
if image.Title.String == image.Path && len(performers) == 0 {
t.Errorf("Did not set performer '%s' for path '%s'", testName, image.Path)
} else if image.Title.String != image.Path && len(performers) > 0 {
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, image.Path)
}
}
return nil
})
}
func TestParseStudioImages(t *testing.T) {
var studios []*models.Studio
if err := withTxn(func(r models.Repository) error {
var err error
studios, err = r.Studio().All()
return err
}); err != nil {
t.Errorf("Error getting studio: %s", err)
return
}
for _, s := range studios {
if err := withTxn(func(r models.Repository) error {
return StudioImages(s, nil, r.Image())
}); err != nil {
t.Errorf("Error auto-tagging performers: %s", err)
}
}
// verify that images were tagged correctly
withTxn(func(r models.Repository) error {
images, err := r.Image().All()
if err != nil {
t.Error(err.Error())
}
for _, image := range images {
// check for existing studio id image first
if image.Path == existingStudioImageName {
if image.StudioID.Int64 != int64(existingStudioID) {
t.Error("Incorrectly overwrote studio ID for image with existing studio ID")
}
} else {
// title is only set on images where we expect studio to be set
if image.Title.String == image.Path {
if !image.StudioID.Valid {
t.Errorf("Did not set studio '%s' for path '%s'", testName, image.Path)
} else if image.StudioID.Int64 != int64(studios[1].ID) {
t.Errorf("Incorrect studio id %d set for path '%s'", image.StudioID.Int64, image.Path)
}
} else if image.Title.String != image.Path && image.StudioID.Int64 == int64(studios[1].ID) {
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, image.Path)
}
}
}
return nil
})
}
func TestParseTagImages(t *testing.T) {
var tags []*models.Tag
if err := withTxn(func(r models.Repository) error {
var err error
tags, err = r.Tag().All()
return err
}); err != nil {
t.Errorf("Error getting performer: %s", err)
return
}
for _, s := range tags {
if err := withTxn(func(r models.Repository) error {
return TagImages(s, nil, r.Image())
}); err != nil {
t.Errorf("Error auto-tagging performers: %s", err)
}
}
// verify that images were tagged correctly
withTxn(func(r models.Repository) error {
images, err := r.Image().All()
if err != nil {
t.Error(err.Error())
}
tqb := r.Tag()
for _, image := range images {
tags, err := tqb.FindByImageID(image.ID)
if err != nil {
t.Errorf("Error getting image tags: %s", err.Error())
}
// title is only set on images where we expect tag to be set
if image.Title.String == image.Path && len(tags) == 0 {
t.Errorf("Did not set tag '%s' for path '%s'", testName, image.Path)
} else if image.Title.String != image.Path && len(tags) > 0 {
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, image.Path)
}
}
return nil
})
}
func TestParsePerformerGalleries(t *testing.T) {
var performers []*models.Performer
if err := withTxn(func(r models.Repository) error {
var err error
performers, err = r.Performer().All()
return err
}); err != nil {
t.Errorf("Error getting performer: %s", err)
return
}
for _, p := range performers {
if err := withTxn(func(r models.Repository) error {
return PerformerGalleries(p, nil, r.Gallery())
}); err != nil {
t.Errorf("Error auto-tagging performers: %s", err)
}
}
// verify that galleries were tagged correctly
withTxn(func(r models.Repository) error {
pqb := r.Performer()
galleries, err := r.Gallery().All()
if err != nil {
t.Error(err.Error())
}
for _, gallery := range galleries {
performers, err := pqb.FindByGalleryID(gallery.ID)
if err != nil {
t.Errorf("Error getting gallery performers: %s", err.Error())
}
// title is only set on galleries where we expect performer to be set
if gallery.Title.String == gallery.Path.String && len(performers) == 0 {
t.Errorf("Did not set performer '%s' for path '%s'", testName, gallery.Path.String)
} else if gallery.Title.String != gallery.Path.String && len(performers) > 0 {
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, gallery.Path.String)
}
}
return nil
})
}
func TestParseStudioGalleries(t *testing.T) {
var studios []*models.Studio
if err := withTxn(func(r models.Repository) error {
var err error
studios, err = r.Studio().All()
return err
}); err != nil {
t.Errorf("Error getting studio: %s", err)
return
}
for _, s := range studios {
if err := withTxn(func(r models.Repository) error {
return StudioGalleries(s, nil, r.Gallery())
}); err != nil {
t.Errorf("Error auto-tagging performers: %s", err)
}
}
// verify that galleries were tagged correctly
withTxn(func(r models.Repository) error {
galleries, err := r.Gallery().All()
if err != nil {
t.Error(err.Error())
}
for _, gallery := range galleries {
// check for existing studio id gallery first
if gallery.Path.String == existingStudioGalleryName {
if gallery.StudioID.Int64 != int64(existingStudioID) {
t.Error("Incorrectly overwrote studio ID for gallery with existing studio ID")
}
} else {
// title is only set on galleries where we expect studio to be set
if gallery.Title.String == gallery.Path.String {
if !gallery.StudioID.Valid {
t.Errorf("Did not set studio '%s' for path '%s'", testName, gallery.Path.String)
} else if gallery.StudioID.Int64 != int64(studios[1].ID) {
t.Errorf("Incorrect studio id %d set for path '%s'", gallery.StudioID.Int64, gallery.Path.String)
}
} else if gallery.Title.String != gallery.Path.String && gallery.StudioID.Int64 == int64(studios[1].ID) {
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, gallery.Path.String)
}
}
}
return nil
})
}
func TestParseTagGalleries(t *testing.T) {
var tags []*models.Tag
if err := withTxn(func(r models.Repository) error {
var err error
tags, err = r.Tag().All()
return err
}); err != nil {
t.Errorf("Error getting performer: %s", err)
return
}
for _, s := range tags {
if err := withTxn(func(r models.Repository) error {
return TagGalleries(s, nil, r.Gallery())
}); err != nil {
t.Errorf("Error auto-tagging performers: %s", err)
}
}
// verify that galleries were tagged correctly
withTxn(func(r models.Repository) error {
galleries, err := r.Gallery().All()
if err != nil {
t.Error(err.Error())
}
tqb := r.Tag()
for _, gallery := range galleries {
tags, err := tqb.FindByGalleryID(gallery.ID)
if err != nil {
t.Errorf("Error getting gallery tags: %s", err.Error())
}
// title is only set on galleries where we expect tag to be set
if gallery.Title.String == gallery.Path.String && len(tags) == 0 {
t.Errorf("Did not set tag '%s' for path '%s'", testName, gallery.Path.String)
} else if gallery.Title.String != gallery.Path.String && len(tags) > 0 {
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, gallery.Path.String)
}
}
return nil
})
}

62
pkg/autotag/performer.go Normal file
View file

@ -0,0 +1,62 @@
package autotag
import (
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
)
func getMatchingPerformers(path string, performerReader models.PerformerReader) ([]*models.Performer, error) {
words := getPathWords(path)
performers, err := performerReader.QueryForAutoTag(words)
if err != nil {
return nil, err
}
var ret []*models.Performer
for _, p := range performers {
// TODO - commenting out alias handling until both sides work correctly
if nameMatchesPath(p.Name.String, path) { // || nameMatchesPath(p.Aliases.String, path) {
ret = append(ret, p)
}
}
return ret, nil
}
func getPerformerTagger(p *models.Performer) tagger {
return tagger{
ID: p.ID,
Type: "performer",
Name: p.Name.String,
}
}
// PerformerScenes searches for scenes whose path matches the provided performer name and tags the scene with the performer.
func PerformerScenes(p *models.Performer, paths []string, rw models.SceneReaderWriter) error {
t := getPerformerTagger(p)
return t.tagScenes(paths, rw, func(subjectID, otherID int) (bool, error) {
return scene.AddPerformer(rw, otherID, subjectID)
})
}
// PerformerImages searches for images whose path matches the provided performer name and tags the image with the performer.
func PerformerImages(p *models.Performer, paths []string, rw models.ImageReaderWriter) error {
t := getPerformerTagger(p)
return t.tagImages(paths, rw, func(subjectID, otherID int) (bool, error) {
return image.AddPerformer(rw, otherID, subjectID)
})
}
// PerformerGalleries searches for galleries whose path matches the provided performer name and tags the gallery with the performer.
func PerformerGalleries(p *models.Performer, paths []string, rw models.GalleryReaderWriter) error {
t := getPerformerTagger(p)
return t.tagGalleries(paths, rw, func(subjectID, otherID int) (bool, error) {
return gallery.AddPerformer(rw, otherID, subjectID)
})
}
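A minimal usage sketch, not part of this file, assuming the withTxn helper and the models.Repository accessors used in the tests above: each performer is run through the three helpers inside one transaction, with nil paths meaning no path restriction (all unorganized items are considered).
func autoTagPerformers(performers []*models.Performer) error {
	return withTxn(func(r models.Repository) error {
		for _, p := range performers {
			// nil paths: consider every unorganized scene/image/gallery
			if err := PerformerScenes(p, nil, r.Scene()); err != nil {
				return err
			}
			if err := PerformerImages(p, nil, r.Image()); err != nil {
				return err
			}
			if err := PerformerGalleries(p, nil, r.Gallery()); err != nil {
				return err
			}
		}
		return nil
	})
}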

225
pkg/autotag/performer_test.go Normal file
View file

@ -0,0 +1,225 @@
package autotag
import (
"testing"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert"
)
func TestPerformerScenes(t *testing.T) {
type test struct {
performerName string
expectedRegex string
}
performerNames := []test{
{
"performer name",
`(?i)(?:^|_|[^\w\d])performer[.\-_ ]*name(?:$|_|[^\w\d])`,
},
{
"performer + name",
`(?i)(?:^|_|[^\w\d])performer[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
},
}
for _, p := range performerNames {
testPerformerScenes(t, p.performerName, p.expectedRegex)
}
}
func testPerformerScenes(t *testing.T, performerName, expectedRegex string) {
mockSceneReader := &mocks.SceneReaderWriter{}
const performerID = 2
var scenes []*models.Scene
matchingPaths, falsePaths := generateTestPaths(performerName, "mp4")
for i, p := range append(matchingPaths, falsePaths...) {
scenes = append(scenes, &models.Scene{
ID: i + 1,
Path: p,
})
}
performer := models.Performer{
ID: performerID,
Name: models.NullString(performerName),
}
organized := false
perPage := models.PerPageAll
expectedSceneFilter := &models.SceneFilterType{
Organized: &organized,
Path: &models.StringCriterionInput{
Value: expectedRegex,
Modifier: models.CriterionModifierMatchesRegex,
},
}
expectedFindFilter := &models.FindFilterType{
PerPage: &perPage,
}
mockSceneReader.On("Query", expectedSceneFilter, expectedFindFilter).Return(scenes, len(scenes), nil).Once()
for i := range matchingPaths {
sceneID := i + 1
mockSceneReader.On("GetPerformerIDs", sceneID).Return(nil, nil).Once()
mockSceneReader.On("UpdatePerformers", sceneID, []int{performerID}).Return(nil).Once()
}
err := PerformerScenes(&performer, nil, mockSceneReader)
assert := assert.New(t)
assert.Nil(err)
mockSceneReader.AssertExpectations(t)
}
func TestPerformerImages(t *testing.T) {
type test struct {
performerName string
expectedRegex string
}
performerNames := []test{
{
"performer name",
`(?i)(?:^|_|[^\w\d])performer[.\-_ ]*name(?:$|_|[^\w\d])`,
},
{
"performer + name",
`(?i)(?:^|_|[^\w\d])performer[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
},
}
for _, p := range performerNames {
testPerformerImages(t, p.performerName, p.expectedRegex)
}
}
func testPerformerImages(t *testing.T, performerName, expectedRegex string) {
mockImageReader := &mocks.ImageReaderWriter{}
const performerID = 2
var images []*models.Image
matchingPaths, falsePaths := generateTestPaths(performerName, imageExt)
for i, p := range append(matchingPaths, falsePaths...) {
images = append(images, &models.Image{
ID: i + 1,
Path: p,
})
}
performer := models.Performer{
ID: performerID,
Name: models.NullString(performerName),
}
organized := false
perPage := models.PerPageAll
expectedImageFilter := &models.ImageFilterType{
Organized: &organized,
Path: &models.StringCriterionInput{
Value: expectedRegex,
Modifier: models.CriterionModifierMatchesRegex,
},
}
expectedFindFilter := &models.FindFilterType{
PerPage: &perPage,
}
mockImageReader.On("Query", expectedImageFilter, expectedFindFilter).Return(images, len(images), nil).Once()
for i := range matchingPaths {
imageID := i + 1
mockImageReader.On("GetPerformerIDs", imageID).Return(nil, nil).Once()
mockImageReader.On("UpdatePerformers", imageID, []int{performerID}).Return(nil).Once()
}
err := PerformerImages(&performer, nil, mockImageReader)
assert := assert.New(t)
assert.Nil(err)
mockImageReader.AssertExpectations(t)
}
func TestPerformerGalleries(t *testing.T) {
type test struct {
performerName string
expectedRegex string
}
performerNames := []test{
{
"performer name",
`(?i)(?:^|_|[^\w\d])performer[.\-_ ]*name(?:$|_|[^\w\d])`,
},
{
"performer + name",
`(?i)(?:^|_|[^\w\d])performer[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
},
}
for _, p := range performerNames {
testPerformerGalleries(t, p.performerName, p.expectedRegex)
}
}
func testPerformerGalleries(t *testing.T, performerName, expectedRegex string) {
mockGalleryReader := &mocks.GalleryReaderWriter{}
const performerID = 2
var galleries []*models.Gallery
matchingPaths, falsePaths := generateTestPaths(performerName, galleryExt)
for i, p := range append(matchingPaths, falsePaths...) {
galleries = append(galleries, &models.Gallery{
ID: i + 1,
Path: models.NullString(p),
})
}
performer := models.Performer{
ID: performerID,
Name: models.NullString(performerName),
}
organized := false
perPage := models.PerPageAll
expectedGalleryFilter := &models.GalleryFilterType{
Organized: &organized,
Path: &models.StringCriterionInput{
Value: expectedRegex,
Modifier: models.CriterionModifierMatchesRegex,
},
}
expectedFindFilter := &models.FindFilterType{
PerPage: &perPage,
}
mockGalleryReader.On("Query", expectedGalleryFilter, expectedFindFilter).Return(galleries, len(galleries), nil).Once()
for i := range matchingPaths {
galleryID := i + 1
mockGalleryReader.On("GetPerformerIDs", galleryID).Return(nil, nil).Once()
mockGalleryReader.On("UpdatePerformers", galleryID, []int{performerID}).Return(nil).Once()
}
err := PerformerGalleries(&performer, nil, mockGalleryReader)
assert := assert.New(t)
assert.Nil(err)
mockGalleryReader.AssertExpectations(t)
}

117
pkg/autotag/scene.go Normal file
View file

@ -0,0 +1,117 @@
package autotag
import (
"fmt"
"path/filepath"
"strings"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
)
func scenePathsFilter(paths []string) *models.SceneFilterType {
if paths == nil {
return nil
}
sep := string(filepath.Separator)
var ret *models.SceneFilterType
var or *models.SceneFilterType
for _, p := range paths {
newOr := &models.SceneFilterType{}
if or != nil {
or.Or = newOr
} else {
ret = newOr
}
or = newOr
if !strings.HasSuffix(p, sep) {
p = p + sep
}
or.Path = &models.StringCriterionInput{
Modifier: models.CriterionModifierEquals,
Value: p + "%",
}
}
return ret
}
func getMatchingScenes(name string, paths []string, sceneReader models.SceneReader) ([]*models.Scene, error) {
regex := getPathQueryRegex(name)
organized := false
filter := models.SceneFilterType{
Path: &models.StringCriterionInput{
Value: "(?i)" + regex,
Modifier: models.CriterionModifierMatchesRegex,
},
Organized: &organized,
}
filter.And = scenePathsFilter(paths)
pp := models.PerPageAll
scenes, _, err := sceneReader.Query(&filter, &models.FindFilterType{
PerPage: &pp,
})
if err != nil {
return nil, fmt.Errorf("error querying scenes with regex '%s': %s", regex, err.Error())
}
var ret []*models.Scene
for _, p := range scenes {
if nameMatchesPath(name, p.Path) {
ret = append(ret, p)
}
}
return ret, nil
}
func getSceneFileTagger(s *models.Scene) tagger {
return tagger{
ID: s.ID,
Type: "scene",
Name: s.GetTitle(),
Path: s.Path,
}
}
// ScenePerformers tags the provided scene with performers whose name matches the scene's path.
func ScenePerformers(s *models.Scene, rw models.SceneReaderWriter, performerReader models.PerformerReader) error {
t := getSceneFileTagger(s)
return t.tagPerformers(performerReader, func(subjectID, otherID int) (bool, error) {
return scene.AddPerformer(rw, subjectID, otherID)
})
}
// SceneStudios tags the provided scene with the first studio whose name matches the scene's path.
//
// Scenes will not be tagged if studio is already set.
func SceneStudios(s *models.Scene, rw models.SceneReaderWriter, studioReader models.StudioReader) error {
if s.StudioID.Valid {
// don't modify
return nil
}
t := getSceneFileTagger(s)
return t.tagStudios(studioReader, func(subjectID, otherID int) (bool, error) {
return addSceneStudio(rw, subjectID, otherID)
})
}
// SceneTags tags the provided scene with tags whose name matches the scene's path.
func SceneTags(s *models.Scene, rw models.SceneReaderWriter, tagReader models.TagReader) error {
t := getSceneFileTagger(s)
return t.tagTags(tagReader, func(subjectID, otherID int) (bool, error) {
return scene.AddTag(rw, subjectID, otherID)
})
}
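To illustrate the shape of the filter that scenePathsFilter produces (hypothetical paths, Unix path separator assumed), two input paths become a chain of Or'd Equals criteria on the directory prefix plus a trailing '%':
f := scenePathsFilter([]string{"/videos/a", "/videos/b"})
// f is equivalent to:
// &models.SceneFilterType{
// 	Path: &models.StringCriterionInput{Modifier: models.CriterionModifierEquals, Value: "/videos/a/%"},
// 	Or: &models.SceneFilterType{
// 		Path: &models.StringCriterionInput{Modifier: models.CriterionModifierEquals, Value: "/videos/b/%"},
// 	},
// }
// getMatchingScenes then ANDs this onto the case-insensitive name regex and
// the organized == false restriction before querying.
_ = f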

278
pkg/autotag/scene_test.go Normal file
View file

@ -0,0 +1,278 @@
package autotag
import (
"fmt"
"strings"
"testing"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
const sceneExt = "mp4"
var testSeparators = []string{
".",
"-",
"_",
" ",
}
var testEndSeparators = []string{
"{",
"}",
"(",
")",
",",
}
func generateNamePatterns(name, separator, ext string) []string {
var ret []string
ret = append(ret, fmt.Sprintf("%s%saaa.%s", name, separator, ext))
ret = append(ret, fmt.Sprintf("aaa%s%s.%s", separator, name, ext))
ret = append(ret, fmt.Sprintf("aaa%s%s%sbbb.%s", separator, name, separator, ext))
ret = append(ret, fmt.Sprintf("dir/%s%saaa.%s", name, separator, ext))
ret = append(ret, fmt.Sprintf("dir\\%s%saaa.%s", name, separator, ext))
ret = append(ret, fmt.Sprintf("%s%saaa/dir/bbb.%s", name, separator, ext))
ret = append(ret, fmt.Sprintf("%s%saaa\\dir\\bbb.%s", name, separator, ext))
ret = append(ret, fmt.Sprintf("dir/%s%s/aaa.%s", name, separator, ext))
ret = append(ret, fmt.Sprintf("dir\\%s%s\\aaa.%s", name, separator, ext))
return ret
}
func generateSplitNamePatterns(name, separator, ext string) []string {
var ret []string
splitted := strings.Split(name, " ")
// only do this for names that are split into two
if len(splitted) == 2 {
ret = append(ret, fmt.Sprintf("%s%s%s.%s", splitted[0], separator, splitted[1], ext))
}
return ret
}
func generateFalseNamePatterns(name string, separator, ext string) []string {
splitted := strings.Split(name, " ")
var ret []string
// only do this for names that are split into two
if len(splitted) == 2 {
ret = append(ret, fmt.Sprintf("%s%saaa%s%s.%s", splitted[0], separator, separator, splitted[1], ext))
}
return ret
}
func generateTestPaths(testName, ext string) (scenePatterns []string, falseScenePatterns []string) {
separators := append(testSeparators, testEndSeparators...)
for _, separator := range separators {
scenePatterns = append(scenePatterns, generateNamePatterns(testName, separator, ext)...)
scenePatterns = append(scenePatterns, generateNamePatterns(strings.ToLower(testName), separator, ext)...)
scenePatterns = append(scenePatterns, generateNamePatterns(strings.ReplaceAll(testName, " ", ""), separator, ext)...)
falseScenePatterns = append(falseScenePatterns, generateFalseNamePatterns(testName, separator, ext)...)
}
// add test cases for intra-name separators
for _, separator := range testSeparators {
if separator != " " {
scenePatterns = append(scenePatterns, generateNamePatterns(strings.Replace(testName, " ", separator, -1), separator, ext)...)
}
}
// add basic false scenarios
falseScenePatterns = append(falseScenePatterns, fmt.Sprintf("aaa%s.%s", testName, ext))
falseScenePatterns = append(falseScenePatterns, fmt.Sprintf("%saaa.%s", testName, ext))
// add path separator false scenarios
falseScenePatterns = append(falseScenePatterns, generateFalseNamePatterns(testName, "/", ext)...)
falseScenePatterns = append(falseScenePatterns, generateFalseNamePatterns(testName, "\\", ext)...)
// split patterns only valid for ._- and whitespace
for _, separator := range testSeparators {
scenePatterns = append(scenePatterns, generateSplitNamePatterns(testName, separator, ext)...)
}
// false patterns for other separators
for _, separator := range testEndSeparators {
falseScenePatterns = append(falseScenePatterns, generateSplitNamePatterns(testName, separator, ext)...)
}
return
}
type pathTestTable struct {
Path string
Matches bool
}
func generateTestTable(testName, ext string) []pathTestTable {
var ret []pathTestTable
var scenePatterns []string
var falseScenePatterns []string
separators := append(testSeparators, testEndSeparators...)
for _, separator := range separators {
scenePatterns = append(scenePatterns, generateNamePatterns(testName, separator, ext)...)
scenePatterns = append(scenePatterns, generateNamePatterns(strings.ToLower(testName), separator, ext)...)
falseScenePatterns = append(falseScenePatterns, generateFalseNamePatterns(testName, separator, ext)...)
}
for _, p := range scenePatterns {
t := pathTestTable{
Path: p,
Matches: true,
}
ret = append(ret, t)
}
for _, p := range falseScenePatterns {
t := pathTestTable{
Path: p,
Matches: false,
}
ret = append(ret, t)
}
return ret
}
func TestScenePerformers(t *testing.T) {
const sceneID = 1
const performerName = "performer name"
const performerID = 2
performer := models.Performer{
ID: performerID,
Name: models.NullString(performerName),
}
const reversedPerformerName = "name performer"
const reversedPerformerID = 3
reversedPerformer := models.Performer{
ID: reversedPerformerID,
Name: models.NullString(reversedPerformerName),
}
testTables := generateTestTable(performerName, sceneExt)
assert := assert.New(t)
for _, test := range testTables {
mockPerformerReader := &mocks.PerformerReaderWriter{}
mockSceneReader := &mocks.SceneReaderWriter{}
mockPerformerReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once()
if test.Matches {
mockSceneReader.On("GetPerformerIDs", sceneID).Return(nil, nil).Once()
mockSceneReader.On("UpdatePerformers", sceneID, []int{performerID}).Return(nil).Once()
}
scene := models.Scene{
ID: sceneID,
Path: test.Path,
}
err := ScenePerformers(&scene, mockSceneReader, mockPerformerReader)
assert.Nil(err)
mockPerformerReader.AssertExpectations(t)
mockSceneReader.AssertExpectations(t)
}
}
func TestSceneStudios(t *testing.T) {
const sceneID = 1
const studioName = "studio name"
const studioID = 2
studio := models.Studio{
ID: studioID,
Name: models.NullString(studioName),
}
const reversedStudioName = "name studio"
const reversedStudioID = 3
reversedStudio := models.Studio{
ID: reversedStudioID,
Name: models.NullString(reversedStudioName),
}
testTables := generateTestTable(studioName, sceneExt)
assert := assert.New(t)
for _, test := range testTables {
mockStudioReader := &mocks.StudioReaderWriter{}
mockSceneReader := &mocks.SceneReaderWriter{}
mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()
if test.Matches {
mockSceneReader.On("Find", sceneID).Return(&models.Scene{}, nil).Once()
expectedStudioID := models.NullInt64(studioID)
mockSceneReader.On("Update", models.ScenePartial{
ID: sceneID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once()
}
scene := models.Scene{
ID: sceneID,
Path: test.Path,
}
err := SceneStudios(&scene, mockSceneReader, mockStudioReader)
assert.Nil(err)
mockStudioReader.AssertExpectations(t)
mockSceneReader.AssertExpectations(t)
}
}
func TestSceneTags(t *testing.T) {
const sceneID = 1
const tagName = "tag name"
const tagID = 2
tag := models.Tag{
ID: tagID,
Name: tagName,
}
const reversedTagName = "name tag"
const reversedTagID = 3
reversedTag := models.Tag{
ID: reversedTagID,
Name: reversedTagName,
}
testTables := generateTestTable(tagName, sceneExt)
assert := assert.New(t)
for _, test := range testTables {
mockTagReader := &mocks.TagReaderWriter{}
mockSceneReader := &mocks.SceneReaderWriter{}
mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()
if test.Matches {
mockSceneReader.On("GetTagIDs", sceneID).Return(nil, nil).Once()
mockSceneReader.On("UpdateTags", sceneID, []int{tagID}).Return(nil).Once()
}
scene := models.Scene{
ID: sceneID,
Path: test.Path,
}
err := SceneTags(&scene, mockSceneReader, mockTagReader)
assert.Nil(err)
mockTagReader.AssertExpectations(t)
mockSceneReader.AssertExpectations(t)
}
}

132
pkg/autotag/studio.go Normal file
View file

@ -0,0 +1,132 @@
package autotag
import (
"database/sql"
"github.com/stashapp/stash/pkg/models"
)
func getMatchingStudios(path string, reader models.StudioReader) ([]*models.Studio, error) {
words := getPathWords(path)
candidates, err := reader.QueryForAutoTag(words)
if err != nil {
return nil, err
}
var ret []*models.Studio
for _, c := range candidates {
if nameMatchesPath(c.Name.String, path) {
ret = append(ret, c)
}
}
return ret, nil
}
func addSceneStudio(sceneWriter models.SceneReaderWriter, sceneID, studioID int) (bool, error) {
// don't set if already set
scene, err := sceneWriter.Find(sceneID)
if err != nil {
return false, err
}
if scene.StudioID.Valid {
return false, nil
}
// set the studio id
s := sql.NullInt64{Int64: int64(studioID), Valid: true}
scenePartial := models.ScenePartial{
ID: sceneID,
StudioID: &s,
}
if _, err := sceneWriter.Update(scenePartial); err != nil {
return false, err
}
return true, nil
}
func addImageStudio(imageWriter models.ImageReaderWriter, imageID, studioID int) (bool, error) {
// don't set if already set
image, err := imageWriter.Find(imageID)
if err != nil {
return false, err
}
if image.StudioID.Valid {
return false, nil
}
// set the studio id
s := sql.NullInt64{Int64: int64(studioID), Valid: true}
imagePartial := models.ImagePartial{
ID: imageID,
StudioID: &s,
}
if _, err := imageWriter.Update(imagePartial); err != nil {
return false, err
}
return true, nil
}
func addGalleryStudio(galleryWriter models.GalleryReaderWriter, galleryID, studioID int) (bool, error) {
// don't set if already set
gallery, err := galleryWriter.Find(galleryID)
if err != nil {
return false, err
}
if gallery.StudioID.Valid {
return false, nil
}
// set the studio id
s := sql.NullInt64{Int64: int64(studioID), Valid: true}
galleryPartial := models.GalleryPartial{
ID: galleryID,
StudioID: &s,
}
if _, err := galleryWriter.UpdatePartial(galleryPartial); err != nil {
return false, err
}
return true, nil
}
func getStudioTagger(p *models.Studio) tagger {
return tagger{
ID: p.ID,
Type: "studio",
Name: p.Name.String,
}
}
// StudioScenes searches for scenes whose path matches the provided studio name and tags the scene with the studio, if studio is not already set on the scene.
func StudioScenes(p *models.Studio, paths []string, rw models.SceneReaderWriter) error {
t := getStudioTagger(p)
return t.tagScenes(paths, rw, func(subjectID, otherID int) (bool, error) {
return addSceneStudio(rw, otherID, subjectID)
})
}
// StudioImages searches for images whose path matches the provided studio name and tags the image with the studio, if studio is not already set on the image.
func StudioImages(p *models.Studio, paths []string, rw models.ImageReaderWriter) error {
t := getStudioTagger(p)
return t.tagImages(paths, rw, func(subjectID, otherID int) (bool, error) {
return addImageStudio(rw, otherID, subjectID)
})
}
// StudioGalleries searches for galleries whose path matches the provided studio name and tags the gallery with the studio, if studio is not already set on the gallery.
func StudioGalleries(p *models.Studio, paths []string, rw models.GalleryReaderWriter) error {
t := getStudioTagger(p)
return t.tagGalleries(paths, rw, func(subjectID, otherID int) (bool, error) {
return addGalleryStudio(rw, otherID, subjectID)
})
}

237
pkg/autotag/studio_test.go Normal file
View file

@ -0,0 +1,237 @@
package autotag
import (
"testing"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert"
)
func TestStudioScenes(t *testing.T) {
type test struct {
studioName string
expectedRegex string
}
studioNames := []test{
{
"studio name",
`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*name(?:$|_|[^\w\d])`,
},
{
"studio + name",
`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
},
}
for _, p := range studioNames {
testStudioScenes(t, p.studioName, p.expectedRegex)
}
}
func testStudioScenes(t *testing.T, studioName, expectedRegex string) {
mockSceneReader := &mocks.SceneReaderWriter{}
const studioID = 2
var scenes []*models.Scene
matchingPaths, falsePaths := generateTestPaths(studioName, sceneExt)
for i, p := range append(matchingPaths, falsePaths...) {
scenes = append(scenes, &models.Scene{
ID: i + 1,
Path: p,
})
}
studio := models.Studio{
ID: studioID,
Name: models.NullString(studioName),
}
organized := false
perPage := models.PerPageAll
expectedSceneFilter := &models.SceneFilterType{
Organized: &organized,
Path: &models.StringCriterionInput{
Value: expectedRegex,
Modifier: models.CriterionModifierMatchesRegex,
},
}
expectedFindFilter := &models.FindFilterType{
PerPage: &perPage,
}
mockSceneReader.On("Query", expectedSceneFilter, expectedFindFilter).Return(scenes, len(scenes), nil).Once()
for i := range matchingPaths {
sceneID := i + 1
mockSceneReader.On("Find", sceneID).Return(&models.Scene{}, nil).Once()
expectedStudioID := models.NullInt64(studioID)
mockSceneReader.On("Update", models.ScenePartial{
ID: sceneID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once()
}
err := StudioScenes(&studio, nil, mockSceneReader)
assert := assert.New(t)
assert.Nil(err)
mockSceneReader.AssertExpectations(t)
}
func TestStudioImages(t *testing.T) {
type test struct {
studioName string
expectedRegex string
}
studioNames := []test{
{
"studio name",
`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*name(?:$|_|[^\w\d])`,
},
{
"studio + name",
`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
},
}
for _, p := range studioNames {
testStudioImages(t, p.studioName, p.expectedRegex)
}
}
func testStudioImages(t *testing.T, studioName, expectedRegex string) {
mockImageReader := &mocks.ImageReaderWriter{}
const studioID = 2
var images []*models.Image
matchingPaths, falsePaths := generateTestPaths(studioName, imageExt)
for i, p := range append(matchingPaths, falsePaths...) {
images = append(images, &models.Image{
ID: i + 1,
Path: p,
})
}
studio := models.Studio{
ID: studioID,
Name: models.NullString(studioName),
}
organized := false
perPage := models.PerPageAll
expectedImageFilter := &models.ImageFilterType{
Organized: &organized,
Path: &models.StringCriterionInput{
Value: expectedRegex,
Modifier: models.CriterionModifierMatchesRegex,
},
}
expectedFindFilter := &models.FindFilterType{
PerPage: &perPage,
}
mockImageReader.On("Query", expectedImageFilter, expectedFindFilter).Return(images, len(images), nil).Once()
for i := range matchingPaths {
imageID := i + 1
mockImageReader.On("Find", imageID).Return(&models.Image{}, nil).Once()
expectedStudioID := models.NullInt64(studioID)
mockImageReader.On("Update", models.ImagePartial{
ID: imageID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once()
}
err := StudioImages(&studio, nil, mockImageReader)
assert := assert.New(t)
assert.Nil(err)
mockImageReader.AssertExpectations(t)
}
func TestStudioGalleries(t *testing.T) {
type test struct {
studioName string
expectedRegex string
}
studioNames := []test{
{
"studio name",
`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*name(?:$|_|[^\w\d])`,
},
{
"studio + name",
`(?i)(?:^|_|[^\w\d])studio[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
},
}
for _, p := range studioNames {
testStudioGalleries(t, p.studioName, p.expectedRegex)
}
}
func testStudioGalleries(t *testing.T, studioName, expectedRegex string) {
mockGalleryReader := &mocks.GalleryReaderWriter{}
const studioID = 2
var galleries []*models.Gallery
matchingPaths, falsePaths := generateTestPaths(studioName, galleryExt)
for i, p := range append(matchingPaths, falsePaths...) {
galleries = append(galleries, &models.Gallery{
ID: i + 1,
Path: models.NullString(p),
})
}
studio := models.Studio{
ID: studioID,
Name: models.NullString(studioName),
}
organized := false
perPage := models.PerPageAll
expectedGalleryFilter := &models.GalleryFilterType{
Organized: &organized,
Path: &models.StringCriterionInput{
Value: expectedRegex,
Modifier: models.CriterionModifierMatchesRegex,
},
}
expectedFindFilter := &models.FindFilterType{
PerPage: &perPage,
}
mockGalleryReader.On("Query", expectedGalleryFilter, expectedFindFilter).Return(galleries, len(galleries), nil).Once()
for i := range matchingPaths {
galleryID := i + 1
mockGalleryReader.On("Find", galleryID).Return(&models.Gallery{}, nil).Once()
expectedStudioID := models.NullInt64(studioID)
mockGalleryReader.On("UpdatePartial", models.GalleryPartial{
ID: galleryID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once()
}
err := StudioGalleries(&studio, nil, mockGalleryReader)
assert := assert.New(t)
assert.Nil(err)
mockGalleryReader.AssertExpectations(t)
}

61
pkg/autotag/tag.go Normal file
View file

@ -0,0 +1,61 @@
package autotag
import (
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
)
func getMatchingTags(path string, tagReader models.TagReader) ([]*models.Tag, error) {
words := getPathWords(path)
tags, err := tagReader.QueryForAutoTag(words)
if err != nil {
return nil, err
}
var ret []*models.Tag
for _, p := range tags {
if nameMatchesPath(p.Name, path) {
ret = append(ret, p)
}
}
return ret, nil
}
func getTagTagger(p *models.Tag) tagger {
return tagger{
ID: p.ID,
Type: "tag",
Name: p.Name,
}
}
// TagScenes searches for scenes whose path matches the provided tag name and tags the scene with the tag.
func TagScenes(p *models.Tag, paths []string, rw models.SceneReaderWriter) error {
t := getTagTagger(p)
return t.tagScenes(paths, rw, func(subjectID, otherID int) (bool, error) {
return scene.AddTag(rw, otherID, subjectID)
})
}
// TagImages searches for images whose path matches the provided tag name and tags the image with the tag.
func TagImages(p *models.Tag, paths []string, rw models.ImageReaderWriter) error {
t := getTagTagger(p)
return t.tagImages(paths, rw, func(subjectID, otherID int) (bool, error) {
return image.AddTag(rw, otherID, subjectID)
})
}
// TagGalleries searches for galleries whose path matches the provided tag name and tags the gallery with the tag.
func TagGalleries(p *models.Tag, paths []string, rw models.GalleryReaderWriter) error {
t := getTagTagger(p)
return t.tagGalleries(paths, rw, func(subjectID, otherID int) (bool, error) {
return gallery.AddTag(rw, otherID, subjectID)
})
}

225
pkg/autotag/tag_test.go Normal file
View file

@ -0,0 +1,225 @@
package autotag
import (
"testing"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert"
)
func TestTagScenes(t *testing.T) {
type test struct {
tagName string
expectedRegex string
}
tagNames := []test{
{
"tag name",
`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*name(?:$|_|[^\w\d])`,
},
{
"tag + name",
`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
},
}
for _, p := range tagNames {
testTagScenes(t, p.tagName, p.expectedRegex)
}
}
func testTagScenes(t *testing.T, tagName, expectedRegex string) {
mockSceneReader := &mocks.SceneReaderWriter{}
const tagID = 2
var scenes []*models.Scene
matchingPaths, falsePaths := generateTestPaths(tagName, "mp4")
for i, p := range append(matchingPaths, falsePaths...) {
scenes = append(scenes, &models.Scene{
ID: i + 1,
Path: p,
})
}
tag := models.Tag{
ID: tagID,
Name: tagName,
}
organized := false
perPage := models.PerPageAll
expectedSceneFilter := &models.SceneFilterType{
Organized: &organized,
Path: &models.StringCriterionInput{
Value: expectedRegex,
Modifier: models.CriterionModifierMatchesRegex,
},
}
expectedFindFilter := &models.FindFilterType{
PerPage: &perPage,
}
mockSceneReader.On("Query", expectedSceneFilter, expectedFindFilter).Return(scenes, len(scenes), nil).Once()
for i := range matchingPaths {
sceneID := i + 1
mockSceneReader.On("GetTagIDs", sceneID).Return(nil, nil).Once()
mockSceneReader.On("UpdateTags", sceneID, []int{tagID}).Return(nil).Once()
}
err := TagScenes(&tag, nil, mockSceneReader)
assert := assert.New(t)
assert.Nil(err)
mockSceneReader.AssertExpectations(t)
}
func TestTagImages(t *testing.T) {
type test struct {
tagName string
expectedRegex string
}
tagNames := []test{
{
"tag name",
`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*name(?:$|_|[^\w\d])`,
},
{
"tag + name",
`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
},
}
for _, p := range tagNames {
testTagImages(t, p.tagName, p.expectedRegex)
}
}
func testTagImages(t *testing.T, tagName, expectedRegex string) {
mockImageReader := &mocks.ImageReaderWriter{}
const tagID = 2
var images []*models.Image
matchingPaths, falsePaths := generateTestPaths(tagName, "mp4")
for i, p := range append(matchingPaths, falsePaths...) {
images = append(images, &models.Image{
ID: i + 1,
Path: p,
})
}
tag := models.Tag{
ID: tagID,
Name: tagName,
}
organized := false
perPage := models.PerPageAll
expectedImageFilter := &models.ImageFilterType{
Organized: &organized,
Path: &models.StringCriterionInput{
Value: expectedRegex,
Modifier: models.CriterionModifierMatchesRegex,
},
}
expectedFindFilter := &models.FindFilterType{
PerPage: &perPage,
}
mockImageReader.On("Query", expectedImageFilter, expectedFindFilter).Return(images, len(images), nil).Once()
for i := range matchingPaths {
imageID := i + 1
mockImageReader.On("GetTagIDs", imageID).Return(nil, nil).Once()
mockImageReader.On("UpdateTags", imageID, []int{tagID}).Return(nil).Once()
}
err := TagImages(&tag, nil, mockImageReader)
assert := assert.New(t)
assert.Nil(err)
mockImageReader.AssertExpectations(t)
}
func TestTagGalleries(t *testing.T) {
type test struct {
tagName string
expectedRegex string
}
tagNames := []test{
{
"tag name",
`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*name(?:$|_|[^\w\d])`,
},
{
"tag + name",
`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
},
}
for _, p := range tagNames {
testTagGalleries(t, p.tagName, p.expectedRegex)
}
}
func testTagGalleries(t *testing.T, tagName, expectedRegex string) {
mockGalleryReader := &mocks.GalleryReaderWriter{}
const tagID = 2
var galleries []*models.Gallery
matchingPaths, falsePaths := generateTestPaths(tagName, "mp4")
for i, p := range append(matchingPaths, falsePaths...) {
galleries = append(galleries, &models.Gallery{
ID: i + 1,
Path: models.NullString(p),
})
}
tag := models.Tag{
ID: tagID,
Name: tagName,
}
organized := false
perPage := models.PerPageAll
expectedGalleryFilter := &models.GalleryFilterType{
Organized: &organized,
Path: &models.StringCriterionInput{
Value: expectedRegex,
Modifier: models.CriterionModifierMatchesRegex,
},
}
expectedFindFilter := &models.FindFilterType{
PerPage: &perPage,
}
mockGalleryReader.On("Query", expectedGalleryFilter, expectedFindFilter).Return(galleries, len(galleries), nil).Once()
for i := range matchingPaths {
galleryID := i + 1
mockGalleryReader.On("GetTagIDs", galleryID).Return(nil, nil).Once()
mockGalleryReader.On("UpdateTags", galleryID, []int{tagID}).Return(nil).Once()
}
err := TagGalleries(&tag, nil, mockGalleryReader)
assert := assert.New(t)
assert.Nil(err)
mockGalleryReader.AssertExpectations(t)
}

240
pkg/autotag/tagger.go Normal file
View file

@ -0,0 +1,240 @@
// Package autotag provides methods to auto-tag scenes with performers,
// studios and tags.
//
// The autotag engine tags scenes with performers/studios/tags if the scene's
// path matches the performer/studio/tag name. A scene's path is considered
// a match if it contains the performer/studio/tag's full name, ignoring any
// '.', '-', '_' characters in the path.
//
// For example, for a performer "foo bar", the following paths would be
// considered a match: "foo bar.mp4", "foobar.mp4", "foo.bar.mp4",
// "foo-bar.mp4", "aaa.foo bar.bbb.mp4".
// The following would not be considered a match:
// "aafoo bar.mp4", "foo barbb.mp4", "foo/bar.mp4"
package autotag
import (
"fmt"
"path/filepath"
"regexp"
"strings"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
)
const separatorChars = `.\-_ `
// fixes #1292
func escapePathRegex(name string) string {
ret := name
chars := `+*?()|[]{}^$`
for _, c := range chars {
cStr := string(c)
ret = strings.ReplaceAll(ret, cStr, `\`+cStr)
}
return ret
}
func getPathQueryRegex(name string) string {
// escape specific regex characters
name = escapePathRegex(name)
// handle path separators
const separator = `[` + separatorChars + `]`
ret := strings.Replace(name, " ", separator+"*", -1)
ret = `(?:^|_|[^\w\d])` + ret + `(?:$|_|[^\w\d])`
return ret
}
func nameMatchesPath(name, path string) bool {
// escape specific regex characters
name = escapePathRegex(name)
name = strings.ToLower(name)
path = strings.ToLower(path)
// handle path separators
const separator = `[` + separatorChars + `]`
reStr := strings.Replace(name, " ", separator+"*", -1)
reStr = `(?:^|_|[^\w\d])` + reStr + `(?:$|_|[^\w\d])`
re := regexp.MustCompile(reStr)
return re.MatchString(path)
}
func getPathWords(path string) []string {
retStr := path
// remove the extension
ext := filepath.Ext(retStr)
if ext != "" {
retStr = strings.TrimSuffix(retStr, ext)
}
// handle path separators
const separator = `(?:_|[^\w\d])+`
re := regexp.MustCompile(separator)
retStr = re.ReplaceAllString(retStr, " ")
words := strings.Split(retStr, " ")
// remove any single letter words
var ret []string
for _, w := range words {
if len(w) > 1 {
ret = append(ret, w)
}
}
return ret
}
type tagger struct {
ID int
Type string
Name string
Path string
}
type addLinkFunc func(subjectID, otherID int) (bool, error)
func (t *tagger) addError(otherType, otherName string, err error) error {
return fmt.Errorf("error adding %s '%s' to %s '%s': %s", otherType, otherName, t.Type, t.Name, err.Error())
}
func (t *tagger) addLog(otherType, otherName string) {
logger.Infof("Added %s '%s' to %s '%s'", otherType, otherName, t.Type, t.Name)
}
func (t *tagger) tagPerformers(performerReader models.PerformerReader, addFunc addLinkFunc) error {
others, err := getMatchingPerformers(t.Path, performerReader)
if err != nil {
return err
}
for _, p := range others {
added, err := addFunc(t.ID, p.ID)
if err != nil {
return t.addError("performer", p.Name.String, err)
}
if added {
t.addLog("performer", p.Name.String)
}
}
return nil
}
func (t *tagger) tagStudios(studioReader models.StudioReader, addFunc addLinkFunc) error {
others, err := getMatchingStudios(t.Path, studioReader)
if err != nil {
return err
}
// only add first studio
if len(others) > 0 {
studio := others[0]
added, err := addFunc(t.ID, studio.ID)
if err != nil {
return t.addError("studio", studio.Name.String, err)
}
if added {
t.addLog("studio", studio.Name.String)
}
}
return nil
}
func (t *tagger) tagTags(tagReader models.TagReader, addFunc addLinkFunc) error {
others, err := getMatchingTags(t.Path, tagReader)
if err != nil {
return err
}
for _, p := range others {
added, err := addFunc(t.ID, p.ID)
if err != nil {
return t.addError("tag", p.Name, err)
}
if added {
t.addLog("tag", p.Name)
}
}
return nil
}
func (t *tagger) tagScenes(paths []string, sceneReader models.SceneReader, addFunc addLinkFunc) error {
others, err := getMatchingScenes(t.Name, paths, sceneReader)
if err != nil {
return err
}
for _, p := range others {
added, err := addFunc(t.ID, p.ID)
if err != nil {
return t.addError("scene", p.GetTitle(), err)
}
if added {
t.addLog("scene", p.GetTitle())
}
}
return nil
}
func (t *tagger) tagImages(paths []string, imageReader models.ImageReader, addFunc addLinkFunc) error {
others, err := getMatchingImages(t.Name, paths, imageReader)
if err != nil {
return err
}
for _, p := range others {
added, err := addFunc(t.ID, p.ID)
if err != nil {
return t.addError("image", p.GetTitle(), err)
}
if added {
t.addLog("image", p.GetTitle())
}
}
return nil
}
func (t *tagger) tagGalleries(paths []string, galleryReader models.GalleryReader, addFunc addLinkFunc) error {
others, err := getMatchingGalleries(t.Name, paths, galleryReader)
if err != nil {
return err
}
for _, p := range others {
added, err := addFunc(t.ID, p.ID)
if err != nil {
return t.addError("gallery", p.GetTitle(), err)
}
if added {
t.addLog("gallery", p.GetTitle())
}
}
return nil
}
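To make the matching rules described in the package comment concrete, a small illustrative snippet (paths invented) showing how nameMatchesPath behaves for the name "foo bar"; the results follow from the regex built above:
fmt.Println(nameMatchesPath("foo bar", "dir/foo bar.mp4"))   // true: literal match
fmt.Println(nameMatchesPath("foo bar", "dir/foo.bar.mp4"))   // true: '.' is a separator character
fmt.Println(nameMatchesPath("foo bar", "dir/foobar.mp4"))    // true: separators are optional ([...]*)
fmt.Println(nameMatchesPath("foo bar", "dir/aafoo bar.mp4")) // false: the name must start on a non-word boundary
fmt.Println(nameMatchesPath("foo bar", "dir/foo/bar.mp4"))   // false: '/' is not a separator character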

View file

@ -23,11 +23,30 @@ import (
var DB *sqlx.DB
var WriteMu *sync.Mutex
var dbPath string
var appSchemaVersion uint = 19
var appSchemaVersion uint = 22
var databaseSchemaVersion uint
var (
// ErrMigrationNeeded indicates that a database migration is needed
// before the database can be initialized
ErrMigrationNeeded = errors.New("database migration required")
// ErrDatabaseNotInitialized indicates that the database is not
// initialized, usually due to an incomplete configuration.
ErrDatabaseNotInitialized = errors.New("database not initialized")
)
const sqlite3Driver = "sqlite3ex"
// Ready returns an error if the database is not ready to begin transactions.
func Ready() error {
if DB == nil {
return ErrDatabaseNotInitialized
}
return nil
}
func init() {
// register custom driver with regexp function
registerCustomDriver()
@ -37,20 +56,20 @@ func init() {
// performs a full migration to the latest schema version. Otherwise, any
// necessary migrations must be run separately using RunMigrations.
// Returns an error if the database could not be initialized.
func Initialize(databasePath string) bool {
func Initialize(databasePath string) error {
dbPath = databasePath
if err := getDatabaseSchemaVersion(); err != nil {
panic(err)
return fmt.Errorf("error getting database schema version: %s", err.Error())
}
if databaseSchemaVersion == 0 {
// new database, just run the migrations
if err := RunMigrations(); err != nil {
panic(err)
return fmt.Errorf("error running initial schema migrations: %s", err.Error())
}
// RunMigrations calls Initialize. Just return
return true
return nil
} else {
if databaseSchemaVersion > appSchemaVersion {
panic(fmt.Sprintf("Database schema version %d is incompatible with required schema version %d", databaseSchemaVersion, appSchemaVersion))
@ -59,7 +78,7 @@ func Initialize(databasePath string) bool {
// if migration is needed, then don't open the connection
if NeedsMigration() {
logger.Warnf("Database schema version %d does not match required schema version %d.", databaseSchemaVersion, appSchemaVersion)
return false
return nil
}
}
@ -67,7 +86,7 @@ func Initialize(databasePath string) bool {
DB = open(databasePath, disableForeignKeys)
WriteMu = &sync.Mutex{}
return false
return nil
}
func open(databasePath string, disableForeignKeys bool) *sqlx.DB {
@ -150,6 +169,10 @@ func AppSchemaVersion() uint {
return appSchemaVersion
}
func DatabasePath() string {
return dbPath
}
func DatabaseBackupPath() string {
return fmt.Sprintf("%s.%d.%s", dbPath, databaseSchemaVersion, time.Now().Format("20060102_150405"))
}
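A hedged caller-side sketch of the new error-returning flow, using only the helpers shown above (Initialize, NeedsMigration, Ready); the surrounding variable names are assumed:
if err := database.Initialize(databasePath); err != nil {
	return fmt.Errorf("initializing database: %w", err)
}
if database.NeedsMigration() {
	// Initialize returned nil without opening a connection; the caller is
	// expected to run the migration flow (RunMigrations) before proceeding.
}
if err := database.Ready(); err != nil {
	return err // e.g. database.ErrDatabaseNotInitialized
}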

View file

@ -0,0 +1 @@
ALTER TABLE `scenes` ADD COLUMN `phash` blob;

View file

@ -0,0 +1,5 @@
ALTER TABLE `performers` ADD COLUMN `details` text;
ALTER TABLE `performers` ADD COLUMN `death_date` date;
ALTER TABLE `performers` ADD COLUMN `hair_color` varchar(255);
ALTER TABLE `performers` ADD COLUMN `weight` integer;
ALTER TABLE `studios` ADD COLUMN `details` text;

View file

@ -0,0 +1,2 @@
ALTER TABLE `performers` ADD COLUMN `rating` tinyint;
ALTER TABLE `studios` ADD COLUMN `rating` tinyint;

View file

@ -1,7 +1,7 @@
package ffmpeg
import (
"fmt"
"bytes"
"io/ioutil"
"os"
"os/exec"
@ -62,7 +62,7 @@ func KillRunningEncoders(path string) {
for _, process := range processes {
// assume it worked, don't check for error
fmt.Printf("Killing encoder process for file: %s", path)
logger.Infof("Killing encoder process for file: %s", path)
process.Kill()
// wait for the process to die before returning
@ -82,7 +82,8 @@ func KillRunningEncoders(path string) {
}
}
func (e *Encoder) run(probeResult VideoFile, args []string) (string, error) {
// FFmpeg runner with progress output, used for transcodes
func (e *Encoder) runTranscode(probeResult VideoFile, args []string) (string, error) {
cmd := exec.Command(e.Path, args...)
stderr, err := cmd.StderrPipe()
@ -137,3 +138,26 @@ func (e *Encoder) run(probeResult VideoFile, args []string) (string, error) {
return stdoutString, nil
}
func (e *Encoder) run(probeResult VideoFile, args []string) (string, error) {
cmd := exec.Command(e.Path, args...)
var stdout, stderr bytes.Buffer
cmd.Stdout = &stdout
cmd.Stderr = &stderr
if err := cmd.Start(); err != nil {
return "", err
}
registerRunningEncoder(probeResult.Path, cmd.Process)
err := waitAndDeregister(probeResult.Path, cmd)
if err != nil {
// error message should be in the stderr stream
logger.Errorf("ffmpeg error when running command <%s>: %s", strings.Join(cmd.Args, " "), stderr.String())
return stdout.String(), err
}
return stdout.String(), nil
}

View file

@ -0,0 +1,38 @@
package ffmpeg
import (
"fmt"
"image"
"strings"
)
type SpriteScreenshotOptions struct {
Time float64
Width int
}
func (e *Encoder) SpriteScreenshot(probeResult VideoFile, options SpriteScreenshotOptions) (image.Image, error) {
args := []string{
"-v", "error",
"-ss", fmt.Sprintf("%v", options.Time),
"-i", probeResult.Path,
"-vframes", "1",
"-vf", fmt.Sprintf("scale=%v:-1", options.Width),
"-c:v", "bmp",
"-f", "rawvideo",
"-",
}
data, err := e.run(probeResult, args)
if err != nil {
return nil, err
}
reader := strings.NewReader(data)
img, _, err := image.Decode(reader)
if err != nil {
return nil, err
}
return img, err
}
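A brief illustrative call, assuming an Encoder and a probed VideoFile already exist:
img, err := encoder.SpriteScreenshot(videoFile, SpriteScreenshotOptions{
	Time:  30.0, // seconds into the video
	Width: 160,  // output width; height is scaled to preserve aspect ratio
})
if err != nil {
	return err
}
_ = img // an image.Image decoded from ffmpeg's single-frame BMP output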

View file

@ -64,7 +64,7 @@ func (e *Encoder) Transcode(probeResult VideoFile, options TranscodeOptions) {
"-strict", "-2",
options.OutputPath,
}
_, _ = e.run(probeResult, args)
_, _ = e.runTranscode(probeResult, args)
}
//transcode the video, remove the audio
@ -84,7 +84,7 @@ func (e *Encoder) TranscodeVideo(probeResult VideoFile, options TranscodeOptions
"-vf", "scale=" + scale,
options.OutputPath,
}
_, _ = e.run(probeResult, args)
_, _ = e.runTranscode(probeResult, args)
}
//copy the video stream as is, transcode audio
@ -96,7 +96,7 @@ func (e *Encoder) TranscodeAudio(probeResult VideoFile, options TranscodeOptions
"-strict", "-2",
options.OutputPath,
}
_, _ = e.run(probeResult, args)
_, _ = e.runTranscode(probeResult, args)
}
//copy the video stream as is, drop audio
@ -107,5 +107,5 @@ func (e *Encoder) CopyVideo(probeResult VideoFile, options TranscodeOptions) {
"-c:v", "copy",
options.OutputPath,
}
_, _ = e.run(probeResult, args)
_, _ = e.runTranscode(probeResult, args)
}

40
pkg/gallery/query.go Normal file
View file

@ -0,0 +1,40 @@
package gallery
import (
"strconv"
"github.com/stashapp/stash/pkg/models"
)
func CountByPerformerID(r models.GalleryReader, id int) (int, error) {
filter := &models.GalleryFilterType{
Performers: &models.MultiCriterionInput{
Value: []string{strconv.Itoa(id)},
Modifier: models.CriterionModifierIncludes,
},
}
return r.QueryCount(filter, nil)
}
func CountByStudioID(r models.GalleryReader, id int) (int, error) {
filter := &models.GalleryFilterType{
Studios: &models.MultiCriterionInput{
Value: []string{strconv.Itoa(id)},
Modifier: models.CriterionModifierIncludes,
},
}
return r.QueryCount(filter, nil)
}
func CountByTagID(r models.GalleryReader, id int) (int, error) {
filter := &models.GalleryFilterType{
Tags: &models.MultiCriterionInput{
Value: []string{strconv.Itoa(id)},
Modifier: models.CriterionModifierIncludes,
},
}
return r.QueryCount(filter, nil)
}
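For example, a caller counting a performer's galleries inside a transaction might look like this (withTxn-style helper and performerID are assumed):
return withTxn(func(r models.Repository) error {
	count, err := CountByPerformerID(r.Gallery(), performerID)
	if err != nil {
		return err
	}
	logger.Infof("performer %d appears in %d galleries", performerID, count)
	return nil
})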

View file

@ -21,3 +21,43 @@ func AddImage(qb models.GalleryReaderWriter, galleryID int, imageID int) error {
imageIDs = utils.IntAppendUnique(imageIDs, imageID)
return qb.UpdateImages(galleryID, imageIDs)
}
func AddPerformer(qb models.GalleryReaderWriter, id int, performerID int) (bool, error) {
performerIDs, err := qb.GetPerformerIDs(id)
if err != nil {
return false, err
}
oldLen := len(performerIDs)
performerIDs = utils.IntAppendUnique(performerIDs, performerID)
if len(performerIDs) != oldLen {
if err := qb.UpdatePerformers(id, performerIDs); err != nil {
return false, err
}
return true, nil
}
return false, nil
}
func AddTag(qb models.GalleryReaderWriter, id int, tagID int) (bool, error) {
tagIDs, err := qb.GetTagIDs(id)
if err != nil {
return false, err
}
oldLen := len(tagIDs)
tagIDs = utils.IntAppendUnique(tagIDs, tagID)
if len(tagIDs) != oldLen {
if err := qb.UpdateTags(id, tagIDs); err != nil {
return false, err
}
return true, nil
}
return false, nil
}

View file

@ -257,3 +257,10 @@ func GetTitle(s *models.Image) string {
_, fn := getFilePath(s.Path)
return filepath.Base(fn)
}
// GetFilename gets the base name of the image file
// If stripExt is set the file extension is omitted from the name
func GetFilename(s *models.Image, stripExt bool) string {
_, fn := getFilePath(s.Path)
return utils.GetNameFromPath(fn, stripExt)
}

40
pkg/image/query.go Normal file
View file

@ -0,0 +1,40 @@
package image
import (
"strconv"
"github.com/stashapp/stash/pkg/models"
)
func CountByPerformerID(r models.ImageReader, id int) (int, error) {
filter := &models.ImageFilterType{
Performers: &models.MultiCriterionInput{
Value: []string{strconv.Itoa(id)},
Modifier: models.CriterionModifierIncludes,
},
}
return r.QueryCount(filter, nil)
}
func CountByStudioID(r models.ImageReader, id int) (int, error) {
filter := &models.ImageFilterType{
Studios: &models.MultiCriterionInput{
Value: []string{strconv.Itoa(id)},
Modifier: models.CriterionModifierIncludes,
},
}
return r.QueryCount(filter, nil)
}
func CountByTagID(r models.ImageReader, id int) (int, error) {
filter := &models.ImageFilterType{
Tags: &models.MultiCriterionInput{
Value: []string{strconv.Itoa(id)},
Modifier: models.CriterionModifierIncludes,
},
}
return r.QueryCount(filter, nil)
}

View file

@ -1,6 +1,9 @@
package image
import "github.com/stashapp/stash/pkg/models"
import (
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
func UpdateFileModTime(qb models.ImageWriter, id int, modTime models.NullSQLiteTimestamp) (*models.Image, error) {
return qb.Update(models.ImagePartial{
@ -8,3 +11,43 @@ func UpdateFileModTime(qb models.ImageWriter, id int, modTime models.NullSQLiteT
FileModTime: &modTime,
})
}
func AddPerformer(qb models.ImageReaderWriter, id int, performerID int) (bool, error) {
performerIDs, err := qb.GetPerformerIDs(id)
if err != nil {
return false, err
}
oldLen := len(performerIDs)
performerIDs = utils.IntAppendUnique(performerIDs, performerID)
if len(performerIDs) != oldLen {
if err := qb.UpdatePerformers(id, performerIDs); err != nil {
return false, err
}
return true, nil
}
return false, nil
}
func AddTag(qb models.ImageReaderWriter, id int, tagID int) (bool, error) {
tagIDs, err := qb.GetTagIDs(id)
if err != nil {
return false, err
}
oldLen := len(tagIDs)
tagIDs = utils.IntAppendUnique(tagIDs, tagID)
if len(tagIDs) != oldLen {
if err := qb.UpdateTags(id, tagIDs); err != nil {
return false, err
}
return true, nil
}
return false, nil
}

55
pkg/manager/apikey.go Normal file
View file

@ -0,0 +1,55 @@
package manager
import (
"errors"
"time"
"github.com/dgrijalva/jwt-go"
"github.com/stashapp/stash/pkg/manager/config"
)
var ErrInvalidToken = errors.New("invalid apikey")
const APIKeySubject = "APIKey"
type APIKeyClaims struct {
UserID string `json:"uid"`
jwt.StandardClaims
}
func GenerateAPIKey(userID string) (string, error) {
claims := &APIKeyClaims{
UserID: userID,
StandardClaims: jwt.StandardClaims{
Subject: APIKeySubject,
IssuedAt: time.Now().Unix(),
},
}
token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
ss, err := token.SignedString(config.GetInstance().GetJWTSignKey())
if err != nil {
return "", err
}
return ss, nil
}
// GetUserIDFromAPIKey validates the provided api key and returns the user ID
func GetUserIDFromAPIKey(apiKey string) (string, error) {
claims := &APIKeyClaims{}
token, err := jwt.ParseWithClaims(apiKey, claims, func(t *jwt.Token) (interface{}, error) {
return config.GetInstance().GetJWTSignKey(), nil
})
if err != nil {
return "", err
}
if !token.Valid {
return "", ErrInvalidToken
}
return claims.UserID, nil
}
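A short illustrative round trip (the user ID is invented):
key, err := GenerateAPIKey("admin")
if err != nil {
	return err
}
userID, err := GetUserIDFromAPIKey(key)
if err != nil {
	// malformed or tampered keys fail here (ErrInvalidToken for an invalid token)
	return err
}
_ = userID // "admin"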

View file

@ -31,9 +31,11 @@ func setInitialMD5Config(txnManager models.TransactionManager) {
defaultAlgorithm = models.HashAlgorithmMd5
}
// TODO - this should use the config instance
viper.SetDefault(config.VideoFileNamingAlgorithm, defaultAlgorithm)
viper.SetDefault(config.CalculateMD5, usingMD5)
config := config.GetInstance()
if err := config.Write(); err != nil {
logger.Errorf("Error while writing configuration file: %s", err.Error())
}

View file

@ -1,8 +1,11 @@
package config
import (
"golang.org/x/crypto/bcrypt"
"fmt"
"runtime"
"strings"
"golang.org/x/crypto/bcrypt"
"errors"
"io/ioutil"
@ -20,6 +23,7 @@ const Cache = "cache"
const Generated = "generated"
const Metadata = "metadata"
const Downloads = "downloads"
const ApiKey = "api_key"
const Username = "username"
const Password = "password"
const MaxSessionAge = "max_session_age"
@ -102,6 +106,10 @@ const Language = "language"
// this should be manually configured only
const CustomServedFolders = "custom_served_folders"
// UI directory. Overrides to serve the UI from a specific location
// rather than use the embedded UI.
const CustomUILocation = "custom_ui_location"
// Interface options
const MenuItems = "menu_items"
@ -114,6 +122,7 @@ const AutostartVideo = "autostart_video"
const ShowStudioAsText = "show_studio_as_text"
const CSSEnabled = "cssEnabled"
const WallPlayback = "wall_playback"
const SlideshowDelay = "slideshow_delay"
// Logging options
const LogFile = "logFile"
@ -124,33 +133,70 @@ const LogAccess = "logAccess"
// File upload options
const MaxUploadSize = "max_upload_size"
func Set(key string, value interface{}) {
type MissingConfigError struct {
missingFields []string
}
func (e MissingConfigError) Error() string {
return fmt.Sprintf("missing the following mandatory settings: %s", strings.Join(e.missingFields, ", "))
}
type Instance struct {
isNewSystem bool
}
var instance *Instance
func GetInstance() *Instance {
if instance == nil {
instance = &Instance{}
}
return instance
}
func (i *Instance) IsNewSystem() bool {
return i.isNewSystem
}
func (i *Instance) SetConfigFile(fn string) {
viper.SetConfigFile(fn)
}
func (i *Instance) Set(key string, value interface{}) {
viper.Set(key, value)
}
func SetPassword(value string) {
func (i *Instance) SetPassword(value string) {
// if blank, don't bother hashing; we want it to be blank
if value == "" {
Set(Password, "")
i.Set(Password, "")
} else {
Set(Password, hashPassword(value))
i.Set(Password, hashPassword(value))
}
}
func Write() error {
func (i *Instance) Write() error {
return viper.WriteConfig()
}
func GetConfigPath() string {
configFileUsed := viper.ConfigFileUsed()
return filepath.Dir(configFileUsed)
}
func GetConfigFilePath() string {
// GetConfigFile returns the full path to the used configuration file.
func (i *Instance) GetConfigFile() string {
return viper.ConfigFileUsed()
}
func GetStashPaths() []*models.StashConfig {
// GetConfigPath returns the path of the directory containing the used
// configuration file.
func (i *Instance) GetConfigPath() string {
return filepath.Dir(i.GetConfigFile())
}
// GetDefaultDatabaseFilePath returns the default database filename,
// which is located in the same directory as the config file.
func (i *Instance) GetDefaultDatabaseFilePath() string {
return filepath.Join(i.GetConfigPath(), "stash-go.sqlite")
}
func (i *Instance) GetStashPaths() []*models.StashConfig {
var ret []*models.StashConfig
if err := viper.UnmarshalKey(Stash, &ret); err != nil || len(ret) == 0 {
// fallback to legacy format
@ -167,47 +213,51 @@ func GetStashPaths() []*models.StashConfig {
return ret
}
func GetCachePath() string {
func (i *Instance) GetConfigFilePath() string {
return viper.ConfigFileUsed()
}
func (i *Instance) GetCachePath() string {
return viper.GetString(Cache)
}
func GetGeneratedPath() string {
func (i *Instance) GetGeneratedPath() string {
return viper.GetString(Generated)
}
func GetMetadataPath() string {
func (i *Instance) GetMetadataPath() string {
return viper.GetString(Metadata)
}
func GetDatabasePath() string {
func (i *Instance) GetDatabasePath() string {
return viper.GetString(Database)
}
func GetJWTSignKey() []byte {
func (i *Instance) GetJWTSignKey() []byte {
return []byte(viper.GetString(JWTSignKey))
}
func GetSessionStoreKey() []byte {
func (i *Instance) GetSessionStoreKey() []byte {
return []byte(viper.GetString(SessionStoreKey))
}
func GetDefaultScrapersPath() string {
func (i *Instance) GetDefaultScrapersPath() string {
// default to the same directory as the config file
fn := filepath.Join(GetConfigPath(), "scrapers")
fn := filepath.Join(i.GetConfigPath(), "scrapers")
return fn
}
func GetExcludes() []string {
func (i *Instance) GetExcludes() []string {
return viper.GetStringSlice(Exclude)
}
func GetImageExcludes() []string {
func (i *Instance) GetImageExcludes() []string {
return viper.GetStringSlice(ImageExclude)
}
func GetVideoExtensions() []string {
func (i *Instance) GetVideoExtensions() []string {
ret := viper.GetStringSlice(VideoExtensions)
if ret == nil {
ret = defaultVideoExtensions
@ -215,7 +265,7 @@ func GetVideoExtensions() []string {
return ret
}
func GetImageExtensions() []string {
func (i *Instance) GetImageExtensions() []string {
ret := viper.GetStringSlice(ImageExtensions)
if ret == nil {
ret = defaultImageExtensions
@ -223,7 +273,7 @@ func GetImageExtensions() []string {
return ret
}
func GetGalleryExtensions() []string {
func (i *Instance) GetGalleryExtensions() []string {
ret := viper.GetStringSlice(GalleryExtensions)
if ret == nil {
ret = defaultGalleryExtensions
@ -231,11 +281,11 @@ func GetGalleryExtensions() []string {
return ret
}
func GetCreateGalleriesFromFolders() bool {
func (i *Instance) GetCreateGalleriesFromFolders() bool {
return viper.GetBool(CreateGalleriesFromFolders)
}
func GetLanguage() string {
func (i *Instance) GetLanguage() string {
ret := viper.GetString(Language)
// default to English
@ -248,13 +298,13 @@ func GetLanguage() string {
// IsCalculateMD5 returns true if MD5 checksums should be generated for
// scene video files.
func IsCalculateMD5() bool {
func (i *Instance) IsCalculateMD5() bool {
return viper.GetBool(CalculateMD5)
}
// GetVideoFileNamingAlgorithm returns what hash algorithm should be used for
// naming generated scene video files.
func GetVideoFileNamingAlgorithm() models.HashAlgorithm {
func (i *Instance) GetVideoFileNamingAlgorithm() models.HashAlgorithm {
ret := viper.GetString(VideoFileNamingAlgorithm)
// default to oshash
@ -265,23 +315,23 @@ func GetVideoFileNamingAlgorithm() models.HashAlgorithm {
return models.HashAlgorithm(ret)
}
func GetScrapersPath() string {
func (i *Instance) GetScrapersPath() string {
return viper.GetString(ScrapersPath)
}
func GetScraperUserAgent() string {
func (i *Instance) GetScraperUserAgent() string {
return viper.GetString(ScraperUserAgent)
}
// GetScraperCDPPath gets the path to the Chrome executable or remote address
// to an instance of Chrome.
func GetScraperCDPPath() string {
func (i *Instance) GetScraperCDPPath() string {
return viper.GetString(ScraperCDPPath)
}
// GetScraperCertCheck returns true if the scraper should check for insecure
// certificates when fetching an image or a page.
func GetScraperCertCheck() bool {
func (i *Instance) GetScraperCertCheck() bool {
ret := true
if viper.IsSet(ScraperCertCheck) {
ret = viper.GetBool(ScraperCertCheck)
@ -290,48 +340,48 @@ func GetScraperCertCheck() bool {
return ret
}
func GetStashBoxes() []*models.StashBox {
func (i *Instance) GetStashBoxes() []*models.StashBox {
var boxes []*models.StashBox
viper.UnmarshalKey(StashBoxes, &boxes)
return boxes
}
func GetDefaultPluginsPath() string {
func (i *Instance) GetDefaultPluginsPath() string {
// default to the same directory as the config file
fn := filepath.Join(GetConfigPath(), "plugins")
fn := filepath.Join(i.GetConfigPath(), "plugins")
return fn
}
func GetPluginsPath() string {
func (i *Instance) GetPluginsPath() string {
return viper.GetString(PluginsPath)
}
func GetHost() string {
func (i *Instance) GetHost() string {
return viper.GetString(Host)
}
func GetPort() int {
func (i *Instance) GetPort() int {
return viper.GetInt(Port)
}
func GetExternalHost() string {
func (i *Instance) GetExternalHost() string {
return viper.GetString(ExternalHost)
}
// GetPreviewSegmentDuration returns the duration of a single segment in a
// scene preview file, in seconds.
func GetPreviewSegmentDuration() float64 {
func (i *Instance) GetPreviewSegmentDuration() float64 {
return viper.GetFloat64(PreviewSegmentDuration)
}
// GetParallelTasks returns the number of parallel tasks that should be started
// by scan or generate task.
func GetParallelTasks() int {
func (i *Instance) GetParallelTasks() int {
return viper.GetInt(ParallelTasks)
}
func GetParallelTasksWithAutoDetection() int {
func (i *Instance) GetParallelTasksWithAutoDetection() int {
parallelTasks := viper.GetInt(ParallelTasks)
if parallelTasks <= 0 {
parallelTasks = (runtime.NumCPU() / 4) + 1
@ -340,7 +390,7 @@ func GetParallelTasksWithAutoDetection() int {
}
// GetPreviewSegments returns the amount of segments in a scene preview file.
func GetPreviewSegments() int {
func (i *Instance) GetPreviewSegments() int {
return viper.GetInt(PreviewSegments)
}
@ -350,7 +400,7 @@ func GetPreviewSegments() int {
// of seconds to exclude from the start of the video before it is included
// in the preview. If the value is suffixed with a '%' character (for example
// '2%'), then it is interpreted as a proportion of the total video duration.
func GetPreviewExcludeStart() string {
func (i *Instance) GetPreviewExcludeStart() string {
return viper.GetString(PreviewExcludeStart)
}
@ -359,13 +409,13 @@ func GetPreviewExcludeStart() string {
// is interpreted as the number of seconds to exclude from the end of the video
// when generating previews. If the value is suffixed with a '%' character,
// then it is interpreted as a proportion of the total video duration.
func GetPreviewExcludeEnd() string {
func (i *Instance) GetPreviewExcludeEnd() string {
return viper.GetString(PreviewExcludeEnd)
}
// GetPreviewPreset returns the preset when generating previews. Defaults to
// Slow.
func GetPreviewPreset() models.PreviewPreset {
func (i *Instance) GetPreviewPreset() models.PreviewPreset {
ret := viper.GetString(PreviewPreset)
// default to slow
@ -376,7 +426,7 @@ func GetPreviewPreset() models.PreviewPreset {
return models.PreviewPreset(ret)
}
func GetMaxTranscodeSize() models.StreamingResolutionEnum {
func (i *Instance) GetMaxTranscodeSize() models.StreamingResolutionEnum {
ret := viper.GetString(MaxTranscodeSize)
// default to original
@ -387,7 +437,7 @@ func GetMaxTranscodeSize() models.StreamingResolutionEnum {
return models.StreamingResolutionEnum(ret)
}
func GetMaxStreamingTranscodeSize() models.StreamingResolutionEnum {
func (i *Instance) GetMaxStreamingTranscodeSize() models.StreamingResolutionEnum {
ret := viper.GetString(MaxStreamingTranscodeSize)
// default to original
@ -398,29 +448,33 @@ func GetMaxStreamingTranscodeSize() models.StreamingResolutionEnum {
return models.StreamingResolutionEnum(ret)
}
func GetUsername() string {
func (i *Instance) GetAPIKey() string {
return viper.GetString(ApiKey)
}
func (i *Instance) GetUsername() string {
return viper.GetString(Username)
}
func GetPasswordHash() string {
func (i *Instance) GetPasswordHash() string {
return viper.GetString(Password)
}
func GetCredentials() (string, string) {
if HasCredentials() {
func (i *Instance) GetCredentials() (string, string) {
if i.HasCredentials() {
return viper.GetString(Username), viper.GetString(Password)
}
return "", ""
}
func HasCredentials() bool {
func (i *Instance) HasCredentials() bool {
if !viper.IsSet(Username) || !viper.IsSet(Password) {
return false
}
username := GetUsername()
pwHash := GetPasswordHash()
username := i.GetUsername()
pwHash := i.GetPasswordHash()
return username != "" && pwHash != ""
}
@ -431,20 +485,20 @@ func hashPassword(password string) string {
return string(hash)
}
func ValidateCredentials(username string, password string) bool {
if !HasCredentials() {
func (i *Instance) ValidateCredentials(username string, password string) bool {
if !i.HasCredentials() {
// don't need to authenticate if no credentials saved
return true
}
authUser, authPWHash := GetCredentials()
authUser, authPWHash := i.GetCredentials()
err := bcrypt.CompareHashAndPassword([]byte(authPWHash), []byte(password))
return username == authUser && err == nil
}
func ValidateStashBoxes(boxes []*models.StashBoxInput) error {
func (i *Instance) ValidateStashBoxes(boxes []*models.StashBoxInput) error {
isMulti := len(boxes) > 1
re, err := regexp.Compile("^http.*graphql$")
@ -468,56 +522,65 @@ func ValidateStashBoxes(boxes []*models.StashBoxInput) error {
// GetMaxSessionAge gets the maximum age for session cookies, in seconds.
// Session cookie expiry times are refreshed every request.
func GetMaxSessionAge() int {
func (i *Instance) GetMaxSessionAge() int {
viper.SetDefault(MaxSessionAge, DefaultMaxSessionAge)
return viper.GetInt(MaxSessionAge)
}
// GetCustomServedFolders gets the map of custom paths to their applicable
// filesystem locations
func GetCustomServedFolders() URLMap {
func (i *Instance) GetCustomServedFolders() URLMap {
return viper.GetStringMapString(CustomServedFolders)
}
func (i *Instance) GetCustomUILocation() string {
return viper.GetString(CustomUILocation)
}
// Interface options
func GetMenuItems() []string {
func (i *Instance) GetMenuItems() []string {
if viper.IsSet(MenuItems) {
return viper.GetStringSlice(MenuItems)
}
return defaultMenuItems
}
func GetSoundOnPreview() bool {
viper.SetDefault(SoundOnPreview, true)
func (i *Instance) GetSoundOnPreview() bool {
viper.SetDefault(SoundOnPreview, false)
return viper.GetBool(SoundOnPreview)
}
func GetWallShowTitle() bool {
func (i *Instance) GetWallShowTitle() bool {
viper.SetDefault(WallShowTitle, true)
return viper.GetBool(WallShowTitle)
}
func GetWallPlayback() string {
func (i *Instance) GetWallPlayback() string {
viper.SetDefault(WallPlayback, "video")
return viper.GetString(WallPlayback)
}
func GetMaximumLoopDuration() int {
func (i *Instance) GetMaximumLoopDuration() int {
viper.SetDefault(MaximumLoopDuration, 0)
return viper.GetInt(MaximumLoopDuration)
}
func GetAutostartVideo() bool {
func (i *Instance) GetAutostartVideo() bool {
viper.SetDefault(AutostartVideo, false)
return viper.GetBool(AutostartVideo)
}
func GetShowStudioAsText() bool {
func (i *Instance) GetShowStudioAsText() bool {
viper.SetDefault(ShowStudioAsText, false)
return viper.GetBool(ShowStudioAsText)
}
func GetCSSPath() string {
func (i *Instance) GetSlideshowDelay() int {
viper.SetDefault(SlideshowDelay, 5000)
return viper.GetInt(SlideshowDelay)
}
func (i *Instance) GetCSSPath() string {
// use custom.css in the same directory as the config file
configFileUsed := viper.ConfigFileUsed()
configDir := filepath.Dir(configFileUsed)
@ -527,8 +590,8 @@ func GetCSSPath() string {
return fn
}
func GetCSS() string {
fn := GetCSSPath()
func (i *Instance) GetCSS() string {
fn := i.GetCSSPath()
exists, _ := utils.FileExists(fn)
if !exists {
@ -544,28 +607,28 @@ func GetCSS() string {
return string(buf)
}
func SetCSS(css string) {
fn := GetCSSPath()
func (i *Instance) SetCSS(css string) {
fn := i.GetCSSPath()
buf := []byte(css)
ioutil.WriteFile(fn, buf, 0777)
}
func GetCSSEnabled() bool {
func (i *Instance) GetCSSEnabled() bool {
return viper.GetBool(CSSEnabled)
}
// GetLogFile returns the filename of the file to output logs to.
// An empty string means that file logging will be disabled.
func GetLogFile() string {
func (i *Instance) GetLogFile() string {
return viper.GetString(LogFile)
}
// GetLogOut returns true if logging should be output to the terminal
// in addition to writing to a log file. Logging will be output to the
// terminal if file logging is disabled. Defaults to true.
func GetLogOut() bool {
func (i *Instance) GetLogOut() bool {
ret := true
if viper.IsSet(LogOut) {
ret = viper.GetBool(LogOut)
@ -576,7 +639,7 @@ func GetLogOut() bool {
// GetLogLevel returns the lowest log level to write to the log.
// Should be one of "Debug", "Info", "Warning", "Error"
func GetLogLevel() string {
func (i *Instance) GetLogLevel() string {
const defaultValue = "Info"
value := viper.GetString(LogLevel)
@ -589,7 +652,7 @@ func GetLogLevel() string {
// GetLogAccess returns true if http requests should be logged to the terminal.
// HTTP requests are not logged to the log file. Defaults to true.
func GetLogAccess() bool {
func (i *Instance) GetLogAccess() bool {
ret := true
if viper.IsSet(LogAccess) {
ret = viper.GetBool(LogAccess)
@ -599,7 +662,7 @@ func GetLogAccess() bool {
}
// Max allowed graphql upload size in megabytes
func GetMaxUploadSize() int64 {
func (i *Instance) GetMaxUploadSize() int64 {
ret := int64(1024)
if viper.IsSet(MaxUploadSize) {
ret = viper.GetInt64(MaxUploadSize)
@ -607,37 +670,65 @@ func GetMaxUploadSize() int64 {
return ret << 20
}
func IsValid() bool {
setPaths := viper.IsSet(Stash) && viper.IsSet(Cache) && viper.IsSet(Generated) && viper.IsSet(Metadata)
func (i *Instance) Validate() error {
mandatoryPaths := []string{
Database,
Generated,
}
// TODO: check valid paths
return setPaths
var missingFields []string
for _, p := range mandatoryPaths {
if !viper.IsSet(p) || viper.GetString(p) == "" {
missingFields = append(missingFields, p)
}
}
if len(missingFields) > 0 {
return MissingConfigError{
missingFields: missingFields,
}
}
return nil
}
func setDefaultValues() {
func (i *Instance) setDefaultValues() error {
viper.SetDefault(ParallelTasks, parallelTasksDefault)
viper.SetDefault(PreviewSegmentDuration, previewSegmentDurationDefault)
viper.SetDefault(PreviewSegments, previewSegmentsDefault)
viper.SetDefault(PreviewExcludeStart, previewExcludeStartDefault)
viper.SetDefault(PreviewExcludeEnd, previewExcludeEndDefault)
viper.SetDefault(Database, i.GetDefaultDatabaseFilePath())
// Set generated to the metadata path for backwards compat
viper.SetDefault(Generated, viper.GetString(Metadata))
// Set default scrapers and plugins paths
viper.SetDefault(ScrapersPath, i.GetDefaultScrapersPath())
viper.SetDefault(PluginsPath, i.GetDefaultPluginsPath())
return viper.WriteConfig()
}
// SetInitialConfig fills in missing required config fields
func SetInitialConfig() error {
func (i *Instance) SetInitialConfig() error {
// generate some api keys
const apiKeyLength = 32
if string(GetJWTSignKey()) == "" {
if string(i.GetJWTSignKey()) == "" {
signKey := utils.GenerateRandomKey(apiKeyLength)
Set(JWTSignKey, signKey)
i.Set(JWTSignKey, signKey)
}
if string(GetSessionStoreKey()) == "" {
if string(i.GetSessionStoreKey()) == "" {
sessionStoreKey := utils.GenerateRandomKey(apiKeyLength)
Set(SessionStoreKey, sessionStoreKey)
i.Set(SessionStoreKey, sessionStoreKey)
}
setDefaultValues()
return Write()
return i.setDefaultValues()
}
func (i *Instance) FinalizeSetup() {
i.isNewSystem = false
}
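With the package-level getters replaced by methods, callers now go through the singleton; a minimal sketch of the new calling convention (illustrative only, not part of this diff) follows.
package example
import (
	"log"
	"github.com/stashapp/stash/pkg/manager/config"
)
// printPaths is an illustrative sketch, not part of this diff: it fetches the
// shared config instance, checks the mandatory settings, and reads two paths.
func printPaths() {
	c := config.GetInstance()
	if err := c.Validate(); err != nil {
		// err is a MissingConfigError listing the unset mandatory keys.
		log.Fatalf("configuration is incomplete: %v", err)
	}
	log.Printf("database: %s, generated: %s", c.GetDatabasePath(), c.GetGeneratedPath())
}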

pkg/manager/config/init.go (new file, 123 lines)
View file

@ -0,0 +1,123 @@
package config
import (
"fmt"
"net"
"os"
"sync"
"github.com/spf13/pflag"
"github.com/spf13/viper"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/utils"
)
var once sync.Once
type flagStruct struct {
configFilePath string
}
func Initialize() (*Instance, error) {
var err error
once.Do(func() {
instance = &Instance{}
flags := initFlags()
if err = initConfig(flags); err != nil {
return
}
initEnvs()
if instance.isNewSystem {
if instance.Validate() == nil {
// system has been initialised by the environment
instance.isNewSystem = false
}
}
if !instance.isNewSystem {
err = instance.SetInitialConfig()
}
})
return instance, err
}
func initConfig(flags flagStruct) error {
// The config file is called config. Leave off the file extension.
viper.SetConfigName("config")
viper.AddConfigPath(".") // Look for config in the working directory
viper.AddConfigPath("$HOME/.stash") // Look for the config in the home directory
configFile := ""
envConfigFile := os.Getenv("STASH_CONFIG_FILE")
if flags.configFilePath != "" {
configFile = flags.configFilePath
} else if envConfigFile != "" {
configFile = envConfigFile
}
if configFile != "" {
viper.SetConfigFile(configFile)
// if file does not exist, assume it is a new system
if exists, _ := utils.FileExists(configFile); !exists {
instance.isNewSystem = true
// ensure we can write to the file
if err := utils.Touch(configFile); err != nil {
return fmt.Errorf(`could not write to provided config path "%s": %s`, configFile, err.Error())
} else {
// remove the file
os.Remove(configFile)
}
return nil
}
}
err := viper.ReadInConfig() // Find and read the config file
// if not found, assume it's a new system
if _, isMissing := err.(viper.ConfigFileNotFoundError); isMissing {
instance.isNewSystem = true
return nil
} else if err != nil {
return err
}
return nil
}
func initFlags() flagStruct {
flags := flagStruct{}
pflag.IP("host", net.IPv4(0, 0, 0, 0), "ip address for the host")
pflag.Int("port", 9999, "port to serve from")
pflag.StringVarP(&flags.configFilePath, "config", "c", "", "config file to use")
pflag.Parse()
if err := viper.BindPFlags(pflag.CommandLine); err != nil {
logger.Infof("failed to bind flags: %s", err.Error())
}
return flags
}
func initEnvs() {
viper.SetEnvPrefix("stash") // will be uppercased automatically
viper.BindEnv("host") // STASH_HOST
viper.BindEnv("port") // STASH_PORT
viper.BindEnv("external_host") // STASH_EXTERNAL_HOST
viper.BindEnv("generated") // STASH_GENERATED
viper.BindEnv("metadata") // STASH_METADATA
viper.BindEnv("cache") // STASH_CACHE
// only set stash config flag if not already set
if instance.GetStashPaths() == nil {
viper.BindEnv("stash") // STASH_STASH
}
}
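For orientation, the start-up call into this new initialisation path could look like the sketch below (illustrative, not part of this diff); the config file is resolved from the -c flag first, then STASH_CONFIG_FILE, then the default search paths.
package example
import (
	"log"
	"github.com/stashapp/stash/pkg/manager/config"
)
// loadConfig is an illustrative sketch, not part of this diff: Initialize is
// called once at start-up and reports whether this is a fresh system.
func loadConfig() *config.Instance {
	cfg, err := config.Initialize()
	if err != nil {
		log.Fatalf("could not initialise configuration: %v", err)
	}
	if cfg.IsNewSystem() {
		log.Print("no config file found; treating this as a new system")
	}
	return cfg
}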

View file

@ -0,0 +1,99 @@
package manager
import (
"fmt"
"image"
"image/color"
"math"
"github.com/corona10/goimagehash"
"github.com/disintegration/imaging"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/utils"
)
type PhashGenerator struct {
Info *GeneratorInfo
VideoChecksum string
Columns int
Rows int
}
func NewPhashGenerator(videoFile ffmpeg.VideoFile, checksum string) (*PhashGenerator, error) {
exists, err := utils.FileExists(videoFile.Path)
if !exists {
return nil, err
}
generator, err := newGeneratorInfo(videoFile)
if err != nil {
return nil, err
}
return &PhashGenerator{
Info: generator,
VideoChecksum: checksum,
Columns: 5,
Rows: 5,
}, nil
}
func (g *PhashGenerator) Generate() (*uint64, error) {
encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)
sprite, err := g.generateSprite(&encoder)
if err != nil {
return nil, err
}
hash, err := goimagehash.PerceptionHash(sprite)
if err != nil {
return nil, err
}
hashValue := hash.GetHash()
return &hashValue, nil
}
func (g *PhashGenerator) generateSprite(encoder *ffmpeg.Encoder) (image.Image, error) {
logger.Infof("[generator] generating phash sprite for %s", g.Info.VideoFile.Path)
// Generate sprite image offset by 5% on each end to avoid intro/outros
chunkCount := g.Columns * g.Rows
offset := 0.05 * g.Info.VideoFile.Duration
stepSize := (0.9 * g.Info.VideoFile.Duration) / float64(chunkCount)
var images []image.Image
for i := 0; i < chunkCount; i++ {
time := offset + (float64(i) * stepSize)
options := ffmpeg.SpriteScreenshotOptions{
Time: time,
Width: 160,
}
img, err := encoder.SpriteScreenshot(g.Info.VideoFile, options)
if err != nil {
return nil, err
}
images = append(images, img)
}
// Combine all of the thumbnails into a sprite image
if len(images) == 0 {
return nil, fmt.Errorf("images slice is empty, failed to generate phash sprite for %s", g.Info.VideoFile.Path)
}
width := images[0].Bounds().Size().X
height := images[0].Bounds().Size().Y
canvasWidth := width * g.Columns
canvasHeight := height * g.Rows
montage := imaging.New(canvasWidth, canvasHeight, color.NRGBA{})
for index := 0; index < len(images); index++ {
x := width * (index % g.Columns)
y := height * int(math.Floor(float64(index)/float64(g.Rows)))
img := images[index]
montage = imaging.Paste(montage, img, image.Pt(x, y))
}
return montage, nil
}
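A hedged sketch of how the new phash generator might be driven (not part of this diff); it assumes the manager singleton has already been initialised with an FFMPEG path and that the ffmpeg.VideoFile was probed elsewhere during a scan.
package example
import (
	"log"
	"github.com/stashapp/stash/pkg/ffmpeg"
	"github.com/stashapp/stash/pkg/manager"
)
// hashVideo is an illustrative sketch, not part of this diff: it builds the
// 5x5 sprite for the video and returns its 64-bit perceptual hash.
func hashVideo(vf ffmpeg.VideoFile, checksum string) (uint64, error) {
	g, err := manager.NewPhashGenerator(vf, checksum)
	if err != nil {
		return 0, err
	}
	phash, err := g.Generate()
	if err != nil {
		return 0, err
	}
	log.Printf("phash for %s: %x", checksum, *phash)
	return *phash, nil
}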

View file

@ -58,11 +58,6 @@ func (g *PreviewGenerator) Generate() error {
}
encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)
if err := g.generateConcatFile(); err != nil {
return err
}
if g.GenerateVideo {
if err := g.generateVideo(&encoder, false); err != nil {
logger.Warnf("[generator] failed generating scene preview, trying fallback")
@ -101,18 +96,32 @@ func (g *PreviewGenerator) generateVideo(encoder *ffmpeg.Encoder, fallback bool)
if !g.Overwrite && outputExists {
return nil
}
err := g.generateConcatFile()
if err != nil {
return err
}
var tmpFiles []string // a list of tmp files used during the preview generation
tmpFiles = append(tmpFiles, g.getConcatFilePath()) // add concat filename to tmpFiles
defer func() { removeFiles(tmpFiles) }() // remove tmpFiles when done
stepSize, offset := g.Info.getStepSizeAndOffset()
durationSegment := g.Info.ChunkDuration
if durationSegment < 0.75 { // a very short duration can create files without a video stream
durationSegment = 0.75 // use 0.75 in that case
logger.Warnf("[generator] Segment duration (%f) too short.Using 0.75 instead.", g.Info.ChunkDuration)
}
for i := 0; i < g.Info.ChunkCount; i++ {
time := offset + (float64(i) * stepSize)
num := fmt.Sprintf("%.3d", i)
filename := "preview_" + g.VideoChecksum + "_" + num + ".mp4"
chunkOutputPath := instance.Paths.Generated.GetTmpPath(filename)
tmpFiles = append(tmpFiles, chunkOutputPath) // add chunk filename to tmpFiles
options := ffmpeg.ScenePreviewChunkOptions{
StartTime: time,
Duration: g.Info.ChunkDuration,
Duration: durationSegment,
Width: 640,
OutputPath: chunkOutputPath,
}
@ -152,3 +161,11 @@ func (g *PreviewGenerator) generateImage(encoder *ffmpeg.Encoder) error {
func (g *PreviewGenerator) getConcatFilePath() string {
return instance.Paths.Generated.GetTmpPath(fmt.Sprintf("files_%s.txt", g.VideoChecksum))
}
func removeFiles(list []string) {
for _, f := range list {
if err := os.Remove(f); err != nil {
logger.Warnf("[generator] Delete error: %s", err)
}
}
}

Some files were not shown because too many files have changed in this diff.