Merge pull request #2175 from stashapp/develop

Merge to master for v0.12.0 release
This commit is contained in:
WithoutPants 2021-12-29 09:39:32 +11:00 committed by GitHub
commit fb864a6e90
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
278 changed files with 30564 additions and 29150 deletions

1
.gitignore vendored
View file

@ -30,6 +30,7 @@ ui/v2.5/src/core/generated-*.tsx
.idea/**/dictionaries
.idea/**/shelf
.vscode
.devcontainer
# Generated files
.idea/**/contentModel.xml

View file

@ -165,17 +165,17 @@ pre-ui:
.PHONY: ui
ui: pre-build
$(SET) REACT_APP_DATE="$(BUILD_DATE)" $(SEPARATOR) \
$(SET) REACT_APP_GITHASH=$(GITHASH) $(SEPARATOR) \
$(SET) REACT_APP_STASH_VERSION=$(STASH_VERSION) $(SEPARATOR) \
$(SET) VITE_APP_DATE="$(BUILD_DATE)" $(SEPARATOR) \
$(SET) VITE_APP_GITHASH=$(GITHASH) $(SEPARATOR) \
$(SET) VITE_APP_STASH_VERSION=$(STASH_VERSION) $(SEPARATOR) \
cd ui/v2.5 && yarn build
.PHONY: ui-start
ui-start: pre-build
$(SET) REACT_APP_DATE="$(BUILD_DATE)" $(SEPARATOR) \
$(SET) REACT_APP_GITHASH=$(GITHASH) $(SEPARATOR) \
$(SET) REACT_APP_STASH_VERSION=$(STASH_VERSION) $(SEPARATOR) \
cd ui/v2.5 && yarn start
$(SET) VITE_APP_DATE="$(BUILD_DATE)" $(SEPARATOR) \
$(SET) VITE_APP_GITHASH=$(GITHASH) $(SEPARATOR) \
$(SET) VITE_APP_STASH_VERSION=$(STASH_VERSION) $(SEPARATOR) \
cd ui/v2.5 && yarn start --host
.PHONY: fmt-ui
fmt-ui:

View file

@ -41,6 +41,12 @@ Download and run Stash. It will prompt you for some configuration options and a
The simplest way to tag a large number of files is by using the [Tagger](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/docs/en/Tagger.md) which uses filename keywords to help identify the file and pull in scene and performer information from our stash-box database. Note that this data source is not comprehensive and you may need to use the scrapers to identify some of your media.
# Translation
[![Translate](https://translate.stashapp.cc/widgets/stash/-/stash-desktop-client/svg-badge.svg)](https://translate.stashapp.cc/engage/stash/)
🇧🇷 🇨🇳 🇬🇧 🇫🇮 🇫🇷 🇩🇪 🇮🇹 🇪🇸 🇸🇪 🇹🇼
Stash is available in 10 languages (so far!) and it could be in your language too. If you want to help us translate Stash into your language, you can make an account at [translate.stashapp.cc](https://translate.stashapp.cc/projects/stash/stash-desktop-client/) to get started contributing new languages or improving existing ones. Thanks!
# Support (FAQ)
Answers to other Frequently Asked Questions can be found [on our Wiki](https://github.com/stashapp/stash/wiki/FAQ)

View file

@ -24,22 +24,22 @@ NOTE: The `make` command in Windows will be `mingw32-make` with MingW.
### macOS
TODO
1. If you don't have it already, install the [Homebrew package manager](https://brew.sh).
2. Install dependencies: `brew install go git yarn gcc make`
## Commands
* `make generate` - Generate Go and UI GraphQL files
* `make build` - Builds the binary (make sure to build the UI as well... see below)
* `make docker-build` - Locally builds and tags a complete 'stash/build' docker image
* `make pre-ui` - Installs the UI dependencies. Only needs to be run once before building the UI for the first time, or if the dependencies are updated
* `make generate` - Generate Go and UI GraphQL files
* `make fmt-ui` - Formats the UI source code
* `make ui` - Builds the frontend
* `make build` - Builds the binary (make sure to build the UI as well... see below)
* `make docker-build` - Locally builds and tags a complete 'stash/build' docker image
* `make lint` - Run the linter on the backend
* `make fmt` - Run `go fmt`
* `make it` - Run the unit and integration tests
* `make validate` - Run all of the tests and checks required to submit a PR
* `make ui-start` - Runs the UI in development mode. Requires a running stash server to connect to. Stash port can be changed from the default of `9999` with environment variable `REACT_APP_PLATFORM_PORT`.
* `make ui-start` - Runs the UI in development mode. Requires a running stash server to connect to. Stash server port can be changed from the default of `9999` using environment variable `VITE_APP_PLATFORM_PORT`. UI runs on port `3000` or the next available port.
## Building a release

16
go.mod
View file

@ -4,7 +4,7 @@ require (
github.com/99designs/gqlgen v0.12.2
github.com/Yamashou/gqlgenc v0.0.0-20200902035953-4dbef3551953
github.com/anacrolix/dms v1.2.2
github.com/antchfx/htmlquery v1.2.3
github.com/antchfx/htmlquery v1.2.5-0.20211125074323-810ee8082758
github.com/chromedp/cdproto v0.0.0-20210622022015-fe1827b46b84
github.com/chromedp/chromedp v0.7.3
github.com/corona10/goimagehash v1.0.3
@ -37,20 +37,24 @@ require (
github.com/vektra/mockery/v2 v2.2.1
golang.org/x/crypto v0.0.0-20210817164053-32db794688a5
golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb
golang.org/x/net v0.0.0-20210520170846-37e1c6afe023
golang.org/x/net v0.0.0-20211123203042-d83791d6bcd9
golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf
golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b // indirect
golang.org/x/text v0.3.6
golang.org/x/text v0.3.7
golang.org/x/tools v0.1.5 // indirect
gopkg.in/sourcemap.v1 v1.0.5 // indirect
gopkg.in/yaml.v2 v2.4.0
)
require github.com/vektah/gqlparser/v2 v2.0.1
require (
github.com/lucasb-eyer/go-colorful v1.2.0
github.com/vearutop/statigz v1.1.6
github.com/vektah/gqlparser/v2 v2.0.1
)
require (
github.com/agnivade/levenshtein v1.1.0 // indirect
github.com/antchfx/xpath v1.1.6 // indirect
github.com/antchfx/xpath v1.2.0 // indirect
github.com/chromedp/sysutil v1.0.0 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
@ -58,7 +62,7 @@ require (
github.com/gobwas/httphead v0.1.0 // indirect
github.com/gobwas/pool v0.2.1 // indirect
github.com/gobwas/ws v1.1.0-rc.5 // indirect
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/hashicorp/errwrap v1.0.0 // indirect
github.com/hashicorp/go-multierror v1.1.0 // indirect
github.com/hashicorp/golang-lru v0.5.1 // indirect

25
go.sum
View file

@ -79,10 +79,12 @@ github.com/anacrolix/missinggo v1.1.0/go.mod h1:MBJu3Sk/k3ZfGYcS7z18gwfu72Ey/xop
github.com/anacrolix/tagflag v0.0.0-20180109131632-2146c8d41bf0/go.mod h1:1m2U/K6ZT+JZG0+bdMK6qauP49QT4wE5pmhJXOKKCHw=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
github.com/antchfx/htmlquery v1.2.3 h1:sP3NFDneHx2stfNXCKbhHFo8XgNjCACnU/4AO5gWz6M=
github.com/antchfx/htmlquery v1.2.3/go.mod h1:B0ABL+F5irhhMWg54ymEZinzMSi0Kt3I2if0BLYa3V0=
github.com/antchfx/xpath v1.1.6 h1:6sVh6hB5T6phw1pFpHRQ+C4bd8sNI+O58flqtg7h0R0=
github.com/antchfx/xpath v1.1.6/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk=
github.com/andybalholm/brotli v1.0.3 h1:fpcw+r1N1h0Poc1F/pHbW40cUm/lMEQslZtCkBQ0UnM=
github.com/andybalholm/brotli v1.0.3/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/antchfx/htmlquery v1.2.5-0.20211125074323-810ee8082758 h1:Ldjwcl7T8VqCKgQQ0TfPI8fNb8O/GtMXcYaHlqOu99s=
github.com/antchfx/htmlquery v1.2.5-0.20211125074323-810ee8082758/go.mod h1:2xO6iu3EVWs7R2JYqBbp8YzG50gj/ofqs5/0VZoDZLc=
github.com/antchfx/xpath v1.2.0 h1:mbwv7co+x0RwgeGAOHdrKy89GvHaGvxxBtPK0uF9Zr8=
github.com/antchfx/xpath v1.2.0/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/apache/arrow/go/arrow v0.0.0-20200601151325-b2287a20f230/go.mod h1:QNYViu/X0HXDHw7m3KXzWSVXIbfUvJqBFe6Gj8/pYA0=
github.com/apache/arrow/go/arrow v0.0.0-20210521153258-78c88a9f517b/go.mod h1:R4hW3Ug0s+n4CUsWHKOj00Pu01ZqU4x/hSF5kXUcXKQ=
@ -126,6 +128,8 @@ github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCS
github.com/bkaradzic/go-lz4 v1.0.0/go.mod h1:0YdlkowM3VswSROI7qDxhRvJ3sLhlFrRRwjwegp5jy4=
github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
github.com/bool64/dev v0.1.41 h1:L554LCQZc3d7mtcdPUgDbSrCVbr48/30zgu0VuC/FTA=
github.com/bool64/dev v0.1.41/go.mod h1:cTHiTDNc8EewrQPy3p1obNilpMpdmlUesDkFTF2zRWU=
github.com/bradfitz/iter v0.0.0-20140124041915-454541ec3da2/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo=
github.com/bradfitz/iter v0.0.0-20190303215204-33e6a9893b0c/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo=
github.com/cenkalti/backoff/v4 v4.0.2/go.mod h1:eEew/i+1Q6OrCDZh3WiXYv3+nJwBASZ8Bog/87DQnVg=
@ -268,8 +272,9 @@ github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfU
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
@ -505,6 +510,8 @@ github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/lib/pq v1.10.0 h1:Zx5DJFEYQXio93kgXnQ09fXNiUKsqv4OUEu2UtGcB1E=
github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls=
@ -706,6 +713,8 @@ github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
github.com/urfave/cli/v2 v2.1.1 h1:Qt8FeAtxE/vfdrLmR3rxR6JRE0RoVmbXu8+6kZtYU4k=
github.com/urfave/cli/v2 v2.1.1/go.mod h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ=
github.com/vearutop/statigz v1.1.6 h1:si1zvulh/6P4S/SjFticuKQ8/EgQISglaRuycj8PWso=
github.com/vearutop/statigz v1.1.6/go.mod h1:czAv7iXgPv/s+xsgXpVEhhD0NSOQ4wZPgmM/n7LANDI=
github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e h1:+w0Zm/9gaWpEAyDlU1eKOuk5twTjAjuevXqcJJw8hrg=
github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U=
github.com/vektah/gqlparser v1.3.1 h1:8b0IcD3qZKWJQHSzynbDlrtP3IxVydZ2DZepCGofqfU=
@ -863,8 +872,9 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v
golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210520170846-37e1c6afe023 h1:ADo5wSpq2gqaCGQWzk7S5vd//0iyyLeAratkEoG5dLE=
golang.org/x/net v0.0.0-20210520170846-37e1c6afe023/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211123203042-d83791d6bcd9 h1:0qxwC5n+ttVOINCBeRHO0nq9X7uy8SDsPoi5OaCdIEI=
golang.org/x/net v0.0.0-20211123203042-d83791d6bcd9/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/oauth2 v0.0.0-20180227000427-d7d64896b5ff/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@ -986,8 +996,9 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=

View file

@ -14,6 +14,10 @@ resolver:
struct_tag: gqlgen
models:
# Scalars
Timestamp:
model: github.com/stashapp/stash/pkg/models.Timestamp
# Objects
Gallery:
model: github.com/stashapp/stash/pkg/models.Gallery
Image:

View file

@ -7,6 +7,7 @@ fragment ConfigGeneralData on ConfigGeneralResult {
databasePath
generatedPath
metadataPath
scrapersPath
cachePath
calculateMD5
videoFileNamingAlgorithm
@ -61,7 +62,7 @@ fragment ConfigInterfaceData on ConfigInterfaceResult {
cssEnabled
language
slideshowDelay
disabledDropdownCreate {
disableDropdownCreate {
performer
tag
studio
@ -106,6 +107,16 @@ fragment ScraperSourceData on ScraperSource {
}
fragment ConfigDefaultSettingsData on ConfigDefaultSettingsResult {
scan {
useFileMetadata
stripFileExtension
scanGeneratePreviews
scanGenerateImagePreviews
scanGenerateSprites
scanGeneratePhashes
scanGenerateThumbnails
}
identify {
sources {
source {
@ -120,6 +131,31 @@ fragment ConfigDefaultSettingsData on ConfigDefaultSettingsResult {
}
}
autoTag {
performers
studios
tags
}
generate {
sprites
previews
imagePreviews
previewOptions {
previewSegments
previewSegmentDuration
previewExcludeStart
previewExcludeEnd
previewPreset
}
markers
markerImagePreviews
markerScreenshots
transcodes
phashes
interactiveHeatmapsSpeeds
}
deleteFile
deleteGenerated
}

View file

@ -2,6 +2,8 @@ fragment GalleryData on Gallery {
id
checksum
path
created_at
updated_at
title
date
url

View file

@ -6,6 +6,8 @@ fragment ImageData on Image {
organized
o_counter
path
created_at
updated_at
file {
size

View file

@ -12,6 +12,7 @@ fragment SlimSceneData on Scene {
path
phash
interactive
interactive_speed
file {
size
@ -33,6 +34,7 @@ fragment SlimSceneData on Scene {
chapters_vtt
sprite
funscript
interactive_heatmap
}
scene_markers {

View file

@ -12,6 +12,9 @@ fragment SceneData on Scene {
path
phash
interactive
interactive_speed
created_at
updated_at
file {
size
@ -33,6 +36,7 @@ fragment SceneData on Scene {
chapters_vtt
sprite
funscript
interactive_heatmap
}
scene_markers {

View file

@ -1,27 +1,3 @@
query ScrapeFreeones($performer_name: String!) {
scrapeFreeones(performer_name: $performer_name) {
name
url
twitter
instagram
birthdate
ethnicity
country
eye_color
height
measurements
fake_tits
career_length
tattoos
piercings
aliases
details
death_date
hair_color
weight
}
}
query ScrapeFreeonesPerformers($q: String!) {
scrapeFreeonesPerformerList(query: $q)
}

View file

@ -67,10 +67,12 @@ type Query {
# Scrapers
"""List available scrapers"""
listPerformerScrapers: [Scraper!]!
listSceneScrapers: [Scraper!]!
listGalleryScrapers: [Scraper!]!
listMovieScrapers: [Scraper!]!
listScrapers(types: [ScrapeContentType!]!): [Scraper!]!
listPerformerScrapers: [Scraper!]! @deprecated(reason: "Use listScrapers(types: [PERFORMER])")
listSceneScrapers: [Scraper!]! @deprecated(reason: "Use listScrapers(types: [SCENE])")
listGalleryScrapers: [Scraper!]! @deprecated(reason: "Use listScrapers(types: [GALLERY])")
listMovieScrapers: [Scraper!]! @deprecated(reason: "Use listScrapers(types: [MOVIE])")
"""Scrape for a single scene"""
scrapeSingleScene(source: ScraperSourceInput!, input: ScrapeSingleSceneInput!): [ScrapedScene!]!
@ -88,6 +90,9 @@ type Query {
"""Scrape for a single movie"""
scrapeSingleMovie(source: ScraperSourceInput!, input: ScrapeSingleMovieInput!): [ScrapedMovie!]!
"Scrapes content based on a URL"
scrapeURL(url: String!, ty: ScrapeContentType!): ScrapedContent
"""Scrapes a complete performer record based on a URL"""
scrapePerformerURL(url: String!): ScrapedPerformer
"""Scrapes a complete performer record based on a URL"""
@ -106,8 +111,6 @@ type Query {
"""Scrapes a complete gallery record based on an existing gallery"""
scrapeGallery(scraper_id: ID!, gallery: GalleryUpdateInput!): ScrapedGallery @deprecated(reason: "use scrapeSingleGallery")
"""Scrape a performer using Freeones"""
scrapeFreeones(performer_name: String!): ScrapedPerformer @deprecated(reason: "use scrapeSinglePerformer with scraper_id = builtin_freeones")
"""Scrape a list of performers from a query"""
scrapeFreeonesPerformerList(query: String!): [String!]! @deprecated(reason: "use scrapeSinglePerformer with scraper_id = builtin_freeones")

View file

@ -41,16 +41,18 @@ input ConfigGeneralInput {
generatedPath: String
"""Path to import/export files"""
metadataPath: String
"""Path to scrapers"""
scrapersPath: String
"""Path to cache"""
cachePath: String
"""Whether to calculate MD5 checksums for scene video files"""
calculateMD5: Boolean!
calculateMD5: Boolean
"""Hash algorithm to use for generated file naming"""
videoFileNamingAlgorithm: HashAlgorithm!
videoFileNamingAlgorithm: HashAlgorithm
"""Number of parallel tasks to start during scan/generate"""
parallelTasks: Int
"""Include audio stream in previews"""
previewAudio: Boolean!
previewAudio: Boolean
"""Number of segments in a preview file"""
previewSegments: Int
"""Preview segment duration, in seconds"""
@ -78,13 +80,13 @@ input ConfigGeneralInput {
"""Name of the log file"""
logFile: String
"""Whether to also output to stderr"""
logOut: Boolean!
logOut: Boolean
"""Minimum log level"""
logLevel: String!
logLevel: String
"""Whether to log http access"""
logAccess: Boolean!
logAccess: Boolean
"""True if galleries should be created from folders with images"""
createGalleriesFromFolders: Boolean!
createGalleriesFromFolders: Boolean
"""Array of video file extensions"""
videoExtensions: [String!]
"""Array of image file extensions"""
@ -104,7 +106,7 @@ input ConfigGeneralInput {
"""Whether the scraper should check for invalid certificates"""
scraperCertCheck: Boolean @deprecated(reason: "use mutation ConfigureScraping(input: ConfigScrapingInput) instead")
"""Stash-box instances used for tagging"""
stashBoxes: [StashBoxInput!]!
stashBoxes: [StashBoxInput!]
}
type ConfigGeneralResult {
@ -282,7 +284,8 @@ type ConfigInterfaceResult {
slideshowDelay: Int
"""Fields are true if creating via dropdown menus are disabled"""
disabledDropdownCreate: ConfigDisableDropdownCreate!
disableDropdownCreate: ConfigDisableDropdownCreate!
disabledDropdownCreate: ConfigDisableDropdownCreate! @deprecated(reason: "Use disableDropdownCreate")
"""Handy Connection Key"""
handyKey: String
@ -316,7 +319,7 @@ input ConfigScrapingInput {
"""Scraper CDP path. Path to chrome executable or remote address"""
scraperCDPPath: String
"""Whether the scraper should check for invalid certificates"""
scraperCertCheck: Boolean!
scraperCertCheck: Boolean
"""Tags blacklist during scraping"""
excludeTagPatterns: [String!]
}
@ -333,7 +336,10 @@ type ConfigScrapingResult {
}
type ConfigDefaultSettingsResult {
scan: ScanMetadataOptions
identify: IdentifyMetadataTaskOptions
autoTag: AutoTagMetadataOptions
generate: GenerateMetadataOptions
"""If true, delete file checkbox will be checked by default"""
deleteFile: Boolean
@ -342,7 +348,10 @@ type ConfigDefaultSettingsResult {
}
input ConfigDefaultSettingsInput {
scan: ScanMetadataInput
identify: IdentifyMetadataInput
autoTag: AutoTagMetadataInput
generate: GenerateMetadataInput
"""If true, delete file checkbox will be checked by default"""
deleteFile: Boolean

View file

@ -158,6 +158,8 @@ input SceneFilterType {
url: StringCriterionInput
"""Filter by interactive"""
interactive: Boolean
"""Filter by InteractiveSpeed"""
interactive_speed: IntCriterionInput
}
input MovieFilterType {

View file

@ -9,7 +9,10 @@ input GenerateMetadataInput {
markerImagePreviews: Boolean
markerScreenshots: Boolean
transcodes: Boolean
"""Generate transcodes even if not required"""
forceTranscodes: Boolean
phashes: Boolean
interactiveHeatmapsSpeeds: Boolean
"""scene ids to generate for"""
sceneIDs: [ID!]
@ -33,8 +36,41 @@ input GeneratePreviewOptionsInput {
previewPreset: PreviewPreset
}
type GenerateMetadataOptions {
sprites: Boolean
previews: Boolean
imagePreviews: Boolean
previewOptions: GeneratePreviewOptions
markers: Boolean
markerImagePreviews: Boolean
markerScreenshots: Boolean
transcodes: Boolean
phashes: Boolean
interactiveHeatmapsSpeeds: Boolean
}
type GeneratePreviewOptions {
"""Number of segments in a preview file"""
previewSegments: Int
"""Preview segment duration, in seconds"""
previewSegmentDuration: Float
"""Duration of start of video to exclude when generating previews"""
previewExcludeStart: String
"""Duration of end of video to exclude when generating previews"""
previewExcludeEnd: String
"""Preset when generating preview"""
previewPreset: PreviewPreset
}
"Filter options for meta data scannning"
input ScanMetaDataFilterInput {
"If set, files with a modification time before this time point are ignored by the scan"
minModTime: Timestamp
}
input ScanMetadataInput {
paths: [String!]
"""Set name, date, details from metadata (if present)"""
useFileMetadata: Boolean
"""Strip file extension from title"""
@ -49,9 +85,31 @@ input ScanMetadataInput {
scanGeneratePhashes: Boolean
"""Generate image thumbnails during scan"""
scanGenerateThumbnails: Boolean
"Filter options for the scan"
filter: ScanMetaDataFilterInput
}
type ScanMetadataOptions {
"""Set name, date, details from metadata (if present)"""
useFileMetadata: Boolean!
"""Strip file extension from title"""
stripFileExtension: Boolean!
"""Generate previews during scan"""
scanGeneratePreviews: Boolean!
"""Generate image previews during scan"""
scanGenerateImagePreviews: Boolean!
"""Generate sprites during scan"""
scanGenerateSprites: Boolean!
"""Generate phashes during scan"""
scanGeneratePhashes: Boolean!
"""Generate image thumbnails during scan"""
scanGenerateThumbnails: Boolean!
}
input CleanMetadataInput {
paths: [String!]
"""Do a dry run. Don't delete any files"""
dryRun: Boolean!
}
@ -67,15 +125,24 @@ input AutoTagMetadataInput {
tags: [String!]
}
type AutoTagMetadataOptions {
"""IDs of performers to tag files with, or "*" for all"""
performers: [String!]
"""IDs of studios to tag files with, or "*" for all"""
studios: [String!]
"""IDs of tags to tag files with, or "*" for all"""
tags: [String!]
}
enum IdentifyFieldStrategy {
"""Never sets the field value"""
IGNORE
"""
For multi-value fields, merge with existing.
For multi-value fields, merge with existing.
For single-value fields, ignore if already set
"""
MERGE
"""Always replaces the value if a value is found.
"""Always replaces the value if a value is found.
For multi-value fields, any existing values are removed and replaced with the
scraped values.
"""

View file

@ -0,0 +1,7 @@
"""
Timestamp is a point in time. It is always output as RFC3339-compatible time points.
It can be input as a RFC3339 string, or as "<4h" for "4 hours in the past" or ">5m"
for "5 minutes in the future"
"""
scalar Timestamp

View file

@ -18,6 +18,7 @@ type ScenePathsType {
chapters_vtt: String # Resolver
sprite: String # Resolver
funscript: String # Resolver
interactive_heatmap: String # Resolver
}
type SceneMovie {
@ -39,6 +40,7 @@ type Scene {
path: String!
phash: String
interactive: Boolean!
interactive_speed: Int
created_at: Time!
updated_at: Time!
file_mod_time: Time

View file

@ -1,5 +1,5 @@
enum ScrapeType {
"""From text query"""
"""From text query"""
NAME
"""From existing object"""
FRAGMENT
@ -7,6 +7,22 @@ enum ScrapeType {
URL
}
"Type of the content a scraper generates"
enum ScrapeContentType {
GALLERY
MOVIE
PERFORMER
SCENE
}
"Scraped Content is the forming union over the different scrapers"
union ScrapedContent = ScrapedStudio
| ScrapedTag
| ScrapedScene
| ScrapedGallery
| ScrapedMovie
| ScrapedPerformer
type ScraperSpec {
"""URLs matching these can be scraped with"""
urls: [String!]
@ -26,6 +42,7 @@ type Scraper {
movie: ScraperSpec
}
type ScrapedStudio {
"""Set if studio matched"""
stored_id: ID

View file

@ -17,14 +17,31 @@ type imageBox struct {
files []string
}
// imageExtensions lists the lowercase file-name suffixes that newImageBox
// accepts when walking the box filesystem; files with any other extension
// are skipped.
var imageExtensions = []string{
	".jpg",
	".jpeg",
	".png",
	".gif",
	".svg",
	".webp",
}
func newImageBox(box fs.FS) (*imageBox, error) {
ret := &imageBox{
box: box,
}
err := fs.WalkDir(box, ".", func(path string, d fs.DirEntry, err error) error {
if !d.IsDir() {
ret.files = append(ret.files, path)
if d.IsDir() {
return nil
}
baseName := strings.ToLower(d.Name())
for _, ext := range imageExtensions {
if strings.HasSuffix(baseName, ext) {
ret.files = append(ret.files, path)
break
}
}
return nil

View file

@ -14,6 +14,7 @@ var matcher = language.NewMatcher([]language.Tag{
language.MustParse("de-DE"),
language.MustParse("it-IT"),
language.MustParse("fr-FR"),
language.MustParse("fi-FI"),
language.MustParse("pt-BR"),
language.MustParse("sv-SE"),
language.MustParse("zh-CN"),

View file

@ -7,13 +7,22 @@ import (
"strconv"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin"
"github.com/stashapp/stash/pkg/scraper"
)
var (
// ErrNotImplemented is an error which means the given functionality isn't implemented by the API.
ErrNotImplemented = errors.New("not implemented")
ErrNotSupported = errors.New("not supported")
// ErrNotSupported is returned whenever there's a test, which can be used to guard against the error,
// but the given parameters aren't supported by the system.
ErrNotSupported = errors.New("not supported")
// ErrInput signifies errors where the input isn't valid for some reason. And no more specific error exists.
ErrInput = errors.New("input error")
)
type hookExecutor interface {
@ -25,6 +34,10 @@ type Resolver struct {
hookExecutor hookExecutor
}
func (r *Resolver) scraperCache() *scraper.Cache {
return manager.GetInstance().ScraperCache
}
func (r *Resolver) Gallery() models.GalleryResolver {
return &galleryResolver{r}
}

View file

@ -61,6 +61,14 @@ func (r *sceneResolver) Rating(ctx context.Context, obj *models.Scene) (*int, er
return nil, nil
}
// InteractiveSpeed resolves the scene's interactive_speed GraphQL field.
// It returns nil (not an error) when the underlying nullable database
// column is unset, so the field is null in the response.
func (r *sceneResolver) InteractiveSpeed(ctx context.Context, obj *models.Scene) (*int, error) {
	// Guard clause keeps the happy path unindented.
	if !obj.InteractiveSpeed.Valid {
		return nil, nil
	}
	// camelCase local per Go convention (was interactive_speed).
	interactiveSpeed := int(obj.InteractiveSpeed.Int64)
	return &interactiveSpeed, nil
}
func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.SceneFileType, error) {
width := int(obj.Width.Int64)
height := int(obj.Height.Int64)
@ -89,16 +97,18 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
spritePath := builder.GetSpriteURL()
chaptersVttPath := builder.GetChaptersVTTURL()
funscriptPath := builder.GetFunscriptURL()
interactiveHeatmap := builder.GetInteractiveHeatmapURL()
return &models.ScenePathsType{
Screenshot: &screenshotPath,
Preview: &previewPath,
Stream: &streamPath,
Webp: &webpPath,
Vtt: &vttPath,
ChaptersVtt: &chaptersVttPath,
Sprite: &spritePath,
Funscript: &funscriptPath,
Screenshot: &screenshotPath,
Preview: &previewPath,
Stream: &streamPath,
Webp: &webpPath,
Vtt: &vttPath,
ChaptersVtt: &chaptersVttPath,
Sprite: &spritePath,
Funscript: &funscriptPath,
InteractiveHeatmap: &interactiveHeatmap,
}, nil
}

View file

@ -57,6 +57,20 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
return nil
}
validateDir := func(key string, value string, optional bool) error {
if err := checkConfigOverride(config.Metadata); err != nil {
return err
}
if !optional || value != "" {
if err := utils.EnsureDir(value); err != nil {
return err
}
}
return nil
}
existingDBPath := c.GetDatabasePath()
if input.DatabasePath != nil && existingDBPath != *input.DatabasePath {
if err := checkConfigOverride(config.Database); err != nil {
@ -72,64 +86,70 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
existingGeneratedPath := c.GetGeneratedPath()
if input.GeneratedPath != nil && existingGeneratedPath != *input.GeneratedPath {
if err := checkConfigOverride(config.Generated); err != nil {
if err := validateDir(config.Generated, *input.GeneratedPath, false); err != nil {
return makeConfigGeneralResult(), err
}
if err := utils.EnsureDir(*input.GeneratedPath); err != nil {
c.Set(config.Generated, input.GeneratedPath)
}
refreshScraperCache := false
existingScrapersPath := c.GetScrapersPath()
if input.ScrapersPath != nil && existingScrapersPath != *input.ScrapersPath {
if err := validateDir(config.ScrapersPath, *input.ScrapersPath, false); err != nil {
return makeConfigGeneralResult(), err
}
c.Set(config.Generated, input.GeneratedPath)
refreshScraperCache = true
c.Set(config.ScrapersPath, input.ScrapersPath)
}
existingMetadataPath := c.GetMetadataPath()
if input.MetadataPath != nil && existingMetadataPath != *input.MetadataPath {
if err := checkConfigOverride(config.Metadata); err != nil {
if err := validateDir(config.Metadata, *input.MetadataPath, true); err != nil {
return makeConfigGeneralResult(), err
}
if *input.MetadataPath != "" {
if err := utils.EnsureDir(*input.MetadataPath); err != nil {
return makeConfigGeneralResult(), err
}
}
c.Set(config.Metadata, input.MetadataPath)
}
existingCachePath := c.GetCachePath()
if input.CachePath != nil && existingCachePath != *input.CachePath {
if err := checkConfigOverride(config.Metadata); err != nil {
if err := validateDir(config.Cache, *input.CachePath, true); err != nil {
return makeConfigGeneralResult(), err
}
if *input.CachePath != "" {
if err := utils.EnsureDir(*input.CachePath); err != nil {
return makeConfigGeneralResult(), err
}
}
c.Set(config.Cache, input.CachePath)
}
if !input.CalculateMd5 && input.VideoFileNamingAlgorithm == models.HashAlgorithmMd5 {
return makeConfigGeneralResult(), errors.New("calculateMD5 must be true if using MD5")
}
if input.VideoFileNamingAlgorithm != nil && *input.VideoFileNamingAlgorithm != c.GetVideoFileNamingAlgorithm() {
calculateMD5 := c.IsCalculateMD5()
if input.CalculateMd5 != nil {
calculateMD5 = *input.CalculateMd5
}
if !calculateMD5 && *input.VideoFileNamingAlgorithm == models.HashAlgorithmMd5 {
return makeConfigGeneralResult(), errors.New("calculateMD5 must be true if using MD5")
}
if input.VideoFileNamingAlgorithm != c.GetVideoFileNamingAlgorithm() {
// validate changing VideoFileNamingAlgorithm
if err := manager.ValidateVideoFileNamingAlgorithm(r.txnManager, input.VideoFileNamingAlgorithm); err != nil {
if err := manager.ValidateVideoFileNamingAlgorithm(r.txnManager, *input.VideoFileNamingAlgorithm); err != nil {
return makeConfigGeneralResult(), err
}
c.Set(config.VideoFileNamingAlgorithm, input.VideoFileNamingAlgorithm)
c.Set(config.VideoFileNamingAlgorithm, *input.VideoFileNamingAlgorithm)
}
c.Set(config.CalculateMD5, input.CalculateMd5)
if input.CalculateMd5 != nil {
c.Set(config.CalculateMD5, *input.CalculateMd5)
}
if input.ParallelTasks != nil {
c.Set(config.ParallelTasks, *input.ParallelTasks)
}
c.Set(config.PreviewAudio, input.PreviewAudio)
if input.PreviewAudio != nil {
c.Set(config.PreviewAudio, *input.PreviewAudio)
}
if input.PreviewSegments != nil {
c.Set(config.PreviewSegments, *input.PreviewSegments)
@ -185,12 +205,17 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
c.Set(config.LogFile, input.LogFile)
}
c.Set(config.LogOut, input.LogOut)
c.Set(config.LogAccess, input.LogAccess)
if input.LogOut != nil {
c.Set(config.LogOut, *input.LogOut)
}
if input.LogLevel != c.GetLogLevel() {
if input.LogAccess != nil {
c.Set(config.LogAccess, *input.LogAccess)
}
if input.LogLevel != nil && *input.LogLevel != c.GetLogLevel() {
c.Set(config.LogLevel, input.LogLevel)
logger.SetLogLevel(input.LogLevel)
logger.SetLogLevel(*input.LogLevel)
}
if input.Excludes != nil {
@ -213,14 +238,15 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
c.Set(config.GalleryExtensions, input.GalleryExtensions)
}
c.Set(config.CreateGalleriesFromFolders, input.CreateGalleriesFromFolders)
if input.CreateGalleriesFromFolders != nil {
c.Set(config.CreateGalleriesFromFolders, input.CreateGalleriesFromFolders)
}
if input.CustomPerformerImageLocation != nil {
c.Set(config.CustomPerformerImageLocation, *input.CustomPerformerImageLocation)
initialiseCustomImages()
}
refreshScraperCache := false
if input.ScraperUserAgent != nil {
c.Set(config.ScraperUserAgent, input.ScraperUserAgent)
refreshScraperCache = true
@ -293,14 +319,10 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.
c.Set(config.SlideshowDelay, *input.SlideshowDelay)
}
css := ""
if input.CSS != nil {
css = *input.CSS
c.SetCSS(*input.CSS)
}
c.SetCSS(css)
setBool(config.CSSEnabled, input.CSSEnabled)
if input.DisableDropdownCreate != nil {
@ -332,7 +354,9 @@ func (r *mutationResolver) ConfigureDlna(ctx context.Context, input models.Confi
c.Set(config.DLNAServerName, *input.ServerName)
}
c.Set(config.DLNADefaultIPWhitelist, input.WhitelistedIPs)
if input.WhitelistedIPs != nil {
c.Set(config.DLNADefaultIPWhitelist, input.WhitelistedIPs)
}
currentDLNAEnabled := c.GetDLNADefaultEnabled()
if input.Enabled != nil && *input.Enabled != currentDLNAEnabled {
@ -349,7 +373,9 @@ func (r *mutationResolver) ConfigureDlna(ctx context.Context, input models.Confi
}
}
c.Set(config.DLNAInterfaces, input.Interfaces)
if input.Interfaces != nil {
c.Set(config.DLNAInterfaces, input.Interfaces)
}
if err := c.Write(); err != nil {
return makeConfigDLNAResult(), err
@ -376,7 +402,10 @@ func (r *mutationResolver) ConfigureScraping(ctx context.Context, input models.C
c.Set(config.ScraperExcludeTagPatterns, input.ExcludeTagPatterns)
}
c.Set(config.ScraperCertCheck, input.ScraperCertCheck)
if input.ScraperCertCheck != nil {
c.Set(config.ScraperCertCheck, input.ScraperCertCheck)
}
if refreshScraperCache {
manager.GetInstance().RefreshScraperCache()
}
@ -394,6 +423,18 @@ func (r *mutationResolver) ConfigureDefaults(ctx context.Context, input models.C
c.Set(config.DefaultIdentifySettings, input.Identify)
}
if input.Scan != nil {
c.Set(config.DefaultScanSettings, input.Scan)
}
if input.AutoTag != nil {
c.Set(config.DefaultAutoTagSettings, input.AutoTag)
}
if input.Generate != nil {
c.Set(config.DefaultGenerateSettings, input.Generate)
}
if input.DeleteFile != nil {
c.Set(config.DeleteFileDefault, *input.DeleteFile)
}

View file

@ -5,9 +5,12 @@ import (
"database/sql"
"errors"
"fmt"
"os"
"strconv"
"time"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin"
@ -395,8 +398,14 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
}
var galleries []*models.Gallery
var imgsToPostProcess []*models.Image
var imgsToDelete []*models.Image
var imgsDestroyed []*models.Image
fileDeleter := &image.FileDeleter{
Deleter: *file.NewDeleter(),
Paths: manager.GetInstance().Paths,
}
deleteGenerated := utils.IsTrue(input.DeleteGenerated)
deleteFile := utils.IsTrue(input.DeleteFile)
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Gallery()
@ -422,13 +431,19 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
}
for _, img := range imgs {
if err := iqb.Destroy(img.ID); err != nil {
if err := image.Destroy(img, iqb, fileDeleter, deleteGenerated, false); err != nil {
return err
}
imgsToPostProcess = append(imgsToPostProcess, img)
imgsDestroyed = append(imgsDestroyed, img)
}
} else if input.DeleteFile != nil && *input.DeleteFile {
if deleteFile {
if err := fileDeleter.Files([]string{gallery.Path.String}); err != nil {
return err
}
}
} else if deleteFile {
// Delete image if it is only attached to this gallery
imgs, err := iqb.FindByGalleryID(id)
if err != nil {
@ -442,14 +457,16 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
}
if len(imgGalleries) == 1 {
if err := iqb.Destroy(img.ID); err != nil {
if err := image.Destroy(img, iqb, fileDeleter, deleteGenerated, deleteFile); err != nil {
return err
}
imgsToDelete = append(imgsToDelete, img)
imgsToPostProcess = append(imgsToPostProcess, img)
imgsDestroyed = append(imgsDestroyed, img)
}
}
// we only want to delete a folder-based gallery if it is empty.
// don't do this with the file deleter
}
if err := qb.Destroy(id); err != nil {
@ -459,44 +476,53 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
return nil
}); err != nil {
fileDeleter.Rollback()
return false, err
}
// if delete file is true, then delete the file as well
// if it fails, just log a message
if input.DeleteFile != nil && *input.DeleteFile {
// #1804 - delete the image files first, since they must be removed
// before deleting a folder
for _, img := range imgsToDelete {
manager.DeleteImageFile(img)
}
// perform the post-commit actions
fileDeleter.Commit()
for _, gallery := range galleries {
manager.DeleteGalleryFile(gallery)
}
}
// if delete generated is true, then delete the generated files
// for the gallery
if input.DeleteGenerated != nil && *input.DeleteGenerated {
for _, img := range imgsToPostProcess {
manager.DeleteGeneratedImageFiles(img)
for _, gallery := range galleries {
// don't delete stash library paths
if utils.IsTrue(input.DeleteFile) && !gallery.Zip && gallery.Path.Valid && !isStashPath(gallery.Path.String) {
// try to remove the folder - it is possible that it is not empty
// so swallow the error if present
_ = os.Remove(gallery.Path.String)
}
}
// call post hook after performing the other actions
for _, gallery := range galleries {
r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryDestroyPost, input, nil)
r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
GalleryDestroyInput: input,
Checksum: gallery.Checksum,
Path: gallery.Path.String,
}, nil)
}
// call image destroy post hook as well
for _, img := range imgsToDelete {
r.hookExecutor.ExecutePostHooks(ctx, img.ID, plugin.ImageDestroyPost, nil, nil)
for _, img := range imgsDestroyed {
r.hookExecutor.ExecutePostHooks(ctx, img.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
Checksum: img.Checksum,
Path: img.Path,
}, nil)
}
return true, nil
}
func isStashPath(path string) bool {
stashConfigs := manager.GetInstance().Config.GetStashPaths()
for _, config := range stashConfigs {
if path == config.Path {
return true
}
}
return false
}
func (r *mutationResolver) AddGalleryImages(ctx context.Context, input models.GalleryAddInput) (bool, error) {
galleryID, err := strconv.Atoi(input.GalleryID)
if err != nil {

View file

@ -6,6 +6,8 @@ import (
"strconv"
"time"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin"
@ -281,38 +283,38 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
return false, err
}
var image *models.Image
var i *models.Image
fileDeleter := &image.FileDeleter{
Deleter: *file.NewDeleter(),
Paths: manager.GetInstance().Paths,
}
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Image()
image, err = qb.Find(imageID)
i, err = qb.Find(imageID)
if err != nil {
return err
}
if image == nil {
if i == nil {
return fmt.Errorf("image with id %d not found", imageID)
}
return qb.Destroy(imageID)
return image.Destroy(i, qb, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile))
}); err != nil {
fileDeleter.Rollback()
return false, err
}
// if delete generated is true, then delete the generated files
// for the image
if input.DeleteGenerated != nil && *input.DeleteGenerated {
manager.DeleteGeneratedImageFiles(image)
}
// if delete file is true, then delete the file as well
// if it fails, just log a message
if input.DeleteFile != nil && *input.DeleteFile {
manager.DeleteImageFile(image)
}
// perform the post-commit actions
fileDeleter.Commit()
// call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageDestroyPost, input, nil)
r.hookExecutor.ExecutePostHooks(ctx, i.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
ImageDestroyInput: input,
Checksum: i.Checksum,
Path: i.Path,
}, nil)
return true, nil
}
@ -324,46 +326,47 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
}
var images []*models.Image
fileDeleter := &image.FileDeleter{
Deleter: *file.NewDeleter(),
Paths: manager.GetInstance().Paths,
}
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Image()
for _, imageID := range imageIDs {
image, err := qb.Find(imageID)
i, err := qb.Find(imageID)
if err != nil {
return err
}
if image == nil {
if i == nil {
return fmt.Errorf("image with id %d not found", imageID)
}
images = append(images, image)
if err := qb.Destroy(imageID); err != nil {
images = append(images, i)
if err := image.Destroy(i, qb, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)); err != nil {
return err
}
}
return nil
}); err != nil {
fileDeleter.Rollback()
return false, err
}
// perform the post-commit actions
fileDeleter.Commit()
for _, image := range images {
// if delete generated is true, then delete the generated files
// for the image
if input.DeleteGenerated != nil && *input.DeleteGenerated {
manager.DeleteGeneratedImageFiles(image)
}
// if delete file is true, then delete the file as well
// if it fails, just log a message
if input.DeleteFile != nil && *input.DeleteFile {
manager.DeleteImageFile(image)
}
// call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageDestroyPost, input, nil)
r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageDestroyPost, plugin.ImagesDestroyInput{
ImagesDestroyInput: input,
Checksum: image.Checksum,
Path: image.Path,
}, nil)
}
return true, nil

View file

@ -7,6 +7,7 @@ import (
"strconv"
"time"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
@ -456,96 +457,105 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
return false, err
}
var scene *models.Scene
var postCommitFunc func()
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
var s *models.Scene
fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths,
}
deleteGenerated := utils.IsTrue(input.DeleteGenerated)
deleteFile := utils.IsTrue(input.DeleteFile)
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Scene()
var err error
scene, err = qb.Find(sceneID)
s, err = qb.Find(sceneID)
if err != nil {
return err
}
if scene == nil {
if s == nil {
return fmt.Errorf("scene with id %d not found", sceneID)
}
postCommitFunc, err = manager.DestroyScene(scene, repo)
return err
// kill any running encoders
manager.KillRunningStreams(s, fileNamingAlgo)
return scene.Destroy(s, repo, fileDeleter, deleteGenerated, deleteFile)
}); err != nil {
fileDeleter.Rollback()
return false, err
}
// perform the post-commit actions
postCommitFunc()
// if delete generated is true, then delete the generated files
// for the scene
if input.DeleteGenerated != nil && *input.DeleteGenerated {
manager.DeleteGeneratedSceneFiles(scene, config.GetInstance().GetVideoFileNamingAlgorithm())
}
// if delete file is true, then delete the file as well
// if it fails, just log a message
if input.DeleteFile != nil && *input.DeleteFile {
manager.DeleteSceneFile(scene)
}
fileDeleter.Commit()
// call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneDestroyPost, input, nil)
r.hookExecutor.ExecutePostHooks(ctx, s.ID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{
SceneDestroyInput: input,
Checksum: s.Checksum.String,
OSHash: s.OSHash.String,
Path: s.Path,
}, nil)
return true, nil
}
func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.ScenesDestroyInput) (bool, error) {
var scenes []*models.Scene
var postCommitFuncs []func()
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths,
}
deleteGenerated := utils.IsTrue(input.DeleteGenerated)
deleteFile := utils.IsTrue(input.DeleteFile)
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Scene()
for _, id := range input.Ids {
sceneID, _ := strconv.Atoi(id)
scene, err := qb.Find(sceneID)
s, err := qb.Find(sceneID)
if err != nil {
return err
}
if scene != nil {
scenes = append(scenes, scene)
}
f, err := manager.DestroyScene(scene, repo)
if err != nil {
return err
if s != nil {
scenes = append(scenes, s)
}
postCommitFuncs = append(postCommitFuncs, f)
// kill any running encoders
manager.KillRunningStreams(s, fileNamingAlgo)
if err := scene.Destroy(s, repo, fileDeleter, deleteGenerated, deleteFile); err != nil {
return err
}
}
return nil
}); err != nil {
fileDeleter.Rollback()
return false, err
}
for _, f := range postCommitFuncs {
f()
}
// perform the post-commit actions
fileDeleter.Commit()
fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm()
for _, scene := range scenes {
// if delete generated is true, then delete the generated files
// for the scene
if input.DeleteGenerated != nil && *input.DeleteGenerated {
manager.DeleteGeneratedSceneFiles(scene, fileNamingAlgo)
}
// if delete file is true, then delete the file as well
// if it fails, just log a message
if input.DeleteFile != nil && *input.DeleteFile {
manager.DeleteSceneFile(scene)
}
// call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneDestroyPost, input, nil)
r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneDestroyPost, plugin.ScenesDestroyInput{
ScenesDestroyInput: input,
Checksum: scene.Checksum.String,
OSHash: scene.OSHash.String,
Path: scene.Path,
}, nil)
}
return true, nil
@ -646,7 +656,14 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b
return false, err
}
var postCommitFunc func()
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths,
}
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.SceneMarker()
sqb := repo.Scene()
@ -661,18 +678,19 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b
return fmt.Errorf("scene marker with id %d not found", markerID)
}
scene, err := sqb.Find(int(marker.SceneID.Int64))
s, err := sqb.Find(int(marker.SceneID.Int64))
if err != nil {
return err
}
postCommitFunc, err = manager.DestroySceneMarker(scene, marker, qb)
return err
return scene.DestroyMarker(s, marker, qb, fileDeleter)
}); err != nil {
fileDeleter.Rollback()
return false, err
}
postCommitFunc()
// perform the post-commit actions
fileDeleter.Commit()
r.hookExecutor.ExecutePostHooks(ctx, markerID, plugin.SceneMarkerDestroyPost, id, nil)
@ -682,7 +700,15 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b
func (r *mutationResolver) changeMarker(ctx context.Context, changeType int, changedMarker models.SceneMarker, tagIDs []int) (*models.SceneMarker, error) {
var existingMarker *models.SceneMarker
var sceneMarker *models.SceneMarker
var scene *models.Scene
var s *models.Scene
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths,
}
// Start the transaction and save the scene marker
if err := r.withTxn(ctx, func(repo models.Repository) error {
@ -704,26 +730,31 @@ func (r *mutationResolver) changeMarker(ctx context.Context, changeType int, cha
return err
}
scene, err = sqb.Find(int(existingMarker.SceneID.Int64))
s, err = sqb.Find(int(existingMarker.SceneID.Int64))
}
if err != nil {
return err
}
// remove the marker preview if the timestamp was changed
if s != nil && existingMarker != nil && existingMarker.Seconds != changedMarker.Seconds {
seconds := int(existingMarker.Seconds)
if err := fileDeleter.MarkMarkerFiles(s, seconds); err != nil {
return err
}
}
// Save the marker tags
// If this tag is the primary tag, then let's not add it.
tagIDs = utils.IntExclude(tagIDs, []int{changedMarker.PrimaryTagID})
return qb.UpdateTags(sceneMarker.ID, tagIDs)
}); err != nil {
fileDeleter.Rollback()
return nil, err
}
// remove the marker preview if the timestamp was changed
if scene != nil && existingMarker != nil && existingMarker.Seconds != changedMarker.Seconds {
seconds := int(existingMarker.Seconds)
manager.DeleteSceneMarkerFiles(scene, seconds, config.GetInstance().GetVideoFileNamingAlgorithm())
}
// perform the post-commit actions
fileDeleter.Commit()
return sceneMarker, nil
}

View file

@ -20,7 +20,7 @@ func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager)
return client.SubmitStashBoxFingerprints(input.SceneIds, boxes[input.StashBoxIndex].Endpoint)
return client.SubmitStashBoxFingerprints(ctx, input.SceneIds, boxes[input.StashBoxIndex].Endpoint)
}
func (r *mutationResolver) StashBoxBatchPerformerTag(ctx context.Context, input models.StashBoxBatchPerformerTagInput) (string, error) {

View file

@ -347,5 +347,6 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input models.TagsMerge
return nil, err
}
r.hookExecutor.ExecutePostHooks(ctx, t.ID, plugin.TagMergePost, input, nil)
return t, nil
}

View file

@ -121,6 +121,9 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
handyKey := config.GetHandyKey()
scriptOffset := config.GetFunscriptOffset()
// FIXME - misnamed output field means we have redundant fields
disableDropdownCreate := config.GetDisableDropdownCreate()
return &models.ConfigInterfaceResult{
MenuItems: menuItems,
SoundOnPreview: &soundOnPreview,
@ -136,9 +139,13 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
CSSEnabled: &cssEnabled,
Language: &language,
SlideshowDelay: &slideshowDelay,
DisabledDropdownCreate: config.GetDisableDropdownCreate(),
HandyKey: &handyKey,
FunscriptOffset: &scriptOffset,
// FIXME - see above
DisabledDropdownCreate: disableDropdownCreate,
DisableDropdownCreate: disableDropdownCreate,
HandyKey: &handyKey,
FunscriptOffset: &scriptOffset,
}
}
@ -174,6 +181,9 @@ func makeConfigDefaultsResult() *models.ConfigDefaultSettingsResult {
return &models.ConfigDefaultSettingsResult{
Identify: config.GetDefaultIdentifySettings(),
Scan: config.GetDefaultScanSettings(),
AutoTag: config.GetDefaultAutoTagSettings(),
Generate: config.GetDefaultGenerateSettings(),
DeleteFile: &deleteFileDefault,
DeleteGenerated: &deleteGeneratedDefault,
}

View file

@ -6,53 +6,57 @@ import (
"fmt"
"strconv"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scraper"
"github.com/stashapp/stash/pkg/scraper/stashbox"
)
// deprecated
func (r *queryResolver) ScrapeFreeones(ctx context.Context, performer_name string) (*models.ScrapedPerformer, error) {
scrapedPerformer := models.ScrapedPerformerInput{
Name: &performer_name,
}
return manager.GetInstance().ScraperCache.ScrapePerformer(scraper.FreeonesScraperID, scrapedPerformer)
func (r *queryResolver) ScrapeURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
return r.scraperCache().ScrapeURL(ctx, url, ty)
}
// deprecated
func (r *queryResolver) ScrapeFreeonesPerformerList(ctx context.Context, query string) ([]string, error) {
scrapedPerformers, err := manager.GetInstance().ScraperCache.ScrapePerformerList(scraper.FreeonesScraperID, query)
content, err := r.scraperCache().ScrapeName(ctx, scraper.FreeonesScraperID, query, models.ScrapeContentTypePerformer)
if err != nil {
return nil, err
}
performers, err := marshalScrapedPerformers(content)
if err != nil {
return nil, err
}
var ret []string
for _, v := range scrapedPerformers {
if v.Name != nil {
ret = append(ret, *v.Name)
for _, p := range performers {
if p.Name != nil {
ret = append(ret, *p.Name)
}
}
return ret, nil
}
func (r *queryResolver) ListScrapers(ctx context.Context, types []models.ScrapeContentType) ([]*models.Scraper, error) {
return r.scraperCache().ListScrapers(types), nil
}
func (r *queryResolver) ListPerformerScrapers(ctx context.Context) ([]*models.Scraper, error) {
return manager.GetInstance().ScraperCache.ListPerformerScrapers(), nil
return r.scraperCache().ListScrapers([]models.ScrapeContentType{models.ScrapeContentTypePerformer}), nil
}
func (r *queryResolver) ListSceneScrapers(ctx context.Context) ([]*models.Scraper, error) {
return manager.GetInstance().ScraperCache.ListSceneScrapers(), nil
return r.scraperCache().ListScrapers([]models.ScrapeContentType{models.ScrapeContentTypeScene}), nil
}
func (r *queryResolver) ListGalleryScrapers(ctx context.Context) ([]*models.Scraper, error) {
return manager.GetInstance().ScraperCache.ListGalleryScrapers(), nil
return r.scraperCache().ListScrapers([]models.ScrapeContentType{models.ScrapeContentTypeGallery}), nil
}
func (r *queryResolver) ListMovieScrapers(ctx context.Context) ([]*models.Scraper, error) {
return manager.GetInstance().ScraperCache.ListMovieScrapers(), nil
return r.scraperCache().ListScrapers([]models.ScrapeContentType{models.ScrapeContentTypeMovie}), nil
}
func (r *queryResolver) ScrapePerformerList(ctx context.Context, scraperID string, query string) ([]*models.ScrapedPerformer, error) {
@ -60,15 +64,29 @@ func (r *queryResolver) ScrapePerformerList(ctx context.Context, scraperID strin
return nil, nil
}
return manager.GetInstance().ScraperCache.ScrapePerformerList(scraperID, query)
content, err := r.scraperCache().ScrapeName(ctx, scraperID, query, models.ScrapeContentTypePerformer)
if err != nil {
return nil, err
}
return marshalScrapedPerformers(content)
}
func (r *queryResolver) ScrapePerformer(ctx context.Context, scraperID string, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
return manager.GetInstance().ScraperCache.ScrapePerformer(scraperID, scrapedPerformer)
content, err := r.scraperCache().ScrapeFragment(ctx, scraperID, scraper.Input{Performer: &scrapedPerformer})
if err != nil {
return nil, err
}
return marshalScrapedPerformer(content)
}
func (r *queryResolver) ScrapePerformerURL(ctx context.Context, url string) (*models.ScrapedPerformer, error) {
return manager.GetInstance().ScraperCache.ScrapePerformerURL(url)
content, err := r.scraperCache().ScrapeURL(ctx, url, models.ScrapeContentTypePerformer)
if err != nil {
return nil, err
}
return marshalScrapedPerformer(content)
}
func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string, query string) ([]*models.ScrapedScene, error) {
@ -76,50 +94,80 @@ func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string,
return nil, nil
}
return manager.GetInstance().ScraperCache.ScrapeSceneQuery(scraperID, query)
content, err := r.scraperCache().ScrapeName(ctx, scraperID, query, models.ScrapeContentTypeScene)
if err != nil {
return nil, err
}
return marshalScrapedScenes(content)
}
func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, scene models.SceneUpdateInput) (*models.ScrapedScene, error) {
id, err := strconv.Atoi(scene.ID)
if err != nil {
return nil, fmt.Errorf("%w: scene.ID is not an integer: '%s'", ErrInput, scene.ID)
}
content, err := r.scraperCache().ScrapeID(ctx, scraperID, id, models.ScrapeContentTypeScene)
if err != nil {
return nil, err
}
return manager.GetInstance().ScraperCache.ScrapeScene(scraperID, id)
return marshalScrapedScene(content)
}
func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*models.ScrapedScene, error) {
return manager.GetInstance().ScraperCache.ScrapeSceneURL(url)
content, err := r.scraperCache().ScrapeURL(ctx, url, models.ScrapeContentTypeScene)
if err != nil {
return nil, err
}
return marshalScrapedScene(content)
}
func (r *queryResolver) ScrapeGallery(ctx context.Context, scraperID string, gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) {
id, err := strconv.Atoi(gallery.ID)
if err != nil {
return nil, fmt.Errorf("%w: gallery id is not an integer: '%s'", ErrInput, gallery.ID)
}
content, err := r.scraperCache().ScrapeID(ctx, scraperID, id, models.ScrapeContentTypeGallery)
if err != nil {
return nil, err
}
return manager.GetInstance().ScraperCache.ScrapeGallery(scraperID, id)
return marshalScrapedGallery(content)
}
func (r *queryResolver) ScrapeGalleryURL(ctx context.Context, url string) (*models.ScrapedGallery, error) {
return manager.GetInstance().ScraperCache.ScrapeGalleryURL(url)
content, err := r.scraperCache().ScrapeURL(ctx, url, models.ScrapeContentTypeGallery)
if err != nil {
return nil, err
}
return marshalScrapedGallery(content)
}
func (r *queryResolver) ScrapeMovieURL(ctx context.Context, url string) (*models.ScrapedMovie, error) {
return manager.GetInstance().ScraperCache.ScrapeMovieURL(url)
content, err := r.scraperCache().ScrapeURL(ctx, url, models.ScrapeContentTypeMovie)
if err != nil {
return nil, err
}
return marshalScrapedMovie(content)
}
func (r *queryResolver) QueryStashBoxScene(ctx context.Context, input models.StashBoxSceneQueryInput) ([]*models.ScrapedScene, error) {
boxes := config.GetInstance().GetStashBoxes()
if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) {
return nil, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex)
return nil, fmt.Errorf("%w: invalid stash_box_index %d", ErrInput, input.StashBoxIndex)
}
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager)
if len(input.SceneIds) > 0 {
return client.FindStashBoxScenesByFingerprintsFlat(input.SceneIds)
return client.FindStashBoxScenesByFingerprintsFlat(ctx, input.SceneIds)
}
if input.Q != nil {
@ -133,17 +181,17 @@ func (r *queryResolver) QueryStashBoxPerformer(ctx context.Context, input models
boxes := config.GetInstance().GetStashBoxes()
if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) {
return nil, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex)
return nil, fmt.Errorf("%w: invalid stash_box_index %d", ErrInput, input.StashBoxIndex)
}
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager)
if len(input.PerformerIds) > 0 {
return client.FindStashBoxPerformersByNames(input.PerformerIds)
return client.FindStashBoxPerformersByNames(ctx, input.PerformerIds)
}
if input.Q != nil {
return client.QueryStashBoxPerformer(*input.Q)
return client.QueryStashBoxPerformer(ctx, *input.Q)
}
return nil, nil
@ -153,7 +201,7 @@ func (r *queryResolver) getStashBoxClient(index int) (*stashbox.Client, error) {
boxes := config.GetInstance().GetStashBoxes()
if index < 0 || index >= len(boxes) {
return nil, fmt.Errorf("invalid stash_box_index %d", index)
return nil, fmt.Errorf("%w: invalid stash_box_index %d", ErrInput, index)
}
return stashbox.NewClient(*boxes[index], r.txnManager), nil
@ -161,7 +209,8 @@ func (r *queryResolver) getStashBoxClient(index int) (*stashbox.Client, error) {
func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleSceneInput) ([]*models.ScrapedScene, error) {
if source.ScraperID != nil {
var singleScene *models.ScrapedScene
var c models.ScrapedContent
var content []models.ScrapedContent
var err error
switch {
@ -169,26 +218,24 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.Scr
var sceneID int
sceneID, err = strconv.Atoi(*input.SceneID)
if err != nil {
return nil, err
return nil, fmt.Errorf("%w: sceneID is not an integer: '%s'", ErrInput, *input.SceneID)
}
singleScene, err = manager.GetInstance().ScraperCache.ScrapeScene(*source.ScraperID, sceneID)
c, err = r.scraperCache().ScrapeID(ctx, *source.ScraperID, sceneID, models.ScrapeContentTypeScene)
content = []models.ScrapedContent{c}
case input.SceneInput != nil:
singleScene, err = manager.GetInstance().ScraperCache.ScrapeSceneFragment(*source.ScraperID, *input.SceneInput)
c, err = r.scraperCache().ScrapeFragment(ctx, *source.ScraperID, scraper.Input{Scene: input.SceneInput})
content = []models.ScrapedContent{c}
case input.Query != nil:
return manager.GetInstance().ScraperCache.ScrapeSceneQuery(*source.ScraperID, *input.Query)
content, err = r.scraperCache().ScrapeName(ctx, *source.ScraperID, *input.Query, models.ScrapeContentTypeScene)
default:
err = errors.New("scene_id, scene_input or query must be set")
err = fmt.Errorf("%w: scene_id, scene_input, or query must be set", ErrInput)
}
if err != nil {
return nil, err
}
if singleScene != nil {
return []*models.ScrapedScene{singleScene}, nil
}
return nil, nil
return marshalScrapedScenes(content)
} else if source.StashBoxIndex != nil {
client, err := r.getStashBoxClient(*source.StashBoxIndex)
if err != nil {
@ -196,15 +243,15 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.Scr
}
if input.SceneID != nil {
return client.FindStashBoxScenesByFingerprintsFlat([]string{*input.SceneID})
return client.FindStashBoxScenesByFingerprintsFlat(ctx, []string{*input.SceneID})
} else if input.Query != nil {
return client.QueryStashBoxScene(ctx, *input.Query)
}
return nil, errors.New("scene_id or query must be set")
return nil, fmt.Errorf("%w: scene_id or query must be set", ErrInput)
}
return nil, errors.New("scraper_id or stash_box_index must be set")
return nil, fmt.Errorf("%w: scraper_id or stash_box_index must be set", ErrInput)
}
func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeMultiScenesInput) ([][]*models.ScrapedScene, error) {
@ -216,7 +263,7 @@ func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source models.Scr
return nil, err
}
return client.FindStashBoxScenesByFingerprints(input.SceneIds)
return client.FindStashBoxScenesByFingerprints(ctx, input.SceneIds)
}
return nil, errors.New("scraper_id or stash_box_index must be set")
@ -225,20 +272,21 @@ func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source models.Scr
func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSinglePerformerInput) ([]*models.ScrapedPerformer, error) {
if source.ScraperID != nil {
if input.PerformerInput != nil {
singlePerformer, err := manager.GetInstance().ScraperCache.ScrapePerformer(*source.ScraperID, *input.PerformerInput)
performer, err := r.scraperCache().ScrapeFragment(ctx, *source.ScraperID, scraper.Input{Performer: input.PerformerInput})
if err != nil {
return nil, err
}
if singlePerformer != nil {
return []*models.ScrapedPerformer{singlePerformer}, nil
}
return nil, nil
return marshalScrapedPerformers([]models.ScrapedContent{performer})
}
if input.Query != nil {
return manager.GetInstance().ScraperCache.ScrapePerformerList(*source.ScraperID, *input.Query)
content, err := r.scraperCache().ScrapeName(ctx, *source.ScraperID, *input.Query, models.ScrapeContentTypePerformer)
if err != nil {
return nil, err
}
return marshalScrapedPerformers(content)
}
return nil, ErrNotImplemented
@ -251,9 +299,9 @@ func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models
var ret []*models.StashBoxPerformerQueryResult
switch {
case input.PerformerID != nil:
ret, err = client.FindStashBoxPerformersByNames([]string{*input.PerformerID})
ret, err = client.FindStashBoxPerformersByNames(ctx, []string{*input.PerformerID})
case input.Query != nil:
ret, err = client.QueryStashBoxPerformer(*input.Query)
ret, err = client.QueryStashBoxPerformer(ctx, *input.Query)
default:
return nil, ErrNotImplemented
}
@ -281,45 +329,43 @@ func (r *queryResolver) ScrapeMultiPerformers(ctx context.Context, source models
return nil, err
}
return client.FindStashBoxPerformersByPerformerNames(input.PerformerIds)
return client.FindStashBoxPerformersByPerformerNames(ctx, input.PerformerIds)
}
return nil, errors.New("scraper_id or stash_box_index must be set")
}
func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleGalleryInput) ([]*models.ScrapedGallery, error) {
if source.ScraperID != nil {
var singleGallery *models.ScrapedGallery
var err error
switch {
case input.GalleryID != nil:
var galleryID int
galleryID, err = strconv.Atoi(*input.GalleryID)
if err != nil {
return nil, err
}
singleGallery, err = manager.GetInstance().ScraperCache.ScrapeGallery(*source.ScraperID, galleryID)
case input.GalleryInput != nil:
singleGallery, err = manager.GetInstance().ScraperCache.ScrapeGalleryFragment(*source.ScraperID, *input.GalleryInput)
default:
return nil, ErrNotImplemented
}
if err != nil {
return nil, err
}
if singleGallery != nil {
return []*models.ScrapedGallery{singleGallery}, nil
}
return nil, nil
} else if source.StashBoxIndex != nil {
if source.StashBoxIndex != nil {
return nil, ErrNotSupported
}
return nil, errors.New("scraper_id must be set")
if source.ScraperID == nil {
return nil, fmt.Errorf("%w: scraper_id must be set", ErrInput)
}
var c models.ScrapedContent
switch {
case input.GalleryID != nil:
galleryID, err := strconv.Atoi(*input.GalleryID)
if err != nil {
return nil, fmt.Errorf("%w: gallery id is not an integer: '%s'", ErrInput, *input.GalleryID)
}
c, err = r.scraperCache().ScrapeID(ctx, *source.ScraperID, galleryID, models.ScrapeContentTypeGallery)
if err != nil {
return nil, err
}
return marshalScrapedGalleries([]models.ScrapedContent{c})
case input.GalleryInput != nil:
c, err := r.scraperCache().ScrapeFragment(ctx, *source.ScraperID, scraper.Input{Gallery: input.GalleryInput})
if err != nil {
return nil, err
}
return marshalScrapedGalleries([]models.ScrapedContent{c})
default:
return nil, ErrNotImplemented
}
}
func (r *queryResolver) ScrapeSingleMovie(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleMovieInput) ([]*models.ScrapedMovie, error) {

View file

@ -38,6 +38,7 @@ func (rs sceneRoutes) Routes() chi.Router {
r.Get("/webp", rs.Webp)
r.Get("/vtt/chapter", rs.ChapterVtt)
r.Get("/funscript", rs.Funscript)
r.Get("/interactive_heatmap", rs.InteractiveHeatmap)
r.Get("/scene_marker/{sceneMarkerId}/stream", rs.SceneMarkerStream)
r.Get("/scene_marker/{sceneMarkerId}/preview", rs.SceneMarkerPreview)
@ -273,6 +274,13 @@ func (rs sceneRoutes) Funscript(w http.ResponseWriter, r *http.Request) {
utils.ServeFileNoCache(w, r, funscript)
}
func (rs sceneRoutes) InteractiveHeatmap(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
w.Header().Set("Content-Type", "image/png")
filepath := manager.GetInstance().Paths.Scene.GetInteractiveHeatmapPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()))
http.ServeFile(w, r, filepath)
}
func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
w.Header().Set("Content-Type", "text/vtt")

139
pkg/api/scraped_content.go Normal file
View file

@ -0,0 +1,139 @@
package api
import (
"fmt"
"github.com/stashapp/stash/pkg/models"
)
// marshalScrapedScenes converts ScrapedContent into ScrapedScene. If conversion fails, an
// error is returned to the caller. Nil entries are passed through unchanged.
func marshalScrapedScenes(content []models.ScrapedContent) ([]*models.ScrapedScene, error) {
	var ret []*models.ScrapedScene
	for _, c := range content {
		if c == nil {
			// preserve nil placeholders in the output
			ret = append(ret, nil)
			continue
		}

		if scene, ok := c.(*models.ScrapedScene); ok {
			ret = append(ret, scene)
			continue
		}
		if scene, ok := c.(models.ScrapedScene); ok {
			ret = append(ret, &scene)
			continue
		}
		return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedScene", models.ErrConversion)
	}

	return ret, nil
}
// marshalScrapedPerformers converts ScrapedContent into ScrapedPerformer. If conversion
// fails, an error is returned to the caller. Nil entries are passed through unchanged.
func marshalScrapedPerformers(content []models.ScrapedContent) ([]*models.ScrapedPerformer, error) {
	var ret []*models.ScrapedPerformer
	for _, c := range content {
		if c == nil {
			// preserve nil placeholders in the output
			ret = append(ret, nil)
			continue
		}

		if performer, ok := c.(*models.ScrapedPerformer); ok {
			ret = append(ret, performer)
			continue
		}
		if performer, ok := c.(models.ScrapedPerformer); ok {
			ret = append(ret, &performer)
			continue
		}
		return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedPerformer", models.ErrConversion)
	}

	return ret, nil
}
// marshalScrapedGalleries converts ScrapedContent into ScrapedGallery. If
// conversion fails, an error is returned. Nil entries are passed through unchanged.
func marshalScrapedGalleries(content []models.ScrapedContent) ([]*models.ScrapedGallery, error) {
	var ret []*models.ScrapedGallery
	for _, c := range content {
		if c == nil {
			// preserve nil placeholders in the output
			ret = append(ret, nil)
			continue
		}

		if gallery, ok := c.(*models.ScrapedGallery); ok {
			ret = append(ret, gallery)
			continue
		}
		if gallery, ok := c.(models.ScrapedGallery); ok {
			ret = append(ret, &gallery)
			continue
		}
		return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedGallery", models.ErrConversion)
	}

	return ret, nil
}
// marshalScrapedMovies converts ScrapedContent into ScrapedMovie. If conversion
// fails, an error is returned. Nil entries are passed through unchanged.
func marshalScrapedMovies(content []models.ScrapedContent) ([]*models.ScrapedMovie, error) {
	var ret []*models.ScrapedMovie
	for _, c := range content {
		if c == nil {
			// preserve nil placeholders in the output
			ret = append(ret, nil)
			continue
		}
		switch m := c.(type) {
		case *models.ScrapedMovie:
			ret = append(ret, m)
		case models.ScrapedMovie:
			ret = append(ret, &m)
		default:
			// fixed typo in the error message: "ScrapedConetnt" -> "ScrapedContent"
			return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedMovie", models.ErrConversion)
		}
	}

	return ret, nil
}
// marshalScrapedPerformer will marshal a single performer
func marshalScrapedPerformer(content models.ScrapedContent) (*models.ScrapedPerformer, error) {
	performers, err := marshalScrapedPerformers([]models.ScrapedContent{content})
	if err != nil {
		return nil, err
	}

	// the input slice had exactly one element, so the output does too
	return performers[0], nil
}
// marshalScrapedScene will marshal a single scraped scene
func marshalScrapedScene(content models.ScrapedContent) (*models.ScrapedScene, error) {
	scenes, err := marshalScrapedScenes([]models.ScrapedContent{content})
	if err != nil {
		return nil, err
	}

	// the input slice had exactly one element, so the output does too
	return scenes[0], nil
}
// marshalScrapedGallery will marshal a single scraped gallery
func marshalScrapedGallery(content models.ScrapedContent) (*models.ScrapedGallery, error) {
	galleries, err := marshalScrapedGalleries([]models.ScrapedContent{content})
	if err != nil {
		return nil, err
	}

	// the input slice had exactly one element, so the output does too
	return galleries[0], nil
}
// marshalScrapedMovie will marshal a single scraped movie
func marshalScrapedMovie(content models.ScrapedContent) (*models.ScrapedMovie, error) {
	movies, err := marshalScrapedMovies([]models.ScrapedContent{content})
	if err != nil {
		return nil, err
	}

	// the input slice had exactly one element, so the output does too
	return movies[0], nil
}

View file

@ -30,6 +30,7 @@ import (
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
"github.com/vearutop/statigz"
)
var version string
@ -53,6 +54,7 @@ func Start(uiBox embed.FS, loginUIBox embed.FS) {
if c.GetLogAccess() {
r.Use(middleware.Logger)
}
r.Use(SecurityHeadersMiddleware)
r.Use(middleware.DefaultCompress)
r.Use(middleware.StripSlashes)
r.Use(cors.AllowAll().Handler)
@ -205,19 +207,22 @@ func Start(uiBox embed.FS, loginUIBox embed.FS) {
}
prefix := getProxyPrefix(r.Header)
baseURLIndex := strings.Replace(string(data), "%BASE_URL%", prefix+"/", 2)
baseURLIndex = strings.Replace(baseURLIndex, "base href=\"/\"", fmt.Sprintf("base href=\"%s\"", prefix+"/"), 2)
baseURLIndex := strings.ReplaceAll(string(data), "/%BASE_URL%", prefix)
baseURLIndex = strings.Replace(baseURLIndex, "base href=\"/\"", fmt.Sprintf("base href=\"%s\"", prefix+"/"), 1)
_, _ = w.Write([]byte(baseURLIndex))
} else {
isStatic, _ := path.Match("/static/*/*", r.URL.Path)
if isStatic {
w.Header().Add("Cache-Control", "max-age=604800000")
}
uiRoot, err := fs.Sub(uiBox, uiRootDir)
if err != nil {
panic(err)
prefix := getProxyPrefix(r.Header)
if prefix != "" {
r.URL.Path = strings.Replace(r.URL.Path, prefix, "", 1)
}
http.FileServer(http.FS(uiRoot)).ServeHTTP(w, r)
r.URL.Path = uiRootDir + r.URL.Path
statigz.FileServer(uiBox).ServeHTTP(w, r)
}
})
@ -338,6 +343,36 @@ var (
BaseURLCtxKey = &contextKey{"BaseURL"}
)
func SecurityHeadersMiddleware(next http.Handler) http.Handler {
fn := func(w http.ResponseWriter, r *http.Request) {
c := config.GetInstance()
connectableOrigins := "connect-src data: 'self'"
// Workaround Safari bug https://bugs.webkit.org/show_bug.cgi?id=201591
// Allows websocket requests to any origin
connectableOrigins += " ws: wss:"
// The graphql playground pulls its frontend from a cdn
connectableOrigins += " https://cdn.jsdelivr.net "
if !c.IsNewSystem() && c.GetHandyKey() != "" {
connectableOrigins += " https://www.handyfeeling.com"
}
connectableOrigins += "; "
cspDirectives := "default-src data: 'self' 'unsafe-inline';" + connectableOrigins + "img-src data: *; script-src 'self' https://cdn.jsdelivr.net 'unsafe-inline'; media-src 'self' blob:; child-src 'none'; object-src 'none'; form-action 'self'"
w.Header().Set("Referrer-Policy", "same-origin")
w.Header().Set("X-Content-Type-Options", "nosniff")
w.Header().Set("X-Frame-Options", "DENY")
w.Header().Set("X-XSS-Protection", "1")
w.Header().Set("Content-Security-Policy", cspDirectives)
next.ServeHTTP(w, r)
}
return http.HandlerFunc(fn)
}
func BaseURLMiddleware(next http.Handler) http.Handler {
fn := func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()

View file

@ -66,3 +66,7 @@ func (b SceneURLBuilder) GetSceneMarkerStreamScreenshotURL(sceneMarkerID int) st
func (b SceneURLBuilder) GetFunscriptURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/funscript"
}
func (b SceneURLBuilder) GetInteractiveHeatmapURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/interactive_heatmap"
}

View file

@ -60,14 +60,12 @@ func (t *tagger) tagPerformers(performerReader models.PerformerReader, addFunc a
}
func (t *tagger) tagStudios(studioReader models.StudioReader, addFunc addLinkFunc) error {
others, err := match.PathToStudios(t.Path, studioReader)
studio, err := match.PathToStudio(t.Path, studioReader)
if err != nil {
return err
}
// only add first studio
if len(others) > 0 {
studio := others[0]
if studio != nil {
added, err := addFunc(t.ID, studio.ID)
if err != nil {

View file

@ -23,7 +23,7 @@ import (
var DB *sqlx.DB
var WriteMu sync.Mutex
var dbPath string
var appSchemaVersion uint = 28
var appSchemaVersion uint = 29
var databaseSchemaVersion uint
//go:embed migrations/*.sql

View file

@ -0,0 +1 @@
ALTER TABLE `scenes` ADD COLUMN `interactive_speed` int

View file

@ -44,6 +44,15 @@ func Download(ctx context.Context, configDirectory string) error {
return err
}
}
// validate that the urls contained what we needed
executables := []string{getFFMPEGFilename(), getFFProbeFilename()}
for _, executable := range executables {
_, err := os.Stat(filepath.Join(configDirectory, executable))
if err != nil {
return err
}
}
return nil
}
@ -76,7 +85,6 @@ func DownloadSingle(ctx context.Context, configDirectory, url string) error {
}
// Configure where we want to download the archive
urlExt := path.Ext(url)
urlBase := path.Base(url)
archivePath := filepath.Join(configDirectory, urlBase)
_ = os.Remove(archivePath) // remove archive if it already exists
@ -118,7 +126,7 @@ func DownloadSingle(ctx context.Context, configDirectory, url string) error {
logger.Info("Downloading complete")
if urlExt == ".zip" {
if resp.Header.Get("Content-Type") == "application/zip" {
logger.Infof("Unzipping %s...", archivePath)
if err := unzip(archivePath, configDirectory); err != nil {
return err
@ -126,12 +134,18 @@ func DownloadSingle(ctx context.Context, configDirectory, url string) error {
// On OSX or Linux set downloaded files permissions
if runtime.GOOS == "darwin" || runtime.GOOS == "linux" {
if err := os.Chmod(filepath.Join(configDirectory, "ffmpeg"), 0755); err != nil {
return err
_, err = os.Stat(filepath.Join(configDirectory, "ffmpeg"))
if !os.IsNotExist(err) {
if err = os.Chmod(filepath.Join(configDirectory, "ffmpeg"), 0755); err != nil {
return err
}
}
if err := os.Chmod(filepath.Join(configDirectory, "ffprobe"), 0755); err != nil {
return err
_, err = os.Stat(filepath.Join(configDirectory, "ffprobe"))
if !os.IsNotExist(err) {
if err := os.Chmod(filepath.Join(configDirectory, "ffprobe"), 0755); err != nil {
return err
}
}
// TODO: In future possible clear xattr to allow running on osx without user intervention
@ -139,8 +153,6 @@ func DownloadSingle(ctx context.Context, configDirectory, url string) error {
// xattr -c /path/to/binary -- xattr.Remove(path, "com.apple.quarantine")
}
logger.Infof("ffmpeg and ffprobe successfully installed in %s", configDirectory)
} else {
return fmt.Errorf("ffmpeg was downloaded to %s", archivePath)
}
@ -152,7 +164,7 @@ func getFFMPEGURL() []string {
var urls []string
switch runtime.GOOS {
case "darwin":
urls = []string{"https://evermeet.cx/ffmpeg/ffmpeg-4.3.1.zip", "https://evermeet.cx/ffmpeg/ffprobe-4.3.1.zip"}
urls = []string{"https://evermeet.cx/ffmpeg/getrelease/zip", "https://evermeet.cx/ffmpeg/getrelease/ffprobe/zip"}
case "linux":
switch runtime.GOARCH {
case "amd64":

View file

@ -10,9 +10,22 @@ type SceneMarkerOptions struct {
Seconds int
Width int
OutputPath string
Audio bool
}
func (e *Encoder) SceneMarkerVideo(probeResult VideoFile, options SceneMarkerOptions) error {
argsAudio := []string{
"-c:a", "aac",
"-b:a", "64k",
}
if !options.Audio {
argsAudio = []string{
"-an",
}
}
args := []string{
"-v", "error",
"-ss", strconv.Itoa(options.Seconds),
@ -29,11 +42,10 @@ func (e *Encoder) SceneMarkerVideo(probeResult VideoFile, options SceneMarkerOpt
"-threads", "4",
"-vf", fmt.Sprintf("scale=%v:-2", options.Width),
"-sws_flags", "lanczos",
"-c:a", "aac",
"-b:a", "64k",
"-strict", "-2",
options.OutputPath,
}
args = append(args, argsAudio...)
args = append(args, options.OutputPath)
_, err := e.run(probeResult.Path, args, nil)
return err
}

161
pkg/file/delete.go Normal file
View file

@ -0,0 +1,161 @@
package file
import (
"errors"
"fmt"
"io/fs"
"os"
"github.com/stashapp/stash/pkg/logger"
)
const deleteFileSuffix = ".delete"
// RenamerRemover provides access to the Rename and Remove functions,
// allowing the filesystem operations used by Deleter to be substituted
// (e.g. with fakes in tests).
type RenamerRemover interface {
	Rename(oldpath, newpath string) error
	Remove(name string) error
	RemoveAll(path string) error
	Stat(name string) (fs.FileInfo, error)
}

// renamerRemoverImpl implements RenamerRemover by delegating each call
// to an injected function field.
type renamerRemoverImpl struct {
	RenameFn    func(oldpath, newpath string) error
	RemoveFn    func(name string) error
	RemoveAllFn func(path string) error
	StatFn      func(path string) (fs.FileInfo, error)
}

// Rename delegates to RenameFn.
func (r renamerRemoverImpl) Rename(oldpath, newpath string) error {
	return r.RenameFn(oldpath, newpath)
}

// Remove delegates to RemoveFn.
func (r renamerRemoverImpl) Remove(name string) error {
	return r.RemoveFn(name)
}

// RemoveAll delegates to RemoveAllFn.
func (r renamerRemoverImpl) RemoveAll(path string) error {
	return r.RemoveAllFn(path)
}

// Stat delegates to StatFn.
func (r renamerRemoverImpl) Stat(path string) (fs.FileInfo, error) {
	return r.StatFn(path)
}
// Deleter is used to safely delete files and directories from the filesystem.
// During a transaction, files and directories are marked for deletion using
// the Files and Dirs methods. This will rename the files/directories to be
// deleted. If the transaction is rolled back, then the files/directories can
// be restored to their original state with the Rollback method. If the
// transaction is committed, the marked files are then deleted from the
// filesystem using the Commit method.
type Deleter struct {
	RenamerRemover RenamerRemover

	// files and dirs hold the original (pre-rename) paths that have been
	// marked for deletion.
	files []string
	dirs  []string
}
// NewDeleter returns a Deleter whose RenamerRemover is backed by the
// real filesystem operations from the os package.
func NewDeleter() *Deleter {
	rr := renamerRemoverImpl{
		RenameFn:    os.Rename,
		RemoveFn:    os.Remove,
		RemoveAllFn: os.RemoveAll,
		StatFn:      os.Stat,
	}
	return &Deleter{RenamerRemover: rr}
}
// Files designates files to be deleted. Each file marked will be renamed to add
// a `.delete` suffix. An error is returned if a file could not be renamed.
// Note that if an error is returned, then some files may be left renamed.
// Abort should be called to restore marked files if this function returns an
// error.
func (d *Deleter) Files(paths []string) error {
	for _, path := range paths {
		_, statErr := d.RenamerRemover.Stat(path)
		if statErr != nil {
			// a missing file is not fatal: warn and skip it
			if errors.Is(statErr, fs.ErrNotExist) {
				logger.Warnf("File %q does not exist and therefore cannot be deleted. Ignoring.", path)
				continue
			}
			return fmt.Errorf("check file %q exists: %w", path, statErr)
		}

		if renameErr := d.renameForDelete(path); renameErr != nil {
			return fmt.Errorf("marking file %q for deletion: %w", path, renameErr)
		}
		// remember the original path so the rename can be undone
		d.files = append(d.files, path)
	}

	return nil
}
// Dirs designates directories to be deleted. Each directory marked will be renamed to add
// a `.delete` suffix. An error is returned if a directory could not be renamed.
// Note that if an error is returned, then some directories may be left renamed.
// Abort should be called to restore marked files/directories if this function returns an
// error.
func (d *Deleter) Dirs(paths []string) error {
	for _, path := range paths {
		_, statErr := d.RenamerRemover.Stat(path)
		if statErr != nil {
			// a missing directory is not fatal: warn and skip it
			if errors.Is(statErr, fs.ErrNotExist) {
				logger.Warnf("Directory %q does not exist and therefore cannot be deleted. Ignoring.", path)
				continue
			}
			return fmt.Errorf("check directory %q exists: %w", path, statErr)
		}

		if renameErr := d.renameForDelete(path); renameErr != nil {
			return fmt.Errorf("marking directory %q for deletion: %w", path, renameErr)
		}
		// remember the original path so the rename can be undone
		d.dirs = append(d.dirs, path)
	}

	return nil
}
// Rollback tries to rename all marked files and directories back to their
// original names and clears the marked list. Any errors encountered are
// logged. All files will be attempted regardless of any errors occurred.
func (d *Deleter) Rollback() {
	restore := func(name string) {
		if err := d.renameForRestore(name); err != nil {
			logger.Warnf("Error restoring %q: %v", name, err)
		}
	}

	// restore files first, then directories (same order as marking)
	for _, f := range d.files {
		restore(f)
	}
	for _, dir := range d.dirs {
		restore(dir)
	}

	d.files = nil
	d.dirs = nil
}
// Commit deletes all files marked for deletion and clears the marked list.
// Any errors encountered are logged. All files will be attempted, regardless
// of the errors encountered.
func (d *Deleter) Commit() {
	for _, name := range d.files {
		target := name + deleteFileSuffix
		if err := d.RenamerRemover.Remove(target); err != nil {
			logger.Warnf("Error deleting file %q: %v", target, err)
		}
	}

	for _, name := range d.dirs {
		target := name + deleteFileSuffix
		if err := d.RenamerRemover.RemoveAll(target); err != nil {
			logger.Warnf("Error deleting directory %q: %v", target, err)
		}
	}

	d.files = nil
	d.dirs = nil
}
// renameForDelete marks path for deletion by renaming it to path + ".delete".
func (d *Deleter) renameForDelete(path string) error {
	return d.RenamerRemover.Rename(path, path+deleteFileSuffix)
}

// renameForRestore undoes renameForDelete, restoring the original path.
func (d *Deleter) renameForRestore(path string) error {
	return d.RenamerRemover.Rename(path+deleteFileSuffix, path)
}

40
pkg/gallery/filter.go Normal file
View file

@ -0,0 +1,40 @@
package gallery
import (
"path/filepath"
"strings"
"github.com/stashapp/stash/pkg/models"
)
// PathsFilter builds a GalleryFilterType matching galleries whose path
// starts with any of the given paths (chained together with Or).
// Returns nil when paths is nil.
func PathsFilter(paths []string) *models.GalleryFilterType {
	if paths == nil {
		return nil
	}

	sep := string(filepath.Separator)

	var ret, current *models.GalleryFilterType
	for _, p := range paths {
		next := &models.GalleryFilterType{}
		if current == nil {
			// first clause becomes the root of the chain
			ret = next
		} else {
			current.Or = next
		}
		current = next

		prefix := p
		if !strings.HasSuffix(prefix, sep) {
			prefix += sep
		}

		// trailing % makes this a prefix match in the query layer
		current.Path = &models.StringCriterionInput{
			Modifier: models.CriterionModifierEquals,
			Value:    prefix + "%",
		}
	}

	return ret
}

View file

@ -12,7 +12,7 @@ import (
)
type SceneScraper interface {
ScrapeScene(sceneID int) (*models.ScrapedScene, error)
ScrapeScene(ctx context.Context, sceneID int) (*models.ScrapedScene, error)
}
type SceneUpdatePostHookExecutor interface {
@ -34,7 +34,7 @@ type SceneIdentifier struct {
}
func (t *SceneIdentifier) Identify(ctx context.Context, txnManager models.TransactionManager, scene *models.Scene) error {
result, err := t.scrapeScene(scene)
result, err := t.scrapeScene(ctx, scene)
if err != nil {
return err
}
@ -57,11 +57,11 @@ type scrapeResult struct {
source ScraperSource
}
func (t *SceneIdentifier) scrapeScene(scene *models.Scene) (*scrapeResult, error) {
func (t *SceneIdentifier) scrapeScene(ctx context.Context, scene *models.Scene) (*scrapeResult, error) {
// iterate through the input sources
for _, source := range t.Sources {
// scrape using the source
scraped, err := source.Scraper.ScrapeScene(scene.ID)
scraped, err := source.Scraper.ScrapeScene(ctx, scene.ID)
if err != nil {
return nil, fmt.Errorf("error scraping from %v: %v", source.Scraper, err)
}

View file

@ -17,7 +17,7 @@ type mockSceneScraper struct {
results map[int]*models.ScrapedScene
}
func (s mockSceneScraper) ScrapeScene(sceneID int) (*models.ScrapedScene, error) {
func (s mockSceneScraper) ScrapeScene(ctx context.Context, sceneID int) (*models.ScrapedScene, error) {
if utils.IntInclude(s.errIDs, sceneID) {
return nil, errors.New("scrape scene error")
}

48
pkg/image/delete.go Normal file
View file

@ -0,0 +1,48 @@
package image
import (
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/manager/paths"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
// Destroyer destroys an image record by its id.
type Destroyer interface {
	Destroy(id int) error
}

// FileDeleter is an extension of file.Deleter that handles deletion of image files.
type FileDeleter struct {
	file.Deleter

	// Paths resolves the locations of generated files (thumbnails etc.).
	Paths *paths.Paths
}
// MarkGeneratedFiles marks for deletion the generated files for the provided image.
// Currently this is the default-width thumbnail, marked only if it exists on disk.
func (d *FileDeleter) MarkGeneratedFiles(image *models.Image) error {
	thumbPath := d.Paths.Generated.GetThumbnailPath(image.Checksum, models.DefaultGthumbWidth)
	exists, _ := utils.FileExists(thumbPath)
	if exists {
		return d.Files([]string{thumbPath})
	}

	return nil
}
// Destroy destroys an image, optionally marking the file and generated files for deletion.
// Files are only marked (renamed) here; actual removal happens when the
// fileDeleter is committed. The database record is destroyed last.
func Destroy(i *models.Image, destroyer Destroyer, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error {
	// don't try to delete if the image is in a zip file
	if deleteFile && !file.IsZipPath(i.Path) {
		if err := fileDeleter.Files([]string{i.Path}); err != nil {
			return err
		}
	}

	if deleteGenerated {
		if err := fileDeleter.MarkGeneratedFiles(i); err != nil {
			return err
		}
	}

	return destroyer.Destroy(i.ID)
}

40
pkg/image/filter.go Normal file
View file

@ -0,0 +1,40 @@
package image
import (
"path/filepath"
"strings"
"github.com/stashapp/stash/pkg/models"
)
// PathsFilter builds an ImageFilterType matching images whose path
// starts with any of the given paths (chained together with Or).
// Returns nil when paths is nil.
func PathsFilter(paths []string) *models.ImageFilterType {
	if paths == nil {
		return nil
	}

	sep := string(filepath.Separator)

	var ret, current *models.ImageFilterType
	for _, p := range paths {
		next := &models.ImageFilterType{}
		if current == nil {
			// first clause becomes the root of the chain
			ret = next
		} else {
			current.Or = next
		}
		current = next

		prefix := p
		if !strings.HasSuffix(prefix, sep) {
			prefix += sep
		}

		// trailing % makes this a prefix match in the query layer
		current.Path = &models.StringCriterionInput{
			Modifier: models.CriterionModifierEquals,
			Value:    prefix + "%",
		}
	}

	return ret
}

View file

@ -124,13 +124,14 @@ const (
WallShowTitle = "wall_show_title"
defaultWallShowTitle = true
CustomPerformerImageLocation = "custom_performer_image_location"
MaximumLoopDuration = "maximum_loop_duration"
AutostartVideo = "autostart_video"
AutostartVideoOnPlaySelected = "autostart_video_on_play_selected"
ContinuePlaylistDefault = "continue_playlist_default"
ShowStudioAsText = "show_studio_as_text"
CSSEnabled = "cssEnabled"
CustomPerformerImageLocation = "custom_performer_image_location"
MaximumLoopDuration = "maximum_loop_duration"
AutostartVideo = "autostart_video"
AutostartVideoOnPlaySelected = "autostart_video_on_play_selected"
autostartVideoOnPlaySelectedDefault = true
ContinuePlaylistDefault = "continue_playlist_default"
ShowStudioAsText = "show_studio_as_text"
CSSEnabled = "cssEnabled"
WallPlayback = "wall_playback"
defaultWallPlayback = "video"
@ -167,7 +168,11 @@ const (
LogAccess = "logAccess"
defaultLogAccess = true
// Default settings
DefaultScanSettings = "defaults.scan_task"
DefaultIdentifySettings = "defaults.identify_task"
DefaultAutoTagSettings = "defaults.auto_tag_task"
DefaultGenerateSettings = "defaults.generate_task"
DeleteFileDefault = "defaults.delete_file"
DeleteGeneratedDefault = "defaults.delete_generated"
@ -826,15 +831,18 @@ func (i *Instance) GetAutostartVideo() bool {
func (i *Instance) GetAutostartVideoOnPlaySelected() bool {
i.Lock()
defer i.Unlock()
viper.SetDefault(AutostartVideoOnPlaySelected, true)
return viper.GetBool(AutostartVideoOnPlaySelected)
ret := autostartVideoOnPlaySelectedDefault
v := i.viper(AutostartVideoOnPlaySelected)
if v.IsSet(AutostartVideoOnPlaySelected) {
ret = v.GetBool(AutostartVideoOnPlaySelected)
}
return ret
}
func (i *Instance) GetContinuePlaylistDefault() bool {
i.Lock()
defer i.Unlock()
viper.SetDefault(ContinuePlaylistDefault, false)
return viper.GetBool(ContinuePlaylistDefault)
return i.getBool(ContinuePlaylistDefault)
}
func (i *Instance) GetShowStudioAsText() bool {
@ -949,6 +957,63 @@ func (i *Instance) GetDefaultIdentifySettings() *models.IdentifyMetadataTaskOpti
return nil
}
// GetDefaultScanSettings returns the default Scan task settings.
// Returns nil if the settings could not be unmarshalled, or if it
// has not been set.
func (i *Instance) GetDefaultScanSettings() *models.ScanMetadataOptions {
i.RLock()
defer i.RUnlock()
v := i.viper(DefaultScanSettings)
if v.IsSet(DefaultScanSettings) {
var ret models.ScanMetadataOptions
if err := v.UnmarshalKey(DefaultScanSettings, &ret); err != nil {
return nil
}
return &ret
}
return nil
}
// GetDefaultAutoTagSettings returns the default Scan task settings.
// Returns nil if the settings could not be unmarshalled, or if it
// has not been set.
func (i *Instance) GetDefaultAutoTagSettings() *models.AutoTagMetadataOptions {
i.RLock()
defer i.RUnlock()
v := i.viper(DefaultAutoTagSettings)
if v.IsSet(DefaultAutoTagSettings) {
var ret models.AutoTagMetadataOptions
if err := v.UnmarshalKey(DefaultAutoTagSettings, &ret); err != nil {
return nil
}
return &ret
}
return nil
}
// GetDefaultGenerateSettings returns the default Scan task settings.
// Returns nil if the settings could not be unmarshalled, or if it
// has not been set.
func (i *Instance) GetDefaultGenerateSettings() *models.GenerateMetadataOptions {
i.RLock()
defer i.RUnlock()
v := i.viper(DefaultGenerateSettings)
if v.IsSet(DefaultGenerateSettings) {
var ret models.GenerateMetadataOptions
if err := v.UnmarshalKey(DefaultGenerateSettings, &ret); err != nil {
return nil
}
return &ret
}
return nil
}
// GetTrustedProxies returns a comma separated list of ip addresses that should allow proxying.
// When empty, allow from any private network
func (i *Instance) GetTrustedProxies() []string {

View file

@ -0,0 +1,270 @@
package manager
import (
"encoding/json"
"fmt"
"image"
"image/draw"
"image/png"
"io/ioutil"
"math"
"os"
"sort"
"github.com/lucasb-eyer/go-colorful"
)
// InteractiveHeatmapSpeedGenerator renders a PNG heatmap of funscript
// action intensity over time and derives the script's median speed.
type InteractiveHeatmapSpeedGenerator struct {
	// InteractiveSpeed is the median action speed; set by Generate.
	InteractiveSpeed int64
	Funscript        Script
	FunscriptPath    string
	HeatmapPath      string
	// Width and Height are the output image dimensions in pixels.
	Width  int
	Height int
	// NumSegments is the number of time buckets used to build the gradient.
	NumSegments int
}

// Script is a parsed funscript document.
type Script struct {
	// Version of Launchscript
	Version string `json:"version"`
	// Inverted causes up and down movement to be flipped.
	Inverted bool `json:"inverted,omitempty"`
	// Range is the percentage of a full stroke to use.
	Range int `json:"range,omitempty"`
	// Actions are the timed moves.
	Actions []Action `json:"actions"`
	// NOTE(review): field name is misspelled ("Avarage"); renaming would
	// change the exported API and JSON output — confirm before fixing.
	AvarageSpeed int64
}

// Action is a move at a specific time.
type Action struct {
	// At time in milliseconds the action should fire.
	At int64 `json:"at"`
	// Pos is the place in percent to move to.
	Pos int `json:"pos"`

	// Slope, Intensity and Speed are derived by UpdateIntensityAndSpeed;
	// they are not part of the funscript file format.
	Slope     float64
	Intensity int64
	Speed     float64
}

// GradientTable maps positions in [0,1] to colors for heatmap rendering.
type GradientTable []struct {
	Col colorful.Color
	Pos float64
}
// NewInteractiveHeatmapSpeedGenerator returns a generator configured
// with the default heatmap dimensions (320x15 pixels, 150 segments).
func NewInteractiveHeatmapSpeedGenerator(funscriptPath string, heatmapPath string) *InteractiveHeatmapSpeedGenerator {
	g := &InteractiveHeatmapSpeedGenerator{
		FunscriptPath: funscriptPath,
		HeatmapPath:   heatmapPath,
	}
	g.Width = 320
	g.Height = 15
	g.NumSegments = 150
	return g
}
// Generate loads the funscript at g.FunscriptPath, computes per-action
// intensity and speed, renders the heatmap PNG to g.HeatmapPath, and
// stores the median action speed in g.InteractiveSpeed.
func (g *InteractiveHeatmapSpeedGenerator) Generate() error {
	funscript, err := g.LoadFunscriptData(g.FunscriptPath)

	if err != nil {
		return err
	}

	g.Funscript = funscript
	g.Funscript.UpdateIntensityAndSpeed()

	err = g.RenderHeatmap()

	if err != nil {
		return err
	}

	// CalculateMedian re-sorts actions by speed, so it must run after
	// RenderHeatmap, which relies on actions being in time order.
	g.InteractiveSpeed = g.Funscript.CalculateMedian()

	return nil
}
// LoadFunscriptData reads and parses the funscript JSON at path. The
// returned script's actions are sorted by timestamp, with any
// negative-timestamp actions removed. An error is returned if the file
// cannot be read or parsed, or if it yields no usable actions.
func (g *InteractiveHeatmapSpeedGenerator) LoadFunscriptData(path string) (Script, error) {
	data, err := ioutil.ReadFile(path)
	if err != nil {
		return Script{}, err
	}

	var funscript Script
	err = json.Unmarshal(data, &funscript)
	if err != nil {
		return Script{}, err
	}

	if funscript.Actions == nil {
		return Script{}, fmt.Errorf("actions list missing in %s", path)
	}

	sort.SliceStable(funscript.Actions, func(i, j int) bool { return funscript.Actions[i].At < funscript.Actions[j].At })

	// trim actions with negative timestamps to avoid index range errors when generating heatmap
	isValid := func(x int64) bool { return x >= 0 }

	i := 0
	for _, x := range funscript.Actions {
		if isValid(x.At) {
			funscript.Actions[i] = x
			i++
		}
	}
	funscript.Actions = funscript.Actions[:i]

	// guard against an empty (or fully-trimmed) action list: downstream
	// rendering indexes Actions[len-1] and would panic on empty input
	if len(funscript.Actions) == 0 {
		return Script{}, fmt.Errorf("no valid actions in %s", path)
	}

	return funscript, nil
}
// UpdateIntensityAndSpeed derives, for every action after the first,
// the slope, intensity and speed of the movement relative to the
// previous action. The first action is left at its zero values.
func (funscript *Script) UpdateIntensityAndSpeed() {
	for i := 1; i < len(funscript.Actions); i++ {
		cur := funscript.Actions[i]
		prev := funscript.Actions[i-1]

		dt := cur.At - prev.At
		dp := cur.Pos - prev.Pos

		// slope is clamped to the range [0, 20]
		slope := math.Min(math.Max(1/(2*float64(dt)/1000), 0), 20)
		intensity := int64(slope * math.Abs(float64(dp)))
		// speed in position-percent per second
		speed := math.Abs(float64(dp)) / float64(dt) * 1000

		funscript.Actions[i].Slope = slope
		funscript.Actions[i].Intensity = intensity
		funscript.Actions[i].Speed = speed
	}
}
// RenderHeatmap draws a Width x Height PNG gradient of action intensity
// over time to g.HeatmapPath, with black tick marks every 10 minutes.
// The funscript needs to have intensity updated first (via
// UpdateIntensityAndSpeed), and Actions must be non-empty.
func (g *InteractiveHeatmapSpeedGenerator) RenderHeatmap() error {
	gradient := g.Funscript.getGradientTable(g.NumSegments)

	img := image.NewRGBA(image.Rect(0, 0, g.Width, g.Height))
	// paint each pixel column with the interpolated gradient color
	for x := 0; x < g.Width; x++ {
		c := gradient.GetInterpolatedColorFor(float64(x) / float64(g.Width))
		draw.Draw(img, image.Rect(x, 0, x+1, g.Height), &image.Uniform{c}, image.Point{}, draw.Src)
	}

	// add 10 minute marks
	maxts := g.Funscript.Actions[len(g.Funscript.Actions)-1].At
	const tick = 600000 // ten minutes in milliseconds
	var ts int64 = tick
	c, _ := colorful.Hex("#000000")
	for ts < maxts {
		x := int(float64(ts) / float64(maxts) * float64(g.Width))
		// tick marks occupy the lower half of the image, 2px wide
		draw.Draw(img, image.Rect(x-1, g.Height/2, x+1, g.Height), &image.Uniform{c}, image.Point{}, draw.Src)
		ts += tick
	}

	outpng, err := os.Create(g.HeatmapPath)
	if err != nil {
		return err
	}
	defer outpng.Close()

	err = png.Encode(outpng, img)
	return err
}
// CalculateMedian returns the median action speed, truncated to an
// integer. It returns 0 for a script with no actions (the original
// code would index out of range in that case).
//
// NOTE: this re-sorts funscript.Actions by Speed as a side effect, so
// any prior ordering (e.g. by timestamp) is lost.
func (funscript *Script) CalculateMedian() int64 {
	n := len(funscript.Actions)
	if n == 0 {
		// guard: the even-length branch below would index Actions[-1]
		return 0
	}

	sort.Slice(funscript.Actions, func(i, j int) bool {
		return funscript.Actions[i].Speed < funscript.Actions[j].Speed
	})

	mid := n / 2
	if n%2 != 0 {
		return int64(funscript.Actions[mid].Speed)
	}
	return int64((funscript.Actions[mid-1].Speed + funscript.Actions[mid].Speed) / 2)
}
// GetInterpolatedColorFor returns the color at position t (0..1) by
// HCL-blending the two gradient keypoints surrounding t. A t at or
// beyond the final keypoint yields that keypoint's color unchanged.
func (gt GradientTable) GetInterpolatedColorFor(t float64) colorful.Color {
	for i := 1; i < len(gt); i++ {
		lo, hi := gt[i-1], gt[i]
		if t < lo.Pos || t > hi.Pos {
			continue
		}
		// blend proportionally to where t sits between the two keypoints
		frac := (t - lo.Pos) / (hi.Pos - lo.Pos)
		return lo.Col.BlendHcl(hi.Col, frac).Clamped()
	}
	// nothing matched: we're at (or past) the last gradient keypoint
	return gt[len(gt)-1].Col
}
// getGradientTable buckets the funscript's actions into numSegments
// equal-duration segments and produces one gradient keypoint per
// segment, colored by the segment's mean intensity. Empty segments get
// the background color (mean intensity 0).
func (funscript Script) getGradientTable(numSegments int) GradientTable {
	type bucket struct {
		count     int
		intensity int
	}
	buckets := make([]bucket, numSegments)

	// bucket every action by its relative position in the script;
	// maxts+1 keeps the final action's index strictly below numSegments
	maxts := funscript.Actions[len(funscript.Actions)-1].At
	for _, a := range funscript.Actions {
		idx := int(float64(a.At) / float64(maxts+1) * float64(numSegments))
		buckets[idx].count++
		buckets[idx].intensity += int(a.Intensity)
	}

	gradient := make(GradientTable, numSegments)
	for i, b := range buckets {
		gradient[i].Pos = float64(i) / float64(numSegments-1)
		mean := 0.0
		if b.count > 0 {
			mean = float64(b.intensity) / float64(b.count)
		}
		gradient[i].Col = getSegmentColor(mean)
	}
	return gradient
}
// getSegmentColor maps a mean segment intensity to a heatmap color,
// stepping through blue→green→yellow→red→purple→near-black in bands of
// stepSize. Near-zero intensity maps to the UI background color.
func getSegmentColor(intensity float64) colorful.Color {
	colorBlue, _ := colorful.Hex("#1e90ff")   // DodgerBlue
	colorGreen, _ := colorful.Hex("#228b22")  // ForestGreen
	colorYellow, _ := colorful.Hex("#ffd700") // Gold
	colorRed, _ := colorful.Hex("#dc143c")    // Crimson
	colorPurple, _ := colorful.Hex("#800080") // Purple
	colorBlack, _ := colorful.Hex("#0f001e")
	colorBackground, _ := colorful.Hex("#30404d") // Same as GridCard bg

	const stepSize = 60.0

	if intensity <= 0.001 {
		return colorBackground
	}

	switch {
	case intensity <= 1*stepSize:
		return colorBlue.BlendLab(colorGreen, intensity/stepSize)
	case intensity <= 2*stepSize:
		return colorGreen.BlendLab(colorYellow, (intensity-1*stepSize)/stepSize)
	case intensity <= 3*stepSize:
		return colorYellow.BlendLab(colorRed, (intensity-2*stepSize)/stepSize)
	case intensity <= 4*stepSize:
		// NOTE(review): this band blends in RGB space while the others
		// use Lab — kept as-is to preserve the original palette.
		return colorRed.BlendRgb(colorPurple, (intensity-3*stepSize)/stepSize)
	default:
		// final band is stretched over 5*stepSize and clamped at black
		f := math.Min((intensity-4*stepSize)/(5*stepSize), 1.0)
		return colorPurple.BlendLab(colorBlack, f)
	}
}

View file

@ -1,6 +1,7 @@
package manager
import (
"errors"
"fmt"
"image"
"image/color"
@ -33,6 +34,12 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageO
if !exists {
return nil, err
}
// FFMPEG bombs out if we try to request 89 snapshots from a 2 second video
if videoFile.Duration < 3 {
return nil, errors.New("video too short to create sprite")
}
generator, err := newGeneratorInfo(videoFile)
if err != nil {
return nil, err

View file

@ -2,34 +2,11 @@ package manager
import (
"archive/zip"
"os"
"strings"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
// DeleteGeneratedImageFiles deletes generated files for the provided image.
func DeleteGeneratedImageFiles(image *models.Image) {
thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(image.Checksum, models.DefaultGthumbWidth)
exists, _ := utils.FileExists(thumbPath)
if exists {
err := os.Remove(thumbPath)
if err != nil {
logger.Warnf("Could not delete file %s: %s", thumbPath, err.Error())
}
}
}
// DeleteImageFile deletes the image file from the filesystem.
func DeleteImageFile(image *models.Image) {
err := os.Remove(image.Path)
if err != nil {
logger.Warnf("Could not delete file %s: %s", image.Path, err.Error())
}
}
func walkGalleryZip(path string, walkFunc func(file *zip.File) error) error {
readCloser, err := zip.OpenReader(path)
if err != nil {

View file

@ -276,6 +276,9 @@ func (s *singleton) RefreshConfig() {
if err := utils.EnsureDir(s.Paths.Generated.Downloads); err != nil {
logger.Warnf("could not create directory for Downloads: %v", err)
}
if err := utils.EnsureDir(s.Paths.Generated.InteractiveHeatmap); err != nil {
logger.Warnf("could not create directory for Interactive Heatmaps: %v", err)
}
}
}

View file

@ -13,13 +13,14 @@ const thumbDirDepth int = 2
const thumbDirLength int = 2 // thumbDirDepth * thumbDirLength must be smaller than the length of checksum
type generatedPaths struct {
Screenshots string
Thumbnails string
Vtt string
Markers string
Transcodes string
Downloads string
Tmp string
Screenshots string
Thumbnails string
Vtt string
Markers string
Transcodes string
Downloads string
Tmp string
InteractiveHeatmap string
}
func newGeneratedPaths(path string) *generatedPaths {
@ -31,6 +32,7 @@ func newGeneratedPaths(path string) *generatedPaths {
gp.Transcodes = filepath.Join(path, "transcodes")
gp.Downloads = filepath.Join(path, "download_stage")
gp.Tmp = filepath.Join(path, "tmp")
gp.InteractiveHeatmap = filepath.Join(path, "interactive_heatmaps")
return &gp
}

View file

@ -1,8 +1,9 @@
package paths
import (
"github.com/stashapp/stash/pkg/utils"
"path/filepath"
"github.com/stashapp/stash/pkg/utils"
)
type scenePaths struct {
@ -51,3 +52,7 @@ func (sp *scenePaths) GetSpriteImageFilePath(checksum string) string {
func (sp *scenePaths) GetSpriteVttFilePath(checksum string) string {
return filepath.Join(sp.generated.Vtt, checksum+"_thumbs.vtt")
}
func (sp *scenePaths) GetInteractiveHeatmapPath(checksum string) string {
return filepath.Join(sp.generated.InteractiveHeatmap, checksum+".png")
}

View file

@ -44,7 +44,20 @@ func WaitAndDeregisterStream(filepath string, w *http.ResponseWriter, r *http.Re
}()
}
func KillRunningStreams(path string) {
func KillRunningStreams(scene *models.Scene, fileNamingAlgo models.HashAlgorithm) {
killRunningStreams(scene.Path)
sceneHash := scene.GetHash(fileNamingAlgo)
if sceneHash == "" {
return
}
transcodePath := GetInstance().Paths.Scene.GetTranscodePath(sceneHash)
killRunningStreams(transcodePath)
}
func killRunningStreams(path string) {
ffmpeg.KillRunningEncoders(path)
streamingFilesMutex.RLock()

View file

@ -2,190 +2,13 @@ package manager
import (
"fmt"
"os"
"path/filepath"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
// DestroyScene deletes a scene and its associated relationships from the
// database. Returns a function to perform any post-commit actions.
func DestroyScene(scene *models.Scene, repo models.Repository) (func(), error) {
qb := repo.Scene()
mqb := repo.SceneMarker()
markers, err := mqb.FindBySceneID(scene.ID)
if err != nil {
return nil, err
}
var funcs []func()
for _, m := range markers {
f, err := DestroySceneMarker(scene, m, mqb)
if err != nil {
return nil, err
}
funcs = append(funcs, f)
}
if err := qb.Destroy(scene.ID); err != nil {
return nil, err
}
return func() {
for _, f := range funcs {
f()
}
}, nil
}
// DestroySceneMarker deletes the scene marker from the database and returns a
// function that removes the generated files, to be executed after the
// transaction is successfully committed.
func DestroySceneMarker(scene *models.Scene, sceneMarker *models.SceneMarker, qb models.SceneMarkerWriter) (func(), error) {
if err := qb.Destroy(sceneMarker.ID); err != nil {
return nil, err
}
// delete the preview for the marker
return func() {
seconds := int(sceneMarker.Seconds)
DeleteSceneMarkerFiles(scene, seconds, config.GetInstance().GetVideoFileNamingAlgorithm())
}, nil
}
// DeleteGeneratedSceneFiles deletes generated files for the provided scene.
func DeleteGeneratedSceneFiles(scene *models.Scene, fileNamingAlgo models.HashAlgorithm) {
sceneHash := scene.GetHash(fileNamingAlgo)
if sceneHash == "" {
return
}
markersFolder := filepath.Join(GetInstance().Paths.Generated.Markers, sceneHash)
exists, _ := utils.FileExists(markersFolder)
if exists {
err := os.RemoveAll(markersFolder)
if err != nil {
logger.Warnf("Could not delete folder %s: %s", markersFolder, err.Error())
}
}
thumbPath := GetInstance().Paths.Scene.GetThumbnailScreenshotPath(sceneHash)
exists, _ = utils.FileExists(thumbPath)
if exists {
err := os.Remove(thumbPath)
if err != nil {
logger.Warnf("Could not delete file %s: %s", thumbPath, err.Error())
}
}
normalPath := GetInstance().Paths.Scene.GetScreenshotPath(sceneHash)
exists, _ = utils.FileExists(normalPath)
if exists {
err := os.Remove(normalPath)
if err != nil {
logger.Warnf("Could not delete file %s: %s", normalPath, err.Error())
}
}
streamPreviewPath := GetInstance().Paths.Scene.GetStreamPreviewPath(sceneHash)
exists, _ = utils.FileExists(streamPreviewPath)
if exists {
err := os.Remove(streamPreviewPath)
if err != nil {
logger.Warnf("Could not delete file %s: %s", streamPreviewPath, err.Error())
}
}
streamPreviewImagePath := GetInstance().Paths.Scene.GetStreamPreviewImagePath(sceneHash)
exists, _ = utils.FileExists(streamPreviewImagePath)
if exists {
err := os.Remove(streamPreviewImagePath)
if err != nil {
logger.Warnf("Could not delete file %s: %s", streamPreviewImagePath, err.Error())
}
}
transcodePath := GetInstance().Paths.Scene.GetTranscodePath(sceneHash)
exists, _ = utils.FileExists(transcodePath)
if exists {
// kill any running streams
KillRunningStreams(transcodePath)
err := os.Remove(transcodePath)
if err != nil {
logger.Warnf("Could not delete file %s: %s", transcodePath, err.Error())
}
}
spritePath := GetInstance().Paths.Scene.GetSpriteImageFilePath(sceneHash)
exists, _ = utils.FileExists(spritePath)
if exists {
err := os.Remove(spritePath)
if err != nil {
logger.Warnf("Could not delete file %s: %s", spritePath, err.Error())
}
}
vttPath := GetInstance().Paths.Scene.GetSpriteVttFilePath(sceneHash)
exists, _ = utils.FileExists(vttPath)
if exists {
err := os.Remove(vttPath)
if err != nil {
logger.Warnf("Could not delete file %s: %s", vttPath, err.Error())
}
}
}
// DeleteSceneMarkerFiles deletes generated files for a scene marker with the
// provided scene and timestamp.
func DeleteSceneMarkerFiles(scene *models.Scene, seconds int, fileNamingAlgo models.HashAlgorithm) {
videoPath := GetInstance().Paths.SceneMarkers.GetStreamPath(scene.GetHash(fileNamingAlgo), seconds)
imagePath := GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.GetHash(fileNamingAlgo), seconds)
screenshotPath := GetInstance().Paths.SceneMarkers.GetStreamScreenshotPath(scene.GetHash(fileNamingAlgo), seconds)
exists, _ := utils.FileExists(videoPath)
if exists {
err := os.Remove(videoPath)
if err != nil {
logger.Warnf("Could not delete file %s: %s", videoPath, err.Error())
}
}
exists, _ = utils.FileExists(imagePath)
if exists {
err := os.Remove(imagePath)
if err != nil {
logger.Warnf("Could not delete file %s: %s", imagePath, err.Error())
}
}
exists, _ = utils.FileExists(screenshotPath)
if exists {
err := os.Remove(screenshotPath)
if err != nil {
logger.Warnf("Could not delete file %s: %s", screenshotPath, err.Error())
}
}
}
// DeleteSceneFile deletes the scene video file from the filesystem.
func DeleteSceneFile(scene *models.Scene) {
// kill any running encoders
KillRunningStreams(scene.Path)
err := os.Remove(scene.Path)
if err != nil {
logger.Warnf("Could not delete file %s: %s", scene.Path, err.Error())
}
}
func GetSceneFileContainer(scene *models.Scene) (ffmpeg.Container, error) {
var container ffmpeg.Container
if scene.Format.Valid {

View file

@ -3,9 +3,10 @@ package manager
import (
"context"
"fmt"
"os"
"path/filepath"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/logger"
@ -46,7 +47,7 @@ func (j *cleanJob) Execute(ctx context.Context, progress *job.Progress) {
if err := j.processImages(ctx, progress, r.Image()); err != nil {
return fmt.Errorf("error cleaning images: %w", err)
}
if err := j.processGalleries(ctx, progress, r.Gallery()); err != nil {
if err := j.processGalleries(ctx, progress, r.Gallery(), r.Image()); err != nil {
return fmt.Errorf("error cleaning galleries: %w", err)
}
@ -66,28 +67,35 @@ func (j *cleanJob) Execute(ctx context.Context, progress *job.Progress) {
}
func (j *cleanJob) getCount(r models.ReaderRepository) (int, error) {
sceneCount, err := r.Scene().Count()
sceneFilter := scene.PathsFilter(j.input.Paths)
sceneResult, err := r.Scene().Query(models.SceneQueryOptions{
QueryOptions: models.QueryOptions{
Count: true,
},
SceneFilter: sceneFilter,
})
if err != nil {
return 0, err
}
imageCount, err := r.Image().Count()
imageCount, err := r.Image().QueryCount(image.PathsFilter(j.input.Paths), nil)
if err != nil {
return 0, err
}
galleryCount, err := r.Gallery().Count()
galleryCount, err := r.Gallery().QueryCount(gallery.PathsFilter(j.input.Paths), nil)
if err != nil {
return 0, err
}
return sceneCount + imageCount + galleryCount, nil
return sceneResult.Count + imageCount + galleryCount, nil
}
func (j *cleanJob) processScenes(ctx context.Context, progress *job.Progress, qb models.SceneReader) error {
batchSize := 1000
findFilter := models.BatchFindFilter(batchSize)
sceneFilter := scene.PathsFilter(j.input.Paths)
sort := "path"
findFilter.Sort = &sort
@ -99,7 +107,7 @@ func (j *cleanJob) processScenes(ctx context.Context, progress *job.Progress, qb
return nil
}
scenes, err := scene.Query(qb, nil, findFilter)
scenes, err := scene.Query(qb, sceneFilter, findFilter)
if err != nil {
return fmt.Errorf("error querying for scenes: %w", err)
}
@ -146,10 +154,11 @@ func (j *cleanJob) processScenes(ctx context.Context, progress *job.Progress, qb
return nil
}
func (j *cleanJob) processGalleries(ctx context.Context, progress *job.Progress, qb models.GalleryReader) error {
func (j *cleanJob) processGalleries(ctx context.Context, progress *job.Progress, qb models.GalleryReader, iqb models.ImageReader) error {
batchSize := 1000
findFilter := models.BatchFindFilter(batchSize)
galleryFilter := gallery.PathsFilter(j.input.Paths)
sort := "path"
findFilter.Sort = &sort
@ -161,14 +170,14 @@ func (j *cleanJob) processGalleries(ctx context.Context, progress *job.Progress,
return nil
}
galleries, _, err := qb.Query(nil, findFilter)
galleries, _, err := qb.Query(galleryFilter, findFilter)
if err != nil {
return fmt.Errorf("error querying for galleries: %w", err)
}
for _, gallery := range galleries {
progress.ExecuteTask(fmt.Sprintf("Assessing gallery %s for clean", gallery.GetTitle()), func() {
if j.shouldCleanGallery(gallery) {
if j.shouldCleanGallery(gallery, iqb) {
toDelete = append(toDelete, gallery.ID)
} else {
// increment progress, no further processing
@ -210,6 +219,7 @@ func (j *cleanJob) processImages(ctx context.Context, progress *job.Progress, qb
batchSize := 1000
findFilter := models.BatchFindFilter(batchSize)
imageFilter := image.PathsFilter(j.input.Paths)
// performance consideration: order by path since default ordering by
// title is slow
@ -224,7 +234,7 @@ func (j *cleanJob) processImages(ctx context.Context, progress *job.Progress, qb
return nil
}
images, err := image.Query(qb, nil, findFilter)
images, err := image.Query(qb, imageFilter, findFilter)
if err != nil {
return fmt.Errorf("error querying for images: %w", err)
}
@ -308,9 +318,9 @@ func (j *cleanJob) shouldCleanScene(s *models.Scene) bool {
return false
}
func (j *cleanJob) shouldCleanGallery(g *models.Gallery) bool {
func (j *cleanJob) shouldCleanGallery(g *models.Gallery, qb models.ImageReader) bool {
// never clean manually created galleries
if !g.Zip {
if !g.Path.Valid {
return false
}
@ -326,9 +336,27 @@ func (j *cleanJob) shouldCleanGallery(g *models.Gallery) bool {
}
config := config.GetInstance()
if !utils.MatchExtension(path, config.GetGalleryExtensions()) {
logger.Infof("File extension does not match gallery extensions. Marking to clean: \"%s\"", path)
return true
if g.Zip {
if !utils.MatchExtension(path, config.GetGalleryExtensions()) {
logger.Infof("File extension does not match gallery extensions. Marking to clean: \"%s\"", path)
return true
}
if countImagesInZip(path) == 0 {
logger.Infof("Gallery has 0 images. Marking to clean: \"%s\"", path)
return true
}
} else {
// folder-based - delete if it has no images
count, err := qb.CountByGalleryID(g.ID)
if err != nil {
logger.Warnf("Error trying to count gallery images for %q: %v", path, err)
return false
}
if count == 0 {
return true
}
}
if matchFile(path, config.GetImageExcludes()) {
@ -336,11 +364,6 @@ func (j *cleanJob) shouldCleanGallery(g *models.Gallery) bool {
return true
}
if countImagesInZip(path) == 0 {
logger.Infof("Gallery has 0 images. Marking to clean: \"%s\"", path)
return true
}
return false
}
@ -370,72 +393,99 @@ func (j *cleanJob) shouldCleanImage(s *models.Image) bool {
}
func (j *cleanJob) deleteScene(ctx context.Context, fileNamingAlgorithm models.HashAlgorithm, sceneID int) {
var postCommitFunc func()
var scene *models.Scene
fileNamingAlgo := GetInstance().Config.GetVideoFileNamingAlgorithm()
fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo,
Paths: GetInstance().Paths,
}
var s *models.Scene
if err := j.txnManager.WithTxn(context.TODO(), func(repo models.Repository) error {
qb := repo.Scene()
var err error
scene, err = qb.Find(sceneID)
s, err = qb.Find(sceneID)
if err != nil {
return err
}
postCommitFunc, err = DestroyScene(scene, repo)
return err
return scene.Destroy(s, repo, fileDeleter, true, false)
}); err != nil {
fileDeleter.Rollback()
logger.Errorf("Error deleting scene from database: %s", err.Error())
return
}
postCommitFunc()
// perform the post-commit actions
fileDeleter.Commit()
DeleteGeneratedSceneFiles(scene, fileNamingAlgorithm)
GetInstance().PluginCache.ExecutePostHooks(ctx, sceneID, plugin.SceneDestroyPost, nil, nil)
GetInstance().PluginCache.ExecutePostHooks(ctx, sceneID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{
Checksum: s.Checksum.String,
OSHash: s.OSHash.String,
Path: s.Path,
}, nil)
}
func (j *cleanJob) deleteGallery(ctx context.Context, galleryID int) {
var g *models.Gallery
if err := j.txnManager.WithTxn(context.TODO(), func(repo models.Repository) error {
qb := repo.Gallery()
var err error
g, err = qb.Find(galleryID)
if err != nil {
return err
}
return qb.Destroy(galleryID)
}); err != nil {
logger.Errorf("Error deleting gallery from database: %s", err.Error())
return
}
GetInstance().PluginCache.ExecutePostHooks(ctx, galleryID, plugin.GalleryDestroyPost, nil, nil)
GetInstance().PluginCache.ExecutePostHooks(ctx, galleryID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
Checksum: g.Checksum,
Path: g.Path.String,
}, nil)
}
func (j *cleanJob) deleteImage(ctx context.Context, imageID int) {
var checksum string
fileDeleter := &image.FileDeleter{
Deleter: *file.NewDeleter(),
Paths: GetInstance().Paths,
}
var i *models.Image
if err := j.txnManager.WithTxn(context.TODO(), func(repo models.Repository) error {
qb := repo.Image()
image, err := qb.Find(imageID)
var err error
i, err = qb.Find(imageID)
if err != nil {
return err
}
if image == nil {
if i == nil {
return fmt.Errorf("image not found: %d", imageID)
}
checksum = image.Checksum
return qb.Destroy(imageID)
return image.Destroy(i, qb, fileDeleter, true, false)
}); err != nil {
fileDeleter.Rollback()
logger.Errorf("Error deleting image from database: %s", err.Error())
return
}
// remove cache image
pathErr := os.Remove(GetInstance().Paths.Generated.GetThumbnailPath(checksum, models.DefaultGthumbWidth))
if pathErr != nil {
logger.Errorf("Error deleting thumbnail image from cache: %s", pathErr)
}
GetInstance().PluginCache.ExecutePostHooks(ctx, imageID, plugin.ImageDestroyPost, nil, nil)
// perform the post-commit actions
fileDeleter.Commit()
GetInstance().PluginCache.ExecutePostHooks(ctx, imageID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
Checksum: i.Checksum,
Path: i.Path,
}, nil)
}
func getStashFromPath(pathToCheck string) *models.StashConfig {

View file

@ -26,12 +26,13 @@ type GenerateJob struct {
}
type totalsGenerate struct {
sprites int64
previews int64
imagePreviews int64
markers int64
transcodes int64
phashes int64
sprites int64
previews int64
imagePreviews int64
markers int64
transcodes int64
phashes int64
interactiveHeatmapSpeeds int64
tasks int
}
@ -94,7 +95,7 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) {
return
}
logger.Infof("Generating %d sprites %d previews %d image previews %d markers %d transcodes %d phashes", totals.sprites, totals.previews, totals.imagePreviews, totals.markers, totals.transcodes, totals.phashes)
logger.Infof("Generating %d sprites %d previews %d image previews %d markers %d transcodes %d phashes %d heatmaps & speeds", totals.sprites, totals.previews, totals.imagePreviews, totals.markers, totals.transcodes, totals.phashes, totals.interactiveHeatmapSpeeds)
progress.SetTotal(int(totals.tasks))
}()
@ -251,9 +252,11 @@ func (j *GenerateJob) queueSceneJobs(scene *models.Scene, queue chan<- Task, tot
}
if utils.IsTrue(j.input.Transcodes) {
forceTranscode := utils.IsTrue(j.input.ForceTranscodes)
task := &GenerateTranscodeTask{
Scene: *scene,
Overwrite: j.overwrite,
Force: forceTranscode,
fileNamingAlgorithm: j.fileNamingAlgo,
}
if task.isTranscodeNeeded() {
@ -277,6 +280,21 @@ func (j *GenerateJob) queueSceneJobs(scene *models.Scene, queue chan<- Task, tot
queue <- task
}
}
if utils.IsTrue(j.input.InteractiveHeatmapsSpeeds) {
task := &GenerateInteractiveHeatmapSpeedTask{
Scene: *scene,
Overwrite: j.overwrite,
fileNamingAlgorithm: j.fileNamingAlgo,
TxnManager: j.txnManager,
}
if task.shouldGenerate() {
totals.interactiveHeatmapSpeeds++
totals.tasks++
queue <- task
}
}
}
func (j *GenerateJob) queueMarkerJob(marker *models.SceneMarker, queue chan<- Task, totals *totalsGenerate) {

View file

@ -0,0 +1,87 @@
package manager
import (
"context"
"database/sql"
"fmt"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
type GenerateInteractiveHeatmapSpeedTask struct {
Scene models.Scene
Overwrite bool
fileNamingAlgorithm models.HashAlgorithm
TxnManager models.TransactionManager
}
func (t *GenerateInteractiveHeatmapSpeedTask) GetDescription() string {
return fmt.Sprintf("Generating heatmap and speed for %s", t.Scene.Path)
}
// Start generates the heatmap PNG for the scene's funscript and stores
// the resulting median interactive speed on the scene record.
// Errors are logged rather than returned; the task simply ends early.
func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
if !t.shouldGenerate() {
return
}
// derive the funscript input path from the scene's video path, and the
// heatmap output path from the scene hash
videoChecksum := t.Scene.GetHash(t.fileNamingAlgorithm)
funscriptPath := utils.GetFunscriptPath(t.Scene.Path)
heatmapPath := instance.Paths.Scene.GetInteractiveHeatmapPath(videoChecksum)
generator := NewInteractiveHeatmapSpeedGenerator(funscriptPath, heatmapPath)
err := generator.Generate()
if err != nil {
logger.Errorf("error generating heatmap: %s", err.Error())
return
}
median := sql.NullInt64{
Int64: generator.InteractiveSpeed,
Valid: true,
}
// re-fetch the scene by path to get its current ID before updating.
// NOTE(review): uses context.TODO() rather than the passed ctx —
// presumably intentional so the DB write survives cancellation; confirm.
var s *models.Scene
if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
var err error
s, err = r.Scene().FindByPath(t.Scene.Path)
return err
}); err != nil {
logger.Error(err.Error())
return
}
// persist the computed median speed via a partial update
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
qb := r.Scene()
scenePartial := models.ScenePartial{
ID: s.ID,
InteractiveSpeed: &median,
}
_, err := qb.Update(scenePartial)
return err
}); err != nil {
logger.Error(err.Error())
}
}
// shouldGenerate reports whether a heatmap should be produced for the
// scene: only interactive scenes qualify, and an existing heatmap is
// only regenerated when Overwrite is set.
func (t *GenerateInteractiveHeatmapSpeedTask) shouldGenerate() bool {
if !t.Scene.Interactive {
return false
}
sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)
return !t.doesHeatmapExist(sceneHash) || t.Overwrite
}
// doesHeatmapExist reports whether a heatmap image file already exists
// for the given scene checksum. An empty checksum is treated as "does
// not exist".
func (t *GenerateInteractiveHeatmapSpeedTask) doesHeatmapExist(sceneChecksum string) bool {
if sceneChecksum == "" {
return false
}
// error deliberately ignored: any stat failure counts as "absent"
imageExists, _ := utils.FileExists(instance.Paths.Scene.GetInteractiveHeatmapPath(sceneChecksum))
return imageExists
}

View file

@ -117,6 +117,7 @@ func (t *GenerateMarkersTask) generateMarker(videoFile *ffmpeg.VideoFile, scene
ScenePath: scene.Path,
Seconds: seconds,
Width: 640,
Audio: instance.Config.GetPreviewAudio(),
}
encoder := instance.FFMPEG

View file

@ -211,8 +211,8 @@ type stashboxSource struct {
endpoint string
}
func (s stashboxSource) ScrapeScene(sceneID int) (*models.ScrapedScene, error) {
results, err := s.FindStashBoxScenesByFingerprintsFlat([]string{strconv.Itoa(sceneID)})
func (s stashboxSource) ScrapeScene(ctx context.Context, sceneID int) (*models.ScrapedScene, error) {
results, err := s.FindStashBoxScenesByFingerprintsFlat(ctx, []string{strconv.Itoa(sceneID)})
if err != nil {
return nil, fmt.Errorf("error querying stash-box using scene ID %d: %w", sceneID, err)
}
@ -233,8 +233,17 @@ type scraperSource struct {
scraperID string
}
func (s scraperSource) ScrapeScene(sceneID int) (*models.ScrapedScene, error) {
return s.cache.ScrapeScene(s.scraperID, sceneID)
func (s scraperSource) ScrapeScene(ctx context.Context, sceneID int) (*models.ScrapedScene, error) {
content, err := s.cache.ScrapeID(ctx, s.scraperID, sceneID, models.ScrapeContentTypeScene)
if err != nil {
return nil, err
}
if scene, ok := content.(models.ScrapedScene); ok {
return &scene, nil
}
return nil, errors.New("could not convert content to scene")
}
func (s scraperSource) String() string {

View file

@ -146,6 +146,11 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) {
func (j *ScanJob) queueFiles(ctx context.Context, paths []*models.StashConfig, scanQueue chan<- scanFile, parallelTasks int) (total int, newFiles int) {
defer close(scanQueue)
var minModTime time.Time
if j.input.Filter != nil && j.input.Filter.MinModTime != nil {
minModTime = *j.input.Filter.MinModTime
}
wg := sizedwaitgroup.New(parallelTasks)
for _, sp := range paths {
@ -160,6 +165,11 @@ func (j *ScanJob) queueFiles(ctx context.Context, paths []*models.StashConfig, s
return context.Canceled
}
// exit early on cutoff
if info.Mode().IsRegular() && info.ModTime().Before(minModTime) {
return nil
}
wg.Add()
go func() {

View file

@ -38,6 +38,7 @@ func (t *ScanTask) scanScene() *models.Scene {
VideoFileCreator: &instance.FFProbe,
PluginCache: instance.PluginCache,
MutexManager: t.mutexManager,
UseFileMetadata: t.UseFileMetadata,
}
if s != nil {

View file

@ -44,7 +44,7 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) {
if t.refresh {
var performerID string
txnErr := t.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
txnErr := t.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
stashids, _ := r.Performer().GetStashIDs(t.performer.ID)
for _, id := range stashids {
if id.Endpoint == t.box.Endpoint {
@ -57,7 +57,7 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) {
logger.Warnf("error while executing read transaction: %v", err)
}
if performerID != "" {
performer, err = client.FindStashBoxPerformerByID(performerID)
performer, err = client.FindStashBoxPerformerByID(ctx, performerID)
}
} else {
var name string
@ -66,7 +66,7 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) {
} else {
name = t.performer.Name.String
}
performer, err = client.FindStashBoxPerformerByName(name)
performer, err = client.FindStashBoxPerformerByName(ctx, name)
}
if err != nil {

View file

@ -15,6 +15,9 @@ type GenerateTranscodeTask struct {
Scene models.Scene
Overwrite bool
fileNamingAlgorithm models.HashAlgorithm
// is true, generate even if video is browser-supported
Force bool
}
func (t *GenerateTranscodeTask) GetDescription() string {
@ -49,7 +52,7 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {
audioCodec = ffmpeg.AudioCodec(t.Scene.AudioCodec.String)
}
if ffmpeg.IsStreamable(videoCodec, audioCodec, container) {
if !t.Force && ffmpeg.IsStreamable(videoCodec, audioCodec, container) {
return
}
@ -95,6 +98,14 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {
// used only when counting files to generate, doesn't affect the actual transcode generation
// if container is missing from DB it is treated as non supported in order not to delay the user
func (t *GenerateTranscodeTask) isTranscodeNeeded() bool {
hasTranscode := HasTranscode(&t.Scene, t.fileNamingAlgorithm)
if !t.Overwrite && hasTranscode {
return false
}
if t.Force {
return true
}
videoCodec := t.Scene.VideoCodec.String
container := ""
@ -111,9 +122,5 @@ func (t *GenerateTranscodeTask) isTranscodeNeeded() bool {
return false
}
hasTranscode := HasTranscode(&t.Scene, t.fileNamingAlgorithm)
if !t.Overwrite && hasTranscode {
return false
}
return true
}

View file

@ -6,6 +6,7 @@ import (
"regexp"
"strings"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
@ -58,7 +59,9 @@ func getPathWords(path string) []string {
return ret
}
func nameMatchesPath(name, path string) bool {
// nameMatchesPath returns the index in the path for the right-most match.
// Returns -1 if not found.
func nameMatchesPath(name, path string) int {
// escape specific regex characters
name = regexp.QuoteMeta(name)
@ -72,7 +75,13 @@ func nameMatchesPath(name, path string) bool {
reStr = `(?:^|_|[^\w\d])` + reStr + `(?:$|_|[^\w\d])`
re := regexp.MustCompile(reStr)
return re.MatchString(path)
found := re.FindAllStringIndex(path, -1)
if found == nil {
return -1
}
return found[len(found)-1][0]
}
func PathToPerformers(path string, performerReader models.PerformerReader) ([]*models.Performer, error) {
@ -86,7 +95,7 @@ func PathToPerformers(path string, performerReader models.PerformerReader) ([]*m
var ret []*models.Performer
for _, p := range performers {
// TODO - commenting out alias handling until both sides work correctly
if nameMatchesPath(p.Name.String, path) { // || nameMatchesPath(p.Aliases.String, path) {
if nameMatchesPath(p.Name.String, path) != -1 { // || nameMatchesPath(p.Aliases.String, path) {
ret = append(ret, p)
}
}
@ -94,7 +103,10 @@ func PathToPerformers(path string, performerReader models.PerformerReader) ([]*m
return ret, nil
}
func PathToStudios(path string, reader models.StudioReader) ([]*models.Studio, error) {
// PathToStudio returns the Studio that matches the given path.
// Where multiple matching studios are found, the one that matches the latest
// position in the path is returned.
func PathToStudio(path string, reader models.StudioReader) (*models.Studio, error) {
words := getPathWords(path)
candidates, err := reader.QueryForAutoTag(words)
@ -102,29 +114,26 @@ func PathToStudios(path string, reader models.StudioReader) ([]*models.Studio, e
return nil, err
}
var ret []*models.Studio
var ret *models.Studio
index := -1
for _, c := range candidates {
matches := false
if nameMatchesPath(c.Name.String, path) {
matches = true
matchIndex := nameMatchesPath(c.Name.String, path)
if matchIndex != -1 && matchIndex > index {
ret = c
index = matchIndex
}
if !matches {
aliases, err := reader.GetAliases(c.ID)
if err != nil {
return nil, err
}
for _, alias := range aliases {
if nameMatchesPath(alias, path) {
matches = true
break
}
}
aliases, err := reader.GetAliases(c.ID)
if err != nil {
return nil, err
}
if matches {
ret = append(ret, c)
for _, alias := range aliases {
matchIndex = nameMatchesPath(alias, path)
if matchIndex != -1 && matchIndex > index {
ret = c
index = matchIndex
}
}
}
@ -142,7 +151,7 @@ func PathToTags(path string, tagReader models.TagReader) ([]*models.Tag, error)
var ret []*models.Tag
for _, t := range tags {
matches := false
if nameMatchesPath(t.Name, path) {
if nameMatchesPath(t.Name, path) != -1 {
matches = true
}
@ -152,7 +161,7 @@ func PathToTags(path string, tagReader models.TagReader) ([]*models.Tag, error)
return nil, err
}
for _, alias := range aliases {
if nameMatchesPath(alias, path) {
if nameMatchesPath(alias, path) != -1 {
matches = true
break
}
@ -167,38 +176,6 @@ func PathToTags(path string, tagReader models.TagReader) ([]*models.Tag, error)
return ret, nil
}
// scenePathsFilter builds a scene filter matching scenes whose path lies
// under any of the given directories, chaining one criterion per
// directory with OR. A nil paths slice yields a nil filter.
func scenePathsFilter(paths []string) *models.SceneFilterType {
	if paths == nil {
		return nil
	}

	sep := string(filepath.Separator)

	var head, tail *models.SceneFilterType
	for _, dir := range paths {
		// ensure a trailing separator so the pattern only matches
		// entries inside the directory
		if !strings.HasSuffix(dir, sep) {
			dir += sep
		}

		node := &models.SceneFilterType{
			Path: &models.StringCriterionInput{
				Modifier: models.CriterionModifierEquals,
				Value:    dir + "%",
			},
		}

		if tail == nil {
			head = node
		} else {
			tail.Or = node
		}
		tail = node
	}

	return head
}
func PathToScenes(name string, paths []string, sceneReader models.SceneReader) ([]*models.Scene, error) {
regex := getPathQueryRegex(name)
organized := false
@ -210,7 +187,7 @@ func PathToScenes(name string, paths []string, sceneReader models.SceneReader) (
Organized: &organized,
}
filter.And = scenePathsFilter(paths)
filter.And = scene.PathsFilter(paths)
pp := models.PerPageAll
scenes, err := scene.Query(sceneReader, &filter, &models.FindFilterType{
@ -223,7 +200,7 @@ func PathToScenes(name string, paths []string, sceneReader models.SceneReader) (
var ret []*models.Scene
for _, p := range scenes {
if nameMatchesPath(name, p.Path) {
if nameMatchesPath(name, p.Path) != -1 {
ret = append(ret, p)
}
}
@ -231,38 +208,6 @@ func PathToScenes(name string, paths []string, sceneReader models.SceneReader) (
return ret, nil
}
// imagePathsFilter builds an image filter matching images whose path lies
// under any of the given directories, chaining one criterion per
// directory with OR. A nil paths slice yields a nil filter.
func imagePathsFilter(paths []string) *models.ImageFilterType {
	if paths == nil {
		return nil
	}

	sep := string(filepath.Separator)

	var head, tail *models.ImageFilterType
	for _, dir := range paths {
		// ensure a trailing separator so the pattern only matches
		// entries inside the directory
		if !strings.HasSuffix(dir, sep) {
			dir += sep
		}

		node := &models.ImageFilterType{
			Path: &models.StringCriterionInput{
				Modifier: models.CriterionModifierEquals,
				Value:    dir + "%",
			},
		}

		if tail == nil {
			head = node
		} else {
			tail.Or = node
		}
		tail = node
	}

	return head
}
func PathToImages(name string, paths []string, imageReader models.ImageReader) ([]*models.Image, error) {
regex := getPathQueryRegex(name)
organized := false
@ -274,7 +219,7 @@ func PathToImages(name string, paths []string, imageReader models.ImageReader) (
Organized: &organized,
}
filter.And = imagePathsFilter(paths)
filter.And = image.PathsFilter(paths)
pp := models.PerPageAll
images, err := image.Query(imageReader, &filter, &models.FindFilterType{
@ -287,7 +232,7 @@ func PathToImages(name string, paths []string, imageReader models.ImageReader) (
var ret []*models.Image
for _, p := range images {
if nameMatchesPath(name, p.Path) {
if nameMatchesPath(name, p.Path) != -1 {
ret = append(ret, p)
}
}
@ -295,38 +240,6 @@ func PathToImages(name string, paths []string, imageReader models.ImageReader) (
return ret, nil
}
// galleryPathsFilter builds a gallery filter matching galleries whose
// path lies under any of the given directories, chaining one criterion
// per directory with OR. A nil paths slice yields a nil filter.
func galleryPathsFilter(paths []string) *models.GalleryFilterType {
	if paths == nil {
		return nil
	}

	sep := string(filepath.Separator)

	var head, tail *models.GalleryFilterType
	for _, dir := range paths {
		// ensure a trailing separator so the pattern only matches
		// entries inside the directory
		if !strings.HasSuffix(dir, sep) {
			dir += sep
		}

		node := &models.GalleryFilterType{
			Path: &models.StringCriterionInput{
				Modifier: models.CriterionModifierEquals,
				Value:    dir + "%",
			},
		}

		if tail == nil {
			head = node
		} else {
			tail.Or = node
		}
		tail = node
	}

	return head
}
func PathToGalleries(name string, paths []string, galleryReader models.GalleryReader) ([]*models.Gallery, error) {
regex := getPathQueryRegex(name)
organized := false
@ -338,7 +251,7 @@ func PathToGalleries(name string, paths []string, galleryReader models.GalleryRe
Organized: &organized,
}
filter.And = galleryPathsFilter(paths)
filter.And = gallery.PathsFilter(paths)
pp := models.PerPageAll
gallerys, _, err := galleryReader.Query(&filter, &models.FindFilterType{
@ -351,7 +264,7 @@ func PathToGalleries(name string, paths []string, galleryReader models.GalleryRe
var ret []*models.Gallery
for _, p := range gallerys {
if nameMatchesPath(name, p.Path.String) {
if nameMatchesPath(name, p.Path.String) != -1 {
ret = append(ret, p)
}
}

76
pkg/match/path_test.go Normal file
View file

@ -0,0 +1,76 @@
package match
import "testing"
// Test_nameMatchesPath verifies that nameMatchesPath returns the index at
// which the name occurs in the path as a discrete, separator-delimited
// term, or -1 when it does not.
//
// Subtest names are unique so a failure identifies its case directly
// (the original table reused "separator" and "not within string",
// which go test disambiguates only with opaque #NN suffixes).
func Test_nameMatchesPath(t *testing.T) {
	const name = "first last"

	tests := []struct {
		name string
		path string
		want int
	}{
		{
			"exact",
			name,
			0,
		},
		{
			"partial",
			"first",
			-1,
		},
		{
			"dot separator",
			"first.last",
			0,
		},
		{
			"dash separator",
			"first-last",
			0,
		},
		{
			"underscore separator",
			"first_last",
			0,
		},
		{
			"multiple separators",
			"first.-_ last",
			0,
		},
		{
			"within string",
			"before_first last/after",
			6,
		},
		{
			"no boundary before",
			"beforefirst last/after",
			-1,
		},
		{
			"no boundary after",
			"before/first lastafter",
			-1,
		},
		{
			"trailing digit",
			"first last1",
			-1,
		},
		{
			"leading digit",
			"1first last",
			-1,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := nameMatchesPath(name, tt.path); got != tt.want {
				t.Errorf("nameMatchesPath() = %v, want %v", got, tt.want)
			}
		})
	}
}

View file

@ -2,4 +2,10 @@ package models
import "errors"
var ErrNotFound = errors.New("not found")
var (
// ErrNotFound signifies entities which are not found
ErrNotFound = errors.New("not found")
// ErrConversion signifies conversion errors
ErrConversion = errors.New("conversion error")
)

View file

@ -9,32 +9,33 @@ import (
// Scene stores the metadata for a single video scene.
type Scene struct {
ID int `db:"id" json:"id"`
Checksum sql.NullString `db:"checksum" json:"checksum"`
OSHash sql.NullString `db:"oshash" json:"oshash"`
Path string `db:"path" json:"path"`
Title sql.NullString `db:"title" json:"title"`
Details sql.NullString `db:"details" json:"details"`
URL sql.NullString `db:"url" json:"url"`
Date SQLiteDate `db:"date" json:"date"`
Rating sql.NullInt64 `db:"rating" json:"rating"`
Organized bool `db:"organized" json:"organized"`
OCounter int `db:"o_counter" json:"o_counter"`
Size sql.NullString `db:"size" json:"size"`
Duration sql.NullFloat64 `db:"duration" json:"duration"`
VideoCodec sql.NullString `db:"video_codec" json:"video_codec"`
Format sql.NullString `db:"format" json:"format_name"`
AudioCodec sql.NullString `db:"audio_codec" json:"audio_codec"`
Width sql.NullInt64 `db:"width" json:"width"`
Height sql.NullInt64 `db:"height" json:"height"`
Framerate sql.NullFloat64 `db:"framerate" json:"framerate"`
Bitrate sql.NullInt64 `db:"bitrate" json:"bitrate"`
StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
FileModTime NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"`
Phash sql.NullInt64 `db:"phash,omitempty" json:"phash"`
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
Interactive bool `db:"interactive" json:"interactive"`
ID int `db:"id" json:"id"`
Checksum sql.NullString `db:"checksum" json:"checksum"`
OSHash sql.NullString `db:"oshash" json:"oshash"`
Path string `db:"path" json:"path"`
Title sql.NullString `db:"title" json:"title"`
Details sql.NullString `db:"details" json:"details"`
URL sql.NullString `db:"url" json:"url"`
Date SQLiteDate `db:"date" json:"date"`
Rating sql.NullInt64 `db:"rating" json:"rating"`
Organized bool `db:"organized" json:"organized"`
OCounter int `db:"o_counter" json:"o_counter"`
Size sql.NullString `db:"size" json:"size"`
Duration sql.NullFloat64 `db:"duration" json:"duration"`
VideoCodec sql.NullString `db:"video_codec" json:"video_codec"`
Format sql.NullString `db:"format" json:"format_name"`
AudioCodec sql.NullString `db:"audio_codec" json:"audio_codec"`
Width sql.NullInt64 `db:"width" json:"width"`
Height sql.NullInt64 `db:"height" json:"height"`
Framerate sql.NullFloat64 `db:"framerate" json:"framerate"`
Bitrate sql.NullInt64 `db:"bitrate" json:"bitrate"`
StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
FileModTime NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"`
Phash sql.NullInt64 `db:"phash,omitempty" json:"phash"`
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
Interactive bool `db:"interactive" json:"interactive"`
InteractiveSpeed sql.NullInt64 `db:"interactive_speed" json:"interactive_speed"`
}
func (s *Scene) File() File {
@ -92,32 +93,33 @@ func (s *Scene) SetFile(f File) {
// ScenePartial represents part of a Scene object. It is used to update
// the database entry. Only non-nil fields will be updated.
type ScenePartial struct {
ID int `db:"id" json:"id"`
Checksum *sql.NullString `db:"checksum" json:"checksum"`
OSHash *sql.NullString `db:"oshash" json:"oshash"`
Path *string `db:"path" json:"path"`
Title *sql.NullString `db:"title" json:"title"`
Details *sql.NullString `db:"details" json:"details"`
URL *sql.NullString `db:"url" json:"url"`
Date *SQLiteDate `db:"date" json:"date"`
Rating *sql.NullInt64 `db:"rating" json:"rating"`
Organized *bool `db:"organized" json:"organized"`
Size *sql.NullString `db:"size" json:"size"`
Duration *sql.NullFloat64 `db:"duration" json:"duration"`
VideoCodec *sql.NullString `db:"video_codec" json:"video_codec"`
Format *sql.NullString `db:"format" json:"format_name"`
AudioCodec *sql.NullString `db:"audio_codec" json:"audio_codec"`
Width *sql.NullInt64 `db:"width" json:"width"`
Height *sql.NullInt64 `db:"height" json:"height"`
Framerate *sql.NullFloat64 `db:"framerate" json:"framerate"`
Bitrate *sql.NullInt64 `db:"bitrate" json:"bitrate"`
StudioID *sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
MovieID *sql.NullInt64 `db:"movie_id,omitempty" json:"movie_id"`
FileModTime *NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"`
Phash *sql.NullInt64 `db:"phash,omitempty" json:"phash"`
CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"`
UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"`
Interactive *bool `db:"interactive" json:"interactive"`
ID int `db:"id" json:"id"`
Checksum *sql.NullString `db:"checksum" json:"checksum"`
OSHash *sql.NullString `db:"oshash" json:"oshash"`
Path *string `db:"path" json:"path"`
Title *sql.NullString `db:"title" json:"title"`
Details *sql.NullString `db:"details" json:"details"`
URL *sql.NullString `db:"url" json:"url"`
Date *SQLiteDate `db:"date" json:"date"`
Rating *sql.NullInt64 `db:"rating" json:"rating"`
Organized *bool `db:"organized" json:"organized"`
Size *sql.NullString `db:"size" json:"size"`
Duration *sql.NullFloat64 `db:"duration" json:"duration"`
VideoCodec *sql.NullString `db:"video_codec" json:"video_codec"`
Format *sql.NullString `db:"format" json:"format_name"`
AudioCodec *sql.NullString `db:"audio_codec" json:"audio_codec"`
Width *sql.NullInt64 `db:"width" json:"width"`
Height *sql.NullInt64 `db:"height" json:"height"`
Framerate *sql.NullFloat64 `db:"framerate" json:"framerate"`
Bitrate *sql.NullInt64 `db:"bitrate" json:"bitrate"`
StudioID *sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
MovieID *sql.NullInt64 `db:"movie_id,omitempty" json:"movie_id"`
FileModTime *NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"`
Phash *sql.NullInt64 `db:"phash,omitempty" json:"phash"`
CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"`
UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"`
Interactive *bool `db:"interactive" json:"interactive"`
InteractiveSpeed *sql.NullInt64 `db:"interactive_speed" json:"interactive_speed"`
}
// UpdateInput constructs a SceneUpdateInput using the populated fields in the ScenePartial object.

167
pkg/models/search.go Normal file
View file

@ -0,0 +1,167 @@
package models
import "strings"
// Tokens recognized by ParseSearchString.
const (
	or         = "OR" // keyword separating OR'd terms (matched case-insensitively)
	orSymbol   = "|"  // symbol form of the OR keyword
	notPrefix  = '-'  // prefix marking a term that must not appear
	phraseChar = '"'  // quote character delimiting a multi-word phrase
)
// SearchSpecs provides the specifications for text-based searches.
// It is the parsed form of a free-text query, as produced by
// ParseSearchString.
type SearchSpecs struct {
	// MustHave specifies all of the terms that must appear in the results.
	MustHave []string

	// AnySets specifies sets of terms where one of each set must appear in the results.
	AnySets [][]string

	// MustNot specifies all terms that must not appear in the results.
	MustNot []string
}
// combinePhrases detects quote characters at the start and end of
// words and combines the contents into a single word.
func combinePhrases(words []string) []string {
var ret []string
startIndex := -1
for i, w := range words {
if startIndex == -1 {
// looking for start of phrase
// this could either be " or -"
ww := w
if len(w) > 0 && w[0] == notPrefix {
ww = w[1:]
}
if len(ww) > 0 && ww[0] == phraseChar && (len(ww) < 2 || ww[len(ww)-1] != phraseChar) {
startIndex = i
continue
}
ret = append(ret, w)
} else if len(w) > 0 && w[len(w)-1] == phraseChar { // looking for end of phrase
// combine words
phrase := strings.Join(words[startIndex:i+1], " ")
// add to return value
ret = append(ret, phrase)
startIndex = -1
}
}
if startIndex != -1 {
ret = append(ret, words[startIndex:]...)
}
return ret
}
// extractOrConditions removes runs of words joined by the OR keyword (or
// its symbol form) from the word list, recording each run as a set in
// searchSpec.AnySets, and returns the reduced list. An OR appearing as
// the first or last word lacks an operand and is left in place to be
// treated literally.
func extractOrConditions(words []string, searchSpec *SearchSpecs) []string {
	// repeat whole passes until one finds no OR keyword, since removing
	// a run shifts the remaining indices
	for foundOr := true; foundOr; {
		foundOr = false
		for i, w := range words {
			// an OR needs an operand on each side, so it cannot be the
			// first or last word
			if i > 0 && i < len(words)-1 && (strings.EqualFold(w, or) || w == orSymbol) {
				// found an OR keyword
				// the first operand is the word preceding the keyword
				startIndex := i - 1

				// find the last operand: the last word not preceded by OR
				// NOTE(review): chained operands are only joined by the
				// textual OR keyword here — "a | b | c" stops after the
				// second operand; confirm whether "|" should chain too
				lastIndex := len(words) - 1
				for ii := i + 2; ii < len(words); ii += 2 {
					if !strings.EqualFold(words[ii], or) {
						lastIndex = ii - 1
						break
					}
				}

				foundOr = true

				// combine the operand words (every second word) into an any set
				var set []string
				for ii := startIndex; ii <= lastIndex; ii += 2 {
					word := extractPhrase(words[ii])
					if word == "" {
						// ignore empty quoted operands
						continue
					}
					set = append(set, word)
				}

				searchSpec.AnySets = append(searchSpec.AnySets, set)

				// take out the OR'd words
				words = append(words[0:startIndex], words[lastIndex+1:]...)

				// break and reparse
				break
			}
		}
	}

	return words
}
// extractNotConditions removes words carrying the not prefix from the
// list, adding their unprefixed (and unquoted) forms to
// searchSpec.MustNot. Every other word is returned unchanged, in order.
// Empty quoted terms are dropped entirely.
func extractNotConditions(words []string, searchSpec *SearchSpecs) []string {
	var remaining []string

	for _, w := range words {
		if len(w) <= 1 || w[0] != notPrefix {
			remaining = append(remaining, w)
			continue
		}

		if term := extractPhrase(w[1:]); term != "" {
			searchSpec.MustNot = append(searchSpec.MustNot, term)
		}
	}

	return remaining
}
// extractPhrase strips the surrounding quote characters from w when it is
// fully quoted; otherwise it returns w unchanged.
func extractPhrase(w string) string {
	quoted := len(w) > 1 && w[0] == phraseChar && w[len(w)-1] == phraseChar
	if !quoted {
		return w
	}
	return w[1 : len(w)-1]
}
// ParseSearchString parses the Q value and returns a SearchSpecs object.
//
// By default, any word in the search value must appear in the results.
// Words wrapped in quotes (") are treated as a single term.
// Where the keyword "OR" (case-insensitive) appears outside a quoted
// phrase, one of the OR'd terms must appear in the results.
// A keyword prefixed with "-" must not appear in the results.
// Where OR appears as the first or last term, or where an OR operand
// carries the not prefix, the OR is treated literally.
func ParseSearchString(s string) SearchSpecs {
	s = strings.TrimSpace(s)
	if s == "" {
		return SearchSpecs{}
	}

	spec := SearchSpecs{}

	// tokenize on spaces, then peel off quoted phrases, OR groups and
	// NOT terms in turn; whatever remains must all be present
	tokens := strings.Split(s, " ")
	tokens = combinePhrases(tokens)
	tokens = extractOrConditions(tokens, &spec)
	tokens = extractNotConditions(tokens, &spec)

	for _, tok := range tokens {
		// ignore empty quotes
		if term := extractPhrase(tok); term != "" {
			spec.MustHave = append(spec.MustHave, term)
		}
	}

	return spec
}

227
pkg/models/search_test.go Normal file
View file

@ -0,0 +1,227 @@
package models
import (
"reflect"
"testing"
)
// TestParseSearchString exercises ParseSearchString's handling of plain
// terms, quoted phrases, OR groups (keyword and symbol forms) and
// not-prefixed terms, including the literal fall-backs for malformed
// input (unmatched quotes, leading/trailing OR).
//
// NOTE(review): the "quoted double space" case appears identical to
// "quoted" in this view — the doubled space inside the quoted string may
// have been collapsed in transit; confirm against the original file.
func TestParseSearchString(t *testing.T) {
	tests := []struct {
		name string
		q    string
		want SearchSpecs
	}{
		{
			"basic",
			"a b c",
			SearchSpecs{
				MustHave: []string{"a", "b", "c"},
			},
		},
		{
			"empty",
			"",
			SearchSpecs{},
		},
		{
			"whitespace",
			" ",
			SearchSpecs{},
		},
		{
			"single",
			"a",
			SearchSpecs{
				MustHave: []string{"a"},
			},
		},
		{
			"quoted",
			`"a b" c`,
			SearchSpecs{
				MustHave: []string{"a b", "c"},
			},
		},
		{
			"quoted double space",
			`"a b" c`,
			SearchSpecs{
				MustHave: []string{"a b", "c"},
			},
		},
		{
			"quoted end space",
			`"a b " c`,
			SearchSpecs{
				MustHave: []string{"a b ", "c"},
			},
		},
		{
			"no matching end quote",
			`"a b c`,
			SearchSpecs{
				MustHave: []string{`"a`, "b", "c"},
			},
		},
		{
			"no matching start quote",
			`a b c"`,
			SearchSpecs{
				MustHave: []string{"a", "b", `c"`},
			},
		},
		{
			"or",
			"a OR b",
			SearchSpecs{
				AnySets: [][]string{
					{"a", "b"},
				},
			},
		},
		{
			"multi or",
			"a OR b c OR d",
			SearchSpecs{
				AnySets: [][]string{
					{"a", "b"},
					{"c", "d"},
				},
			},
		},
		{
			"lowercase or",
			"a or b",
			SearchSpecs{
				AnySets: [][]string{
					{"a", "b"},
				},
			},
		},
		{
			"or symbol",
			"a | b",
			SearchSpecs{
				AnySets: [][]string{
					{"a", "b"},
				},
			},
		},
		{
			"quoted or",
			`a "OR" b`,
			SearchSpecs{
				MustHave: []string{"a", "OR", "b"},
			},
		},
		{
			"quoted or symbol",
			`a "|" b`,
			SearchSpecs{
				MustHave: []string{"a", "|", "b"},
			},
		},
		{
			"or phrases",
			`"a b" OR "c d"`,
			SearchSpecs{
				AnySets: [][]string{
					{"a b", "c d"},
				},
			},
		},
		{
			"or at start",
			"OR a",
			SearchSpecs{
				MustHave: []string{"OR", "a"},
			},
		},
		{
			"or at end",
			"a OR",
			SearchSpecs{
				MustHave: []string{"a", "OR"},
			},
		},
		{
			"or symbol at start",
			"| a",
			SearchSpecs{
				MustHave: []string{"|", "a"},
			},
		},
		{
			"or symbol at end",
			"a |",
			SearchSpecs{
				MustHave: []string{"a", "|"},
			},
		},
		{
			"nots",
			"-a -b",
			SearchSpecs{
				MustNot: []string{"a", "b"},
			},
		},
		{
			"not or",
			"-a OR b",
			SearchSpecs{
				AnySets: [][]string{
					{"-a", "b"},
				},
			},
		},
		{
			"not phrase",
			`-"a b"`,
			SearchSpecs{
				MustNot: []string{"a b"},
			},
		},
		{
			"not in phrase",
			`"-a b"`,
			SearchSpecs{
				MustHave: []string{"-a b"},
			},
		},
		{
			"double not",
			"--a",
			SearchSpecs{
				MustNot: []string{"-a"},
			},
		},
		{
			"empty quote",
			`"" a`,
			SearchSpecs{
				MustHave: []string{"a"},
			},
		},
		{
			"not empty quote",
			`-"" a`,
			SearchSpecs{
				MustHave: []string{"a"},
			},
		},
		{
			"quote in word",
			`ab"cd"`,
			SearchSpecs{
				MustHave: []string{`ab"cd"`},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := ParseSearchString(tt.q); !reflect.DeepEqual(got, tt.want) {
				t.Errorf("FindFilterType.ParseSearchString() = %v, want %v", got, tt.want)
			}
		})
	}
}

View file

@ -2,9 +2,10 @@ package models
import (
"database/sql/driver"
"fmt"
"strings"
"time"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/utils"
)
@ -33,14 +34,19 @@ func (t *SQLiteDate) Scan(value interface{}) error {
// Value implements the driver Valuer interface.
func (t SQLiteDate) Value() (driver.Value, error) {
if !t.Valid {
return nil, nil
}
s := strings.TrimSpace(t.String)
// handle empty string
if t.String == "" {
if s == "" {
return "", nil
}
result, err := utils.ParseDateStringAsFormat(t.String, "2006-01-02")
result, err := utils.ParseDateStringAsFormat(s, "2006-01-02")
if err != nil {
logger.Debugf("sqlite date conversion error: %s", err.Error())
return nil, fmt.Errorf("converting sqlite date %q: %w", s, err)
}
return result, nil
}

View file

@ -0,0 +1,84 @@
package models
import (
"database/sql/driver"
"reflect"
"testing"
)
// TestSQLiteDate_Value checks SQLiteDate's driver.Valuer implementation:
// parseable dates (with or without time, zone, or surrounding
// whitespace) normalise to "2006-01-02", empty/whitespace-only strings
// value to "", invalid (NULL) values produce nil, and unparseable
// strings produce an error.
func TestSQLiteDate_Value(t *testing.T) {
	tests := []struct {
		name    string
		tr      SQLiteDate
		want    driver.Value
		wantErr bool
	}{
		{
			"empty string",
			SQLiteDate{"", true},
			"",
			false,
		},
		{
			"whitespace",
			SQLiteDate{" ", true},
			"",
			false,
		},
		{
			"RFC3339",
			SQLiteDate{"2021-11-22T17:11:55+11:00", true},
			"2021-11-22",
			false,
		},
		{
			"date",
			SQLiteDate{"2021-11-22", true},
			"2021-11-22",
			false,
		},
		{
			"date and time",
			SQLiteDate{"2021-11-22 17:12:05", true},
			"2021-11-22",
			false,
		},
		{
			"date, time and zone",
			SQLiteDate{"2021-11-22 17:33:05 AEST", true},
			"2021-11-22",
			false,
		},
		{
			"whitespaced date",
			SQLiteDate{" 2021-11-22 ", true},
			"2021-11-22",
			false,
		},
		{
			"bad format",
			SQLiteDate{"foo", true},
			nil,
			true,
		},
		{
			"invalid",
			SQLiteDate{"null", false},
			nil,
			false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := tt.tr.Value()
			if (err != nil) != tt.wantErr {
				t.Errorf("SQLiteDate.Value() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("SQLiteDate.Value() = %v, want %v", got, tt.want)
			}
		})
	}
}

57
pkg/models/timestamp.go Normal file
View file

@ -0,0 +1,57 @@
package models
import (
"errors"
"fmt"
"io"
"strconv"
"time"
"github.com/99designs/gqlgen/graphql"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/utils"
)
// ErrTimestamp is returned (wrapped with detail) when a Timestamp value
// cannot be parsed by UnmarshalTimestamp.
var ErrTimestamp = errors.New("cannot parse Timestamp")
// MarshalTimestamp renders t as a quoted RFC3339Nano string for GraphQL.
// The zero time marshals as GraphQL null.
func MarshalTimestamp(t time.Time) graphql.Marshaler {
	if t.IsZero() {
		return graphql.Null
	}

	return graphql.WriterFunc(func(w io.Writer) {
		quoted := strconv.Quote(t.Format(time.RFC3339Nano))
		if _, err := io.WriteString(w, quoted); err != nil {
			logger.Warnf("could not marshal timestamp: %v", err)
		}
	})
}
// UnmarshalTimestamp parses a GraphQL Timestamp value. The input must be
// a string: either a date/datetime accepted by
// utils.ParseDateStringAsTime, or a duration prefixed with '<' (that
// long before now) or '>' (that far after now). Errors wrap
// ErrTimestamp.
func UnmarshalTimestamp(v interface{}) (time.Time, error) {
	str, ok := v.(string)
	if !ok {
		return time.Time{}, fmt.Errorf("%w: not a string", ErrTimestamp)
	}
	if len(str) == 0 {
		return time.Time{}, fmt.Errorf("%w: empty string", ErrTimestamp)
	}

	prefix := str[0]
	if prefix != '>' && prefix != '<' {
		return utils.ParseDateStringAsTime(str)
	}

	// relative form: the remainder is a duration offset from now
	d, err := time.ParseDuration(str[1:])
	if err != nil {
		return time.Time{}, fmt.Errorf("%w: cannot parse %v-duration: %v", ErrTimestamp, prefix, err)
	}

	// '<' means in the past, '>' in the future
	if prefix == '<' {
		d = -d
	}
	return time.Now().Add(d), nil
}

View file

@ -0,0 +1,90 @@
package models
import (
"bytes"
"strconv"
"testing"
"time"
)
// TestTimestampSymmetry checks that a time survives a round trip through
// MarshalTimestamp and UnmarshalTimestamp.
func TestTimestampSymmetry(t *testing.T) {
	want := time.Now()

	var buf bytes.Buffer
	MarshalTimestamp(want).MarshalGQL(&buf)

	unquoted, err := strconv.Unquote(buf.String())
	if err != nil {
		t.Fatal("could not unquote string")
	}

	got, err := UnmarshalTimestamp(unquoted)
	if err != nil {
		t.Fatalf("could not unmarshal time: %v", err)
	}

	if !want.Equal(got) {
		t.Fatalf("have %v, want %v", got, want)
	}
}
// TestTimestamp verifies that each supported input format unmarshals and
// then marshals to the expected RFC3339 representation.
func TestTimestamp(t *testing.T) {
	n := time.Now().In(time.UTC)

	testCases := []struct {
		name string
		have string
		want string
	}{
		{"reflexivity", n.Format(time.RFC3339Nano), n.Format(time.RFC3339Nano)},
		{"rfc3339", "2021-11-04T01:02:03Z", "2021-11-04T01:02:03Z"},
		{"date", "2021-04-05", "2021-04-05T00:00:00Z"},
		{"datetime", "2021-04-05 14:45:36", "2021-04-05T14:45:36Z"},
		{"datetime-tz", "2021-04-05 14:45:36 PDT", "2021-04-05T14:45:36Z"},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			p, err := UnmarshalTimestamp(tc.have)
			if err != nil {
				t.Fatalf("could not unmarshal time: %v", err)
			}

			buf := bytes.NewBuffer([]byte{})
			MarshalTimestamp(p).MarshalGQL(buf)

			got, err := strconv.Unquote(buf.String())
			if err != nil {
				// fixed message (was "count not unquote string"); Fatal
				// rather than Fatalf since there are no format arguments
				t.Fatal("could not unquote string")
			}

			if got != tc.want {
				t.Errorf("got %s; want %s", got, tc.want)
			}
		})
	}
}
// epsilon bounds the acceptable drift between the expected and parsed
// relative timestamps, covering time elapsed while the test runs.
const epsilon = 10 * time.Second

// TestTimestampRelative verifies parsing of relative timestamps of the
// form "<dur" (in the past) and ">dur" (in the future).
func TestTimestampRelative(t *testing.T) {
	n := time.Now()

	testCases := []struct {
		name string
		have string
		want time.Time
	}{
		{"past", "<4h", n.Add(-4 * time.Hour)},
		{"future", ">5m", n.Add(5 * time.Minute)},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			got, err := UnmarshalTimestamp(tc.have)
			if err != nil {
				t.Fatalf("could not unmarshal time: %v", err)
			}

			// compare the absolute difference: the original one-sided
			// check (got.Sub(tc.want) > epsilon) would pass for any
			// result EARLIER than expected, however wrong
			diff := got.Sub(tc.want)
			if diff < 0 {
				diff = -diff
			}
			if diff > epsilon {
				t.Errorf("not within bound of %v; got %s; want %s", epsilon, got, tc.want)
			}
		})
	}
}

View file

@ -1,6 +1,7 @@
package plugin
import (
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin/common"
)
@ -40,6 +41,7 @@ const (
TagCreatePost HookTriggerEnum = "Tag.Create.Post"
TagUpdatePost HookTriggerEnum = "Tag.Update.Post"
TagMergePost HookTriggerEnum = "Tag.Merge.Post"
TagDestroyPost HookTriggerEnum = "Tag.Destroy.Post"
)
@ -74,6 +76,7 @@ var AllHookTriggerEnum = []HookTriggerEnum{
TagCreatePost,
TagUpdatePost,
TagMergePost,
TagDestroyPost,
}
@ -123,3 +126,36 @@ func (e HookTriggerEnum) String() string {
func addHookContext(argsMap common.ArgsMap, hookContext common.HookContext) {
argsMap[common.HookContextKey] = hookContext
}
// types for destroy hooks, to provide a little more information
// (hash/path identifiers) alongside the plain destroy input.

// SceneDestroyInput is the hook payload for a single scene destroy.
type SceneDestroyInput struct {
	models.SceneDestroyInput
	Checksum string `json:"checksum"`
	OSHash   string `json:"oshash"`
	Path     string `json:"path"`
}

// ScenesDestroyInput is the hook payload for a bulk scene destroy.
type ScenesDestroyInput struct {
	models.ScenesDestroyInput
	Checksum string `json:"checksum"`
	OSHash   string `json:"oshash"`
	Path     string `json:"path"`
}

// GalleryDestroyInput is the hook payload for a gallery destroy.
type GalleryDestroyInput struct {
	models.GalleryDestroyInput
	Checksum string `json:"checksum"`
	Path     string `json:"path"`
}

// ImageDestroyInput is the hook payload for a single image destroy.
type ImageDestroyInput struct {
	models.ImageDestroyInput
	Checksum string `json:"checksum"`
	Path     string `json:"path"`
}

// ImagesDestroyInput is the hook payload for a bulk image destroy.
type ImagesDestroyInput struct {
	models.ImagesDestroyInput
	Checksum string `json:"checksum"`
	Path     string `json:"path"`
}

164
pkg/scene/delete.go Normal file
View file

@ -0,0 +1,164 @@
package scene
import (
"path/filepath"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/manager/paths"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
// FileDeleter is an extension of file.Deleter that handles deletion of scene files.
type FileDeleter struct {
	file.Deleter

	// FileNamingAlgo is the hash algorithm passed to Scene.GetHash to
	// derive the names/locations of a scene's generated files.
	FileNamingAlgo models.HashAlgorithm

	// Paths resolves the filesystem locations of generated files.
	Paths *paths.Paths
}
// MarkGeneratedFiles marks for deletion the generated files for the
// provided scene: the per-scene markers directory plus the screenshot,
// preview, transcode, sprite and heatmap artefacts that exist on disk.
// It is a no-op when the scene has no hash for the configured algorithm.
func (d *FileDeleter) MarkGeneratedFiles(scene *models.Scene) error {
	sceneHash := scene.GetHash(d.FileNamingAlgo)
	if sceneHash == "" {
		return nil
	}

	// markers live in a per-scene directory and are deleted wholesale
	markersFolder := filepath.Join(d.Paths.Generated.Markers, sceneHash)
	exists, _ := utils.FileExists(markersFolder)
	if exists {
		if err := d.Dirs([]string{markersFolder}); err != nil {
			return err
		}
	}

	// every other generated artefact is a single file; collect those
	// that actually exist and mark them in one call (replaces eight
	// copies of the same exists-then-append pattern)
	candidates := []string{
		d.Paths.Scene.GetThumbnailScreenshotPath(sceneHash),
		d.Paths.Scene.GetScreenshotPath(sceneHash),
		d.Paths.Scene.GetStreamPreviewPath(sceneHash),
		d.Paths.Scene.GetStreamPreviewImagePath(sceneHash),
		d.Paths.Scene.GetTranscodePath(sceneHash),
		d.Paths.Scene.GetSpriteImageFilePath(sceneHash),
		d.Paths.Scene.GetSpriteVttFilePath(sceneHash),
		d.Paths.Scene.GetInteractiveHeatmapPath(sceneHash),
	}

	var files []string
	for _, p := range candidates {
		// existence errors are deliberately ignored, as in the original:
		// a missing or unreadable file is simply not marked
		if exists, _ := utils.FileExists(p); exists {
			files = append(files, p)
		}
	}

	return d.Files(files)
}
// MarkMarkerFiles marks for deletion the generated files (stream video,
// preview image and screenshot) of a scene marker at the given timestamp
// within the provided scene.
func (d *FileDeleter) MarkMarkerFiles(scene *models.Scene, seconds int) error {
	hash := scene.GetHash(d.FileNamingAlgo)

	candidates := []string{
		d.Paths.SceneMarkers.GetStreamPath(hash, seconds),
		d.Paths.SceneMarkers.GetStreamPreviewImagePath(hash, seconds),
		d.Paths.SceneMarkers.GetStreamScreenshotPath(hash, seconds),
	}

	var files []string
	for _, p := range candidates {
		// only mark files that are actually present
		if exists, _ := utils.FileExists(p); exists {
			files = append(files, p)
		}
	}

	return d.Files(files)
}
// Destroy deletes a scene and its associated relationships from the
// database. Its markers are destroyed first; the scene's own file and/or
// generated files are additionally marked for deletion according to the
// deleteFile and deleteGenerated flags.
func Destroy(scene *models.Scene, repo models.Repository, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error {
	mqb := repo.SceneMarker()

	markers, err := mqb.FindBySceneID(scene.ID)
	if err != nil {
		return err
	}

	for _, marker := range markers {
		if err := DestroyMarker(scene, marker, mqb, fileDeleter); err != nil {
			return err
		}
	}

	if deleteFile {
		if err := fileDeleter.Files([]string{scene.Path}); err != nil {
			return err
		}
	}

	if deleteGenerated {
		if err := fileDeleter.MarkGeneratedFiles(scene); err != nil {
			return err
		}
	}

	return repo.Scene().Destroy(scene.ID)
}
// DestroyMarker deletes the scene marker from the database and marks the
// marker's generated preview files for deletion via fileDeleter.
// (The previous comment claimed it returned a post-commit cleanup
// function; it does not — it returns only an error.)
func DestroyMarker(scene *models.Scene, sceneMarker *models.SceneMarker, qb models.SceneMarkerWriter, fileDeleter *FileDeleter) error {
	if err := qb.Destroy(sceneMarker.ID); err != nil {
		return err
	}

	// delete the preview for the marker
	seconds := int(sceneMarker.Seconds)
	return fileDeleter.MarkMarkerFiles(scene, seconds)
}

40
pkg/scene/filter.go Normal file
View file

@ -0,0 +1,40 @@
package scene
import (
"path/filepath"
"strings"
"github.com/stashapp/stash/pkg/models"
)
// PathsFilter builds a SceneFilterType matching scenes whose path lies
// under any of the given directories, chaining one criterion per
// directory with OR. A nil paths slice yields a nil filter.
func PathsFilter(paths []string) *models.SceneFilterType {
	if paths == nil {
		return nil
	}

	sep := string(filepath.Separator)

	var head, tail *models.SceneFilterType
	for _, dir := range paths {
		// ensure a trailing separator so the pattern only matches
		// entries inside the directory
		if !strings.HasSuffix(dir, sep) {
			dir += sep
		}

		node := &models.SceneFilterType{
			Path: &models.StringCriterionInput{
				Modifier: models.CriterionModifierEquals,
				Value:    dir + "%",
			},
		}

		if tail == nil {
			head = node
		} else {
			tail.Or = node
		}
		tail = node
	}

	return head
}

View file

@ -42,6 +42,10 @@ func MigrateHash(p *paths.Paths, oldHash string, newHash string) {
oldPath = scenePaths.GetSpriteImageFilePath(oldHash)
newPath = scenePaths.GetSpriteImageFilePath(newHash)
migrateSceneFiles(oldPath, newPath)
oldPath = scenePaths.GetInteractiveHeatmapPath(oldHash)
newPath = scenePaths.GetInteractiveHeatmapPath(newHash)
migrateSceneFiles(oldPath, newPath)
}
func migrateSceneFiles(oldName, newName string) {

View file

@ -130,6 +130,7 @@ func (scanner *Scanner) ScanExisting(existing file.FileBased, file file.SourceFi
}
}
s.Interactive = interactive
s.UpdatedAt = models.SQLiteTimestamp{Timestamp: time.Now()}
_, err := qb.UpdateFull(*s)
@ -262,7 +263,7 @@ func (scanner *Scanner) ScanNew(file file.SourceFile) (retScene *models.Scene, e
if scanner.UseFileMetadata {
newScene.Details = sql.NullString{String: videoFile.Comment, Valid: true}
newScene.Date = models.SQLiteDate{String: videoFile.CreationTime.Format("2006-01-02")}
_ = newScene.Date.Scan(videoFile.CreationTime)
}
if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {

View file

@ -1,6 +1,7 @@
package scraper
import (
"context"
"net/http"
"github.com/stashapp/stash/pkg/models"
@ -24,20 +25,12 @@ func (e scraperAction) IsValid() bool {
}
type scraperActionImpl interface {
scrapePerformersByName(name string) ([]*models.ScrapedPerformer, error)
scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error)
scrapePerformerByURL(url string) (*models.ScrapedPerformer, error)
scrapeByURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error)
scrapeByName(ctx context.Context, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error)
scrapeByFragment(ctx context.Context, input Input) (models.ScrapedContent, error)
scrapeScenesByName(name string) ([]*models.ScrapedScene, error)
scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error)
scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error)
scrapeSceneByURL(url string) (*models.ScrapedScene, error)
scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error)
scrapeGalleryByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error)
scrapeGalleryByURL(url string) (*models.ScrapedGallery, error)
scrapeMovieByURL(url string) (*models.ScrapedMovie, error)
scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error)
scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error)
}
func (c config) getScraper(scraper scraperTypeConfig, client *http.Client, txnManager models.TransactionManager, globalConfig GlobalConfig) scraperActionImpl {

View file

@ -2,8 +2,8 @@ package scraper
import (
"context"
"errors"
"fmt"
"net/http"
"strconv"
"github.com/stashapp/stash/pkg/match"
@ -16,14 +16,12 @@ const (
autoTagScraperName = "Auto Tag"
)
var errNotSupported = errors.New("not supported")
type autotagScraper struct {
txnManager models.TransactionManager
globalConfig GlobalConfig
}
func (s *autotagScraper) matchPerformers(path string, performerReader models.PerformerReader) ([]*models.ScrapedPerformer, error) {
func autotagMatchPerformers(path string, performerReader models.PerformerReader) ([]*models.ScrapedPerformer, error) {
p, err := match.PathToPerformers(path, performerReader)
if err != nil {
return nil, fmt.Errorf("error matching performers: %w", err)
@ -47,16 +45,16 @@ func (s *autotagScraper) matchPerformers(path string, performerReader models.Per
return ret, nil
}
func (s *autotagScraper) matchStudio(path string, studioReader models.StudioReader) (*models.ScrapedStudio, error) {
st, err := match.PathToStudios(path, studioReader)
func autotagMatchStudio(path string, studioReader models.StudioReader) (*models.ScrapedStudio, error) {
studio, err := match.PathToStudio(path, studioReader)
if err != nil {
return nil, fmt.Errorf("error matching studios: %w", err)
}
if len(st) > 0 {
id := strconv.Itoa(st[0].ID)
if studio != nil {
id := strconv.Itoa(studio.ID)
return &models.ScrapedStudio{
Name: st[0].Name.String,
Name: studio.Name.String,
StoredID: &id,
}, nil
}
@ -64,7 +62,7 @@ func (s *autotagScraper) matchStudio(path string, studioReader models.StudioRead
return nil, nil
}
func (s *autotagScraper) matchTags(path string, tagReader models.TagReader) ([]*models.ScrapedTag, error) {
func autotagMatchTags(path string, tagReader models.TagReader) ([]*models.ScrapedTag, error) {
t, err := match.PathToTags(path, tagReader)
if err != nil {
return nil, fmt.Errorf("error matching tags: %w", err)
@ -85,32 +83,24 @@ func (s *autotagScraper) matchTags(path string, tagReader models.TagReader) ([]*
return ret, nil
}
type autotagSceneScraper struct {
*autotagScraper
}
func (c *autotagSceneScraper) scrapeByName(name string) ([]*models.ScrapedScene, error) {
return nil, errNotSupported
}
func (c *autotagSceneScraper) scrapeByScene(scene *models.Scene) (*models.ScrapedScene, error) {
func (s autotagScraper) viaScene(ctx context.Context, _client *http.Client, scene *models.Scene) (*models.ScrapedScene, error) {
var ret *models.ScrapedScene
// populate performers, studio and tags based on scene path
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
if err := s.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
path := scene.Path
performers, err := c.matchPerformers(path, r.Performer())
performers, err := autotagMatchPerformers(path, r.Performer())
if err != nil {
return err
return fmt.Errorf("autotag scraper viaScene: %w", err)
}
studio, err := c.matchStudio(path, r.Studio())
studio, err := autotagMatchStudio(path, r.Studio())
if err != nil {
return err
return fmt.Errorf("autotag scraper viaScene: %w", err)
}
tags, err := c.matchTags(path, r.Tag())
tags, err := autotagMatchTags(path, r.Tag())
if err != nil {
return err
return fmt.Errorf("autotag scraper viaScene: %w", err)
}
if len(performers) > 0 || studio != nil || len(tags) > 0 {
@ -129,19 +119,7 @@ func (c *autotagSceneScraper) scrapeByScene(scene *models.Scene) (*models.Scrape
return ret, nil
}
func (c *autotagSceneScraper) scrapeByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
return nil, errNotSupported
}
func (c *autotagSceneScraper) scrapeByURL(url string) (*models.ScrapedScene, error) {
return nil, errNotSupported
}
type autotagGalleryScraper struct {
*autotagScraper
}
func (c *autotagGalleryScraper) scrapeByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
func (s autotagScraper) viaGallery(ctx context.Context, _client *http.Client, gallery *models.Gallery) (*models.ScrapedGallery, error) {
if !gallery.Path.Valid {
// not valid for non-path-based galleries
return nil, nil
@ -150,20 +128,20 @@ func (c *autotagGalleryScraper) scrapeByGallery(gallery *models.Gallery) (*model
var ret *models.ScrapedGallery
// populate performers, studio and tags based on scene path
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
if err := s.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
path := gallery.Path.String
performers, err := c.matchPerformers(path, r.Performer())
performers, err := autotagMatchPerformers(path, r.Performer())
if err != nil {
return err
return fmt.Errorf("autotag scraper viaGallery: %w", err)
}
studio, err := c.matchStudio(path, r.Studio())
studio, err := autotagMatchStudio(path, r.Studio())
if err != nil {
return err
return fmt.Errorf("autotag scraper viaGallery: %w", err)
}
tags, err := c.matchTags(path, r.Tag())
tags, err := autotagMatchTags(path, r.Tag())
if err != nil {
return err
return fmt.Errorf("autotag scraper viaGallery: %w", err)
}
if len(performers) > 0 || studio != nil || len(tags) > 0 {
@ -182,12 +160,36 @@ func (c *autotagGalleryScraper) scrapeByGallery(gallery *models.Gallery) (*model
return ret, nil
}
func (c *autotagGalleryScraper) scrapeByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
return nil, errNotSupported
// supports reports whether the auto-tag scraper can produce content of the
// given type. Only scenes and galleries are handled (see viaScene/viaGallery).
func (s autotagScraper) supports(ty models.ScrapeContentType) bool {
	switch ty {
	case models.ScrapeContentTypeScene:
		return true
	case models.ScrapeContentTypeGallery:
		return true
	}

	return false
}
func (c *autotagGalleryScraper) scrapeByURL(url string) (*models.ScrapedGallery, error) {
return nil, errNotSupported
// supportsURL always returns false: the auto-tag scraper matches on file
// paths, never on URLs.
func (s autotagScraper) supportsURL(url string, ty models.ScrapeContentType) bool {
	return false
}
// spec describes the auto-tag scraper: fragment scraping only, for both
// scenes and galleries.
func (s autotagScraper) spec() models.Scraper {
	supportedScrapes := []models.ScrapeType{
		models.ScrapeTypeFragment,
	}

	return models.Scraper{
		ID:   autoTagScraperID,
		Name: autoTagScraperName,
		Scene: &models.ScraperSpec{
			SupportedScrapes: supportedScrapes,
		},
		Gallery: &models.ScraperSpec{
			SupportedScrapes: supportedScrapes,
		},
	}
}
func getAutoTagScraper(txnManager models.TransactionManager, globalConfig GlobalConfig) scraper {
@ -196,23 +198,5 @@ func getAutoTagScraper(txnManager models.TransactionManager, globalConfig Global
globalConfig: globalConfig,
}
supportedScrapes := []models.ScrapeType{
models.ScrapeTypeFragment,
}
return scraper{
ID: autoTagScraperID,
Spec: &models.Scraper{
ID: autoTagScraperID,
Name: autoTagScraperName,
Scene: &models.ScraperSpec{
SupportedScrapes: supportedScrapes,
},
Gallery: &models.ScraperSpec{
SupportedScrapes: supportedScrapes,
},
},
Scene: &autotagSceneScraper{&base},
Gallery: &autotagGalleryScraper{&base},
}
return base
}

298
pkg/scraper/cache.go Normal file
View file

@ -0,0 +1,298 @@
package scraper
import (
"context"
"crypto/tls"
"fmt"
"net/http"
"os"
"path/filepath"
"sort"
"strings"
"time"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
const (
// scrapeGetTimeout is the timeout for scraper HTTP requests. Includes transfer time.
// We may want to bump this at some point and use local context-timeouts if more granularity
// is needed.
scrapeGetTimeout = time.Second * 60
// maxIdleConnsPerHost is the maximum number of idle connections the HTTP client will
// keep on a per-host basis.
maxIdleConnsPerHost = 8
// maxRedirects defines the maximum number of redirects the HTTP client will follow
maxRedirects = 20
)
// GlobalConfig contains the global scraper options.
type GlobalConfig interface {
GetScraperUserAgent() string
GetScrapersPath() string
GetScraperCDPPath() string
GetScraperCertCheck() bool
}
// isCDPPathHTTP reports whether the configured CDP path is an http or https
// URL (i.e. a remote Chrome DevTools endpoint rather than a local binary).
func isCDPPathHTTP(c GlobalConfig) bool {
	path := c.GetScraperCDPPath()
	for _, scheme := range []string{"http://", "https://"} {
		if strings.HasPrefix(path, scheme) {
			return true
		}
	}
	return false
}
// isCDPPathWS reports whether the configured CDP path is a websocket URL.
func isCDPPathWS(c GlobalConfig) bool {
	const wsScheme = "ws://"
	return strings.HasPrefix(c.GetScraperCDPPath(), wsScheme)
}
// Cache stores the database of scrapers
type Cache struct {
	client       *http.Client       // shared HTTP client used by every scraper in the cache
	scrapers     map[string]scraper // Scraper ID -> Scraper
	globalConfig GlobalConfig       // global scraper options (paths, user agent, cert check)
	txnManager   models.TransactionManager // database access handed to scrapers that need it
}
// newClient creates a scraper-local http client we use throughout the scraper subsystem.
func newClient(gc GlobalConfig) *http.Client {
	// Same policy as net/http's default CheckRedirect, with the limit raised
	// from 10 to maxRedirects.
	redirectPolicy := func(req *http.Request, via []*http.Request) error {
		if len(via) >= maxRedirects {
			return fmt.Errorf("%w: gave up after %d redirects", ErrMaxRedirects, maxRedirects)
		}
		return nil
	}

	transport := &http.Transport{ // ignore insecure certificates
		TLSClientConfig:     &tls.Config{InsecureSkipVerify: !gc.GetScraperCertCheck()},
		MaxIdleConnsPerHost: maxIdleConnsPerHost,
	}

	return &http.Client{
		Transport:     transport,
		Timeout:       scrapeGetTimeout,
		CheckRedirect: redirectPolicy,
	}
}
// NewCache returns a new Cache loading scraper configurations from the
// scraper path provided in the global config object. It returns a new
// instance and an error if the scraper directory could not be loaded.
//
// Scraper configurations are loaded from yml files in the provided scrapers
// directory and any subdirectories.
func NewCache(globalConfig GlobalConfig, txnManager models.TransactionManager) (*Cache, error) {
	scrapers, err := loadScrapers(globalConfig, txnManager)
	if err != nil {
		return nil, err
	}

	// HTTP Client setup
	cache := &Cache{
		client:       newClient(globalConfig),
		globalConfig: globalConfig,
		scrapers:     scrapers,
		txnManager:   txnManager,
	}
	return cache, nil
}
// loadScrapers builds the scraper map: the built-in scrapers plus one group
// scraper per .yml config found under the configured scrapers path (walked
// recursively, following symlinks). Invalid configs are logged and skipped;
// only a failure of the walk itself aborts loading.
func loadScrapers(globalConfig GlobalConfig, txnManager models.TransactionManager) (map[string]scraper, error) {
	path := globalConfig.GetScrapersPath()
	scrapers := make(map[string]scraper)

	// Add built-in scrapers
	freeOnes := getFreeonesScraper(txnManager, globalConfig)
	autoTag := getAutoTagScraper(txnManager, globalConfig)
	scrapers[freeOnes.spec().ID] = freeOnes
	scrapers[autoTag.spec().ID] = autoTag

	logger.Debugf("Reading scraper configs from %s", path)

	err := utils.SymWalk(path, func(fp string, f os.FileInfo, err error) error {
		if filepath.Ext(fp) == ".yml" {
			c, err := loadConfigFromYAMLFile(fp)
			if err != nil {
				// skip broken configs rather than aborting the whole walk
				logger.Errorf("Error loading scraper %s: %v", fp, err)
			} else {
				scraper := newGroupScraper(*c, txnManager, globalConfig)
				scrapers[scraper.spec().ID] = scraper
			}
		}
		return nil
	})

	if err != nil {
		logger.Errorf("Error reading scraper configs: %v", err)
		return nil, err
	}

	return scrapers, nil
}
// ReloadScrapers clears the scraper cache and reloads from the scraper path.
// In the event of an error during loading, the cache will be left empty.
func (c *Cache) ReloadScrapers() error {
	// Drop the current map up front so a failed reload never leaves stale
	// entries behind.
	c.scrapers = nil

	loaded, err := loadScrapers(c.globalConfig, c.txnManager)
	if err != nil {
		return err
	}

	c.scrapers = loaded
	return nil
}
// ListScrapers lists scrapers matching one of the given types.
// Returns a list of scrapers, sorted by their ID.
func (c Cache) ListScrapers(tys []models.ScrapeContentType) []*models.Scraper {
	var ret []*models.Scraper
	for _, s := range c.scrapers {
		for _, ty := range tys {
			if !s.supports(ty) {
				continue
			}
			// include each scraper at most once, regardless of how many
			// requested types it supports
			spec := s.spec()
			ret = append(ret, &spec)
			break
		}
	}

	sort.Slice(ret, func(i, j int) bool { return ret[i].ID < ret[j].ID })

	return ret
}
// GetScraper returns the scraper matching the provided id.
func (c Cache) GetScraper(scraperID string) *models.Scraper {
	s := c.findScraper(scraperID)
	if s == nil {
		return nil
	}

	spec := s.spec()
	return &spec
}
// findScraper looks up the scraper with the given ID, returning nil when it
// is not present in the cache.
func (c Cache) findScraper(scraperID string) scraper {
	if s, ok := c.scrapers[scraperID]; ok {
		return s
	}
	return nil
}
// ScrapeName uses the scraper with the given id to search for content of
// type ty matching the query string. The scraper must both support ty and
// implement name scraping, otherwise an ErrNotSupported-wrapped error is
// returned; an unknown id yields ErrNotFound.
func (c Cache) ScrapeName(ctx context.Context, id, query string, ty models.ScrapeContentType) ([]models.ScrapedContent, error) {
	s := c.findScraper(id)
	switch {
	case s == nil:
		return nil, fmt.Errorf("%w: id %s", ErrNotFound, id)
	case !s.supports(ty):
		return nil, fmt.Errorf("%w: cannot use scraper %s as a %v scraper", ErrNotSupported, id, ty)
	}

	ns, ok := s.(nameScraper)
	if !ok {
		return nil, fmt.Errorf("%w: cannot use scraper %s to scrape by name", ErrNotSupported, id)
	}

	return ns.viaName(ctx, c.client, query, ty)
}
// ScrapeFragment uses the given fragment input to scrape
func (c Cache) ScrapeFragment(ctx context.Context, id string, input Input) (models.ScrapedContent, error) {
	s := c.findScraper(id)
	if s == nil {
		return nil, fmt.Errorf("%w: id %s", ErrNotFound, id)
	}

	switch fs := s.(type) {
	case fragmentScraper:
		content, err := fs.viaFragment(ctx, c.client, input)
		if err != nil {
			return nil, fmt.Errorf("error while fragment scraping with scraper %s: %w", id, err)
		}
		// run post-processing (e.g. image fetching) before handing back
		return c.postScrape(ctx, content)
	default:
		return nil, fmt.Errorf("%w: cannot use scraper %s as a fragment scraper", ErrNotSupported, id)
	}
}
// ScrapeURL scrapes a given url for the given content. Searches the scraper cache
// and picks the first scraper capable of scraping the given url into the desired
// content. Returns the scraped content or an error if the scrape fails.
//
// Scrapers are tried in ascending order of their ID so the scraper chosen
// for a URL is deterministic; iterating the map directly would pick a random
// candidate whenever more than one scraper matches.
func (c Cache) ScrapeURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
	ids := make([]string, 0, len(c.scrapers))
	for id := range c.scrapers {
		ids = append(ids, id)
	}
	sort.Strings(ids)

	for _, id := range ids {
		s := c.scrapers[id]
		if !s.supportsURL(url, ty) {
			continue
		}
		ul, ok := s.(urlScraper)
		if !ok {
			return nil, fmt.Errorf("%w: cannot use scraper %s as an url scraper", ErrNotSupported, s.spec().ID)
		}
		ret, err := ul.viaURL(ctx, c.client, url, ty)
		if err != nil {
			return nil, err
		}

		// the first supporting scraper wins, even when it returns nothing
		if ret == nil {
			return ret, nil
		}

		return c.postScrape(ctx, ret)
	}

	return nil, nil
}
// ScrapeID loads the stored scene or gallery with the given database id
// (according to ty) and passes it to the scraper with the given scraperID.
// The scraped result is post-processed before being returned. Unknown
// scraper ids yield ErrNotFound; unsupported combinations yield
// ErrNotSupported-wrapped errors.
func (c Cache) ScrapeID(ctx context.Context, scraperID string, id int, ty models.ScrapeContentType) (models.ScrapedContent, error) {
	s := c.findScraper(scraperID)
	if s == nil {
		return nil, fmt.Errorf("%w: id %s", ErrNotFound, scraperID)
	}

	if !s.supports(ty) {
		return nil, fmt.Errorf("%w: cannot use scraper %s to scrape %v content", ErrNotSupported, scraperID, ty)
	}

	var ret models.ScrapedContent
	switch ty {
	case models.ScrapeContentTypeScene:
		ss, ok := s.(sceneScraper)
		if !ok {
			return nil, fmt.Errorf("%w: cannot use scraper %s as a scene scraper", ErrNotSupported, scraperID)
		}

		scene, err := getScene(ctx, id, c.txnManager)
		if err != nil {
			return nil, fmt.Errorf("scraper %s: unable to load scene id %v: %w", scraperID, id, err)
		}

		ret, err = ss.viaScene(ctx, c.client, scene)
		if err != nil {
			return nil, fmt.Errorf("scraper %s: %w", scraperID, err)
		}
	case models.ScrapeContentTypeGallery:
		gs, ok := s.(galleryScraper)
		if !ok {
			return nil, fmt.Errorf("%w: cannot use scraper %s as a gallery scraper", ErrNotSupported, scraperID)
		}

		gallery, err := getGallery(ctx, id, c.txnManager)
		if err != nil {
			return nil, fmt.Errorf("scraper %s: unable to load gallery id %v: %w", scraperID, id, err)
		}

		ret, err = gs.viaGallery(ctx, c.client, gallery)
		if err != nil {
			return nil, fmt.Errorf("scraper %s: %w", scraperID, err)
		}
	default:
		// previously fell through and called postScrape(nil); fail loudly for
		// content types that cannot be scraped by stored id
		return nil, fmt.Errorf("%w: cannot scrape %v content by id", ErrNotSupported, ty)
	}

	return c.postScrape(ctx, ret)
}

View file

@ -8,6 +8,7 @@ import (
"path/filepath"
"strings"
"github.com/stashapp/stash/pkg/models"
"gopkg.in/yaml.v2"
)
@ -232,55 +233,118 @@ func loadConfigFromYAMLFile(path string) (*config, error) {
return ret, nil
}
func (c config) supportsPerformers() bool {
return c.PerformerByName != nil || c.PerformerByFragment != nil || len(c.PerformerByURL) > 0
func (c config) spec() models.Scraper {
ret := models.Scraper{
ID: c.ID,
Name: c.Name,
}
performer := models.ScraperSpec{}
if c.PerformerByName != nil {
performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeName)
}
if c.PerformerByFragment != nil {
performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeFragment)
}
if len(c.PerformerByURL) > 0 {
performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeURL)
for _, v := range c.PerformerByURL {
performer.Urls = append(performer.Urls, v.URL...)
}
}
if len(performer.SupportedScrapes) > 0 {
ret.Performer = &performer
}
scene := models.ScraperSpec{}
if c.SceneByFragment != nil {
scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeFragment)
}
if c.SceneByName != nil && c.SceneByQueryFragment != nil {
scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeName)
}
if len(c.SceneByURL) > 0 {
scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeURL)
for _, v := range c.SceneByURL {
scene.Urls = append(scene.Urls, v.URL...)
}
}
if len(scene.SupportedScrapes) > 0 {
ret.Scene = &scene
}
gallery := models.ScraperSpec{}
if c.GalleryByFragment != nil {
gallery.SupportedScrapes = append(gallery.SupportedScrapes, models.ScrapeTypeFragment)
}
if len(c.GalleryByURL) > 0 {
gallery.SupportedScrapes = append(gallery.SupportedScrapes, models.ScrapeTypeURL)
for _, v := range c.GalleryByURL {
gallery.Urls = append(gallery.Urls, v.URL...)
}
}
if len(gallery.SupportedScrapes) > 0 {
ret.Gallery = &gallery
}
movie := models.ScraperSpec{}
if len(c.MovieByURL) > 0 {
movie.SupportedScrapes = append(movie.SupportedScrapes, models.ScrapeTypeURL)
for _, v := range c.MovieByURL {
movie.Urls = append(movie.Urls, v.URL...)
}
}
if len(movie.SupportedScrapes) > 0 {
ret.Movie = &movie
}
return ret
}
func (c config) matchesPerformerURL(url string) bool {
for _, scraper := range c.PerformerByURL {
if scraper.matchesURL(url) {
return true
}
}
return false
}
func (c config) supportsScenes() bool {
return (c.SceneByName != nil && c.SceneByQueryFragment != nil) || c.SceneByFragment != nil || len(c.SceneByURL) > 0
}
func (c config) supportsGalleries() bool {
return c.GalleryByFragment != nil || len(c.GalleryByURL) > 0
}
func (c config) matchesSceneURL(url string) bool {
for _, scraper := range c.SceneByURL {
if scraper.matchesURL(url) {
return true
}
}
return false
}
func (c config) matchesGalleryURL(url string) bool {
for _, scraper := range c.GalleryByURL {
if scraper.matchesURL(url) {
return true
}
}
return false
}
func (c config) supportsMovies() bool {
return len(c.MovieByURL) > 0
}
func (c config) matchesMovieURL(url string) bool {
for _, scraper := range c.MovieByURL {
if scraper.matchesURL(url) {
return true
// supports reports whether this configuration defines at least one scrape
// action (by name, fragment, or URL) for the given content type. Panics on
// an unhandled content type, which indicates a programming error in the
// caller rather than bad configuration.
func (c config) supports(ty models.ScrapeContentType) bool {
	switch ty {
	case models.ScrapeContentTypePerformer:
		return c.PerformerByName != nil || c.PerformerByFragment != nil || len(c.PerformerByURL) > 0
	case models.ScrapeContentTypeScene:
		// scene-by-name search requires both the name query and the query
		// fragment action to be configured
		return (c.SceneByName != nil && c.SceneByQueryFragment != nil) || c.SceneByFragment != nil || len(c.SceneByURL) > 0
	case models.ScrapeContentTypeGallery:
		return c.GalleryByFragment != nil || len(c.GalleryByURL) > 0
	case models.ScrapeContentTypeMovie:
		return len(c.MovieByURL) > 0
	}

	panic("Unhandled ScrapeContentType")
}
func (c config) matchesURL(url string, ty models.ScrapeContentType) bool {
switch ty {
case models.ScrapeContentTypePerformer:
for _, scraper := range c.PerformerByURL {
if scraper.matchesURL(url) {
return true
}
}
case models.ScrapeContentTypeScene:
for _, scraper := range c.SceneByURL {
if scraper.matchesURL(url) {
return true
}
}
case models.ScrapeContentTypeGallery:
for _, scraper := range c.GalleryByURL {
if scraper.matchesURL(url) {
return true
}
}
case models.ScrapeContentTypeMovie:
for _, scraper := range c.MovieByURL {
if scraper.matchesURL(url) {
return true
}
}
}

View file

@ -1,289 +0,0 @@
package scraper
import (
"net/http"
"github.com/stashapp/stash/pkg/models"
)
type configSceneScraper struct {
*configScraper
}
func (c *configSceneScraper) matchesURL(url string) bool {
return c.config.matchesSceneURL(url)
}
func (c *configSceneScraper) scrapeByName(name string) ([]*models.ScrapedScene, error) {
if c.config.SceneByName != nil {
s := c.config.getScraper(*c.config.SceneByName, c.client, c.txnManager, c.globalConfig)
return s.scrapeScenesByName(name)
}
return nil, nil
}
func (c *configSceneScraper) scrapeByScene(scene *models.Scene) (*models.ScrapedScene, error) {
if c.config.SceneByFragment != nil {
s := c.config.getScraper(*c.config.SceneByFragment, c.client, c.txnManager, c.globalConfig)
return s.scrapeSceneByScene(scene)
}
return nil, nil
}
func (c *configSceneScraper) scrapeByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
if c.config.SceneByQueryFragment != nil {
s := c.config.getScraper(*c.config.SceneByQueryFragment, c.client, c.txnManager, c.globalConfig)
return s.scrapeSceneByFragment(scene)
}
return nil, nil
}
func (c *configSceneScraper) scrapeByURL(url string) (*models.ScrapedScene, error) {
for _, scraper := range c.config.SceneByURL {
if scraper.matchesURL(url) {
s := c.config.getScraper(scraper.scraperTypeConfig, c.client, c.txnManager, c.globalConfig)
ret, err := s.scrapeSceneByURL(url)
if err != nil {
return nil, err
}
if ret != nil {
return ret, nil
}
}
}
return nil, nil
}
type configPerformerScraper struct {
*configScraper
}
func (c *configPerformerScraper) matchesURL(url string) bool {
return c.config.matchesPerformerURL(url)
}
func (c *configPerformerScraper) scrapeByName(name string) ([]*models.ScrapedPerformer, error) {
if c.config.PerformerByName != nil {
s := c.config.getScraper(*c.config.PerformerByName, c.client, c.txnManager, c.globalConfig)
return s.scrapePerformersByName(name)
}
return nil, nil
}
func (c *configPerformerScraper) scrapeByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
if c.config.PerformerByFragment != nil {
s := c.config.getScraper(*c.config.PerformerByFragment, c.client, c.txnManager, c.globalConfig)
return s.scrapePerformerByFragment(scrapedPerformer)
}
// try to match against URL if present
if scrapedPerformer.URL != nil && *scrapedPerformer.URL != "" {
return c.scrapeByURL(*scrapedPerformer.URL)
}
return nil, nil
}
func (c *configPerformerScraper) scrapeByURL(url string) (*models.ScrapedPerformer, error) {
for _, scraper := range c.config.PerformerByURL {
if scraper.matchesURL(url) {
s := c.config.getScraper(scraper.scraperTypeConfig, c.client, c.txnManager, c.globalConfig)
ret, err := s.scrapePerformerByURL(url)
if err != nil {
return nil, err
}
if ret != nil {
return ret, nil
}
}
}
return nil, nil
}
type configGalleryScraper struct {
*configScraper
}
func (c *configGalleryScraper) matchesURL(url string) bool {
return c.config.matchesGalleryURL(url)
}
func (c *configGalleryScraper) scrapeByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
if c.config.GalleryByFragment != nil {
s := c.config.getScraper(*c.config.GalleryByFragment, c.client, c.txnManager, c.globalConfig)
return s.scrapeGalleryByGallery(gallery)
}
return nil, nil
}
func (c *configGalleryScraper) scrapeByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
if c.config.GalleryByFragment != nil {
// TODO - this should be galleryByQueryFragment
s := c.config.getScraper(*c.config.GalleryByFragment, c.client, c.txnManager, c.globalConfig)
return s.scrapeGalleryByFragment(gallery)
}
return nil, nil
}
func (c *configGalleryScraper) scrapeByURL(url string) (*models.ScrapedGallery, error) {
for _, scraper := range c.config.GalleryByURL {
if scraper.matchesURL(url) {
s := c.config.getScraper(scraper.scraperTypeConfig, c.client, c.txnManager, c.globalConfig)
ret, err := s.scrapeGalleryByURL(url)
if err != nil {
return nil, err
}
if ret != nil {
return ret, nil
}
}
}
return nil, nil
}
type configMovieScraper struct {
*configScraper
}
func (c *configMovieScraper) matchesURL(url string) bool {
return c.config.matchesMovieURL(url)
}
func (c *configMovieScraper) scrapeByURL(url string) (*models.ScrapedMovie, error) {
for _, scraper := range c.config.MovieByURL {
if scraper.matchesURL(url) {
s := c.config.getScraper(scraper.scraperTypeConfig, c.client, c.txnManager, c.globalConfig)
ret, err := s.scrapeMovieByURL(url)
if err != nil {
return nil, err
}
if ret != nil {
return ret, nil
}
}
}
return nil, nil
}
type configScraper struct {
config config
client *http.Client
txnManager models.TransactionManager
globalConfig GlobalConfig
}
func createScraperFromConfig(c config, client *http.Client, txnManager models.TransactionManager, globalConfig GlobalConfig) scraper {
base := configScraper{
client: client,
config: c,
txnManager: txnManager,
globalConfig: globalConfig,
}
ret := scraper{
ID: c.ID,
Spec: configScraperSpec(c),
}
// only set fields if supported
if c.supportsPerformers() {
ret.Performer = &configPerformerScraper{&base}
}
if c.supportsGalleries() {
ret.Gallery = &configGalleryScraper{&base}
}
if c.supportsMovies() {
ret.Movie = &configMovieScraper{&base}
}
if c.supportsScenes() {
ret.Scene = &configSceneScraper{&base}
}
return ret
}
func configScraperSpec(c config) *models.Scraper {
ret := models.Scraper{
ID: c.ID,
Name: c.Name,
}
performer := models.ScraperSpec{}
if c.PerformerByName != nil {
performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeName)
}
if c.PerformerByFragment != nil {
performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeFragment)
}
if len(c.PerformerByURL) > 0 {
performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeURL)
for _, v := range c.PerformerByURL {
performer.Urls = append(performer.Urls, v.URL...)
}
}
if len(performer.SupportedScrapes) > 0 {
ret.Performer = &performer
}
scene := models.ScraperSpec{}
if c.SceneByFragment != nil {
scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeFragment)
}
if c.SceneByName != nil && c.SceneByQueryFragment != nil {
scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeName)
}
if len(c.SceneByURL) > 0 {
scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeURL)
for _, v := range c.SceneByURL {
scene.Urls = append(scene.Urls, v.URL...)
}
}
if len(scene.SupportedScrapes) > 0 {
ret.Scene = &scene
}
gallery := models.ScraperSpec{}
if c.GalleryByFragment != nil {
gallery.SupportedScrapes = append(gallery.SupportedScrapes, models.ScrapeTypeFragment)
}
if len(c.GalleryByURL) > 0 {
gallery.SupportedScrapes = append(gallery.SupportedScrapes, models.ScrapeTypeURL)
for _, v := range c.GalleryByURL {
gallery.Urls = append(gallery.Urls, v.URL...)
}
}
if len(gallery.SupportedScrapes) > 0 {
ret.Gallery = &gallery
}
movie := models.ScraperSpec{}
if len(c.MovieByURL) > 0 {
movie.SupportedScrapes = append(movie.SupportedScrapes, models.ScrapeTypeURL)
for _, v := range c.MovieByURL {
movie.Urls = append(movie.Urls, v.URL...)
}
}
if len(movie.SupportedScrapes) > 0 {
ret.Movie = &movie
}
return &ret
}

View file

@ -1,7 +1,6 @@
package scraper
import (
"net/http"
"strings"
"github.com/stashapp/stash/pkg/logger"
@ -47,7 +46,7 @@ xPathScrapers:
- regex: \sBio\s*$
with: ""
URL: //link[@rel="alternate" and @hreflang="x-default"]/@href
Twitter: //a[not(starts-with(@href,'https://twitter.com/FreeOnes'))][contains(@href,'twitter.com/')]/@href
Twitter: //a[not(starts-with(@href,'https://twitter.com/FreeOnes'))][contains(@href,'twitter.com/')]/@href
Instagram: //a[contains(@href,'instagram.com/')]/@href
Birthdate:
selector: //span[contains(text(),'Born On')]
@ -124,7 +123,7 @@ xPathScrapers:
# Last updated April 13, 2021
`
func getFreeonesScraper(client *http.Client, txnManager models.TransactionManager, globalConfig GlobalConfig) scraper {
func getFreeonesScraper(txnManager models.TransactionManager, globalConfig GlobalConfig) scraper {
yml := freeonesScraperConfig
c, err := loadConfigFromYAML(FreeonesScraperID, strings.NewReader(yml))
@ -132,5 +131,5 @@ func getFreeonesScraper(client *http.Client, txnManager models.TransactionManage
logger.Fatalf("Error loading builtin freeones scraper: %s", err.Error())
}
return createScraperFromConfig(*c, client, txnManager, globalConfig)
return newGroupScraper(*c, txnManager, globalConfig)
}

141
pkg/scraper/group.go Normal file
View file

@ -0,0 +1,141 @@
package scraper
import (
"context"
"fmt"
"net/http"
"github.com/stashapp/stash/pkg/models"
)
// group bundles a parsed yml scraper configuration with the dependencies
// required to execute its scrape actions.
type group struct {
	config config // parsed yml scraper configuration

	txnManager models.TransactionManager // database access for actions that need it
	globalConf GlobalConfig              // global scraper options
}
// newGroupScraper wraps the given yml configuration as a scraper.
func newGroupScraper(c config, txnManager models.TransactionManager, globalConfig GlobalConfig) scraper {
	g := group{
		config:     c,
		txnManager: txnManager,
		globalConf: globalConfig,
	}
	return g
}
// spec returns the scraper specification derived from the configuration.
func (g group) spec() models.Scraper {
	return g.config.spec()
}
// fragmentScraper finds an appropriate fragment scraper based on input.
// Returns nil when the configuration defines none for the populated input
// field.
func (g group) fragmentScraper(input Input) *scraperTypeConfig {
	if input.Performer != nil {
		return g.config.PerformerByFragment
	}
	if input.Gallery != nil {
		// TODO - this should be galleryByQueryFragment
		return g.config.GalleryByFragment
	}
	if input.Scene != nil {
		return g.config.SceneByQueryFragment
	}
	return nil
}
// viaFragment scrapes using the fragment scraper matching the input. When no
// fragment scraper is configured, it falls back to a performer URL scrape if
// the input carries a URL, and otherwise reports ErrNotSupported.
func (g group) viaFragment(ctx context.Context, client *http.Client, input Input) (models.ScrapedContent, error) {
	stc := g.fragmentScraper(input)
	if stc != nil {
		s := g.config.getScraper(*stc, client, g.txnManager, g.globalConf)
		return s.scrapeByFragment(ctx, input)
	}

	// If there's no performer fragment scraper in the group, we try to use
	// the URL scraper. Check if there's an URL in the input, and then shift
	// to an URL scrape if it's present.
	if p := input.Performer; p != nil && p.URL != nil && *p.URL != "" {
		return g.viaURL(ctx, client, *p.URL, models.ScrapeContentTypePerformer)
	}

	return nil, ErrNotSupported
}
// viaScene scrapes the given scene through the group's SceneByFragment
// action, or reports ErrNotSupported when none is configured.
func (g group) viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*models.ScrapedScene, error) {
	stc := g.config.SceneByFragment
	if stc == nil {
		return nil, ErrNotSupported
	}

	return g.config.getScraper(*stc, client, g.txnManager, g.globalConf).scrapeSceneByScene(ctx, scene)
}
// viaGallery scrapes the given gallery through the group's GalleryByFragment
// action, or reports ErrNotSupported when none is configured.
func (g group) viaGallery(ctx context.Context, client *http.Client, gallery *models.Gallery) (*models.ScrapedGallery, error) {
	stc := g.config.GalleryByFragment
	if stc == nil {
		return nil, ErrNotSupported
	}

	return g.config.getScraper(*stc, client, g.txnManager, g.globalConf).scrapeGalleryByGallery(ctx, gallery)
}
// loadUrlCandidates returns the by-URL scraper configurations for the given
// content type. Panics on an unhandled content type, which indicates a
// programming error in the caller.
func loadUrlCandidates(c config, ty models.ScrapeContentType) []*scrapeByURLConfig {
	switch ty {
	case models.ScrapeContentTypePerformer:
		return c.PerformerByURL
	case models.ScrapeContentTypeScene:
		return c.SceneByURL
	case models.ScrapeContentTypeMovie:
		return c.MovieByURL
	case models.ScrapeContentTypeGallery:
		return c.GalleryByURL
	}

	panic("loadUrlCandidates: unreachable")
}
// viaURL runs the configured by-URL scrapers for the given content type in
// order, returning the first non-nil result for a matching URL. Returns
// (nil, nil) when nothing matches or every matching scraper returns nothing.
func (g group) viaURL(ctx context.Context, client *http.Client, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
	for _, candidate := range loadUrlCandidates(g.config, ty) {
		if !candidate.matchesURL(url) {
			continue
		}

		s := g.config.getScraper(candidate.scraperTypeConfig, client, g.txnManager, g.globalConf)
		content, err := s.scrapeByURL(ctx, url, ty)
		if err != nil {
			return nil, err
		}
		if content != nil {
			return content, nil
		}
	}

	return nil, nil
}
// viaName scrapes content of the given type by name. Only performers and
// scenes can be searched by name, and only when the configuration defines
// the corresponding action; otherwise an ErrNotSupported-wrapped error is
// returned.
func (g group) viaName(ctx context.Context, client *http.Client, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error) {
	var stc *scraperTypeConfig
	switch ty {
	case models.ScrapeContentTypePerformer:
		stc = g.config.PerformerByName
	case models.ScrapeContentTypeScene:
		stc = g.config.SceneByName
	}

	if stc == nil {
		return nil, fmt.Errorf("%w: cannot load %v by name", ErrNotSupported, ty)
	}

	s := g.config.getScraper(*stc, client, g.txnManager, g.globalConf)
	return s.scrapeByName(ctx, name, ty)
}
// supports reports whether the underlying configuration defines any scrape
// action for the given content type.
func (g group) supports(ty models.ScrapeContentType) bool {
	return g.config.supports(ty)
}
// supportsURL reports whether the configuration has a by-URL scraper whose
// URL patterns match the given url for content of type ty.
func (g group) supportsURL(url string, ty models.ScrapeContentType) bool {
	return g.config.matchesURL(url, ty)
}

View file

@ -12,7 +12,7 @@ import (
)
func setPerformerImage(ctx context.Context, client *http.Client, p *models.ScrapedPerformer, globalConfig GlobalConfig) error {
if p == nil || p.Image == nil || !strings.HasPrefix(*p.Image, "http") {
if p.Image == nil || !strings.HasPrefix(*p.Image, "http") {
// nothing to do
return nil
}
@ -31,7 +31,7 @@ func setPerformerImage(ctx context.Context, client *http.Client, p *models.Scrap
func setSceneImage(ctx context.Context, client *http.Client, s *models.ScrapedScene, globalConfig GlobalConfig) error {
// don't try to get the image if it doesn't appear to be a URL
if s == nil || s.Image == nil || !strings.HasPrefix(*s.Image, "http") {
if s.Image == nil || !strings.HasPrefix(*s.Image, "http") {
// nothing to do
return nil
}
@ -48,7 +48,7 @@ func setSceneImage(ctx context.Context, client *http.Client, s *models.ScrapedSc
func setMovieFrontImage(ctx context.Context, client *http.Client, m *models.ScrapedMovie, globalConfig GlobalConfig) error {
// don't try to get the image if it doesn't appear to be a URL
if m == nil || m.FrontImage == nil || !strings.HasPrefix(*m.FrontImage, "http") {
if m.FrontImage == nil || !strings.HasPrefix(*m.FrontImage, "http") {
// nothing to do
return nil
}
@ -65,7 +65,7 @@ func setMovieFrontImage(ctx context.Context, client *http.Client, m *models.Scra
func setMovieBackImage(ctx context.Context, client *http.Client, m *models.ScrapedMovie, globalConfig GlobalConfig) error {
// don't try to get the image if it doesn't appear to be a URL
if m == nil || m.BackImage == nil || !strings.HasPrefix(*m.BackImage, "http") {
if m.BackImage == nil || !strings.HasPrefix(*m.BackImage, "http") {
// nothing to do
return nil
}

View file

@ -3,6 +3,7 @@ package scraper
import (
"context"
"errors"
"fmt"
"io"
"net/http"
"net/url"
@ -74,55 +75,33 @@ func (s *jsonScraper) loadURL(ctx context.Context, url string) (string, error) {
return docStr, err
}
func (s *jsonScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) {
u := replaceURL(url, s.scraper) // allow a URL Replace for performer by URL queries
doc, scraper, err := s.scrapeURL(context.TODO(), u)
func (s *jsonScraper) scrapeByURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
u := replaceURL(url, s.scraper) // allow a URL Replace for url-queries
doc, scraper, err := s.scrapeURL(ctx, u)
if err != nil {
return nil, err
}
q := s.getJsonQuery(doc)
return scraper.scrapePerformer(q)
}
func (s *jsonScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) {
u := replaceURL(url, s.scraper) // allow a URL Replace for scene by URL queries
doc, scraper, err := s.scrapeURL(context.TODO(), u)
if err != nil {
return nil, err
switch ty {
case models.ScrapeContentTypePerformer:
return scraper.scrapePerformer(ctx, q)
case models.ScrapeContentTypeScene:
return scraper.scrapeScene(ctx, q)
case models.ScrapeContentTypeGallery:
return scraper.scrapeGallery(ctx, q)
case models.ScrapeContentTypeMovie:
return scraper.scrapeMovie(ctx, q)
}
q := s.getJsonQuery(doc)
return scraper.scrapeScene(q)
return nil, ErrNotSupported
}
func (s *jsonScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) {
u := replaceURL(url, s.scraper) // allow a URL Replace for gallery by URL queries
doc, scraper, err := s.scrapeURL(context.TODO(), u)
if err != nil {
return nil, err
}
q := s.getJsonQuery(doc)
return scraper.scrapeGallery(q)
}
func (s *jsonScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) {
u := replaceURL(url, s.scraper) // allow a URL Replace for movie by URL queries
doc, scraper, err := s.scrapeURL(context.TODO(), u)
if err != nil {
return nil, err
}
q := s.getJsonQuery(doc)
return scraper.scrapeMovie(q)
}
func (s *jsonScraper) scrapePerformersByName(name string) ([]*models.ScrapedPerformer, error) {
func (s *jsonScraper) scrapeByName(ctx context.Context, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error) {
scraper := s.getJsonScraper()
if scraper == nil {
return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
return nil, fmt.Errorf("%w: name %v", ErrNotFound, s.scraper.Scraper)
}
const placeholder = "{}"
@ -133,46 +112,45 @@ func (s *jsonScraper) scrapePerformersByName(name string) ([]*models.ScrapedPerf
url := s.scraper.QueryURL
url = strings.ReplaceAll(url, placeholder, escapedName)
doc, err := s.loadURL(context.TODO(), url)
doc, err := s.loadURL(ctx, url)
if err != nil {
return nil, err
}
q := s.getJsonQuery(doc)
return scraper.scrapePerformers(q)
}
q.setType(SearchQuery)
func (s *jsonScraper) scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
return nil, errors.New("scrapePerformerByFragment not supported for json scraper")
}
var content []models.ScrapedContent
switch ty {
case models.ScrapeContentTypePerformer:
performers, err := scraper.scrapePerformers(ctx, q)
if err != nil {
return nil, err
}
func (s *jsonScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) {
scraper := s.getJsonScraper()
for _, p := range performers {
content = append(content, p)
}
if scraper == nil {
return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
return content, nil
case models.ScrapeContentTypeScene:
scenes, err := scraper.scrapeScenes(ctx, q)
if err != nil {
return nil, err
}
for _, s := range scenes {
content = append(content, s)
}
return content, nil
}
const placeholder = "{}"
// replace the placeholder string with the URL-escaped name
escapedName := url.QueryEscape(name)
url := s.scraper.QueryURL
url = strings.ReplaceAll(url, placeholder, escapedName)
doc, err := s.loadURL(context.TODO(), url)
if err != nil {
return nil, err
}
q := s.getJsonQuery(doc)
return scraper.scrapeScenes(q)
return nil, ErrNotSupported
}
func (s *jsonScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) {
func (s *jsonScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) {
// construct the URL
queryURL := queryURLParametersFromScene(scene)
if s.scraper.QueryURLReplacements != nil {
@ -186,17 +164,28 @@ func (s *jsonScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedSc
return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
}
doc, err := s.loadURL(context.TODO(), url)
doc, err := s.loadURL(ctx, url)
if err != nil {
return nil, err
}
q := s.getJsonQuery(doc)
return scraper.scrapeScene(q)
return scraper.scrapeScene(ctx, q)
}
func (s *jsonScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
func (s *jsonScraper) scrapeByFragment(ctx context.Context, input Input) (models.ScrapedContent, error) {
switch {
case input.Gallery != nil:
return nil, fmt.Errorf("%w: cannot use a json scraper as a gallery fragment scraper", ErrNotSupported)
case input.Performer != nil:
return nil, fmt.Errorf("%w: cannot use a json scraper as a performer fragment scraper", ErrNotSupported)
case input.Scene == nil:
return nil, fmt.Errorf("%w: scene input is nil", ErrNotSupported)
}
scene := *input.Scene
// construct the URL
queryURL := queryURLParametersFromScrapedScene(scene)
if s.scraper.QueryURLReplacements != nil {
@ -210,17 +199,17 @@ func (s *jsonScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*mo
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
}
doc, err := s.loadURL(context.TODO(), url)
doc, err := s.loadURL(ctx, url)
if err != nil {
return nil, err
}
q := s.getJsonQuery(doc)
return scraper.scrapeScene(q)
return scraper.scrapeScene(ctx, q)
}
func (s *jsonScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
func (s *jsonScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) {
// construct the URL
queryURL := queryURLParametersFromGallery(gallery)
if s.scraper.QueryURLReplacements != nil {
@ -234,18 +223,14 @@ func (s *jsonScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.S
return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
}
doc, err := s.loadURL(context.TODO(), url)
doc, err := s.loadURL(ctx, url)
if err != nil {
return nil, err
}
q := s.getJsonQuery(doc)
return scraper.scrapeGallery(q)
}
func (s *jsonScraper) scrapeGalleryByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
return nil, errors.New("scrapeGalleryByFragment not supported for json scraper")
return scraper.scrapeGallery(ctx, q)
}
func (s *jsonScraper) getJsonQuery(doc string) *jsonQuery {
@ -256,16 +241,24 @@ func (s *jsonScraper) getJsonQuery(doc string) *jsonQuery {
}
type jsonQuery struct {
doc string
scraper *jsonScraper
doc string
scraper *jsonScraper
queryType QueryType
}
func (q *jsonQuery) runQuery(selector string) []string {
func (q *jsonQuery) getType() QueryType {
return q.queryType
}
func (q *jsonQuery) setType(t QueryType) {
q.queryType = t
}
func (q *jsonQuery) runQuery(selector string) ([]string, error) {
value := gjson.Get(q.doc, selector)
if !value.Exists() {
logger.Warnf("Could not find json path '%s' in json object", selector)
return nil
return nil, fmt.Errorf("could not find json path '%s' in json object", selector)
}
var ret []string
@ -278,11 +271,11 @@ func (q *jsonQuery) runQuery(selector string) []string {
ret = append(ret, value.String())
}
return ret
return ret, nil
}
func (q *jsonQuery) subScrape(value string) mappedQuery {
doc, err := q.scraper.loadURL(context.TODO(), value)
func (q *jsonQuery) subScrape(ctx context.Context, value string) mappedQuery {
doc, err := q.scraper.loadURL(ctx, value)
if err != nil {
logger.Warnf("Error getting URL '%s' for sub-scraper: %s", value, err.Error())

View file

@ -1,6 +1,7 @@
package scraper
import (
"context"
"testing"
"gopkg.in/yaml.v2"
@ -81,7 +82,7 @@ jsonScrapers:
doc: json,
}
scrapedPerformer, err := performerScraper.scrapePerformer(q)
scrapedPerformer, err := performerScraper.scrapePerformer(context.Background(), q)
if err != nil {
t.Fatalf("Error scraping performer: %s", err.Error())
}

View file

@ -1,6 +1,7 @@
package scraper
import (
"context"
"errors"
"fmt"
"math"
@ -17,8 +18,10 @@ import (
)
type mappedQuery interface {
runQuery(selector string) []string
subScrape(value string) mappedQuery
runQuery(selector string) ([]string, error)
getType() QueryType
setType(QueryType)
subScrape(ctx context.Context, value string) mappedQuery
}
type commonMappedConfig map[string]string
@ -38,7 +41,7 @@ func (s mappedConfig) applyCommon(c commonMappedConfig, src string) string {
return ret
}
func (s mappedConfig) process(q mappedQuery, common commonMappedConfig) mappedResults {
func (s mappedConfig) process(ctx context.Context, q mappedQuery, common commonMappedConfig) mappedResults {
var ret mappedResults
for k, attrConfig := range s {
@ -51,10 +54,13 @@ func (s mappedConfig) process(q mappedQuery, common commonMappedConfig) mappedRe
selector := attrConfig.Selector
selector = s.applyCommon(common, selector)
found := q.runQuery(selector)
found, err := q.runQuery(selector)
if err != nil {
logger.Warnf("key '%v': %v", k, err)
}
if len(found) > 0 {
result := s.postProcess(q, attrConfig, found)
result := s.postProcess(ctx, q, attrConfig, found)
for i, text := range result {
ret = ret.setKey(i, k, text)
}
@ -65,14 +71,18 @@ func (s mappedConfig) process(q mappedQuery, common commonMappedConfig) mappedRe
return ret
}
func (s mappedConfig) postProcess(q mappedQuery, attrConfig mappedScraperAttrConfig, found []string) []string {
func (s mappedConfig) postProcess(ctx context.Context, q mappedQuery, attrConfig mappedScraperAttrConfig, found []string) []string {
// check if we're concatenating the results into a single result
var ret []string
if attrConfig.hasConcat() {
result := attrConfig.concatenateResults(found)
result = attrConfig.postProcess(result, q)
result = attrConfig.postProcess(ctx, result, q)
if attrConfig.hasSplit() {
results := attrConfig.splitString(result)
// skip cleaning when the query is used for searching
if q.getType() == SearchQuery {
return results
}
results = attrConfig.cleanResults(results)
return results
}
@ -80,14 +90,19 @@ func (s mappedConfig) postProcess(q mappedQuery, attrConfig mappedScraperAttrCon
ret = []string{result}
} else {
for _, text := range found {
text = attrConfig.postProcess(text, q)
text = attrConfig.postProcess(ctx, text, q)
if attrConfig.hasSplit() {
return attrConfig.splitString(text)
}
ret = append(ret, text)
}
// skip cleaning when the query is used for searching
if q.getType() == SearchQuery {
return ret
}
ret = attrConfig.cleanResults(ret)
}
return ret
@ -356,12 +371,12 @@ func (c mappedRegexConfigs) apply(value string) string {
}
type postProcessAction interface {
Apply(value string, q mappedQuery) string
Apply(ctx context.Context, value string, q mappedQuery) string
}
type postProcessParseDate string
func (p *postProcessParseDate) Apply(value string, q mappedQuery) string {
func (p *postProcessParseDate) Apply(ctx context.Context, value string, q mappedQuery) string {
parseDate := string(*p)
const internalDateFormat = "2006-01-02"
@ -393,7 +408,7 @@ func (p *postProcessParseDate) Apply(value string, q mappedQuery) string {
type postProcessSubtractDays bool
func (p *postProcessSubtractDays) Apply(value string, q mappedQuery) string {
func (p *postProcessSubtractDays) Apply(ctx context.Context, value string, q mappedQuery) string {
const internalDateFormat = "2006-01-02"
i, err := strconv.Atoi(value)
@ -409,21 +424,24 @@ func (p *postProcessSubtractDays) Apply(value string, q mappedQuery) string {
type postProcessReplace mappedRegexConfigs
func (c *postProcessReplace) Apply(value string, q mappedQuery) string {
func (c *postProcessReplace) Apply(ctx context.Context, value string, q mappedQuery) string {
replace := mappedRegexConfigs(*c)
return replace.apply(value)
}
type postProcessSubScraper mappedScraperAttrConfig
func (p *postProcessSubScraper) Apply(value string, q mappedQuery) string {
func (p *postProcessSubScraper) Apply(ctx context.Context, value string, q mappedQuery) string {
subScrapeConfig := mappedScraperAttrConfig(*p)
logger.Debugf("Sub-scraping for: %s", value)
ss := q.subScrape(value)
ss := q.subScrape(ctx, value)
if ss != nil {
found := ss.runQuery(subScrapeConfig.Selector)
found, err := ss.runQuery(subScrapeConfig.Selector)
if err != nil {
logger.Warnf("subscrape for '%v': %v", value, err)
}
if len(found) > 0 {
// check if we're concatenating the results into a single result
@ -434,7 +452,7 @@ func (p *postProcessSubScraper) Apply(value string, q mappedQuery) string {
result = found[0]
}
result = subScrapeConfig.postProcess(result, ss)
result = subScrapeConfig.postProcess(ctx, result, ss)
return result
}
}
@ -444,7 +462,7 @@ func (p *postProcessSubScraper) Apply(value string, q mappedQuery) string {
type postProcessMap map[string]string
func (p *postProcessMap) Apply(value string, q mappedQuery) string {
func (p *postProcessMap) Apply(ctx context.Context, value string, q mappedQuery) string {
// return the mapped value if present
m := *p
mapped, ok := m[value]
@ -458,7 +476,7 @@ func (p *postProcessMap) Apply(value string, q mappedQuery) string {
type postProcessFeetToCm bool
func (p *postProcessFeetToCm) Apply(value string, q mappedQuery) string {
func (p *postProcessFeetToCm) Apply(ctx context.Context, value string, q mappedQuery) string {
const foot_in_cm = 30.48
const inch_in_cm = 2.54
@ -482,7 +500,7 @@ func (p *postProcessFeetToCm) Apply(value string, q mappedQuery) string {
type postProcessLbToKg bool
func (p *postProcessLbToKg) Apply(value string, q mappedQuery) string {
func (p *postProcessLbToKg) Apply(ctx context.Context, value string, q mappedQuery) string {
const lb_in_kg = 0.45359237
w, err := strconv.ParseFloat(value, 64)
if err == nil {
@ -684,9 +702,9 @@ func (c mappedScraperAttrConfig) splitString(value string) []string {
return res
}
func (c mappedScraperAttrConfig) postProcess(value string, q mappedQuery) string {
func (c mappedScraperAttrConfig) postProcess(ctx context.Context, value string, q mappedQuery) string {
for _, action := range c.postProcessActions {
value = action.Apply(value, q)
value = action.Apply(ctx, value, q)
}
return value
@ -742,7 +760,7 @@ func (r mappedResults) setKey(index int, key string, value string) mappedResults
return r
}
func (s mappedScraper) scrapePerformer(q mappedQuery) (*models.ScrapedPerformer, error) {
func (s mappedScraper) scrapePerformer(ctx context.Context, q mappedQuery) (*models.ScrapedPerformer, error) {
var ret models.ScrapedPerformer
performerMap := s.Performer
@ -752,14 +770,14 @@ func (s mappedScraper) scrapePerformer(q mappedQuery) (*models.ScrapedPerformer,
performerTagsMap := performerMap.Tags
results := performerMap.process(q, s.Common)
results := performerMap.process(ctx, q, s.Common)
if len(results) > 0 {
results[0].apply(&ret)
// now apply the tags
if performerTagsMap != nil {
logger.Debug(`Processing performer tags:`)
tagResults := performerTagsMap.process(q, s.Common)
tagResults := performerTagsMap.process(ctx, q, s.Common)
for _, p := range tagResults {
tag := &models.ScrapedTag{}
@ -772,7 +790,7 @@ func (s mappedScraper) scrapePerformer(q mappedQuery) (*models.ScrapedPerformer,
return &ret, nil
}
func (s mappedScraper) scrapePerformers(q mappedQuery) ([]*models.ScrapedPerformer, error) {
func (s mappedScraper) scrapePerformers(ctx context.Context, q mappedQuery) ([]*models.ScrapedPerformer, error) {
var ret []*models.ScrapedPerformer
performerMap := s.Performer
@ -780,7 +798,7 @@ func (s mappedScraper) scrapePerformers(q mappedQuery) ([]*models.ScrapedPerform
return nil, nil
}
results := performerMap.process(q, s.Common)
results := performerMap.process(ctx, q, s.Common)
for _, r := range results {
var p models.ScrapedPerformer
r.apply(&p)
@ -790,7 +808,7 @@ func (s mappedScraper) scrapePerformers(q mappedQuery) ([]*models.ScrapedPerform
return ret, nil
}
func (s mappedScraper) processScene(q mappedQuery, r mappedResult) *models.ScrapedScene {
func (s mappedScraper) processScene(ctx context.Context, q mappedQuery, r mappedResult) *models.ScrapedScene {
var ret models.ScrapedScene
sceneScraperConfig := s.Scene
@ -807,13 +825,13 @@ func (s mappedScraper) processScene(q mappedQuery, r mappedResult) *models.Scrap
// process performer tags once
var performerTagResults mappedResults
if scenePerformerTagsMap != nil {
performerTagResults = scenePerformerTagsMap.process(q, s.Common)
performerTagResults = scenePerformerTagsMap.process(ctx, q, s.Common)
}
// now apply the performers and tags
if scenePerformersMap.mappedConfig != nil {
logger.Debug(`Processing scene performers:`)
performerResults := scenePerformersMap.process(q, s.Common)
performerResults := scenePerformersMap.process(ctx, q, s.Common)
for _, p := range performerResults {
performer := &models.ScrapedPerformer{}
@ -831,7 +849,7 @@ func (s mappedScraper) processScene(q mappedQuery, r mappedResult) *models.Scrap
if sceneTagsMap != nil {
logger.Debug(`Processing scene tags:`)
tagResults := sceneTagsMap.process(q, s.Common)
tagResults := sceneTagsMap.process(ctx, q, s.Common)
for _, p := range tagResults {
tag := &models.ScrapedTag{}
@ -842,7 +860,7 @@ func (s mappedScraper) processScene(q mappedQuery, r mappedResult) *models.Scrap
if sceneStudioMap != nil {
logger.Debug(`Processing scene studio:`)
studioResults := sceneStudioMap.process(q, s.Common)
studioResults := sceneStudioMap.process(ctx, q, s.Common)
if len(studioResults) > 0 {
studio := &models.ScrapedStudio{}
@ -853,7 +871,7 @@ func (s mappedScraper) processScene(q mappedQuery, r mappedResult) *models.Scrap
if sceneMoviesMap != nil {
logger.Debug(`Processing scene movies:`)
movieResults := sceneMoviesMap.process(q, s.Common)
movieResults := sceneMoviesMap.process(ctx, q, s.Common)
for _, p := range movieResults {
movie := &models.ScrapedMovie{}
@ -865,7 +883,7 @@ func (s mappedScraper) processScene(q mappedQuery, r mappedResult) *models.Scrap
return &ret
}
func (s mappedScraper) scrapeScenes(q mappedQuery) ([]*models.ScrapedScene, error) {
func (s mappedScraper) scrapeScenes(ctx context.Context, q mappedQuery) ([]*models.ScrapedScene, error) {
var ret []*models.ScrapedScene
sceneScraperConfig := s.Scene
@ -875,16 +893,16 @@ func (s mappedScraper) scrapeScenes(q mappedQuery) ([]*models.ScrapedScene, erro
}
logger.Debug(`Processing scenes:`)
results := sceneMap.process(q, s.Common)
results := sceneMap.process(ctx, q, s.Common)
for _, r := range results {
logger.Debug(`Processing scene:`)
ret = append(ret, s.processScene(q, r))
ret = append(ret, s.processScene(ctx, q, r))
}
return ret, nil
}
func (s mappedScraper) scrapeScene(q mappedQuery) (*models.ScrapedScene, error) {
func (s mappedScraper) scrapeScene(ctx context.Context, q mappedQuery) (*models.ScrapedScene, error) {
var ret models.ScrapedScene
sceneScraperConfig := s.Scene
@ -894,16 +912,16 @@ func (s mappedScraper) scrapeScene(q mappedQuery) (*models.ScrapedScene, error)
}
logger.Debug(`Processing scene:`)
results := sceneMap.process(q, s.Common)
results := sceneMap.process(ctx, q, s.Common)
if len(results) > 0 {
ss := s.processScene(q, results[0])
ss := s.processScene(ctx, q, results[0])
ret = *ss
}
return &ret, nil
}
func (s mappedScraper) scrapeGallery(q mappedQuery) (*models.ScrapedGallery, error) {
func (s mappedScraper) scrapeGallery(ctx context.Context, q mappedQuery) (*models.ScrapedGallery, error) {
var ret models.ScrapedGallery
galleryScraperConfig := s.Gallery
@ -917,14 +935,14 @@ func (s mappedScraper) scrapeGallery(q mappedQuery) (*models.ScrapedGallery, err
galleryStudioMap := galleryScraperConfig.Studio
logger.Debug(`Processing gallery:`)
results := galleryMap.process(q, s.Common)
results := galleryMap.process(ctx, q, s.Common)
if len(results) > 0 {
results[0].apply(&ret)
// now apply the performers and tags
if galleryPerformersMap != nil {
logger.Debug(`Processing gallery performers:`)
performerResults := galleryPerformersMap.process(q, s.Common)
performerResults := galleryPerformersMap.process(ctx, q, s.Common)
for _, p := range performerResults {
performer := &models.ScrapedPerformer{}
@ -935,7 +953,7 @@ func (s mappedScraper) scrapeGallery(q mappedQuery) (*models.ScrapedGallery, err
if galleryTagsMap != nil {
logger.Debug(`Processing gallery tags:`)
tagResults := galleryTagsMap.process(q, s.Common)
tagResults := galleryTagsMap.process(ctx, q, s.Common)
for _, p := range tagResults {
tag := &models.ScrapedTag{}
@ -946,7 +964,7 @@ func (s mappedScraper) scrapeGallery(q mappedQuery) (*models.ScrapedGallery, err
if galleryStudioMap != nil {
logger.Debug(`Processing gallery studio:`)
studioResults := galleryStudioMap.process(q, s.Common)
studioResults := galleryStudioMap.process(ctx, q, s.Common)
if len(studioResults) > 0 {
studio := &models.ScrapedStudio{}
@ -959,7 +977,7 @@ func (s mappedScraper) scrapeGallery(q mappedQuery) (*models.ScrapedGallery, err
return &ret, nil
}
func (s mappedScraper) scrapeMovie(q mappedQuery) (*models.ScrapedMovie, error) {
func (s mappedScraper) scrapeMovie(ctx context.Context, q mappedQuery) (*models.ScrapedMovie, error) {
var ret models.ScrapedMovie
movieScraperConfig := s.Movie
@ -970,13 +988,13 @@ func (s mappedScraper) scrapeMovie(q mappedQuery) (*models.ScrapedMovie, error)
movieStudioMap := movieScraperConfig.Studio
results := movieMap.process(q, s.Common)
results := movieMap.process(ctx, q, s.Common)
if len(results) > 0 {
results[0].apply(&ret)
if movieStudioMap != nil {
logger.Debug(`Processing movie studio:`)
studioResults := movieStudioMap.process(q, s.Common)
studioResults := movieStudioMap.process(ctx, q, s.Common)
if len(studioResults) > 0 {
studio := &models.ScrapedStudio{}

View file

@ -1,6 +1,7 @@
package scraper
import (
"context"
"testing"
"github.com/stretchr/testify/assert"
@ -15,7 +16,7 @@ performerByURL:
xPathScrapers:
performerScraper:
performer:
Name:
Name:
selector: //div/a/@href
postProcess:
- parseDate: Jan 2, 2006
@ -55,6 +56,6 @@ func TestFeetToCM(t *testing.T) {
q := &xpathQuery{}
for _, test := range feetToCMTests {
assert.Equal(t, test.out, pp.Apply(test.in, q))
assert.Equal(t, test.out, pp.Apply(context.Background(), test.in, q))
}
}

View file

@ -0,0 +1,236 @@
package scraper
import (
"context"
"regexp"
"strings"
"github.com/stashapp/stash/pkg/logger"
stash_config "github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/match"
"github.com/stashapp/stash/pkg/models"
)
// postScrape dispatches scraped content to the post-processing routine
// for its concrete type. Both value and non-nil pointer forms of each
// supported content type are handled; nil pointers and unknown types
// pass through unchanged.
func (c Cache) postScrape(ctx context.Context, content models.ScrapedContent) (models.ScrapedContent, error) {
	switch v := content.(type) {
	case models.ScrapedPerformer:
		return c.postScrapePerformer(ctx, v)
	case *models.ScrapedPerformer:
		if v != nil {
			return c.postScrapePerformer(ctx, *v)
		}
	case models.ScrapedScene:
		return c.postScrapeScene(ctx, v)
	case *models.ScrapedScene:
		if v != nil {
			return c.postScrapeScene(ctx, *v)
		}
	case models.ScrapedGallery:
		return c.postScrapeGallery(ctx, v)
	case *models.ScrapedGallery:
		if v != nil {
			return c.postScrapeGallery(ctx, *v)
		}
	case models.ScrapedMovie:
		return c.postScrapeMovie(ctx, v)
	case *models.ScrapedMovie:
		if v != nil {
			return c.postScrapeMovie(ctx, *v)
		}
	}

	// if nothing matches, pass the content through
	return content, nil
}
// postScrapePerformer filters the scraped performer's tags against the
// configured exclusion patterns and attempts to load the performer
// image. Image failures are logged but do not fail the scrape.
func (c Cache) postScrapePerformer(ctx context.Context, p models.ScrapedPerformer) (models.ScrapedContent, error) {
	err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
		processed, err := postProcessTags(r.Tag(), p.Tags)
		if err != nil {
			return err
		}
		p.Tags = processed
		return nil
	})
	if err != nil {
		return nil, err
	}

	// post-process - set the image if applicable
	if err := setPerformerImage(ctx, c.client, &p, c.globalConfig); err != nil {
		logger.Warnf("Could not set image using URL %s: %s", *p.Image, err.Error())
	}

	return p, nil
}
// postScrapeMovie matches the scraped movie's studio against the
// database (when present) and attempts to load the front/back images.
// Image failures are logged but do not fail the scrape.
func (c Cache) postScrapeMovie(ctx context.Context, m models.ScrapedMovie) (models.ScrapedContent, error) {
	if m.Studio != nil {
		matchStudio := func(r models.ReaderRepository) error {
			return match.ScrapedStudio(r.Studio(), m.Studio, nil)
		}
		if err := c.txnManager.WithReadTxn(ctx, matchStudio); err != nil {
			return nil, err
		}
	}

	// post-process - set the images if applicable
	if err := setMovieFrontImage(ctx, c.client, &m, c.globalConfig); err != nil {
		logger.Warnf("could not set front image using URL %s: %v", *m.FrontImage, err)
	}
	if err := setMovieBackImage(ctx, c.client, &m, c.globalConfig); err != nil {
		logger.Warnf("could not set back image using URL %s: %v", *m.BackImage, err)
	}

	return m, nil
}
// postScrapeScenePerformer filters a scene performer's tags against the
// configured exclusion patterns.
//
// NOTE(review): p is received by value, so the `p.Tags = tags`
// assignment below mutates a local copy and is discarded when this
// function returns — the filtered tag list never reaches the caller's
// performer. (StoredID matching performed inside postProcessTags is
// still visible, since the tags themselves are shared pointers.)
// Consider taking *models.ScrapedPerformer instead.
func (c Cache) postScrapeScenePerformer(ctx context.Context, p models.ScrapedPerformer) error {
	if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
		tqb := r.Tag()

		tags, err := postProcessTags(tqb, p.Tags)
		if err != nil {
			return err
		}
		p.Tags = tags

		return nil
	}); err != nil {
		return err
	}

	return nil
}
// postScrapeScene post-processes a scraped scene: it matches performers,
// movies, tags and studio against the database, filters both the scene's
// and each performer's tags against the configured exclusion patterns,
// and attempts to load the scene image. Image failures are logged but do
// not fail the scrape.
func (c Cache) postScrapeScene(ctx context.Context, scene models.ScrapedScene) (models.ScrapedContent, error) {
	if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
		pqb := r.Performer()
		mqb := r.Movie()
		tqb := r.Tag()
		sqb := r.Studio()

		for _, p := range scene.Performers {
			if p == nil {
				continue
			}

			// filter the performer's tags in place. The former
			// postScrapeScenePerformer helper received the performer by
			// value, so its filtered tag list was silently discarded.
			tags, err := postProcessTags(tqb, p.Tags)
			if err != nil {
				return err
			}
			p.Tags = tags

			if err := match.ScrapedPerformer(pqb, p, nil); err != nil {
				return err
			}
		}

		for _, m := range scene.Movies {
			if err := match.ScrapedMovie(mqb, m); err != nil {
				return err
			}
		}

		tags, err := postProcessTags(tqb, scene.Tags)
		if err != nil {
			return err
		}
		scene.Tags = tags

		if scene.Studio != nil {
			if err := match.ScrapedStudio(sqb, scene.Studio, nil); err != nil {
				return err
			}
		}

		return nil
	}); err != nil {
		return nil, err
	}

	// post-process - set the image if applicable
	if err := setSceneImage(ctx, c.client, &scene, c.globalConfig); err != nil {
		logger.Warnf("Could not set image using URL %s: %v", *scene.Image, err)
	}

	return scene, nil
}
// postScrapeGallery matches the scraped gallery's performers, tags and
// studio against the database and filters its tags against the
// configured exclusion patterns.
func (c Cache) postScrapeGallery(ctx context.Context, g models.ScrapedGallery) (models.ScrapedContent, error) {
	process := func(r models.ReaderRepository) error {
		pqb := r.Performer()
		tqb := r.Tag()
		sqb := r.Studio()

		for _, p := range g.Performers {
			if err := match.ScrapedPerformer(pqb, p, nil); err != nil {
				return err
			}
		}

		tags, err := postProcessTags(tqb, g.Tags)
		if err != nil {
			return err
		}
		g.Tags = tags

		if g.Studio == nil {
			return nil
		}
		return match.ScrapedStudio(sqb, g.Studio, nil)
	}

	if err := c.txnManager.WithReadTxn(ctx, process); err != nil {
		return nil, err
	}

	return g, nil
}
// postProcessTags filters scraped tags against the user-configured
// exclusion patterns and matches the survivors against the database.
//
// Both the patterns and the tag names are lower-cased before matching,
// making the exclusion case-insensitive. Invalid patterns are logged and
// skipped rather than aborting the scrape. Tags matching any pattern are
// dropped and reported in a single log line; the remaining tags are run
// through match.ScrapedTag and returned.
func postProcessTags(tqb models.TagReader, scrapedTags []*models.ScrapedTag) ([]*models.ScrapedTag, error) {
	excludePatterns := stash_config.GetInstance().GetScraperExcludeTagPatterns()

	excludeRegexps := make([]*regexp.Regexp, 0, len(excludePatterns))
	for _, excludePattern := range excludePatterns {
		reg, err := regexp.Compile(strings.ToLower(excludePattern))
		if err != nil {
			// a bad pattern should not abort the scrape; skip it
			logger.Errorf("Invalid tag exclusion pattern: %v", err)
			continue
		}
		excludeRegexps = append(excludeRegexps, reg)
	}

	var ret []*models.ScrapedTag
	var ignoredTags []string

ScrapeTag:
	for _, t := range scrapedTags {
		for _, reg := range excludeRegexps {
			if reg.MatchString(strings.ToLower(t.Name)) {
				ignoredTags = append(ignoredTags, t.Name)
				continue ScrapeTag
			}
		}

		if err := match.ScrapedTag(tqb, t); err != nil {
			return nil, err
		}
		ret = append(ret, t)
	}

	if len(ignoredTags) > 0 {
		logger.Infof("Scraping ignored tags: %s", strings.Join(ignoredTags, ", "))
	}

	return ret, nil
}

View file

@ -1,51 +1,86 @@
package scraper
import "github.com/stashapp/stash/pkg/models"
import (
"context"
"errors"
"net/http"
type urlMatcher interface {
matchesURL(url string) bool
"github.com/stashapp/stash/pkg/models"
)
var (
// ErrMaxRedirects is returned if the max number of HTTP redirects are reached.
ErrMaxRedirects = errors.New("maximum number of HTTP redirects reached")
// ErrNotFound is returned when an entity isn't found
ErrNotFound = errors.New("scraper not found")
// ErrNotSupported is returned when a given invocation isn't supported, and there
// is a guard function which should be able to guard against it.
ErrNotSupported = errors.New("scraper operation not supported")
)
// Input coalesces inputs of different types into a single structure.
// The system expects one of these to be set, and the remaining to be
// set to nil.
type Input struct {
Performer *models.ScrapedPerformerInput
Scene *models.ScrapedSceneInput
Gallery *models.ScrapedGalleryInput
}
type performerScraper interface {
scrapeByName(name string) ([]*models.ScrapedPerformer, error)
scrapeByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error)
scrapeByURL(url string) (*models.ScrapedPerformer, error)
// simple type definitions that can help customize
// actions per query
type QueryType int
const (
// for now only SearchQuery is needed
SearchQuery QueryType = iota + 1
)
// scraper is the generic interface to the scraper subsystems
type scraper interface {
// spec returns the scraper specification, suitable for graphql
spec() models.Scraper
// supports tests if the scraper supports a given content type
supports(models.ScrapeContentType) bool
// supportsURL tests if the scraper supports scrapes of a given url, producing a given content type
supportsURL(url string, ty models.ScrapeContentType) bool
}
// urlScraper is the interface of scrapers supporting url loads
type urlScraper interface {
scraper
viaURL(ctx context.Context, client *http.Client, url string, ty models.ScrapeContentType) (models.ScrapedContent, error)
}
// nameScraper is the interface of scrapers supporting name loads
type nameScraper interface {
scraper
viaName(ctx context.Context, client *http.Client, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error)
}
// fragmentScraper is the interface of scrapers supporting fragment loads
type fragmentScraper interface {
scraper
viaFragment(ctx context.Context, client *http.Client, input Input) (models.ScrapedContent, error)
}
// sceneScraper is a scraper which supports scene scrapes with
// scene data as the input.
type sceneScraper interface {
scrapeByName(name string) ([]*models.ScrapedScene, error)
scrapeByScene(scene *models.Scene) (*models.ScrapedScene, error)
scrapeByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error)
scrapeByURL(url string) (*models.ScrapedScene, error)
scraper
viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*models.ScrapedScene, error)
}
// galleryScraper is a scraper which supports gallery scrapes with
// gallery data as the input.
type galleryScraper interface {
scrapeByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error)
scrapeByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error)
scrapeByURL(url string) (*models.ScrapedGallery, error)
}
type movieScraper interface {
scrapeByURL(url string) (*models.ScrapedMovie, error)
}
type scraper struct {
ID string
Spec *models.Scraper
Performer performerScraper
Scene sceneScraper
Gallery galleryScraper
Movie movieScraper
}
func matchesURL(maybeURLMatcher interface{}, url string) bool {
if maybeURLMatcher != nil {
matcher, ok := maybeURLMatcher.(urlMatcher)
if ok {
return matcher.matchesURL(url)
}
}
return false
scraper
viaGallery(ctx context.Context, client *http.Client, gallery *models.Gallery) (*models.ScrapedGallery, error)
}

View file

@ -1,660 +0,0 @@
package scraper
import (
"context"
"crypto/tls"
"errors"
"fmt"
"net/http"
"os"
"path/filepath"
"regexp"
"strings"
"time"
"github.com/stashapp/stash/pkg/logger"
stash_config "github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/match"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
var ErrMaxRedirects = errors.New("maximum number of HTTP redirects reached")
const (
// scrapeGetTimeout is the timeout for scraper HTTP requests. Includes transfer time.
// We may want to bump this at some point and use local context-timeouts if more granularity
// is needed.
scrapeGetTimeout = time.Second * 60
// maxIdleConnsPerHost is the maximum number of idle connections the HTTP client will
// keep on a per-host basis.
maxIdleConnsPerHost = 8
// maxRedirects defines the maximum number of redirects the HTTP client will follow
maxRedirects = 20
)
// GlobalConfig contains the global scraper options.
type GlobalConfig interface {
	// GetScraperUserAgent returns the User-Agent string to use for scraper HTTP requests.
	GetScraperUserAgent() string
	// GetScrapersPath returns the directory searched (recursively) for .yml scraper configs.
	GetScrapersPath() string
	// GetScraperCDPPath returns the Chrome DevTools Protocol path; may be an
	// http(s):// or ws:// endpoint, or presumably a local browser path — see
	// isCDPPathHTTP / isCDPPathWS for how it is interpreted.
	GetScraperCDPPath() string
	// GetScraperCertCheck reports whether TLS certificates should be verified;
	// when false, certificate errors are ignored (see newClient).
	GetScraperCertCheck() bool
}
// isCDPPathHTTP reports whether the configured CDP path is an HTTP(S) endpoint.
func isCDPPathHTTP(c GlobalConfig) bool {
	path := c.GetScraperCDPPath()
	return strings.HasPrefix(path, "http://") || strings.HasPrefix(path, "https://")
}
// isCDPPathWS reports whether the configured CDP path is a websocket endpoint.
func isCDPPathWS(c GlobalConfig) bool {
	return strings.HasPrefix(c.GetScraperCDPPath(), "ws://")
}
// Cache stores scraper details.
type Cache struct {
	client       *http.Client              // shared HTTP client used by all scrapers
	scrapers     []scraper                 // loaded scraper configurations
	globalConfig GlobalConfig              // global scraper options
	txnManager   models.TransactionManager // database access for post-scrape matching
}
// newClient creates a scraper-local http client we use throughout the scraper subsystem.
func newClient(gc GlobalConfig) *http.Client {
client := &http.Client{
Transport: &http.Transport{ // ignore insecure certificates
TLSClientConfig: &tls.Config{InsecureSkipVerify: !gc.GetScraperCertCheck()},
MaxIdleConnsPerHost: maxIdleConnsPerHost,
},
Timeout: scrapeGetTimeout,
// defaultCheckRedirect code with max changed from 10 to maxRedirects
CheckRedirect: func(req *http.Request, via []*http.Request) error {
if len(via) >= maxRedirects {
return fmt.Errorf("after %d redirects: %w", maxRedirects, ErrMaxRedirects)
}
return nil
},
}
return client
}
// NewCache returns a new Cache loading scraper configurations from the
// scraper path provided in the global config object. It returns a new
// instance and an error if the scraper directory could not be loaded.
//
// Scraper configurations are loaded from yml files in the provided scrapers
// directory and any subdirectories.
// NewCache returns a new Cache loading scraper configurations from the
// scraper path provided in the global config object. It returns a new
// instance and an error if the scraper directory could not be loaded.
//
// Scraper configurations are loaded from yml files in the provided scrapers
// directory and any subdirectories.
func NewCache(globalConfig GlobalConfig, txnManager models.TransactionManager) (*Cache, error) {
	// HTTP Client setup
	httpClient := newClient(globalConfig)

	loaded, err := loadScrapers(globalConfig, httpClient, txnManager)
	if err != nil {
		return nil, err
	}

	c := &Cache{
		client:       httpClient,
		globalConfig: globalConfig,
		scrapers:     loaded,
		txnManager:   txnManager,
	}
	return c, nil
}
// loadScrapers discovers and loads all scraper configurations beneath the
// configured scrapers path. The built-in freeones and auto-tag scrapers are
// always included. Individual config load failures are logged and skipped so
// one broken yml file does not prevent the rest from loading.
func loadScrapers(globalConfig GlobalConfig, client *http.Client, txnManager models.TransactionManager) ([]scraper, error) {
	path := globalConfig.GetScrapersPath()
	scrapers := make([]scraper, 0)

	logger.Debugf("Reading scraper configs from %s", path)

	scraperFiles := []string{}
	err := utils.SymWalk(path, func(fp string, f os.FileInfo, err error) error {
		// Previously walk errors were silently ignored; log them so unreadable
		// directories are visible, but keep the walk best-effort.
		if err != nil {
			logger.Errorf("Error walking scraper config path %s: %v", fp, err)
			return nil
		}
		if filepath.Ext(fp) == ".yml" {
			scraperFiles = append(scraperFiles, fp)
		}
		return nil
	})

	if err != nil {
		logger.Errorf("Error reading scraper configs: %s", err.Error())
		return nil, err
	}

	// add built-in freeones scraper
	scrapers = append(scrapers, getFreeonesScraper(client, txnManager, globalConfig), getAutoTagScraper(txnManager, globalConfig))

	for _, file := range scraperFiles {
		c, err := loadConfigFromYAMLFile(file)
		if err != nil {
			logger.Errorf("Error loading scraper %s: %s", file, err.Error())
		} else {
			scraper := createScraperFromConfig(*c, client, txnManager, globalConfig)
			scrapers = append(scrapers, scraper)
		}
	}

	return scrapers, nil
}
// ReloadScrapers clears the scraper cache and reloads from the scraper path.
// In the event of an error during loading, the cache will be left empty.
// ReloadScrapers clears the scraper cache and reloads from the scraper path.
// In the event of an error during loading, the cache will be left empty.
func (c *Cache) ReloadScrapers() error {
	// Drop the current set first so a failed reload leaves the cache empty.
	c.scrapers = nil

	loaded, err := loadScrapers(c.globalConfig, c.client, c.txnManager)
	if err != nil {
		return err
	}

	c.scrapers = loaded
	return nil
}
// TODO - don't think this is needed

// UpdateConfig updates the global config for the cache. If the scraper path
// has changed, ReloadScrapers will need to be called separately.
func (c *Cache) UpdateConfig(globalConfig GlobalConfig) {
	c.globalConfig = globalConfig
}
// ListPerformerScrapers returns a list of scrapers that are capable of
// scraping performers.
// ListPerformerScrapers returns a list of scrapers that are capable of
// scraping performers.
func (c Cache) ListPerformerScrapers() []*models.Scraper {
	var specs []*models.Scraper
	for _, s := range c.scrapers {
		// only include scrapers with performer support
		if s.Performer == nil {
			continue
		}
		specs = append(specs, s.Spec)
	}
	return specs
}
// ListSceneScrapers returns a list of scrapers that are capable of
// scraping scenes.
// ListSceneScrapers returns a list of scrapers that are capable of
// scraping scenes.
func (c Cache) ListSceneScrapers() []*models.Scraper {
	var specs []*models.Scraper
	for _, s := range c.scrapers {
		// only include scrapers with scene support
		if s.Scene == nil {
			continue
		}
		specs = append(specs, s.Spec)
	}
	return specs
}
// ListGalleryScrapers returns a list of scrapers that are capable of
// scraping galleries.
// ListGalleryScrapers returns a list of scrapers that are capable of
// scraping galleries.
func (c Cache) ListGalleryScrapers() []*models.Scraper {
	var specs []*models.Scraper
	for _, s := range c.scrapers {
		// only include scrapers with gallery support
		if s.Gallery == nil {
			continue
		}
		specs = append(specs, s.Spec)
	}
	return specs
}
// ListMovieScrapers returns a list of scrapers that are capable of
// scraping movies.
func (c Cache) ListMovieScrapers() []*models.Scraper {
	var ret []*models.Scraper
	for _, s := range c.scrapers {
		// filter on type
		if s.Movie != nil {
			ret = append(ret, s.Spec)
		}
	}

	return ret
}
// GetScraper returns the scraper matching the provided id.
// GetScraper returns the scraper matching the provided id, or nil if no
// scraper with that id exists.
func (c Cache) GetScraper(scraperID string) *models.Scraper {
	if s := c.findScraper(scraperID); s != nil {
		return s.Spec
	}
	return nil
}
// findScraper looks up the scraper with the given id, returning a pointer to
// a copy of it, or nil when no scraper matches.
func (c Cache) findScraper(scraperID string) *scraper {
	for i := range c.scrapers {
		if c.scrapers[i].ID == scraperID {
			// return a pointer to a copy, matching the original
			// range-variable semantics
			found := c.scrapers[i]
			return &found
		}
	}
	return nil
}
// ScrapePerformerList uses the scraper with the provided ID to query for
// performers using the provided query string. It returns a list of
// scraped performer data.
// ScrapePerformerList uses the scraper with the provided ID to query for
// performers using the provided query string. It returns a list of
// scraped performer data, or an error if the scraper does not exist or
// does not support performer scraping.
func (c Cache) ScrapePerformerList(scraperID string, query string) ([]*models.ScrapedPerformer, error) {
	// find scraper with the provided id
	s := c.findScraper(scraperID)
	if s == nil || s.Performer == nil {
		// error strings are lowercase per Go convention; was "Scraper with ID " + id
		return nil, fmt.Errorf("scraper with ID %s not found", scraperID)
	}
	return s.Performer.scrapeByName(query)
}
// ScrapePerformer uses the scraper with the provided ID to scrape a
// performer using the provided performer fragment.
// ScrapePerformer uses the scraper with the provided ID to scrape a
// performer using the provided performer fragment. The result is
// post-processed (tag matching, image fetch) before being returned.
func (c Cache) ScrapePerformer(scraperID string, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
	// find scraper with the provided id
	s := c.findScraper(scraperID)
	if s == nil || s.Performer == nil {
		// error strings are lowercase per Go convention; was "Scraper with ID " + id
		return nil, fmt.Errorf("scraper with ID %s not found", scraperID)
	}

	ret, err := s.Performer.scrapeByFragment(scrapedPerformer)
	if err != nil {
		return nil, err
	}

	// a scraper may return (nil, nil); only post-process a real result
	if ret != nil {
		if err := c.postScrapePerformer(context.TODO(), ret); err != nil {
			return nil, err
		}
	}

	return ret, nil
}
// ScrapePerformerURL uses the first scraper it finds that matches the URL
// provided to scrape a performer. If no scrapers are found that matches
// the URL, then nil is returned.
// ScrapePerformerURL uses the first scraper it finds that matches the URL
// provided to scrape a performer. If no scrapers are found that matches
// the URL, then nil is returned.
func (c Cache) ScrapePerformerURL(url string) (*models.ScrapedPerformer, error) {
	for _, s := range c.scrapers {
		if !matchesURL(s.Performer, url) {
			continue
		}

		performer, err := s.Performer.scrapeByURL(url)
		if err != nil {
			return nil, err
		}

		// only post-process a non-nil result
		if performer != nil {
			if err := c.postScrapePerformer(context.TODO(), performer); err != nil {
				return nil, err
			}
		}

		return performer, nil
	}

	return nil, nil
}
// postScrapePerformer post-processes a scraped performer: matches scraped
// tags against the database and fetches the performer image if one was set.
// An image-fetch failure is logged but does not fail the scrape.
func (c Cache) postScrapePerformer(ctx context.Context, ret *models.ScrapedPerformer) error {
	if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
		tqb := r.Tag()

		tags, err := postProcessTags(tqb, ret.Tags)
		if err != nil {
			return err
		}
		ret.Tags = tags

		return nil
	}); err != nil {
		return err
	}

	// post-process - set the image if applicable
	if err := setPerformerImage(ctx, c.client, ret, c.globalConfig); err != nil {
		// ret.Image may be nil; guard the dereference so a failed fetch
		// cannot panic while logging the warning.
		imageURL := ""
		if ret.Image != nil {
			imageURL = *ret.Image
		}
		logger.Warnf("Could not set image using URL %s: %s", imageURL, err.Error())
	}

	return nil
}
// postScrapeScenePerformer matches the tags of a performer scraped as part of
// a scene against the database, replacing ret.Tags with the processed list.
func (c Cache) postScrapeScenePerformer(ret *models.ScrapedPerformer) error {
	return c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
		processed, err := postProcessTags(r.Tag(), ret.Tags)
		if err != nil {
			return err
		}
		ret.Tags = processed
		return nil
	})
}
// postScrapeScene post-processes a scraped scene: matches scraped performers,
// movies, tags and studio against the database, then fetches the scene image
// if one was set. An image-fetch failure is logged but does not fail the
// scrape.
func (c Cache) postScrapeScene(ctx context.Context, ret *models.ScrapedScene) error {
	if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
		pqb := r.Performer()
		mqb := r.Movie()
		tqb := r.Tag()
		sqb := r.Studio()

		for _, p := range ret.Performers {
			if err := c.postScrapeScenePerformer(p); err != nil {
				return err
			}

			if err := match.ScrapedPerformer(pqb, p, nil); err != nil {
				return err
			}
		}

		for _, p := range ret.Movies {
			err := match.ScrapedMovie(mqb, p)
			if err != nil {
				return err
			}
		}

		tags, err := postProcessTags(tqb, ret.Tags)
		if err != nil {
			return err
		}
		ret.Tags = tags

		if ret.Studio != nil {
			err := match.ScrapedStudio(sqb, ret.Studio, nil)
			if err != nil {
				return err
			}
		}

		return nil
	}); err != nil {
		return err
	}

	// post-process - set the image if applicable
	if err := setSceneImage(ctx, c.client, ret, c.globalConfig); err != nil {
		// ret.Image may be nil; guard the dereference so a failed fetch
		// cannot panic while logging the warning.
		imageURL := ""
		if ret.Image != nil {
			imageURL = *ret.Image
		}
		logger.Warnf("Could not set image using URL %s: %v", imageURL, err)
	}

	return nil
}
// postScrapeGallery matches a scraped gallery's performers, tags and studio
// against the database, updating ret in place.
func (c Cache) postScrapeGallery(ret *models.ScrapedGallery) error {
	return c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
		pqb := r.Performer()
		tqb := r.Tag()
		sqb := r.Studio()

		for _, p := range ret.Performers {
			if err := match.ScrapedPerformer(pqb, p, nil); err != nil {
				return err
			}
		}

		processed, err := postProcessTags(tqb, ret.Tags)
		if err != nil {
			return err
		}
		ret.Tags = processed

		if ret.Studio != nil {
			if err := match.ScrapedStudio(sqb, ret.Studio, nil); err != nil {
				return err
			}
		}

		return nil
	})
}
// ScrapeScene uses the scraper with the provided ID to scrape a scene using existing data.
func (c Cache) ScrapeScene(scraperID string, sceneID int) (*models.ScrapedScene, error) {
// find scraper with the provided id
s := c.findScraper(scraperID)
if s != nil && s.Scene != nil {
// get scene from id
scene, err := getScene(sceneID, c.txnManager)
if err != nil {
return nil, err
}
ret, err := s.Scene.scrapeByScene(scene)
if err != nil {
return nil, err
}
if ret != nil {
err = c.postScrapeScene(context.TODO(), ret)
if err != nil {
return nil, err
}
}
return ret, nil
}
return nil, errors.New("Scraper with ID " + scraperID + " not found")
}
// ScrapeSceneQuery uses the scraper with the provided ID to query for
// scenes using the provided query string. It returns a list of
// scraped scene data.
// ScrapeSceneQuery uses the scraper with the provided ID to query for
// scenes using the provided query string. It returns a list of
// scraped scene data, or an error if the scraper does not exist or does
// not support scene scraping.
func (c Cache) ScrapeSceneQuery(scraperID string, query string) ([]*models.ScrapedScene, error) {
	// find scraper with the provided id
	s := c.findScraper(scraperID)
	if s == nil || s.Scene == nil {
		// error strings are lowercase per Go convention; was "Scraper with ID " + id
		return nil, fmt.Errorf("scraper with ID %s not found", scraperID)
	}
	return s.Scene.scrapeByName(query)
}
// ScrapeSceneFragment uses the scraper with the provided ID to scrape a scene.
// ScrapeSceneFragment uses the scraper with the provided ID to scrape a scene
// from the supplied scene fragment. The result is post-processed before
// being returned.
func (c Cache) ScrapeSceneFragment(scraperID string, scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
	// find scraper with the provided id
	s := c.findScraper(scraperID)
	if s == nil || s.Scene == nil {
		// error strings are lowercase per Go convention; was "Scraper with ID " + id
		return nil, fmt.Errorf("scraper with ID %s not found", scraperID)
	}

	ret, err := s.Scene.scrapeByFragment(scene)
	if err != nil {
		return nil, err
	}

	// a scraper may return (nil, nil); only post-process a real result
	if ret != nil {
		if err := c.postScrapeScene(context.TODO(), ret); err != nil {
			return nil, err
		}
	}

	return ret, nil
}
// ScrapeSceneURL uses the first scraper it finds that matches the URL
// provided to scrape a scene. If no scrapers are found that matches
// the URL, then nil is returned.
// ScrapeSceneURL uses the first scraper it finds that matches the URL
// provided to scrape a scene. If no scrapers are found that matches
// the URL, then nil is returned.
func (c Cache) ScrapeSceneURL(url string) (*models.ScrapedScene, error) {
	for _, s := range c.scrapers {
		if matchesURL(s.Scene, url) {
			ret, err := s.Scene.scrapeByURL(url)
			if err != nil {
				return nil, err
			}

			// A scraper may return (nil, nil); the original passed ret to
			// postScrapeScene unconditionally, which would panic ranging over
			// nil fields. Guard it, mirroring ScrapeScene/ScrapeSceneFragment.
			if ret != nil {
				if err := c.postScrapeScene(context.TODO(), ret); err != nil {
					return nil, err
				}
			}

			return ret, nil
		}
	}

	return nil, nil
}
// ScrapeGallery uses the scraper with the provided ID to scrape a gallery using existing data.
func (c Cache) ScrapeGallery(scraperID string, galleryID int) (*models.ScrapedGallery, error) {
s := c.findScraper(scraperID)
if s != nil && s.Gallery != nil {
// get gallery from id
gallery, err := getGallery(galleryID, c.txnManager)
if err != nil {
return nil, err
}
ret, err := s.Gallery.scrapeByGallery(gallery)
if err != nil {
return nil, err
}
if ret != nil {
err = c.postScrapeGallery(ret)
if err != nil {
return nil, err
}
}
return ret, nil
}
return nil, errors.New("Scraped with ID " + scraperID + " not found")
}
// ScrapeGalleryFragment uses the scraper with the provided ID to scrape a gallery.
// ScrapeGalleryFragment uses the scraper with the provided ID to scrape a
// gallery from the supplied gallery fragment. The result is post-processed
// before being returned.
func (c Cache) ScrapeGalleryFragment(scraperID string, gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
	s := c.findScraper(scraperID)
	if s == nil || s.Gallery == nil {
		// fixes the "Scraped with ID" typo and lowercases per Go convention
		return nil, fmt.Errorf("scraper with ID %s not found", scraperID)
	}

	ret, err := s.Gallery.scrapeByFragment(gallery)
	if err != nil {
		return nil, err
	}

	// a scraper may return (nil, nil); only post-process a real result
	if ret != nil {
		if err := c.postScrapeGallery(ret); err != nil {
			return nil, err
		}
	}

	return ret, nil
}
// ScrapeGalleryURL uses the first scraper it finds that matches the URL
// provided to scrape a scene. If no scrapers are found that matches
// the URL, then nil is returned.
// ScrapeGalleryURL uses the first scraper it finds that matches the URL
// provided to scrape a gallery. If no scrapers are found that matches
// the URL, then nil is returned.
func (c Cache) ScrapeGalleryURL(url string) (*models.ScrapedGallery, error) {
	for _, s := range c.scrapers {
		if matchesURL(s.Gallery, url) {
			ret, err := s.Gallery.scrapeByURL(url)
			if err != nil {
				return nil, err
			}

			// A scraper may return (nil, nil); the original passed ret to
			// postScrapeGallery unconditionally, which would panic ranging
			// over nil fields. Guard it, mirroring ScrapeGallery.
			if ret != nil {
				if err := c.postScrapeGallery(ret); err != nil {
					return nil, err
				}
			}

			return ret, nil
		}
	}

	return nil, nil
}
// ScrapeMovieURL uses the first scraper it finds that matches the URL
// provided to scrape a movie. If no scrapers are found that matches
// the URL, then nil is returned.
// ScrapeMovieURL uses the first scraper it finds that matches the URL
// provided to scrape a movie. If no scrapers are found that matches
// the URL, then nil is returned. Studio matching and image fetching are
// applied as post-processing; image-fetch failures are logged only.
func (c Cache) ScrapeMovieURL(url string) (*models.ScrapedMovie, error) {
	for _, s := range c.scrapers {
		if s.Movie != nil && matchesURL(s.Movie, url) {
			ret, err := s.Movie.scrapeByURL(url)
			if err != nil {
				return nil, err
			}

			// A scraper may return (nil, nil); the original dereferenced
			// ret.Studio unconditionally and would panic in that case.
			if ret == nil {
				return nil, nil
			}

			if ret.Studio != nil {
				if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
					return match.ScrapedStudio(r.Studio(), ret.Studio, nil)
				}); err != nil {
					return nil, err
				}
			}

			// post-process - set the images if applicable; guard the URL
			// dereferences so logging a failure cannot panic on a nil field
			if err := setMovieFrontImage(context.TODO(), c.client, ret, c.globalConfig); err != nil {
				frontURL := ""
				if ret.FrontImage != nil {
					frontURL = *ret.FrontImage
				}
				logger.Warnf("Could not set front image using URL %s: %s", frontURL, err.Error())
			}
			if err := setMovieBackImage(context.TODO(), c.client, ret, c.globalConfig); err != nil {
				backURL := ""
				if ret.BackImage != nil {
					backURL = *ret.BackImage
				}
				logger.Warnf("Could not set back image using URL %s: %s", backURL, err.Error())
			}

			return ret, nil
		}
	}

	return nil, nil
}
// postProcessTags filters scraped tags against the user-configured exclusion
// patterns (case-insensitive regexes) and matches the survivors against the
// database. Invalid exclusion patterns are logged and skipped; ignored tags
// are reported at info level.
func postProcessTags(tqb models.TagReader, scrapedTags []*models.ScrapedTag) ([]*models.ScrapedTag, error) {
	var ret []*models.ScrapedTag

	excludePatterns := stash_config.GetInstance().GetScraperExcludeTagPatterns()
	var excludeRegexps []*regexp.Regexp

	for _, excludePattern := range excludePatterns {
		// patterns are lowercased and matched against lowercased tag names
		reg, err := regexp.Compile(strings.ToLower(excludePattern))
		if err != nil {
			// fix malformed log message (was "pattern :%v")
			logger.Errorf("Invalid tag exclusion pattern: %v", err)
		} else {
			excludeRegexps = append(excludeRegexps, reg)
		}
	}

	var ignoredTags []string

ScrapeTag:
	for _, t := range scrapedTags {
		for _, reg := range excludeRegexps {
			if reg.MatchString(strings.ToLower(t.Name)) {
				ignoredTags = append(ignoredTags, t.Name)
				continue ScrapeTag
			}
		}

		err := match.ScrapedTag(tqb, t)
		if err != nil {
			return nil, err
		}
		ret = append(ret, t)
	}

	if len(ignoredTags) > 0 {
		logger.Infof("Scraping ignored tags: %s", strings.Join(ignoredTags, ", "))
	}

	return ret, nil
}

View file

@ -1,6 +1,7 @@
package scraper
import (
"context"
"encoding/json"
"errors"
"fmt"
@ -13,6 +14,8 @@ import (
"github.com/stashapp/stash/pkg/models"
)
var ErrScraperScript = errors.New("scraper script error")
type scriptScraper struct {
scraper scraperTypeConfig
config config
@ -73,65 +76,122 @@ func (s *scriptScraper) runScraperScript(inString string, out interface{}) error
logger.Debugf("Scraper script <%s> started", strings.Join(cmd.Args, " "))
// TODO - add a timeout here
decodeErr := json.NewDecoder(stdout).Decode(out)
if decodeErr != nil {
logger.Error("could not unmarshal json: " + decodeErr.Error())
return errors.New("could not unmarshal json: " + decodeErr.Error())
// Make a copy of stdout here. This allows us to decode it twice.
var sb strings.Builder
tr := io.TeeReader(stdout, &sb)
// First, perform a decode where unknown fields are disallowed.
d := json.NewDecoder(tr)
d.DisallowUnknownFields()
strictErr := d.Decode(out)
if strictErr != nil {
// The decode failed for some reason, use the built string
// and allow unknown fields in the decode.
s := sb.String()
lenientErr := json.NewDecoder(strings.NewReader(s)).Decode(out)
if lenientErr != nil {
// The error is genuine, so return it
logger.Errorf("could not unmarshal json from script output: %v", lenientErr)
return fmt.Errorf("could not unmarshal json from script output: %w", lenientErr)
}
// Lenient decode succeeded, print a warning, but use the decode
logger.Warnf("reading script result: %v", strictErr)
}
err = cmd.Wait()
logger.Debugf("Scraper script finished")
if err != nil {
return errors.New("error running scraper script")
return fmt.Errorf("%w: %v", ErrScraperScript, err)
}
return nil
}
func (s *scriptScraper) scrapePerformersByName(name string) ([]*models.ScrapedPerformer, error) {
inString := `{"name": "` + name + `"}`
func (s *scriptScraper) scrapeByName(ctx context.Context, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error) {
input := `{"name": "` + name + `"}`
var performers []models.ScrapedPerformer
err := s.runScraperScript(inString, &performers)
// convert to pointers
var ret []*models.ScrapedPerformer
if err == nil {
for i := 0; i < len(performers); i++ {
ret = append(ret, &performers[i])
var ret []models.ScrapedContent
var err error
switch ty {
case models.ScrapeContentTypePerformer:
var performers []models.ScrapedPerformer
err = s.runScraperScript(input, &performers)
if err == nil {
for _, p := range performers {
v := p
ret = append(ret, &v)
}
}
case models.ScrapeContentTypeScene:
var scenes []models.ScrapedScene
err = s.runScraperScript(input, &scenes)
if err == nil {
for _, s := range scenes {
v := s
ret = append(ret, &v)
}
}
default:
return nil, ErrNotSupported
}
return ret, err
}
func (s *scriptScraper) scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
inString, err := json.Marshal(scrapedPerformer)
func (s *scriptScraper) scrapeByFragment(ctx context.Context, input Input) (models.ScrapedContent, error) {
var inString []byte
var err error
var ty models.ScrapeContentType
switch {
case input.Performer != nil:
inString, err = json.Marshal(*input.Performer)
ty = models.ScrapeContentTypePerformer
case input.Gallery != nil:
inString, err = json.Marshal(*input.Gallery)
ty = models.ScrapeContentTypeGallery
case input.Scene != nil:
inString, err = json.Marshal(*input.Scene)
ty = models.ScrapeContentTypeScene
}
if err != nil {
return nil, err
}
var ret models.ScrapedPerformer
err = s.runScraperScript(string(inString), &ret)
return &ret, err
return s.scrape(ctx, string(inString), ty)
}
func (s *scriptScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) {
inString := `{"url": "` + url + `"}`
var ret models.ScrapedPerformer
err := s.runScraperScript(string(inString), &ret)
return &ret, err
// scrapeByURL runs the scraper script with a {"url": ...} JSON fragment as
// input and decodes the output into the content type requested by ty.
func (s *scriptScraper) scrapeByURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
	return s.scrape(ctx, `{"url": "`+url+`"}`, ty)
}
func (s *scriptScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) {
// scrape runs the scraper script with the given JSON input string and decodes
// the script's output into the scraped-content type requested by ty.
// Performer, gallery, scene and movie content are supported; any other type
// yields ErrNotSupported. On script/decode failure the typed zero-value
// pointer is returned alongside the error.
func (s *scriptScraper) scrape(ctx context.Context, input string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
	switch ty {
	case models.ScrapeContentTypePerformer:
		var performer models.ScrapedPerformer
		err := s.runScraperScript(input, &performer)
		return &performer, err
	case models.ScrapeContentTypeGallery:
		var gallery models.ScrapedGallery
		err := s.runScraperScript(input, &gallery)
		return &gallery, err
	case models.ScrapeContentTypeScene:
		var scene models.ScrapedScene
		err := s.runScraperScript(input, &scene)
		return &scene, err
	case models.ScrapeContentTypeMovie:
		var movie models.ScrapedMovie
		err := s.runScraperScript(input, &movie)
		return &movie, err
	}
	return nil, ErrNotSupported
}
func (s *scriptScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) {
inString, err := json.Marshal(sceneToUpdateInput(scene))
if err != nil {
@ -145,39 +205,7 @@ func (s *scriptScraper) scrapeSceneByScene(scene *models.Scene) (*models.Scraped
return &ret, err
}
func (s *scriptScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) {
inString := `{"name": "` + name + `"}`
var scenes []models.ScrapedScene
err := s.runScraperScript(inString, &scenes)
// convert to pointers
var ret []*models.ScrapedScene
if err == nil {
for i := 0; i < len(scenes); i++ {
ret = append(ret, &scenes[i])
}
}
return ret, err
}
func (s *scriptScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
inString, err := json.Marshal(scene)
if err != nil {
return nil, err
}
var ret models.ScrapedScene
err = s.runScraperScript(string(inString), &ret)
return &ret, err
}
func (s *scriptScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
func (s *scriptScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) {
inString, err := json.Marshal(galleryToUpdateInput(gallery))
if err != nil {
@ -191,50 +219,6 @@ func (s *scriptScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models
return &ret, err
}
func (s *scriptScraper) scrapeGalleryByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
inString, err := json.Marshal(gallery)
if err != nil {
return nil, err
}
var ret models.ScrapedGallery
err = s.runScraperScript(string(inString), &ret)
return &ret, err
}
func (s *scriptScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) {
inString := `{"url": "` + url + `"}`
var ret models.ScrapedScene
err := s.runScraperScript(string(inString), &ret)
return &ret, err
}
func (s *scriptScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) {
inString := `{"url": "` + url + `"}`
var ret models.ScrapedGallery
err := s.runScraperScript(string(inString), &ret)
return &ret, err
}
func (s *scriptScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) {
inString := `{"url": "` + url + `"}`
var ret models.ScrapedMovie
err := s.runScraperScript(string(inString), &ret)
return &ret, err
}
func findPythonExecutable() (string, error) {
_, err := exec.LookPath("python3")

View file

@ -3,7 +3,7 @@ package scraper
import (
"context"
"database/sql"
"errors"
"fmt"
"net/http"
"strconv"
@ -54,37 +54,6 @@ type stashFindPerformerNamesResultType struct {
Performers []*stashFindPerformerNamePerformer `graphql:"performers"`
}
func (s *stashScraper) scrapePerformersByName(name string) ([]*models.ScrapedPerformer, error) {
client := s.getStashClient()
var q struct {
FindPerformers stashFindPerformerNamesResultType `graphql:"findPerformers(filter: $f)"`
}
page := 1
perPage := 10
vars := map[string]interface{}{
"f": models.FindFilterType{
Q: &name,
Page: &page,
PerPage: &perPage,
},
}
err := client.Query(context.TODO(), &q, vars)
if err != nil {
return nil, err
}
var ret []*models.ScrapedPerformer
for _, p := range q.FindPerformers.Performers {
ret = append(ret, p.toPerformer())
}
return ret, nil
}
// need a separate for scraped stash performers - does not include remote_site_id or image
type scrapedTagStash struct {
Name string `graphql:"name" json:"name"`
@ -114,7 +83,17 @@ type scrapedPerformerStash struct {
Weight *string `graphql:"weight" json:"weight"`
}
func (s *stashScraper) scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
func (s *stashScraper) scrapeByFragment(ctx context.Context, input Input) (models.ScrapedContent, error) {
if input.Gallery != nil || input.Scene != nil {
return nil, fmt.Errorf("%w: using stash scraper as a fragment scraper", ErrNotSupported)
}
if input.Performer == nil {
return nil, fmt.Errorf("%w: the given performer is nil", ErrNotSupported)
}
scrapedPerformer := input.Performer
client := s.getStashClient()
var q struct {
@ -128,7 +107,7 @@ func (s *stashScraper) scrapePerformerByFragment(scrapedPerformer models.Scraped
"f": performerID,
}
err := client.Query(context.TODO(), &q, vars)
err := client.Query(ctx, &q, vars)
if err != nil {
return nil, err
}
@ -141,7 +120,7 @@ func (s *stashScraper) scrapePerformerByFragment(scrapedPerformer models.Scraped
}
// get the performer image directly
ret.Image, err = getStashPerformerImage(context.TODO(), s.config.StashServer.URL, performerID, s.client, s.globalConfig)
ret.Image, err = getStashPerformerImage(ctx, s.config.StashServer.URL, performerID, s.client, s.globalConfig)
if err != nil {
return nil, err
}
@ -159,7 +138,7 @@ type stashFindSceneNamesResultType struct {
Scenes []*scrapedSceneStash `graphql:"scenes"`
}
func (s *stashScraper) scrapedStashSceneToScrapedScene(scene *scrapedSceneStash) (*models.ScrapedScene, error) {
func (s *stashScraper) scrapedStashSceneToScrapedScene(ctx context.Context, scene *scrapedSceneStash) (*models.ScrapedScene, error) {
ret := models.ScrapedScene{}
err := copier.Copy(&ret, scene)
if err != nil {
@ -167,7 +146,7 @@ func (s *stashScraper) scrapedStashSceneToScrapedScene(scene *scrapedSceneStash)
}
// get the performer image directly
ret.Image, err = getStashSceneImage(context.TODO(), s.config.StashServer.URL, scene.ID, s.client, s.globalConfig)
ret.Image, err = getStashSceneImage(ctx, s.config.StashServer.URL, scene.ID, s.client, s.globalConfig)
if err != nil {
return nil, err
}
@ -175,13 +154,9 @@ func (s *stashScraper) scrapedStashSceneToScrapedScene(scene *scrapedSceneStash)
return &ret, nil
}
func (s *stashScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) {
func (s *stashScraper) scrapeByName(ctx context.Context, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error) {
client := s.getStashClient()
var q struct {
FindScenes stashFindSceneNamesResultType `graphql:"findScenes(filter: $f)"`
}
page := 1
perPage := 10
@ -193,21 +168,45 @@ func (s *stashScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene,
},
}
err := client.Query(context.TODO(), &q, vars)
if err != nil {
return nil, err
}
var ret []models.ScrapedContent
switch ty {
case models.ScrapeContentTypeScene:
var q struct {
FindScenes stashFindSceneNamesResultType `graphql:"findScenes(filter: $f)"`
}
var ret []*models.ScrapedScene
for _, scene := range q.FindScenes.Scenes {
converted, err := s.scrapedStashSceneToScrapedScene(scene)
err := client.Query(ctx, &q, vars)
if err != nil {
return nil, err
}
ret = append(ret, converted)
for _, scene := range q.FindScenes.Scenes {
converted, err := s.scrapedStashSceneToScrapedScene(ctx, scene)
if err != nil {
return nil, err
}
ret = append(ret, converted)
}
return ret, nil
case models.ScrapeContentTypePerformer:
var q struct {
FindPerformers stashFindPerformerNamesResultType `graphql:"findPerformers(filter: $f)"`
}
err := client.Query(ctx, &q, vars)
if err != nil {
return nil, err
}
for _, p := range q.FindPerformers.Performers {
ret = append(ret, p.toPerformer())
}
return ret, nil
}
return ret, nil
return nil, ErrNotSupported
}
type scrapedSceneStash struct {
@ -222,7 +221,7 @@ type scrapedSceneStash struct {
Performers []*scrapedPerformerStash `graphql:"performers" json:"performers"`
}
func (s *stashScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) {
func (s *stashScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) {
// query by MD5
var q struct {
FindScene *scrapedSceneStash `graphql:"findSceneByHash(input: $c)"`
@ -243,18 +242,18 @@ func (s *stashScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedS
}
client := s.getStashClient()
if err := client.Query(context.TODO(), &q, vars); err != nil {
if err := client.Query(ctx, &q, vars); err != nil {
return nil, err
}
// need to copy back to a scraped scene
ret, err := s.scrapedStashSceneToScrapedScene(q.FindScene)
ret, err := s.scrapedStashSceneToScrapedScene(ctx, q.FindScene)
if err != nil {
return nil, err
}
// get the performer image directly
ret.Image, err = getStashSceneImage(context.TODO(), s.config.StashServer.URL, q.FindScene.ID, s.client, s.globalConfig)
ret.Image, err = getStashSceneImage(ctx, s.config.StashServer.URL, q.FindScene.ID, s.client, s.globalConfig)
if err != nil {
return nil, err
}
@ -262,10 +261,6 @@ func (s *stashScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedS
return ret, nil
}
func (s *stashScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
return nil, errors.New("scrapeSceneByFragment not supported for stash scraper")
}
type scrapedGalleryStash struct {
ID string `graphql:"id" json:"id"`
Title *string `graphql:"title" json:"title"`
@ -278,7 +273,7 @@ type scrapedGalleryStash struct {
Performers []*scrapedPerformerStash `graphql:"performers" json:"performers"`
}
func (s *stashScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
func (s *stashScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) {
var q struct {
FindGallery *scrapedGalleryStash `graphql:"findGalleryByHash(input: $c)"`
}
@ -296,7 +291,7 @@ func (s *stashScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.
}
client := s.getStashClient()
if err := client.Query(context.TODO(), &q, vars); err != nil {
if err := client.Query(ctx, &q, vars); err != nil {
return nil, err
}
@ -309,29 +304,13 @@ func (s *stashScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.
return &ret, nil
}
func (s *stashScraper) scrapeGalleryByFragment(scene models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
return nil, errors.New("scrapeGalleryByFragment not supported for stash scraper")
func (s *stashScraper) scrapeByURL(_ context.Context, _ string, _ models.ScrapeContentType) (models.ScrapedContent, error) {
return nil, ErrNotSupported
}
func (s *stashScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) {
return nil, errors.New("scrapePerformerByURL not supported for stash scraper")
}
func (s *stashScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) {
return nil, errors.New("scrapeSceneByURL not supported for stash scraper")
}
func (s *stashScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) {
return nil, errors.New("scrapeGalleryByURL not supported for stash scraper")
}
func (s *stashScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) {
return nil, errors.New("scrapeMovieByURL not supported for stash scraper")
}
func getScene(sceneID int, txnManager models.TransactionManager) (*models.Scene, error) {
func getScene(ctx context.Context, sceneID int, txnManager models.TransactionManager) (*models.Scene, error) {
var ret *models.Scene
if err := txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
if err := txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
var err error
ret, err = r.Scene().Find(sceneID)
return err
@ -367,9 +346,9 @@ func sceneToUpdateInput(scene *models.Scene) models.SceneUpdateInput {
}
}
func getGallery(galleryID int, txnManager models.TransactionManager) (*models.Gallery, error) {
func getGallery(ctx context.Context, galleryID int, txnManager models.TransactionManager) (*models.Gallery, error) {
var ret *models.Gallery
if err := txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
if err := txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
var err error
ret, err = r.Gallery().Find(galleryID)
return err

View file

@ -56,7 +56,7 @@ func (c Client) QueryStashBoxScene(ctx context.Context, queryStr string) ([]*mod
var ret []*models.ScrapedScene
for _, s := range sceneFragments {
ss, err := c.sceneFragmentToScrapedScene(context.TODO(), s)
ss, err := c.sceneFragmentToScrapedScene(ctx, s)
if err != nil {
return nil, err
}
@ -69,9 +69,7 @@ func (c Client) QueryStashBoxScene(ctx context.Context, queryStr string) ([]*mod
// FindStashBoxScenesByFingerprints queries stash-box for scenes using every
// scene's MD5/OSHASH checksum, or PHash, and returns results in the same order
// as the input slice.
func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([][]*models.ScrapedScene, error) {
ctx := context.TODO()
func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, sceneIDs []string) ([][]*models.ScrapedScene, error) {
ids, err := utils.StringSliceToIntSlice(sceneIDs)
if err != nil {
return nil, err
@ -150,9 +148,7 @@ func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([][]*models
// FindStashBoxScenesByFingerprintsFlat queries stash-box for scenes using every
// scene's MD5/OSHASH checksum, or PHash, and returns results a flat slice.
func (c Client) FindStashBoxScenesByFingerprintsFlat(sceneIDs []string) ([]*models.ScrapedScene, error) {
ctx := context.TODO()
func (c Client) FindStashBoxScenesByFingerprintsFlat(ctx context.Context, sceneIDs []string) ([]*models.ScrapedScene, error) {
ids, err := utils.StringSliceToIntSlice(sceneIDs)
if err != nil {
return nil, err
@ -230,7 +226,7 @@ func (c Client) findStashBoxScenesByFingerprints(ctx context.Context, fingerprin
return ret, nil
}
func (c Client) SubmitStashBoxFingerprints(sceneIDs []string, endpoint string) (bool, error) {
func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []string, endpoint string) (bool, error) {
ids, err := utils.StringSliceToIntSlice(sceneIDs)
if err != nil {
return false, err
@ -238,7 +234,7 @@ func (c Client) SubmitStashBoxFingerprints(sceneIDs []string, endpoint string) (
var fingerprints []graphql.FingerprintSubmission
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
qb := r.Scene()
for _, sceneID := range ids {
@ -307,12 +303,12 @@ func (c Client) SubmitStashBoxFingerprints(sceneIDs []string, endpoint string) (
return false, err
}
return c.submitStashBoxFingerprints(fingerprints)
return c.submitStashBoxFingerprints(ctx, fingerprints)
}
func (c Client) submitStashBoxFingerprints(fingerprints []graphql.FingerprintSubmission) (bool, error) {
func (c Client) submitStashBoxFingerprints(ctx context.Context, fingerprints []graphql.FingerprintSubmission) (bool, error) {
for _, fingerprint := range fingerprints {
_, err := c.client.SubmitFingerprint(context.TODO(), fingerprint)
_, err := c.client.SubmitFingerprint(ctx, fingerprint)
if err != nil {
return false, err
}
@ -322,8 +318,8 @@ func (c Client) submitStashBoxFingerprints(fingerprints []graphql.FingerprintSub
}
// QueryStashBoxPerformer queries stash-box for performers using a query string.
func (c Client) QueryStashBoxPerformer(queryStr string) ([]*models.StashBoxPerformerQueryResult, error) {
performers, err := c.queryStashBoxPerformer(queryStr)
func (c Client) QueryStashBoxPerformer(ctx context.Context, queryStr string) ([]*models.StashBoxPerformerQueryResult, error) {
performers, err := c.queryStashBoxPerformer(ctx, queryStr)
res := []*models.StashBoxPerformerQueryResult{
{
@ -342,8 +338,8 @@ func (c Client) QueryStashBoxPerformer(queryStr string) ([]*models.StashBoxPerfo
return res, err
}
func (c Client) queryStashBoxPerformer(queryStr string) ([]*models.ScrapedPerformer, error) {
performers, err := c.client.SearchPerformer(context.TODO(), queryStr)
func (c Client) queryStashBoxPerformer(ctx context.Context, queryStr string) ([]*models.ScrapedPerformer, error) {
performers, err := c.client.SearchPerformer(ctx, queryStr)
if err != nil {
return nil, err
}
@ -360,7 +356,7 @@ func (c Client) queryStashBoxPerformer(queryStr string) ([]*models.ScrapedPerfor
}
// FindStashBoxPerformersByNames queries stash-box for performers by name
func (c Client) FindStashBoxPerformersByNames(performerIDs []string) ([]*models.StashBoxPerformerQueryResult, error) {
func (c Client) FindStashBoxPerformersByNames(ctx context.Context, performerIDs []string) ([]*models.StashBoxPerformerQueryResult, error) {
ids, err := utils.StringSliceToIntSlice(performerIDs)
if err != nil {
return nil, err
@ -368,7 +364,7 @@ func (c Client) FindStashBoxPerformersByNames(performerIDs []string) ([]*models.
var performers []*models.Performer
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
qb := r.Performer()
for _, performerID := range ids {
@ -391,10 +387,10 @@ func (c Client) FindStashBoxPerformersByNames(performerIDs []string) ([]*models.
return nil, err
}
return c.findStashBoxPerformersByNames(performers)
return c.findStashBoxPerformersByNames(ctx, performers)
}
func (c Client) FindStashBoxPerformersByPerformerNames(performerIDs []string) ([][]*models.ScrapedPerformer, error) {
func (c Client) FindStashBoxPerformersByPerformerNames(ctx context.Context, performerIDs []string) ([][]*models.ScrapedPerformer, error) {
ids, err := utils.StringSliceToIntSlice(performerIDs)
if err != nil {
return nil, err
@ -402,7 +398,7 @@ func (c Client) FindStashBoxPerformersByPerformerNames(performerIDs []string) ([
var performers []*models.Performer
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
qb := r.Performer()
for _, performerID := range ids {
@ -425,7 +421,7 @@ func (c Client) FindStashBoxPerformersByPerformerNames(performerIDs []string) ([
return nil, err
}
results, err := c.findStashBoxPerformersByNames(performers)
results, err := c.findStashBoxPerformersByNames(ctx, performers)
if err != nil {
return nil, err
}
@ -438,11 +434,11 @@ func (c Client) FindStashBoxPerformersByPerformerNames(performerIDs []string) ([
return ret, nil
}
func (c Client) findStashBoxPerformersByNames(performers []*models.Performer) ([]*models.StashBoxPerformerQueryResult, error) {
func (c Client) findStashBoxPerformersByNames(ctx context.Context, performers []*models.Performer) ([]*models.StashBoxPerformerQueryResult, error) {
var ret []*models.StashBoxPerformerQueryResult
for _, performer := range performers {
if performer.Name.Valid {
performerResults, err := c.queryStashBoxPerformer(performer.Name.String)
performerResults, err := c.queryStashBoxPerformer(ctx, performer.Name.String)
if err != nil {
return nil, err
}
@ -610,6 +606,11 @@ func performerFragmentToScrapedScenePerformer(p graphql.PerformerFragment) *mode
sp.FakeTits = enumToStringPtr(p.BreastType, true)
}
if len(p.Aliases) > 0 {
alias := strings.Join(p.Aliases, ", ")
sp.Aliases = &alias
}
return sp
}
@ -705,8 +706,8 @@ func (c Client) sceneFragmentToScrapedScene(ctx context.Context, s *graphql.Scen
return ss, nil
}
func (c Client) FindStashBoxPerformerByID(id string) (*models.ScrapedPerformer, error) {
performer, err := c.client.FindPerformerByID(context.TODO(), id)
func (c Client) FindStashBoxPerformerByID(ctx context.Context, id string) (*models.ScrapedPerformer, error) {
performer, err := c.client.FindPerformerByID(ctx, id)
if err != nil {
return nil, err
}
@ -715,8 +716,8 @@ func (c Client) FindStashBoxPerformerByID(id string) (*models.ScrapedPerformer,
return ret, nil
}
func (c Client) FindStashBoxPerformerByName(name string) (*models.ScrapedPerformer, error) {
performers, err := c.client.SearchPerformer(context.TODO(), name)
func (c Client) FindStashBoxPerformerByName(ctx context.Context, name string) (*models.ScrapedPerformer, error) {
performers, err := c.client.SearchPerformer(ctx, name)
if err != nil {
return nil, err
}

View file

@ -99,8 +99,6 @@ func urlFromCDP(ctx context.Context, url string, driverOptions scraperDriverOpti
sleepDuration = time.Duration(driverOptions.Sleep) * time.Second
}
act := context.TODO()
// if scraperCDPPath is a remote address, then allocate accordingly
cdpPath := globalConfig.GetScraperCDPPath()
if cdpPath != "" {
@ -118,7 +116,7 @@ func urlFromCDP(ctx context.Context, url string, driverOptions scraperDriverOpti
}
}
act, cancelAct = chromedp.NewRemoteAllocator(act, remote)
ctx, cancelAct = chromedp.NewRemoteAllocator(ctx, remote)
} else {
// use a temporary user directory for chrome
dir, err := os.MkdirTemp("", "stash-chromedp")
@ -131,13 +129,13 @@ func urlFromCDP(ctx context.Context, url string, driverOptions scraperDriverOpti
chromedp.UserDataDir(dir),
chromedp.ExecPath(cdpPath),
)
act, cancelAct = chromedp.NewExecAllocator(act, opts...)
ctx, cancelAct = chromedp.NewExecAllocator(ctx, opts...)
}
defer cancelAct()
}
ctx, cancel := chromedp.NewContext(act)
ctx, cancel := chromedp.NewContext(ctx)
defer cancel()
// add a fixed timeout for the http request

View file

@ -4,6 +4,7 @@ import (
"bytes"
"context"
"errors"
"fmt"
"net/http"
"net/url"
"regexp"
@ -39,14 +40,14 @@ func (s *xpathScraper) getXpathScraper() *mappedScraper {
return s.config.XPathScrapers[s.scraper.Scraper]
}
func (s *xpathScraper) scrapeURL(url string) (*html.Node, *mappedScraper, error) {
func (s *xpathScraper) scrapeURL(ctx context.Context, url string) (*html.Node, *mappedScraper, error) {
scraper := s.getXpathScraper()
if scraper == nil {
return nil, nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
}
doc, err := s.loadURL(context.TODO(), url)
doc, err := s.loadURL(ctx, url)
if err != nil {
return nil, nil, err
@ -55,55 +56,33 @@ func (s *xpathScraper) scrapeURL(url string) (*html.Node, *mappedScraper, error)
return doc, scraper, nil
}
func (s *xpathScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) {
func (s *xpathScraper) scrapeByURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
u := replaceURL(url, s.scraper) // allow a URL Replace for performer by URL queries
doc, scraper, err := s.scrapeURL(u)
doc, scraper, err := s.scrapeURL(ctx, u)
if err != nil {
return nil, err
}
q := s.getXPathQuery(doc)
return scraper.scrapePerformer(q)
}
func (s *xpathScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) {
u := replaceURL(url, s.scraper) // allow a URL Replace for scene by URL queries
doc, scraper, err := s.scrapeURL(u)
if err != nil {
return nil, err
switch ty {
case models.ScrapeContentTypePerformer:
return scraper.scrapePerformer(ctx, q)
case models.ScrapeContentTypeScene:
return scraper.scrapeScene(ctx, q)
case models.ScrapeContentTypeGallery:
return scraper.scrapeGallery(ctx, q)
case models.ScrapeContentTypeMovie:
return scraper.scrapeMovie(ctx, q)
}
q := s.getXPathQuery(doc)
return scraper.scrapeScene(q)
return nil, ErrNotSupported
}
func (s *xpathScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) {
u := replaceURL(url, s.scraper) // allow a URL Replace for gallery by URL queries
doc, scraper, err := s.scrapeURL(u)
if err != nil {
return nil, err
}
q := s.getXPathQuery(doc)
return scraper.scrapeGallery(q)
}
func (s *xpathScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) {
u := replaceURL(url, s.scraper) // allow a URL Replace for movie by URL queries
doc, scraper, err := s.scrapeURL(u)
if err != nil {
return nil, err
}
q := s.getXPathQuery(doc)
return scraper.scrapeMovie(q)
}
func (s *xpathScraper) scrapePerformersByName(name string) ([]*models.ScrapedPerformer, error) {
func (s *xpathScraper) scrapeByName(ctx context.Context, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error) {
scraper := s.getXpathScraper()
if scraper == nil {
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
return nil, fmt.Errorf("%w: name %v", ErrNotFound, s.scraper.Scraper)
}
const placeholder = "{}"
@ -114,46 +93,43 @@ func (s *xpathScraper) scrapePerformersByName(name string) ([]*models.ScrapedPer
url := s.scraper.QueryURL
url = strings.ReplaceAll(url, placeholder, escapedName)
doc, err := s.loadURL(context.TODO(), url)
doc, err := s.loadURL(ctx, url)
if err != nil {
return nil, err
}
q := s.getXPathQuery(doc)
return scraper.scrapePerformers(q)
}
q.setType(SearchQuery)
func (s *xpathScraper) scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
return nil, errors.New("scrapePerformerByFragment not supported for xpath scraper")
}
var content []models.ScrapedContent
switch ty {
case models.ScrapeContentTypePerformer:
performers, err := scraper.scrapePerformers(ctx, q)
if err != nil {
return nil, err
}
for _, p := range performers {
content = append(content, p)
}
func (s *xpathScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) {
scraper := s.getXpathScraper()
return content, nil
case models.ScrapeContentTypeScene:
scenes, err := scraper.scrapeScenes(ctx, q)
if err != nil {
return nil, err
}
for _, s := range scenes {
content = append(content, s)
}
if scraper == nil {
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
return content, nil
}
const placeholder = "{}"
// replace the placeholder string with the URL-escaped name
escapedName := url.QueryEscape(name)
url := s.scraper.QueryURL
url = strings.ReplaceAll(url, placeholder, escapedName)
doc, err := s.loadURL(context.TODO(), url)
if err != nil {
return nil, err
}
q := s.getXPathQuery(doc)
return scraper.scrapeScenes(q)
return nil, ErrNotSupported
}
func (s *xpathScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) {
func (s *xpathScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) {
// construct the URL
queryURL := queryURLParametersFromScene(scene)
if s.scraper.QueryURLReplacements != nil {
@ -167,17 +143,28 @@ func (s *xpathScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedS
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
}
doc, err := s.loadURL(context.TODO(), url)
doc, err := s.loadURL(ctx, url)
if err != nil {
return nil, err
}
q := s.getXPathQuery(doc)
return scraper.scrapeScene(q)
return scraper.scrapeScene(ctx, q)
}
func (s *xpathScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
func (s *xpathScraper) scrapeByFragment(ctx context.Context, input Input) (models.ScrapedContent, error) {
switch {
case input.Gallery != nil:
return nil, fmt.Errorf("%w: cannot use an xpath scraper as a gallery fragment scraper", ErrNotSupported)
case input.Performer != nil:
return nil, fmt.Errorf("%w: cannot use an xpath scraper as a performer fragment scraper", ErrNotSupported)
case input.Scene == nil:
return nil, fmt.Errorf("%w: scene input is nil", ErrNotSupported)
}
scene := *input.Scene
// construct the URL
queryURL := queryURLParametersFromScrapedScene(scene)
if s.scraper.QueryURLReplacements != nil {
@ -191,17 +178,17 @@ func (s *xpathScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*m
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
}
doc, err := s.loadURL(context.TODO(), url)
doc, err := s.loadURL(ctx, url)
if err != nil {
return nil, err
}
q := s.getXPathQuery(doc)
return scraper.scrapeScene(q)
return scraper.scrapeScene(ctx, q)
}
func (s *xpathScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
func (s *xpathScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) {
// construct the URL
queryURL := queryURLParametersFromGallery(gallery)
if s.scraper.QueryURLReplacements != nil {
@ -215,18 +202,14 @@ func (s *xpathScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
}
doc, err := s.loadURL(context.TODO(), url)
doc, err := s.loadURL(ctx, url)
if err != nil {
return nil, err
}
q := s.getXPathQuery(doc)
return scraper.scrapeGallery(q)
}
func (s *xpathScraper) scrapeGalleryByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
return nil, errors.New("scrapeGalleryByFragment not supported for xpath scraper")
return scraper.scrapeGallery(ctx, q)
}
func (s *xpathScraper) loadURL(ctx context.Context, url string) (*html.Node, error) {
@ -256,15 +239,23 @@ func (s *xpathScraper) getXPathQuery(doc *html.Node) *xpathQuery {
}
type xpathQuery struct {
doc *html.Node
scraper *xpathScraper
doc *html.Node
scraper *xpathScraper
queryType QueryType
}
func (q *xpathQuery) runQuery(selector string) []string {
func (q *xpathQuery) getType() QueryType {
return q.queryType
}
func (q *xpathQuery) setType(t QueryType) {
q.queryType = t
}
func (q *xpathQuery) runQuery(selector string) ([]string, error) {
found, err := htmlquery.QueryAll(q.doc, selector)
if err != nil {
logger.Warnf("Error parsing xpath expression '%s': %s", selector, err.Error())
return nil
return nil, fmt.Errorf("selector '%s': parse error: %v", selector, err)
}
var ret []string
@ -276,7 +267,7 @@ func (q *xpathQuery) runQuery(selector string) []string {
}
}
return ret
return ret, nil
}
func (q *xpathQuery) nodeText(n *html.Node) string {
@ -301,8 +292,8 @@ func (q *xpathQuery) nodeText(n *html.Node) string {
return ret
}
func (q *xpathQuery) subScrape(value string) mappedQuery {
doc, err := q.scraper.loadURL(context.TODO(), value)
func (q *xpathQuery) subScrape(ctx context.Context, value string) mappedQuery {
doc, err := q.scraper.loadURL(ctx, value)
if err != nil {
logger.Warnf("Error getting URL '%s' for sub-scraper: %s", value, err.Error())

Some files were not shown because too many files have changed in this diff Show more