diff --git a/.gitattributes b/.gitattributes
index 4fea60b95..3b5c2ca77 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,2 +1,6 @@
go.mod text eol=lf
-go.sum text eol=lf
\ No newline at end of file
+go.sum text eol=lf
+*.go text eol=lf
+vendor/** -text
+ui/v2.5/**/*.ts* text eol=lf
+ui/v2.5/**/*.scss text eol=lf
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index 87bb80199..45fc209de 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -23,6 +23,8 @@ A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem please ensure that your screenshots are SFW or at least appropriately censored.
+**Stash Version: (from Settings -> About):**
+
**Desktop (please complete the following information):**
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
diff --git a/.gitignore b/.gitignore
index a54db8fb0..4f92a344f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,6 +21,7 @@
# GraphQL generated output
pkg/models/generated_*.go
ui/v2/src/core/generated-*.tsx
+ui/v2.5/src/core/generated-*.tsx
# packr generated files
*-packr.go
diff --git a/.idea/go.iml b/.idea/go.iml
index ef3c3f44e..eddfcc6c3 100644
--- a/.idea/go.iml
+++ b/.idea/go.iml
@@ -4,11 +4,10 @@
-
-
-
+
+
-
\ No newline at end of file
+
diff --git a/.travis.yml b/.travis.yml
index 8ffc58986..ca5eecc95 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,4 +1,8 @@
+if: tag != latest_develop # don't build for the latest_develop tagged version
+
dist: xenial
+git:
+ depth: false
language: go
go:
- 1.11.x
@@ -9,14 +13,15 @@ env:
- GO111MODULE=on
before_install:
- echo -e "machine github.com\n login $CI_USER_TOKEN" > ~/.netrc
-- travis_retry yarn --cwd ui/v2 install --frozen-lockfile
+- nvm install 12
+- travis_retry yarn --cwd ui/v2.5 install --frozen-lockfile
- make generate
-- CI=false yarn --cwd ui/v2 build # TODO: Fix warnings
+- CI=false yarn --cwd ui/v2.5 build-ci
#- go get -v github.com/mgechev/revive
script:
+# left lint off to avoid getting an extra dependency
#- make lint
-#- make vet
-- make it
+- make fmt-check vet it
after_success:
- docker pull stashapp/compiler:develop
- sh ./scripts/cross-compile.sh
@@ -31,6 +36,8 @@ before_deploy:
deploy:
# latest develop release
- provider: releases
+ # use the v2 release provider for proper release note setting
+ edge: true
api_key:
secure: tGJ2q62CfPdayid2qEtW2aGRhMgCl3lBXYYQqp3eH0vFgIIf6cs7IDX7YC/x3XKMEQ/iMLZmtCXZvSTqNrD6Sk7MSnt30GIs+4uxIZDnnd8mV5X3K4n4gjD+NAORc4DrQBvUGrYMKJsR5gtkH0nu6diWb1o1If7OiJEuCPRhrmQYcza7NUdABnA9Z2wn2RNUV9Ga33WUCqLMEU5GtNBlfQPiP/khCQrqn/ocR6wUjYut3J6YagzqH4wsfJi3glHyWtowcNIw1LZi5zFxHD/bRBT4Tln7yypkjWNq9eQILA6i6kRUGf7ggyTx26/k8n4tnu+QD0vVh4EcjlThpU/LGyUXzKrrxjRwaDZnM0oYxg5AfHcBuAiAdo0eWnV3lEWRfTJMIVb9MPf4qDmzR4RREfB5OXOxwq3ODeCcJE8sTIMD/wBPZrlqS/QrRpND2gn2X4snkVukN9t9F4CMTFMtVSzFV7TDJW5E5Lq6VEExulteQhs6kcK9NRPNAaLgRQAw7X9kVWfDtiGUP+fE2i8F9Bo8bm7sOT5O5VPMPykx3EgeNg1IqIgMTCsMlhMJT4xBJoQUgmd2wWyf3Ryw+P+sFgdb5Sd7+lFgJBjMUUoOxMxAOiEgdFvCXcr+/Udyz2RdtetU1/6VzXzLPcKOw0wubZeBkISqu7o9gpfdMP9Eq00=
file:
@@ -41,7 +48,7 @@ deploy:
skip_cleanup: true
overwrite: true
name: "${STASH_VERSION}: Latest development build"
- body: ${RELEASE_DATE}\n This is always the latest committed version on the develop branch. Use as your own risk!
+      release_notes: "**${RELEASE_DATE}**\n This is always the latest committed version on the develop branch. Use at your own risk!"
prerelease: true
on:
repo: stashapp/stash
diff --git a/Makefile b/Makefile
index 2cb51b4af..659a80120 100644
--- a/Makefile
+++ b/Makefile
@@ -21,13 +21,18 @@ clean:
.PHONY: generate
generate:
go generate -mod=vendor
- cd ui/v2 && yarn run gqlgen
+ cd ui/v2.5 && yarn run gqlgen
# Runs gofmt -w on the project's source code, modifying any files that do not match its style.
.PHONY: fmt
fmt:
go fmt ./...
+# Ensures that changed files have had gofmt run on them
+.PHONY: fmt-check
+fmt-check:
+ sh ./scripts/check-gofmt.sh
+
# Runs go vet on the project's source code.
.PHONY: vet
vet:
@@ -47,7 +52,31 @@ test:
it:
go test -mod=vendor -tags=integration ./...
+# installs UI dependencies. Run when first cloning the repository, or if UI
+# dependencies have changed
+.PHONY: pre-ui
+pre-ui:
+ cd ui/v2.5 && yarn install --frozen-lockfile
+
.PHONY: ui
ui:
- cd ui/v2 && yarn build
+ cd ui/v2.5 && yarn build
packr2
+
+fmt-ui:
+ cd ui/v2.5 && yarn format
+
+# runs tests and checks on the UI and builds it
+.PHONY: ui-validate
+ui-validate:
+ cd ui/v2.5 && yarn run validate
+
+# just repacks the packr files - use when updating migrations and packed files without
+# rebuilding the UI
+.PHONY: packr
+packr:
+ packr2
+
+# runs all of the tests and checks required for a PR to be accepted
+.PHONY: validate
+validate: ui-validate fmt-check vet lint it
diff --git a/README.md b/README.md
index 3e4a18582..b00c8c456 100644
--- a/README.md
+++ b/README.md
@@ -69,7 +69,7 @@ Join the [Discord server](https://discord.gg/2TsNFKt).
* Go Install: `go get github.com/gobuffalo/packr/v2/packr2@v2.0.2`
* [Binary Download](https://github.com/gobuffalo/packr/releases)
* [Yarn](https://yarnpkg.com/en/docs/install) - Yarn package manager
- * Run `yarn install --frozen-lockfile` in the `stash/ui/v2` folder (before running make generate for first time).
+ * Run `yarn install --frozen-lockfile` in the `stash/ui/v2.5` folder (before running make generate for first time).
NOTE: You may need to run the `go get` commands outside the project directory to avoid modifying the projects module file.
@@ -92,11 +92,18 @@ NOTE: The `make` command in Windows will be `mingw32-make` with MingW.
## Commands
-* `make generate` - Generate Go GraphQL and packr2 files
+* `make generate` - Generate Go and UI GraphQL files
* `make build` - Builds the binary (make sure to build the UI as well... see below)
-* `make ui` - Builds the frontend
+* `make pre-ui` - Installs the UI dependencies. Only needs to be run once before building the UI for the first time, or if the dependencies are updated
+* `make fmt-ui` - Formats the UI source code.
+* `make ui` - Builds the frontend and the packr2 files
+* `make packr` - Generate packr2 files (sub-target of `ui`. Use to regenerate packr2 files without rebuilding UI)
* `make vet` - Run `go vet`
* `make lint` - Run the linter
+* `make fmt` - Run `go fmt`
+* `make fmt-check` - Ensure changed files are formatted correctly
+* `make it` - Run the unit and integration tests
+* `make validate` - Run all of the tests and checks required to submit a PR
## Building a release
@@ -111,3 +118,10 @@ where the app can be cross-compiled. This process is kicked off by CI via the `
command to open a bash shell to the container to poke around:
`docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -i -t stashappdev/compiler:latest /bin/bash`
+
+## Customization
+
+You can make Stash interface fit your desired style with [Custom CSS snippets](https://github.com/stashapp/stash/wiki/Custom-CSS-snippets) and [CSS Tweaks](https://github.com/stashapp/stash/wiki/CSS-Tweaks).
+
+[Stash Plex Theme](https://github.com/stashapp/stash/wiki/Stash-Plex-Theme) is a community-created theme inspired by the popular Plex interface.
+
diff --git a/docker/build/x86_64/Dockerfile b/docker/build/x86_64/Dockerfile
index c2f8af444..8ac117647 100644
--- a/docker/build/x86_64/Dockerfile
+++ b/docker/build/x86_64/Dockerfile
@@ -32,10 +32,10 @@ RUN wget -O /ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/releases/ffmpeg-rele
mv /ffmpeg*/ /ffmpeg/
# copy the ui yarn stuff so that it doesn't get rebuilt every time
-COPY ./ui/v2/package.json ./ui/v2/yarn.lock /stash/ui/v2/
+COPY ./ui/v2.5/package.json ./ui/v2.5/yarn.lock /stash/ui/v2.5/
WORKDIR /stash
-RUN yarn --cwd ui/v2 install --frozen-lockfile
+RUN yarn --cwd ui/v2.5 install --frozen-lockfile
COPY . /stash/
ENV GO111MODULE=on
diff --git a/go.mod b/go.mod
index ae4aff41b..e4e407dd8 100644
--- a/go.mod
+++ b/go.mod
@@ -2,29 +2,30 @@ module github.com/stashapp/stash
require (
github.com/99designs/gqlgen v0.9.0
- github.com/PuerkitoBio/goquery v1.5.0
- github.com/antchfx/htmlquery v1.2.0
- github.com/antchfx/xpath v1.1.2 // indirect
- github.com/bmatcuk/doublestar v1.1.5
+ github.com/antchfx/htmlquery v1.2.3
+ github.com/bmatcuk/doublestar v1.3.1
github.com/disintegration/imaging v1.6.0
github.com/go-chi/chi v4.0.2+incompatible
github.com/gobuffalo/packr/v2 v2.0.2
github.com/golang-migrate/migrate/v4 v4.3.1
+ github.com/gorilla/sessions v1.2.0
github.com/gorilla/websocket v1.4.0
github.com/h2non/filetype v1.0.8
- // this is required for generate
github.com/inconshreveable/mousetrap v1.0.0 // indirect
+ github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a
github.com/jmoiron/sqlx v1.2.0
- github.com/mattn/go-sqlite3 v1.10.0
+ github.com/json-iterator/go v1.1.9
+ github.com/mattn/go-sqlite3 v1.13.0
github.com/rs/cors v1.6.0
github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f
github.com/sirupsen/logrus v1.4.2
github.com/spf13/pflag v1.0.3
github.com/spf13/viper v1.4.0
+ github.com/stretchr/testify v1.5.1
github.com/vektah/gqlparser v1.1.2
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4
- golang.org/x/image v0.0.0-20190118043309-183bebdce1b2 // indirect
- golang.org/x/net v0.0.0-20190522155817-f3200d17e092
+ golang.org/x/image v0.0.0-20190118043309-183bebdce1b2
+ golang.org/x/net v0.0.0-20200421231249-e086a090c8fd
gopkg.in/yaml.v2 v2.2.2
)
diff --git a/go.sum b/go.sum
index bf4e10dbe..d9ef0d5a2 100644
--- a/go.sum
+++ b/go.sum
@@ -16,8 +16,6 @@ github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF0
github.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA=
github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
-github.com/PuerkitoBio/goquery v1.5.0 h1:uGvmFXOA73IKluu/F84Xd1tt/z07GYm8X49XKHP7EJk=
-github.com/PuerkitoBio/goquery v1.5.0/go.mod h1:qD2PgZ9lccMbQlc7eEOjaeRlFQON7xY8kdmcsrnKqMg=
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
github.com/agnivade/levenshtein v1.0.1 h1:3oJU7J3FGFmyhn8KHjmVaZCN5hxTr7GxgRue+sxIXdQ=
@@ -27,13 +25,11 @@ github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuy
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
-github.com/andybalholm/cascadia v1.0.0 h1:hOCXnnZ5A+3eVDX8pvgl4kofXv2ELss0bKcqRySc45o=
-github.com/andybalholm/cascadia v1.0.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
-github.com/antchfx/htmlquery v1.2.0 h1:oKShnsGlnOHX6t4uj5OHgLKkABcJoqnXpqnscoi9Lpw=
-github.com/antchfx/htmlquery v1.2.0/go.mod h1:MS9yksVSQXls00iXkiMqXr0J+umL/AmxXKuP28SUJM8=
-github.com/antchfx/xpath v1.1.2 h1:YziPrtM0gEJBnhdUGxYcIVYXZ8FXbtbovxOi+UW/yWQ=
-github.com/antchfx/xpath v1.1.2/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk=
+github.com/antchfx/htmlquery v1.2.3 h1:sP3NFDneHx2stfNXCKbhHFo8XgNjCACnU/4AO5gWz6M=
+github.com/antchfx/htmlquery v1.2.3/go.mod h1:B0ABL+F5irhhMWg54ymEZinzMSi0Kt3I2if0BLYa3V0=
+github.com/antchfx/xpath v1.1.6 h1:6sVh6hB5T6phw1pFpHRQ+C4bd8sNI+O58flqtg7h0R0=
+github.com/antchfx/xpath v1.1.6/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk=
github.com/apache/thrift v0.0.0-20180902110319-2566ecd5d999/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
@@ -41,8 +37,8 @@ github.com/aws/aws-sdk-go v1.17.7/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCSz6Q9T7+igc/hlvDOUdtWKryOrtFyIVABv/p7k=
-github.com/bmatcuk/doublestar v1.1.5 h1:2bNwBOmhyFEFcoB3tGvTD5xanq+4kyOZlB8wFYbMjkk=
-github.com/bmatcuk/doublestar v1.1.5/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE=
+github.com/bmatcuk/doublestar v1.3.1 h1:rT8rxDPsavp9G+4ZULzqhhUSaI/OPsTZNG88Z3i0xvY=
+github.com/bmatcuk/doublestar v1.3.1/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
@@ -81,6 +77,7 @@ github.com/docker/docker v0.7.3-0.20190108045446-77df18c24acf/go.mod h1:eEKB0N0r
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/dustin/go-humanize v0.0.0-20180713052910-9f541cc9db5d/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
+github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
@@ -325,6 +322,8 @@ github.com/golang-migrate/migrate/v4 v4.3.1/go.mod h1:mJ89KBgbXmM3P49BqOxRL3riNF
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef h1:veQD95Isof8w9/WXiA+pa3tz3fJXkt5B7QaRBrM62gk=
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
+github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
@@ -340,6 +339,7 @@ github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
+github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY=
@@ -352,9 +352,12 @@ github.com/gorilla/mux v1.6.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2z
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/mux v1.7.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/pat v0.0.0-20180118222023-199c85a7f6d1/go.mod h1:YeAe0gNeiNT5hoiZRI4yiOky6jVdNvfO2N6Kav/HmxY=
+github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ=
github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
github.com/gorilla/sessions v1.1.2/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w=
github.com/gorilla/sessions v1.1.3/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w=
+github.com/gorilla/sessions v1.2.0 h1:S7P+1Hm5V/AT9cjEcUD5uDaQSX0OE577aCXgoaKpYbQ=
+github.com/gorilla/sessions v1.2.0/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
github.com/gorilla/websocket v1.2.0 h1:VJtLvh6VQym50czpZzx07z/kw9EgAxI3x1ZB8taTMQQ=
github.com/gorilla/websocket v1.2.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
@@ -383,6 +386,8 @@ github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANyt
github.com/jackc/fake v0.0.0-20150926172116-812a484cc733/go.mod h1:WrMFNQdiFJ80sQsxDoMokWK1W5TQtxBFNpzWTD84ibQ=
github.com/jackc/pgx v3.2.0+incompatible/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I=
github.com/jellevandenhooff/dkim v0.0.0-20150330215556-f50fe3d243e1/go.mod h1:E0B/fFc00Y+Rasa88328GlI/XbtyysCtTHZS8h7IrBU=
+github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a h1:zPPuIq2jAWWPTrGt70eK/BSch+gFAGrNzecsoENgu2o=
+github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a/go.mod h1:yL958EeXv8Ylng6IfnvG4oflryUi3vgA3xPs9hmII1s=
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmoiron/sqlx v0.0.0-20180614180643-0dae4fefe7c0/go.mod h1:IiEW3SEiiErVyFdH8NTuWjSifiEQKUoyK3LNqr2kCHU=
github.com/jmoiron/sqlx v1.2.0 h1:41Ip0zITnmWNR/vHV+S4m+VoUivnWY5E4OJfLZjCJMA=
@@ -391,6 +396,8 @@ github.com/joho/godotenv v1.2.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqx
github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
+github.com/json-iterator/go v1.1.9 h1:9yzud/Ht36ygwatGx56VwCZtlI/2AD15T1X2sjSuGns=
+github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8=
@@ -445,6 +452,8 @@ github.com/mattn/go-sqlite3 v1.9.0 h1:pDRiWfl+++eC2FEFRy6jXmQlvp4Yh3z1MJKg4UeYM/
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.10.0 h1:jbhqpg7tQe4SupckyijYiy0mJJ/pRyHvXf7JdWK860o=
github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
+github.com/mattn/go-sqlite3 v1.13.0 h1:LnJI81JidiW9r7pS/hXe6cFeO5EXNq7KbfvoJLRI69c=
+github.com/mattn/go-sqlite3 v1.13.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/microcosm-cc/bluemonday v1.0.1/go.mod h1:hsXNsILzKxV+sX77C5b8FSuKF00vh2OMYv+xgHpAMF4=
github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc=
@@ -453,6 +462,10 @@ github.com/mitchellh/mapstructure v0.0.0-20180203102830-a4e142e9c047/go.mod h1:F
github.com/mitchellh/mapstructure v1.0.0/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
+github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/mongodb/mongo-go-driver v0.3.0/go.mod h1:NK/HWDIIZkaYsnYa0hmtP443T5ELr0KDecmIioVuuyU=
github.com/monoculum/formam v0.0.0-20180901015400-4e68be1d79ba/go.mod h1:RKgILGEJq24YyJ2ban8EO0RUVSJlF1pGsEvoLEACr/Q=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
@@ -574,6 +587,8 @@ github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXf
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
+github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/tarm/serial v0.0.0-20180830185346-98f6abe2eb07/go.mod h1:kDXzergiv9cbyO7IOYJZWg1U88JhDg3PB6klq9Hg2pA=
github.com/tidwall/pretty v0.0.0-20180105212114-65a9db5fad51/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
@@ -633,7 +648,6 @@ golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTk
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
-golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180816102801-aaf60122140d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -660,6 +674,8 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn
golang.org/x/net v0.0.0-20190424112056-4829fb13d2c6/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190522155817-f3200d17e092 h1:4QSRKanuywn15aTZvI/mIDEgPQpswuFndXpOj3rKEco=
golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
+golang.org/x/net v0.0.0-20200421231249-e086a090c8fd h1:QPwSajcTUrFriMF1nJ3XzgoqakqQEsnZf9LdXdi2nkI=
+golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@@ -703,6 +719,8 @@ golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190426135247-a129542de9ae h1:mQLHiymj/JXKnnjc62tb7nD5pZLs940/sXJu+Xp3DBA=
golang.org/x/sys v0.0.0-20190426135247-a129542de9ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd h1:xhmwyvizuTgC2qz7ZlMluP20uW+C3Rm0FD/WLDX8884=
+golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
diff --git a/gqlgen.yml b/gqlgen.yml
index b0f197084..29e794f31 100644
--- a/gqlgen.yml
+++ b/gqlgen.yml
@@ -26,6 +26,8 @@ models:
model: github.com/stashapp/stash/pkg/models.ScrapedItem
Studio:
model: github.com/stashapp/stash/pkg/models.Studio
+ Movie:
+ model: github.com/stashapp/stash/pkg/models.Movie
Tag:
model: github.com/stashapp/stash/pkg/models.Tag
ScrapedPerformer:
@@ -36,6 +38,8 @@ models:
model: github.com/stashapp/stash/pkg/models.ScrapedScenePerformer
ScrapedSceneStudio:
model: github.com/stashapp/stash/pkg/models.ScrapedSceneStudio
+ ScrapedSceneMovie:
+ model: github.com/stashapp/stash/pkg/models.ScrapedSceneMovie
ScrapedSceneTag:
model: github.com/stashapp/stash/pkg/models.ScrapedSceneTag
SceneFileType:
diff --git a/graphql/documents/data/config.graphql b/graphql/documents/data/config.graphql
index 57cd64c94..e050bfdd1 100644
--- a/graphql/documents/data/config.graphql
+++ b/graphql/documents/data/config.graphql
@@ -2,25 +2,32 @@ fragment ConfigGeneralData on ConfigGeneralResult {
stashes
databasePath
generatedPath
+ cachePath
maxTranscodeSize
maxStreamingTranscodeSize
+ forceMkv
+ forceHevc
username
password
+ maxSessionAge
logFile
logOut
logLevel
logAccess
excludes
+ scraperUserAgent
}
fragment ConfigInterfaceData on ConfigInterfaceResult {
soundOnPreview
wallShowTitle
+ wallPlayback
maximumLoopDuration
autostartVideo
showStudioAsText
css
cssEnabled
+ language
}
fragment ConfigData on ConfigResult {
diff --git a/graphql/documents/data/movie-slim.graphql b/graphql/documents/data/movie-slim.graphql
new file mode 100644
index 000000000..49f458921
--- /dev/null
+++ b/graphql/documents/data/movie-slim.graphql
@@ -0,0 +1,5 @@
+fragment SlimMovieData on Movie {
+ id
+ name
+ front_image_path
+}
\ No newline at end of file
diff --git a/graphql/documents/data/movie.graphql b/graphql/documents/data/movie.graphql
new file mode 100644
index 000000000..ef3ab3f9f
--- /dev/null
+++ b/graphql/documents/data/movie.graphql
@@ -0,0 +1,20 @@
+fragment MovieData on Movie {
+ id
+ checksum
+ name
+ aliases
+ duration
+ date
+ rating
+ director
+
+ studio {
+ ...StudioData
+ }
+
+ synopsis
+ url
+ front_image_path
+ back_image_path
+ scene_count
+}
diff --git a/graphql/documents/data/performer-slim.graphql b/graphql/documents/data/performer-slim.graphql
index fdfa6016b..c2abc6023 100644
--- a/graphql/documents/data/performer-slim.graphql
+++ b/graphql/documents/data/performer-slim.graphql
@@ -1,5 +1,6 @@
fragment SlimPerformerData on Performer {
id
name
+ gender
image_path
}
diff --git a/graphql/documents/data/performer.graphql b/graphql/documents/data/performer.graphql
index e2ce624ec..cc5e6d2f1 100644
--- a/graphql/documents/data/performer.graphql
+++ b/graphql/documents/data/performer.graphql
@@ -3,6 +3,7 @@ fragment PerformerData on Performer {
checksum
name
url
+ gender
twitter
instagram
birthdate
diff --git a/graphql/documents/data/scene-slim.graphql b/graphql/documents/data/scene-slim.graphql
index 60fa93fe6..6d6fb3b05 100644
--- a/graphql/documents/data/scene-slim.graphql
+++ b/graphql/documents/data/scene-slim.graphql
@@ -47,6 +47,15 @@ fragment SlimSceneData on Scene {
image_path
}
+ movies {
+ movie {
+ id
+ name
+ front_image_path
+ }
+ scene_index
+ }
+
tags {
id
name
diff --git a/graphql/documents/data/scene.graphql b/graphql/documents/data/scene.graphql
index 9157eb714..06a7cab5a 100644
--- a/graphql/documents/data/scene.graphql
+++ b/graphql/documents/data/scene.graphql
@@ -42,6 +42,13 @@ fragment SceneData on Scene {
studio {
...StudioData
}
+
+ movies {
+ movie {
+ ...MovieData
+ }
+ scene_index
+ }
tags {
...TagData
diff --git a/graphql/documents/data/scrapers.graphql b/graphql/documents/data/scrapers.graphql
index 616465d0d..e9c8f324b 100644
--- a/graphql/documents/data/scrapers.graphql
+++ b/graphql/documents/data/scrapers.graphql
@@ -1,5 +1,27 @@
fragment ScrapedPerformerData on ScrapedPerformer {
name
+ gender
+ url
+ twitter
+ instagram
+ birthdate
+ ethnicity
+ country
+ eye_color
+ height
+ measurements
+ fake_tits
+ career_length
+ tattoos
+ piercings
+ aliases
+ image
+}
+
+fragment ScrapedScenePerformerData on ScrapedScenePerformer {
+ id
+ name
+ gender
url
twitter
instagram
@@ -16,23 +38,27 @@ fragment ScrapedPerformerData on ScrapedPerformer {
aliases
}
-fragment ScrapedScenePerformerData on ScrapedScenePerformer {
+fragment ScrapedMovieData on ScrapedMovie {
+ name
+ aliases
+ duration
+ date
+ rating
+ director
+ url
+ synopsis
+}
+
+fragment ScrapedSceneMovieData on ScrapedSceneMovie {
id
name
- url
- twitter
- instagram
- birthdate
- ethnicity
- country
- eye_color
- height
- measurements
- fake_tits
- career_length
- tattoos
- piercings
aliases
+ duration
+ date
+ rating
+ director
+ url
+ synopsis
}
fragment ScrapedSceneStudioData on ScrapedSceneStudio {
@@ -51,6 +77,7 @@ fragment ScrapedSceneData on ScrapedScene {
details
url
date
+ image
file {
size
@@ -74,4 +101,8 @@ fragment ScrapedSceneData on ScrapedScene {
performers {
...ScrapedScenePerformerData
}
+
+ movies {
+ ...ScrapedSceneMovieData
+ }
}
\ No newline at end of file
diff --git a/graphql/documents/mutations/metadata.graphql b/graphql/documents/mutations/metadata.graphql
new file mode 100644
index 000000000..d02f10b09
--- /dev/null
+++ b/graphql/documents/mutations/metadata.graphql
@@ -0,0 +1,27 @@
+mutation MetadataImport {
+ metadataImport
+}
+
+mutation MetadataExport {
+ metadataExport
+}
+
+mutation MetadataScan($input: ScanMetadataInput!) {
+ metadataScan(input: $input)
+}
+
+mutation MetadataGenerate($input: GenerateMetadataInput!) {
+ metadataGenerate(input: $input)
+}
+
+mutation MetadataAutoTag($input: AutoTagMetadataInput!) {
+ metadataAutoTag(input: $input)
+}
+
+mutation MetadataClean {
+ metadataClean
+}
+
+mutation StopJob {
+ stopJob
+}
\ No newline at end of file
diff --git a/graphql/documents/mutations/movie.graphql b/graphql/documents/mutations/movie.graphql
new file mode 100644
index 000000000..253d2f8ac
--- /dev/null
+++ b/graphql/documents/mutations/movie.graphql
@@ -0,0 +1,40 @@
+mutation MovieCreate(
+ $name: String!,
+ $aliases: String,
+ $duration: Int,
+ $date: String,
+ $rating: Int,
+ $studio_id: ID,
+ $director: String,
+ $synopsis: String,
+ $url: String,
+ $front_image: String,
+ $back_image: String) {
+
+ movieCreate(input: { name: $name, aliases: $aliases, duration: $duration, date: $date, rating: $rating, studio_id: $studio_id, director: $director, synopsis: $synopsis, url: $url, front_image: $front_image, back_image: $back_image }) {
+ ...MovieData
+ }
+}
+
+mutation MovieUpdate(
+ $id: ID!
+ $name: String,
+ $aliases: String,
+ $duration: Int,
+ $date: String,
+ $rating: Int,
+ $studio_id: ID,
+ $director: String,
+ $synopsis: String,
+ $url: String,
+ $front_image: String,
+ $back_image: String) {
+
+ movieUpdate(input: { id: $id, name: $name, aliases: $aliases, duration: $duration, date: $date, rating: $rating, studio_id: $studio_id, director: $director, synopsis: $synopsis, url: $url, front_image: $front_image, back_image: $back_image }) {
+ ...MovieData
+ }
+}
+
+mutation MovieDestroy($id: ID!) {
+ movieDestroy(input: { id: $id })
+}
\ No newline at end of file
diff --git a/graphql/documents/mutations/performer.graphql b/graphql/documents/mutations/performer.graphql
index ec785f5f2..ae0b5e17f 100644
--- a/graphql/documents/mutations/performer.graphql
+++ b/graphql/documents/mutations/performer.graphql
@@ -1,6 +1,7 @@
mutation PerformerCreate(
$name: String,
$url: String,
+ $gender: GenderEnum,
$birthdate: String,
$ethnicity: String,
$country: String,
@@ -20,6 +21,7 @@ mutation PerformerCreate(
performerCreate(input: {
name: $name,
url: $url,
+ gender: $gender,
birthdate: $birthdate,
ethnicity: $ethnicity,
country: $country,
@@ -44,6 +46,7 @@ mutation PerformerUpdate(
$id: ID!,
$name: String,
$url: String,
+ $gender: GenderEnum,
$birthdate: String,
$ethnicity: String,
$country: String,
@@ -64,6 +67,7 @@ mutation PerformerUpdate(
id: $id,
name: $name,
url: $url,
+ gender: $gender,
birthdate: $birthdate,
ethnicity: $ethnicity,
country: $country,
diff --git a/graphql/documents/mutations/scene.graphql b/graphql/documents/mutations/scene.graphql
index ad4076c81..80c38d109 100644
--- a/graphql/documents/mutations/scene.graphql
+++ b/graphql/documents/mutations/scene.graphql
@@ -8,6 +8,7 @@ mutation SceneUpdate(
$studio_id: ID,
$gallery_id: ID,
$performer_ids: [ID!] = [],
+ $movies: [SceneMovieInput!] = [],
$tag_ids: [ID!] = [],
$cover_image: String) {
@@ -21,6 +22,7 @@ mutation SceneUpdate(
studio_id: $studio_id,
gallery_id: $gallery_id,
performer_ids: $performer_ids,
+ movies: $movies,
tag_ids: $tag_ids,
cover_image: $cover_image
}) {
@@ -37,8 +39,8 @@ mutation BulkSceneUpdate(
$rating: Int,
$studio_id: ID,
$gallery_id: ID,
- $performer_ids: [ID!],
- $tag_ids: [ID!]) {
+ $performer_ids: BulkUpdateIds,
+ $tag_ids: BulkUpdateIds) {
bulkSceneUpdate(input: {
ids: $ids,
@@ -76,4 +78,8 @@ mutation SceneResetO($id: ID!) {
mutation SceneDestroy($id: ID!, $delete_file: Boolean, $delete_generated : Boolean) {
sceneDestroy(input: {id: $id, delete_file: $delete_file, delete_generated: $delete_generated})
-}
\ No newline at end of file
+}
+
+mutation SceneGenerateScreenshot($id: ID!, $at: Float) {
+ sceneGenerateScreenshot(id: $id, at: $at)
+}
diff --git a/graphql/documents/queries/misc.graphql b/graphql/documents/queries/misc.graphql
index 65b0a59e1..2786ca997 100644
--- a/graphql/documents/queries/misc.graphql
+++ b/graphql/documents/queries/misc.graphql
@@ -19,19 +19,24 @@ query AllTags {
}
query AllPerformersForFilter {
- allPerformers {
+ allPerformersSlim {
...SlimPerformerData
}
}
query AllStudiosForFilter {
- allStudios {
+ allStudiosSlim {
...SlimStudioData
}
}
+query AllMoviesForFilter {
+ allMoviesSlim {
+ ...SlimMovieData
+ }
+}
query AllTagsForFilter {
- allTags {
+ allTagsSlim {
id
name
}
@@ -47,9 +52,11 @@ query ValidGalleriesForScene($scene_id: ID!) {
query Stats {
stats {
scene_count,
+ scene_size_count,
gallery_count,
performer_count,
studio_count,
+ movie_count,
tag_count
}
}
diff --git a/graphql/documents/queries/movie.graphql b/graphql/documents/queries/movie.graphql
new file mode 100644
index 000000000..c22b61b5b
--- /dev/null
+++ b/graphql/documents/queries/movie.graphql
@@ -0,0 +1,14 @@
+query FindMovies($filter: FindFilterType, $movie_filter: MovieFilterType) {
+ findMovies(filter: $filter, movie_filter: $movie_filter) {
+ count
+ movies {
+ ...MovieData
+ }
+ }
+}
+
+query FindMovie($id: ID!) {
+ findMovie(id: $id) {
+ ...MovieData
+ }
+}
\ No newline at end of file
diff --git a/graphql/documents/queries/scene.graphql b/graphql/documents/queries/scene.graphql
index add95cca1..343335503 100644
--- a/graphql/documents/queries/scene.graphql
+++ b/graphql/documents/queries/scene.graphql
@@ -46,6 +46,9 @@ query ParseSceneFilenames($filter: FindFilterType!, $config: SceneParserInput!)
rating
studio_id
gallery_id
+ movies {
+ movie_id
+ }
performer_ids
tag_ids
}
diff --git a/graphql/documents/queries/settings/config.graphql b/graphql/documents/queries/settings/config.graphql
index f92b14b7a..4ee9d4ec6 100644
--- a/graphql/documents/queries/settings/config.graphql
+++ b/graphql/documents/queries/settings/config.graphql
@@ -4,6 +4,10 @@ query Configuration {
}
}
-query Directories($path: String) {
- directories(path: $path)
-}
\ No newline at end of file
+query Directory($path: String) {
+ directory(path: $path) {
+ path
+ parent
+ directories
+ }
+}
diff --git a/graphql/documents/queries/settings/metadata.graphql b/graphql/documents/queries/settings/metadata.graphql
index a9092b8ea..376f8e4a0 100644
--- a/graphql/documents/queries/settings/metadata.graphql
+++ b/graphql/documents/queries/settings/metadata.graphql
@@ -1,27 +1,3 @@
-query MetadataImport {
- metadataImport
-}
-
-query MetadataExport {
- metadataExport
-}
-
-query MetadataScan($input: ScanMetadataInput!) {
- metadataScan(input: $input)
-}
-
-query MetadataGenerate($input: GenerateMetadataInput!) {
- metadataGenerate(input: $input)
-}
-
-query MetadataAutoTag($input: AutoTagMetadataInput!) {
- metadataAutoTag(input: $input)
-}
-
-query MetadataClean {
- metadataClean
-}
-
query JobStatus {
jobStatus {
progress
@@ -29,7 +5,3 @@ query JobStatus {
message
}
}
-
-query StopJob {
- stopJob
-}
\ No newline at end of file
diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql
index 8feae6add..3a57a551e 100644
--- a/graphql/schema/schema.graphql
+++ b/graphql/schema/schema.graphql
@@ -22,6 +22,11 @@ type Query {
"""A function which queries Studio objects"""
findStudios(filter: FindFilterType): FindStudiosResultType!
+ """Find a movie by ID"""
+ findMovie(id: ID!): Movie
+ """A function which queries Movie objects"""
+ findMovies(movie_filter: MovieFilterType, filter: FindFilterType): FindMoviesResultType!
+
findGallery(id: ID!): Gallery
findGalleries(filter: FindFilterType): FindGalleriesResultType!
@@ -68,32 +73,26 @@ type Query {
"""Returns the current, complete configuration"""
configuration: ConfigResult!
"""Returns an array of paths for the given path"""
- directories(path: String): [String!]!
+ directory(path: String): Directory!
# Metadata
- """Start an import. Returns the job ID"""
- metadataImport: String!
- """Start an export. Returns the job ID"""
- metadataExport: String!
- """Start a scan. Returns the job ID"""
- metadataScan(input: ScanMetadataInput!): String!
- """Start generating content. Returns the job ID"""
- metadataGenerate(input: GenerateMetadataInput!): String!
- """Start auto-tagging. Returns the job ID"""
- metadataAutoTag(input: AutoTagMetadataInput!): String!
- """Clean metadata. Returns the job ID"""
- metadataClean: String!
-
jobStatus: MetadataUpdateStatus!
- stopJob: Boolean!
# Get everything
allPerformers: [Performer!]!
allStudios: [Studio!]!
+ allMovies: [Movie!]!
allTags: [Tag!]!
+ # Get everything with minimal metadata
+
+ allPerformersSlim: [Performer!]!
+ allStudiosSlim: [Studio!]!
+ allMoviesSlim: [Movie!]!
+ allTagsSlim: [Tag!]!
+
# Version
version: Version!
@@ -114,6 +113,9 @@ type Mutation {
"""Resets the o-counter for a scene to 0. Returns the new value"""
sceneResetO(id: ID!): Int!
+ """Generates screenshot at specified time in seconds. Leave empty to generate default screenshot"""
+ sceneGenerateScreenshot(id: ID!, at: Float): String!
+
sceneMarkerCreate(input: SceneMarkerCreateInput!): SceneMarker
sceneMarkerUpdate(input: SceneMarkerUpdateInput!): SceneMarker
sceneMarkerDestroy(id: ID!): Boolean!
@@ -126,6 +128,10 @@ type Mutation {
studioUpdate(input: StudioUpdateInput!): Studio
studioDestroy(input: StudioDestroyInput!): Boolean!
+ movieCreate(input: MovieCreateInput!): Movie
+ movieUpdate(input: MovieUpdateInput!): Movie
+ movieDestroy(input: MovieDestroyInput!): Boolean!
+
tagCreate(input: TagCreateInput!): Tag
tagUpdate(input: TagUpdateInput!): Tag
tagDestroy(input: TagDestroyInput!): Boolean!
@@ -133,6 +139,21 @@ type Mutation {
"""Change general configuration options"""
configureGeneral(input: ConfigGeneralInput!): ConfigGeneralResult!
configureInterface(input: ConfigInterfaceInput!): ConfigInterfaceResult!
+
+ """Start an import. Returns the job ID"""
+ metadataImport: String!
+ """Start an export. Returns the job ID"""
+ metadataExport: String!
+ """Start a scan. Returns the job ID"""
+ metadataScan(input: ScanMetadataInput!): String!
+ """Start generating content. Returns the job ID"""
+ metadataGenerate(input: GenerateMetadataInput!): String!
+ """Start auto-tagging. Returns the job ID"""
+ metadataAutoTag(input: AutoTagMetadataInput!): String!
+ """Clean metadata. Returns the job ID"""
+ metadataClean: String!
+
+ stopJob: Boolean!
}
type Subscription {
diff --git a/graphql/schema/types/config.graphql b/graphql/schema/types/config.graphql
index 2d90c24f4..0f65c97fb 100644
--- a/graphql/schema/types/config.graphql
+++ b/graphql/schema/types/config.graphql
@@ -14,14 +14,22 @@ input ConfigGeneralInput {
databasePath: String
"""Path to generated files"""
generatedPath: String
+ """Path to cache"""
+ cachePath: String
"""Max generated transcode size"""
maxTranscodeSize: StreamingResolutionEnum
"""Max streaming transcode size"""
maxStreamingTranscodeSize: StreamingResolutionEnum
+ """Force MKV as supported format"""
+ forceMkv: Boolean!
+ """Force HEVC as a supported codec"""
+ forceHevc: Boolean!
"""Username"""
username: String
"""Password"""
password: String
+ """Maximum session cookie age"""
+ maxSessionAge: Int
"""Name of the log file"""
logFile: String
"""Whether to also output to stderr"""
@@ -32,6 +40,8 @@ input ConfigGeneralInput {
logAccess: Boolean!
"""Array of file regexp to exclude from Scan"""
excludes: [String!]
+ """Scraper user agent string"""
+ scraperUserAgent: String
}
type ConfigGeneralResult {
@@ -41,14 +51,22 @@ type ConfigGeneralResult {
databasePath: String!
"""Path to generated files"""
generatedPath: String!
- """Max generated transcode size"""
+ """Path to cache"""
+ cachePath: String!
+ """Max generated transcode size"""
maxTranscodeSize: StreamingResolutionEnum
"""Max streaming transcode size"""
maxStreamingTranscodeSize: StreamingResolutionEnum
+ """Force MKV as supported format"""
+ forceMkv: Boolean!
+ """Force HEVC as a supported codec"""
+ forceHevc: Boolean!
"""Username"""
username: String!
"""Password"""
password: String!
+ """Maximum session cookie age"""
+ maxSessionAge: Int!
"""Name of the log file"""
logFile: String
"""Whether to also output to stderr"""
@@ -59,6 +77,8 @@ type ConfigGeneralResult {
logAccess: Boolean!
"""Array of file regexp to exclude from Scan"""
excludes: [String!]!
+ """Scraper user agent string"""
+ scraperUserAgent: String
}
input ConfigInterfaceInput {
@@ -66,6 +86,8 @@ input ConfigInterfaceInput {
soundOnPreview: Boolean
"""Show title and tags in wall view"""
wallShowTitle: Boolean
+ """Wall playback type"""
+ wallPlayback: String
"""Maximum duration (in seconds) in which a scene video will loop in the scene player"""
maximumLoopDuration: Int
"""If true, video will autostart on load in the scene player"""
@@ -75,6 +97,7 @@ input ConfigInterfaceInput {
"""Custom CSS"""
css: String
cssEnabled: Boolean
+ language: String
}
type ConfigInterfaceResult {
@@ -82,6 +105,8 @@ type ConfigInterfaceResult {
soundOnPreview: Boolean
"""Show title and tags in wall view"""
wallShowTitle: Boolean
+ """Wall playback type"""
+ wallPlayback: String
"""Maximum duration (in seconds) in which a scene video will loop in the scene player"""
maximumLoopDuration: Int
"""If true, video will autostart on load in the scene player"""
@@ -91,6 +116,8 @@ type ConfigInterfaceResult {
"""Custom CSS"""
css: String
cssEnabled: Boolean
+ """Interface language"""
+ language: String
}
"""All configuration settings"""
@@ -98,3 +125,10 @@ type ConfigResult {
general: ConfigGeneralResult!
interface: ConfigInterfaceResult!
}
+
+"""Directory structure of a path"""
+type Directory {
+ path: String!
+ parent: String
+ directories: [String!]!
+}
diff --git a/graphql/schema/types/filters.graphql b/graphql/schema/types/filters.graphql
index 4849d2803..92ac58e2c 100644
--- a/graphql/schema/types/filters.graphql
+++ b/graphql/schema/types/filters.graphql
@@ -46,6 +46,10 @@ input PerformerFilterType {
piercings: StringCriterionInput
"""Filter by aliases"""
aliases: StringCriterionInput
+ """Filter by gender"""
+ gender: GenderCriterionInput
+ """Filter to only include performers missing this property"""
+ is_missing: String
}
input SceneMarkerFilterType {
@@ -74,12 +78,19 @@ input SceneFilterType {
is_missing: String
"""Filter to only include scenes with this studio"""
studios: MultiCriterionInput
+ """Filter to only include scenes with this movie"""
+ movies: MultiCriterionInput
"""Filter to only include scenes with these tags"""
tags: MultiCriterionInput
"""Filter to only include scenes with these performers"""
performers: MultiCriterionInput
}
+input MovieFilterType {
+ """Filter to only include movies with this studio"""
+ studios: MultiCriterionInput
+}
+
enum CriterionModifier {
"""="""
EQUALS,
@@ -112,4 +123,9 @@ input IntCriterionInput {
input MultiCriterionInput {
value: [ID!]
modifier: CriterionModifier!
+}
+
+input GenderCriterionInput {
+ value: GenderEnum
+ modifier: CriterionModifier!
}
\ No newline at end of file
diff --git a/graphql/schema/types/metadata.graphql b/graphql/schema/types/metadata.graphql
index a603b56b5..dede131fc 100644
--- a/graphql/schema/types/metadata.graphql
+++ b/graphql/schema/types/metadata.graphql
@@ -1,8 +1,12 @@
input GenerateMetadataInput {
sprites: Boolean!
previews: Boolean!
+ previewPreset: PreviewPreset
+ imagePreviews: Boolean!
markers: Boolean!
transcodes: Boolean!
+ """gallery thumbnails for cache usage"""
+ thumbnails: Boolean!
}
input ScanMetadataInput {
@@ -20,6 +24,16 @@ input AutoTagMetadataInput {
type MetadataUpdateStatus {
progress: Float!
- status: String!
+ status: String!
message: String!
-}
\ No newline at end of file
+}
+
+enum PreviewPreset {
+ "X264_ULTRAFAST", ultrafast
+ "X264_VERYFAST", veryfast
+ "X264_FAST", fast
+ "X264_MEDIUM", medium
+ "X264_SLOW", slow
+ "X264_SLOWER", slower
+ "X264_VERYSLOW", veryslow
+}
diff --git a/graphql/schema/types/movie.graphql b/graphql/schema/types/movie.graphql
new file mode 100644
index 000000000..0b41af0c8
--- /dev/null
+++ b/graphql/schema/types/movie.graphql
@@ -0,0 +1,59 @@
+type Movie {
+ id: ID!
+ checksum: String!
+ name: String!
+ aliases: String
+ """Duration in seconds"""
+ duration: Int
+ date: String
+ rating: Int
+ studio: Studio
+ director: String
+ synopsis: String
+ url: String
+
+ front_image_path: String # Resolver
+ back_image_path: String # Resolver
+ scene_count: Int # Resolver
+}
+
+input MovieCreateInput {
+ name: String!
+ aliases: String
+ """Duration in seconds"""
+ duration: Int
+ date: String
+ rating: Int
+ studio_id: ID
+ director: String
+ synopsis: String
+ url: String
+ """This should be base64 encoded"""
+ front_image: String
+ back_image: String
+}
+
+input MovieUpdateInput {
+ id: ID!
+ name: String
+ aliases: String
+ duration: Int
+ date: String
+ rating: Int
+ studio_id: ID
+ director: String
+ synopsis: String
+ url: String
+ """This should be base64 encoded"""
+ front_image: String
+ back_image: String
+}
+
+input MovieDestroyInput {
+ id: ID!
+}
+
+type FindMoviesResultType {
+ count: Int!
+ movies: [Movie!]!
+}
\ No newline at end of file
diff --git a/graphql/schema/types/performer.graphql b/graphql/schema/types/performer.graphql
index a1ba8e6f7..621f23dd9 100644
--- a/graphql/schema/types/performer.graphql
+++ b/graphql/schema/types/performer.graphql
@@ -1,8 +1,17 @@
+enum GenderEnum {
+ MALE
+ FEMALE
+ TRANSGENDER_MALE
+ TRANSGENDER_FEMALE
+ INTERSEX
+}
+
type Performer {
id: ID!
checksum: String!
name: String
url: String
+ gender: GenderEnum
twitter: String
instagram: String
birthdate: String
@@ -26,6 +35,7 @@ type Performer {
input PerformerCreateInput {
name: String
url: String
+ gender: GenderEnum
birthdate: String
ethnicity: String
country: String
@@ -48,6 +58,7 @@ input PerformerUpdateInput {
id: ID!
name: String
url: String
+ gender: GenderEnum
birthdate: String
ethnicity: String
country: String
diff --git a/graphql/schema/types/scene.graphql b/graphql/schema/types/scene.graphql
index c0828dae4..8afdd4b43 100644
--- a/graphql/schema/types/scene.graphql
+++ b/graphql/schema/types/scene.graphql
@@ -18,6 +18,11 @@ type ScenePathsType {
chapters_vtt: String # Resolver
}
+type SceneMovie {
+ movie: Movie!
+ scene_index: Int
+}
+
type Scene {
id: ID!
checksum: String!
@@ -36,10 +41,16 @@ type Scene {
scene_markers: [SceneMarker!]!
gallery: Gallery
studio: Studio
+ movies: [SceneMovie!]!
tags: [Tag!]!
performers: [Performer!]!
}
+input SceneMovieInput {
+ movie_id: ID!
+ scene_index: Int
+}
+
input SceneUpdateInput {
clientMutationId: String
id: ID!
@@ -51,11 +62,23 @@ input SceneUpdateInput {
studio_id: ID
gallery_id: ID
performer_ids: [ID!]
+ movies: [SceneMovieInput!]
tag_ids: [ID!]
"""This should be base64 encoded"""
cover_image: String
}
+enum BulkUpdateIdMode {
+ SET
+ ADD
+ REMOVE
+}
+
+input BulkUpdateIds {
+ ids: [ID!]
+ mode: BulkUpdateIdMode!
+}
+
input BulkSceneUpdateInput {
clientMutationId: String
ids: [ID!]
@@ -66,8 +89,8 @@ input BulkSceneUpdateInput {
rating: Int
studio_id: ID
gallery_id: ID
- performer_ids: [ID!]
- tag_ids: [ID!]
+ performer_ids: BulkUpdateIds
+ tag_ids: BulkUpdateIds
}
input SceneDestroyInput {
@@ -87,6 +110,11 @@ input SceneParserInput {
capitalizeTitle: Boolean
}
+type SceneMovieID {
+ movie_id: ID!
+ scene_index: String
+}
+
type SceneParserResult {
scene: Scene!
title: String
@@ -97,6 +125,7 @@ type SceneParserResult {
studio_id: ID
gallery_id: ID
performer_ids: [ID!]
+ movies: [SceneMovieID!]
tag_ids: [ID!]
}
diff --git a/graphql/schema/types/scraped-movie.graphql b/graphql/schema/types/scraped-movie.graphql
new file mode 100644
index 000000000..7589de364
--- /dev/null
+++ b/graphql/schema/types/scraped-movie.graphql
@@ -0,0 +1,22 @@
+"""A movie from a scraping operation..."""
+type ScrapedMovie {
+ name: String
+ aliases: String
+ duration: String
+ date: String
+ rating: String
+ director: String
+ url: String
+ synopsis: String
+}
+
+input ScrapedMovieInput {
+ name: String
+ aliases: String
+ duration: String
+ date: String
+ rating: String
+ director: String
+ url: String
+ synopsis: String
+}
\ No newline at end of file
diff --git a/graphql/schema/types/scraped-performer.graphql b/graphql/schema/types/scraped-performer.graphql
index a16f3df23..d991ed327 100644
--- a/graphql/schema/types/scraped-performer.graphql
+++ b/graphql/schema/types/scraped-performer.graphql
@@ -1,6 +1,7 @@
"""A performer from a scraping operation..."""
type ScrapedPerformer {
name: String
+ gender: String
url: String
twitter: String
instagram: String
@@ -15,10 +16,14 @@ type ScrapedPerformer {
tattoos: String
piercings: String
aliases: String
+
+ """This should be base64 encoded"""
+ image: String
}
input ScrapedPerformerInput {
name: String
+ gender: String
url: String
twitter: String
instagram: String
@@ -33,4 +38,6 @@ input ScrapedPerformerInput {
tattoos: String
piercings: String
aliases: String
+
+ # not including image for the input
}
\ No newline at end of file
diff --git a/graphql/schema/types/scraper.graphql b/graphql/schema/types/scraper.graphql
index 1dc153eb1..69c050a63 100644
--- a/graphql/schema/types/scraper.graphql
+++ b/graphql/schema/types/scraper.graphql
@@ -27,6 +27,7 @@ type ScrapedScenePerformer {
"""Set if performer matched"""
id: ID
name: String!
+ gender: String
url: String
twitter: String
instagram: String
@@ -43,6 +44,19 @@ type ScrapedScenePerformer {
aliases: String
}
+type ScrapedSceneMovie {
+ """Set if movie matched"""
+ id: ID
+ name: String!
+ aliases: String
+ duration: String
+ date: String
+ rating: String
+ director: String
+ synopsis: String
+ url: String
+}
+
type ScrapedSceneStudio {
"""Set if studio matched"""
id: ID
@@ -62,9 +76,13 @@ type ScrapedScene {
url: String
date: String
+ """This should be base64 encoded"""
+ image: String
+
file: SceneFileType # Resolver
studio: ScrapedSceneStudio
tags: [ScrapedSceneTag!]
performers: [ScrapedScenePerformer!]
+ movies: [ScrapedSceneMovie!]
}
diff --git a/graphql/schema/types/stats.graphql b/graphql/schema/types/stats.graphql
index f091f1bd3..d94086308 100644
--- a/graphql/schema/types/stats.graphql
+++ b/graphql/schema/types/stats.graphql
@@ -1,7 +1,9 @@
type StatsResultType {
scene_count: Int!
+ scene_size_count: String!
gallery_count: Int!
performer_count: Int!
studio_count: Int!
+ movie_count: Int!
tag_count: Int!
-}
\ No newline at end of file
+}
diff --git a/pkg/api/cache_thumbs.go b/pkg/api/cache_thumbs.go
new file mode 100644
index 000000000..0bcbd616c
--- /dev/null
+++ b/pkg/api/cache_thumbs.go
@@ -0,0 +1,72 @@
+package api
+
+import (
+ "github.com/stashapp/stash/pkg/logger"
+ "github.com/stashapp/stash/pkg/manager/paths"
+ "github.com/stashapp/stash/pkg/models"
+ "github.com/stashapp/stash/pkg/utils"
+ "io/ioutil"
+)
+
+type thumbBuffer struct {
+ path string
+ dir string
+ data []byte
+}
+
+func newCacheThumb(dir string, path string, data []byte) *thumbBuffer {
+ t := thumbBuffer{dir: dir, path: path, data: data}
+ return &t
+}
+
+var writeChan chan *thumbBuffer
+var touchChan chan *string // NOTE(review): declared but never used in this file — remove or wire up before merge
+
+func startThumbCache() { // TODO add extra wait, close chan code if/when stash gets a stop mode
+
+ writeChan = make(chan *thumbBuffer, 20)
+ go thumbnailCacheWriter()
+}
+
+// serialize file writes to avoid race conditions
+func thumbnailCacheWriter() {
+
+ for thumb := range writeChan {
+ exists, _ := utils.FileExists(thumb.path)
+ if !exists {
+ err := utils.WriteFile(thumb.path, thumb.data)
+ if err != nil {
+ logger.Errorf("Write error for thumbnail %s: %s ", thumb.path, err)
+ }
+ }
+ }
+
+}
+
+// get thumbnail from cache, otherwise create it and store to cache
+func cacheGthumb(gallery *models.Gallery, index int, width int) []byte {
+ thumbPath := paths.GetGthumbPath(gallery.Checksum, index, width)
+ exists, _ := utils.FileExists(thumbPath)
+ if exists { // if thumbnail exists in cache return that
+ content, err := ioutil.ReadFile(thumbPath)
+ if err == nil {
+ return content
+ } else {
+ logger.Errorf("Read Error for file %s : %s", thumbPath, err)
+ }
+
+ }
+ data := gallery.GetThumbnail(index, width)
+ thumbDir := paths.GetGthumbDir(gallery.Checksum)
+ t := newCacheThumb(thumbDir, thumbPath, data)
+ writeChan <- t // write the file to cache
+ return data
+}
+
+// create all thumbs for a given gallery
+func CreateGthumbs(gallery *models.Gallery) {
+ count := gallery.ImageCount()
+ for i := 0; i < count; i++ {
+ cacheGthumb(gallery, i, models.DefaultGthumbWidth)
+ }
+}
diff --git a/pkg/api/check_version.go b/pkg/api/check_version.go
index ca79170b4..7cd1bb573 100644
--- a/pkg/api/check_version.go
+++ b/pkg/api/check_version.go
@@ -2,6 +2,7 @@ package api
import (
"encoding/json"
+ "errors"
"fmt"
"io/ioutil"
"net/http"
@@ -18,6 +19,10 @@ const apiTags string = "https://api.github.com/repos/stashapp/stash/tags"
const apiAcceptHeader string = "application/vnd.github.v3+json"
const developmentTag string = "latest_develop"
+// ErrNoVersion indicates that no version information has been embedded in the
+// stash binary
+var ErrNoVersion = errors.New("no stash version")
+
var stashReleases = func() map[string]string {
return map[string]string{
"windows/amd64": "stash-win.exe",
@@ -140,7 +145,7 @@ func GetLatestVersion(shortHash bool) (latestVersion string, latestRelease strin
version, _, _ := GetVersion()
if version == "" {
- return "", "", fmt.Errorf("Stash doesn't have a version. Version check not supported.")
+ return "", "", ErrNoVersion
}
// if the version is suffixed with -x-xxxx, then we are running a development build
diff --git a/pkg/api/context_keys.go b/pkg/api/context_keys.go
index ba58e761d..917e1c587 100644
--- a/pkg/api/context_keys.go
+++ b/pkg/api/context_keys.go
@@ -9,4 +9,6 @@ const (
performerKey key = 1
sceneKey key = 2
studioKey key = 3
+ movieKey key = 4
+ ContextUser key = 5
)
diff --git a/pkg/api/images.go b/pkg/api/images.go
index 48b5d18d5..bec90eb82 100644
--- a/pkg/api/images.go
+++ b/pkg/api/images.go
@@ -2,18 +2,31 @@ package api
import (
"math/rand"
+ "strings"
"github.com/gobuffalo/packr/v2"
)
var performerBox *packr.Box
+var performerBoxMale *packr.Box
func initialiseImages() {
performerBox = packr.New("Performer Box", "../../static/performer")
+ performerBoxMale = packr.New("Male Performer Box", "../../static/performer_male")
}
-func getRandomPerformerImage() ([]byte, error) {
- imageFiles := performerBox.List()
+func getRandomPerformerImage(gender string) ([]byte, error) {
+ var box *packr.Box
+ switch strings.ToUpper(gender) {
+ case "FEMALE":
+ box = performerBox
+ case "MALE":
+ box = performerBoxMale
+ default:
+ box = performerBox
+
+ }
+ imageFiles := box.List()
index := rand.Intn(len(imageFiles))
- return performerBox.Find(imageFiles[index])
+ return box.Find(imageFiles[index])
}
diff --git a/pkg/api/migrate.go b/pkg/api/migrate.go
new file mode 100644
index 000000000..6305f47f8
--- /dev/null
+++ b/pkg/api/migrate.go
@@ -0,0 +1,92 @@
+package api
+
+import (
+ "fmt"
+ "html/template"
+ "net/http"
+ "os"
+
+ "github.com/stashapp/stash/pkg/database"
+ "github.com/stashapp/stash/pkg/logger"
+)
+
+type migrateData struct {
+ ExistingVersion uint
+ MigrateVersion uint
+ BackupPath string
+}
+
+func getMigrateData() migrateData {
+ return migrateData{
+ ExistingVersion: database.Version(),
+ MigrateVersion: database.AppSchemaVersion(),
+ BackupPath: database.DatabaseBackupPath(),
+ }
+}
+
+func getMigrateHandler(w http.ResponseWriter, r *http.Request) {
+ if !database.NeedsMigration() {
+ http.Redirect(w, r, "/", 301)
+ return
+ }
+
+ data, _ := setupUIBox.Find("migrate.html")
+ templ, err := template.New("Migrate").Parse(string(data))
+ if err != nil {
+ http.Error(w, fmt.Sprintf("error: %s", err), 500)
+ return
+ }
+
+ err = templ.Execute(w, getMigrateData())
+ if err != nil {
+ http.Error(w, fmt.Sprintf("error: %s", err), 500)
+ }
+}
+
+func doMigrateHandler(w http.ResponseWriter, r *http.Request) {
+	err := r.ParseForm()
+	if err != nil {
+		http.Error(w, fmt.Sprintf("error: %s", err), 500); return
+	}
+
+ formBackupPath := r.Form.Get("backuppath")
+
+ // always backup so that we can roll back to the previous version if
+ // migration fails
+ backupPath := formBackupPath
+ if formBackupPath == "" {
+ backupPath = database.DatabaseBackupPath()
+ }
+
+ // perform database backup
+ if err = database.Backup(backupPath); err != nil {
+ http.Error(w, fmt.Sprintf("error backing up database: %s", err), 500)
+ return
+ }
+
+ err = database.RunMigrations()
+ if err != nil {
+ errStr := fmt.Sprintf("error performing migration: %s", err)
+
+ // roll back to the backed up version
+ restoreErr := database.RestoreFromBackup(backupPath)
+ if restoreErr != nil {
+ errStr = fmt.Sprintf("ERROR: unable to restore database from backup after migration failure: %s\n%s", restoreErr.Error(), errStr)
+ } else {
+ errStr = "An error occurred migrating the database to the latest schema version. The backup database file was automatically renamed to restore the database.\n" + errStr
+ }
+
+ http.Error(w, errStr, 500)
+ return
+ }
+
+ // if no backup path was provided, then delete the created backup
+ if formBackupPath == "" {
+ err = os.Remove(backupPath)
+ if err != nil {
+ logger.Warnf("error removing unwanted database backup (%s): %s", backupPath, err.Error())
+ }
+ }
+
+ http.Redirect(w, r, "/", 301)
+}
diff --git a/pkg/api/resolver.go b/pkg/api/resolver.go
index 126003a6f..5713e0b22 100644
--- a/pkg/api/resolver.go
+++ b/pkg/api/resolver.go
@@ -33,6 +33,9 @@ func (r *Resolver) SceneMarker() models.SceneMarkerResolver {
func (r *Resolver) Studio() models.StudioResolver {
return &studioResolver{r}
}
+func (r *Resolver) Movie() models.MovieResolver {
+ return &movieResolver{r}
+}
func (r *Resolver) Subscription() models.SubscriptionResolver {
return &subscriptionResolver{r}
}
@@ -49,6 +52,7 @@ type performerResolver struct{ *Resolver }
type sceneResolver struct{ *Resolver }
type sceneMarkerResolver struct{ *Resolver }
type studioResolver struct{ *Resolver }
+type movieResolver struct{ *Resolver }
type tagResolver struct{ *Resolver }
func (r *queryResolver) MarkerWall(ctx context.Context, q *string) ([]*models.SceneMarker, error) {
@@ -89,19 +93,24 @@ func (r *queryResolver) ValidGalleriesForScene(ctx context.Context, scene_id *st
func (r *queryResolver) Stats(ctx context.Context) (*models.StatsResultType, error) {
scenesQB := models.NewSceneQueryBuilder()
scenesCount, _ := scenesQB.Count()
+ scenesSizeCount, _ := scenesQB.SizeCount()
galleryQB := models.NewGalleryQueryBuilder()
galleryCount, _ := galleryQB.Count()
performersQB := models.NewPerformerQueryBuilder()
performersCount, _ := performersQB.Count()
studiosQB := models.NewStudioQueryBuilder()
studiosCount, _ := studiosQB.Count()
+ moviesQB := models.NewMovieQueryBuilder()
+ moviesCount, _ := moviesQB.Count()
tagsQB := models.NewTagQueryBuilder()
tagsCount, _ := tagsQB.Count()
return &models.StatsResultType{
SceneCount: scenesCount,
+ SceneSizeCount: scenesSizeCount,
GalleryCount: galleryCount,
PerformerCount: performersCount,
StudioCount: studiosCount,
+ MovieCount: moviesCount,
TagCount: tagsCount,
}, nil
}
diff --git a/pkg/api/resolver_model_movie.go b/pkg/api/resolver_model_movie.go
new file mode 100644
index 000000000..6ab444a64
--- /dev/null
+++ b/pkg/api/resolver_model_movie.go
@@ -0,0 +1,95 @@
+package api
+
+import (
+ "context"
+
+ "github.com/stashapp/stash/pkg/api/urlbuilders"
+ "github.com/stashapp/stash/pkg/models"
+ "github.com/stashapp/stash/pkg/utils"
+)
+
+func (r *movieResolver) Name(ctx context.Context, obj *models.Movie) (string, error) {
+ if obj.Name.Valid {
+ return obj.Name.String, nil
+ }
+ return "", nil
+}
+
+func (r *movieResolver) URL(ctx context.Context, obj *models.Movie) (*string, error) {
+ if obj.URL.Valid {
+ return &obj.URL.String, nil
+ }
+ return nil, nil
+}
+
+func (r *movieResolver) Aliases(ctx context.Context, obj *models.Movie) (*string, error) {
+ if obj.Aliases.Valid {
+ return &obj.Aliases.String, nil
+ }
+ return nil, nil
+}
+
+func (r *movieResolver) Duration(ctx context.Context, obj *models.Movie) (*int, error) {
+ if obj.Duration.Valid {
+ rating := int(obj.Duration.Int64)
+ return &rating, nil
+ }
+ return nil, nil
+}
+
+func (r *movieResolver) Date(ctx context.Context, obj *models.Movie) (*string, error) {
+ if obj.Date.Valid {
+ result := utils.GetYMDFromDatabaseDate(obj.Date.String)
+ return &result, nil
+ }
+ return nil, nil
+}
+
+func (r *movieResolver) Rating(ctx context.Context, obj *models.Movie) (*int, error) {
+ if obj.Rating.Valid {
+ rating := int(obj.Rating.Int64)
+ return &rating, nil
+ }
+ return nil, nil
+}
+
+func (r *movieResolver) Studio(ctx context.Context, obj *models.Movie) (*models.Studio, error) {
+ qb := models.NewStudioQueryBuilder()
+ if obj.StudioID.Valid {
+ return qb.Find(int(obj.StudioID.Int64), nil)
+ }
+
+ return nil, nil
+}
+
+func (r *movieResolver) Director(ctx context.Context, obj *models.Movie) (*string, error) {
+ if obj.Director.Valid {
+ return &obj.Director.String, nil
+ }
+ return nil, nil
+}
+
+func (r *movieResolver) Synopsis(ctx context.Context, obj *models.Movie) (*string, error) {
+ if obj.Synopsis.Valid {
+ return &obj.Synopsis.String, nil
+ }
+ return nil, nil
+}
+
+func (r *movieResolver) FrontImagePath(ctx context.Context, obj *models.Movie) (*string, error) {
+ baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
+ frontimagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj.ID).GetMovieFrontImageURL()
+ return &frontimagePath, nil
+}
+
+func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (*string, error) {
+ baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
+ backimagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj.ID).GetMovieBackImageURL()
+ return &backimagePath, nil
+}
+
+func (r *movieResolver) SceneCount(ctx context.Context, obj *models.Movie) (*int, error) {
+ qb := models.NewSceneQueryBuilder()
+ res, err := qb.CountByMovieID(obj.ID)
+ return &res, err
+}
diff --git a/pkg/api/resolver_model_performer.go b/pkg/api/resolver_model_performer.go
index 94be0aeea..29a4d2d90 100644
--- a/pkg/api/resolver_model_performer.go
+++ b/pkg/api/resolver_model_performer.go
@@ -2,6 +2,7 @@ package api
import (
"context"
+
"github.com/stashapp/stash/pkg/api/urlbuilders"
"github.com/stashapp/stash/pkg/models"
)
@@ -20,6 +21,19 @@ func (r *performerResolver) URL(ctx context.Context, obj *models.Performer) (*st
return nil, nil
}
+func (r *performerResolver) Gender(ctx context.Context, obj *models.Performer) (*models.GenderEnum, error) {
+ var ret models.GenderEnum
+
+ if obj.Gender.Valid {
+ ret = models.GenderEnum(obj.Gender.String)
+ if ret.IsValid() {
+ return &ret, nil
+ }
+ }
+
+ return nil, nil
+}
+
func (r *performerResolver) Twitter(ctx context.Context, obj *models.Performer) (*string, error) {
if obj.Twitter.Valid {
return &obj.Twitter.String, nil
diff --git a/pkg/api/resolver_model_scene.go b/pkg/api/resolver_model_scene.go
index 892fe6112..36e921052 100644
--- a/pkg/api/resolver_model_scene.go
+++ b/pkg/api/resolver_model_scene.go
@@ -82,7 +82,9 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
}
func (r *sceneResolver) IsStreamable(ctx context.Context, obj *models.Scene) (bool, error) {
- return manager.IsStreamable(obj)
+ // ignore error
+ ret, _ := manager.IsStreamable(obj)
+ return ret, nil
}
func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) ([]*models.SceneMarker, error) {
@@ -100,6 +102,38 @@ func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (*models.
return qb.FindBySceneID(obj.ID)
}
+func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) ([]*models.SceneMovie, error) {
+ joinQB := models.NewJoinsQueryBuilder()
+ qb := models.NewMovieQueryBuilder()
+
+ sceneMovies, err := joinQB.GetSceneMovies(obj.ID, nil)
+ if err != nil {
+ return nil, err
+ }
+
+ var ret []*models.SceneMovie
+ for _, sm := range sceneMovies {
+ movie, err := qb.Find(sm.MovieID, nil)
+ if err != nil {
+ return nil, err
+ }
+
+ sceneIdx := sm.SceneIndex
+ sceneMovie := &models.SceneMovie{
+ Movie: movie,
+ }
+
+ if sceneIdx.Valid {
+ var idx int
+ idx = int(sceneIdx.Int64)
+ sceneMovie.SceneIndex = &idx
+ }
+
+ ret = append(ret, sceneMovie)
+ }
+ return ret, nil
+}
+
func (r *sceneResolver) Tags(ctx context.Context, obj *models.Scene) ([]*models.Tag, error) {
qb := models.NewTagQueryBuilder()
return qb.FindBySceneID(obj.ID, nil)
diff --git a/pkg/api/resolver_mutation_configure.go b/pkg/api/resolver_mutation_configure.go
index 2b9273caf..ab7deb743 100644
--- a/pkg/api/resolver_mutation_configure.go
+++ b/pkg/api/resolver_mutation_configure.go
@@ -38,6 +38,13 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
config.Set(config.Generated, input.GeneratedPath)
}
+ if input.CachePath != nil {
+ if err := utils.EnsureDir(*input.CachePath); err != nil {
+ return makeConfigGeneralResult(), err
+ }
+ config.Set(config.Cache, input.CachePath)
+ }
+
if input.MaxTranscodeSize != nil {
config.Set(config.MaxTranscodeSize, input.MaxTranscodeSize.String())
}
@@ -45,6 +52,8 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
if input.MaxStreamingTranscodeSize != nil {
config.Set(config.MaxStreamingTranscodeSize, input.MaxStreamingTranscodeSize.String())
}
+ config.Set(config.ForceMKV, input.ForceMkv)
+ config.Set(config.ForceHEVC, input.ForceHevc)
if input.Username != nil {
config.Set(config.Username, input.Username)
@@ -60,6 +69,10 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
}
}
+ if input.MaxSessionAge != nil {
+ config.Set(config.MaxSessionAge, *input.MaxSessionAge)
+ }
+
if input.LogFile != nil {
config.Set(config.LogFile, input.LogFile)
}
@@ -76,6 +89,10 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
config.Set(config.Exclude, input.Excludes)
}
+ if input.ScraperUserAgent != nil {
+ config.Set(config.ScraperUserAgent, input.ScraperUserAgent)
+ }
+
if err := config.Write(); err != nil {
return makeConfigGeneralResult(), err
}
@@ -94,6 +111,10 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.
config.Set(config.WallShowTitle, *input.WallShowTitle)
}
+ if input.WallPlayback != nil {
+ config.Set(config.WallPlayback, *input.WallPlayback)
+ }
+
if input.MaximumLoopDuration != nil {
config.Set(config.MaximumLoopDuration, *input.MaximumLoopDuration)
}
@@ -106,6 +127,10 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.
config.Set(config.ShowStudioAsText, *input.ShowStudioAsText)
}
+ if input.Language != nil {
+ config.Set(config.Language, *input.Language)
+ }
+
css := ""
if input.CSS != nil {
diff --git a/pkg/api/resolver_mutation_metadata.go b/pkg/api/resolver_mutation_metadata.go
new file mode 100644
index 000000000..65a743af8
--- /dev/null
+++ b/pkg/api/resolver_mutation_metadata.go
@@ -0,0 +1,53 @@
+package api
+
+import (
+ "context"
+
+ "github.com/stashapp/stash/pkg/manager"
+ "github.com/stashapp/stash/pkg/models"
+)
+
+func (r *mutationResolver) MetadataScan(ctx context.Context, input models.ScanMetadataInput) (string, error) {
+ manager.GetInstance().Scan(input.UseFileMetadata)
+ return "todo", nil
+}
+
+func (r *mutationResolver) MetadataImport(ctx context.Context) (string, error) {
+ manager.GetInstance().Import()
+ return "todo", nil
+}
+
+func (r *mutationResolver) MetadataExport(ctx context.Context) (string, error) {
+ manager.GetInstance().Export()
+ return "todo", nil
+}
+
+func (r *mutationResolver) MetadataGenerate(ctx context.Context, input models.GenerateMetadataInput) (string, error) {
+ manager.GetInstance().Generate(input.Sprites, input.Previews, input.PreviewPreset, input.ImagePreviews, input.Markers, input.Transcodes, input.Thumbnails)
+ return "todo", nil
+}
+
+func (r *mutationResolver) MetadataAutoTag(ctx context.Context, input models.AutoTagMetadataInput) (string, error) {
+ manager.GetInstance().AutoTag(input.Performers, input.Studios, input.Tags)
+ return "todo", nil
+}
+
+func (r *mutationResolver) MetadataClean(ctx context.Context) (string, error) {
+ manager.GetInstance().Clean()
+ return "todo", nil
+}
+
+func (r *mutationResolver) JobStatus(ctx context.Context) (*models.MetadataUpdateStatus, error) {
+ status := manager.GetInstance().Status
+ ret := models.MetadataUpdateStatus{
+ Progress: status.Progress,
+ Status: status.Status.String(),
+ Message: "",
+ }
+
+ return &ret, nil
+}
+
+func (r *mutationResolver) StopJob(ctx context.Context) (bool, error) {
+ return manager.GetInstance().Status.Stop(), nil
+}
diff --git a/pkg/api/resolver_mutation_movie.go b/pkg/api/resolver_mutation_movie.go
new file mode 100644
index 000000000..d52d0dabe
--- /dev/null
+++ b/pkg/api/resolver_mutation_movie.go
@@ -0,0 +1,199 @@
+package api
+
+import (
+ "context"
+ "database/sql"
+ "strconv"
+ "time"
+
+ "github.com/stashapp/stash/pkg/database"
+ "github.com/stashapp/stash/pkg/models"
+ "github.com/stashapp/stash/pkg/utils"
+)
+
+func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCreateInput) (*models.Movie, error) {
+ // generate checksum from movie name rather than image
+ checksum := utils.MD5FromString(input.Name)
+
+ var frontimageData []byte
+ var backimageData []byte
+ var err error
+
+ if input.FrontImage == nil {
+ input.FrontImage = &models.DefaultMovieImage
+ }
+ if input.BackImage == nil {
+ input.BackImage = &models.DefaultMovieImage
+ }
+ // Process the base 64 encoded image string
+ _, frontimageData, err = utils.ProcessBase64Image(*input.FrontImage)
+ if err != nil {
+ return nil, err
+ }
+ // Process the base 64 encoded image string
+ _, backimageData, err = utils.ProcessBase64Image(*input.BackImage)
+ if err != nil {
+ return nil, err
+ }
+
+ // Populate a new movie from the input
+ currentTime := time.Now()
+ newMovie := models.Movie{
+ BackImage: backimageData,
+ FrontImage: frontimageData,
+ Checksum: checksum,
+ Name: sql.NullString{String: input.Name, Valid: true},
+ CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
+ UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
+ }
+
+ if input.Aliases != nil {
+ newMovie.Aliases = sql.NullString{String: *input.Aliases, Valid: true}
+ }
+ if input.Duration != nil {
+ duration := int64(*input.Duration)
+ newMovie.Duration = sql.NullInt64{Int64: duration, Valid: true}
+ }
+
+ if input.Date != nil {
+ newMovie.Date = models.SQLiteDate{String: *input.Date, Valid: true}
+ }
+
+ if input.Rating != nil {
+ rating := int64(*input.Rating)
+ newMovie.Rating = sql.NullInt64{Int64: rating, Valid: true}
+ }
+
+ if input.StudioID != nil {
+ studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64)
+ newMovie.StudioID = sql.NullInt64{Int64: studioID, Valid: true}
+ }
+
+ if input.Director != nil {
+ newMovie.Director = sql.NullString{String: *input.Director, Valid: true}
+ }
+
+ if input.Synopsis != nil {
+ newMovie.Synopsis = sql.NullString{String: *input.Synopsis, Valid: true}
+ }
+
+ if input.URL != nil {
+ newMovie.URL = sql.NullString{String: *input.URL, Valid: true}
+ }
+
+ // Start the transaction and save the movie
+ tx := database.DB.MustBeginTx(ctx, nil)
+ qb := models.NewMovieQueryBuilder()
+ movie, err := qb.Create(newMovie, tx)
+ if err != nil {
+ _ = tx.Rollback()
+ return nil, err
+ }
+
+ // Commit
+ if err := tx.Commit(); err != nil {
+ return nil, err
+ }
+
+ return movie, nil
+}
+
+func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUpdateInput) (*models.Movie, error) {
+ // Populate movie from the input
+ movieID, _ := strconv.Atoi(input.ID)
+
+ updatedMovie := models.MoviePartial{
+ ID: movieID,
+ UpdatedAt: &models.SQLiteTimestamp{Timestamp: time.Now()},
+ }
+ if input.FrontImage != nil {
+ _, frontimageData, err := utils.ProcessBase64Image(*input.FrontImage)
+ if err != nil {
+ return nil, err
+ }
+ updatedMovie.FrontImage = &frontimageData
+ }
+ if input.BackImage != nil {
+ _, backimageData, err := utils.ProcessBase64Image(*input.BackImage)
+ if err != nil {
+ return nil, err
+ }
+ updatedMovie.BackImage = &backimageData
+ }
+
+ if input.Name != nil {
+ // generate checksum from movie name rather than image
+ checksum := utils.MD5FromString(*input.Name)
+ updatedMovie.Name = &sql.NullString{String: *input.Name, Valid: true}
+ updatedMovie.Checksum = &checksum
+ }
+
+ if input.Aliases != nil {
+ updatedMovie.Aliases = &sql.NullString{String: *input.Aliases, Valid: true}
+ }
+ if input.Duration != nil {
+ duration := int64(*input.Duration)
+ updatedMovie.Duration = &sql.NullInt64{Int64: duration, Valid: true}
+ }
+
+ if input.Date != nil {
+ updatedMovie.Date = &models.SQLiteDate{String: *input.Date, Valid: true}
+ }
+
+ if input.Rating != nil {
+ rating := int64(*input.Rating)
+ updatedMovie.Rating = &sql.NullInt64{Int64: rating, Valid: true}
+ } else {
+ // rating must be nullable
+ updatedMovie.Rating = &sql.NullInt64{Valid: false}
+ }
+
+ if input.StudioID != nil {
+ studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64)
+ updatedMovie.StudioID = &sql.NullInt64{Int64: studioID, Valid: true}
+ } else {
+ // studio must be nullable
+ updatedMovie.StudioID = &sql.NullInt64{Valid: false}
+ }
+
+ if input.Director != nil {
+ updatedMovie.Director = &sql.NullString{String: *input.Director, Valid: true}
+ }
+
+ if input.Synopsis != nil {
+ updatedMovie.Synopsis = &sql.NullString{String: *input.Synopsis, Valid: true}
+ }
+
+ if input.URL != nil {
+ updatedMovie.URL = &sql.NullString{String: *input.URL, Valid: true}
+ }
+
+ // Start the transaction and save the movie
+ tx := database.DB.MustBeginTx(ctx, nil)
+ qb := models.NewMovieQueryBuilder()
+ movie, err := qb.Update(updatedMovie, tx)
+ if err != nil {
+ _ = tx.Rollback()
+ return nil, err
+ }
+
+ // Commit
+ if err := tx.Commit(); err != nil {
+ return nil, err
+ }
+
+ return movie, nil
+}
+
+func (r *mutationResolver) MovieDestroy(ctx context.Context, input models.MovieDestroyInput) (bool, error) {
+ qb := models.NewMovieQueryBuilder()
+ tx := database.DB.MustBeginTx(ctx, nil)
+ if err := qb.Destroy(input.ID, tx); err != nil {
+ _ = tx.Rollback()
+ return false, err
+ }
+ if err := tx.Commit(); err != nil {
+ return false, err
+ }
+ return true, nil
+}
diff --git a/pkg/api/resolver_mutation_performer.go b/pkg/api/resolver_mutation_performer.go
index 65c089525..697192392 100644
--- a/pkg/api/resolver_mutation_performer.go
+++ b/pkg/api/resolver_mutation_performer.go
@@ -19,7 +19,11 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
var err error
if input.Image == nil {
- imageData, err = getRandomPerformerImage()
+ gender := ""
+ if input.Gender != nil {
+ gender = input.Gender.String()
+ }
+ imageData, err = getRandomPerformerImage(gender)
} else {
_, imageData, err = utils.ProcessBase64Image(*input.Image)
}
@@ -42,6 +46,9 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
if input.URL != nil {
newPerformer.URL = sql.NullString{String: *input.URL, Valid: true}
}
+ if input.Gender != nil {
+ newPerformer.Gender = sql.NullString{String: input.Gender.String(), Valid: true}
+ }
if input.Birthdate != nil {
newPerformer.Birthdate = models.SQLiteDate{String: *input.Birthdate, Valid: true}
}
@@ -128,6 +135,9 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
if input.URL != nil {
updatedPerformer.URL = sql.NullString{String: *input.URL, Valid: true}
}
+ if input.Gender != nil {
+ updatedPerformer.Gender = sql.NullString{String: input.Gender.String(), Valid: true}
+ }
if input.Birthdate != nil {
updatedPerformer.Birthdate = models.SQLiteDate{String: *input.Birthdate, Valid: true}
}
diff --git a/pkg/api/resolver_mutation_scene.go b/pkg/api/resolver_mutation_scene.go
index 1d2564257..3cee12c11 100644
--- a/pkg/api/resolver_mutation_scene.go
+++ b/pkg/api/resolver_mutation_scene.go
@@ -147,6 +147,31 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, tx *sqlx.T
return nil, err
}
+ // Save the movies
+ var movieJoins []models.MoviesScenes
+
+ for _, movie := range input.Movies {
+
+ movieID, _ := strconv.Atoi(movie.MovieID)
+
+ movieJoin := models.MoviesScenes{
+ MovieID: movieID,
+ SceneID: sceneID,
+ }
+
+ if movie.SceneIndex != nil {
+ movieJoin.SceneIndex = sql.NullInt64{
+ Int64: int64(*movie.SceneIndex),
+ Valid: true,
+ }
+ }
+
+ movieJoins = append(movieJoins, movieJoin)
+ }
+ if err := jqb.UpdateMoviesScenes(sceneID, movieJoins, tx); err != nil {
+ return nil, err
+ }
+
// Save the tags
var tagJoins []models.ScenesTags
for _, tid := range input.TagIds {
@@ -247,9 +272,14 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.Bul
// Save the performers
if wasFieldIncluded(ctx, "performer_ids") {
+ performerIDs, err := adjustScenePerformerIDs(tx, sceneID, *input.PerformerIds)
+ if err != nil {
+ _ = tx.Rollback()
+ return nil, err
+ }
+
var performerJoins []models.PerformersScenes
- for _, pid := range input.PerformerIds {
- performerID, _ := strconv.Atoi(pid)
+ for _, performerID := range performerIDs {
performerJoin := models.PerformersScenes{
PerformerID: performerID,
SceneID: sceneID,
@@ -264,9 +294,14 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.Bul
// Save the tags
if wasFieldIncluded(ctx, "tag_ids") {
+ tagIDs, err := adjustSceneTagIDs(tx, sceneID, *input.TagIds)
+ if err != nil {
+ _ = tx.Rollback()
+ return nil, err
+ }
+
var tagJoins []models.ScenesTags
- for _, tid := range input.TagIds {
- tagID, _ := strconv.Atoi(tid)
+ for _, tagID := range tagIDs {
tagJoin := models.ScenesTags{
SceneID: sceneID,
TagID: tagID,
@@ -288,6 +323,72 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.Bul
return ret, nil
}
+func adjustIDs(existingIDs []int, updateIDs models.BulkUpdateIds) []int {
+ for _, idStr := range updateIDs.Ids {
+ id, _ := strconv.Atoi(idStr)
+
+ // look for the id in the list
+ foundExisting := false
+ for idx, existingID := range existingIDs {
+ if existingID == id {
+ if updateIDs.Mode == models.BulkUpdateIDModeRemove {
+ // remove from the list
+ existingIDs = append(existingIDs[:idx], existingIDs[idx+1:]...)
+ }
+
+ foundExisting = true
+ break
+ }
+ }
+
+ if !foundExisting && updateIDs.Mode != models.BulkUpdateIDModeRemove {
+ existingIDs = append(existingIDs, id)
+ }
+ }
+
+ return existingIDs
+}
+
+func adjustScenePerformerIDs(tx *sqlx.Tx, sceneID int, ids models.BulkUpdateIds) ([]int, error) {
+ var ret []int
+
+ jqb := models.NewJoinsQueryBuilder()
+ if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
+ // fetch the existing joins so they can be added to or removed from
+ performerJoins, err := jqb.GetScenePerformers(sceneID, tx)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for _, join := range performerJoins {
+ ret = append(ret, join.PerformerID)
+ }
+ }
+
+ return adjustIDs(ret, ids), nil
+}
+
+func adjustSceneTagIDs(tx *sqlx.Tx, sceneID int, ids models.BulkUpdateIds) ([]int, error) {
+ var ret []int
+
+ jqb := models.NewJoinsQueryBuilder()
+ if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
+ // fetch the existing joins so they can be added to or removed from
+ tagJoins, err := jqb.GetSceneTags(sceneID, tx)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for _, join := range tagJoins {
+ ret = append(ret, join.TagID)
+ }
+ }
+
+ return adjustIDs(ret, ids), nil
+}
+
func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneDestroyInput) (bool, error) {
qb := models.NewSceneQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)
@@ -356,6 +457,14 @@ func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input models.S
func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (bool, error) {
qb := models.NewSceneMarkerQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)
+
+ markerID, _ := strconv.Atoi(id)
+ marker, err := qb.Find(markerID)
+
+ if err != nil {
+ return false, err
+ }
+
if err := qb.Destroy(id, tx); err != nil {
_ = tx.Rollback()
return false, err
@@ -363,6 +472,16 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b
if err := tx.Commit(); err != nil {
return false, err
}
+
+ // delete the preview for the marker
+ sqb := models.NewSceneQueryBuilder()
+ scene, _ := sqb.Find(int(marker.SceneID.Int64))
+
+ if scene != nil {
+ seconds := int(marker.Seconds)
+ manager.DeleteSceneMarkerFiles(scene, seconds)
+ }
+
return true, nil
}
@@ -372,13 +491,18 @@ func changeMarker(ctx context.Context, changeType int, changedMarker models.Scen
qb := models.NewSceneMarkerQueryBuilder()
jqb := models.NewJoinsQueryBuilder()
+ var existingMarker *models.SceneMarker
var sceneMarker *models.SceneMarker
var err error
switch changeType {
case create:
sceneMarker, err = qb.Create(changedMarker, tx)
case update:
- sceneMarker, err = qb.Update(changedMarker, tx)
+ // check to see if timestamp was changed
+ existingMarker, err = qb.Find(changedMarker.ID)
+ if err == nil {
+ sceneMarker, err = qb.Update(changedMarker, tx)
+ }
}
if err != nil {
_ = tx.Rollback()
@@ -416,6 +540,18 @@ func changeMarker(ctx context.Context, changeType int, changedMarker models.Scen
return nil, err
}
+ // remove the marker preview if the timestamp was changed
+ if existingMarker != nil && existingMarker.Seconds != changedMarker.Seconds {
+ sqb := models.NewSceneQueryBuilder()
+
+ scene, _ := sqb.Find(int(existingMarker.SceneID.Int64))
+
+ if scene != nil {
+ seconds := int(existingMarker.Seconds)
+ manager.DeleteSceneMarkerFiles(scene, seconds)
+ }
+ }
+
return sceneMarker, nil
}
@@ -478,3 +614,13 @@ func (r *mutationResolver) SceneResetO(ctx context.Context, id string) (int, err
return newVal, nil
}
+
+func (r *mutationResolver) SceneGenerateScreenshot(ctx context.Context, id string, at *float64) (string, error) {
+ if at != nil {
+ manager.GetInstance().GenerateScreenshot(id, *at)
+ } else {
+ manager.GetInstance().GenerateDefaultScreenshot(id)
+ }
+
+ return "todo", nil
+}
diff --git a/pkg/api/resolver_query_configuration.go b/pkg/api/resolver_query_configuration.go
index 80ddf8bd2..da059de60 100644
--- a/pkg/api/resolver_query_configuration.go
+++ b/pkg/api/resolver_query_configuration.go
@@ -12,12 +12,18 @@ func (r *queryResolver) Configuration(ctx context.Context) (*models.ConfigResult
return makeConfigResult(), nil
}
-func (r *queryResolver) Directories(ctx context.Context, path *string) ([]string, error) {
+func (r *queryResolver) Directory(ctx context.Context, path *string) (*models.Directory, error) {
var dirPath = ""
if path != nil {
dirPath = *path
}
- return utils.ListDir(dirPath), nil
+ currentDir := utils.GetDir(dirPath)
+
+ return &models.Directory{
+ Path: currentDir,
+ Parent: utils.GetParent(currentDir),
+ Directories: utils.ListDir(currentDir),
+ }, nil
}
func makeConfigResult() *models.ConfigResult {
@@ -33,38 +39,49 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
maxTranscodeSize := config.GetMaxTranscodeSize()
maxStreamingTranscodeSize := config.GetMaxStreamingTranscodeSize()
+ scraperUserAgent := config.GetScraperUserAgent()
+
return &models.ConfigGeneralResult{
Stashes: config.GetStashPaths(),
DatabasePath: config.GetDatabasePath(),
GeneratedPath: config.GetGeneratedPath(),
+ CachePath: config.GetCachePath(),
MaxTranscodeSize: &maxTranscodeSize,
MaxStreamingTranscodeSize: &maxStreamingTranscodeSize,
+ ForceMkv: config.GetForceMKV(),
+ ForceHevc: config.GetForceHEVC(),
Username: config.GetUsername(),
Password: config.GetPasswordHash(),
+ MaxSessionAge: config.GetMaxSessionAge(),
LogFile: &logFile,
LogOut: config.GetLogOut(),
LogLevel: config.GetLogLevel(),
LogAccess: config.GetLogAccess(),
Excludes: config.GetExcludes(),
+ ScraperUserAgent: &scraperUserAgent,
}
}
func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
soundOnPreview := config.GetSoundOnPreview()
wallShowTitle := config.GetWallShowTitle()
+ wallPlayback := config.GetWallPlayback()
maximumLoopDuration := config.GetMaximumLoopDuration()
autostartVideo := config.GetAutostartVideo()
showStudioAsText := config.GetShowStudioAsText()
css := config.GetCSS()
cssEnabled := config.GetCSSEnabled()
+ language := config.GetLanguage()
return &models.ConfigInterfaceResult{
SoundOnPreview: &soundOnPreview,
WallShowTitle: &wallShowTitle,
+ WallPlayback: &wallPlayback,
MaximumLoopDuration: &maximumLoopDuration,
AutostartVideo: &autostartVideo,
ShowStudioAsText: &showStudioAsText,
CSS: &css,
CSSEnabled: &cssEnabled,
+ Language: &language,
}
}
diff --git a/pkg/api/resolver_query_find_movie.go b/pkg/api/resolver_query_find_movie.go
new file mode 100644
index 000000000..9d23eddfc
--- /dev/null
+++ b/pkg/api/resolver_query_find_movie.go
@@ -0,0 +1,33 @@
+package api
+
+import (
+ "context"
+ "strconv"
+
+ "github.com/stashapp/stash/pkg/models"
+)
+
+func (r *queryResolver) FindMovie(ctx context.Context, id string) (*models.Movie, error) {
+ qb := models.NewMovieQueryBuilder()
+ idInt, _ := strconv.Atoi(id)
+ return qb.Find(idInt, nil)
+}
+
+func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.MovieFilterType, filter *models.FindFilterType) (*models.FindMoviesResultType, error) {
+ qb := models.NewMovieQueryBuilder()
+ movies, total := qb.Query(movieFilter, filter)
+ return &models.FindMoviesResultType{
+ Count: total,
+ Movies: movies,
+ }, nil
+}
+
+func (r *queryResolver) AllMovies(ctx context.Context) ([]*models.Movie, error) {
+ qb := models.NewMovieQueryBuilder()
+ return qb.All()
+}
+
+func (r *queryResolver) AllMoviesSlim(ctx context.Context) ([]*models.Movie, error) {
+ qb := models.NewMovieQueryBuilder()
+ return qb.AllSlim()
+}
diff --git a/pkg/api/resolver_query_find_performer.go b/pkg/api/resolver_query_find_performer.go
index 33c29d6c5..efb694910 100644
--- a/pkg/api/resolver_query_find_performer.go
+++ b/pkg/api/resolver_query_find_performer.go
@@ -25,3 +25,8 @@ func (r *queryResolver) AllPerformers(ctx context.Context) ([]*models.Performer,
qb := models.NewPerformerQueryBuilder()
return qb.All()
}
+
+func (r *queryResolver) AllPerformersSlim(ctx context.Context) ([]*models.Performer, error) {
+ qb := models.NewPerformerQueryBuilder()
+ return qb.AllSlim()
+}
diff --git a/pkg/api/resolver_query_find_studio.go b/pkg/api/resolver_query_find_studio.go
index 3537254f8..4b39130f4 100644
--- a/pkg/api/resolver_query_find_studio.go
+++ b/pkg/api/resolver_query_find_studio.go
@@ -25,3 +25,8 @@ func (r *queryResolver) AllStudios(ctx context.Context) ([]*models.Studio, error
qb := models.NewStudioQueryBuilder()
return qb.All()
}
+
+func (r *queryResolver) AllStudiosSlim(ctx context.Context) ([]*models.Studio, error) {
+ qb := models.NewStudioQueryBuilder()
+ return qb.AllSlim()
+}
diff --git a/pkg/api/resolver_query_find_tag.go b/pkg/api/resolver_query_find_tag.go
index e5b6a5929..64fc866c9 100644
--- a/pkg/api/resolver_query_find_tag.go
+++ b/pkg/api/resolver_query_find_tag.go
@@ -16,3 +16,8 @@ func (r *queryResolver) AllTags(ctx context.Context) ([]*models.Tag, error) {
qb := models.NewTagQueryBuilder()
return qb.All()
}
+
+func (r *queryResolver) AllTagsSlim(ctx context.Context) ([]*models.Tag, error) {
+ qb := models.NewTagQueryBuilder()
+ return qb.AllSlim()
+}
diff --git a/pkg/api/resolver_query_metadata.go b/pkg/api/resolver_query_metadata.go
index ad8dcbf3e..862d91eae 100644
--- a/pkg/api/resolver_query_metadata.go
+++ b/pkg/api/resolver_query_metadata.go
@@ -7,36 +7,6 @@ import (
"github.com/stashapp/stash/pkg/models"
)
-func (r *queryResolver) MetadataScan(ctx context.Context, input models.ScanMetadataInput) (string, error) {
- manager.GetInstance().Scan(input.UseFileMetadata)
- return "todo", nil
-}
-
-func (r *queryResolver) MetadataImport(ctx context.Context) (string, error) {
- manager.GetInstance().Import()
- return "todo", nil
-}
-
-func (r *queryResolver) MetadataExport(ctx context.Context) (string, error) {
- manager.GetInstance().Export()
- return "todo", nil
-}
-
-func (r *queryResolver) MetadataGenerate(ctx context.Context, input models.GenerateMetadataInput) (string, error) {
- manager.GetInstance().Generate(input.Sprites, input.Previews, input.Markers, input.Transcodes)
- return "todo", nil
-}
-
-func (r *queryResolver) MetadataAutoTag(ctx context.Context, input models.AutoTagMetadataInput) (string, error) {
- manager.GetInstance().AutoTag(input.Performers, input.Studios, input.Tags)
- return "todo", nil
-}
-
-func (r *queryResolver) MetadataClean(ctx context.Context) (string, error) {
- manager.GetInstance().Clean()
- return "todo", nil
-}
-
func (r *queryResolver) JobStatus(ctx context.Context) (*models.MetadataUpdateStatus, error) {
status := manager.GetInstance().Status
ret := models.MetadataUpdateStatus{
@@ -47,7 +17,3 @@ func (r *queryResolver) JobStatus(ctx context.Context) (*models.MetadataUpdateSt
return &ret, nil
}
-
-func (r *queryResolver) StopJob(ctx context.Context) (bool, error) {
- return manager.GetInstance().Status.Stop(), nil
-}
diff --git a/pkg/api/routes_gallery.go b/pkg/api/routes_gallery.go
index 0c53cf35c..4b5a7f4cd 100644
--- a/pkg/api/routes_gallery.go
+++ b/pkg/api/routes_gallery.go
@@ -23,11 +23,15 @@ func (rs galleryRoutes) Routes() chi.Router {
func (rs galleryRoutes) File(w http.ResponseWriter, r *http.Request) {
gallery := r.Context().Value(galleryKey).(*models.Gallery)
+ if gallery == nil {
+ http.Error(w, http.StatusText(404), 404)
+ return
+ }
fileIndex, _ := strconv.Atoi(chi.URLParam(r, "fileIndex"))
thumb := r.URL.Query().Get("thumb")
w.Header().Add("Cache-Control", "max-age=604800000") // 1 Week
if thumb == "true" {
- _, _ = w.Write(gallery.GetThumbnail(fileIndex, 200))
+ _, _ = w.Write(cacheGthumb(gallery, fileIndex, models.DefaultGthumbWidth))
} else if thumb == "" {
_, _ = w.Write(gallery.GetImage(fileIndex))
} else {
@@ -36,7 +40,7 @@ func (rs galleryRoutes) File(w http.ResponseWriter, r *http.Request) {
http.Error(w, http.StatusText(400), 400)
return
}
- _, _ = w.Write(gallery.GetThumbnail(fileIndex, int(width)))
+ _, _ = w.Write(cacheGthumb(gallery, fileIndex, int(width)))
}
}
diff --git a/pkg/api/routes_movie.go b/pkg/api/routes_movie.go
new file mode 100644
index 000000000..3c6659c59
--- /dev/null
+++ b/pkg/api/routes_movie.go
@@ -0,0 +1,54 @@
+package api
+
+import (
+ "context"
+ "net/http"
+ "strconv"
+
+ "github.com/go-chi/chi"
+ "github.com/stashapp/stash/pkg/models"
+)
+
+type movieRoutes struct{}
+
+func (rs movieRoutes) Routes() chi.Router {
+ r := chi.NewRouter()
+
+ r.Route("/{movieId}", func(r chi.Router) {
+ r.Use(MovieCtx)
+ r.Get("/frontimage", rs.FrontImage)
+ r.Get("/backimage", rs.BackImage)
+ })
+
+ return r
+}
+
+func (rs movieRoutes) FrontImage(w http.ResponseWriter, r *http.Request) {
+ movie := r.Context().Value(movieKey).(*models.Movie)
+ _, _ = w.Write(movie.FrontImage)
+}
+
+func (rs movieRoutes) BackImage(w http.ResponseWriter, r *http.Request) {
+ movie := r.Context().Value(movieKey).(*models.Movie)
+ _, _ = w.Write(movie.BackImage)
+}
+
+func MovieCtx(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ movieID, err := strconv.Atoi(chi.URLParam(r, "movieId"))
+ if err != nil {
+ http.Error(w, http.StatusText(404), 404)
+ return
+ }
+
+ qb := models.NewMovieQueryBuilder()
+ movie, err := qb.Find(movieID, nil)
+ if err != nil {
+ http.Error(w, http.StatusText(404), 404)
+ return
+ }
+
+ ctx := context.WithValue(r.Context(), movieKey, movie)
+ next.ServeHTTP(w, r.WithContext(ctx))
+ })
+}
diff --git a/pkg/api/routes_scene.go b/pkg/api/routes_scene.go
index b5cf8827e..171d868b4 100644
--- a/pkg/api/routes_scene.go
+++ b/pkg/api/routes_scene.go
@@ -4,6 +4,7 @@ import (
"context"
"io"
"net/http"
+ "os"
"strconv"
"strings"
@@ -42,13 +43,32 @@ func (rs sceneRoutes) Routes() chi.Router {
// region Handlers
func (rs sceneRoutes) Stream(w http.ResponseWriter, r *http.Request) {
+
scene := r.Context().Value(sceneKey).(*models.Scene)
+ container := ""
+ if scene.Format.Valid {
+ container = scene.Format.String
+ } else { // container isn't in the DB
+ // shouldn't happen; fall back to ffprobe
+ tmpVideoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path)
+ if err != nil {
+ logger.Errorf("[transcode] error reading video file: %s", err.Error())
+ return
+ }
+
+ container = string(ffmpeg.MatchContainer(tmpVideoFile.Container, scene.Path))
+ }
+
// detect if not a streamable file and try to transcode it instead
filepath := manager.GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.Checksum)
videoCodec := scene.VideoCodec.String
+ audioCodec := ffmpeg.MissingUnsupported
+ if scene.AudioCodec.Valid {
+ audioCodec = ffmpeg.AudioCodec(scene.AudioCodec.String)
+ }
hasTranscode, _ := manager.HasTranscode(scene)
- if ffmpeg.IsValidCodec(videoCodec) || hasTranscode {
+ if ffmpeg.IsValidCodec(videoCodec) && ffmpeg.IsValidCombo(videoCodec, ffmpeg.Container(container)) && ffmpeg.IsValidAudioForContainer(audioCodec, ffmpeg.Container(container)) || hasTranscode {
manager.RegisterStream(filepath, &w)
http.ServeFile(w, r, filepath)
manager.WaitAndDeregisterStream(filepath, &w, r)
@@ -69,16 +89,50 @@ func (rs sceneRoutes) Stream(w http.ResponseWriter, r *http.Request) {
encoder := ffmpeg.NewEncoder(manager.GetInstance().FFMPEGPath)
- stream, process, err := encoder.StreamTranscode(*videoFile, startTime, config.GetMaxStreamingTranscodeSize())
+ var stream io.ReadCloser
+ var process *os.Process
+ mimeType := ffmpeg.MimeWebm
+
+ if audioCodec == ffmpeg.MissingUnsupported {
+ // ffmpeg fails if it tries to transcode an unsupported audio codec
+ stream, process, err = encoder.StreamTranscodeVideo(*videoFile, startTime, config.GetMaxStreamingTranscodeSize())
+ } else {
+ copyVideo := false // try to be smart if the video to be transcoded is in a Matroska container
+ // mp4 has always supported audio so it doesn't need to be checked
+ // while mpeg_ts has seeking issues if we don't reencode the video
+
+ if config.GetForceMKV() { // If MKV is forced as supported and video codec is also supported then only transcode audio
+ if ffmpeg.Container(container) == ffmpeg.Matroska {
+ switch videoCodec {
+ case ffmpeg.H264, ffmpeg.Vp9, ffmpeg.Vp8:
+ copyVideo = true
+ case ffmpeg.Hevc:
+ if config.GetForceHEVC() {
+ copyVideo = true
+ }
+
+ }
+ }
+ }
+
+ if copyVideo { // copy video stream instead of transcoding it
+ stream, process, err = encoder.StreamMkvTranscodeAudio(*videoFile, startTime, config.GetMaxStreamingTranscodeSize())
+ mimeType = ffmpeg.MimeMkv
+
+ } else {
+ stream, process, err = encoder.StreamTranscode(*videoFile, startTime, config.GetMaxStreamingTranscodeSize())
+ }
+ }
+
if err != nil {
logger.Errorf("[stream] error transcoding video file: %s", err.Error())
return
}
w.WriteHeader(http.StatusOK)
- w.Header().Set("Content-Type", "video/webm")
+ w.Header().Set("Content-Type", mimeType)
- logger.Info("[stream] transcoding video file")
+ logger.Infof("[stream] transcoding video file to %s", mimeType)
// handle if client closes the connection
notify := r.Context().Done()
diff --git a/pkg/api/routes_studio.go b/pkg/api/routes_studio.go
index 8080a641a..22d0702c5 100644
--- a/pkg/api/routes_studio.go
+++ b/pkg/api/routes_studio.go
@@ -2,10 +2,13 @@ package api
import (
"context"
+ "crypto/md5"
+ "fmt"
"github.com/go-chi/chi"
"github.com/stashapp/stash/pkg/models"
"net/http"
"strconv"
+ "strings"
)
type studioRoutes struct{}
@@ -23,6 +26,21 @@ func (rs studioRoutes) Routes() chi.Router {
func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) {
studio := r.Context().Value(studioKey).(*models.Studio)
+ etag := fmt.Sprintf("%x", md5.Sum(studio.Image))
+ if match := r.Header.Get("If-None-Match"); match != "" {
+ if strings.Contains(match, etag) {
+ w.WriteHeader(http.StatusNotModified)
+ return
+ }
+ }
+
+ contentType := http.DetectContentType(studio.Image)
+ if contentType == "text/xml; charset=utf-8" || contentType == "text/plain; charset=utf-8" {
+ contentType = "image/svg+xml"
+ }
+
+ w.Header().Set("Content-Type", contentType)
+ w.Header().Add("Etag", etag)
_, _ = w.Write(studio.Image)
}
diff --git a/pkg/api/server.go b/pkg/api/server.go
index 27141c5bf..9d3a038c3 100644
--- a/pkg/api/server.go
+++ b/pkg/api/server.go
@@ -7,6 +7,7 @@ import (
"fmt"
"io/ioutil"
"net/http"
+ "net/url"
"os"
"path"
"path/filepath"
@@ -20,6 +21,7 @@ import (
"github.com/gobuffalo/packr/v2"
"github.com/gorilla/websocket"
"github.com/rs/cors"
+ "github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/manager/config"
@@ -28,46 +30,81 @@ import (
"github.com/stashapp/stash/pkg/utils"
)
-var version string = ""
-var buildstamp string = ""
-var githash string = ""
+var version string
+var buildstamp string
+var githash string
var uiBox *packr.Box
//var legacyUiBox *packr.Box
var setupUIBox *packr.Box
+var loginUIBox *packr.Box
+
+func allowUnauthenticated(r *http.Request) bool {
+ return strings.HasPrefix(r.URL.Path, "/login") || r.URL.Path == "/css"
+}
func authenticateHandler() func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- // only do this if credentials have been configured
- if !config.HasCredentials() {
- next.ServeHTTP(w, r)
+ ctx := r.Context()
+
+ // translate api key into current user, if present
+ userID := ""
+ var err error
+
+ // handle session
+ userID, err = getSessionUserID(w, r)
+
+ if err != nil {
+ w.WriteHeader(http.StatusInternalServerError)
+ w.Write([]byte(err.Error()))
return
}
- authUser, authPW, ok := r.BasicAuth()
+ // handle redirect if no user and user is required
+ if userID == "" && config.HasCredentials() && !allowUnauthenticated(r) {
+ // always allow
- if !ok || !config.ValidateCredentials(authUser, authPW) {
- unauthorized(w)
+ // if we don't have a userID, then redirect
+ // if graphql was requested, we just return a forbidden error
+ if r.URL.Path == "/graphql" {
+ w.Header().Add("WWW-Authenticate", `FormBased`)
+ w.WriteHeader(http.StatusUnauthorized)
+ return
+ }
+
+ // otherwise redirect to the login page
+ u := url.URL{
+ Path: "/login",
+ }
+ q := u.Query()
+ q.Set(returnURLParam, r.URL.Path)
+ u.RawQuery = q.Encode()
+ http.Redirect(w, r, u.String(), http.StatusFound)
return
}
+ ctx = context.WithValue(ctx, ContextUser, userID)
+
+ r = r.WithContext(ctx)
+
next.ServeHTTP(w, r)
})
}
}
-func unauthorized(w http.ResponseWriter) {
- w.Header().Add("WWW-Authenticate", `Basic realm=\"Stash\"`)
- w.WriteHeader(http.StatusUnauthorized)
-}
+const setupEndPoint = "/setup"
+const migrateEndPoint = "/migrate"
+const loginEndPoint = "/login"
func Start() {
- uiBox = packr.New("UI Box", "../../ui/v2/build")
+ uiBox = packr.New("UI Box", "../../ui/v2.5/build")
//legacyUiBox = packr.New("UI Box", "../../ui/v1/dist/stash-frontend")
setupUIBox = packr.New("Setup UI Box", "../../ui/setup")
+ loginUIBox = packr.New("Login UI Box", "../../ui/login")
+ initSessionStore()
initialiseImages()
r := chi.NewRouter()
@@ -83,6 +120,7 @@ func Start() {
r.Use(cors.AllowAll().Handler)
r.Use(BaseURLMiddleware)
r.Use(ConfigCheckMiddleware)
+ r.Use(DatabaseCheckMiddleware)
recoverFunc := handler.RecoverFunc(func(ctx context.Context, err interface{}) error {
logger.Error(err)
@@ -105,12 +143,20 @@ func Start() {
r.Handle("/graphql", gqlHandler)
r.Handle("/playground", handler.Playground("GraphQL playground", "/graphql"))
+ // session handlers
+ r.Post(loginEndPoint, handleLogin)
+ r.Get("/logout", handleLogout)
+
+ r.Get(loginEndPoint, getLoginHandler)
+
r.Mount("/gallery", galleryRoutes{}.Routes())
r.Mount("/performer", performerRoutes{}.Routes())
r.Mount("/scene", sceneRoutes{}.Routes())
r.Mount("/studio", studioRoutes{}.Routes())
+ r.Mount("/movie", movieRoutes{}.Routes())
r.HandleFunc("/css", func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Set("Content-Type", "text/css")
if !config.GetCSSEnabled() {
return
}
@@ -125,6 +171,10 @@ func Start() {
http.ServeFile(w, r, fn)
})
+ // Serve the migration UI
+ r.Get("/migrate", getMigrateHandler)
+ r.Post("/migrate", doMigrateHandler)
+
// Serve the setup UI
r.HandleFunc("/setup*", func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path)
@@ -136,6 +186,16 @@ func Start() {
http.FileServer(setupUIBox).ServeHTTP(w, r)
}
})
+ r.HandleFunc("/login*", func(w http.ResponseWriter, r *http.Request) {
+ ext := path.Ext(r.URL.Path)
+ if ext == ".html" || ext == "" {
+ data, _ := loginUIBox.Find("login.html")
+ _, _ = w.Write(data)
+ } else {
+ r.URL.Path = strings.Replace(r.URL.Path, loginEndPoint, "", 1)
+ http.FileServer(loginUIBox).ServeHTTP(w, r)
+ }
+ })
r.Post("/init", func(w http.ResponseWriter, r *http.Request) {
err := r.ParseForm()
if err != nil {
@@ -174,7 +234,8 @@ func Start() {
_ = os.Mkdir(downloads, 0755)
- config.Set(config.Stash, stash)
+ // #536 - set stash as slice of strings
+ config.Set(config.Stash, []string{stash})
config.Set(config.Generated, generated)
config.Set(config.Metadata, metadata)
config.Set(config.Cache, cache)
@@ -189,6 +250,7 @@ func Start() {
http.Redirect(w, r, "/", 301)
})
+ startThumbCache()
// Serve the web app
r.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path)
@@ -311,10 +373,27 @@ func BaseURLMiddleware(next http.Handler) http.Handler {
func ConfigCheckMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path)
- shouldRedirect := ext == "" && r.Method == "GET" && r.URL.Path != "/init"
+ shouldRedirect := ext == "" && r.Method == "GET"
if !config.IsValid() && shouldRedirect {
- if !strings.HasPrefix(r.URL.Path, "/setup") {
- http.Redirect(w, r, "/setup", 301)
+ // #539 - don't redirect if loading login page
+ if !strings.HasPrefix(r.URL.Path, setupEndPoint) && !strings.HasPrefix(r.URL.Path, loginEndPoint) {
+ http.Redirect(w, r, setupEndPoint, 301)
+ return
+ }
+ }
+ next.ServeHTTP(w, r)
+ })
+}
+
+func DatabaseCheckMiddleware(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ext := path.Ext(r.URL.Path)
+ shouldRedirect := ext == "" && r.Method == "GET"
+ if shouldRedirect && database.NeedsMigration() {
+ // #451 - don't redirect if loading login page
+ // #539 - or setup page
+ if !strings.HasPrefix(r.URL.Path, migrateEndPoint) && !strings.HasPrefix(r.URL.Path, loginEndPoint) && !strings.HasPrefix(r.URL.Path, setupEndPoint) {
+ http.Redirect(w, r, migrateEndPoint, 301)
return
}
}
diff --git a/pkg/api/session.go b/pkg/api/session.go
new file mode 100644
index 000000000..e619fd782
--- /dev/null
+++ b/pkg/api/session.go
@@ -0,0 +1,127 @@
+package api
+
+import (
+ "fmt"
+ "html/template"
+ "net/http"
+
+ "github.com/stashapp/stash/pkg/manager/config"
+
+ "github.com/gorilla/sessions"
+)
+
+const cookieName = "session"
+const usernameFormKey = "username"
+const passwordFormKey = "password"
+const userIDKey = "userID"
+
+const returnURLParam = "returnURL"
+
+var sessionStore = sessions.NewCookieStore(config.GetSessionStoreKey())
+
+type loginTemplateData struct {
+ URL string
+ Error string
+}
+
+func initSessionStore() {
+ sessionStore.MaxAge(config.GetMaxSessionAge())
+}
+
+func redirectToLogin(w http.ResponseWriter, returnURL string, loginError string) {
+ data, _ := loginUIBox.Find("login.html")
+ templ, err := template.New("Login").Parse(string(data))
+ if err != nil {
+ http.Error(w, fmt.Sprintf("error: %s", err), http.StatusInternalServerError)
+ return
+ }
+
+ err = templ.Execute(w, loginTemplateData{URL: returnURL, Error: loginError})
+ if err != nil {
+ http.Error(w, fmt.Sprintf("error: %s", err), http.StatusInternalServerError)
+ }
+}
+
+func getLoginHandler(w http.ResponseWriter, r *http.Request) {
+ if !config.HasCredentials() {
+ http.Redirect(w, r, "/", http.StatusFound)
+ return
+ }
+
+ redirectToLogin(w, r.URL.Query().Get(returnURLParam), "")
+}
+
+func handleLogin(w http.ResponseWriter, r *http.Request) {
+ url := r.FormValue(returnURLParam)
+ if url == "" {
+ url = "/"
+ }
+
+ // ignore error - we want a new session regardless
+ newSession, _ := sessionStore.Get(r, cookieName)
+
+ username := r.FormValue("username")
+ password := r.FormValue("password")
+
+ // authenticate the user
+ if !config.ValidateCredentials(username, password) {
+ // redirect back to the login page with an error
+ redirectToLogin(w, url, "Username or password is invalid")
+ return
+ }
+
+ newSession.Values[userIDKey] = username
+
+ err := newSession.Save(r, w)
+ if err != nil {
+ http.Error(w, err.Error(), http.StatusInternalServerError)
+ return
+ }
+
+ http.Redirect(w, r, url, http.StatusFound)
+}
+
+func handleLogout(w http.ResponseWriter, r *http.Request) {
+ session, err := sessionStore.Get(r, cookieName)
+ if err != nil {
+ http.Error(w, err.Error(), http.StatusInternalServerError)
+ return
+ }
+
+ delete(session.Values, userIDKey)
+ session.Options.MaxAge = -1
+
+ err = session.Save(r, w)
+ if err != nil {
+ http.Error(w, err.Error(), http.StatusInternalServerError)
+ return
+ }
+
+ // redirect to the login page if credentials are required
+ getLoginHandler(w, r)
+}
+
+func getSessionUserID(w http.ResponseWriter, r *http.Request) (string, error) {
+ session, err := sessionStore.Get(r, cookieName)
+ // ignore errors and treat as an empty user id, so that expired
+ // cookies are handled gracefully
+ if err != nil {
+ return "", nil
+ }
+
+ if !session.IsNew {
+ val := session.Values[userIDKey]
+
+ // refresh the cookie
+ err = session.Save(r, w)
+ if err != nil {
+ return "", err
+ }
+
+ ret, _ := val.(string)
+
+ return ret, nil
+ }
+
+ return "", nil
+}
diff --git a/pkg/api/urlbuilders/movie.go b/pkg/api/urlbuilders/movie.go
new file mode 100644
index 000000000..7e454c070
--- /dev/null
+++ b/pkg/api/urlbuilders/movie.go
@@ -0,0 +1,24 @@
+package urlbuilders
+
+import "strconv"
+
+type MovieURLBuilder struct {
+ BaseURL string
+ MovieID string
+}
+
+func NewMovieURLBuilder(baseURL string, movieID int) MovieURLBuilder {
+ return MovieURLBuilder{
+ BaseURL: baseURL,
+ MovieID: strconv.Itoa(movieID),
+ }
+}
+
+func (b MovieURLBuilder) GetMovieFrontImageURL() string {
+ return b.BaseURL + "/movie/" + b.MovieID + "/frontimage"
+}
+
+func (b MovieURLBuilder) GetMovieBackImageURL() string {
+ return b.BaseURL + "/movie/" + b.MovieID + "/backimage"
+}
+
diff --git a/pkg/database/database.go b/pkg/database/database.go
index 66e0eba43..63f6d9cb1 100644
--- a/pkg/database/database.go
+++ b/pkg/database/database.go
@@ -5,10 +5,11 @@ import (
"errors"
"fmt"
"os"
- "regexp"
+ "time"
"github.com/gobuffalo/packr/v2"
"github.com/golang-migrate/migrate/v4"
+ sqlite3mig "github.com/golang-migrate/migrate/v4/database/sqlite3"
"github.com/golang-migrate/migrate/v4/source"
"github.com/jmoiron/sqlx"
sqlite3 "github.com/mattn/go-sqlite3"
@@ -17,26 +18,62 @@ import (
)
var DB *sqlx.DB
-var appSchemaVersion uint = 3
+var dbPath string
+var appSchemaVersion uint = 8
+var databaseSchemaVersion uint
-const sqlite3Driver = "sqlite3_regexp"
+const sqlite3Driver = "sqlite3ex"
func init() {
// register custom driver with regexp function
- registerRegexpFunc()
+ registerCustomDriver()
}
func Initialize(databasePath string) {
- runMigrations(databasePath)
+ dbPath = databasePath
+ if err := getDatabaseSchemaVersion(); err != nil {
+ panic(err)
+ }
+
+ if databaseSchemaVersion == 0 {
+ // new database, just run the migrations
+ if err := RunMigrations(); err != nil {
+ panic(err)
+ }
+ // RunMigrations calls Initialize. Just return
+ return
+ } else {
+ if databaseSchemaVersion > appSchemaVersion {
+ panic(fmt.Sprintf("Database schema version %d is incompatible with required schema version %d", databaseSchemaVersion, appSchemaVersion))
+ }
+
+ // if migration is needed, then don't open the connection
+ if NeedsMigration() {
+ logger.Warnf("Database schema version %d does not match required schema version %d.", databaseSchemaVersion, appSchemaVersion)
+ return
+ }
+ }
+
+ const disableForeignKeys = false
+ DB = open(databasePath, disableForeignKeys)
+}
+
+func open(databasePath string, disableForeignKeys bool) *sqlx.DB {
// https://github.com/mattn/go-sqlite3
- conn, err := sqlx.Open(sqlite3Driver, "file:"+databasePath+"?_fk=true")
+ url := "file:" + databasePath
+ if !disableForeignKeys {
+ url += "?_fk=true"
+ }
+
+ conn, err := sqlx.Open(sqlite3Driver, url)
conn.SetMaxOpenConns(25)
conn.SetMaxIdleConns(4)
if err != nil {
logger.Fatalf("db.Open(): %q\n", err)
}
- DB = conn
+
+ return conn
}
func Reset(databasePath string) error {
@@ -55,45 +92,123 @@ func Reset(databasePath string) error {
return nil
}
+// Backup the database
+func Backup(backupPath string) error {
+ db, err := sqlx.Connect(sqlite3Driver, "file:"+dbPath+"?_fk=true")
+ if err != nil {
+ return fmt.Errorf("Open database %s failed:%s", dbPath, err)
+ }
+ defer db.Close()
+
+ _, err = db.Exec(`VACUUM INTO "` + backupPath + `"`)
+ if err != nil {
+ return fmt.Errorf("Vacuum failed: %s", err)
+ }
+
+ return nil
+}
+
+func RestoreFromBackup(backupPath string) error {
+ return os.Rename(backupPath, dbPath)
+}
+
// Migrate the database
-func runMigrations(databasePath string) {
+func NeedsMigration() bool {
+ return databaseSchemaVersion != appSchemaVersion
+}
+
+func AppSchemaVersion() uint {
+ return appSchemaVersion
+}
+
+func DatabaseBackupPath() string {
+ return fmt.Sprintf("%s.%d.%s", dbPath, databaseSchemaVersion, time.Now().Format("20060102_150405"))
+}
+
+func Version() uint {
+ return databaseSchemaVersion
+}
+
+func getMigrate() (*migrate.Migrate, error) {
migrationsBox := packr.New("Migrations Box", "./migrations")
packrSource := &Packr2Source{
Box: migrationsBox,
Migrations: source.NewMigrations(),
}
- databasePath = utils.FixWindowsPath(databasePath)
+ databasePath := utils.FixWindowsPath(dbPath)
s, _ := WithInstance(packrSource)
- m, err := migrate.NewWithSourceInstance(
+
+ const disableForeignKeys = true
+ conn := open(databasePath, disableForeignKeys)
+
+ driver, err := sqlite3mig.WithInstance(conn.DB, &sqlite3mig.Config{})
+ if err != nil {
+ return nil, err
+ }
+
+ // use sqlite3Driver so that migration has access to durationToTinyInt
+ return migrate.NewWithInstance(
"packr2",
s,
- fmt.Sprintf("sqlite3://%s", "file:"+databasePath),
+ databasePath,
+ driver,
)
+}
+
+func getDatabaseSchemaVersion() error {
+ m, err := getMigrate()
+ if err != nil {
+ return err
+ }
+
+ databaseSchemaVersion, _, _ = m.Version()
+ m.Close()
+ return nil
+}
+
+// Migrate the database
+func RunMigrations() error {
+ m, err := getMigrate()
if err != nil {
panic(err.Error())
}
- databaseSchemaVersion, _, _ := m.Version()
+ databaseSchemaVersion, _, _ = m.Version()
stepNumber := appSchemaVersion - databaseSchemaVersion
if stepNumber != 0 {
err = m.Steps(int(stepNumber))
if err != nil {
- panic(err.Error())
+ // migration failed
+ m.Close()
+ return err
}
}
m.Close()
+
+ // re-initialise the database
+ Initialize(dbPath)
+
+ return nil
}
-func registerRegexpFunc() {
- regexFn := func(re, s string) (bool, error) {
- return regexp.MatchString(re, s)
- }
-
+func registerCustomDriver() {
sql.Register(sqlite3Driver,
&sqlite3.SQLiteDriver{
ConnectHook: func(conn *sqlite3.SQLiteConn) error {
- return conn.RegisterFunc("regexp", regexFn, true)
+ funcs := map[string]interface{}{
+ "regexp": regexFn,
+ "durationToTinyInt": durationToTinyIntFn,
+ }
+
+ for name, fn := range funcs {
+ if err := conn.RegisterFunc(name, fn, true); err != nil {
+ return fmt.Errorf("Error registering function %s: %s", name, err.Error())
+ }
+ }
+
+ return nil
},
- })
+ },
+ )
}
diff --git a/pkg/database/functions.go b/pkg/database/functions.go
new file mode 100644
index 000000000..69dc8c0fc
--- /dev/null
+++ b/pkg/database/functions.go
@@ -0,0 +1,37 @@
+package database
+
+import (
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+func regexFn(re, s string) (bool, error) {
+ return regexp.MatchString(re, s)
+}
+
+func durationToTinyIntFn(str string) (int64, error) {
+ splits := strings.Split(str, ":")
+
+ if len(splits) > 3 {
+ return 0, nil
+ }
+
+ seconds := 0
+ factor := 1
+ for len(splits) > 0 {
+ // pop the last split
+ var thisSplit string
+ thisSplit, splits = splits[len(splits)-1], splits[:len(splits)-1]
+
+ thisInt, err := strconv.Atoi(thisSplit)
+ if err != nil {
+ return 0, nil
+ }
+
+ seconds += factor * thisInt
+ factor *= 60
+ }
+
+ return int64(seconds), nil
+}
diff --git a/pkg/database/migrations/4_movie.up.sql b/pkg/database/migrations/4_movie.up.sql
new file mode 100644
index 000000000..8dd6d0e00
--- /dev/null
+++ b/pkg/database/migrations/4_movie.up.sql
@@ -0,0 +1,32 @@
+CREATE TABLE `movies` (
+ `id` integer not null primary key autoincrement,
+ `name` varchar(255),
+ `aliases` varchar(255),
+ `duration` varchar(6),
+ `date` date,
+ `rating` varchar(1),
+ `director` varchar(255),
+ `synopsis` text,
+ `front_image` blob not null,
+ `back_image` blob,
+ `checksum` varchar(255) not null,
+ `url` varchar(255),
+ `created_at` datetime not null,
+ `updated_at` datetime not null
+);
+CREATE TABLE `movies_scenes` (
+ `movie_id` integer,
+ `scene_id` integer,
+ `scene_index` varchar(2),
+ foreign key(`movie_id`) references `movies`(`id`),
+ foreign key(`scene_id`) references `scenes`(`id`)
+);
+
+
+ALTER TABLE `scraped_items` ADD COLUMN `movie_id` integer;
+CREATE UNIQUE INDEX `movies_checksum_unique` on `movies` (`checksum`);
+CREATE UNIQUE INDEX `index_movie_id_scene_index_unique` ON `movies_scenes` ( `movie_id`, `scene_index` );
+CREATE INDEX `index_movies_scenes_on_movie_id` on `movies_scenes` (`movie_id`);
+CREATE INDEX `index_movies_scenes_on_scene_id` on `movies_scenes` (`scene_id`);
+
+
diff --git a/pkg/database/migrations/5_performer_gender.down.sql b/pkg/database/migrations/5_performer_gender.down.sql
new file mode 100644
index 000000000..abe9c746c
--- /dev/null
+++ b/pkg/database/migrations/5_performer_gender.down.sql
@@ -0,0 +1,89 @@
+
+PRAGMA foreign_keys=off;
+
+-- need to re-create the performers table without the added column.
+-- also need to re-create the performers_scenes table due to the foreign key
+
+-- rename existing performers table
+ALTER TABLE `performers` RENAME TO `performers_old`;
+ALTER TABLE `performers_scenes` RENAME TO `performers_scenes_old`;
+
+-- drop the indexes
+DROP INDEX IF EXISTS `index_performers_on_name`;
+DROP INDEX IF EXISTS `index_performers_on_checksum`;
+DROP INDEX IF EXISTS `index_performers_scenes_on_scene_id`;
+DROP INDEX IF EXISTS `index_performers_scenes_on_performer_id`;
+
+-- recreate the tables
+CREATE TABLE `performers` (
+ `id` integer not null primary key autoincrement,
+ `image` blob not null,
+ `checksum` varchar(255) not null,
+ `name` varchar(255),
+ `url` varchar(255),
+ `twitter` varchar(255),
+ `instagram` varchar(255),
+ `birthdate` date,
+ `ethnicity` varchar(255),
+ `country` varchar(255),
+ `eye_color` varchar(255),
+ `height` varchar(255),
+ `measurements` varchar(255),
+ `fake_tits` varchar(255),
+ `career_length` varchar(255),
+ `tattoos` varchar(255),
+ `piercings` varchar(255),
+ `aliases` varchar(255),
+ `favorite` boolean not null default '0',
+ `created_at` datetime not null,
+ `updated_at` datetime not null
+);
+
+CREATE TABLE `performers_scenes` (
+ `performer_id` integer,
+ `scene_id` integer,
+ foreign key(`performer_id`) references `performers`(`id`),
+ foreign key(`scene_id`) references `scenes`(`id`)
+);
+
+INSERT INTO `performers`
+ SELECT
+ `id`,
+ `image`,
+ `checksum`,
+ `name`,
+ `url`,
+ `twitter`,
+ `instagram`,
+ `birthdate`,
+ `ethnicity`,
+ `country`,
+ `eye_color`,
+ `height`,
+ `measurements`,
+ `fake_tits`,
+ `career_length`,
+ `tattoos`,
+ `piercings`,
+ `aliases`,
+ `favorite`,
+ `created_at`,
+ `updated_at`
+ FROM `performers_old`;
+
+INSERT INTO `performers_scenes`
+ SELECT
+ `performer_id`,
+ `scene_id`
+ FROM `performers_scenes_old`;
+
+DROP TABLE `performers_scenes_old`;
+DROP TABLE `performers_old`;
+
+-- re-create the indexes after removing the old tables
+CREATE INDEX `index_performers_on_name` on `performers` (`name`);
+CREATE INDEX `index_performers_on_checksum` on `performers` (`checksum`);
+CREATE INDEX `index_performers_scenes_on_scene_id` on `performers_scenes` (`scene_id`);
+CREATE INDEX `index_performers_scenes_on_performer_id` on `performers_scenes` (`performer_id`);
+
+PRAGMA foreign_keys=on;
diff --git a/pkg/database/migrations/5_performer_gender.up.sql b/pkg/database/migrations/5_performer_gender.up.sql
new file mode 100644
index 000000000..1f6e85485
--- /dev/null
+++ b/pkg/database/migrations/5_performer_gender.up.sql
@@ -0,0 +1 @@
+ALTER TABLE `performers` ADD COLUMN `gender` varchar(20);
diff --git a/pkg/database/migrations/6_scenes_format.up.sql b/pkg/database/migrations/6_scenes_format.up.sql
new file mode 100644
index 000000000..93f5c44a9
--- /dev/null
+++ b/pkg/database/migrations/6_scenes_format.up.sql
@@ -0,0 +1 @@
+ALTER TABLE `scenes` ADD COLUMN `format` varchar(255);
diff --git a/pkg/database/migrations/7_performer_optimization.up.sql b/pkg/database/migrations/7_performer_optimization.up.sql
new file mode 100644
index 000000000..c09d3c4b4
--- /dev/null
+++ b/pkg/database/migrations/7_performer_optimization.up.sql
@@ -0,0 +1,101 @@
+DROP INDEX `performers_checksum_unique`;
+DROP INDEX `index_performers_on_name`;
+DROP INDEX `index_performers_on_checksum`;
+ALTER TABLE `performers` RENAME TO `temp_old_performers`;
+CREATE TABLE `performers` (
+ `id` integer not null primary key autoincrement,
+ `checksum` varchar(255) not null,
+ `name` varchar(255),
+ `gender` varchar(20),
+ `url` varchar(255),
+ `twitter` varchar(255),
+ `instagram` varchar(255),
+ `birthdate` date,
+ `ethnicity` varchar(255),
+ `country` varchar(255),
+ `eye_color` varchar(255),
+ `height` varchar(255),
+ `measurements` varchar(255),
+ `fake_tits` varchar(255),
+ `career_length` varchar(255),
+ `tattoos` varchar(255),
+ `piercings` varchar(255),
+ `aliases` varchar(255),
+ `favorite` boolean not null default '0',
+ `created_at` datetime not null,
+ `updated_at` datetime not null,
+ `image` blob not null
+);
+CREATE UNIQUE INDEX `performers_checksum_unique` on `performers` (`checksum`);
+CREATE INDEX `index_performers_on_name` on `performers` (`name`);
+INSERT INTO `performers` (
+ `id`,
+ `checksum`,
+ `name`,
+ `gender`,
+ `url`,
+ `twitter`,
+ `instagram`,
+ `birthdate`,
+ `ethnicity`,
+ `country`,
+ `eye_color`,
+ `height`,
+ `measurements`,
+ `fake_tits`,
+ `career_length`,
+ `tattoos`,
+ `piercings`,
+ `aliases`,
+ `favorite`,
+ `created_at`,
+ `updated_at`,
+ `image`
+)
+SELECT
+ `id`,
+ `checksum`,
+ `name`,
+ `gender`,
+ `url`,
+ `twitter`,
+ `instagram`,
+ `birthdate`,
+ `ethnicity`,
+ `country`,
+ `eye_color`,
+ `height`,
+ `measurements`,
+ `fake_tits`,
+ `career_length`,
+ `tattoos`,
+ `piercings`,
+ `aliases`,
+ `favorite`,
+ `created_at`,
+ `updated_at`,
+ `image`
+FROM `temp_old_performers`;
+
+DROP INDEX `index_performers_scenes_on_scene_id`;
+DROP INDEX `index_performers_scenes_on_performer_id`;
+ALTER TABLE performers_scenes RENAME TO temp_old_performers_scenes;
+CREATE TABLE `performers_scenes` (
+ `performer_id` integer,
+ `scene_id` integer,
+ foreign key(`performer_id`) references `performers`(`id`),
+ foreign key(`scene_id`) references `scenes`(`id`)
+);
+CREATE INDEX `index_performers_scenes_on_scene_id` on `performers_scenes` (`scene_id`);
+CREATE INDEX `index_performers_scenes_on_performer_id` on `performers_scenes` (`performer_id`);
+INSERT INTO `performers_scenes` (
+ `performer_id`,
+ `scene_id`
+)
+SELECT
+ `performer_id`,
+ `scene_id`
+FROM `temp_old_performers_scenes`;
+
+DROP TABLE `temp_old_performers`;
+DROP TABLE `temp_old_performers_scenes`;
diff --git a/pkg/database/migrations/8_movie_fix.up.sql b/pkg/database/migrations/8_movie_fix.up.sql
new file mode 100644
index 000000000..33414bd99
--- /dev/null
+++ b/pkg/database/migrations/8_movie_fix.up.sql
@@ -0,0 +1,106 @@
+ALTER TABLE `movies` rename to `_movies_old`;
+ALTER TABLE `movies_scenes` rename to `_movies_scenes_old`;
+
+DROP INDEX IF EXISTS `movies_checksum_unique`;
+DROP INDEX IF EXISTS `index_movie_id_scene_index_unique`;
+DROP INDEX IF EXISTS `index_movies_scenes_on_movie_id`;
+DROP INDEX IF EXISTS `index_movies_scenes_on_scene_id`;
+
+-- recreate the movies table with fixed column types and constraints
+CREATE TABLE `movies` (
+ `id` integer not null primary key autoincrement,
+ -- add not null
+ `name` varchar(255) not null,
+ `aliases` varchar(255),
+ -- varchar(6) -> integer
+ `duration` integer,
+ `date` date,
+ -- varchar(1) -> tinyint
+ `rating` tinyint,
+ `studio_id` integer,
+ `director` varchar(255),
+ `synopsis` text,
+ `checksum` varchar(255) not null,
+ `url` varchar(255),
+ `created_at` datetime not null,
+ `updated_at` datetime not null,
+ `front_image` blob not null,
+ `back_image` blob,
+ foreign key(`studio_id`) references `studios`(`id`) on delete set null
+);
+CREATE TABLE `movies_scenes` (
+ `movie_id` integer,
+ `scene_id` integer,
+ -- varchar(2) -> tinyint
+ `scene_index` tinyint,
+ foreign key(`movie_id`) references `movies`(`id`) on delete cascade,
+ foreign key(`scene_id`) references `scenes`(`id`) on delete cascade
+);
+
+-- add unique index on movie name
+CREATE UNIQUE INDEX `movies_name_unique` on `movies` (`name`);
+CREATE UNIQUE INDEX `movies_checksum_unique` on `movies` (`checksum`);
+-- remove unique index on movies_scenes
+CREATE INDEX `index_movies_scenes_on_movie_id` on `movies_scenes` (`movie_id`);
+CREATE INDEX `index_movies_scenes_on_scene_id` on `movies_scenes` (`scene_id`);
+CREATE INDEX `index_movies_on_studio_id` on `movies` (`studio_id`);
+
+-- custom functions cannot accept NULL values, so massage the old data
+UPDATE `_movies_old` set `duration` = 0 WHERE `duration` IS NULL;
+
+-- now populate from the old tables
+INSERT INTO `movies`
+ (
+ `id`,
+ `name`,
+ `aliases`,
+ `duration`,
+ `date`,
+ `rating`,
+ `director`,
+ `synopsis`,
+ `front_image`,
+ `back_image`,
+ `checksum`,
+ `url`,
+ `created_at`,
+ `updated_at`
+ )
+ SELECT
+ `id`,
+ `name`,
+ `aliases`,
+ durationToTinyInt(`duration`),
+ `date`,
+ CAST(`rating` as tinyint),
+ `director`,
+ `synopsis`,
+ `front_image`,
+ `back_image`,
+ `checksum`,
+ `url`,
+ `created_at`,
+ `updated_at`
+ FROM `_movies_old`
+ -- ignore null named movies
+ WHERE `name` is not null;
+
+-- durationToTinyInt returns 0 if it cannot parse the string
+-- set these values to null instead
+UPDATE `movies` SET `duration` = NULL WHERE `duration` = 0;
+
+INSERT INTO `movies_scenes`
+ (
+ `movie_id`,
+ `scene_id`,
+ `scene_index`
+ )
+ SELECT
+ `movie_id`,
+ `scene_id`,
+ CAST(`scene_index` as tinyint)
+ FROM `_movies_scenes_old`;
+
+-- drop old tables
+DROP TABLE `_movies_scenes_old`;
+DROP TABLE `_movies_old`;
diff --git a/pkg/ffmpeg/encoder.go b/pkg/ffmpeg/encoder.go
index 7ceb4b594..46711d146 100644
--- a/pkg/ffmpeg/encoder.go
+++ b/pkg/ffmpeg/encoder.go
@@ -18,8 +18,8 @@ type Encoder struct {
}
var (
- runningEncoders map[string][]*os.Process = make(map[string][]*os.Process)
- runningEncodersMutex = sync.RWMutex{}
+ runningEncoders = make(map[string][]*os.Process)
+ runningEncodersMutex = sync.RWMutex{}
)
func NewEncoder(ffmpegPath string) Encoder {
diff --git a/pkg/ffmpeg/encoder_scene_preview_chunk.go b/pkg/ffmpeg/encoder_scene_preview_chunk.go
index 9ff655ead..4f6a8a6fd 100644
--- a/pkg/ffmpeg/encoder_scene_preview_chunk.go
+++ b/pkg/ffmpeg/encoder_scene_preview_chunk.go
@@ -13,9 +13,10 @@ type ScenePreviewChunkOptions struct {
OutputPath string
}
-func (e *Encoder) ScenePreviewVideoChunk(probeResult VideoFile, options ScenePreviewChunkOptions) {
+func (e *Encoder) ScenePreviewVideoChunk(probeResult VideoFile, options ScenePreviewChunkOptions, preset string) {
args := []string{
"-v", "error",
+ "-xerror",
"-ss", strconv.Itoa(options.Time),
"-i", probeResult.Path,
"-t", "0.75",
@@ -25,7 +26,7 @@ func (e *Encoder) ScenePreviewVideoChunk(probeResult VideoFile, options ScenePre
"-pix_fmt", "yuv420p",
"-profile:v", "high",
"-level", "4.2",
- "-preset", "veryslow",
+ "-preset", preset,
"-crf", "21",
"-threads", "4",
"-vf", fmt.Sprintf("scale=%v:-2", options.Width),
diff --git a/pkg/ffmpeg/encoder_screenshot.go b/pkg/ffmpeg/encoder_screenshot.go
index f9923cf1e..bd1926ba3 100644
--- a/pkg/ffmpeg/encoder_screenshot.go
+++ b/pkg/ffmpeg/encoder_screenshot.go
@@ -10,7 +10,7 @@ type ScreenshotOptions struct {
Verbosity string
}
-func (e *Encoder) Screenshot(probeResult VideoFile, options ScreenshotOptions) {
+func (e *Encoder) Screenshot(probeResult VideoFile, options ScreenshotOptions) error {
if options.Verbosity == "" {
options.Verbosity = "error"
}
@@ -28,5 +28,7 @@ func (e *Encoder) Screenshot(probeResult VideoFile, options ScreenshotOptions) {
"-f", "image2",
options.OutputPath,
}
- _, _ = e.run(probeResult, args)
+ _, err := e.run(probeResult, args)
+
+ return err
}
diff --git a/pkg/ffmpeg/encoder_transcode.go b/pkg/ffmpeg/encoder_transcode.go
index a908f00ac..12b596396 100644
--- a/pkg/ffmpeg/encoder_transcode.go
+++ b/pkg/ffmpeg/encoder_transcode.go
@@ -69,6 +69,49 @@ func (e *Encoder) Transcode(probeResult VideoFile, options TranscodeOptions) {
_, _ = e.run(probeResult, args)
}
+//transcode the video and remove the audio:
+//for some videos whose audio codec is not supported by ffmpeg,
+//ffmpeg fails if you try to transcode the audio
+func (e *Encoder) TranscodeVideo(probeResult VideoFile, options TranscodeOptions) {
+ scale := calculateTranscodeScale(probeResult, options.MaxTranscodeSize)
+ args := []string{
+ "-i", probeResult.Path,
+ "-an",
+ "-c:v", "libx264",
+ "-pix_fmt", "yuv420p",
+ "-profile:v", "high",
+ "-level", "4.2",
+ "-preset", "superfast",
+ "-crf", "23",
+ "-vf", "scale=" + scale,
+ options.OutputPath,
+ }
+ _, _ = e.run(probeResult, args)
+}
+
+//copy the video stream as is, transcode audio
+func (e *Encoder) TranscodeAudio(probeResult VideoFile, options TranscodeOptions) {
+ args := []string{
+ "-i", probeResult.Path,
+ "-c:v", "copy",
+ "-c:a", "aac",
+ "-strict", "-2",
+ options.OutputPath,
+ }
+ _, _ = e.run(probeResult, args)
+}
+
+//copy the video stream as is, drop audio
+func (e *Encoder) CopyVideo(probeResult VideoFile, options TranscodeOptions) {
+ args := []string{
+ "-i", probeResult.Path,
+ "-an",
+ "-c:v", "copy",
+ options.OutputPath,
+ }
+ _, _ = e.run(probeResult, args)
+}
+
func (e *Encoder) StreamTranscode(probeResult VideoFile, startTime string, maxTranscodeSize models.StreamingResolutionEnum) (io.ReadCloser, *os.Process, error) {
scale := calculateTranscodeScale(probeResult, maxTranscodeSize)
args := []string{}
@@ -92,3 +135,53 @@ func (e *Encoder) StreamTranscode(probeResult VideoFile, startTime string, maxTr
return e.stream(probeResult, args)
}
+
+//transcode the video, remove the audio
+//in some videos where the audio codec is not supported by ffmpeg
+//ffmpeg fails if you try to transcode the audio
+func (e *Encoder) StreamTranscodeVideo(probeResult VideoFile, startTime string, maxTranscodeSize models.StreamingResolutionEnum) (io.ReadCloser, *os.Process, error) {
+ scale := calculateTranscodeScale(probeResult, maxTranscodeSize)
+ args := []string{}
+
+ if startTime != "" {
+ args = append(args, "-ss", startTime)
+ }
+
+ args = append(args,
+ "-i", probeResult.Path,
+ "-an",
+ "-c:v", "libvpx-vp9",
+ "-vf", "scale="+scale,
+ "-deadline", "realtime",
+ "-cpu-used", "5",
+ "-row-mt", "1",
+ "-crf", "30",
+ "-b:v", "0",
+ "-f", "webm",
+ "pipe:",
+ )
+
+ return e.stream(probeResult, args)
+}
+
+//it is very common in MKVs to have just the audio codec unsupported
+//copy the video stream, transcode the audio and serve as Matroska
+func (e *Encoder) StreamMkvTranscodeAudio(probeResult VideoFile, startTime string, maxTranscodeSize models.StreamingResolutionEnum) (io.ReadCloser, *os.Process, error) {
+ args := []string{}
+
+ if startTime != "" {
+ args = append(args, "-ss", startTime)
+ }
+
+ args = append(args,
+ "-i", probeResult.Path,
+ "-c:v", "copy",
+ "-c:a", "libopus",
+ "-b:a", "96k",
+ "-vbr", "on",
+ "-f", "matroska",
+ "pipe:",
+ )
+
+ return e.stream(probeResult, args)
+}
diff --git a/pkg/ffmpeg/ffprobe.go b/pkg/ffmpeg/ffprobe.go
index cbddb14d3..e309619e3 100644
--- a/pkg/ffmpeg/ffprobe.go
+++ b/pkg/ffmpeg/ffprobe.go
@@ -10,11 +10,106 @@ import (
"strconv"
"strings"
"time"
+
+ "github.com/stashapp/stash/pkg/logger"
+ "github.com/stashapp/stash/pkg/manager/config"
)
-var ValidCodecs = []string{"h264", "h265", "vp8", "vp9"}
+type Container string
+type AudioCodec string
+
+const (
+ Mp4 Container = "mp4"
+ M4v Container = "m4v"
+ Mov Container = "mov"
+ Wmv Container = "wmv"
+ Webm Container = "webm"
+ Matroska Container = "matroska"
+ Avi Container = "avi"
+ Flv Container = "flv"
+ Mpegts Container = "mpegts"
+ Aac AudioCodec = "aac"
+ Mp3 AudioCodec = "mp3"
+ Opus AudioCodec = "opus"
+ Vorbis AudioCodec = "vorbis"
+ MissingUnsupported AudioCodec = ""
+ Mp4Ffmpeg string = "mov,mp4,m4a,3gp,3g2,mj2" // browsers support all of them
+ M4vFfmpeg string = "mov,mp4,m4a,3gp,3g2,mj2" // so we don't care that ffmpeg
+ MovFfmpeg string = "mov,mp4,m4a,3gp,3g2,mj2" // can't differentiate between them
+ WmvFfmpeg string = "asf"
+ WebmFfmpeg string = "matroska,webm"
+ MatroskaFfmpeg string = "matroska,webm"
+ AviFfmpeg string = "avi"
+ FlvFfmpeg string = "flv"
+ MpegtsFfmpeg string = "mpegts"
+ H264 string = "h264"
+ H265 string = "h265" // found in rare cases from a faulty encoder
+ Hevc string = "hevc"
+ Vp8 string = "vp8"
+ Vp9 string = "vp9"
+ MimeWebm string = "video/webm"
+ MimeMkv string = "video/x-matroska"
+)
+
+var ValidCodecs = []string{H264, H265, Vp8, Vp9}
+
+var validForH264Mkv = []Container{Mp4, Matroska}
+var validForH264 = []Container{Mp4}
+var validForH265Mkv = []Container{Mp4, Matroska}
+var validForH265 = []Container{Mp4}
+var validForVp8 = []Container{Webm}
+var validForVp9Mkv = []Container{Webm, Matroska}
+var validForVp9 = []Container{Webm}
+var validForHevcMkv = []Container{Mp4, Matroska}
+var validForHevc = []Container{Mp4}
+
+var validAudioForMkv = []AudioCodec{Aac, Mp3, Vorbis, Opus}
+var validAudioForWebm = []AudioCodec{Vorbis, Opus}
+var validAudioForMp4 = []AudioCodec{Aac, Mp3}
+
+//maps user readable container strings to ffprobe's format_name
+//on some formats ffprobe can't differentiate
+var ContainerToFfprobe = map[Container]string{
+ Mp4: Mp4Ffmpeg,
+ M4v: M4vFfmpeg,
+ Mov: MovFfmpeg,
+ Wmv: WmvFfmpeg,
+ Webm: WebmFfmpeg,
+ Matroska: MatroskaFfmpeg,
+ Avi: AviFfmpeg,
+ Flv: FlvFfmpeg,
+ Mpegts: MpegtsFfmpeg,
+}
+
+var FfprobeToContainer = map[string]Container{
+ Mp4Ffmpeg: Mp4,
+ WmvFfmpeg: Wmv,
+ AviFfmpeg: Avi,
+ FlvFfmpeg: Flv,
+ MpegtsFfmpeg: Mpegts,
+ MatroskaFfmpeg: Matroska,
+}
+
+func MatchContainer(format string, filePath string) Container { // match ffprobe string to our Container
+
+ container := FfprobeToContainer[format]
+ if container == Matroska {
+ container = MagicContainer(filePath) // use magic number instead of ffprobe for matroska,webm
+ }
+ if container == "" { // if format is not in our Container list leave it as ffprobe's reported format_name
+ container = Container(format)
+ }
+ return container
+}
func IsValidCodec(codecName string) bool {
+ forceHEVC := config.GetForceHEVC()
+ if forceHEVC {
+ if codecName == Hevc {
+ return true
+ }
+ }
+
for _, c := range ValidCodecs {
if c == codecName {
return true
@@ -23,6 +118,78 @@ func IsValidCodec(codecName string) bool {
return false
}
+func IsValidAudio(audio AudioCodec, ValidCodecs []AudioCodec) bool {
+
+ // if audio codec is missing or unsupported by ffmpeg we can't do anything about it
+ // report it as valid so that the file can at least be streamed directly if the video codec is supported
+ if audio == MissingUnsupported {
+ return true
+ }
+
+ for _, c := range ValidCodecs {
+ if c == audio {
+ return true
+ }
+ }
+
+ return false
+}
+
+func IsValidAudioForContainer(audio AudioCodec, format Container) bool {
+ switch format {
+ case Matroska:
+ return IsValidAudio(audio, validAudioForMkv)
+ case Webm:
+ return IsValidAudio(audio, validAudioForWebm)
+ case Mp4:
+ return IsValidAudio(audio, validAudioForMp4)
+ }
+ return false
+
+}
+
+func IsValidForContainer(format Container, validContainers []Container) bool {
+ for _, fmt := range validContainers {
+ if fmt == format {
+ return true
+ }
+ }
+ return false
+}
+
+//extend stream validation check to take into account container
+func IsValidCombo(codecName string, format Container) bool {
+ forceMKV := config.GetForceMKV()
+ forceHEVC := config.GetForceHEVC()
+ switch codecName {
+ case H264:
+ if forceMKV {
+ return IsValidForContainer(format, validForH264Mkv)
+ }
+ return IsValidForContainer(format, validForH264)
+ case H265:
+ if forceMKV {
+ return IsValidForContainer(format, validForH265Mkv)
+ }
+ return IsValidForContainer(format, validForH265)
+ case Vp8:
+ return IsValidForContainer(format, validForVp8)
+ case Vp9:
+ if forceMKV {
+ return IsValidForContainer(format, validForVp9Mkv)
+ }
+ return IsValidForContainer(format, validForVp9)
+ case Hevc:
+ if forceHEVC {
+ if forceMKV {
+ return IsValidForContainer(format, validForHevcMkv)
+ }
+ return IsValidForContainer(format, validForHevc)
+ }
+ }
+ return false
+}
+
type VideoFile struct {
JSON FFProbeJSON
AudioStream *FFProbeStream
@@ -98,7 +265,11 @@ func parse(filePath string, probeJSON *FFProbeJSON) (*VideoFile, error) {
result.Container = probeJSON.Format.FormatName
duration, _ := strconv.ParseFloat(probeJSON.Format.Duration, 64)
result.Duration = math.Round(duration*100) / 100
- fileStat, _ := os.Stat(filePath)
+ fileStat, err := os.Stat(filePath)
+ if err != nil {
+ logger.Errorf("Error statting file: %v", err)
+ return nil, err
+ }
result.Size = fileStat.Size()
result.StartTime, _ = strconv.ParseFloat(probeJSON.Format.StartTime, 64)
result.CreationTime = probeJSON.Format.Tags.CreationTime.Time
diff --git a/pkg/ffmpeg/media_detection.go b/pkg/ffmpeg/media_detection.go
new file mode 100644
index 000000000..4de7e4ba6
--- /dev/null
+++ b/pkg/ffmpeg/media_detection.go
@@ -0,0 +1,66 @@
+package ffmpeg
+
+import (
+ "bytes"
+ "github.com/stashapp/stash/pkg/logger"
+ "os"
+)
+
+// detect file format from magic file number
+// https://github.com/lex-r/filetype/blob/73c10ad714e3b8ecf5cd1564c882ed6d440d5c2d/matchers/video.go
+
+func mkv(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x1A && buf[1] == 0x45 &&
+ buf[2] == 0xDF && buf[3] == 0xA3 &&
+ containsMatroskaSignature(buf, []byte{'m', 'a', 't', 'r', 'o', 's', 'k', 'a'})
+}
+
+func webm(buf []byte) bool {
+ return len(buf) > 3 &&
+ buf[0] == 0x1A && buf[1] == 0x45 &&
+ buf[2] == 0xDF && buf[3] == 0xA3 &&
+ containsMatroskaSignature(buf, []byte{'w', 'e', 'b', 'm'})
+}
+
+func containsMatroskaSignature(buf, subType []byte) bool {
+ limit := 4096
+ if len(buf) < limit {
+ limit = len(buf)
+ }
+
+ index := bytes.Index(buf[:limit], subType)
+ if index < 3 {
+ return false
+ }
+
+ return buf[index-3] == 0x42 && buf[index-2] == 0x82
+}
+
+//returns the detected Container ("" on error or no match)
+//implements only mkv or webm as ffprobe can't distinguish between them
+//and not all browsers support mkv
+func MagicContainer(file_path string) Container {
+ file, err := os.Open(file_path)
+ if err != nil {
+ logger.Errorf("[magicfile] %v", err)
+ return ""
+ }
+
+ defer file.Close()
+
+ buf := make([]byte, 4096)
+ _, err = file.Read(buf)
+ if err != nil {
+ logger.Errorf("[magicfile] %v", err)
+ return ""
+ }
+
+ if webm(buf) {
+ return Webm
+ }
+ if mkv(buf) {
+ return Matroska
+ }
+ return ""
+}
diff --git a/pkg/manager/config/config.go b/pkg/manager/config/config.go
index 490094966..f21493329 100644
--- a/pkg/manager/config/config.go
+++ b/pkg/manager/config/config.go
@@ -19,10 +19,12 @@ const Metadata = "metadata"
const Downloads = "downloads"
const Username = "username"
const Password = "password"
+const MaxSessionAge = "max_session_age"
+
const DefaultMaxSessionAge = 60 * 60 * 1 // 1 hour
const Database = "database"
-const ScrapersPath = "scrapers_path"
const Exclude = "exclude"
const MaxTranscodeSize = "max_transcode_size"
@@ -32,6 +34,19 @@ const Host = "host"
const Port = "port"
const ExternalHost = "external_host"
+// key used to sign JWT tokens
+const JWTSignKey = "jwt_secret_key"
+
+// key used for session store
+const SessionStoreKey = "session_store_key"
+
+// scraping options
+const ScrapersPath = "scrapers_path"
+const ScraperUserAgent = "scraper_user_agent"
+
+// i18n
+const Language = "language"
+
// Interface options
const SoundOnPreview = "sound_on_preview"
const WallShowTitle = "wall_show_title"
@@ -39,6 +54,11 @@ const MaximumLoopDuration = "maximum_loop_duration"
const AutostartVideo = "autostart_video"
const ShowStudioAsText = "show_studio_as_text"
const CSSEnabled = "cssEnabled"
+const WallPlayback = "wall_playback"
+
+// Playback force codec,container
+const ForceMKV = "forceMKV"
+const ForceHEVC = "forceHEVC"
// Logging options
const LogFile = "logFile"
@@ -83,6 +103,14 @@ func GetDatabasePath() string {
return viper.GetString(Database)
}
+func GetJWTSignKey() []byte {
+ return []byte(viper.GetString(JWTSignKey))
+}
+
+func GetSessionStoreKey() []byte {
+ return []byte(viper.GetString(SessionStoreKey))
+}
+
func GetDefaultScrapersPath() string {
// default to the same directory as the config file
configFileUsed := viper.ConfigFileUsed()
@@ -97,10 +125,25 @@ func GetExcludes() []string {
return viper.GetStringSlice(Exclude)
}
+func GetLanguage() string {
+ ret := viper.GetString(Language)
+
+ // default to English
+ if ret == "" {
+ return "en-US"
+ }
+
+ return ret
+}
+
func GetScrapersPath() string {
return viper.GetString(ScrapersPath)
}
+func GetScraperUserAgent() string {
+ return viper.GetString(ScraperUserAgent)
+}
+
func GetHost() string {
return viper.GetString(Host)
}
@@ -181,6 +224,13 @@ func ValidateCredentials(username string, password string) bool {
return username == authUser && err == nil
}
+// GetMaxSessionAge gets the maximum age for session cookies, in seconds.
+// Session cookie expiry times are refreshed every request.
+func GetMaxSessionAge() int {
+ viper.SetDefault(MaxSessionAge, DefaultMaxSessionAge)
+ return viper.GetInt(MaxSessionAge)
+}
+
// Interface options
func GetSoundOnPreview() bool {
viper.SetDefault(SoundOnPreview, true)
@@ -192,6 +242,11 @@ func GetWallShowTitle() bool {
return viper.GetBool(WallShowTitle)
}
+func GetWallPlayback() string {
+ viper.SetDefault(WallPlayback, "video")
+ return viper.GetString(WallPlayback)
+}
+
func GetMaximumLoopDuration() int {
viper.SetDefault(MaximumLoopDuration, 0)
return viper.GetInt(MaximumLoopDuration)
@@ -246,6 +301,15 @@ func GetCSSEnabled() bool {
return viper.GetBool(CSSEnabled)
}
+// force codec,container
+func GetForceMKV() bool {
+ return viper.GetBool(ForceMKV)
+}
+
+func GetForceHEVC() bool {
+ return viper.GetBool(ForceHEVC)
+}
+
// GetLogFile returns the filename of the file to output logs to.
// An empty string means that file logging will be disabled.
func GetLogFile() string {
@@ -294,3 +358,21 @@ func IsValid() bool {
// TODO: check valid paths
return setPaths
}
+
+// SetInitialConfig fills in missing required config fields
+func SetInitialConfig() error {
+ // generate some api keys
+ const apiKeyLength = 32
+
+ if string(GetJWTSignKey()) == "" {
+ signKey := utils.GenerateRandomKey(apiKeyLength)
+ Set(JWTSignKey, signKey)
+ }
+
+ if string(GetSessionStoreKey()) == "" {
+ sessionStoreKey := utils.GenerateRandomKey(apiKeyLength)
+ Set(SessionStoreKey, sessionStoreKey)
+ }
+
+ return Write()
+}
diff --git a/pkg/manager/exclude_files.go b/pkg/manager/exclude_files.go
index 818cb25f1..6d5a28f9f 100644
--- a/pkg/manager/exclude_files.go
+++ b/pkg/manager/exclude_files.go
@@ -1,9 +1,10 @@
package manager
import (
- "github.com/stashapp/stash/pkg/logger"
"regexp"
"strings"
+
+ "github.com/stashapp/stash/pkg/logger"
)
func excludeFiles(files []string, patterns []string) ([]string, int) {
@@ -37,21 +38,13 @@ func excludeFiles(files []string, patterns []string) ([]string, int) {
}
func matchFile(file string, patterns []string) bool {
- if patterns == nil {
- logger.Infof("No exclude patterns in config.")
-
- } else {
+ if patterns != nil {
fileRegexps := generateRegexps(patterns)
- if len(fileRegexps) == 0 {
- return false
- }
-
for _, regPattern := range fileRegexps {
if regPattern.MatchString(strings.ToLower(file)) {
return true
}
-
}
}
diff --git a/pkg/manager/filename_parser.go b/pkg/manager/filename_parser.go
index 869948f46..eae6ba365 100644
--- a/pkg/manager/filename_parser.go
+++ b/pkg/manager/filename_parser.go
@@ -7,6 +7,7 @@ import (
"regexp"
"strconv"
"strings"
+ "time"
"github.com/stashapp/stash/pkg/models"
@@ -87,8 +88,10 @@ func initParserFields() {
//I = new ParserField("i", undefined, "Matches any ignored word", false);
ret["d"] = newParserField("d", `(?:\.|-|_)`, false)
+ ret["rating"] = newParserField("rating", `\d`, true)
ret["performer"] = newParserField("performer", ".*", true)
ret["studio"] = newParserField("studio", ".*", true)
+ ret["movie"] = newParserField("movie", ".*", true)
ret["tag"] = newParserField("tag", ".*", true)
// date fields
@@ -96,6 +99,7 @@ func initParserFields() {
ret["yyyy"] = newParserField("yyyy", `\d{4}`, true)
ret["yy"] = newParserField("yy", `\d{2}`, true)
ret["mm"] = newParserField("mm", `\d{2}`, true)
+ ret["mmm"] = newParserField("mmm", `\w{3}`, true)
ret["dd"] = newParserField("dd", `\d{2}`, true)
ret["yyyymmdd"] = newFullDateParserField("yyyymmdd", `\d{8}`)
ret["yymmdd"] = newFullDateParserField("yymmdd", `\d{6}`)
@@ -204,6 +208,7 @@ type sceneHolder struct {
mm string
dd string
performers []string
+ movies []string
studio string
tags []string
}
@@ -222,6 +227,10 @@ func newSceneHolder(scene *models.Scene) *sceneHolder {
return &ret
}
+func validateRating(rating int) bool {
+ return rating >= 1 && rating <= 5
+}
+
func validateDate(dateStr string) bool {
splits := strings.Split(dateStr, "-")
if len(splits) != 3 {
@@ -283,6 +292,20 @@ func (h *sceneHolder) setDate(field *parserField, value string) {
}
}
+func mmmToMonth(mmm string) string {
+ format := "02-Jan-2006"
+ dateStr := "01-" + mmm + "-2000"
+ t, err := time.Parse(format, dateStr)
+
+ if err != nil {
+ return ""
+ }
+
+ // expect month in two-digit format
+ format = "01-02-2006"
+ return t.Format(format)[0:2]
+}
+
func (h *sceneHolder) setField(field parserField, value interface{}) {
if field.isFullDateField {
h.setDate(&field, value.(string))
@@ -302,27 +325,35 @@ func (h *sceneHolder) setField(field parserField, value interface{}) {
Valid: true,
}
}
+ case "rating":
+ rating, _ := strconv.Atoi(value.(string))
+ if validateRating(rating) {
+ h.result.Rating = sql.NullInt64{
+ Int64: int64(rating),
+ Valid: true,
+ }
+ }
case "performer":
// add performer to list
h.performers = append(h.performers, value.(string))
case "studio":
h.studio = value.(string)
+ case "movie":
+ h.movies = append(h.movies, value.(string))
case "tag":
h.tags = append(h.tags, value.(string))
case "yyyy":
h.yyyy = value.(string)
- break
case "yy":
v := value.(string)
v = "20" + v
h.yyyy = v
- break
+ case "mmm":
+ h.mm = mmmToMonth(value.(string))
case "mm":
h.mm = value.(string)
- break
case "dd":
h.dd = value.(string)
- break
}
}
@@ -374,7 +405,7 @@ func (m parseMapper) parse(scene *models.Scene) *sceneHolder {
}
type performerQueryer interface {
- FindByNames(names []string, tx *sqlx.Tx) ([]*models.Performer, error)
+ FindByNames(names []string, tx *sqlx.Tx, nocase bool) ([]*models.Performer, error)
}
type sceneQueryer interface {
@@ -382,11 +413,15 @@ type sceneQueryer interface {
}
type tagQueryer interface {
- FindByName(name string, tx *sqlx.Tx) (*models.Tag, error)
+ FindByName(name string, tx *sqlx.Tx, nocase bool) (*models.Tag, error)
}
type studioQueryer interface {
- FindByName(name string, tx *sqlx.Tx) (*models.Studio, error)
+ FindByName(name string, tx *sqlx.Tx, nocase bool) (*models.Studio, error)
+}
+
+type movieQueryer interface {
+ FindByName(name string, tx *sqlx.Tx, nocase bool) (*models.Movie, error)
}
type SceneFilenameParser struct {
@@ -396,12 +431,14 @@ type SceneFilenameParser struct {
whitespaceRE *regexp.Regexp
performerCache map[string]*models.Performer
studioCache map[string]*models.Studio
+ movieCache map[string]*models.Movie
tagCache map[string]*models.Tag
performerQuery performerQueryer
sceneQuery sceneQueryer
tagQuery tagQueryer
studioQuery studioQueryer
+ movieQuery movieQueryer
}
func NewSceneFilenameParser(filter *models.FindFilterType, config models.SceneParserInput) *SceneFilenameParser {
@@ -413,6 +450,7 @@ func NewSceneFilenameParser(filter *models.FindFilterType, config models.ScenePa
p.performerCache = make(map[string]*models.Performer)
p.studioCache = make(map[string]*models.Studio)
+ p.movieCache = make(map[string]*models.Movie)
p.tagCache = make(map[string]*models.Tag)
p.initWhiteSpaceRegex()
@@ -429,6 +467,9 @@ func NewSceneFilenameParser(filter *models.FindFilterType, config models.ScenePa
studioQuery := models.NewStudioQueryBuilder()
p.studioQuery = &studioQuery
+ movieQuery := models.NewMovieQueryBuilder()
+ p.movieQuery = &movieQuery
+
return p
}
@@ -505,7 +546,7 @@ func (p *SceneFilenameParser) queryPerformer(performerName string) *models.Perfo
}
// perform an exact match and grab the first
- performers, _ := p.performerQuery.FindByNames([]string{performerName}, nil)
+ performers, _ := p.performerQuery.FindByNames([]string{performerName}, nil, true)
var ret *models.Performer
if len(performers) > 0 {
@@ -527,7 +568,7 @@ func (p *SceneFilenameParser) queryStudio(studioName string) *models.Studio {
return ret
}
- ret, _ := p.studioQuery.FindByName(studioName, nil)
+ ret, _ := p.studioQuery.FindByName(studioName, nil, true)
// add result to cache
p.studioCache[studioName] = ret
@@ -535,6 +576,23 @@ func (p *SceneFilenameParser) queryStudio(studioName string) *models.Studio {
return ret
}
+func (p *SceneFilenameParser) queryMovie(movieName string) *models.Movie {
+ // massage the movie name
+ movieName = delimiterRE.ReplaceAllString(movieName, " ")
+
+ // check cache first
+ if ret, found := p.movieCache[movieName]; found {
+ return ret
+ }
+
+ ret, _ := p.movieQuery.FindByName(movieName, nil, true)
+
+ // add result to cache
+ p.movieCache[movieName] = ret
+
+ return ret
+}
+
func (p *SceneFilenameParser) queryTag(tagName string) *models.Tag {
// massage the performer name
tagName = delimiterRE.ReplaceAllString(tagName, " ")
@@ -545,7 +603,7 @@ func (p *SceneFilenameParser) queryTag(tagName string) *models.Tag {
}
// match tag name exactly
- ret, _ := p.tagQuery.FindByName(tagName, nil)
+ ret, _ := p.tagQuery.FindByName(tagName, nil, true)
// add result to cache
p.tagCache[tagName] = ret
@@ -596,6 +654,24 @@ func (p *SceneFilenameParser) setStudio(h sceneHolder, result *models.SceneParse
}
}
+func (p *SceneFilenameParser) setMovies(h sceneHolder, result *models.SceneParserResult) {
+ // query for each movie
+ moviesSet := make(map[int]bool)
+ for _, movieName := range h.movies {
+ if movieName != "" {
+ movie := p.queryMovie(movieName)
+ if movie != nil {
+ if _, found := moviesSet[movie.ID]; !found {
+ result.Movies = append(result.Movies, &models.SceneMovieID{
+ MovieID: strconv.Itoa(movie.ID),
+ })
+ moviesSet[movie.ID] = true
+ }
+ }
+ }
+ }
+}
+
func (p *SceneFilenameParser) setParserResult(h sceneHolder, result *models.SceneParserResult) {
if h.result.Title.Valid {
title := h.result.Title.String
@@ -612,6 +688,11 @@ func (p *SceneFilenameParser) setParserResult(h sceneHolder, result *models.Scen
result.Date = &h.result.Date.String
}
+ if h.result.Rating.Valid {
+ rating := int(h.result.Rating.Int64)
+ result.Rating = &rating
+ }
+
if len(h.performers) > 0 {
p.setPerformers(h, result)
}
@@ -619,4 +700,9 @@ func (p *SceneFilenameParser) setParserResult(h sceneHolder, result *models.Scen
p.setTags(h, result)
}
p.setStudio(h, result)
+
+ if len(h.movies) > 0 {
+ p.setMovies(h, result)
+ }
+
}
diff --git a/pkg/manager/generator_preview.go b/pkg/manager/generator_preview.go
index 5043ce349..f91fdf956 100644
--- a/pkg/manager/generator_preview.go
+++ b/pkg/manager/generator_preview.go
@@ -16,9 +16,14 @@ type PreviewGenerator struct {
VideoFilename string
ImageFilename string
OutputDirectory string
+
+ GenerateVideo bool
+ GenerateImage bool
+
+ PreviewPreset string
}
-func NewPreviewGenerator(videoFile ffmpeg.VideoFile, videoFilename string, imageFilename string, outputDirectory string) (*PreviewGenerator, error) {
+func NewPreviewGenerator(videoFile ffmpeg.VideoFile, videoFilename string, imageFilename string, outputDirectory string, generateVideo bool, generateImage bool, previewPreset string) (*PreviewGenerator, error) {
exists, err := utils.FileExists(videoFile.Path)
if !exists {
return nil, err
@@ -37,6 +42,9 @@ func NewPreviewGenerator(videoFile ffmpeg.VideoFile, videoFilename string, image
VideoFilename: videoFilename,
ImageFilename: imageFilename,
OutputDirectory: outputDirectory,
+ GenerateVideo: generateVideo,
+ GenerateImage: generateImage,
+ PreviewPreset: previewPreset,
}, nil
}
@@ -47,11 +55,16 @@ func (g *PreviewGenerator) Generate() error {
if err := g.generateConcatFile(); err != nil {
return err
}
- if err := g.generateVideo(&encoder); err != nil {
- return err
+
+ if g.GenerateVideo {
+ if err := g.generateVideo(&encoder); err != nil {
+ return err
+ }
}
- if err := g.generateImage(&encoder); err != nil {
- return err
+ if g.GenerateImage {
+ if err := g.generateImage(&encoder); err != nil {
+ return err
+ }
}
return nil
}
@@ -91,7 +104,7 @@ func (g *PreviewGenerator) generateVideo(encoder *ffmpeg.Encoder) error {
Width: 640,
OutputPath: chunkOutputPath,
}
- encoder.ScenePreviewVideoChunk(g.Info.VideoFile, options)
+ encoder.ScenePreviewVideoChunk(g.Info.VideoFile, options, g.PreviewPreset)
}
videoOutputPath := filepath.Join(g.OutputDirectory, g.VideoFilename)
diff --git a/pkg/manager/json_utils.go b/pkg/manager/json_utils.go
index af7205f52..384f5937c 100644
--- a/pkg/manager/json_utils.go
+++ b/pkg/manager/json_utils.go
@@ -38,6 +38,14 @@ func (jp *jsonUtils) saveStudio(checksum string, studio *jsonschema.Studio) erro
return jsonschema.SaveStudioFile(instance.Paths.JSON.StudioJSONPath(checksum), studio)
}
+func (jp *jsonUtils) getMovie(checksum string) (*jsonschema.Movie, error) {
+ return jsonschema.LoadMovieFile(instance.Paths.JSON.MovieJSONPath(checksum))
+}
+
+func (jp *jsonUtils) saveMovie(checksum string, movie *jsonschema.Movie) error {
+ return jsonschema.SaveMovieFile(instance.Paths.JSON.MovieJSONPath(checksum), movie)
+}
+
func (jp *jsonUtils) getScene(checksum string) (*jsonschema.Scene, error) {
return jsonschema.LoadSceneFile(instance.Paths.JSON.SceneJSONPath(checksum))
}
diff --git a/pkg/manager/jsonschema/mappings.go b/pkg/manager/jsonschema/mappings.go
index 7a41ebc92..621ffc024 100644
--- a/pkg/manager/jsonschema/mappings.go
+++ b/pkg/manager/jsonschema/mappings.go
@@ -1,8 +1,8 @@
package jsonschema
import (
- "encoding/json"
"fmt"
+ "github.com/json-iterator/go"
"os"
)
@@ -19,6 +19,7 @@ type PathMapping struct {
type Mappings struct {
Performers []NameMapping `json:"performers"`
Studios []NameMapping `json:"studios"`
+ Movies []NameMapping `json:"movies"`
Galleries []PathMapping `json:"galleries"`
Scenes []PathMapping `json:"scenes"`
}
@@ -30,6 +31,7 @@ func LoadMappingsFile(filePath string) (*Mappings, error) {
if err != nil {
return nil, err
}
+ var json = jsoniter.ConfigCompatibleWithStandardLibrary
jsonParser := json.NewDecoder(file)
err = jsonParser.Decode(&mappings)
if err != nil {
diff --git a/pkg/manager/jsonschema/movie.go b/pkg/manager/jsonschema/movie.go
new file mode 100644
index 000000000..ae062acb6
--- /dev/null
+++ b/pkg/manager/jsonschema/movie.go
@@ -0,0 +1,47 @@
+package jsonschema
+
+import (
+ "fmt"
+ "github.com/json-iterator/go"
+ "os"
+
+ "github.com/stashapp/stash/pkg/models"
+)
+
+type Movie struct {
+ Name string `json:"name,omitempty"`
+ Aliases string `json:"aliases,omitempty"`
+ Duration int `json:"duration,omitempty"`
+ Date string `json:"date,omitempty"`
+ Rating int `json:"rating,omitempty"`
+ Director string `json:"director,omitempty"`
+ Synopsis string `json:"sypnopsis,omitempty"`
+ FrontImage string `json:"front_image,omitempty"`
+ BackImage string `json:"back_image,omitempty"`
+ URL string `json:"url,omitempty"`
+ CreatedAt models.JSONTime `json:"created_at,omitempty"`
+ UpdatedAt models.JSONTime `json:"updated_at,omitempty"`
+}
+
+func LoadMovieFile(filePath string) (*Movie, error) {
+ var movie Movie
+ file, err := os.Open(filePath)
+ defer file.Close()
+ if err != nil {
+ return nil, err
+ }
+ var json = jsoniter.ConfigCompatibleWithStandardLibrary
+ jsonParser := json.NewDecoder(file)
+ err = jsonParser.Decode(&movie)
+ if err != nil {
+ return nil, err
+ }
+ return &movie, nil
+}
+
+func SaveMovieFile(filePath string, movie *Movie) error {
+ if movie == nil {
+ return fmt.Errorf("movie must not be nil")
+ }
+ return marshalToFile(filePath, movie)
+}
diff --git a/pkg/manager/jsonschema/performer.go b/pkg/manager/jsonschema/performer.go
index 44aec069a..52122dd0a 100644
--- a/pkg/manager/jsonschema/performer.go
+++ b/pkg/manager/jsonschema/performer.go
@@ -1,14 +1,16 @@
package jsonschema
import (
- "encoding/json"
"fmt"
- "github.com/stashapp/stash/pkg/models"
+ "github.com/json-iterator/go"
"os"
+
+ "github.com/stashapp/stash/pkg/models"
)
type Performer struct {
Name string `json:"name,omitempty"`
+ Gender string `json:"gender,omitempty"`
URL string `json:"url,omitempty"`
Twitter string `json:"twitter,omitempty"`
Instagram string `json:"instagram,omitempty"`
@@ -36,6 +38,7 @@ func LoadPerformerFile(filePath string) (*Performer, error) {
if err != nil {
return nil, err
}
+ var json = jsoniter.ConfigCompatibleWithStandardLibrary
jsonParser := json.NewDecoder(file)
err = jsonParser.Decode(&performer)
if err != nil {
diff --git a/pkg/manager/jsonschema/scene.go b/pkg/manager/jsonschema/scene.go
index 0f4098571..b08c8a844 100644
--- a/pkg/manager/jsonschema/scene.go
+++ b/pkg/manager/jsonschema/scene.go
@@ -1,10 +1,11 @@
package jsonschema
import (
- "encoding/json"
"fmt"
- "github.com/stashapp/stash/pkg/models"
+ "github.com/json-iterator/go"
"os"
+
+ "github.com/stashapp/stash/pkg/models"
)
type SceneMarker struct {
@@ -21,21 +22,29 @@ type SceneFile struct {
Duration string `json:"duration"`
VideoCodec string `json:"video_codec"`
AudioCodec string `json:"audio_codec"`
+ Format string `json:"format"`
Width int `json:"width"`
Height int `json:"height"`
Framerate string `json:"framerate"`
Bitrate int `json:"bitrate"`
}
+type SceneMovie struct {
+ MovieName string `json:"movieName,omitempty"`
+ SceneIndex int `json:"scene_index,omitempty"`
+}
+
type Scene struct {
Title string `json:"title,omitempty"`
Studio string `json:"studio,omitempty"`
URL string `json:"url,omitempty"`
Date string `json:"date,omitempty"`
Rating int `json:"rating,omitempty"`
+ OCounter int `json:"o_counter,omitempty"`
Details string `json:"details,omitempty"`
Gallery string `json:"gallery,omitempty"`
Performers []string `json:"performers,omitempty"`
+ Movies []SceneMovie `json:"movies,omitempty"`
Tags []string `json:"tags,omitempty"`
Markers []SceneMarker `json:"markers,omitempty"`
File *SceneFile `json:"file,omitempty"`
@@ -51,6 +60,7 @@ func LoadSceneFile(filePath string) (*Scene, error) {
if err != nil {
return nil, err
}
+ var json = jsoniter.ConfigCompatibleWithStandardLibrary
jsonParser := json.NewDecoder(file)
err = jsonParser.Decode(&scene)
if err != nil {
diff --git a/pkg/manager/jsonschema/scraped.go b/pkg/manager/jsonschema/scraped.go
index 26684a85d..5ac26b676 100644
--- a/pkg/manager/jsonschema/scraped.go
+++ b/pkg/manager/jsonschema/scraped.go
@@ -1,8 +1,8 @@
package jsonschema
import (
- "encoding/json"
"fmt"
+ "github.com/json-iterator/go"
"github.com/stashapp/stash/pkg/models"
"os"
)
@@ -31,6 +31,7 @@ func LoadScrapedFile(filePath string) ([]ScrapedItem, error) {
if err != nil {
return nil, err
}
+ var json = jsoniter.ConfigCompatibleWithStandardLibrary
jsonParser := json.NewDecoder(file)
err = jsonParser.Decode(&scraped)
if err != nil {
diff --git a/pkg/manager/jsonschema/studio.go b/pkg/manager/jsonschema/studio.go
index 75e50d302..246c36050 100644
--- a/pkg/manager/jsonschema/studio.go
+++ b/pkg/manager/jsonschema/studio.go
@@ -1,8 +1,8 @@
package jsonschema
import (
- "encoding/json"
"fmt"
+ "github.com/json-iterator/go"
"github.com/stashapp/stash/pkg/models"
"os"
)
@@ -22,6 +22,7 @@ func LoadStudioFile(filePath string) (*Studio, error) {
if err != nil {
return nil, err
}
+ var json = jsoniter.ConfigCompatibleWithStandardLibrary
jsonParser := json.NewDecoder(file)
err = jsonParser.Decode(&studio)
if err != nil {
diff --git a/pkg/manager/jsonschema/utils.go b/pkg/manager/jsonschema/utils.go
index 18e180cf3..cbe3cd3ad 100644
--- a/pkg/manager/jsonschema/utils.go
+++ b/pkg/manager/jsonschema/utils.go
@@ -2,7 +2,8 @@ package jsonschema
import (
"bytes"
- "encoding/json"
+ "github.com/json-iterator/go"
+
"io/ioutil"
"time"
)
@@ -25,6 +26,7 @@ func marshalToFile(filePath string, j interface{}) error {
func encode(j interface{}) ([]byte, error) {
buffer := &bytes.Buffer{}
+ var json = jsoniter.ConfigCompatibleWithStandardLibrary
encoder := json.NewEncoder(buffer)
encoder.SetEscapeHTML(false)
encoder.SetIndent("", " ")
diff --git a/pkg/manager/manager.go b/pkg/manager/manager.go
index b75ff4d51..f3bef5530 100644
--- a/pkg/manager/manager.go
+++ b/pkg/manager/manager.go
@@ -154,9 +154,6 @@ func (s *singleton) RefreshConfig() {
_ = utils.EnsureDir(s.Paths.Generated.Markers)
_ = utils.EnsureDir(s.Paths.Generated.Transcodes)
- _ = utils.EnsureDir(s.Paths.JSON.Performers)
- _ = utils.EnsureDir(s.Paths.JSON.Scenes)
- _ = utils.EnsureDir(s.Paths.JSON.Galleries)
- _ = utils.EnsureDir(s.Paths.JSON.Studios)
+ paths.EnsureJSONDirs()
}
}
diff --git a/pkg/manager/manager_tasks.go b/pkg/manager/manager_tasks.go
index db6da95c4..70d83c09a 100644
--- a/pkg/manager/manager_tasks.go
+++ b/pkg/manager/manager_tasks.go
@@ -1,17 +1,44 @@
package manager
import (
+ "path/filepath"
+ "strconv"
+ "sync"
+ "time"
+
"github.com/bmatcuk/doublestar"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
- "path/filepath"
- "strconv"
- "sync"
- "time"
)
+var extensionsToScan = []string{"zip", "m4v", "mp4", "mov", "wmv", "avi", "mpg", "mpeg", "rmvb", "rm", "flv", "asf", "mkv", "webm"}
+var extensionsGallery = []string{"zip"}
+
+func constructGlob() string { // create a sequence for glob doublestar from our extensions
+ extLen := len(extensionsToScan)
+ glb := "{"
+ for i := 0; i < extLen-1; i++ { // append extensions and commas
+ glb += extensionsToScan[i] + ","
+ }
+ if extLen >= 1 { // append last extension without comma
+ glb += extensionsToScan[extLen-1]
+ }
+ glb += "}"
+ return glb
+
+}
+
+func isGallery(pathname string) bool {
+ for _, ext := range extensionsGallery {
+ if filepath.Ext(pathname) == "."+ext {
+ return true
+ }
+ }
+ return false
+}
+
type TaskStatus struct {
Status JobStatus
Progress float64
@@ -67,7 +94,7 @@ func (s *singleton) Scan(useFileMetadata bool) {
var results []string
for _, path := range config.GetStashPaths() {
- globPath := filepath.Join(path, "**/*.{zip,m4v,mp4,mov,wmv,avi,mpg,mpeg,rmvb,rm,flv,asf,mkv,webm}") // TODO: Make this configurable
+ globPath := filepath.Join(path, "**/*."+constructGlob())
globResults, _ := doublestar.Glob(globPath)
results = append(results, globResults...)
}
@@ -96,6 +123,15 @@ func (s *singleton) Scan(useFileMetadata bool) {
}
logger.Info("Finished scan")
+ for _, path := range results {
+ if isGallery(path) {
+ wg.Add(1)
+ task := ScanTask{FilePath: path, UseFileMetadata: false}
+ go task.associateGallery(&wg)
+ wg.Wait()
+ }
+ }
+ logger.Info("Finished gallery association")
}()
}
@@ -135,7 +171,7 @@ func (s *singleton) Export() {
}()
}
-func (s *singleton) Generate(sprites bool, previews bool, markers bool, transcodes bool) {
+func (s *singleton) Generate(sprites bool, previews bool, previewPreset *models.PreviewPreset, imagePreviews bool, markers bool, transcodes bool, thumbnails bool) {
if s.Status.Status != Idle {
return
}
@@ -143,13 +179,21 @@ func (s *singleton) Generate(sprites bool, previews bool, markers bool, transcod
s.Status.indefiniteProgress()
qb := models.NewSceneQueryBuilder()
+ qg := models.NewGalleryQueryBuilder()
//this.job.total = await ObjectionUtils.getCount(Scene);
instance.Paths.Generated.EnsureTmpDir()
+ preset := string(models.PreviewPresetSlow)
+ if previewPreset != nil && previewPreset.IsValid() {
+ preset = string(*previewPreset)
+ }
+
go func() {
defer s.returnToIdleState()
scenes, err := qb.All()
+ var galleries []*models.Gallery
+
if err != nil {
logger.Errorf("failed to get scenes for generate")
return
@@ -158,18 +202,27 @@ func (s *singleton) Generate(sprites bool, previews bool, markers bool, transcod
delta := utils.Btoi(sprites) + utils.Btoi(previews) + utils.Btoi(markers) + utils.Btoi(transcodes)
var wg sync.WaitGroup
s.Status.Progress = 0
- total := len(scenes)
+ lenScenes := len(scenes)
+ total := lenScenes
+ if thumbnails {
+ galleries, err = qg.All()
+ if err != nil {
+ logger.Errorf("failed to get galleries for generate")
+ return
+ }
+ total += len(galleries)
+ }
if s.Status.stopping {
logger.Info("Stopping due to user request")
return
}
- totalsNeeded := s.neededGenerate(scenes, sprites, previews, markers, transcodes)
+ totalsNeeded := s.neededGenerate(scenes, sprites, previews, imagePreviews, markers, transcodes)
if totalsNeeded == nil {
logger.Infof("Taking too long to count content. Skipping...")
logger.Infof("Generating content")
} else {
- logger.Infof("Generating %d sprites %d previews %d markers %d transcodes", totalsNeeded.sprites, totalsNeeded.previews, totalsNeeded.markers, totalsNeeded.transcodes)
+ logger.Infof("Generating %d sprites %d previews %d image previews %d markers %d transcodes", totalsNeeded.sprites, totalsNeeded.previews, totalsNeeded.imagePreviews, totalsNeeded.markers, totalsNeeded.transcodes)
}
for i, scene := range scenes {
s.Status.setProgress(i, total)
@@ -196,7 +249,7 @@ func (s *singleton) Generate(sprites bool, previews bool, markers bool, transcod
}
if previews {
- task := GeneratePreviewTask{Scene: *scene}
+ task := GeneratePreviewTask{Scene: *scene, ImagePreview: imagePreviews, PreviewPreset: preset}
go task.Start(&wg)
}
@@ -212,6 +265,77 @@ func (s *singleton) Generate(sprites bool, previews bool, markers bool, transcod
wg.Wait()
}
+
+ if thumbnails {
+ logger.Infof("Generating thumbnails for the galleries")
+ for i, gallery := range galleries {
+ s.Status.setProgress(lenScenes+i, total)
+ if s.Status.stopping {
+ logger.Info("Stopping due to user request")
+ return
+ }
+
+ if gallery == nil {
+ logger.Errorf("nil gallery, skipping generate")
+ continue
+ }
+
+ wg.Add(1)
+ task := GenerateGthumbsTask{Gallery: *gallery}
+ go task.Start(&wg)
+ wg.Wait()
+ }
+ }
+
+ logger.Infof("Generate finished")
+ }()
+}
+
+func (s *singleton) GenerateDefaultScreenshot(sceneId string) {
+ s.generateScreenshot(sceneId, nil)
+}
+
+func (s *singleton) GenerateScreenshot(sceneId string, at float64) {
+ s.generateScreenshot(sceneId, &at)
+}
+
+// generate default screenshot if at is nil
+func (s *singleton) generateScreenshot(sceneId string, at *float64) {
+ if s.Status.Status != Idle {
+ return
+ }
+ s.Status.SetStatus(Generate)
+ s.Status.indefiniteProgress()
+
+ qb := models.NewSceneQueryBuilder()
+ instance.Paths.Generated.EnsureTmpDir()
+
+ go func() {
+ defer s.returnToIdleState()
+
+ sceneIdInt, err := strconv.Atoi(sceneId)
+ if err != nil {
+ logger.Errorf("Error parsing scene id %s: %s", sceneId, err.Error())
+ return
+ }
+
+ scene, err := qb.Find(sceneIdInt)
+ if err != nil || scene == nil {
+ logger.Errorf("failed to get scene for generate")
+ return
+ }
+
+ task := GenerateScreenshotTask{
+ Scene: *scene,
+ ScreenshotAt: at,
+ }
+
+ var wg sync.WaitGroup
+ wg.Add(1)
+ go task.Start(&wg)
+
+ wg.Wait()
+
logger.Infof("Generate finished")
}()
}
@@ -390,6 +514,7 @@ func (s *singleton) Clean() {
s.Status.indefiniteProgress()
qb := models.NewSceneQueryBuilder()
+ gqb := models.NewGalleryQueryBuilder()
go func() {
defer s.returnToIdleState()
@@ -400,6 +525,12 @@ func (s *singleton) Clean() {
return
}
+ galleries, err := gqb.All()
+ if err != nil {
+ logger.Errorf("failed to fetch list of galleries for cleaning")
+ return
+ }
+
if s.Status.stopping {
logger.Info("Stopping due to user request")
return
@@ -407,7 +538,7 @@ func (s *singleton) Clean() {
var wg sync.WaitGroup
s.Status.Progress = 0
- total := len(scenes)
+ total := len(scenes) + len(galleries)
for i, scene := range scenes {
s.Status.setProgress(i, total)
if s.Status.stopping {
@@ -422,7 +553,26 @@ func (s *singleton) Clean() {
wg.Add(1)
- task := CleanTask{Scene: *scene}
+ task := CleanTask{Scene: scene}
+ go task.Start(&wg)
+ wg.Wait()
+ }
+
+ for i, gallery := range galleries {
+ s.Status.setProgress(len(scenes)+i, total)
+ if s.Status.stopping {
+ logger.Info("Stopping due to user request")
+ return
+ }
+
+ if gallery == nil {
+ logger.Errorf("nil gallery, skipping Clean")
+ continue
+ }
+
+ wg.Add(1)
+
+ task := CleanTask{Gallery: gallery}
go task.Start(&wg)
wg.Wait()
}
@@ -445,7 +595,7 @@ func (s *singleton) returnToIdleState() {
}
func (s *singleton) neededScan(paths []string) int64 {
- var neededScans int64 = 0
+ var neededScans int64
for _, path := range paths {
task := ScanTask{FilePath: path}
@@ -457,23 +607,24 @@ func (s *singleton) neededScan(paths []string) int64 {
}
type totalsGenerate struct {
- sprites int64
- previews int64
- markers int64
- transcodes int64
+ sprites int64
+ previews int64
+ imagePreviews int64
+ markers int64
+ transcodes int64
}
-func (s *singleton) neededGenerate(scenes []*models.Scene, sprites, previews, markers, transcodes bool) *totalsGenerate {
+func (s *singleton) neededGenerate(scenes []*models.Scene, sprites, previews, imagePreviews, markers, transcodes bool) *totalsGenerate {
var totals totalsGenerate
- const timeoutSecs = 90 * time.Second
+ const timeout = 90 * time.Second
// create a control channel through which to signal the counting loop when the timeout is reached
chTimeout := make(chan struct{})
//run the timeout function in a separate thread
go func() {
- time.Sleep(timeoutSecs)
+ time.Sleep(timeout)
chTimeout <- struct{}{}
}()
@@ -488,10 +639,13 @@ func (s *singleton) neededGenerate(scenes []*models.Scene, sprites, previews, ma
}
if previews {
- task := GeneratePreviewTask{Scene: *scene}
- if !task.doesPreviewExist(task.Scene.Checksum) {
+ task := GeneratePreviewTask{Scene: *scene, ImagePreview: imagePreviews}
+ if !task.doesVideoPreviewExist(task.Scene.Checksum) {
totals.previews++
}
+ if imagePreviews && !task.doesImagePreviewExist(task.Scene.Checksum) {
+ totals.imagePreviews++
+ }
}
if markers {
diff --git a/pkg/manager/paths/paths_gallery.go b/pkg/manager/paths/paths_gallery.go
index 4db60e311..9e4665c95 100644
--- a/pkg/manager/paths/paths_gallery.go
+++ b/pkg/manager/paths/paths_gallery.go
@@ -1,12 +1,18 @@
package paths
import (
+ "fmt"
"github.com/stashapp/stash/pkg/manager/config"
+ "github.com/stashapp/stash/pkg/utils"
"path/filepath"
)
type galleryPaths struct{}
+const thumbDir = "gthumbs"
+const thumbDirDepth int = 2
+const thumbDirLength int = 2 // thumbDirDepth * thumbDirLength must be smaller than the length of checksum
+
func newGalleryPaths() *galleryPaths {
return &galleryPaths{}
}
@@ -15,6 +21,19 @@ func (gp *galleryPaths) GetExtractedPath(checksum string) string {
return filepath.Join(config.GetCachePath(), checksum)
}
+func GetGthumbCache() string {
+ return filepath.Join(config.GetCachePath(), thumbDir)
+}
+
+func GetGthumbDir(checksum string) string {
+ return filepath.Join(config.GetCachePath(), thumbDir, utils.GetIntraDir(checksum, thumbDirDepth, thumbDirLength), checksum)
+}
+
+func GetGthumbPath(checksum string, index int, width int) string {
+ fname := fmt.Sprintf("%s_%d_%d.jpg", checksum, index, width)
+ return filepath.Join(config.GetCachePath(), thumbDir, utils.GetIntraDir(checksum, thumbDirDepth, thumbDirLength), checksum, fname)
+}
+
func (gp *galleryPaths) GetExtractedFilePath(checksum string, fileName string) string {
return filepath.Join(config.GetCachePath(), checksum, fileName)
}
diff --git a/pkg/manager/paths/paths_json.go b/pkg/manager/paths/paths_json.go
index 8344352a4..3f2ccbcb1 100644
--- a/pkg/manager/paths/paths_json.go
+++ b/pkg/manager/paths/paths_json.go
@@ -2,10 +2,13 @@ package paths
import (
"github.com/stashapp/stash/pkg/manager/config"
+ "github.com/stashapp/stash/pkg/utils"
"path/filepath"
)
type jsonPaths struct {
+ Metadata string
+
MappingsFile string
ScrapedFile string
@@ -13,19 +16,37 @@ type jsonPaths struct {
Scenes string
Galleries string
Studios string
+ Movies string
}
func newJSONPaths() *jsonPaths {
jp := jsonPaths{}
+ jp.Metadata = config.GetMetadataPath()
jp.MappingsFile = filepath.Join(config.GetMetadataPath(), "mappings.json")
jp.ScrapedFile = filepath.Join(config.GetMetadataPath(), "scraped.json")
jp.Performers = filepath.Join(config.GetMetadataPath(), "performers")
jp.Scenes = filepath.Join(config.GetMetadataPath(), "scenes")
jp.Galleries = filepath.Join(config.GetMetadataPath(), "galleries")
jp.Studios = filepath.Join(config.GetMetadataPath(), "studios")
+ jp.Movies = filepath.Join(config.GetMetadataPath(), "movies")
return &jp
}
+func GetJSONPaths() *jsonPaths {
+ jp := newJSONPaths()
+ return jp
+}
+
+func EnsureJSONDirs() {
+ jsonPaths := GetJSONPaths()
+ utils.EnsureDir(jsonPaths.Metadata)
+ utils.EnsureDir(jsonPaths.Scenes)
+ utils.EnsureDir(jsonPaths.Galleries)
+ utils.EnsureDir(jsonPaths.Performers)
+ utils.EnsureDir(jsonPaths.Studios)
+ utils.EnsureDir(jsonPaths.Movies)
+}
+
func (jp *jsonPaths) PerformerJSONPath(checksum string) string {
return filepath.Join(jp.Performers, checksum+".json")
}
@@ -37,3 +58,7 @@ func (jp *jsonPaths) SceneJSONPath(checksum string) string {
func (jp *jsonPaths) StudioJSONPath(checksum string) string {
return filepath.Join(jp.Studios, checksum+".json")
}
+
+func (jp *jsonPaths) MovieJSONPath(checksum string) string {
+ return filepath.Join(jp.Movies, checksum+".json")
+}
diff --git a/pkg/manager/scene.go b/pkg/manager/scene.go
index 1bca4568f..9afb07221 100644
--- a/pkg/manager/scene.go
+++ b/pkg/manager/scene.go
@@ -122,6 +122,27 @@ func DeleteGeneratedSceneFiles(scene *models.Scene) {
}
}
+func DeleteSceneMarkerFiles(scene *models.Scene, seconds int) {
+ videoPath := GetInstance().Paths.SceneMarkers.GetStreamPath(scene.Checksum, seconds)
+ imagePath := GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.Checksum, seconds)
+
+ exists, _ := utils.FileExists(videoPath)
+ if exists {
+ err := os.Remove(videoPath)
+ if err != nil {
+ logger.Warnf("Could not delete file %s: %s", videoPath, err.Error())
+ }
+ }
+
+ exists, _ = utils.FileExists(imagePath)
+ if exists {
+ err := os.Remove(imagePath)
+ if err != nil {
+ logger.Warnf("Could not delete file %s: %s", imagePath, err.Error())
+ }
+ }
+}
+
func DeleteSceneFile(scene *models.Scene) {
// kill any running encoders
KillRunningStreams(scene.Path)
diff --git a/pkg/manager/screenshot.go b/pkg/manager/screenshot.go
new file mode 100644
index 000000000..fc417ede7
--- /dev/null
+++ b/pkg/manager/screenshot.go
@@ -0,0 +1,16 @@
+package manager
+
+import (
+ "github.com/stashapp/stash/pkg/ffmpeg"
+)
+
+func makeScreenshot(probeResult ffmpeg.VideoFile, outputPath string, quality int, width int, time float64) {
+ encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)
+ options := ffmpeg.ScreenshotOptions{
+ OutputPath: outputPath,
+ Quality: quality,
+ Time: time,
+ Width: width,
+ }
+ encoder.Screenshot(probeResult, options)
+}
diff --git a/pkg/manager/task_autotag.go b/pkg/manager/task_autotag.go
index 4e6e150cc..78f37ce69 100644
--- a/pkg/manager/task_autotag.go
+++ b/pkg/manager/task_autotag.go
@@ -24,12 +24,10 @@ func (t *AutoTagPerformerTask) Start(wg *sync.WaitGroup) {
func getQueryRegex(name string) string {
const separatorChars = `.\-_ `
// handle path separators
- const endSeparatorChars = separatorChars + `\\/`
const separator = `[` + separatorChars + `]`
- const endSeparator = `[` + endSeparatorChars + `]`
ret := strings.Replace(name, " ", separator+"*", -1)
- ret = "(?:^|" + endSeparator + "+)" + ret + "(?:$|" + endSeparator + "+)"
+ ret = `(?:^|_|[^\w\d])` + ret + `(?:$|_|[^\w\d])`
return ret
}
diff --git a/pkg/manager/task_autotag_test.go b/pkg/manager/task_autotag_test.go
index 05221e399..1b302a94e 100644
--- a/pkg/manager/task_autotag_test.go
+++ b/pkg/manager/task_autotag_test.go
@@ -36,7 +36,15 @@ var testSeparators = []string{
" ",
}
-func generateNamePatterns(name string, separator string) []string {
+var testEndSeparators = []string{
+ "{",
+ "}",
+ "(",
+ ")",
+ ",",
+}
+
+func generateNamePatterns(name, separator string) []string {
var ret []string
ret = append(ret, fmt.Sprintf("%s%saaa"+testExtension, name, separator))
ret = append(ret, fmt.Sprintf("aaa%s%s"+testExtension, separator, name))
@@ -152,13 +160,20 @@ func createScenes(tx *sqlx.Tx) error {
// create the scenes
var scenePatterns []string
var falseScenePatterns []string
- for _, separator := range testSeparators {
+
+ separators := append(testSeparators, testEndSeparators...)
+
+ for _, separator := range separators {
scenePatterns = append(scenePatterns, generateNamePatterns(testName, separator)...)
scenePatterns = append(scenePatterns, generateNamePatterns(strings.ToLower(testName), separator)...)
+ falseScenePatterns = append(falseScenePatterns, generateFalseNamePattern(testName, separator))
+ }
+
+ // add test cases for intra-name separators
+ for _, separator := range testSeparators {
if separator != " " {
scenePatterns = append(scenePatterns, generateNamePatterns(strings.Replace(testName, " ", separator, -1), separator)...)
}
- falseScenePatterns = append(falseScenePatterns, generateFalseNamePattern(testName, separator))
}
for _, fn := range scenePatterns {
diff --git a/pkg/manager/task_clean.go b/pkg/manager/task_clean.go
index 580d3631a..8cca80bbd 100644
--- a/pkg/manager/task_clean.go
+++ b/pkg/manager/task_clean.go
@@ -2,33 +2,48 @@ package manager
import (
"context"
- "github.com/stashapp/stash/pkg/database"
- "github.com/stashapp/stash/pkg/logger"
- "github.com/stashapp/stash/pkg/manager/config"
- "github.com/stashapp/stash/pkg/models"
"os"
"path/filepath"
"strings"
"sync"
+
+ "github.com/stashapp/stash/pkg/database"
+ "github.com/stashapp/stash/pkg/logger"
+ "github.com/stashapp/stash/pkg/manager/config"
+ "github.com/stashapp/stash/pkg/manager/paths"
+ "github.com/stashapp/stash/pkg/models"
)
type CleanTask struct {
- Scene models.Scene
+ Scene *models.Scene
+ Gallery *models.Gallery
}
func (t *CleanTask) Start(wg *sync.WaitGroup) {
defer wg.Done()
- if t.fileExists(t.Scene.Path) && t.pathInStash() {
- logger.Debugf("File Found: %s", t.Scene.Path)
- if matchFile(t.Scene.Path, config.GetExcludes()) {
- logger.Infof("File matched regex. Cleaning: \"%s\"", t.Scene.Path)
- t.deleteScene(t.Scene.ID)
- }
- } else {
- logger.Infof("File not found. Cleaning: \"%s\"", t.Scene.Path)
+ if t.Scene != nil && t.shouldClean(t.Scene.Path) {
t.deleteScene(t.Scene.ID)
}
+
+ if t.Gallery != nil && t.shouldClean(t.Gallery.Path) {
+ t.deleteGallery(t.Gallery.ID)
+ }
+}
+
+func (t *CleanTask) shouldClean(path string) bool {
+ if t.fileExists(path) && t.pathInStash(path) {
+ logger.Debugf("File Found: %s", path)
+ if matchFile(path, config.GetExcludes()) {
+ logger.Infof("File matched regex. Cleaning: \"%s\"", path)
+ return true
+ }
+ } else {
+ logger.Infof("File not found. Cleaning: \"%s\"", path)
+ return true
+ }
+
+ return false
}
func (t *CleanTask) deleteScene(sceneID int) {
@@ -40,19 +55,43 @@ func (t *CleanTask) deleteScene(sceneID int) {
err = DestroyScene(sceneID, tx)
if err != nil {
- logger.Infof("Error deleting scene from database: %s", err.Error())
+ logger.Errorf("Error deleting scene from database: %s", err.Error())
tx.Rollback()
return
}
if err := tx.Commit(); err != nil {
- logger.Infof("Error deleting scene from database: %s", err.Error())
+ logger.Errorf("Error deleting scene from database: %s", err.Error())
return
}
DeleteGeneratedSceneFiles(scene)
}
+func (t *CleanTask) deleteGallery(galleryID int) {
+ ctx := context.TODO()
+ qb := models.NewGalleryQueryBuilder()
+ tx := database.DB.MustBeginTx(ctx, nil)
+
+ err := qb.Destroy(galleryID, tx)
+
+ if err != nil {
+ logger.Errorf("Error deleting gallery from database: %s", err.Error())
+ tx.Rollback()
+ return
+ }
+
+ if err := tx.Commit(); err != nil {
+ logger.Errorf("Error deleting gallery from database: %s", err.Error())
+ return
+ }
+
+ pathErr := os.RemoveAll(paths.GetGthumbDir(t.Gallery.Checksum)) // remove cache dir of gallery
+ if pathErr != nil {
+ logger.Errorf("Error deleting gallery directory from cache: %s", pathErr)
+ }
+}
+
func (t *CleanTask) fileExists(filename string) bool {
info, err := os.Stat(filename)
if os.IsNotExist(err) {
@@ -61,19 +100,19 @@ func (t *CleanTask) fileExists(filename string) bool {
return !info.IsDir()
}
-func (t *CleanTask) pathInStash() bool {
+func (t *CleanTask) pathInStash(pathToCheck string) bool {
for _, path := range config.GetStashPaths() {
- rel, error := filepath.Rel(path, filepath.Dir(t.Scene.Path))
+ rel, error := filepath.Rel(path, filepath.Dir(pathToCheck))
if error == nil {
if !strings.HasPrefix(rel, ".."+string(filepath.Separator)) {
- logger.Debugf("File %s belongs to stash path %s", t.Scene.Path, path)
+ logger.Debugf("File %s belongs to stash path %s", pathToCheck, path)
return true
}
}
}
- logger.Debugf("File %s is out from stash path", t.Scene.Path)
+ logger.Debugf("File %s is out from stash path", pathToCheck)
return false
}
diff --git a/pkg/manager/task_export.go b/pkg/manager/task_export.go
index f9c883b26..27d8bb6cc 100644
--- a/pkg/manager/task_export.go
+++ b/pkg/manager/task_export.go
@@ -3,14 +3,18 @@ package manager
import (
"context"
"fmt"
+ "github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/jsonschema"
+ "github.com/stashapp/stash/pkg/manager/paths"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
"math"
+ "runtime"
"strconv"
"sync"
+ "time"
)
type ExportTask struct {
@@ -20,46 +24,77 @@ type ExportTask struct {
func (t *ExportTask) Start(wg *sync.WaitGroup) {
defer wg.Done()
- // @manager.total = Scene.count + Gallery.count + Performer.count + Studio.count
+ // @manager.total = Scene.count + Gallery.count + Performer.count + Studio.count + Movie.count
+ workerCount := runtime.GOMAXPROCS(0) // set worker count to number of cpus available
t.Mappings = &jsonschema.Mappings{}
t.Scraped = []jsonschema.ScrapedItem{}
ctx := context.TODO()
+ startTime := time.Now()
- t.ExportScenes(ctx)
+ paths.EnsureJSONDirs()
+
+ t.ExportScenes(ctx, workerCount)
t.ExportGalleries(ctx)
- t.ExportPerformers(ctx)
- t.ExportStudios(ctx)
+ t.ExportPerformers(ctx, workerCount)
+ t.ExportStudios(ctx, workerCount)
+ t.ExportMovies(ctx, workerCount)
if err := instance.JSON.saveMappings(t.Mappings); err != nil {
logger.Errorf("[mappings] failed to save json: %s", err.Error())
}
t.ExportScrapedItems(ctx)
+ logger.Infof("Export complete in %s.", time.Since(startTime))
}
-func (t *ExportTask) ExportScenes(ctx context.Context) {
- tx := database.DB.MustBeginTx(ctx, nil)
- defer tx.Commit()
+func (t *ExportTask) ExportScenes(ctx context.Context, workers int) {
+ var scenesWg sync.WaitGroup
+
qb := models.NewSceneQueryBuilder()
- studioQB := models.NewStudioQueryBuilder()
- galleryQB := models.NewGalleryQueryBuilder()
- performerQB := models.NewPerformerQueryBuilder()
- tagQB := models.NewTagQueryBuilder()
- sceneMarkerQB := models.NewSceneMarkerQueryBuilder()
+
scenes, err := qb.All()
if err != nil {
logger.Errorf("[scenes] failed to fetch all scenes: %s", err.Error())
}
+ jobCh := make(chan *models.Scene, workers*2) // make a buffered channel to feed workers
+
logger.Info("[scenes] exporting")
+ startTime := time.Now()
+
+ for w := 0; w < workers; w++ { // create export Scene workers
+ scenesWg.Add(1)
+ go exportScene(&scenesWg, jobCh, t, nil) // no db data is changed so tx is set to nil
+ }
for i, scene := range scenes {
index := i + 1
- logger.Progressf("[scenes] %d of %d", index, len(scenes))
+ if (i % 100) == 0 { // make progress easier to read
+ logger.Progressf("[scenes] %d of %d", index, len(scenes))
+ }
t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathMapping{Path: scene.Path, Checksum: scene.Checksum})
+ jobCh <- scene // feed workers
+ }
+
+ close(jobCh) // close channel so that workers will know no more jobs are available
+ scenesWg.Wait()
+
+ logger.Infof("[scenes] export complete in %s. %d workers used.", time.Since(startTime), workers)
+}
+func exportScene(wg *sync.WaitGroup, jobChan <-chan *models.Scene, t *ExportTask, tx *sqlx.Tx) {
+ defer wg.Done()
+ studioQB := models.NewStudioQueryBuilder()
+ movieQB := models.NewMovieQueryBuilder()
+ galleryQB := models.NewGalleryQueryBuilder()
+ performerQB := models.NewPerformerQueryBuilder()
+ tagQB := models.NewTagQueryBuilder()
+ sceneMarkerQB := models.NewSceneMarkerQueryBuilder()
+ joinQB := models.NewJoinsQueryBuilder()
+
+ for scene := range jobChan {
newSceneJSON := jsonschema.Scene{
CreatedAt: models.JSONTime{Time: scene.CreatedAt.Timestamp},
UpdatedAt: models.JSONTime{Time: scene.UpdatedAt.Timestamp},
@@ -79,7 +114,8 @@ func (t *ExportTask) ExportScenes(ctx context.Context) {
galleryChecksum = gallery.Checksum
}
- performers, _ := performerQB.FindBySceneID(scene.ID, tx)
+ performers, _ := performerQB.FindNameBySceneID(scene.ID, tx)
+ sceneMovies, _ := joinQB.GetSceneMovies(scene.ID, tx)
tags, _ := tagQB.FindBySceneID(scene.ID, tx)
sceneMarkers, _ := sceneMarkerQB.FindBySceneID(scene.ID, tx)
@@ -98,6 +134,9 @@ func (t *ExportTask) ExportScenes(ctx context.Context) {
if scene.Rating.Valid {
newSceneJSON.Rating = int(scene.Rating.Int64)
}
+
+ newSceneJSON.OCounter = scene.OCounter
+
if scene.Details.Valid {
newSceneJSON.Details = scene.Details.String
}
@@ -135,6 +174,18 @@ func (t *ExportTask) ExportScenes(ctx context.Context) {
newSceneJSON.Markers = append(newSceneJSON.Markers, sceneMarkerJSON)
}
+ for _, sceneMovie := range sceneMovies {
+ movie, _ := movieQB.Find(sceneMovie.MovieID, tx)
+
+ if movie.Name.Valid {
+ sceneMovieJSON := jsonschema.SceneMovie{
+ MovieName: movie.Name.String,
+ SceneIndex: int(sceneMovie.SceneIndex.Int64),
+ }
+ newSceneJSON.Movies = append(newSceneJSON.Movies, sceneMovieJSON)
+ }
+ }
+
newSceneJSON.File = &jsonschema.SceneFile{}
if scene.Size.Valid {
newSceneJSON.File.Size = scene.Size.String
@@ -148,6 +199,9 @@ func (t *ExportTask) ExportScenes(ctx context.Context) {
if scene.AudioCodec.Valid {
newSceneJSON.File.AudioCodec = scene.AudioCodec.String
}
+ if scene.Format.Valid {
+ newSceneJSON.File.Format = scene.Format.String
+ }
if scene.Width.Valid {
newSceneJSON.File.Width = int(scene.Width.Int64)
}
@@ -177,7 +231,6 @@ func (t *ExportTask) ExportScenes(ctx context.Context) {
}
}
- logger.Infof("[scenes] export complete")
}
func (t *ExportTask) ExportGalleries(ctx context.Context) {
@@ -198,21 +251,42 @@ func (t *ExportTask) ExportGalleries(ctx context.Context) {
logger.Infof("[galleries] export complete")
}
-func (t *ExportTask) ExportPerformers(ctx context.Context) {
+func (t *ExportTask) ExportPerformers(ctx context.Context, workers int) {
+ var performersWg sync.WaitGroup
+
qb := models.NewPerformerQueryBuilder()
performers, err := qb.All()
if err != nil {
logger.Errorf("[performers] failed to fetch all performers: %s", err.Error())
}
+ jobCh := make(chan *models.Performer, workers*2) // make a buffered channel to feed workers
logger.Info("[performers] exporting")
+ startTime := time.Now()
+
+ for w := 0; w < workers; w++ { // create export Performer workers
+ performersWg.Add(1)
+ go exportPerformer(&performersWg, jobCh)
+ }
for i, performer := range performers {
index := i + 1
logger.Progressf("[performers] %d of %d", index, len(performers))
t.Mappings.Performers = append(t.Mappings.Performers, jsonschema.NameMapping{Name: performer.Name.String, Checksum: performer.Checksum})
+ jobCh <- performer // feed workers
+ }
+ close(jobCh) // close channel so workers will know that no more jobs are available
+ performersWg.Wait()
+
+ logger.Infof("[performers] export complete in %s. %d workers used.", time.Since(startTime), workers)
+}
+
+func exportPerformer(wg *sync.WaitGroup, jobChan <-chan *models.Performer) {
+ defer wg.Done()
+
+ for performer := range jobChan {
newPerformerJSON := jsonschema.Performer{
CreatedAt: models.JSONTime{Time: performer.CreatedAt.Timestamp},
UpdatedAt: models.JSONTime{Time: performer.UpdatedAt.Timestamp},
@@ -221,6 +295,9 @@ func (t *ExportTask) ExportPerformers(ctx context.Context) {
if performer.Name.Valid {
newPerformerJSON.Name = performer.Name.String
}
+ if performer.Gender.Valid {
+ newPerformerJSON.Gender = performer.Gender.String
+ }
if performer.URL.Valid {
newPerformerJSON.URL = performer.URL.String
}
@@ -280,11 +357,11 @@ func (t *ExportTask) ExportPerformers(ctx context.Context) {
logger.Errorf("[performers] <%s> failed to save json: %s", performer.Checksum, err.Error())
}
}
-
- logger.Infof("[performers] export complete")
}
-func (t *ExportTask) ExportStudios(ctx context.Context) {
+func (t *ExportTask) ExportStudios(ctx context.Context, workers int) {
+ var studiosWg sync.WaitGroup
+
qb := models.NewStudioQueryBuilder()
studios, err := qb.All()
if err != nil {
@@ -292,12 +369,33 @@ func (t *ExportTask) ExportStudios(ctx context.Context) {
}
logger.Info("[studios] exporting")
+ startTime := time.Now()
+
+ jobCh := make(chan *models.Studio, workers*2) // make a buffered channel to feed workers
+
+ for w := 0; w < workers; w++ { // create export Studio workers
+ studiosWg.Add(1)
+ go exportStudio(&studiosWg, jobCh)
+ }
for i, studio := range studios {
index := i + 1
logger.Progressf("[studios] %d of %d", index, len(studios))
t.Mappings.Studios = append(t.Mappings.Studios, jsonschema.NameMapping{Name: studio.Name.String, Checksum: studio.Checksum})
+ jobCh <- studio // feed workers
+ }
+
+ close(jobCh)
+ studiosWg.Wait()
+
+ logger.Infof("[studios] export complete in %s. %d workers used.", time.Since(startTime), workers)
+}
+
+func exportStudio(wg *sync.WaitGroup, jobChan <-chan *models.Studio) {
+ defer wg.Done()
+
+ for studio := range jobChan {
newStudioJSON := jsonschema.Studio{
CreatedAt: models.JSONTime{Time: studio.CreatedAt.Timestamp},
@@ -324,8 +422,91 @@ func (t *ExportTask) ExportStudios(ctx context.Context) {
logger.Errorf("[studios] <%s> failed to save json: %s", studio.Checksum, err.Error())
}
}
+}
- logger.Infof("[studios] export complete")
+func (t *ExportTask) ExportMovies(ctx context.Context, workers int) {
+ var moviesWg sync.WaitGroup
+
+ qb := models.NewMovieQueryBuilder()
+ movies, err := qb.All()
+ if err != nil {
+ logger.Errorf("[movies] failed to fetch all movies: %s", err.Error())
+ }
+
+ logger.Info("[movies] exporting")
+ startTime := time.Now()
+
+ jobCh := make(chan *models.Movie, workers*2) // make a buffered channel to feed workers
+
+ for w := 0; w < workers; w++ { // create export Movie workers
+ moviesWg.Add(1)
+ go exportMovie(&moviesWg, jobCh)
+ }
+
+ for i, movie := range movies {
+ index := i + 1
+ logger.Progressf("[movies] %d of %d", index, len(movies))
+
+ t.Mappings.Movies = append(t.Mappings.Movies, jsonschema.NameMapping{Name: movie.Name.String, Checksum: movie.Checksum})
+ jobCh <- movie // feed workers
+ }
+
+ close(jobCh)
+ moviesWg.Wait()
+
+ logger.Infof("[movies] export complete in %s. %d workers used.", time.Since(startTime), workers)
+
+}
+func exportMovie(wg *sync.WaitGroup, jobChan <-chan *models.Movie) {
+ defer wg.Done()
+
+ for movie := range jobChan {
+ newMovieJSON := jsonschema.Movie{
+ CreatedAt: models.JSONTime{Time: movie.CreatedAt.Timestamp},
+ UpdatedAt: models.JSONTime{Time: movie.UpdatedAt.Timestamp},
+ }
+
+ if movie.Name.Valid {
+ newMovieJSON.Name = movie.Name.String
+ }
+ if movie.Aliases.Valid {
+ newMovieJSON.Aliases = movie.Aliases.String
+ }
+ if movie.Date.Valid {
+ newMovieJSON.Date = utils.GetYMDFromDatabaseDate(movie.Date.String)
+ }
+ if movie.Rating.Valid {
+ newMovieJSON.Rating = int(movie.Rating.Int64)
+ }
+ if movie.Duration.Valid {
+ newMovieJSON.Duration = int(movie.Duration.Int64)
+ }
+
+ if movie.Director.Valid {
+ newMovieJSON.Director = movie.Director.String
+ }
+
+ if movie.Synopsis.Valid {
+ newMovieJSON.Synopsis = movie.Synopsis.String
+ }
+
+ if movie.URL.Valid {
+ newMovieJSON.URL = movie.URL.String
+ }
+
+ newMovieJSON.FrontImage = utils.GetBase64StringFromData(movie.FrontImage)
+ newMovieJSON.BackImage = utils.GetBase64StringFromData(movie.BackImage)
+ movieJSON, err := instance.JSON.getMovie(movie.Checksum)
+ if err != nil {
+ logger.Debugf("[movies] error reading movie json: %s", err.Error())
+ } else if jsonschema.CompareJSON(*movieJSON, newMovieJSON) {
+ continue
+ }
+
+ if err := instance.JSON.saveMovie(movie.Checksum, &newMovieJSON); err != nil {
+ logger.Errorf("[movies] <%s> failed to save json: %s", movie.Checksum, err.Error())
+ }
+ }
}
func (t *ExportTask) ExportScrapedItems(ctx context.Context) {
diff --git a/pkg/manager/task_generate_gallery_thumbs.go b/pkg/manager/task_generate_gallery_thumbs.go
new file mode 100644
index 000000000..2079e980d
--- /dev/null
+++ b/pkg/manager/task_generate_gallery_thumbs.go
@@ -0,0 +1,37 @@
+package manager
+
+import (
+ "github.com/stashapp/stash/pkg/logger"
+ "github.com/stashapp/stash/pkg/manager/paths"
+ "github.com/stashapp/stash/pkg/models"
+ "github.com/stashapp/stash/pkg/utils"
+ "sync"
+)
+
+type GenerateGthumbsTask struct {
+ Gallery models.Gallery
+}
+
+func (t *GenerateGthumbsTask) Start(wg *sync.WaitGroup) {
+ defer wg.Done()
+ generated := 0
+ count := t.Gallery.ImageCount()
+ for i := 0; i < count; i++ {
+ thumbPath := paths.GetGthumbPath(t.Gallery.Checksum, i, models.DefaultGthumbWidth)
+ exists, _ := utils.FileExists(thumbPath)
+ if exists {
+ continue
+ }
+ data := t.Gallery.GetThumbnail(i, models.DefaultGthumbWidth)
+ err := utils.WriteFile(thumbPath, data)
+ if err != nil {
+ logger.Errorf("error writing gallery thumbnail: %s", err)
+ } else {
+ generated++
+ }
+
+ }
+ if generated > 0 {
+ logger.Infof("Generated %d thumbnails for %s", generated, t.Gallery.Path)
+ }
+}
diff --git a/pkg/manager/task_generate_preview.go b/pkg/manager/task_generate_preview.go
index 380560ba0..4aba82095 100644
--- a/pkg/manager/task_generate_preview.go
+++ b/pkg/manager/task_generate_preview.go
@@ -9,7 +9,9 @@ import (
)
type GeneratePreviewTask struct {
- Scene models.Scene
+ Scene models.Scene
+ ImagePreview bool
+ PreviewPreset string
}
func (t *GeneratePreviewTask) Start(wg *sync.WaitGroup) {
@@ -17,7 +19,8 @@ func (t *GeneratePreviewTask) Start(wg *sync.WaitGroup) {
videoFilename := t.videoFilename()
imageFilename := t.imageFilename()
- if t.doesPreviewExist(t.Scene.Checksum) {
+ videoExists := t.doesVideoPreviewExist(t.Scene.Checksum)
+ if (!t.ImagePreview || t.doesImagePreviewExist(t.Scene.Checksum)) && videoExists {
return
}
@@ -27,7 +30,7 @@ func (t *GeneratePreviewTask) Start(wg *sync.WaitGroup) {
return
}
- generator, err := NewPreviewGenerator(*videoFile, videoFilename, imageFilename, instance.Paths.Generated.Screenshots)
+ generator, err := NewPreviewGenerator(*videoFile, videoFilename, imageFilename, instance.Paths.Generated.Screenshots, !videoExists, t.ImagePreview, t.PreviewPreset)
if err != nil {
logger.Errorf("error creating preview generator: %s", err.Error())
return
@@ -39,10 +42,14 @@ func (t *GeneratePreviewTask) Start(wg *sync.WaitGroup) {
}
}
-func (t *GeneratePreviewTask) doesPreviewExist(sceneChecksum string) bool {
+func (t *GeneratePreviewTask) doesVideoPreviewExist(sceneChecksum string) bool {
videoExists, _ := utils.FileExists(instance.Paths.Scene.GetStreamPreviewPath(sceneChecksum))
+ return videoExists
+}
+
+func (t *GeneratePreviewTask) doesImagePreviewExist(sceneChecksum string) bool {
imageExists, _ := utils.FileExists(instance.Paths.Scene.GetStreamPreviewImagePath(sceneChecksum))
- return videoExists && imageExists
+ return imageExists
}
func (t *GeneratePreviewTask) videoFilename() string {
diff --git a/pkg/manager/task_generate_screenshot.go b/pkg/manager/task_generate_screenshot.go
new file mode 100644
index 000000000..c4360b482
--- /dev/null
+++ b/pkg/manager/task_generate_screenshot.go
@@ -0,0 +1,84 @@
+package manager
+
+import (
+ "context"
+ "io/ioutil"
+ "os"
+ "sync"
+ "time"
+
+ "github.com/stashapp/stash/pkg/database"
+ "github.com/stashapp/stash/pkg/ffmpeg"
+ "github.com/stashapp/stash/pkg/logger"
+ "github.com/stashapp/stash/pkg/models"
+)
+
+type GenerateScreenshotTask struct {
+ Scene models.Scene
+ ScreenshotAt *float64
+}
+
+func (t *GenerateScreenshotTask) Start(wg *sync.WaitGroup) {
+ defer wg.Done()
+
+ scenePath := t.Scene.Path
+ probeResult, err := ffmpeg.NewVideoFile(instance.FFProbePath, scenePath)
+
+ if err != nil {
+ logger.Error(err.Error())
+ return
+ }
+
+ var at float64
+ if t.ScreenshotAt == nil {
+ at = float64(probeResult.Duration) * 0.2
+ } else {
+ at = *t.ScreenshotAt
+ }
+
+ checksum := t.Scene.Checksum
+ normalPath := instance.Paths.Scene.GetScreenshotPath(checksum)
+
+ // we'll generate the screenshot, grab the generated data and set it
+ // in the database. We'll use SetSceneScreenshot to set the data
+ // which also generates the thumbnail
+
+ logger.Debugf("Creating screenshot for %s", scenePath)
+ makeScreenshot(*probeResult, normalPath, 2, probeResult.Width, at)
+
+ f, err := os.Open(normalPath)
+ if err != nil {
+ logger.Errorf("Error reading screenshot: %s", err.Error())
+ return
+ }
+ defer f.Close()
+
+ coverImageData, err := ioutil.ReadAll(f)
+ if err != nil {
+ logger.Errorf("Error reading screenshot: %s", err.Error())
+ return
+ }
+
+ ctx := context.TODO()
+ tx := database.DB.MustBeginTx(ctx, nil)
+
+ qb := models.NewSceneQueryBuilder()
+ updatedTime := time.Now()
+ updatedScene := models.ScenePartial{
+ ID: t.Scene.ID,
+ UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
+ }
+
+ updatedScene.Cover = &coverImageData
+ err = SetSceneScreenshot(t.Scene.Checksum, coverImageData)
+ _, err = qb.Update(updatedScene, tx)
+ if err != nil {
+ logger.Errorf("Error setting screenshot: %s", err.Error())
+ return
+ }
+
+ if err := tx.Commit(); err != nil {
+ logger.Errorf("Error setting screenshot: %s", err.Error())
+ return
+ }
+}
diff --git a/pkg/manager/task_import.go b/pkg/manager/task_import.go
index 634c164dc..efc2d62c2 100644
--- a/pkg/manager/task_import.go
+++ b/pkg/manager/task_import.go
@@ -46,6 +46,7 @@ func (t *ImportTask) Start(wg *sync.WaitGroup) {
t.ImportPerformers(ctx)
t.ImportStudios(ctx)
+ t.ImportMovies(ctx)
t.ImportGalleries(ctx)
t.ImportTags(ctx)
@@ -93,6 +94,9 @@ func (t *ImportTask) ImportPerformers(ctx context.Context) {
if performerJSON.Name != "" {
newPerformer.Name = sql.NullString{String: performerJSON.Name, Valid: true}
}
+ if performerJSON.Gender != "" {
+ newPerformer.Gender = sql.NullString{String: performerJSON.Gender, Valid: true}
+ }
if performerJSON.URL != "" {
newPerformer.URL = sql.NullString{String: performerJSON.URL, Valid: true}
}
@@ -204,6 +208,77 @@ func (t *ImportTask) ImportStudios(ctx context.Context) {
logger.Info("[studios] import complete")
}
+func (t *ImportTask) ImportMovies(ctx context.Context) {
+ tx := database.DB.MustBeginTx(ctx, nil)
+ qb := models.NewMovieQueryBuilder()
+
+ for i, mappingJSON := range t.Mappings.Movies {
+ index := i + 1
+ movieJSON, err := instance.JSON.getMovie(mappingJSON.Checksum)
+ if err != nil {
+ logger.Errorf("[movies] failed to read json: %s", err.Error())
+ continue
+ }
+ if mappingJSON.Checksum == "" || mappingJSON.Name == "" || movieJSON == nil {
+ return
+ }
+
+ logger.Progressf("[movies] %d of %d", index, len(t.Mappings.Movies))
+
+ // generate checksum from movie name rather than image
+ checksum := utils.MD5FromString(movieJSON.Name)
+
+ // Process the base 64 encoded image string
+ _, frontimageData, err := utils.ProcessBase64Image(movieJSON.FrontImage)
+ if err != nil {
+ _ = tx.Rollback()
+ logger.Errorf("[movies] <%s> invalid front_image: %s", mappingJSON.Checksum, err.Error())
+ return
+ }
+ _, backimageData, err := utils.ProcessBase64Image(movieJSON.BackImage)
+ if err != nil {
+ _ = tx.Rollback()
+ logger.Errorf("[movies] <%s> invalid back_image: %s", mappingJSON.Checksum, err.Error())
+ return
+ }
+
+ // Populate a new movie from the input
+ newMovie := models.Movie{
+ FrontImage: frontimageData,
+ BackImage: backimageData,
+ Checksum: checksum,
+ Name: sql.NullString{String: movieJSON.Name, Valid: true},
+ Aliases: sql.NullString{String: movieJSON.Aliases, Valid: true},
+ Date: models.SQLiteDate{String: movieJSON.Date, Valid: true},
+ Director: sql.NullString{String: movieJSON.Director, Valid: true},
+ Synopsis: sql.NullString{String: movieJSON.Synopsis, Valid: true},
+ URL: sql.NullString{String: movieJSON.URL, Valid: true},
+ CreatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(movieJSON.CreatedAt)},
+ UpdatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(movieJSON.UpdatedAt)},
+ }
+
+ if movieJSON.Rating != 0 {
+ newMovie.Rating = sql.NullInt64{Int64: int64(movieJSON.Rating), Valid: true}
+ }
+ if movieJSON.Duration != 0 {
+ newMovie.Duration = sql.NullInt64{Int64: int64(movieJSON.Duration), Valid: true}
+ }
+
+ _, err = qb.Create(newMovie, tx)
+ if err != nil {
+ _ = tx.Rollback()
+ logger.Errorf("[movies] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
+ return
+ }
+ }
+
+ logger.Info("[movies] importing")
+ if err := tx.Commit(); err != nil {
+ logger.Errorf("[movies] import failed to commit: %s", err.Error())
+ }
+ logger.Info("[movies] import complete")
+}
+
func (t *ImportTask) ImportGalleries(ctx context.Context) {
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewGalleryQueryBuilder()
@@ -335,7 +410,7 @@ func (t *ImportTask) ImportScrapedItems(ctx context.Context) {
UpdatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(mappingJSON.UpdatedAt)},
}
- studio, err := sqb.FindByName(mappingJSON.Studio, tx)
+ studio, err := sqb.FindByName(mappingJSON.Studio, tx, false)
if err != nil {
logger.Errorf("[scraped sites] failed to fetch studio: %s", err.Error())
}
@@ -414,6 +489,8 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
if sceneJSON.Rating != 0 {
newScene.Rating = sql.NullInt64{Int64: int64(sceneJSON.Rating), Valid: true}
}
+
+ newScene.OCounter = sceneJSON.OCounter
newScene.CreatedAt = models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(sceneJSON.CreatedAt)}
newScene.UpdatedAt = models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(sceneJSON.UpdatedAt)}
@@ -431,6 +508,9 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
if sceneJSON.File.AudioCodec != "" {
newScene.AudioCodec = sql.NullString{String: sceneJSON.File.AudioCodec, Valid: true}
}
+ if sceneJSON.File.Format != "" {
+ newScene.Format = sql.NullString{String: sceneJSON.File.Format, Valid: true}
+ }
if sceneJSON.File.Width != 0 {
newScene.Width = sql.NullInt64{Int64: int64(sceneJSON.File.Width), Valid: true}
}
@@ -452,7 +532,7 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
// Populate the studio ID
if sceneJSON.Studio != "" {
sqb := models.NewStudioQueryBuilder()
- studio, err := sqb.FindByName(sceneJSON.Studio, tx)
+ studio, err := sqb.FindByName(sceneJSON.Studio, tx, false)
if err != nil {
logger.Warnf("[scenes] studio <%s> does not exist: %s", sceneJSON.Studio, err.Error())
} else {
@@ -508,6 +588,18 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
}
}
+ // Relate the scene to the movies
+ if len(sceneJSON.Movies) > 0 {
+ moviesScenes, err := t.getMoviesScenes(sceneJSON.Movies, scene.ID, tx)
+ if err != nil {
+ logger.Warnf("[scenes] <%s> failed to fetch movies: %s", scene.Checksum, err.Error())
+ } else {
+ if err := jqb.CreateMoviesScenes(moviesScenes, tx); err != nil {
+ logger.Errorf("[scenes] <%s> failed to associate movies: %s", scene.Checksum, err.Error())
+ }
+ }
+ }
+
// Relate the scene to the tags
if len(sceneJSON.Tags) > 0 {
tags, err := t.getTags(scene.Checksum, sceneJSON.Tags, tx)
@@ -542,7 +634,7 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
UpdatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(marker.UpdatedAt)},
}
- primaryTag, err := tqb.FindByName(marker.PrimaryTag, tx)
+ primaryTag, err := tqb.FindByName(marker.PrimaryTag, tx, false)
if err != nil {
logger.Errorf("[scenes] <%s> failed to find primary tag for marker: %s", scene.Checksum, err.Error())
} else {
@@ -590,7 +682,7 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
func (t *ImportTask) getPerformers(names []string, tx *sqlx.Tx) ([]*models.Performer, error) {
pqb := models.NewPerformerQueryBuilder()
- performers, err := pqb.FindByNames(names, tx)
+ performers, err := pqb.FindByNames(names, tx, false)
if err != nil {
return nil, err
}
@@ -614,9 +706,41 @@ func (t *ImportTask) getPerformers(names []string, tx *sqlx.Tx) ([]*models.Perfo
return performers, nil
}
+func (t *ImportTask) getMoviesScenes(input []jsonschema.SceneMovie, sceneID int, tx *sqlx.Tx) ([]models.MoviesScenes, error) {
+ mqb := models.NewMovieQueryBuilder()
+
+ var movies []models.MoviesScenes
+ for _, inputMovie := range input {
+ movie, err := mqb.FindByName(inputMovie.MovieName, tx, false)
+ if err != nil {
+ return nil, err
+ }
+
+ if movie == nil {
+ logger.Warnf("[scenes] movie %s does not exist", inputMovie.MovieName)
+ } else {
+ toAdd := models.MoviesScenes{
+ MovieID: movie.ID,
+ SceneID: sceneID,
+ }
+
+ if inputMovie.SceneIndex != 0 {
+ toAdd.SceneIndex = sql.NullInt64{
+ Int64: int64(inputMovie.SceneIndex),
+ Valid: true,
+ }
+ }
+
+ movies = append(movies, toAdd)
+ }
+ }
+
+ return movies, nil
+}
+
func (t *ImportTask) getTags(sceneChecksum string, names []string, tx *sqlx.Tx) ([]*models.Tag, error) {
tqb := models.NewTagQueryBuilder()
- tags, err := tqb.FindByNames(names, tx)
+ tags, err := tqb.FindByNames(names, tx, false)
if err != nil {
return nil, err
}
diff --git a/pkg/manager/task_scan.go b/pkg/manager/task_scan.go
index 579a3fc00..3e784ae56 100644
--- a/pkg/manager/task_scan.go
+++ b/pkg/manager/task_scan.go
@@ -5,6 +5,7 @@ import (
"database/sql"
"path/filepath"
"strconv"
+ "strings"
"sync"
"time"
@@ -21,7 +22,7 @@ type ScanTask struct {
}
func (t *ScanTask) Start(wg *sync.WaitGroup) {
- if filepath.Ext(t.FilePath) == ".zip" {
+ if isGallery(t.FilePath) {
t.scanGallery()
} else {
t.scanScene()
@@ -33,11 +34,16 @@ func (t *ScanTask) Start(wg *sync.WaitGroup) {
func (t *ScanTask) scanGallery() {
qb := models.NewGalleryQueryBuilder()
gallery, _ := qb.FindByPath(t.FilePath)
+
if gallery != nil {
// We already have this item in the database, keep going
return
}
+ ok, err := utils.IsZipFileUncompressed(t.FilePath)
+ if err == nil && !ok {
+ logger.Warnf("%s is using above store (0) level compression.", t.FilePath)
+ }
checksum, err := t.calculateChecksum()
if err != nil {
logger.Error(err.Error())
@@ -60,6 +66,7 @@ func (t *ScanTask) scanGallery() {
} else {
logger.Infof("%s doesn't exist. Creating new item...", t.FilePath)
currentTime := time.Now()
+
newGallery := models.Gallery{
Checksum: checksum,
Path: t.FilePath,
@@ -77,12 +84,85 @@ func (t *ScanTask) scanGallery() {
}
}
+// associates a gallery to a scene with the same basename
+func (t *ScanTask) associateGallery(wg *sync.WaitGroup) {
+ qb := models.NewGalleryQueryBuilder()
+ gallery, _ := qb.FindByPath(t.FilePath)
+ if gallery == nil {
+ // shouldn't happen; associate is run after scan is finished
+ logger.Errorf("associate: gallery %s not found in DB", t.FilePath)
+ wg.Done()
+ return
+ }
+
+ if !gallery.SceneID.Valid { // gallery has no SceneID
+ basename := strings.TrimSuffix(t.FilePath, filepath.Ext(t.FilePath))
+ var relatedFiles []string
+ for _, ext := range extensionsToScan { // make a list of media files that can be related to the gallery
+ related := basename + "." + ext
+ if !isGallery(related) { // exclude gallery extensions from the related files
+ relatedFiles = append(relatedFiles, related)
+ }
+ }
+ for _, scenePath := range relatedFiles {
+ qbScene := models.NewSceneQueryBuilder()
+ scene, _ := qbScene.FindByPath(scenePath)
+ if scene != nil { // found related Scene
+ logger.Infof("associate: Gallery %s is related to scene: %d", t.FilePath, scene.ID)
+
+ gallery.SceneID.Int64 = int64(scene.ID)
+ gallery.SceneID.Valid = true
+
+ ctx := context.TODO()
+ tx := database.DB.MustBeginTx(ctx, nil)
+
+ _, err := qb.Update(*gallery, tx)
+ if err != nil {
+ logger.Errorf("associate: Error updating gallery sceneId %s", err)
+ _ = tx.Rollback()
+ } else if err := tx.Commit(); err != nil {
+ logger.Error(err.Error())
+ }
+
+ break // since a gallery can have only one related scene
+ // only first found is associated
+ }
+
+ }
+
+ }
+ wg.Done()
+}
+
func (t *ScanTask) scanScene() {
qb := models.NewSceneQueryBuilder()
scene, _ := qb.FindByPath(t.FilePath)
if scene != nil {
- // We already have this item in the database, check for thumbnails,screenshots
+ // We already have this item in the database
+ // check for thumbnails, screenshots
t.makeScreenshots(nil, scene.Checksum)
+
+ // check for container
+ if !scene.Format.Valid {
+ videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.FilePath)
+ if err != nil {
+ logger.Error(err.Error())
+ return
+ }
+ container := ffmpeg.MatchContainer(videoFile.Container, t.FilePath)
+ logger.Infof("Adding container %s to file %s", container, t.FilePath)
+
+ ctx := context.TODO()
+ tx := database.DB.MustBeginTx(ctx, nil)
+ err = qb.UpdateFormat(scene.ID, string(container), tx)
+ if err != nil {
+ logger.Error(err.Error())
+ _ = tx.Rollback()
+ } else if err := tx.Commit(); err != nil {
+ logger.Error(err.Error())
+ }
+
+ }
return
}
@@ -91,6 +171,7 @@ func (t *ScanTask) scanScene() {
logger.Error(err.Error())
return
}
+ container := ffmpeg.MatchContainer(videoFile.Container, t.FilePath)
// Override title to be filename if UseFileMetadata is false
if !t.UseFileMetadata {
@@ -130,6 +211,7 @@ func (t *ScanTask) scanScene() {
Duration: sql.NullFloat64{Float64: videoFile.Duration, Valid: true},
VideoCodec: sql.NullString{String: videoFile.VideoCodec, Valid: true},
AudioCodec: sql.NullString{String: videoFile.AudioCodec, Valid: true},
+ Format: sql.NullString{String: string(container), Valid: true},
Width: sql.NullInt64{Int64: int64(videoFile.Width), Valid: true},
Height: sql.NullInt64{Int64: int64(videoFile.Height), Valid: true},
Framerate: sql.NullFloat64{Float64: videoFile.FrameRate, Valid: true},
@@ -177,28 +259,19 @@ func (t *ScanTask) makeScreenshots(probeResult *ffmpeg.VideoFile, checksum strin
logger.Infof("Regenerating images for %s", t.FilePath)
}
+ at := float64(probeResult.Duration) * 0.2
+
if !thumbExists {
logger.Debugf("Creating thumbnail for %s", t.FilePath)
- t.makeScreenshot(*probeResult, thumbPath, 5, 320)
+ makeScreenshot(*probeResult, thumbPath, 5, 320, at)
}
if !normalExists {
logger.Debugf("Creating screenshot for %s", t.FilePath)
- t.makeScreenshot(*probeResult, normalPath, 2, probeResult.Width)
+ makeScreenshot(*probeResult, normalPath, 2, probeResult.Width, at)
}
}
-func (t *ScanTask) makeScreenshot(probeResult ffmpeg.VideoFile, outputPath string, quality int, width int) {
- encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)
- options := ffmpeg.ScreenshotOptions{
- OutputPath: outputPath,
- Quality: quality,
- Time: float64(probeResult.Duration) * 0.2,
- Width: width,
- }
- encoder.Screenshot(probeResult, options)
-}
-
func (t *ScanTask) calculateChecksum() (string, error) {
logger.Infof("%s not found. Calculating checksum...", t.FilePath)
checksum, err := utils.MD5FromFilePath(t.FilePath)
diff --git a/pkg/manager/task_transcode.go b/pkg/manager/task_transcode.go
index a43e50cd3..7db3be0e3 100644
--- a/pkg/manager/task_transcode.go
+++ b/pkg/manager/task_transcode.go
@@ -16,17 +16,37 @@ type GenerateTranscodeTask struct {
func (t *GenerateTranscodeTask) Start(wg *sync.WaitGroup) {
defer wg.Done()
- videoCodec := t.Scene.VideoCodec.String
- if ffmpeg.IsValidCodec(videoCodec) {
- return
- }
hasTranscode, _ := HasTranscode(&t.Scene)
if hasTranscode {
return
}
- logger.Infof("[transcode] <%s> scene has codec %s", t.Scene.Checksum, t.Scene.VideoCodec.String)
+ var container ffmpeg.Container
+
+ if t.Scene.Format.Valid {
+ container = ffmpeg.Container(t.Scene.Format.String)
+
+ } else { // container isn't in the DB
+ // shouldn't happen unless user hasn't scanned after updating to PR#384+ version
+ tmpVideoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path)
+ if err != nil {
+ logger.Errorf("[transcode] error reading video file: %s", err.Error())
+ return
+ }
+
+ container = ffmpeg.MatchContainer(tmpVideoFile.Container, t.Scene.Path)
+ }
+
+ videoCodec := t.Scene.VideoCodec.String
+ audioCodec := ffmpeg.MissingUnsupported
+ if t.Scene.AudioCodec.Valid {
+ audioCodec = ffmpeg.AudioCodec(t.Scene.AudioCodec.String)
+ }
+
+ if ffmpeg.IsValidCodec(videoCodec) && ffmpeg.IsValidCombo(videoCodec, container) && ffmpeg.IsValidAudioForContainer(audioCodec, container) {
+ return
+ }
videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path)
if err != nil {
@@ -41,24 +61,52 @@ func (t *GenerateTranscodeTask) Start(wg *sync.WaitGroup) {
MaxTranscodeSize: transcodeSize,
}
encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)
- encoder.Transcode(*videoFile, options)
+
+ if videoCodec == ffmpeg.H264 { // for unsupported h264 files, stream copy the video part
+ if audioCodec == ffmpeg.MissingUnsupported {
+ encoder.CopyVideo(*videoFile, options)
+ } else {
+ encoder.TranscodeAudio(*videoFile, options)
+ }
+ } else {
+ if audioCodec == ffmpeg.MissingUnsupported {
+ // ffmpeg fails if it tries to transcode an unsupported audio codec
+ encoder.TranscodeVideo(*videoFile, options)
+ } else {
+ encoder.Transcode(*videoFile, options)
+ }
+ }
+
if err := os.Rename(outputPath, instance.Paths.Scene.GetTranscodePath(t.Scene.Checksum)); err != nil {
logger.Errorf("[transcode] error generating transcode: %s", err.Error())
return
}
+
logger.Debugf("[transcode] <%s> created transcode: %s", t.Scene.Checksum, outputPath)
return
}
+// return true if transcode is needed
+// used only when counting files to generate, doesn't affect the actual transcode generation
+// if container is missing from DB it is treated as non supported in order not to delay the user
func (t *GenerateTranscodeTask) isTranscodeNeeded() bool {
videoCodec := t.Scene.VideoCodec.String
- hasTranscode, _ := HasTranscode(&t.Scene)
+ container := ""
+ audioCodec := ffmpeg.MissingUnsupported
+ if t.Scene.AudioCodec.Valid {
+ audioCodec = ffmpeg.AudioCodec(t.Scene.AudioCodec.String)
+ }
- if ffmpeg.IsValidCodec(videoCodec) {
+ if t.Scene.Format.Valid {
+ container = t.Scene.Format.String
+ }
+
+ if ffmpeg.IsValidCodec(videoCodec) && ffmpeg.IsValidCombo(videoCodec, ffmpeg.Container(container)) && ffmpeg.IsValidAudioForContainer(audioCodec, ffmpeg.Container(container)) {
return false
}
+ hasTranscode, _ := HasTranscode(&t.Scene)
if hasTranscode {
return false
}
diff --git a/pkg/manager/utils.go b/pkg/manager/utils.go
index af767a291..38f747d95 100644
--- a/pkg/manager/utils.go
+++ b/pkg/manager/utils.go
@@ -4,6 +4,7 @@ import (
"fmt"
"github.com/stashapp/stash/pkg/ffmpeg"
+ "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
@@ -12,12 +13,30 @@ func IsStreamable(scene *models.Scene) (bool, error) {
if scene == nil {
return false, fmt.Errorf("nil scene")
}
+ var container ffmpeg.Container
+ if scene.Format.Valid {
+ container = ffmpeg.Container(scene.Format.String)
+ } else { // container isn't in the DB
+ // shouldn't happen, fallback to ffprobe reading from file
+ tmpVideoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, scene.Path)
+ if err != nil {
+ return false, fmt.Errorf("error reading video file: %s", err.Error())
+ }
+ container = ffmpeg.MatchContainer(tmpVideoFile.Container, scene.Path)
+ }
videoCodec := scene.VideoCodec.String
- if ffmpeg.IsValidCodec(videoCodec) {
+ audioCodec := ffmpeg.MissingUnsupported
+ if scene.AudioCodec.Valid {
+ audioCodec = ffmpeg.AudioCodec(scene.AudioCodec.String)
+ }
+
+ if ffmpeg.IsValidCodec(videoCodec) && ffmpeg.IsValidCombo(videoCodec, container) && ffmpeg.IsValidAudioForContainer(audioCodec, container) {
+ logger.Debugf("File is streamable %s, %s, %s\n", videoCodec, audioCodec, container)
return true, nil
} else {
hasTranscode, _ := HasTranscode(scene)
+ logger.Debugf("File is not streamable , transcode is needed %s, %s, %s\n", videoCodec, audioCodec, container)
return hasTranscode, nil
}
}
diff --git a/pkg/models/model_gallery.go b/pkg/models/model_gallery.go
index 53ab2b406..9f8a83f5d 100644
--- a/pkg/models/model_gallery.go
+++ b/pkg/models/model_gallery.go
@@ -8,6 +8,7 @@ import (
"github.com/stashapp/stash/pkg/api/urlbuilders"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/utils"
+ _ "golang.org/x/image/webp"
"image"
"image/jpeg"
"io/ioutil"
@@ -25,6 +26,8 @@ type Gallery struct {
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
}
+const DefaultGthumbWidth int = 200
+
func (g *Gallery) GetFiles(baseURL string) []*GalleryFilesType {
var galleryFiles []*GalleryFilesType
filteredFiles, readCloser, err := g.listZipContents()
@@ -88,7 +91,7 @@ func (g *Gallery) readZipFile(index int) ([]byte, error) {
func (g *Gallery) listZipContents() ([]*zip.File, *zip.ReadCloser, error) {
readCloser, err := zip.OpenReader(g.Path)
if err != nil {
- logger.Warn("failed to read zip file")
+ logger.Warnf("failed to read zip file %s", g.Path)
return nil, nil, err
}
@@ -98,7 +101,8 @@ func (g *Gallery) listZipContents() ([]*zip.File, *zip.ReadCloser, error) {
continue
}
ext := filepath.Ext(file.Name)
- if ext != ".jpg" && ext != ".jpeg" && ext != ".png" && ext != ".gif" {
+ ext = strings.ToLower(ext)
+ if ext != ".jpg" && ext != ".jpeg" && ext != ".png" && ext != ".gif" && ext != ".webp" {
continue
}
if strings.Contains(file.Name, "__MACOSX") {
@@ -112,5 +116,49 @@ func (g *Gallery) listZipContents() ([]*zip.File, *zip.ReadCloser, error) {
return utils.NaturalCompare(a.Name, b.Name)
})
+ cover := contains(filteredFiles, "cover.jpg") // first image with cover.jpg in the name
+ if cover >= 0 { // will be moved to the start
+ reorderedFiles := reorder(filteredFiles, cover)
+ if reorderedFiles != nil {
+ return reorderedFiles, readCloser, nil
+ }
+ }
+
return filteredFiles, readCloser, nil
}
+
+// return index of first occurrence of string x (case insensitive) in the name of zip contents, -1 otherwise
+func contains(a []*zip.File, x string) int {
+ for i, n := range a {
+ if strings.Contains(strings.ToLower(n.Name), strings.ToLower(x)) {
+ return i
+ }
+ }
+ return -1
+}
+
+// reorder slice so that the element at position toFirst is moved to the start
+func reorder(a []*zip.File, toFirst int) []*zip.File {
+ var first *zip.File
+ switch {
+ case toFirst < 0 || toFirst >= len(a):
+ return nil
+ case toFirst == 0:
+ return a
+ default:
+ first = a[toFirst]
+ copy(a[toFirst:], a[toFirst+1:]) // Shift a[toFirst+1:] left one index removing a[toFirst] element
+ a[len(a)-1] = nil // Nil now unused element for garbage collection
+ a = a[:len(a)-1] // Truncate slice
+ a = append([]*zip.File{first}, a...) // Push first to the start of the slice
+ }
+ return a
+}
+
+func (g *Gallery) ImageCount() int {
+ images, _, _ := g.listZipContents()
+ if images == nil {
+ return 0
+ }
+ return len(images)
+}
diff --git a/pkg/models/model_joins.go b/pkg/models/model_joins.go
index 2a1129bd1..f69be8946 100644
--- a/pkg/models/model_joins.go
+++ b/pkg/models/model_joins.go
@@ -1,10 +1,18 @@
package models
+import "database/sql"
+
type PerformersScenes struct {
PerformerID int `db:"performer_id" json:"performer_id"`
SceneID int `db:"scene_id" json:"scene_id"`
}
+type MoviesScenes struct {
+ MovieID int `db:"movie_id" json:"movie_id"`
+ SceneID int `db:"scene_id" json:"scene_id"`
+ SceneIndex sql.NullInt64 `db:"scene_index" json:"scene_index"`
+}
+
type ScenesTags struct {
SceneID int `db:"scene_id" json:"scene_id"`
TagID int `db:"tag_id" json:"tag_id"`
diff --git a/pkg/models/model_movie.go b/pkg/models/model_movie.go
new file mode 100644
index 000000000..865fcab0e
--- /dev/null
+++ b/pkg/models/model_movie.go
@@ -0,0 +1,43 @@
+package models
+
+import (
+ "database/sql"
+)
+
+type Movie struct {
+ ID int `db:"id" json:"id"`
+ FrontImage []byte `db:"front_image" json:"front_image"`
+ BackImage []byte `db:"back_image" json:"back_image"`
+ Checksum string `db:"checksum" json:"checksum"`
+ Name sql.NullString `db:"name" json:"name"`
+ Aliases sql.NullString `db:"aliases" json:"aliases"`
+ Duration sql.NullInt64 `db:"duration" json:"duration"`
+ Date SQLiteDate `db:"date" json:"date"`
+ Rating sql.NullInt64 `db:"rating" json:"rating"`
+ StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
+ Director sql.NullString `db:"director" json:"director"`
+ Synopsis sql.NullString `db:"synopsis" json:"synopsis"`
+ URL sql.NullString `db:"url" json:"url"`
+ CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
+ UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
+}
+
+type MoviePartial struct {
+ ID int `db:"id" json:"id"`
+ FrontImage *[]byte `db:"front_image" json:"front_image"`
+ BackImage *[]byte `db:"back_image" json:"back_image"`
+ Checksum *string `db:"checksum" json:"checksum"`
+ Name *sql.NullString `db:"name" json:"name"`
+ Aliases *sql.NullString `db:"aliases" json:"aliases"`
+ Duration *sql.NullInt64 `db:"duration" json:"duration"`
+ Date *SQLiteDate `db:"date" json:"date"`
+ Rating *sql.NullInt64 `db:"rating" json:"rating"`
+ StudioID *sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
+ Director *sql.NullString `db:"director" json:"director"`
+ Synopsis *sql.NullString `db:"synopsis" json:"synopsis"`
+ URL *sql.NullString `db:"url" json:"url"`
+ CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"`
+ UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"`
+}
+
+var DefaultMovieImage = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAYAAABw4pVUAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAA3XAAAN1wFCKJt4AAAAB3RJTUUH4wgVBQsJl1CMZAAAASJJREFUeNrt3N0JwyAYhlEj3cj9R3Cm5rbkqtAP+qrnGaCYHPwJpLlaa++mmLpbAERAgAgIEAEBIiBABERAgAgIEAEBIiBABERAgAgIEAHZuVflj40x4i94zhk9vqsVvEq6AsQqMP1EjORx20OACAgQRRx7T+zzcFBxcjNDfoB4ntQqTm5Awo7MlqywZxcgYQ+RlqywJ3ozJAQCSBiEJSsQA0gYBpDAgAARECACAkRAgAgIEAERECACAmSjUv6eAOSB8m8YIGGzBUjYbAESBgMkbBkDEjZbgITBAClcxiqQvEoatreYIWEBASIgJ4Gkf11ntXH3nS9uxfGWfJ5J9hAgAgJEQAQEiIAAERAgAgJEQAQEiIAAERAgAgJEQAQEiL7qBuc6RKLHxr0CAAAAAElFTkSuQmCC"
diff --git a/pkg/models/model_performer.go b/pkg/models/model_performer.go
index 8d3beb3db..71818a025 100644
--- a/pkg/models/model_performer.go
+++ b/pkg/models/model_performer.go
@@ -9,6 +9,7 @@ type Performer struct {
Image []byte `db:"image" json:"image"`
Checksum string `db:"checksum" json:"checksum"`
Name sql.NullString `db:"name" json:"name"`
+ Gender sql.NullString `db:"gender" json:"gender"`
URL sql.NullString `db:"url" json:"url"`
Twitter sql.NullString `db:"twitter" json:"twitter"`
Instagram sql.NullString `db:"instagram" json:"instagram"`
diff --git a/pkg/models/model_scene.go b/pkg/models/model_scene.go
index e55f42117..1623533be 100644
--- a/pkg/models/model_scene.go
+++ b/pkg/models/model_scene.go
@@ -19,6 +19,7 @@ type Scene struct {
Size sql.NullString `db:"size" json:"size"`
Duration sql.NullFloat64 `db:"duration" json:"duration"`
VideoCodec sql.NullString `db:"video_codec" json:"video_codec"`
+ Format sql.NullString `db:"format" json:"format_name"`
AudioCodec sql.NullString `db:"audio_codec" json:"audio_codec"`
Width sql.NullInt64 `db:"width" json:"width"`
Height sql.NullInt64 `db:"height" json:"height"`
@@ -48,6 +49,7 @@ type ScenePartial struct {
Framerate *sql.NullFloat64 `db:"framerate" json:"framerate"`
Bitrate *sql.NullInt64 `db:"bitrate" json:"bitrate"`
StudioID *sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
+ MovieID *sql.NullInt64 `db:"movie_id,omitempty" json:"movie_id"`
CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"`
UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"`
}
diff --git a/pkg/models/model_scraped_item.go b/pkg/models/model_scraped_item.go
index cdef49bc5..719274b5f 100644
--- a/pkg/models/model_scraped_item.go
+++ b/pkg/models/model_scraped_item.go
@@ -25,6 +25,28 @@ type ScrapedItem struct {
type ScrapedPerformer struct {
Name *string `graphql:"name" json:"name"`
+ Gender *string `graphql:"gender" json:"gender"`
+ URL *string `graphql:"url" json:"url"`
+ Twitter *string `graphql:"twitter" json:"twitter"`
+ Instagram *string `graphql:"instagram" json:"instagram"`
+ Birthdate *string `graphql:"birthdate" json:"birthdate"`
+ Ethnicity *string `graphql:"ethnicity" json:"ethnicity"`
+ Country *string `graphql:"country" json:"country"`
+ EyeColor *string `graphql:"eye_color" json:"eye_color"`
+ Height *string `graphql:"height" json:"height"`
+ Measurements *string `graphql:"measurements" json:"measurements"`
+ FakeTits *string `graphql:"fake_tits" json:"fake_tits"`
+ CareerLength *string `graphql:"career_length" json:"career_length"`
+ Tattoos *string `graphql:"tattoos" json:"tattoos"`
+ Piercings *string `graphql:"piercings" json:"piercings"`
+ Aliases *string `graphql:"aliases" json:"aliases"`
+ Image *string `graphql:"image" json:"image"`
+}
+
+// this type has no Image field
+type ScrapedPerformerStash struct {
+ Name *string `graphql:"name" json:"name"`
+ Gender *string `graphql:"gender" json:"gender"`
URL *string `graphql:"url" json:"url"`
Twitter *string `graphql:"twitter" json:"twitter"`
Instagram *string `graphql:"instagram" json:"instagram"`
@@ -42,6 +64,21 @@ type ScrapedPerformer struct {
}
type ScrapedScene struct {
+ Title *string `graphql:"title" json:"title"`
+ Details *string `graphql:"details" json:"details"`
+ URL *string `graphql:"url" json:"url"`
+ Date *string `graphql:"date" json:"date"`
+ Image *string `graphql:"image" json:"image"`
+ File *SceneFileType `graphql:"file" json:"file"`
+ Studio *ScrapedSceneStudio `graphql:"studio" json:"studio"`
+ Movies []*ScrapedSceneMovie `graphql:"movies" json:"movies"`
+ Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"`
+ Performers []*ScrapedScenePerformer `graphql:"performers" json:"performers"`
+}
+
+// stash doesn't return image, and we need id
+type ScrapedSceneStash struct {
+ ID string `graphql:"id" json:"id"`
Title *string `graphql:"title" json:"title"`
Details *string `graphql:"details" json:"details"`
URL *string `graphql:"url" json:"url"`
@@ -56,6 +93,7 @@ type ScrapedScenePerformer struct {
// Set if performer matched
ID *string `graphql:"id" json:"id"`
Name string `graphql:"name" json:"name"`
+ Gender *string `graphql:"gender" json:"gender"`
URL *string `graphql:"url" json:"url"`
Twitter *string `graphql:"twitter" json:"twitter"`
Instagram *string `graphql:"instagram" json:"instagram"`
@@ -79,6 +117,19 @@ type ScrapedSceneStudio struct {
URL *string `graphql:"url" json:"url"`
}
+type ScrapedSceneMovie struct {
+ // Set if movie matched
+ ID *string `graphql:"id" json:"id"`
+ Name string `graphql:"name" json:"name"`
+ Aliases string `graphql:"aliases" json:"aliases"`
+ Duration string `graphql:"duration" json:"duration"`
+ Date string `graphql:"date" json:"date"`
+ Rating string `graphql:"rating" json:"rating"`
+ Director string `graphql:"director" json:"director"`
+ Synopsis string `graphql:"synopsis" json:"synopsis"`
+ URL *string `graphql:"url" json:"url"`
+}
+
type ScrapedSceneTag struct {
// Set if tag matched
ID *string `graphql:"id" json:"id"`
diff --git a/pkg/models/model_studio.go b/pkg/models/model_studio.go
index 51ec69ba0..e978bb439 100644
--- a/pkg/models/model_studio.go
+++ b/pkg/models/model_studio.go
@@ -14,4 +14,4 @@ type Studio struct {
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
}
-var DefaultStudioImage string = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAYAAABw4pVUAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAA3XAAAN1wFCKJt4AAAAB3RJTUUH4wgVBQsJl1CMZAAAASJJREFUeNrt3N0JwyAYhlEj3cj9R3Cm5rbkqtAP+qrnGaCYHPwJpLlaa++mmLpbAERAgAgIEAEBIiBABERAgAgIEAEBIiBABERAgAgIEAHZuVflj40x4i94zhk9vqsVvEq6AsQqMP1EjORx20OACAgQRRx7T+zzcFBxcjNDfoB4ntQqTm5Awo7MlqywZxcgYQ+RlqywJ3ozJAQCSBiEJSsQA0gYBpDAgAARECACAkRAgAgIEAERECACAmSjUv6eAOSB8m8YIGGzBUjYbAESBgMkbBkDEjZbgITBAClcxiqQvEoatreYIWEBASIgJ4Gkf11ntXH3nS9uxfGWfJ5J9hAgAgJEQAQEiIAAERAgAgJEQAQEiIAAERAgAgJEQAQEiL7qBuc6RKLHxr0CAAAAAElFTkSuQmCC"
+var DefaultStudioImage = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAYAAABw4pVUAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAA3XAAAN1wFCKJt4AAAAB3RJTUUH4wgVBQsJl1CMZAAAASJJREFUeNrt3N0JwyAYhlEj3cj9R3Cm5rbkqtAP+qrnGaCYHPwJpLlaa++mmLpbAERAgAgIEAEBIiBABERAgAgIEAEBIiBABERAgAgIEAHZuVflj40x4i94zhk9vqsVvEq6AsQqMP1EjORx20OACAgQRRx7T+zzcFBxcjNDfoB4ntQqTm5Awo7MlqywZxcgYQ+RlqywJ3ozJAQCSBiEJSsQA0gYBpDAgAARECACAkRAgAgIEAERECACAmSjUv6eAOSB8m8YIGGzBUjYbAESBgMkbBkDEjZbgITBAClcxiqQvEoatreYIWEBASIgJ4Gkf11ntXH3nS9uxfGWfJ5J9hAgAgJEQAQEiIAAERAgAgJEQAQEiIAAERAgAgJEQAQEiL7qBuc6RKLHxr0CAAAAAElFTkSuQmCC"
diff --git a/pkg/models/querybuilder_gallery.go b/pkg/models/querybuilder_gallery.go
index f77fe0de2..0f2b06e06 100644
--- a/pkg/models/querybuilder_gallery.go
+++ b/pkg/models/querybuilder_gallery.go
@@ -3,6 +3,7 @@ package models
import (
"database/sql"
"path/filepath"
+ "strconv"
"github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/database"
@@ -51,6 +52,10 @@ func (qb *GalleryQueryBuilder) Update(updatedGallery Gallery, tx *sqlx.Tx) (*Gal
return &updatedGallery, nil
}
+func (qb *GalleryQueryBuilder) Destroy(id int, tx *sqlx.Tx) error {
+ return executeDeleteQuery("galleries", strconv.Itoa(id), tx)
+}
+
type GalleryNullSceneID struct {
SceneID sql.NullInt64
}
@@ -88,7 +93,7 @@ func (qb *GalleryQueryBuilder) FindByPath(path string) (*Gallery, error) {
}
func (qb *GalleryQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) (*Gallery, error) {
- query := "SELECT galleries.* FROM galleries JOIN scenes ON scenes.id = galleries.scene_id WHERE scenes.id = ? LIMIT 1"
+ query := "SELECT galleries.* FROM galleries WHERE galleries.scene_id = ? LIMIT 1"
args := []interface{}{sceneID}
return qb.queryGallery(query, args, tx)
}
@@ -119,7 +124,9 @@ func (qb *GalleryQueryBuilder) Query(findFilter *FindFilterType) ([]*Gallery, in
if q := findFilter.Q; q != nil && *q != "" {
searchColumns := []string{"galleries.path", "galleries.checksum"}
- whereClauses = append(whereClauses, getSearch(searchColumns, *q))
+ clause, thisArgs := getSearchBinding(searchColumns, *q, false)
+ whereClauses = append(whereClauses, clause)
+ args = append(args, thisArgs...)
}
sortAndPagination := qb.getGallerySort(findFilter) + getPagination(findFilter)
@@ -137,13 +144,13 @@ func (qb *GalleryQueryBuilder) Query(findFilter *FindFilterType) ([]*Gallery, in
func (qb *GalleryQueryBuilder) getGallerySort(findFilter *FindFilterType) string {
var sort string
var direction string
- //if findFilter == nil { // TODO temp until title is removed from schema and UI
- sort = "path"
- direction = "ASC"
- //} else {
- // sort = findFilter.getSort("path")
- // direction = findFilter.getDirection()
- //}
+ if findFilter == nil {
+ sort = "path"
+ direction = "ASC"
+ } else {
+ sort = findFilter.GetSort("path")
+ direction = findFilter.GetDirection()
+ }
return getSort(sort, direction, "galleries")
}
diff --git a/pkg/models/querybuilder_gallery_test.go b/pkg/models/querybuilder_gallery_test.go
new file mode 100644
index 000000000..d3a409d08
--- /dev/null
+++ b/pkg/models/querybuilder_gallery_test.go
@@ -0,0 +1,107 @@
+// +build integration
+
+package models_test
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+
+ "github.com/stashapp/stash/pkg/models"
+)
+
+func TestGalleryFind(t *testing.T) {
+ gqb := models.NewGalleryQueryBuilder()
+
+ const galleryIdx = 0
+ gallery, err := gqb.Find(galleryIDs[galleryIdx])
+
+ if err != nil {
+ t.Fatalf("Error finding gallery: %s", err.Error())
+ }
+
+ assert.Equal(t, getGalleryStringValue(galleryIdx, "Path"), gallery.Path)
+
+ gallery, err = gqb.Find(0)
+
+ if err != nil {
+ t.Fatalf("Error finding gallery: %s", err.Error())
+ }
+
+ assert.Nil(t, gallery)
+}
+
+func TestGalleryFindByChecksum(t *testing.T) {
+ gqb := models.NewGalleryQueryBuilder()
+
+ const galleryIdx = 0
+ galleryChecksum := getGalleryStringValue(galleryIdx, "Checksum")
+ gallery, err := gqb.FindByChecksum(galleryChecksum, nil)
+
+ if err != nil {
+ t.Fatalf("Error finding gallery: %s", err.Error())
+ }
+
+ assert.Equal(t, getGalleryStringValue(galleryIdx, "Path"), gallery.Path)
+
+ galleryChecksum = "not exist"
+ gallery, err = gqb.FindByChecksum(galleryChecksum, nil)
+
+ if err != nil {
+ t.Fatalf("Error finding gallery: %s", err.Error())
+ }
+
+ assert.Nil(t, gallery)
+}
+
+func TestGalleryFindByPath(t *testing.T) {
+ gqb := models.NewGalleryQueryBuilder()
+
+ const galleryIdx = 0
+ galleryPath := getGalleryStringValue(galleryIdx, "Path")
+ gallery, err := gqb.FindByPath(galleryPath)
+
+ if err != nil {
+ t.Fatalf("Error finding gallery: %s", err.Error())
+ }
+
+ assert.Equal(t, galleryPath, gallery.Path)
+
+ galleryPath = "not exist"
+ gallery, err = gqb.FindByPath(galleryPath)
+
+ if err != nil {
+ t.Fatalf("Error finding gallery: %s", err.Error())
+ }
+
+ assert.Nil(t, gallery)
+}
+
+func TestGalleryFindBySceneID(t *testing.T) {
+ gqb := models.NewGalleryQueryBuilder()
+
+ sceneID := sceneIDs[sceneIdxWithGallery]
+ gallery, err := gqb.FindBySceneID(sceneID, nil)
+
+ if err != nil {
+ t.Fatalf("Error finding gallery: %s", err.Error())
+ }
+
+ assert.Equal(t, getGalleryStringValue(galleryIdxWithScene, "Path"), gallery.Path)
+
+ gallery, err = gqb.FindBySceneID(0, nil)
+
+ if err != nil {
+ t.Fatalf("Error finding gallery: %s", err.Error())
+ }
+
+ assert.Nil(t, gallery)
+}
+
+// TODO ValidGalleriesForScenePath
+// TODO Count
+// TODO All
+// TODO Query
+// TODO Update
+// TODO Destroy
+// TODO ClearGalleryId
diff --git a/pkg/models/querybuilder_joins.go b/pkg/models/querybuilder_joins.go
index 310bc8dad..416b85a18 100644
--- a/pkg/models/querybuilder_joins.go
+++ b/pkg/models/querybuilder_joins.go
@@ -111,6 +111,109 @@ func (qb *JoinsQueryBuilder) DestroyPerformersScenes(sceneID int, tx *sqlx.Tx) e
return err
}
+func (qb *JoinsQueryBuilder) GetSceneMovies(sceneID int, tx *sqlx.Tx) ([]MoviesScenes, error) {
+ query := `SELECT * from movies_scenes WHERE scene_id = ?`
+
+ var rows *sqlx.Rows
+ var err error
+ if tx != nil {
+ rows, err = tx.Queryx(query, sceneID)
+ } else {
+ rows, err = database.DB.Queryx(query, sceneID)
+ }
+
+ if err != nil && err != sql.ErrNoRows {
+ return nil, err
+ }
+ defer rows.Close()
+
+ movieScenes := make([]MoviesScenes, 0)
+ for rows.Next() {
+ movieScene := MoviesScenes{}
+ if err := rows.StructScan(&movieScene); err != nil {
+ return nil, err
+ }
+ movieScenes = append(movieScenes, movieScene)
+ }
+
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+
+ return movieScenes, nil
+}
+
+func (qb *JoinsQueryBuilder) CreateMoviesScenes(newJoins []MoviesScenes, tx *sqlx.Tx) error {
+ ensureTx(tx)
+ for _, join := range newJoins {
+ _, err := tx.NamedExec(
+ `INSERT INTO movies_scenes (movie_id, scene_id, scene_index) VALUES (:movie_id, :scene_id, :scene_index)`,
+ join,
+ )
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// AddMoviesScene adds a movie to a scene. It makes no change
+// if the movie is already attached to the scene. It returns true
+// if the movie was added to the scene.
+
+func (qb *JoinsQueryBuilder) AddMoviesScene(sceneID int, movieID int, sceneIdx *int, tx *sqlx.Tx) (bool, error) {
+ ensureTx(tx)
+
+ existingMovies, err := qb.GetSceneMovies(sceneID, tx)
+
+ if err != nil {
+ return false, err
+ }
+
+ // ensure not already present
+ for _, p := range existingMovies {
+ if p.MovieID == movieID && p.SceneID == sceneID {
+ return false, nil
+ }
+ }
+
+ movieJoin := MoviesScenes{
+ MovieID: movieID,
+ SceneID: sceneID,
+ }
+
+ if sceneIdx != nil {
+ movieJoin.SceneIndex = sql.NullInt64{
+ Int64: int64(*sceneIdx),
+ Valid: true,
+ }
+ }
+ movieJoins := append(existingMovies, movieJoin)
+
+ err = qb.UpdateMoviesScenes(sceneID, movieJoins, tx)
+
+ return err == nil, err
+}
+
+func (qb *JoinsQueryBuilder) UpdateMoviesScenes(sceneID int, updatedJoins []MoviesScenes, tx *sqlx.Tx) error {
+ ensureTx(tx)
+
+ // Delete the existing joins and then create new ones
+ _, err := tx.Exec("DELETE FROM movies_scenes WHERE scene_id = ?", sceneID)
+ if err != nil {
+ return err
+ }
+ return qb.CreateMoviesScenes(updatedJoins, tx)
+}
+
+func (qb *JoinsQueryBuilder) DestroyMoviesScenes(sceneID int, tx *sqlx.Tx) error {
+ ensureTx(tx)
+
+ // Delete the existing joins
+ _, err := tx.Exec("DELETE FROM movies_scenes WHERE scene_id = ?", sceneID)
+ return err
+}
+
func (qb *JoinsQueryBuilder) GetSceneTags(sceneID int, tx *sqlx.Tx) ([]ScenesTags, error) {
ensureTx(tx)
diff --git a/pkg/models/querybuilder_movies.go b/pkg/models/querybuilder_movies.go
new file mode 100644
index 000000000..cccd4d180
--- /dev/null
+++ b/pkg/models/querybuilder_movies.go
@@ -0,0 +1,240 @@
+package models
+
+import (
+ "database/sql"
+ "strconv"
+
+ "github.com/jmoiron/sqlx"
+ "github.com/stashapp/stash/pkg/database"
+)
+
+type MovieQueryBuilder struct{}
+
+func NewMovieQueryBuilder() MovieQueryBuilder {
+ return MovieQueryBuilder{}
+}
+
+func (qb *MovieQueryBuilder) Create(newMovie Movie, tx *sqlx.Tx) (*Movie, error) {
+ ensureTx(tx)
+ result, err := tx.NamedExec(
+ `INSERT INTO movies (front_image, back_image, checksum, name, aliases, duration, date, rating, studio_id, director, synopsis, url, created_at, updated_at)
+ VALUES (:front_image, :back_image, :checksum, :name, :aliases, :duration, :date, :rating, :studio_id, :director, :synopsis, :url, :created_at, :updated_at)
+ `,
+ newMovie,
+ )
+ if err != nil {
+ return nil, err
+ }
+ movieID, err := result.LastInsertId()
+ if err != nil {
+ return nil, err
+ }
+
+ if err := tx.Get(&newMovie, `SELECT * FROM movies WHERE id = ? LIMIT 1`, movieID); err != nil {
+ return nil, err
+ }
+ return &newMovie, nil
+}
+
+func (qb *MovieQueryBuilder) Update(updatedMovie MoviePartial, tx *sqlx.Tx) (*Movie, error) {
+ ensureTx(tx)
+ _, err := tx.NamedExec(
+ `UPDATE movies SET `+SQLGenKeysPartial(updatedMovie)+` WHERE movies.id = :id`,
+ updatedMovie,
+ )
+ if err != nil {
+ return nil, err
+ }
+
+ return qb.Find(updatedMovie.ID, tx)
+}
+
+func (qb *MovieQueryBuilder) Destroy(id string, tx *sqlx.Tx) error {
+ // delete movie from movies_scenes
+
+ _, err := tx.Exec("DELETE FROM movies_scenes WHERE movie_id = ?", id)
+ if err != nil {
+ return err
+ }
+
+ // // remove movie from scraped items
+ // _, err = tx.Exec("UPDATE scraped_items SET movie_id = null WHERE movie_id = ?", id)
+ // if err != nil {
+ // return err
+ // }
+
+ return executeDeleteQuery("movies", id, tx)
+}
+
+func (qb *MovieQueryBuilder) Find(id int, tx *sqlx.Tx) (*Movie, error) {
+ query := "SELECT * FROM movies WHERE id = ? LIMIT 1"
+ args := []interface{}{id}
+ return qb.queryMovie(query, args, tx)
+}
+
+func (qb *MovieQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]*Movie, error) {
+ query := `
+ SELECT movies.* FROM movies
+ LEFT JOIN movies_scenes as scenes_join on scenes_join.movie_id = movies.id
+ WHERE scenes_join.scene_id = ?
+ GROUP BY movies.id
+ `
+ args := []interface{}{sceneID}
+ return qb.queryMovies(query, args, tx)
+}
+
+func (qb *MovieQueryBuilder) FindByName(name string, tx *sqlx.Tx, nocase bool) (*Movie, error) {
+ query := "SELECT * FROM movies WHERE name = ?"
+ if nocase {
+ query += " COLLATE NOCASE"
+ }
+ query += " LIMIT 1"
+ args := []interface{}{name}
+ return qb.queryMovie(query, args, tx)
+}
+
+func (qb *MovieQueryBuilder) FindByNames(names []string, tx *sqlx.Tx, nocase bool) ([]*Movie, error) {
+ query := "SELECT * FROM movies WHERE name"
+ if nocase {
+ query += " COLLATE NOCASE"
+ }
+ query += " IN " + getInBinding(len(names))
+ var args []interface{}
+ for _, name := range names {
+ args = append(args, name)
+ }
+ return qb.queryMovies(query, args, tx)
+}
+
+func (qb *MovieQueryBuilder) Count() (int, error) {
+ return runCountQuery(buildCountQuery("SELECT movies.id FROM movies"), nil)
+}
+
+func (qb *MovieQueryBuilder) All() ([]*Movie, error) {
+ return qb.queryMovies(selectAll("movies")+qb.getMovieSort(nil), nil, nil)
+}
+
+func (qb *MovieQueryBuilder) AllSlim() ([]*Movie, error) {
+ return qb.queryMovies("SELECT movies.id, movies.name FROM movies "+qb.getMovieSort(nil), nil, nil)
+}
+
+func (qb *MovieQueryBuilder) Query(movieFilter *MovieFilterType, findFilter *FindFilterType) ([]*Movie, int) {
+ if findFilter == nil {
+ findFilter = &FindFilterType{}
+ }
+ if movieFilter == nil {
+ movieFilter = &MovieFilterType{}
+ }
+
+ var whereClauses []string
+ var havingClauses []string
+ var args []interface{}
+ body := selectDistinctIDs("movies")
+ body += `
+ left join movies_scenes as scenes_join on scenes_join.movie_id = movies.id
+ left join scenes on scenes_join.scene_id = scenes.id
+ left join studios as studio on studio.id = movies.studio_id
+`
+
+ if q := findFilter.Q; q != nil && *q != "" {
+ searchColumns := []string{"movies.name"}
+ clause, thisArgs := getSearchBinding(searchColumns, *q, false)
+ whereClauses = append(whereClauses, clause)
+ args = append(args, thisArgs...)
+ }
+
+ if studiosFilter := movieFilter.Studios; studiosFilter != nil && len(studiosFilter.Value) > 0 {
+ for _, studioID := range studiosFilter.Value {
+ args = append(args, studioID)
+ }
+
+ whereClause, havingClause := qb.getMultiCriterionClause("studio", "", "studio_id", studiosFilter)
+ whereClauses = appendClause(whereClauses, whereClause)
+ havingClauses = appendClause(havingClauses, havingClause)
+ }
+
+ sortAndPagination := qb.getMovieSort(findFilter) + getPagination(findFilter)
+ idsResult, countResult := executeFindQuery("movies", body, args, sortAndPagination, whereClauses, havingClauses)
+
+ var movies []*Movie
+ for _, id := range idsResult {
+ movie, _ := qb.Find(id, nil)
+ movies = append(movies, movie)
+ }
+
+ return movies, countResult
+}
+
+// returns where clause and having clause
+func (qb *MovieQueryBuilder) getMultiCriterionClause(table string, joinTable string, joinTableField string, criterion *MultiCriterionInput) (string, string) {
+ whereClause := ""
+ havingClause := ""
+ if criterion.Modifier == CriterionModifierIncludes {
+ // includes any of the provided ids
+ whereClause = table + ".id IN " + getInBinding(len(criterion.Value))
+ } else if criterion.Modifier == CriterionModifierIncludesAll {
+ // includes all of the provided ids
+ whereClause = table + ".id IN " + getInBinding(len(criterion.Value))
+ havingClause = "count(distinct " + table + ".id) IS " + strconv.Itoa(len(criterion.Value))
+ } else if criterion.Modifier == CriterionModifierExcludes {
+ // excludes all of the provided ids
+ if joinTable != "" {
+ whereClause = "not exists (select " + joinTable + ".movie_id from " + joinTable + " where " + joinTable + ".movie_id = movies.id and " + joinTable + "." + joinTableField + " in " + getInBinding(len(criterion.Value)) + ")"
+ } else {
+ whereClause = "not exists (select m.id from movies as m where m.id = movies.id and m." + joinTableField + " in " + getInBinding(len(criterion.Value)) + ")"
+ }
+ }
+
+ return whereClause, havingClause
+}
+
+func (qb *MovieQueryBuilder) getMovieSort(findFilter *FindFilterType) string {
+ var sort string
+ var direction string
+ if findFilter == nil {
+ sort = "name"
+ direction = "ASC"
+ } else {
+ sort = findFilter.GetSort("name")
+ direction = findFilter.GetDirection()
+ }
+ return getSort(sort, direction, "movies")
+}
+
+func (qb *MovieQueryBuilder) queryMovie(query string, args []interface{}, tx *sqlx.Tx) (*Movie, error) {
+ results, err := qb.queryMovies(query, args, tx)
+ if err != nil || len(results) < 1 {
+ return nil, err
+ }
+ return results[0], nil
+}
+
+func (qb *MovieQueryBuilder) queryMovies(query string, args []interface{}, tx *sqlx.Tx) ([]*Movie, error) {
+ var rows *sqlx.Rows
+ var err error
+ if tx != nil {
+ rows, err = tx.Queryx(query, args...)
+ } else {
+ rows, err = database.DB.Queryx(query, args...)
+ }
+
+ if err != nil && err != sql.ErrNoRows {
+ return nil, err
+ }
+ defer rows.Close()
+
+ movies := make([]*Movie, 0)
+ for rows.Next() {
+ movie := Movie{}
+ if err := rows.StructScan(&movie); err != nil {
+ return nil, err
+ }
+ movies = append(movies, &movie)
+ }
+
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+
+ return movies, nil
+}
diff --git a/pkg/models/querybuilder_movies_test.go b/pkg/models/querybuilder_movies_test.go
new file mode 100644
index 000000000..a11f94f18
--- /dev/null
+++ b/pkg/models/querybuilder_movies_test.go
@@ -0,0 +1,94 @@
+// +build integration
+
+package models_test
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+
+ "github.com/stashapp/stash/pkg/models"
+)
+
+func TestMovieFindBySceneID(t *testing.T) {
+ mqb := models.NewMovieQueryBuilder()
+ sceneID := sceneIDs[sceneIdxWithMovie]
+
+ movies, err := mqb.FindBySceneID(sceneID, nil)
+
+ if err != nil {
+ t.Fatalf("Error finding movie: %s", err.Error())
+ }
+
+ assert.Equal(t, 1, len(movies), "expect 1 movie")
+
+ movie := movies[0]
+ assert.Equal(t, getMovieStringValue(movieIdxWithScene, "Name"), movie.Name.String)
+
+ movies, err = mqb.FindBySceneID(0, nil)
+
+ if err != nil {
+ t.Fatalf("Error finding movie: %s", err.Error())
+ }
+
+ assert.Equal(t, 0, len(movies))
+}
+
+func TestMovieFindByName(t *testing.T) {
+
+ mqb := models.NewMovieQueryBuilder()
+
+ name := movieNames[movieIdxWithScene] // find a movie by name
+
+ movie, err := mqb.FindByName(name, nil, false)
+
+ if err != nil {
+ t.Fatalf("Error finding movies: %s", err.Error())
+ }
+
+ assert.Equal(t, movieNames[movieIdxWithScene], movie.Name.String)
+
+ name = movieNames[movieIdxWithDupName] // find a movie by name nocase
+
+ movie, err = mqb.FindByName(name, nil, true)
+
+ if err != nil {
+ t.Fatalf("Error finding movies: %s", err.Error())
+ }
+	// movieIdxWithDupName and movieIdxWithScene should have similar names (only diff should be Name vs NaMe)
+	//movie.Name should match with movieIdxWithScene since its ID is before movieIdxWithDupName
+ assert.Equal(t, movieNames[movieIdxWithScene], movie.Name.String)
+ //movie.Name should match with movieIdxWithDupName if the check is not case sensitive
+ assert.Equal(t, strings.ToLower(movieNames[movieIdxWithDupName]), strings.ToLower(movie.Name.String))
+}
+
+func TestMovieFindByNames(t *testing.T) {
+ var names []string
+
+ mqb := models.NewMovieQueryBuilder()
+
+ names = append(names, movieNames[movieIdxWithScene]) // find movies by names
+
+ movies, err := mqb.FindByNames(names, nil, false)
+ if err != nil {
+ t.Fatalf("Error finding movies: %s", err.Error())
+ }
+ assert.Len(t, movies, 1)
+ assert.Equal(t, movieNames[movieIdxWithScene], movies[0].Name.String)
+
+ movies, err = mqb.FindByNames(names, nil, true) // find movies by names nocase
+ if err != nil {
+ t.Fatalf("Error finding movies: %s", err.Error())
+ }
+ assert.Len(t, movies, 2) // movieIdxWithScene and movieIdxWithDupName
+ assert.Equal(t, strings.ToLower(movieNames[movieIdxWithScene]), strings.ToLower(movies[0].Name.String))
+ assert.Equal(t, strings.ToLower(movieNames[movieIdxWithScene]), strings.ToLower(movies[1].Name.String))
+}
+
+// TODO Update
+// TODO Destroy
+// TODO Find
+// TODO Count
+// TODO All
+// TODO Query
diff --git a/pkg/models/querybuilder_performer.go b/pkg/models/querybuilder_performer.go
index 7cdb10093..5b0dca723 100644
--- a/pkg/models/querybuilder_performer.go
+++ b/pkg/models/querybuilder_performer.go
@@ -18,10 +18,10 @@ func NewPerformerQueryBuilder() PerformerQueryBuilder {
func (qb *PerformerQueryBuilder) Create(newPerformer Performer, tx *sqlx.Tx) (*Performer, error) {
ensureTx(tx)
result, err := tx.NamedExec(
- `INSERT INTO performers (image, checksum, name, url, twitter, instagram, birthdate, ethnicity, country,
+ `INSERT INTO performers (image, checksum, name, url, gender, twitter, instagram, birthdate, ethnicity, country,
eye_color, height, measurements, fake_tits, career_length, tattoos, piercings,
aliases, favorite, created_at, updated_at)
- VALUES (:image, :checksum, :name, :url, :twitter, :instagram, :birthdate, :ethnicity, :country,
+ VALUES (:image, :checksum, :name, :url, :gender, :twitter, :instagram, :birthdate, :ethnicity, :country,
:eye_color, :height, :measurements, :fake_tits, :career_length, :tattoos, :piercings,
:aliases, :favorite, :created_at, :updated_at)
`,
@@ -77,19 +77,31 @@ func (qb *PerformerQueryBuilder) Find(id int) (*Performer, error) {
}
func (qb *PerformerQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]*Performer, error) {
- query := `
- SELECT performers.* FROM performers
+ query := selectAll("performers") + `
LEFT JOIN performers_scenes as scenes_join on scenes_join.performer_id = performers.id
- LEFT JOIN scenes on scenes_join.scene_id = scenes.id
- WHERE scenes.id = ?
- GROUP BY performers.id
+ WHERE scenes_join.scene_id = ?
`
args := []interface{}{sceneID}
return qb.queryPerformers(query, args, tx)
}
-func (qb *PerformerQueryBuilder) FindByNames(names []string, tx *sqlx.Tx) ([]*Performer, error) {
- query := "SELECT * FROM performers WHERE name IN " + getInBinding(len(names))
+func (qb *PerformerQueryBuilder) FindNameBySceneID(sceneID int, tx *sqlx.Tx) ([]*Performer, error) {
+ query := `
+ SELECT performers.name FROM performers
+ LEFT JOIN performers_scenes as scenes_join on scenes_join.performer_id = performers.id
+ WHERE scenes_join.scene_id = ?
+ `
+ args := []interface{}{sceneID}
+ return qb.queryPerformers(query, args, tx)
+}
+
+func (qb *PerformerQueryBuilder) FindByNames(names []string, tx *sqlx.Tx, nocase bool) ([]*Performer, error) {
+ query := "SELECT * FROM performers WHERE name"
+ if nocase {
+ query += " COLLATE NOCASE"
+ }
+ query += " IN " + getInBinding(len(names))
+
var args []interface{}
for _, name := range names {
args = append(args, name)
@@ -105,6 +117,10 @@ func (qb *PerformerQueryBuilder) All() ([]*Performer, error) {
return qb.queryPerformers(selectAll("performers")+qb.getPerformerSort(nil), nil, nil)
}
+func (qb *PerformerQueryBuilder) AllSlim() ([]*Performer, error) {
+ return qb.queryPerformers("SELECT performers.id, performers.name, performers.gender FROM performers "+qb.getPerformerSort(nil), nil, nil)
+}
+
func (qb *PerformerQueryBuilder) Query(performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]*Performer, int) {
if performerFilter == nil {
performerFilter = &PerformerFilterType{}
@@ -113,11 +129,12 @@ func (qb *PerformerQueryBuilder) Query(performerFilter *PerformerFilterType, fin
findFilter = &FindFilterType{}
}
+ tableName := "performers"
query := queryBuilder{
- tableName: "performers",
+ tableName: tableName,
}
- query.body = selectDistinctIDs("performers")
+ query.body = selectDistinctIDs(tableName)
query.body += `
left join performers_scenes as scenes_join on scenes_join.performer_id = performers.id
left join scenes on scenes_join.scene_id = scenes.id
@@ -152,18 +169,32 @@ func (qb *PerformerQueryBuilder) Query(performerFilter *PerformerFilterType, fin
query.addArg(thisArgs...)
}
- handleStringCriterion("ethnicity", performerFilter.Ethnicity, &query)
- handleStringCriterion("country", performerFilter.Country, &query)
- handleStringCriterion("eye_color", performerFilter.EyeColor, &query)
- handleStringCriterion("height", performerFilter.Height, &query)
- handleStringCriterion("measurements", performerFilter.Measurements, &query)
- handleStringCriterion("fake_tits", performerFilter.FakeTits, &query)
- handleStringCriterion("career_length", performerFilter.CareerLength, &query)
- handleStringCriterion("tattoos", performerFilter.Tattoos, &query)
- handleStringCriterion("piercings", performerFilter.Piercings, &query)
+ if gender := performerFilter.Gender; gender != nil {
+ query.addWhere("performers.gender = ?")
+ query.addArg(gender.Value.String())
+ }
+
+ if isMissingFilter := performerFilter.IsMissing; isMissingFilter != nil && *isMissingFilter != "" {
+ switch *isMissingFilter {
+ case "scenes":
+ query.addWhere("scenes_join.scene_id IS NULL")
+ default:
+ query.addWhere("performers." + *isMissingFilter + " IS NULL")
+ }
+ }
+
+ handleStringCriterion(tableName+".ethnicity", performerFilter.Ethnicity, &query)
+ handleStringCriterion(tableName+".country", performerFilter.Country, &query)
+ handleStringCriterion(tableName+".eye_color", performerFilter.EyeColor, &query)
+ handleStringCriterion(tableName+".height", performerFilter.Height, &query)
+ handleStringCriterion(tableName+".measurements", performerFilter.Measurements, &query)
+ handleStringCriterion(tableName+".fake_tits", performerFilter.FakeTits, &query)
+ handleStringCriterion(tableName+".career_length", performerFilter.CareerLength, &query)
+ handleStringCriterion(tableName+".tattoos", performerFilter.Tattoos, &query)
+ handleStringCriterion(tableName+".piercings", performerFilter.Piercings, &query)
// TODO - need better handling of aliases
- handleStringCriterion("aliases", performerFilter.Aliases, &query)
+ handleStringCriterion(tableName+".aliases", performerFilter.Aliases, &query)
query.sortAndPagination = qb.getPerformerSort(findFilter) + getPagination(findFilter)
idsResult, countResult := query.executeFind()
diff --git a/pkg/models/querybuilder_performer_test.go b/pkg/models/querybuilder_performer_test.go
new file mode 100644
index 000000000..1c2b26979
--- /dev/null
+++ b/pkg/models/querybuilder_performer_test.go
@@ -0,0 +1,112 @@
+// +build integration
+
+package models_test
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+
+ "github.com/stashapp/stash/pkg/models"
+)
+
+func TestPerformerFindBySceneID(t *testing.T) {
+ pqb := models.NewPerformerQueryBuilder()
+ sceneID := sceneIDs[sceneIdxWithPerformer]
+
+ performers, err := pqb.FindBySceneID(sceneID, nil)
+
+ if err != nil {
+ t.Fatalf("Error finding performer: %s", err.Error())
+ }
+
+ assert.Equal(t, 1, len(performers))
+ performer := performers[0]
+
+ assert.Equal(t, getPerformerStringValue(performerIdxWithScene, "Name"), performer.Name.String)
+
+ performers, err = pqb.FindBySceneID(0, nil)
+
+ if err != nil {
+ t.Fatalf("Error finding performer: %s", err.Error())
+ }
+
+ assert.Equal(t, 0, len(performers))
+}
+
+func TestPerformerFindNameBySceneID(t *testing.T) {
+ pqb := models.NewPerformerQueryBuilder()
+ sceneID := sceneIDs[sceneIdxWithPerformer]
+
+ performers, err := pqb.FindNameBySceneID(sceneID, nil)
+
+ if err != nil {
+ t.Fatalf("Error finding performer: %s", err.Error())
+ }
+
+ assert.Equal(t, 1, len(performers))
+ performer := performers[0]
+
+ assert.Equal(t, getPerformerStringValue(performerIdxWithScene, "Name"), performer.Name.String)
+
+ performers, err = pqb.FindBySceneID(0, nil)
+
+ if err != nil {
+ t.Fatalf("Error finding performer: %s", err.Error())
+ }
+
+ assert.Equal(t, 0, len(performers))
+}
+
+func TestPerformerFindByNames(t *testing.T) {
+ var names []string
+
+ pqb := models.NewPerformerQueryBuilder()
+
+ names = append(names, performerNames[performerIdxWithScene]) // find performers by names
+
+ performers, err := pqb.FindByNames(names, nil, false)
+ if err != nil {
+ t.Fatalf("Error finding performers: %s", err.Error())
+ }
+ assert.Len(t, performers, 1)
+ assert.Equal(t, performerNames[performerIdxWithScene], performers[0].Name.String)
+
+ performers, err = pqb.FindByNames(names, nil, true) // find performers by names nocase
+ if err != nil {
+ t.Fatalf("Error finding performers: %s", err.Error())
+ }
+ assert.Len(t, performers, 2) // performerIdxWithScene and performerIdxWithDupName
+ assert.Equal(t, strings.ToLower(performerNames[performerIdxWithScene]), strings.ToLower(performers[0].Name.String))
+ assert.Equal(t, strings.ToLower(performerNames[performerIdxWithScene]), strings.ToLower(performers[1].Name.String))
+
+ names = append(names, performerNames[performerIdx1WithScene]) // find performers by names ( 2 names )
+
+ performers, err = pqb.FindByNames(names, nil, false)
+ if err != nil {
+ t.Fatalf("Error finding performers: %s", err.Error())
+ }
+ assert.Len(t, performers, 2) // performerIdxWithScene and performerIdx1WithScene
+ assert.Equal(t, performerNames[performerIdxWithScene], performers[0].Name.String)
+ assert.Equal(t, performerNames[performerIdx1WithScene], performers[1].Name.String)
+
+ performers, err = pqb.FindByNames(names, nil, true) // find performers by names ( 2 names nocase)
+ if err != nil {
+ t.Fatalf("Error finding performers: %s", err.Error())
+ }
+ assert.Len(t, performers, 4) // performerIdxWithScene and performerIdxWithDupName , performerIdx1WithScene and performerIdx1WithDupName
+ assert.Equal(t, performerNames[performerIdxWithScene], performers[0].Name.String)
+ assert.Equal(t, performerNames[performerIdx1WithScene], performers[1].Name.String)
+ assert.Equal(t, performerNames[performerIdx1WithDupName], performers[2].Name.String)
+ assert.Equal(t, performerNames[performerIdxWithDupName], performers[3].Name.String)
+
+}
+
+// TODO Update
+// TODO Destroy
+// TODO Find
+// TODO Count
+// TODO All
+// TODO AllSlim
+// TODO Query
diff --git a/pkg/models/querybuilder_scene.go b/pkg/models/querybuilder_scene.go
index 502af4bb5..2a8d162c4 100644
--- a/pkg/models/querybuilder_scene.go
+++ b/pkg/models/querybuilder_scene.go
@@ -7,31 +7,40 @@ import (
"github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/database"
+ "github.com/stashapp/stash/pkg/utils"
)
-const scenesForPerformerQuery = `
-SELECT scenes.* FROM scenes
+const sceneTable = "scenes"
+
+var scenesForPerformerQuery = selectAll(sceneTable) + `
LEFT JOIN performers_scenes as performers_join on performers_join.scene_id = scenes.id
-LEFT JOIN performers on performers_join.performer_id = performers.id
-WHERE performers.id = ?
+WHERE performers_join.performer_id = ?
GROUP BY scenes.id
`
-const scenesForStudioQuery = `
-SELECT scenes.* FROM scenes
+var countScenesForPerformerQuery = `
+SELECT performer_id FROM performers_scenes as performers_join
+WHERE performer_id = ?
+GROUP BY scene_id
+`
+
+var scenesForStudioQuery = selectAll(sceneTable) + `
JOIN studios ON studios.id = scenes.studio_id
WHERE studios.id = ?
GROUP BY scenes.id
`
-
-const scenesForTagQuery = `
-SELECT scenes.* FROM scenes
-LEFT JOIN scenes_tags as tags_join on tags_join.scene_id = scenes.id
-LEFT JOIN tags on tags_join.tag_id = tags.id
-WHERE tags.id = ?
+var scenesForMovieQuery = selectAll(sceneTable) + `
+LEFT JOIN movies_scenes as movies_join on movies_join.scene_id = scenes.id
+WHERE movies_join.movie_id = ?
GROUP BY scenes.id
`
+var countScenesForTagQuery = `
+SELECT tag_id AS id FROM scenes_tags
+WHERE scenes_tags.tag_id = ?
+GROUP BY scenes_tags.scene_id
+`
+
type SceneQueryBuilder struct{}
func NewSceneQueryBuilder() SceneQueryBuilder {
@@ -41,11 +50,11 @@ func NewSceneQueryBuilder() SceneQueryBuilder {
func (qb *SceneQueryBuilder) Create(newScene Scene, tx *sqlx.Tx) (*Scene, error) {
ensureTx(tx)
result, err := tx.NamedExec(
- `INSERT INTO scenes (checksum, path, title, details, url, date, rating, size, duration, video_codec,
- audio_codec, width, height, framerate, bitrate, studio_id, cover,
+ `INSERT INTO scenes (checksum, path, title, details, url, date, rating, o_counter, size, duration, video_codec,
+ audio_codec, format, width, height, framerate, bitrate, studio_id, cover,
created_at, updated_at)
- VALUES (:checksum, :path, :title, :details, :url, :date, :rating, :size, :duration, :video_codec,
- :audio_codec, :width, :height, :framerate, :bitrate, :studio_id, :cover,
+ VALUES (:checksum, :path, :title, :details, :url, :date, :rating, :o_counter, :size, :duration, :video_codec,
+ :audio_codec, :format, :width, :height, :framerate, :bitrate, :studio_id, :cover,
:created_at, :updated_at)
`,
newScene,
@@ -131,6 +140,10 @@ func (qb *SceneQueryBuilder) ResetOCounter(id int, tx *sqlx.Tx) (int, error) {
}
func (qb *SceneQueryBuilder) Destroy(id string, tx *sqlx.Tx) error {
+ _, err := tx.Exec("DELETE FROM movies_scenes WHERE scene_id = ?", id)
+ if err != nil {
+ return err
+ }
return executeDeleteQuery("scenes", id, tx)
}
func (qb *SceneQueryBuilder) Find(id int) (*Scene, error) {
@@ -138,7 +151,7 @@ func (qb *SceneQueryBuilder) Find(id int) (*Scene, error) {
}
func (qb *SceneQueryBuilder) find(id int, tx *sqlx.Tx) (*Scene, error) {
- query := "SELECT * FROM scenes WHERE id = ? LIMIT 1"
+ query := selectAll(sceneTable) + "WHERE id = ? LIMIT 1"
args := []interface{}{id}
return qb.queryScene(query, args, tx)
}
@@ -150,7 +163,7 @@ func (qb *SceneQueryBuilder) FindByChecksum(checksum string) (*Scene, error) {
}
func (qb *SceneQueryBuilder) FindByPath(path string) (*Scene, error) {
- query := "SELECT * FROM scenes WHERE path = ? LIMIT 1"
+ query := selectAll(sceneTable) + "WHERE path = ? LIMIT 1"
args := []interface{}{path}
return qb.queryScene(query, args, nil)
}
@@ -162,7 +175,7 @@ func (qb *SceneQueryBuilder) FindByPerformerID(performerID int) ([]*Scene, error
func (qb *SceneQueryBuilder) CountByPerformerID(performerID int) (int, error) {
args := []interface{}{performerID}
- return runCountQuery(buildCountQuery(scenesForPerformerQuery), args)
+ return runCountQuery(buildCountQuery(countScenesForPerformerQuery), args)
}
func (qb *SceneQueryBuilder) FindByStudioID(studioID int) ([]*Scene, error) {
@@ -170,10 +183,28 @@ func (qb *SceneQueryBuilder) FindByStudioID(studioID int) ([]*Scene, error) {
return qb.queryScenes(scenesForStudioQuery, args, nil)
}
+func (qb *SceneQueryBuilder) FindByMovieID(movieID int) ([]*Scene, error) {
+ args := []interface{}{movieID}
+ return qb.queryScenes(scenesForMovieQuery, args, nil)
+}
+
+func (qb *SceneQueryBuilder) CountByMovieID(movieID int) (int, error) {
+ args := []interface{}{movieID}
+ return runCountQuery(buildCountQuery(scenesForMovieQuery), args)
+}
+
func (qb *SceneQueryBuilder) Count() (int, error) {
return runCountQuery(buildCountQuery("SELECT scenes.id FROM scenes"), nil)
}
+func (qb *SceneQueryBuilder) SizeCount() (string, error) {
+ sum, err := runSumQuery("SELECT SUM(size) as sum FROM scenes", nil)
+ if err != nil {
+ return "0 B", err
+ }
+ return utils.HumanizeBytes(sum), err
+}
+
func (qb *SceneQueryBuilder) CountByStudioID(studioID int) (int, error) {
args := []interface{}{studioID}
return runCountQuery(buildCountQuery(scenesForStudioQuery), args)
@@ -181,7 +212,7 @@ func (qb *SceneQueryBuilder) CountByStudioID(studioID int) (int, error) {
func (qb *SceneQueryBuilder) CountByTagID(tagID int) (int, error) {
args := []interface{}{tagID}
- return runCountQuery(buildCountQuery(scenesForTagQuery), args)
+ return runCountQuery(buildCountQuery(countScenesForTagQuery), args)
}
func (qb *SceneQueryBuilder) Wall(q *string) ([]*Scene, error) {
@@ -189,12 +220,12 @@ func (qb *SceneQueryBuilder) Wall(q *string) ([]*Scene, error) {
if q != nil {
s = *q
}
- query := "SELECT scenes.* FROM scenes WHERE scenes.details LIKE '%" + s + "%' ORDER BY RANDOM() LIMIT 80"
+ query := selectAll(sceneTable) + "WHERE scenes.details LIKE '%" + s + "%' ORDER BY RANDOM() LIMIT 80"
return qb.queryScenes(query, nil, nil)
}
func (qb *SceneQueryBuilder) All() ([]*Scene, error) {
- return qb.queryScenes(selectAll("scenes")+qb.getSceneSort(nil), nil, nil)
+ return qb.queryScenes(selectAll(sceneTable)+qb.getSceneSort(nil), nil, nil)
}
func (qb *SceneQueryBuilder) Query(sceneFilter *SceneFilterType, findFilter *FindFilterType) ([]*Scene, int) {
@@ -205,121 +236,138 @@ func (qb *SceneQueryBuilder) Query(sceneFilter *SceneFilterType, findFilter *Fin
findFilter = &FindFilterType{}
}
- var whereClauses []string
- var havingClauses []string
- var args []interface{}
- body := selectDistinctIDs("scenes")
- body = body + `
+ query := queryBuilder{
+ tableName: sceneTable,
+ }
+
+ query.body = selectDistinctIDs(sceneTable)
+ query.body += `
left join scene_markers on scene_markers.scene_id = scenes.id
left join performers_scenes as performers_join on performers_join.scene_id = scenes.id
- left join performers on performers_join.performer_id = performers.id
+ left join movies_scenes as movies_join on movies_join.scene_id = scenes.id
left join studios as studio on studio.id = scenes.studio_id
left join galleries as gallery on gallery.scene_id = scenes.id
left join scenes_tags as tags_join on tags_join.scene_id = scenes.id
- left join tags on tags_join.tag_id = tags.id
`
if q := findFilter.Q; q != nil && *q != "" {
searchColumns := []string{"scenes.title", "scenes.details", "scenes.path", "scenes.checksum", "scene_markers.title"}
- whereClauses = append(whereClauses, getSearch(searchColumns, *q))
+ clause, thisArgs := getSearchBinding(searchColumns, *q, false)
+ query.addWhere(clause)
+ query.addArg(thisArgs...)
}
if rating := sceneFilter.Rating; rating != nil {
clause, count := getIntCriterionWhereClause("scenes.rating", *sceneFilter.Rating)
- whereClauses = append(whereClauses, clause)
+ query.addWhere(clause)
if count == 1 {
- args = append(args, sceneFilter.Rating.Value)
+ query.addArg(sceneFilter.Rating.Value)
}
}
if oCounter := sceneFilter.OCounter; oCounter != nil {
clause, count := getIntCriterionWhereClause("scenes.o_counter", *sceneFilter.OCounter)
- whereClauses = append(whereClauses, clause)
+ query.addWhere(clause)
if count == 1 {
- args = append(args, sceneFilter.OCounter.Value)
+ query.addArg(sceneFilter.OCounter.Value)
}
}
if durationFilter := sceneFilter.Duration; durationFilter != nil {
clause, thisArgs := getDurationWhereClause(*durationFilter)
- whereClauses = append(whereClauses, clause)
- args = append(args, thisArgs...)
+ query.addWhere(clause)
+ query.addArg(thisArgs...)
}
if resolutionFilter := sceneFilter.Resolution; resolutionFilter != nil {
if resolution := resolutionFilter.String(); resolutionFilter.IsValid() {
switch resolution {
case "LOW":
- whereClauses = append(whereClauses, "(scenes.height >= 240 AND scenes.height < 480)")
+ query.addWhere("scenes.height < 480")
case "STANDARD":
- whereClauses = append(whereClauses, "(scenes.height >= 480 AND scenes.height < 720)")
+ query.addWhere("(scenes.height >= 480 AND scenes.height < 720)")
case "STANDARD_HD":
- whereClauses = append(whereClauses, "(scenes.height >= 720 AND scenes.height < 1080)")
+ query.addWhere("(scenes.height >= 720 AND scenes.height < 1080)")
case "FULL_HD":
- whereClauses = append(whereClauses, "(scenes.height >= 1080 AND scenes.height < 2160)")
+ query.addWhere("(scenes.height >= 1080 AND scenes.height < 2160)")
case "FOUR_K":
- whereClauses = append(whereClauses, "scenes.height >= 2160")
- default:
- whereClauses = append(whereClauses, "scenes.height < 240")
+ query.addWhere("scenes.height >= 2160")
}
}
}
if hasMarkersFilter := sceneFilter.HasMarkers; hasMarkersFilter != nil {
if strings.Compare(*hasMarkersFilter, "true") == 0 {
- havingClauses = append(havingClauses, "count(scene_markers.scene_id) > 0")
+ query.addHaving("count(scene_markers.scene_id) > 0")
} else {
- whereClauses = append(whereClauses, "scene_markers.id IS NULL")
+ query.addWhere("scene_markers.id IS NULL")
}
}
if isMissingFilter := sceneFilter.IsMissing; isMissingFilter != nil && *isMissingFilter != "" {
switch *isMissingFilter {
case "gallery":
- whereClauses = append(whereClauses, "gallery.scene_id IS NULL")
+ query.addWhere("gallery.scene_id IS NULL")
case "studio":
- whereClauses = append(whereClauses, "scenes.studio_id IS NULL")
+ query.addWhere("scenes.studio_id IS NULL")
+ case "movie":
+ query.addWhere("movies_join.scene_id IS NULL")
case "performers":
- whereClauses = append(whereClauses, "performers_join.scene_id IS NULL")
+ query.addWhere("performers_join.scene_id IS NULL")
case "date":
- whereClauses = append(whereClauses, "scenes.date IS \"\" OR scenes.date IS \"0001-01-01\"")
+ query.addWhere("scenes.date IS \"\" OR scenes.date IS \"0001-01-01\"")
+ case "tags":
+ query.addWhere("tags_join.scene_id IS NULL")
default:
- whereClauses = append(whereClauses, "scenes."+*isMissingFilter+" IS NULL")
+ query.addWhere("scenes." + *isMissingFilter + " IS NULL")
}
}
if tagsFilter := sceneFilter.Tags; tagsFilter != nil && len(tagsFilter.Value) > 0 {
for _, tagID := range tagsFilter.Value {
- args = append(args, tagID)
+ query.addArg(tagID)
}
+ query.body += " LEFT JOIN tags on tags_join.tag_id = tags.id"
whereClause, havingClause := getMultiCriterionClause("tags", "scenes_tags", "tag_id", tagsFilter)
- whereClauses = appendClause(whereClauses, whereClause)
- havingClauses = appendClause(havingClauses, havingClause)
+ query.addWhere(whereClause)
+ query.addHaving(havingClause)
}
if performersFilter := sceneFilter.Performers; performersFilter != nil && len(performersFilter.Value) > 0 {
for _, performerID := range performersFilter.Value {
- args = append(args, performerID)
+ query.addArg(performerID)
}
+ query.body += " LEFT JOIN performers ON performers_join.performer_id = performers.id"
whereClause, havingClause := getMultiCriterionClause("performers", "performers_scenes", "performer_id", performersFilter)
- whereClauses = appendClause(whereClauses, whereClause)
- havingClauses = appendClause(havingClauses, havingClause)
+ query.addWhere(whereClause)
+ query.addHaving(havingClause)
}
if studiosFilter := sceneFilter.Studios; studiosFilter != nil && len(studiosFilter.Value) > 0 {
for _, studioID := range studiosFilter.Value {
- args = append(args, studioID)
+ query.addArg(studioID)
}
whereClause, havingClause := getMultiCriterionClause("studio", "", "studio_id", studiosFilter)
- whereClauses = appendClause(whereClauses, whereClause)
- havingClauses = appendClause(havingClauses, havingClause)
+ query.addWhere(whereClause)
+ query.addHaving(havingClause)
}
- sortAndPagination := qb.getSceneSort(findFilter) + getPagination(findFilter)
- idsResult, countResult := executeFindQuery("scenes", body, args, sortAndPagination, whereClauses, havingClauses)
+ if moviesFilter := sceneFilter.Movies; moviesFilter != nil && len(moviesFilter.Value) > 0 {
+ for _, movieID := range moviesFilter.Value {
+ query.addArg(movieID)
+ }
+
+ query.body += " LEFT JOIN movies ON movies_join.movie_id = movies.id"
+ whereClause, havingClause := getMultiCriterionClause("movies", "movies_scenes", "movie_id", moviesFilter)
+ query.addWhere(whereClause)
+ query.addHaving(havingClause)
+ }
+
+ query.sortAndPagination = qb.getSceneSort(findFilter) + getPagination(findFilter)
+ idsResult, countResult := query.executeFind()
var scenes []*Scene
for _, id := range idsResult {
@@ -488,3 +536,16 @@ func (qb *SceneQueryBuilder) queryScenes(query string, args []interface{}, tx *s
return scenes, nil
}
+
+func (qb *SceneQueryBuilder) UpdateFormat(id int, format string, tx *sqlx.Tx) error {
+ ensureTx(tx)
+ _, err := tx.Exec(
+ `UPDATE scenes SET format = ? WHERE scenes.id = ? `,
+ format, id,
+ )
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
diff --git a/pkg/models/querybuilder_scene_marker.go b/pkg/models/querybuilder_scene_marker.go
index 5e0b683db..8cd9f0d69 100644
--- a/pkg/models/querybuilder_scene_marker.go
+++ b/pkg/models/querybuilder_scene_marker.go
@@ -7,12 +7,10 @@ import (
"strconv"
)
-const sceneMarkersForTagQuery = `
-SELECT scene_markers.* FROM scene_markers
+const countSceneMarkersForTagQuery = `
+SELECT scene_markers.id FROM scene_markers
LEFT JOIN scene_markers_tags as tags_join on tags_join.scene_marker_id = scene_markers.id
-LEFT JOIN tags on tags_join.tag_id = tags.id
-LEFT JOIN tags AS ptj ON ptj.id = scene_markers.primary_tag_id
-WHERE tags.id = ? OR ptj.id = ?
+WHERE tags_join.tag_id = ? OR scene_markers.primary_tag_id = ?
GROUP BY scene_markers.id
`
@@ -77,8 +75,7 @@ func (qb *SceneMarkerQueryBuilder) Find(id int) (*SceneMarker, error) {
func (qb *SceneMarkerQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]*SceneMarker, error) {
query := `
SELECT scene_markers.* FROM scene_markers
- JOIN scenes ON scenes.id = scene_markers.scene_id
- WHERE scenes.id = ?
+ WHERE scene_markers.scene_id = ?
GROUP BY scene_markers.id
ORDER BY scene_markers.seconds ASC
`
@@ -88,7 +85,7 @@ func (qb *SceneMarkerQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]*S
func (qb *SceneMarkerQueryBuilder) CountByTagID(tagID int) (int, error) {
args := []interface{}{tagID, tagID}
- return runCountQuery(buildCountQuery(sceneMarkersForTagQuery), args)
+ return runCountQuery(buildCountQuery(countSceneMarkersForTagQuery), args)
}
func (qb *SceneMarkerQueryBuilder) GetMarkerStrings(q *string, sort *string) ([]*MarkerStringsResultType, error) {
@@ -227,7 +224,9 @@ func (qb *SceneMarkerQueryBuilder) Query(sceneMarkerFilter *SceneMarkerFilterTyp
if q := findFilter.Q; q != nil && *q != "" {
searchColumns := []string{"scene_markers.title", "scene.title"}
- whereClauses = append(whereClauses, getSearch(searchColumns, *q))
+ clause, thisArgs := getSearchBinding(searchColumns, *q, false)
+ whereClauses = append(whereClauses, clause)
+ args = append(args, thisArgs...)
}
if tagID := sceneMarkerFilter.TagID; tagID != nil {
diff --git a/pkg/models/querybuilder_scene_marker_test.go b/pkg/models/querybuilder_scene_marker_test.go
new file mode 100644
index 000000000..ee8be5406
--- /dev/null
+++ b/pkg/models/querybuilder_scene_marker_test.go
@@ -0,0 +1,68 @@
+// +build integration
+
+package models_test
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+
+ "github.com/stashapp/stash/pkg/models"
+)
+
+func TestMarkerFindBySceneID(t *testing.T) {
+ mqb := models.NewSceneMarkerQueryBuilder()
+
+ sceneID := sceneIDs[sceneIdxWithMarker]
+ markers, err := mqb.FindBySceneID(sceneID, nil)
+
+ if err != nil {
+ t.Fatalf("Error finding markers: %s", err.Error())
+ }
+
+ assert.Len(t, markers, 1)
+ assert.Equal(t, markerIDs[markerIdxWithScene], markers[0].ID)
+
+ markers, err = mqb.FindBySceneID(0, nil)
+
+ if err != nil {
+ t.Fatalf("Error finding marker: %s", err.Error())
+ }
+
+ assert.Len(t, markers, 0)
+}
+
+func TestMarkerCountByTagID(t *testing.T) {
+ mqb := models.NewSceneMarkerQueryBuilder()
+
+ markerCount, err := mqb.CountByTagID(tagIDs[tagIdxWithPrimaryMarker])
+
+ if err != nil {
+ t.Fatalf("error calling CountByTagID: %s", err.Error())
+ }
+
+ assert.Equal(t, 1, markerCount)
+
+ markerCount, err = mqb.CountByTagID(tagIDs[tagIdxWithMarker])
+
+ if err != nil {
+ t.Fatalf("error calling CountByTagID: %s", err.Error())
+ }
+
+ assert.Equal(t, 1, markerCount)
+
+ markerCount, err = mqb.CountByTagID(0)
+
+ if err != nil {
+ t.Fatalf("error calling CountByTagID: %s", err.Error())
+ }
+
+ assert.Equal(t, 0, markerCount)
+}
+
+// TODO Update
+// TODO Destroy
+// TODO Find
+// TODO GetMarkerStrings
+// TODO Wall
+// TODO Query
diff --git a/pkg/models/querybuilder_scene_test.go b/pkg/models/querybuilder_scene_test.go
new file mode 100644
index 000000000..df6b0e817
--- /dev/null
+++ b/pkg/models/querybuilder_scene_test.go
@@ -0,0 +1,869 @@
+// +build integration
+
+package models_test
+
+import (
+ "database/sql"
+ "strconv"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+
+ "github.com/stashapp/stash/pkg/models"
+)
+
+func TestSceneFind(t *testing.T) {
+ // assume that the first scene is sceneWithGalleryPath
+ sqb := models.NewSceneQueryBuilder()
+
+ const sceneIdx = 0
+ sceneID := sceneIDs[sceneIdx]
+ scene, err := sqb.Find(sceneID)
+
+ if err != nil {
+ t.Fatalf("Error finding scene: %s", err.Error())
+ }
+
+ assert.Equal(t, getSceneStringValue(sceneIdx, "Path"), scene.Path)
+
+ sceneID = 0
+ scene, err = sqb.Find(sceneID)
+
+ if err != nil {
+ t.Fatalf("Error finding scene: %s", err.Error())
+ }
+
+ assert.Nil(t, scene)
+}
+
+func TestSceneFindByPath(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+
+ const sceneIdx = 1
+ scenePath := getSceneStringValue(sceneIdx, "Path")
+ scene, err := sqb.FindByPath(scenePath)
+
+ if err != nil {
+ t.Fatalf("Error finding scene: %s", err.Error())
+ }
+
+ assert.Equal(t, sceneIDs[sceneIdx], scene.ID)
+ assert.Equal(t, scenePath, scene.Path)
+
+ scenePath = "not exist"
+ scene, err = sqb.FindByPath(scenePath)
+
+ if err != nil {
+ t.Fatalf("Error finding scene: %s", err.Error())
+ }
+
+ assert.Nil(t, scene)
+}
+
+func TestSceneCountByPerformerID(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+ count, err := sqb.CountByPerformerID(performerIDs[performerIdxWithScene])
+
+ if err != nil {
+ t.Fatalf("Error counting scenes: %s", err.Error())
+ }
+
+ assert.Equal(t, 1, count)
+
+ count, err = sqb.CountByPerformerID(0)
+
+ if err != nil {
+ t.Fatalf("Error counting scenes: %s", err.Error())
+ }
+
+ assert.Equal(t, 0, count)
+}
+
+func TestSceneWall(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+
+ const sceneIdx = 2
+ wallQuery := getSceneStringValue(sceneIdx, "Details")
+ scenes, err := sqb.Wall(&wallQuery)
+
+ if err != nil {
+ t.Fatalf("Error finding scenes: %s", err.Error())
+ }
+
+ assert.Len(t, scenes, 1)
+ scene := scenes[0]
+ assert.Equal(t, sceneIDs[sceneIdx], scene.ID)
+ assert.Equal(t, getSceneStringValue(sceneIdx, "Path"), scene.Path)
+
+ wallQuery = "not exist"
+ scenes, err = sqb.Wall(&wallQuery)
+
+ if err != nil {
+ t.Fatalf("Error finding scene: %s", err.Error())
+ }
+
+ assert.Len(t, scenes, 0)
+}
+
+func TestSceneQueryQ(t *testing.T) {
+ const sceneIdx = 2
+
+ q := getSceneStringValue(sceneIdx, titleField)
+
+ sqb := models.NewSceneQueryBuilder()
+
+ sceneQueryQ(t, sqb, q, sceneIdx)
+}
+
+func sceneQueryQ(t *testing.T, sqb models.SceneQueryBuilder, q string, expectedSceneIdx int) {
+ filter := models.FindFilterType{
+ Q: &q,
+ }
+ scenes, _ := sqb.Query(nil, &filter)
+
+ assert.Len(t, scenes, 1)
+ scene := scenes[0]
+ assert.Equal(t, sceneIDs[expectedSceneIdx], scene.ID)
+
+ // no Q should return all results
+ filter.Q = nil
+ scenes, _ = sqb.Query(nil, &filter)
+
+ assert.Len(t, scenes, totalScenes)
+}
+
+func TestSceneQueryRating(t *testing.T) {
+ const rating = 3
+ ratingCriterion := models.IntCriterionInput{
+ Value: rating,
+ Modifier: models.CriterionModifierEquals,
+ }
+
+ verifyScenesRating(t, ratingCriterion)
+
+ ratingCriterion.Modifier = models.CriterionModifierNotEquals
+ verifyScenesRating(t, ratingCriterion)
+
+ ratingCriterion.Modifier = models.CriterionModifierGreaterThan
+ verifyScenesRating(t, ratingCriterion)
+
+ ratingCriterion.Modifier = models.CriterionModifierLessThan
+ verifyScenesRating(t, ratingCriterion)
+
+ ratingCriterion.Modifier = models.CriterionModifierIsNull
+ verifyScenesRating(t, ratingCriterion)
+
+ ratingCriterion.Modifier = models.CriterionModifierNotNull
+ verifyScenesRating(t, ratingCriterion)
+}
+
+func verifyScenesRating(t *testing.T, ratingCriterion models.IntCriterionInput) {
+ sqb := models.NewSceneQueryBuilder()
+ sceneFilter := models.SceneFilterType{
+ Rating: &ratingCriterion,
+ }
+
+ scenes, _ := sqb.Query(&sceneFilter, nil)
+
+ for _, scene := range scenes {
+ verifyInt64(t, scene.Rating, ratingCriterion)
+ }
+}
+
+func verifyInt64(t *testing.T, value sql.NullInt64, criterion models.IntCriterionInput) {
+ assert := assert.New(t)
+ if criterion.Modifier == models.CriterionModifierIsNull {
+ assert.False(value.Valid, "expect is null values to be null")
+ }
+ if criterion.Modifier == models.CriterionModifierNotNull {
+ assert.True(value.Valid, "expect is null values to be null")
+ }
+ if criterion.Modifier == models.CriterionModifierEquals {
+ assert.Equal(int64(criterion.Value), value.Int64)
+ }
+ if criterion.Modifier == models.CriterionModifierNotEquals {
+ assert.NotEqual(int64(criterion.Value), value.Int64)
+ }
+ if criterion.Modifier == models.CriterionModifierGreaterThan {
+ assert.True(value.Int64 > int64(criterion.Value))
+ }
+ if criterion.Modifier == models.CriterionModifierLessThan {
+ assert.True(value.Int64 < int64(criterion.Value))
+ }
+}
+
+func TestSceneQueryOCounter(t *testing.T) {
+ const oCounter = 1
+ oCounterCriterion := models.IntCriterionInput{
+ Value: oCounter,
+ Modifier: models.CriterionModifierEquals,
+ }
+
+ verifyScenesOCounter(t, oCounterCriterion)
+
+ oCounterCriterion.Modifier = models.CriterionModifierNotEquals
+ verifyScenesOCounter(t, oCounterCriterion)
+
+ oCounterCriterion.Modifier = models.CriterionModifierGreaterThan
+ verifyScenesOCounter(t, oCounterCriterion)
+
+ oCounterCriterion.Modifier = models.CriterionModifierLessThan
+ verifyScenesOCounter(t, oCounterCriterion)
+}
+
+func verifyScenesOCounter(t *testing.T, oCounterCriterion models.IntCriterionInput) {
+ sqb := models.NewSceneQueryBuilder()
+ sceneFilter := models.SceneFilterType{
+ OCounter: &oCounterCriterion,
+ }
+
+ scenes, _ := sqb.Query(&sceneFilter, nil)
+
+ for _, scene := range scenes {
+ verifyInt(t, scene.OCounter, oCounterCriterion)
+ }
+}
+
+func verifyInt(t *testing.T, value int, criterion models.IntCriterionInput) {
+ assert := assert.New(t)
+ if criterion.Modifier == models.CriterionModifierEquals {
+ assert.Equal(criterion.Value, value)
+ }
+ if criterion.Modifier == models.CriterionModifierNotEquals {
+ assert.NotEqual(criterion.Value, value)
+ }
+ if criterion.Modifier == models.CriterionModifierGreaterThan {
+ assert.True(value > criterion.Value)
+ }
+ if criterion.Modifier == models.CriterionModifierLessThan {
+ assert.True(value < criterion.Value)
+ }
+}
+
+func TestSceneQueryDuration(t *testing.T) {
+ duration := 200.432
+
+ durationCriterion := models.IntCriterionInput{
+ Value: int(duration),
+ Modifier: models.CriterionModifierEquals,
+ }
+ verifyScenesDuration(t, durationCriterion)
+
+ durationCriterion.Modifier = models.CriterionModifierNotEquals
+ verifyScenesDuration(t, durationCriterion)
+
+ durationCriterion.Modifier = models.CriterionModifierGreaterThan
+ verifyScenesDuration(t, durationCriterion)
+
+ durationCriterion.Modifier = models.CriterionModifierLessThan
+ verifyScenesDuration(t, durationCriterion)
+
+ durationCriterion.Modifier = models.CriterionModifierIsNull
+ verifyScenesDuration(t, durationCriterion)
+
+ durationCriterion.Modifier = models.CriterionModifierNotNull
+ verifyScenesDuration(t, durationCriterion)
+}
+
+func verifyScenesDuration(t *testing.T, durationCriterion models.IntCriterionInput) {
+ sqb := models.NewSceneQueryBuilder()
+ sceneFilter := models.SceneFilterType{
+ Duration: &durationCriterion,
+ }
+
+ scenes, _ := sqb.Query(&sceneFilter, nil)
+
+ for _, scene := range scenes {
+ if durationCriterion.Modifier == models.CriterionModifierEquals {
+ assert.True(t, scene.Duration.Float64 >= float64(durationCriterion.Value) && scene.Duration.Float64 < float64(durationCriterion.Value+1))
+ } else if durationCriterion.Modifier == models.CriterionModifierNotEquals {
+ assert.True(t, scene.Duration.Float64 < float64(durationCriterion.Value) || scene.Duration.Float64 >= float64(durationCriterion.Value+1))
+ } else {
+ verifyFloat64(t, scene.Duration, durationCriterion)
+ }
+ }
+}
+
+func verifyFloat64(t *testing.T, value sql.NullFloat64, criterion models.IntCriterionInput) {
+ assert := assert.New(t)
+ if criterion.Modifier == models.CriterionModifierIsNull {
+ assert.False(value.Valid, "expect is null values to be null")
+ }
+ if criterion.Modifier == models.CriterionModifierNotNull {
+ assert.True(value.Valid, "expect is null values to be null")
+ }
+ if criterion.Modifier == models.CriterionModifierEquals {
+ assert.Equal(float64(criterion.Value), value.Float64)
+ }
+ if criterion.Modifier == models.CriterionModifierNotEquals {
+ assert.NotEqual(float64(criterion.Value), value.Float64)
+ }
+ if criterion.Modifier == models.CriterionModifierGreaterThan {
+ assert.True(value.Float64 > float64(criterion.Value))
+ }
+ if criterion.Modifier == models.CriterionModifierLessThan {
+ assert.True(value.Float64 < float64(criterion.Value))
+ }
+}
+
+func TestSceneQueryResolution(t *testing.T) {
+ verifyScenesResolution(t, models.ResolutionEnumLow)
+ verifyScenesResolution(t, models.ResolutionEnumStandard)
+ verifyScenesResolution(t, models.ResolutionEnumStandardHd)
+ verifyScenesResolution(t, models.ResolutionEnumFullHd)
+ verifyScenesResolution(t, models.ResolutionEnumFourK)
+ verifyScenesResolution(t, models.ResolutionEnum("unknown"))
+}
+
+func verifyScenesResolution(t *testing.T, resolution models.ResolutionEnum) {
+ sqb := models.NewSceneQueryBuilder()
+ sceneFilter := models.SceneFilterType{
+ Resolution: &resolution,
+ }
+
+ scenes, _ := sqb.Query(&sceneFilter, nil)
+
+ for _, scene := range scenes {
+ verifySceneResolution(t, scene.Height, resolution)
+ }
+}
+
+func verifySceneResolution(t *testing.T, height sql.NullInt64, resolution models.ResolutionEnum) {
+ assert := assert.New(t)
+ h := height.Int64
+
+ switch resolution {
+ case models.ResolutionEnumLow:
+ assert.True(h < 480)
+ case models.ResolutionEnumStandard:
+ assert.True(h >= 480 && h < 720)
+ case models.ResolutionEnumStandardHd:
+ assert.True(h >= 720 && h < 1080)
+ case models.ResolutionEnumFullHd:
+ assert.True(h >= 1080 && h < 2160)
+ case models.ResolutionEnumFourK:
+ assert.True(h >= 2160)
+ }
+}
+
+func TestSceneQueryHasMarkers(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+ hasMarkers := "true"
+ sceneFilter := models.SceneFilterType{
+ HasMarkers: &hasMarkers,
+ }
+
+ q := getSceneStringValue(sceneIdxWithMarker, titleField)
+ findFilter := models.FindFilterType{
+ Q: &q,
+ }
+
+ scenes, _ := sqb.Query(&sceneFilter, &findFilter)
+
+ assert.Len(t, scenes, 1)
+ assert.Equal(t, sceneIDs[sceneIdxWithMarker], scenes[0].ID)
+
+ hasMarkers = "false"
+ scenes, _ = sqb.Query(&sceneFilter, &findFilter)
+ assert.Len(t, scenes, 0)
+
+ findFilter.Q = nil
+ scenes, _ = sqb.Query(&sceneFilter, &findFilter)
+
+ assert.NotEqual(t, 0, len(scenes))
+
+	// ensure none of the ids equal the one with the marker
+ for _, scene := range scenes {
+ assert.NotEqual(t, sceneIDs[sceneIdxWithMarker], scene.ID)
+ }
+}
+
+func TestSceneQueryIsMissingGallery(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+ isMissing := "gallery"
+ sceneFilter := models.SceneFilterType{
+ IsMissing: &isMissing,
+ }
+
+ q := getSceneStringValue(sceneIdxWithGallery, titleField)
+ findFilter := models.FindFilterType{
+ Q: &q,
+ }
+
+ scenes, _ := sqb.Query(&sceneFilter, &findFilter)
+
+ assert.Len(t, scenes, 0)
+
+ findFilter.Q = nil
+ scenes, _ = sqb.Query(&sceneFilter, &findFilter)
+
+	// ensure none of the ids equal the one with the gallery
+ for _, scene := range scenes {
+ assert.NotEqual(t, sceneIDs[sceneIdxWithGallery], scene.ID)
+ }
+}
+
+func TestSceneQueryIsMissingStudio(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+ isMissing := "studio"
+ sceneFilter := models.SceneFilterType{
+ IsMissing: &isMissing,
+ }
+
+ q := getSceneStringValue(sceneIdxWithStudio, titleField)
+ findFilter := models.FindFilterType{
+ Q: &q,
+ }
+
+ scenes, _ := sqb.Query(&sceneFilter, &findFilter)
+
+ assert.Len(t, scenes, 0)
+
+ findFilter.Q = nil
+ scenes, _ = sqb.Query(&sceneFilter, &findFilter)
+
+	// ensure none of the ids equal the one with the studio
+ for _, scene := range scenes {
+ assert.NotEqual(t, sceneIDs[sceneIdxWithStudio], scene.ID)
+ }
+}
+
+func TestSceneQueryIsMissingMovies(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+ isMissing := "movie"
+ sceneFilter := models.SceneFilterType{
+ IsMissing: &isMissing,
+ }
+
+ q := getSceneStringValue(sceneIdxWithMovie, titleField)
+ findFilter := models.FindFilterType{
+ Q: &q,
+ }
+
+ scenes, _ := sqb.Query(&sceneFilter, &findFilter)
+
+ assert.Len(t, scenes, 0)
+
+ findFilter.Q = nil
+ scenes, _ = sqb.Query(&sceneFilter, &findFilter)
+
+	// ensure none of the ids equal the one with movies
+ for _, scene := range scenes {
+ assert.NotEqual(t, sceneIDs[sceneIdxWithMovie], scene.ID)
+ }
+}
+
+func TestSceneQueryIsMissingPerformers(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+ isMissing := "performers"
+ sceneFilter := models.SceneFilterType{
+ IsMissing: &isMissing,
+ }
+
+ q := getSceneStringValue(sceneIdxWithPerformer, titleField)
+ findFilter := models.FindFilterType{
+ Q: &q,
+ }
+
+ scenes, _ := sqb.Query(&sceneFilter, &findFilter)
+
+ assert.Len(t, scenes, 0)
+
+ findFilter.Q = nil
+ scenes, _ = sqb.Query(&sceneFilter, &findFilter)
+
+ assert.True(t, len(scenes) > 0)
+
+	// ensure none of the ids equal the one with performers
+ for _, scene := range scenes {
+ assert.NotEqual(t, sceneIDs[sceneIdxWithPerformer], scene.ID)
+ }
+}
+
+func TestSceneQueryIsMissingDate(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+ isMissing := "date"
+ sceneFilter := models.SceneFilterType{
+ IsMissing: &isMissing,
+ }
+
+ scenes, _ := sqb.Query(&sceneFilter, nil)
+
+ assert.True(t, len(scenes) > 0)
+
+ // ensure date is null, empty or "0001-01-01"
+ for _, scene := range scenes {
+ assert.True(t, !scene.Date.Valid || scene.Date.String == "" || scene.Date.String == "0001-01-01")
+ }
+}
+
+func TestSceneQueryIsMissingTags(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+ isMissing := "tags"
+ sceneFilter := models.SceneFilterType{
+ IsMissing: &isMissing,
+ }
+
+ q := getSceneStringValue(sceneIdxWithTwoTags, titleField)
+ findFilter := models.FindFilterType{
+ Q: &q,
+ }
+
+ scenes, _ := sqb.Query(&sceneFilter, &findFilter)
+
+ assert.Len(t, scenes, 0)
+
+ findFilter.Q = nil
+ scenes, _ = sqb.Query(&sceneFilter, &findFilter)
+
+ assert.True(t, len(scenes) > 0)
+}
+
+func TestSceneQueryIsMissingRating(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+ isMissing := "rating"
+ sceneFilter := models.SceneFilterType{
+ IsMissing: &isMissing,
+ }
+
+ scenes, _ := sqb.Query(&sceneFilter, nil)
+
+ assert.True(t, len(scenes) > 0)
+
+	// ensure rating is null
+ for _, scene := range scenes {
+ assert.True(t, !scene.Rating.Valid)
+ }
+}
+
+func TestSceneQueryPerformers(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+ performerCriterion := models.MultiCriterionInput{
+ Value: []string{
+ strconv.Itoa(performerIDs[performerIdxWithScene]),
+ strconv.Itoa(performerIDs[performerIdx1WithScene]),
+ },
+ Modifier: models.CriterionModifierIncludes,
+ }
+
+ sceneFilter := models.SceneFilterType{
+ Performers: &performerCriterion,
+ }
+
+ scenes, _ := sqb.Query(&sceneFilter, nil)
+
+ assert.Len(t, scenes, 2)
+
+ // ensure ids are correct
+ for _, scene := range scenes {
+ assert.True(t, scene.ID == sceneIDs[sceneIdxWithPerformer] || scene.ID == sceneIDs[sceneIdxWithTwoPerformers])
+ }
+
+ performerCriterion = models.MultiCriterionInput{
+ Value: []string{
+ strconv.Itoa(performerIDs[performerIdx1WithScene]),
+ strconv.Itoa(performerIDs[performerIdx2WithScene]),
+ },
+ Modifier: models.CriterionModifierIncludesAll,
+ }
+
+ scenes, _ = sqb.Query(&sceneFilter, nil)
+
+ assert.Len(t, scenes, 1)
+ assert.Equal(t, sceneIDs[sceneIdxWithTwoPerformers], scenes[0].ID)
+
+ performerCriterion = models.MultiCriterionInput{
+ Value: []string{
+ strconv.Itoa(performerIDs[performerIdx1WithScene]),
+ },
+ Modifier: models.CriterionModifierExcludes,
+ }
+
+ q := getSceneStringValue(sceneIdxWithTwoPerformers, titleField)
+ findFilter := models.FindFilterType{
+ Q: &q,
+ }
+
+ scenes, _ = sqb.Query(&sceneFilter, &findFilter)
+ assert.Len(t, scenes, 0)
+}
+
+func TestSceneQueryTags(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+ tagCriterion := models.MultiCriterionInput{
+ Value: []string{
+ strconv.Itoa(tagIDs[tagIdxWithScene]),
+ strconv.Itoa(tagIDs[tagIdx1WithScene]),
+ },
+ Modifier: models.CriterionModifierIncludes,
+ }
+
+ sceneFilter := models.SceneFilterType{
+ Tags: &tagCriterion,
+ }
+
+ scenes, _ := sqb.Query(&sceneFilter, nil)
+
+ assert.Len(t, scenes, 2)
+
+ // ensure ids are correct
+ for _, scene := range scenes {
+ assert.True(t, scene.ID == sceneIDs[sceneIdxWithTag] || scene.ID == sceneIDs[sceneIdxWithTwoTags])
+ }
+
+ tagCriterion = models.MultiCriterionInput{
+ Value: []string{
+ strconv.Itoa(tagIDs[tagIdx1WithScene]),
+ strconv.Itoa(tagIDs[tagIdx2WithScene]),
+ },
+ Modifier: models.CriterionModifierIncludesAll,
+ }
+
+ scenes, _ = sqb.Query(&sceneFilter, nil)
+
+ assert.Len(t, scenes, 1)
+ assert.Equal(t, sceneIDs[sceneIdxWithTwoTags], scenes[0].ID)
+
+ tagCriterion = models.MultiCriterionInput{
+ Value: []string{
+ strconv.Itoa(tagIDs[tagIdx1WithScene]),
+ },
+ Modifier: models.CriterionModifierExcludes,
+ }
+
+ q := getSceneStringValue(sceneIdxWithTwoTags, titleField)
+ findFilter := models.FindFilterType{
+ Q: &q,
+ }
+
+ scenes, _ = sqb.Query(&sceneFilter, &findFilter)
+ assert.Len(t, scenes, 0)
+}
+
+func TestSceneQueryStudio(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+ studioCriterion := models.MultiCriterionInput{
+ Value: []string{
+ strconv.Itoa(studioIDs[studioIdxWithScene]),
+ },
+ Modifier: models.CriterionModifierIncludes,
+ }
+
+ sceneFilter := models.SceneFilterType{
+ Studios: &studioCriterion,
+ }
+
+ scenes, _ := sqb.Query(&sceneFilter, nil)
+
+ assert.Len(t, scenes, 1)
+
+ // ensure id is correct
+ assert.Equal(t, sceneIDs[sceneIdxWithStudio], scenes[0].ID)
+
+ studioCriterion = models.MultiCriterionInput{
+ Value: []string{
+ strconv.Itoa(studioIDs[studioIdxWithScene]),
+ },
+ Modifier: models.CriterionModifierExcludes,
+ }
+
+ q := getSceneStringValue(sceneIdxWithStudio, titleField)
+ findFilter := models.FindFilterType{
+ Q: &q,
+ }
+
+ scenes, _ = sqb.Query(&sceneFilter, &findFilter)
+ assert.Len(t, scenes, 0)
+}
+
+func TestSceneQuerySorting(t *testing.T) {
+ sort := titleField
+ direction := models.SortDirectionEnumAsc
+ findFilter := models.FindFilterType{
+ Sort: &sort,
+ Direction: &direction,
+ }
+
+ sqb := models.NewSceneQueryBuilder()
+ scenes, _ := sqb.Query(nil, &findFilter)
+
+ // scenes should be in same order as indexes
+ firstScene := scenes[0]
+ lastScene := scenes[len(scenes)-1]
+
+ assert.Equal(t, sceneIDs[0], firstScene.ID)
+ assert.Equal(t, sceneIDs[len(sceneIDs)-1], lastScene.ID)
+
+ // sort in descending order
+ direction = models.SortDirectionEnumDesc
+
+ scenes, _ = sqb.Query(nil, &findFilter)
+ firstScene = scenes[0]
+ lastScene = scenes[len(scenes)-1]
+
+ assert.Equal(t, sceneIDs[len(sceneIDs)-1], firstScene.ID)
+ assert.Equal(t, sceneIDs[0], lastScene.ID)
+}
+
+func TestSceneQueryPagination(t *testing.T) {
+ perPage := 1
+ findFilter := models.FindFilterType{
+ PerPage: &perPage,
+ }
+
+ sqb := models.NewSceneQueryBuilder()
+ scenes, _ := sqb.Query(nil, &findFilter)
+
+ assert.Len(t, scenes, 1)
+
+ firstID := scenes[0].ID
+
+ page := 2
+ findFilter.Page = &page
+ scenes, _ = sqb.Query(nil, &findFilter)
+
+ assert.Len(t, scenes, 1)
+ secondID := scenes[0].ID
+ assert.NotEqual(t, firstID, secondID)
+
+ perPage = 2
+ page = 1
+
+ scenes, _ = sqb.Query(nil, &findFilter)
+ assert.Len(t, scenes, 2)
+ assert.Equal(t, firstID, scenes[0].ID)
+ assert.Equal(t, secondID, scenes[1].ID)
+}
+
+func TestSceneCountByTagID(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+
+ sceneCount, err := sqb.CountByTagID(tagIDs[tagIdxWithScene])
+
+ if err != nil {
+ t.Fatalf("error calling CountByTagID: %s", err.Error())
+ }
+
+ assert.Equal(t, 1, sceneCount)
+
+ sceneCount, err = sqb.CountByTagID(0)
+
+ if err != nil {
+ t.Fatalf("error calling CountByTagID: %s", err.Error())
+ }
+
+ assert.Equal(t, 0, sceneCount)
+}
+
+func TestSceneCountByMovieID(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+
+ sceneCount, err := sqb.CountByMovieID(movieIDs[movieIdxWithScene])
+
+ if err != nil {
+ t.Fatalf("error calling CountByMovieID: %s", err.Error())
+ }
+
+ assert.Equal(t, 1, sceneCount)
+
+ sceneCount, err = sqb.CountByMovieID(0)
+
+ if err != nil {
+ t.Fatalf("error calling CountByMovieID: %s", err.Error())
+ }
+
+ assert.Equal(t, 0, sceneCount)
+}
+
+func TestSceneCountByStudioID(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+
+ sceneCount, err := sqb.CountByStudioID(studioIDs[studioIdxWithScene])
+
+ if err != nil {
+ t.Fatalf("error calling CountByStudioID: %s", err.Error())
+ }
+
+ assert.Equal(t, 1, sceneCount)
+
+ sceneCount, err = sqb.CountByStudioID(0)
+
+ if err != nil {
+ t.Fatalf("error calling CountByStudioID: %s", err.Error())
+ }
+
+ assert.Equal(t, 0, sceneCount)
+}
+
+func TestFindByMovieID(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+
+ scenes, err := sqb.FindByMovieID(movieIDs[movieIdxWithScene])
+
+ if err != nil {
+ t.Fatalf("error calling FindByMovieID: %s", err.Error())
+ }
+
+ assert.Len(t, scenes, 1)
+ assert.Equal(t, sceneIDs[sceneIdxWithMovie], scenes[0].ID)
+
+ scenes, err = sqb.FindByMovieID(0)
+
+ if err != nil {
+ t.Fatalf("error calling FindByMovieID: %s", err.Error())
+ }
+
+ assert.Len(t, scenes, 0)
+}
+
+func TestFindByPerformerID(t *testing.T) {
+ sqb := models.NewSceneQueryBuilder()
+
+ scenes, err := sqb.FindByPerformerID(performerIDs[performerIdxWithScene])
+
+ if err != nil {
+ t.Fatalf("error calling FindByPerformerID: %s", err.Error())
+ }
+
+ assert.Len(t, scenes, 1)
+ assert.Equal(t, sceneIDs[sceneIdxWithPerformer], scenes[0].ID)
+
+ scenes, err = sqb.FindByPerformerID(0)
+
+ if err != nil {
+ t.Fatalf("error calling FindByPerformerID: %s", err.Error())
+ }
+
+ assert.Len(t, scenes, 0)
+}
+
+func TestFindByStudioID(t *testing.T) {
+	sqb := models.NewSceneQueryBuilder()
+
+	// query by a studio ID, not a performer ID - the two slices only happen
+	// to share values because each table's autoincrement starts at 1
+	scenes, err := sqb.FindByStudioID(studioIDs[studioIdxWithScene])
+
+	if err != nil {
+		t.Fatalf("error calling FindByStudioID: %s", err.Error())
+	}
+
+	assert.Len(t, scenes, 1)
+	assert.Equal(t, sceneIDs[sceneIdxWithStudio], scenes[0].ID)
+
+	scenes, err = sqb.FindByStudioID(0)
+
+	if err != nil {
+		t.Fatalf("error calling FindByStudioID: %s", err.Error())
+	}
+
+	assert.Len(t, scenes, 0)
+}
+
+// TODO Update
+// TODO IncrementOCounter
+// TODO DecrementOCounter
+// TODO ResetOCounter
+// TODO Destroy
+// TODO FindByChecksum
+// TODO Count
+// TODO SizeCount
+// TODO All
diff --git a/pkg/models/querybuilder_sql.go b/pkg/models/querybuilder_sql.go
index e20645b7c..2944462c2 100644
--- a/pkg/models/querybuilder_sql.go
+++ b/pkg/models/querybuilder_sql.go
@@ -29,11 +29,19 @@ func (qb queryBuilder) executeFind() ([]int, int) {
}
func (qb *queryBuilder) addWhere(clauses ...string) {
-	qb.whereClauses = append(qb.whereClauses, clauses...)
+	for _, clause := range clauses {
+		if len(clause) > 0 {
+			qb.whereClauses = append(qb.whereClauses, clause)
+		}
+	}
}
func (qb *queryBuilder) addHaving(clauses ...string) {
- qb.havingClauses = append(qb.havingClauses, clauses...)
+ for _, clause := range clauses {
+ if len(clause) > 0 {
+ qb.havingClauses = append(qb.havingClauses, clause)
+ }
+ }
}
func (qb *queryBuilder) addArg(args ...interface{}) {
@@ -118,7 +126,7 @@ func getSort(sort string, direction string, tableName string) string {
colName := getColumn(tableName, sort)
var additional string
if tableName == "scenes" {
- additional = ", bitrate DESC, framerate DESC, rating DESC, duration DESC"
+ additional = ", bitrate DESC, framerate DESC, scenes.rating DESC, scenes.duration DESC"
} else if tableName == "scene_markers" {
additional = ", scene_markers.scene_id ASC, scene_markers.seconds ASC"
}
@@ -137,29 +145,6 @@ func getRandomSort(tableName string, direction string, seed float64) string {
return " ORDER BY " + "(substr(" + colName + " * " + randomSortString + ", length(" + colName + ") + 2))" + " " + direction
}
-func getSearch(columns []string, q string) string {
- // TODO - susceptible to SQL injection
- var likeClauses []string
- queryWords := strings.Split(q, " ")
- trimmedQuery := strings.Trim(q, "\"")
- if trimmedQuery == q {
- // Search for any word
- for _, word := range queryWords {
- for _, column := range columns {
- likeClauses = append(likeClauses, column+" LIKE '%"+word+"%'")
- }
- }
- } else {
- // Search the exact query
- for _, column := range columns {
- likeClauses = append(likeClauses, column+" LIKE '%"+trimmedQuery+"%'")
- }
- }
- likes := strings.Join(likeClauses, " OR ")
-
- return "(" + likes + ")"
-}
-
func getSearchBinding(columns []string, q string, not bool) (string, []interface{}) {
var likeClauses []string
var args []interface{}
@@ -281,6 +266,18 @@ func runCountQuery(query string, args []interface{}) (int, error) {
return result.Int, nil
}
+func runSumQuery(query string, args []interface{}) (uint64, error) {
+ // Perform query and fetch result
+ result := struct {
+ Uint uint64 `db:"sum"`
+ }{0}
+ if err := database.DB.Get(&result, query, args...); err != nil && err != sql.ErrNoRows {
+ return 0, err
+ }
+
+ return result.Uint, nil
+}
+
func executeFindQuery(tableName string, body string, args []interface{}, sortAndPagination string, whereClauses []string, havingClauses []string) ([]int, int) {
if len(whereClauses) > 0 {
body = body + " WHERE " + strings.Join(whereClauses, " AND ") // TODO handle AND or OR
diff --git a/pkg/models/querybuilder_studio.go b/pkg/models/querybuilder_studio.go
index 1bf501a43..2b65bba2a 100644
--- a/pkg/models/querybuilder_studio.go
+++ b/pkg/models/querybuilder_studio.go
@@ -79,8 +79,12 @@ func (qb *StudioQueryBuilder) FindBySceneID(sceneID int) (*Studio, error) {
return qb.queryStudio(query, args, nil)
}
-func (qb *StudioQueryBuilder) FindByName(name string, tx *sqlx.Tx) (*Studio, error) {
- query := "SELECT * FROM studios WHERE name = ? LIMIT 1"
+func (qb *StudioQueryBuilder) FindByName(name string, tx *sqlx.Tx, nocase bool) (*Studio, error) {
+ query := "SELECT * FROM studios WHERE name = ?"
+ if nocase {
+ query += " COLLATE NOCASE"
+ }
+ query += " LIMIT 1"
args := []interface{}{name}
return qb.queryStudio(query, args, tx)
}
@@ -93,6 +97,10 @@ func (qb *StudioQueryBuilder) All() ([]*Studio, error) {
return qb.queryStudios(selectAll("studios")+qb.getStudioSort(nil), nil, nil)
}
+func (qb *StudioQueryBuilder) AllSlim() ([]*Studio, error) {
+ return qb.queryStudios("SELECT studios.id, studios.name FROM studios "+qb.getStudioSort(nil), nil, nil)
+}
+
func (qb *StudioQueryBuilder) Query(findFilter *FindFilterType) ([]*Studio, int) {
if findFilter == nil {
findFilter = &FindFilterType{}
@@ -108,7 +116,9 @@ func (qb *StudioQueryBuilder) Query(findFilter *FindFilterType) ([]*Studio, int)
if q := findFilter.Q; q != nil && *q != "" {
searchColumns := []string{"studios.name"}
- whereClauses = append(whereClauses, getSearch(searchColumns, *q))
+ clause, thisArgs := getSearchBinding(searchColumns, *q, false)
+ whereClauses = append(whereClauses, clause)
+ args = append(args, thisArgs...)
}
sortAndPagination := qb.getStudioSort(findFilter) + getPagination(findFilter)
diff --git a/pkg/models/querybuilder_studio_test.go b/pkg/models/querybuilder_studio_test.go
new file mode 100644
index 000000000..5f7460844
--- /dev/null
+++ b/pkg/models/querybuilder_studio_test.go
@@ -0,0 +1,50 @@
+// +build integration
+
+package models_test
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/stashapp/stash/pkg/models"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestStudioFindByName(t *testing.T) {
+
+ sqb := models.NewStudioQueryBuilder()
+
+ name := studioNames[studioIdxWithScene] // find a studio by name
+
+ studio, err := sqb.FindByName(name, nil, false)
+
+ if err != nil {
+ t.Fatalf("Error finding studios: %s", err.Error())
+ }
+
+ assert.Equal(t, studioNames[studioIdxWithScene], studio.Name.String)
+
+ name = studioNames[studioIdxWithDupName] // find a studio by name nocase
+
+ studio, err = sqb.FindByName(name, nil, true)
+
+ if err != nil {
+ t.Fatalf("Error finding studios: %s", err.Error())
+ }
+ // studioIdxWithDupName and studioIdxWithScene should have similar names ( only diff should be Name vs NaMe)
+ //studio.Name should match with studioIdxWithScene since its ID is before studioIdxWithDupName
+ assert.Equal(t, studioNames[studioIdxWithScene], studio.Name.String)
+ //studio.Name should match with studioIdxWithDupName if the check is not case sensitive
+ assert.Equal(t, strings.ToLower(studioNames[studioIdxWithDupName]), strings.ToLower(studio.Name.String))
+
+}
+
+// TODO Create
+// TODO Update
+// TODO Destroy
+// TODO Find
+// TODO FindBySceneID
+// TODO Count
+// TODO All
+// TODO AllSlim
+// TODO Query
diff --git a/pkg/models/querybuilder_tag.go b/pkg/models/querybuilder_tag.go
index 91c376d33..35c64c323 100644
--- a/pkg/models/querybuilder_tag.go
+++ b/pkg/models/querybuilder_tag.go
@@ -33,6 +33,7 @@ func (qb *TagQueryBuilder) Create(newTag Tag, tx *sqlx.Tx) (*Tag, error) {
if err := tx.Get(&newTag, `SELECT * FROM tags WHERE id = ? LIMIT 1`, studioID); err != nil {
return nil, err
}
+
return &newTag, nil
}
@@ -90,8 +91,7 @@ func (qb *TagQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]*Tag, erro
query := `
SELECT tags.* FROM tags
LEFT JOIN scenes_tags as scenes_join on scenes_join.tag_id = tags.id
- LEFT JOIN scenes on scenes_join.scene_id = scenes.id
- WHERE scenes.id = ?
+ WHERE scenes_join.scene_id = ?
GROUP BY tags.id
`
query += qb.getTagSort(nil)
@@ -103,8 +103,7 @@ func (qb *TagQueryBuilder) FindBySceneMarkerID(sceneMarkerID int, tx *sqlx.Tx) (
query := `
SELECT tags.* FROM tags
LEFT JOIN scene_markers_tags as scene_markers_join on scene_markers_join.tag_id = tags.id
- LEFT JOIN scene_markers on scene_markers_join.scene_marker_id = scene_markers.id
- WHERE scene_markers.id = ?
+ WHERE scene_markers_join.scene_marker_id = ?
GROUP BY tags.id
`
query += qb.getTagSort(nil)
@@ -112,14 +111,22 @@ func (qb *TagQueryBuilder) FindBySceneMarkerID(sceneMarkerID int, tx *sqlx.Tx) (
return qb.queryTags(query, args, tx)
}
-func (qb *TagQueryBuilder) FindByName(name string, tx *sqlx.Tx) (*Tag, error) {
- query := "SELECT * FROM tags WHERE name = ? LIMIT 1"
+func (qb *TagQueryBuilder) FindByName(name string, tx *sqlx.Tx, nocase bool) (*Tag, error) {
+ query := "SELECT * FROM tags WHERE name = ?"
+ if nocase {
+ query += " COLLATE NOCASE"
+ }
+ query += " LIMIT 1"
args := []interface{}{name}
return qb.queryTag(query, args, tx)
}
-func (qb *TagQueryBuilder) FindByNames(names []string, tx *sqlx.Tx) ([]*Tag, error) {
- query := "SELECT * FROM tags WHERE name IN " + getInBinding(len(names))
+func (qb *TagQueryBuilder) FindByNames(names []string, tx *sqlx.Tx, nocase bool) ([]*Tag, error) {
+ query := "SELECT * FROM tags WHERE name"
+ if nocase {
+ query += " COLLATE NOCASE"
+ }
+ query += " IN " + getInBinding(len(names))
var args []interface{}
for _, name := range names {
args = append(args, name)
@@ -135,6 +142,10 @@ func (qb *TagQueryBuilder) All() ([]*Tag, error) {
return qb.queryTags(selectAll("tags")+qb.getTagSort(nil), nil, nil)
}
+func (qb *TagQueryBuilder) AllSlim() ([]*Tag, error) {
+ return qb.queryTags("SELECT tags.id, tags.name FROM tags "+qb.getTagSort(nil), nil, nil)
+}
+
func (qb *TagQueryBuilder) Query(findFilter *FindFilterType) ([]*Tag, int) {
if findFilter == nil {
findFilter = &FindFilterType{}
@@ -147,7 +158,9 @@ func (qb *TagQueryBuilder) Query(findFilter *FindFilterType) ([]*Tag, int) {
if q := findFilter.Q; q != nil && *q != "" {
searchColumns := []string{"tags.name"}
- whereClauses = append(whereClauses, getSearch(searchColumns, *q))
+ clause, thisArgs := getSearchBinding(searchColumns, *q, false)
+ whereClauses = append(whereClauses, clause)
+ args = append(args, thisArgs...)
}
sortAndPagination := qb.getTagSort(findFilter) + getPagination(findFilter)
diff --git a/pkg/models/querybuilder_tag_test.go b/pkg/models/querybuilder_tag_test.go
new file mode 100644
index 000000000..052c89fc3
--- /dev/null
+++ b/pkg/models/querybuilder_tag_test.go
@@ -0,0 +1,118 @@
+// +build integration
+
+package models_test
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/stashapp/stash/pkg/models"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestMarkerFindBySceneMarkerID(t *testing.T) {
+ tqb := models.NewTagQueryBuilder()
+
+ markerID := markerIDs[markerIdxWithScene]
+
+ tags, err := tqb.FindBySceneMarkerID(markerID, nil)
+
+ if err != nil {
+ t.Fatalf("Error finding tags: %s", err.Error())
+ }
+
+ assert.Len(t, tags, 1)
+ assert.Equal(t, tagIDs[tagIdxWithMarker], tags[0].ID)
+
+ tags, err = tqb.FindBySceneMarkerID(0, nil)
+
+ if err != nil {
+ t.Fatalf("Error finding tags: %s", err.Error())
+ }
+
+ assert.Len(t, tags, 0)
+}
+
+func TestTagFindByName(t *testing.T) {
+
+ tqb := models.NewTagQueryBuilder()
+
+ name := tagNames[tagIdxWithScene] // find a tag by name
+
+ tag, err := tqb.FindByName(name, nil, false)
+
+ if err != nil {
+ t.Fatalf("Error finding tags: %s", err.Error())
+ }
+
+ assert.Equal(t, tagNames[tagIdxWithScene], tag.Name)
+
+ name = tagNames[tagIdxWithDupName] // find a tag by name nocase
+
+ tag, err = tqb.FindByName(name, nil, true)
+
+ if err != nil {
+ t.Fatalf("Error finding tags: %s", err.Error())
+ }
+ // tagIdxWithDupName and tagIdxWithScene should have similar names ( only diff should be Name vs NaMe)
+ //tag.Name should match with tagIdxWithScene since its ID is before tagIdxWithDupName
+ assert.Equal(t, tagNames[tagIdxWithScene], tag.Name)
+ //tag.Name should match with tagIdxWithDupName if the check is not case sensitive
+ assert.Equal(t, strings.ToLower(tagNames[tagIdxWithDupName]), strings.ToLower(tag.Name))
+
+}
+
+func TestTagFindByNames(t *testing.T) {
+ var names []string
+
+ tqb := models.NewTagQueryBuilder()
+
+ names = append(names, tagNames[tagIdxWithScene]) // find tags by names
+
+ tags, err := tqb.FindByNames(names, nil, false)
+ if err != nil {
+ t.Fatalf("Error finding tags: %s", err.Error())
+ }
+ assert.Len(t, tags, 1)
+ assert.Equal(t, tagNames[tagIdxWithScene], tags[0].Name)
+
+ tags, err = tqb.FindByNames(names, nil, true) // find tags by names nocase
+ if err != nil {
+ t.Fatalf("Error finding tags: %s", err.Error())
+ }
+ assert.Len(t, tags, 2) // tagIdxWithScene and tagIdxWithDupName
+ assert.Equal(t, strings.ToLower(tagNames[tagIdxWithScene]), strings.ToLower(tags[0].Name))
+ assert.Equal(t, strings.ToLower(tagNames[tagIdxWithScene]), strings.ToLower(tags[1].Name))
+
+ names = append(names, tagNames[tagIdx1WithScene]) // find tags by names ( 2 names )
+
+ tags, err = tqb.FindByNames(names, nil, false)
+ if err != nil {
+ t.Fatalf("Error finding tags: %s", err.Error())
+ }
+ assert.Len(t, tags, 2) // tagIdxWithScene and tagIdx1WithScene
+ assert.Equal(t, tagNames[tagIdxWithScene], tags[0].Name)
+ assert.Equal(t, tagNames[tagIdx1WithScene], tags[1].Name)
+
+ tags, err = tqb.FindByNames(names, nil, true) // find tags by names ( 2 names nocase)
+ if err != nil {
+ t.Fatalf("Error finding tags: %s", err.Error())
+ }
+ assert.Len(t, tags, 4) // tagIdxWithScene and tagIdxWithDupName , tagIdx1WithScene and tagIdx1WithDupName
+ assert.Equal(t, tagNames[tagIdxWithScene], tags[0].Name)
+ assert.Equal(t, tagNames[tagIdx1WithScene], tags[1].Name)
+ assert.Equal(t, tagNames[tagIdx1WithDupName], tags[2].Name)
+ assert.Equal(t, tagNames[tagIdxWithDupName], tags[3].Name)
+
+}
+
+// TODO Create
+// TODO Update
+// TODO Destroy
+// TODO Find
+// TODO FindBySceneID
+// TODO FindBySceneMarkerID
+// TODO Count
+// TODO All
+// TODO AllSlim
+// TODO Query
diff --git a/pkg/models/setup_test.go b/pkg/models/setup_test.go
new file mode 100644
index 000000000..0cfb25328
--- /dev/null
+++ b/pkg/models/setup_test.go
@@ -0,0 +1,584 @@
+// +build integration
+
+package models_test
+
+import (
+ "context"
+ "database/sql"
+ "errors"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "strconv"
+ "testing"
+
+ "github.com/jmoiron/sqlx"
+
+ "github.com/stashapp/stash/pkg/database"
+ "github.com/stashapp/stash/pkg/models"
+ "github.com/stashapp/stash/pkg/utils"
+)
+
+const totalScenes = 12
+const performersNameCase = 3
+const performersNameNoCase = 2
+const moviesNameCase = 1
+const moviesNameNoCase = 1
+const totalGalleries = 1
+const tagsNameNoCase = 2
+const tagsNameCase = 5
+const studiosNameCase = 1
+const studiosNameNoCase = 1
+
+var sceneIDs []int
+var performerIDs []int
+var movieIDs []int
+var galleryIDs []int
+var tagIDs []int
+var studioIDs []int
+var markerIDs []int
+
+var tagNames []string
+var studioNames []string
+var movieNames []string
+var performerNames []string
+
+const sceneIdxWithMovie = 0
+const sceneIdxWithGallery = 1
+const sceneIdxWithPerformer = 2
+const sceneIdxWithTwoPerformers = 3
+const sceneIdxWithTag = 4
+const sceneIdxWithTwoTags = 5
+const sceneIdxWithStudio = 6
+const sceneIdxWithMarker = 7
+
+const performerIdxWithScene = 0
+const performerIdx1WithScene = 1
+const performerIdx2WithScene = 2
+
+// performers with dup names start from the end
+const performerIdx1WithDupName = 3
+const performerIdxWithDupName = 4
+
+const movieIdxWithScene = 0
+
+// movies with dup names start from the end
+const movieIdxWithDupName = 1
+
+const galleryIdxWithScene = 0
+
+const tagIdxWithScene = 0
+const tagIdx1WithScene = 1
+const tagIdx2WithScene = 2
+const tagIdxWithPrimaryMarker = 3
+const tagIdxWithMarker = 4
+
+// tags with dup names start from the end
+const tagIdx1WithDupName = 5
+const tagIdxWithDupName = 6
+
+const studioIdxWithScene = 0
+
+// studios with dup names start from the end
+const studioIdxWithDupName = 1
+
+const markerIdxWithScene = 0
+
+const pathField = "Path"
+const checksumField = "Checksum"
+const titleField = "Title"
+
+func TestMain(m *testing.M) {
+ ret := runTests(m)
+ os.Exit(ret)
+}
+
+func testTeardown(databaseFile string) {
+ err := database.DB.Close()
+
+ if err != nil {
+ panic(err)
+ }
+
+ err = os.Remove(databaseFile)
+ if err != nil {
+ panic(err)
+ }
+}
+
+func runTests(m *testing.M) int {
+ // create the database file
+ f, err := ioutil.TempFile("", "*.sqlite")
+ if err != nil {
+ panic(fmt.Sprintf("Could not create temporary file: %s", err.Error()))
+ }
+
+ f.Close()
+ databaseFile := f.Name()
+ database.Initialize(databaseFile)
+
+ // defer close and delete the database
+ defer testTeardown(databaseFile)
+
+ err = populateDB()
+ if err != nil {
+ panic(fmt.Sprintf("Could not populate database: %s", err.Error()))
+ } else {
+ // run the tests
+ return m.Run()
+ }
+}
+
+func populateDB() error {
+ ctx := context.TODO()
+ tx := database.DB.MustBeginTx(ctx, nil)
+
+ if err := createScenes(tx, totalScenes); err != nil {
+ tx.Rollback()
+ return err
+ }
+
+ if err := createGalleries(tx, totalGalleries); err != nil {
+ tx.Rollback()
+ return err
+ }
+
+ if err := createMovies(tx, moviesNameCase, moviesNameNoCase); err != nil {
+ tx.Rollback()
+ return err
+ }
+
+ if err := createPerformers(tx, performersNameCase, performersNameNoCase); err != nil {
+ tx.Rollback()
+ return err
+ }
+
+ if err := createTags(tx, tagsNameCase, tagsNameNoCase); err != nil {
+ tx.Rollback()
+ return err
+ }
+
+ if err := createStudios(tx, studiosNameCase, studiosNameNoCase); err != nil {
+ tx.Rollback()
+ return err
+ }
+
+ // TODO - the link methods use Find which don't accept a transaction, so
+ // to commit the transaction and start a new one
+ if err := tx.Commit(); err != nil {
+ return fmt.Errorf("Error committing: %s", err.Error())
+ }
+
+ tx = database.DB.MustBeginTx(ctx, nil)
+
+ if err := linkSceneGallery(tx, sceneIdxWithGallery, galleryIdxWithScene); err != nil {
+ tx.Rollback()
+ return err
+ }
+
+ if err := linkSceneMovie(tx, sceneIdxWithMovie, movieIdxWithScene); err != nil {
+ tx.Rollback()
+ return err
+ }
+
+ if err := linkScenePerformers(tx); err != nil {
+ tx.Rollback()
+ return err
+ }
+
+ if err := linkSceneTags(tx); err != nil {
+ tx.Rollback()
+ return err
+ }
+
+ if err := linkSceneStudio(tx, sceneIdxWithStudio, studioIdxWithScene); err != nil {
+ tx.Rollback()
+ return err
+ }
+
+ if err := createMarker(tx, sceneIdxWithMarker, tagIdxWithPrimaryMarker, []int{tagIdxWithMarker}); err != nil {
+ tx.Rollback()
+ return err
+ }
+
+ if err := tx.Commit(); err != nil {
+ return fmt.Errorf("Error committing: %s", err.Error())
+ }
+
+ return nil
+}
+
+func getSceneStringValue(index int, field string) string {
+ return fmt.Sprintf("scene_%04d_%s", index, field)
+}
+
+func getSceneRating(index int) sql.NullInt64 {
+ rating := index % 6
+ return sql.NullInt64{Int64: int64(rating), Valid: rating > 0}
+}
+
+func getSceneOCounter(index int) int {
+ return index % 3
+}
+
+func getSceneDuration(index int) sql.NullFloat64 {
+ duration := index % 4
+ duration = duration * 100
+
+ return sql.NullFloat64{
+ Float64: float64(duration) + 0.432,
+ Valid: duration != 0,
+ }
+}
+
+func getSceneHeight(index int) sql.NullInt64 {
+ heights := []int64{0, 200, 240, 300, 480, 700, 720, 800, 1080, 1500, 2160, 3000}
+ height := heights[index%len(heights)]
+ return sql.NullInt64{
+ Int64: height,
+ Valid: height != 0,
+ }
+}
+
+func getSceneDate(index int) models.SQLiteDate {
+ dates := []string{"null", "", "0001-01-01", "2001-02-03"}
+ date := dates[index%len(dates)]
+ return models.SQLiteDate{
+ String: date,
+ Valid: date != "null",
+ }
+}
+
+func createScenes(tx *sqlx.Tx, n int) error {
+ sqb := models.NewSceneQueryBuilder()
+
+ for i := 0; i < n; i++ {
+ scene := models.Scene{
+ Path: getSceneStringValue(i, pathField),
+ Title: sql.NullString{String: getSceneStringValue(i, titleField), Valid: true},
+ Checksum: getSceneStringValue(i, checksumField),
+ Details: sql.NullString{String: getSceneStringValue(i, "Details"), Valid: true},
+ Rating: getSceneRating(i),
+ OCounter: getSceneOCounter(i),
+ Duration: getSceneDuration(i),
+ Height: getSceneHeight(i),
+ Date: getSceneDate(i),
+ }
+
+ created, err := sqb.Create(scene, tx)
+
+ if err != nil {
+ return fmt.Errorf("Error creating scene %v+: %s", scene, err.Error())
+ }
+
+ sceneIDs = append(sceneIDs, created.ID)
+ }
+
+ return nil
+}
+
+func getGalleryStringValue(index int, field string) string {
+ return "gallery_" + strconv.FormatInt(int64(index), 10) + "_" + field
+}
+
+func createGalleries(tx *sqlx.Tx, n int) error {
+ gqb := models.NewGalleryQueryBuilder()
+
+ for i := 0; i < n; i++ {
+ gallery := models.Gallery{
+ Path: getGalleryStringValue(i, pathField),
+ Checksum: getGalleryStringValue(i, checksumField),
+ }
+
+ created, err := gqb.Create(gallery, tx)
+
+ if err != nil {
+ return fmt.Errorf("Error creating gallery %v+: %s", gallery, err.Error())
+ }
+
+ galleryIDs = append(galleryIDs, created.ID)
+ }
+
+ return nil
+}
+
+func getMovieStringValue(index int, field string) string {
+ return "movie_" + strconv.FormatInt(int64(index), 10) + "_" + field
+}
+
+//createMovies creates n movies with plain Name and o movies with camel cased NaMe included
+func createMovies(tx *sqlx.Tx, n int, o int) error {
+	mqb := models.NewMovieQueryBuilder()
+	const namePlain = "Name"
+	const nameNoCase = "NaMe"
+
+	name := namePlain
+
+	for i := 0; i < n+o; i++ {
+		index := i
+
+		if i >= n { // i>=n movies get dup names if case is not checked
+			name = nameNoCase; index = n + o - (i + 1) // switch to the camel-cased name and count backwards to 0
+		} // so the number (index) in the name matches its plain-cased counterpart
+		// movies [ i ] and [ n + o - i - 1 ] should have similar names with only the Name!=NaMe part different
+
+		movie := models.Movie{
+			Name:       sql.NullString{String: getMovieStringValue(index, name), Valid: true},
+			FrontImage: []byte(models.DefaultMovieImage),
+			Checksum:   utils.MD5FromString(name),
+		}
+
+		created, err := mqb.Create(movie, tx)
+
+		if err != nil {
+			return fmt.Errorf("Error creating movie %v+: %s", movie, err.Error())
+		}
+
+		movieIDs = append(movieIDs, created.ID)
+		movieNames = append(movieNames, created.Name.String)
+	}
+
+	return nil
+}
+
+func getPerformerStringValue(index int, field string) string {
+ return "performer_" + strconv.FormatInt(int64(index), 10) + "_" + field
+}
+
+func getPerformerBoolValue(index int) bool {
+ index = index % 2
+ return index == 1
+}
+
+//createPerformers creates n performers with plain Name and o performers with camel cased NaMe included
+func createPerformers(tx *sqlx.Tx, n int, o int) error {
+	pqb := models.NewPerformerQueryBuilder()
+	const namePlain = "Name"
+	const nameNoCase = "NaMe"
+
+	name := namePlain
+
+	for i := 0; i < n+o; i++ {
+		index := i
+
+		if i >= n { // i>=n performers get dup names if case is not checked
+			name = nameNoCase; index = n + o - (i + 1) // switch to the camel-cased name and count backwards to 0
+		} // so the number (index) in the name matches its plain-cased counterpart
+		// performers [ i ] and [ n + o - i - 1 ] should have similar names with only the Name!=NaMe part different
+
+		performer := models.Performer{
+			Name:     sql.NullString{String: getPerformerStringValue(index, name), Valid: true},
+			Checksum: getPerformerStringValue(i, checksumField),
+			// just use movie image
+			Image:    []byte(models.DefaultMovieImage),
+			Favorite: sql.NullBool{Bool: getPerformerBoolValue(i), Valid: true},
+		}
+
+		created, err := pqb.Create(performer, tx)
+
+		if err != nil {
+			return fmt.Errorf("Error creating performer %v+: %s", performer, err.Error())
+		}
+
+		performerIDs = append(performerIDs, created.ID)
+		performerNames = append(performerNames, created.Name.String)
+	}
+
+	return nil
+}
+
+func getTagStringValue(index int, field string) string {
+ return "tag_" + strconv.FormatInt(int64(index), 10) + "_" + field
+}
+
+//createTags creates n tags with plain Name and o tags with camel cased NaMe included
+func createTags(tx *sqlx.Tx, n int, o int) error {
+	tqb := models.NewTagQueryBuilder()
+	const namePlain = "Name"
+	const nameNoCase = "NaMe"
+
+	name := namePlain
+
+	for i := 0; i < n+o; i++ {
+		index := i
+
+		if i >= n { // i>=n tags get dup names if case is not checked
+			name = nameNoCase; index = n + o - (i + 1) // switch to the camel-cased name and count backwards to 0
+		} // so the number (index) in the name matches its plain-cased counterpart
+		// tags [ i ] and [ n + o - i - 1 ] should have similar names with only the Name!=NaMe part different
+
+		tag := models.Tag{
+			Name: getTagStringValue(index, name),
+		}
+
+		created, err := tqb.Create(tag, tx)
+
+		if err != nil {
+			return fmt.Errorf("Error creating tag %v+: %s", tag, err.Error())
+		}
+
+		tagIDs = append(tagIDs, created.ID)
+		tagNames = append(tagNames, created.Name)
+
+	}
+
+	return nil
+}
+
+func getStudioStringValue(index int, field string) string {
+ return "studio_" + strconv.FormatInt(int64(index), 10) + "_" + field
+}
+
+//createStudios creates n studios with plain Name and o studios with camel cased NaMe included
+func createStudios(tx *sqlx.Tx, n int, o int) error {
+	sqb := models.NewStudioQueryBuilder()
+	const namePlain = "Name"
+	const nameNoCase = "NaMe"
+
+	name := namePlain
+
+	for i := 0; i < n+o; i++ {
+		index := i
+
+		if i >= n { // i>=n studios get dup names if case is not checked
+			name = nameNoCase; index = n + o - (i + 1) // switch to the camel-cased name and count backwards to 0
+		} // so the number (index) in the name matches its plain-cased counterpart
+		// studios [ i ] and [ n + o - i - 1 ] should have similar names with only the Name!=NaMe part different
+
+		studio := models.Studio{
+			Name:     sql.NullString{String: getStudioStringValue(index, name), Valid: true},
+			Image:    []byte(models.DefaultStudioImage),
+			Checksum: utils.MD5FromString(name),
+		}
+
+		created, err := sqb.Create(studio, tx)
+
+		if err != nil {
+			return fmt.Errorf("Error creating studio %v+: %s", studio, err.Error())
+		}
+
+		studioIDs = append(studioIDs, created.ID)
+		studioNames = append(studioNames, created.Name.String)
+	}
+
+	return nil
+}
+
+func createMarker(tx *sqlx.Tx, sceneIdx, primaryTagIdx int, tagIdxs []int) error {
+ mqb := models.NewSceneMarkerQueryBuilder()
+
+ marker := models.SceneMarker{
+ SceneID: sql.NullInt64{Int64: int64(sceneIDs[sceneIdx]), Valid: true},
+ PrimaryTagID: tagIDs[primaryTagIdx],
+ }
+
+ created, err := mqb.Create(marker, tx)
+
+ if err != nil {
+ return fmt.Errorf("Error creating marker %v+: %s", marker, err.Error())
+ }
+
+ markerIDs = append(markerIDs, created.ID)
+
+ jqb := models.NewJoinsQueryBuilder()
+
+ joins := []models.SceneMarkersTags{}
+
+ for _, tagIdx := range tagIdxs {
+ join := models.SceneMarkersTags{
+ SceneMarkerID: created.ID,
+ TagID: tagIDs[tagIdx],
+ }
+ joins = append(joins, join)
+ }
+
+ if err := jqb.CreateSceneMarkersTags(joins, tx); err != nil {
+ return fmt.Errorf("Error creating marker/tag join: %s", err.Error())
+ }
+
+ return nil
+}
+
+func linkSceneMovie(tx *sqlx.Tx, sceneIndex, movieIndex int) error {
+ jqb := models.NewJoinsQueryBuilder()
+
+ _, err := jqb.AddMoviesScene(sceneIDs[sceneIndex], movieIDs[movieIndex], nil, tx)
+ return err
+}
+
+func linkScenePerformers(tx *sqlx.Tx) error {
+ if err := linkScenePerformer(tx, sceneIdxWithPerformer, performerIdxWithScene); err != nil {
+ return err
+ }
+ if err := linkScenePerformer(tx, sceneIdxWithTwoPerformers, performerIdx1WithScene); err != nil {
+ return err
+ }
+ if err := linkScenePerformer(tx, sceneIdxWithTwoPerformers, performerIdx2WithScene); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func linkScenePerformer(tx *sqlx.Tx, sceneIndex, performerIndex int) error {
+ jqb := models.NewJoinsQueryBuilder()
+
+ _, err := jqb.AddPerformerScene(sceneIDs[sceneIndex], performerIDs[performerIndex], tx)
+ return err
+}
+
+func linkSceneGallery(tx *sqlx.Tx, sceneIndex, galleryIndex int) error {
+ gqb := models.NewGalleryQueryBuilder()
+
+ gallery, err := gqb.Find(galleryIDs[galleryIndex])
+
+ if err != nil {
+ return fmt.Errorf("error finding gallery: %s", err.Error())
+ }
+
+ if gallery == nil {
+ return errors.New("gallery is nil")
+ }
+
+ gallery.SceneID = sql.NullInt64{Int64: int64(sceneIDs[sceneIndex]), Valid: true}
+ _, err = gqb.Update(*gallery, tx)
+
+ return err
+}
+
+func linkSceneTags(tx *sqlx.Tx) error {
+ if err := linkSceneTag(tx, sceneIdxWithTag, tagIdxWithScene); err != nil {
+ return err
+ }
+ if err := linkSceneTag(tx, sceneIdxWithTwoTags, tagIdx1WithScene); err != nil {
+ return err
+ }
+ if err := linkSceneTag(tx, sceneIdxWithTwoTags, tagIdx2WithScene); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func linkSceneTag(tx *sqlx.Tx, sceneIndex, tagIndex int) error {
+ jqb := models.NewJoinsQueryBuilder()
+
+ _, err := jqb.AddSceneTag(sceneIDs[sceneIndex], tagIDs[tagIndex], tx)
+ return err
+}
+
+func linkSceneStudio(tx *sqlx.Tx, sceneIndex, studioIndex int) error {
+ sqb := models.NewSceneQueryBuilder()
+
+ scene := models.ScenePartial{
+ ID: sceneIDs[sceneIndex],
+ StudioID: &sql.NullInt64{Int64: int64(studioIDs[studioIndex]), Valid: true},
+ }
+ _, err := sqb.Update(scene, tx)
+
+ return err
+}
diff --git a/pkg/scraper/config.go b/pkg/scraper/config.go
index bfeb1e1bc..07e916d16 100644
--- a/pkg/scraper/config.go
+++ b/pkg/scraper/config.go
@@ -1,6 +1,7 @@
package scraper
import (
+ "io"
"os"
"path/filepath"
"strings"
@@ -139,6 +140,10 @@ func (c *scrapeSceneByURLConfig) resolveFn() {
}
}
+type scraperDebugOptions struct {
+ PrintHTML bool `yaml:"printHTML"`
+}
+
type scraperConfig struct {
ID string
Name string `yaml:"name"`
@@ -148,21 +153,32 @@ type scraperConfig struct {
SceneByFragment *sceneByFragmentConfig `yaml:"sceneByFragment"`
SceneByURL []*scrapeSceneByURLConfig `yaml:"sceneByURL"`
- StashServer *stashServer `yaml:"stashServer"`
- XPathScrapers xpathScrapers `yaml:"xPathScrapers"`
+ DebugOptions *scraperDebugOptions `yaml:"debug"`
+ StashServer *stashServer `yaml:"stashServer"`
+ XPathScrapers xpathScrapers `yaml:"xPathScrapers"`
}
-func loadScraperFromYAML(path string) (*scraperConfig, error) {
+func loadScraperFromYAML(id string, reader io.Reader) (*scraperConfig, error) {
ret := &scraperConfig{}
- file, err := os.Open(path)
- defer file.Close()
+ parser := yaml.NewDecoder(reader)
+ parser.SetStrict(true)
+ err := parser.Decode(&ret)
if err != nil {
return nil, err
}
- parser := yaml.NewDecoder(file)
- parser.SetStrict(true)
- err = parser.Decode(&ret)
+
+ ret.ID = id
+
+ // set the scraper interface
+ ret.initialiseConfigs()
+
+ return ret, nil
+}
+
+func loadScraperFromYAMLFile(path string) (*scraperConfig, error) {
+ file, err := os.Open(path)
+ defer file.Close()
if err != nil {
return nil, err
}
@@ -170,12 +186,8 @@ func loadScraperFromYAML(path string) (*scraperConfig, error) {
// set id to the filename
id := filepath.Base(path)
id = id[:strings.LastIndex(id, ".")]
- ret.ID = id
- // set the scraper interface
- ret.initialiseConfigs()
-
- return ret, nil
+ return loadScraperFromYAML(id, file)
}
func (c *scraperConfig) initialiseConfigs() {
diff --git a/pkg/scraper/freeones.go b/pkg/scraper/freeones.go
index 94bde0e19..3bfc8bcc6 100644
--- a/pkg/scraper/freeones.go
+++ b/pkg/scraper/freeones.go
@@ -1,306 +1,108 @@
package scraper
import (
- "fmt"
- "net/http"
- "net/url"
- "regexp"
"strings"
- "time"
- "github.com/PuerkitoBio/goquery"
"github.com/stashapp/stash/pkg/logger"
- "github.com/stashapp/stash/pkg/models"
)
const freeonesScraperID = "builtin_freeones"
-const freeonesName = "Freeones"
-var freeonesURLs = []string{
- "freeones.com",
-}
+// 537: stolen from: https://github.com/stashapp/CommunityScrapers/blob/master/scrapers/NewFreeones.yml
+const freeonesScraperConfig = `
+name: Freeones
+performerByName:
+ action: scrapeXPath
+ queryURL: https://www.freeones.xxx/babes?q={}&v=teasers&s=relevance&l=96&m%5BcanPreviewFeatures%5D=0
+ scraper: performerSearch
+performerByURL:
+ - action: scrapeXPath
+ url:
+ - https://www.freeones.xxx
+ scraper: performerScraper
+
+xPathScrapers:
+ performerSearch:
+ performer:
+ Name: //div[@id="search-result"]//p[@data-test="subject-name"]/text()
+ URL:
+ selector: //div[@id="search-result"]//div[@data-test="teaser-subject"]/a/@href
+ replace:
+ - regex: ^
+ with: https://www.freeones.xxx
+ - regex: $
+ with: /profile
+
+ performerScraper:
+ performer:
+ Name: //h1
+ URL:
+ selector: //a[span[text()="Profile"]]/@href
+ replace:
+ - regex: ^
+ with: https://www.freeones.xxx
+ Twitter: //div[p[text()='Follow On']]//div//a[@class='d-flex align-items-center justify-content-center mr-2 social-icons color-twitter']/@href
+ Instagram: //div[p[text()='Follow On']]//div//a[@class='d-flex align-items-center justify-content-center mr-2 social-icons color-telegram']/@href
+ Birthdate:
+ selector: //div[p[text()='Personal Information']]//div//p/a/span[contains(text(),'Born On')]
+ replace:
+ - regex: Born On
+ with:
+ - regex: ","
+ with:
+ parseDate: January 2 2006
+ Ethnicity:
+ selector: //div[p[text()='Ethnicity']]//div//p[@class='mb-0 text-center']
+ replace:
+ - regex: Asian
+ with: "asian"
+ - regex: Caucasian
+ with: "white"
+ - regex: Black
+ with: "black"
+ - regex: Latin
+ with: "hispanic"
+ Country: //div[p[text()='Personal Information']]//div//p//a[@data-test="link-country"]
+ EyeColor: //div[p[text()='Eye Color']]//div//p//a//span
+ Height:
+ selector: //div[p[text()='Height']]//div//p//a//span
+ replace:
+ - regex: \D+[\s\S]+
+ with: ""
+ Measurements:
+ selector: //div[p[text()='Measurements']]//div[@class='p-3']//p
+ replace:
+ - regex: Unknown
+ with:
+ FakeTits:
+ selector: //span[@data-test='link_span_boobs']
+ replace:
+ - regex: Unknown
+ with:
+ - regex: Fake
+ with: "Yes"
+ - regex: Natural
+ with: "No"
+ CareerLength:
+ selector: //div[p[text()='career']]//div//div[@class='timeline-horizontal mb-3']//div//p[@class='m-0']
+ concat: "-"
+ replace:
+ - regex: -\w+-\w+-\w+-\w+-\w+$
+ with: ""
+ Aliases: //div[p[text()='Aliases']]//div//p[@class='mb-0 text-center']
+ Tattoos: //div[p[text()='Tattoos']]//div//p[@class='mb-0 text-center']
+ Piercings: //div[p[text()='Piercings']]//div//p[@class='mb-0 text-center']
+ Image:
+ selector: //div[@class='profile-image-large']//a/img/@src
+`
func GetFreeonesScraper() scraperConfig {
- return scraperConfig{
- ID: freeonesScraperID,
- Name: "Freeones",
- PerformerByName: &performerByNameConfig{
- performScrape: GetPerformerNames,
- },
- PerformerByFragment: &performerByFragmentConfig{
- performScrape: GetPerformer,
- },
- PerformerByURL: []*scrapePerformerByURLConfig{
- &scrapePerformerByURLConfig{
- scrapeByURLConfig: scrapeByURLConfig{
- URL: freeonesURLs,
- },
- performScrape: GetPerformerURL,
- },
- },
- }
-}
+ yml := freeonesScraperConfig
-func GetPerformerNames(c scraperTypeConfig, q string) ([]*models.ScrapedPerformer, error) {
- // Request the HTML page.
- queryURL := "https://www.freeones.com/suggestions.php?q=" + url.PathEscape(q) + "&t=1"
- res, err := http.Get(queryURL)
+ scraper, err := loadScraperFromYAML(freeonesScraperID, strings.NewReader(yml))
if err != nil {
- logger.Fatal(err)
- }
- defer res.Body.Close()
- if res.StatusCode != 200 {
- return nil, fmt.Errorf("status code error: %d %s", res.StatusCode, res.Status)
+ logger.Fatalf("Error loading builtin freeones scraper: %s", err.Error())
}
- // Load the HTML document
- doc, err := goquery.NewDocumentFromReader(res.Body)
- if err != nil {
- return nil, err
- }
-
- // Find the performers
- var performers []*models.ScrapedPerformer
- doc.Find(".suggestion").Each(func(i int, s *goquery.Selection) {
- name := strings.Trim(s.Text(), " ")
- p := models.ScrapedPerformer{
- Name: &name,
- }
- performers = append(performers, &p)
- })
-
- return performers, nil
-}
-
-func GetPerformerURL(c scraperTypeConfig, href string) (*models.ScrapedPerformer, error) {
- // if we're already in the bio page, just scrape it
- if regexp.MustCompile(`\/bio_.*\.php$`).MatchString(href) {
- return getPerformerBio(c, href)
- }
-
- // otherwise try to get the bio page from the url
- profileRE := regexp.MustCompile(`_links\/(.*?)\/$`)
- if profileRE.MatchString(href) {
- href = profileRE.ReplaceAllString(href, "_links/bio_$1.php")
- return getPerformerBio(c, href)
- }
-
- return nil, nil
-}
-
-func getPerformerBio(c scraperTypeConfig, href string) (*models.ScrapedPerformer, error) {
- bioRes, err := http.Get(href)
- if err != nil {
- return nil, err
- }
- defer bioRes.Body.Close()
- if bioRes.StatusCode != 200 {
- return nil, fmt.Errorf("status code error: %d %s", bioRes.StatusCode, bioRes.Status)
- }
-
- // Load the HTML document
- bioDoc, err := goquery.NewDocumentFromReader(bioRes.Body)
- if err != nil {
- return nil, err
- }
-
- params := bioDoc.Find(".paramvalue")
- paramIndexes := getIndexes(bioDoc)
-
- result := models.ScrapedPerformer{}
-
- performerURL := bioRes.Request.URL.String()
- result.URL = &performerURL
-
- name := paramValue(params, paramIndexes["name"])
- result.Name = &name
-
- ethnicity := getEthnicity(paramValue(params, paramIndexes["ethnicity"]))
- result.Ethnicity = ðnicity
-
- country := paramValue(params, paramIndexes["country"])
- result.Country = &country
-
- eyeColor := paramValue(params, paramIndexes["eye_color"])
- result.EyeColor = &eyeColor
-
- measurements := paramValue(params, paramIndexes["measurements"])
- result.Measurements = &measurements
-
- fakeTits := paramValue(params, paramIndexes["fake_tits"])
- result.FakeTits = &fakeTits
-
- careerLength := paramValue(params, paramIndexes["career_length"])
- careerRegex := regexp.MustCompile(`\([\s\S]*`)
- careerLength = careerRegex.ReplaceAllString(careerLength, "")
- careerLength = trim(careerLength)
- result.CareerLength = &careerLength
-
- tattoos := paramValue(params, paramIndexes["tattoos"])
- result.Tattoos = &tattoos
-
- piercings := paramValue(params, paramIndexes["piercings"])
- result.Piercings = &piercings
-
- aliases := paramValue(params, paramIndexes["aliases"])
- result.Aliases = &aliases
-
- birthdate := paramValue(params, paramIndexes["birthdate"])
- birthdateRegex := regexp.MustCompile(` \(\d* years old\)`)
- birthdate = birthdateRegex.ReplaceAllString(birthdate, "")
- birthdate = trim(birthdate)
- if birthdate != "Unknown" && len(birthdate) > 0 {
- t, _ := time.Parse("January _2, 2006", birthdate) // TODO
- formattedBirthdate := t.Format("2006-01-02")
- result.Birthdate = &formattedBirthdate
- }
-
- height := paramValue(params, paramIndexes["height"])
- heightRegex := regexp.MustCompile(`heightcm = "(.*)"\;`)
- heightMatches := heightRegex.FindStringSubmatch(height)
- if len(heightMatches) > 1 {
- result.Height = &heightMatches[1]
- }
-
- twitterElement := bioDoc.Find(".twitter a")
- twitterHref, _ := twitterElement.Attr("href")
- if twitterHref != "" {
- twitterURL, _ := url.Parse(twitterHref)
- twitterHandle := strings.Replace(twitterURL.Path, "/", "", -1)
- result.Twitter = &twitterHandle
- }
-
- instaElement := bioDoc.Find(".instagram a")
- instaHref, _ := instaElement.Attr("href")
- if instaHref != "" {
- instaURL, _ := url.Parse(instaHref)
- instaHandle := strings.Replace(instaURL.Path, "/", "", -1)
- result.Instagram = &instaHandle
- }
-
- return &result, nil
-}
-
-func GetPerformer(c scraperTypeConfig, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
- if scrapedPerformer.Name == nil {
- return nil, nil
- }
-
- performerName := *scrapedPerformer.Name
- queryURL := "https://www.freeones.com/search/?t=1&q=" + url.PathEscape(performerName) + "&view=thumbs"
- res, err := http.Get(queryURL)
- if err != nil {
- return nil, err
- }
- defer res.Body.Close()
- if res.StatusCode != 200 {
- return nil, fmt.Errorf("status code error: %d %s", res.StatusCode, res.Status)
- }
-
- // Load the HTML document
- doc, err := goquery.NewDocumentFromReader(res.Body)
- if err != nil {
- return nil, err
- }
-
- performerLink := doc.Find("div.Block3 a").FilterFunction(func(i int, s *goquery.Selection) bool {
- href, _ := s.Attr("href")
- if href == "/html/j_links/Jenna_Leigh_c/" || href == "/html/a_links/Alexa_Grace_c/" {
- return false
- }
- if strings.ToLower(s.Text()) == strings.ToLower(performerName) {
- return true
- }
- alias := s.ParentsFiltered(".babeNameBlock").Find(".babeAlias").First()
- if strings.Contains(strings.ToLower(alias.Text()), strings.ToLower(performerName)) {
- return true
- }
- return false
- })
-
- href, _ := performerLink.Attr("href")
- href = strings.TrimSuffix(href, "/")
- regex := regexp.MustCompile(`.+_links\/(.+)`)
- matches := regex.FindStringSubmatch(href)
- if len(matches) < 2 {
- return nil, fmt.Errorf("No matches found in %s", href)
- }
-
- href = strings.Replace(href, matches[1], "bio_"+matches[1]+".php", -1)
- href = "https://www.freeones.com" + href
-
- return getPerformerBio(c, href)
-}
-
-func getIndexes(doc *goquery.Document) map[string]int {
- var indexes = make(map[string]int)
- doc.Find(".paramname").Each(func(i int, s *goquery.Selection) {
- index := i + 1
- paramName := trim(s.Text())
- switch paramName {
- case "Babe Name:":
- indexes["name"] = index
- case "Ethnicity:":
- indexes["ethnicity"] = index
- case "Country of Origin:":
- indexes["country"] = index
- case "Date of Birth:":
- indexes["birthdate"] = index
- case "Eye Color:":
- indexes["eye_color"] = index
- case "Height:":
- indexes["height"] = index
- case "Measurements:":
- indexes["measurements"] = index
- case "Fake boobs:":
- indexes["fake_tits"] = index
- case "Career Start And End":
- indexes["career_length"] = index
- case "Tattoos:":
- indexes["tattoos"] = index
- case "Piercings:":
- indexes["piercings"] = index
- case "Aliases:":
- indexes["aliases"] = index
- }
- })
- return indexes
-}
-
-func getEthnicity(ethnicity string) string {
- switch ethnicity {
- case "Caucasian":
- return "white"
- case "Black":
- return "black"
- case "Latin":
- return "hispanic"
- case "Asian":
- return "asian"
- default:
- panic("unknown ethnicity")
- }
-}
-
-func paramValue(params *goquery.Selection, paramIndex int) string {
- i := paramIndex - 1
- if paramIndex <= 0 {
- return ""
- }
- node := params.Get(i).FirstChild
- content := trim(node.Data)
- if content != "" {
- return content
- }
- node = node.NextSibling
- if node == nil {
- return ""
- }
- return trim(node.FirstChild.Data)
-}
-
-// https://stackoverflow.com/questions/20305966/why-does-strip-not-remove-the-leading-whitespace
-func trim(text string) string {
- // return text.replace(/\A\p{Space}*|\p{Space}*\z/, "");
- return strings.TrimSpace(text)
+ return *scraper
}
diff --git a/pkg/scraper/image.go b/pkg/scraper/image.go
new file mode 100644
index 000000000..4cdd691c1
--- /dev/null
+++ b/pkg/scraper/image.go
@@ -0,0 +1,95 @@
+package scraper
+
+import (
+ "io/ioutil"
+ "net/http"
+ "strings"
+ "time"
+
+ "github.com/stashapp/stash/pkg/manager/config"
+ "github.com/stashapp/stash/pkg/models"
+ "github.com/stashapp/stash/pkg/utils"
+)
+
+// Timeout to get the image. Includes transfer time. May want to make this
+// configurable at some point.
+const imageGetTimeout = time.Second * 30
+
+func setPerformerImage(p *models.ScrapedPerformer) error {
+ if p == nil || p.Image == nil || !strings.HasPrefix(*p.Image, "http") {
+ // nothing to do
+ return nil
+ }
+
+ img, err := getImage(*p.Image)
+ if err != nil {
+ return err
+ }
+
+ p.Image = img
+
+ return nil
+}
+
+func setSceneImage(s *models.ScrapedScene) error {
+ // don't try to get the image if it doesn't appear to be a URL
+ if s == nil || s.Image == nil || !strings.HasPrefix(*s.Image, "http") {
+ // nothing to do
+ return nil
+ }
+
+ img, err := getImage(*s.Image)
+ if err != nil {
+ return err
+ }
+
+ s.Image = img
+
+ return nil
+}
+
+func getImage(url string) (*string, error) {
+ client := &http.Client{
+ Timeout: imageGetTimeout,
+ }
+
+ req, err := http.NewRequest("GET", url, nil)
+ if err != nil {
+ return nil, err
+ }
+
+ userAgent := config.GetScraperUserAgent()
+ if userAgent != "" {
+ req.Header.Set("User-Agent", userAgent)
+ }
+
+ // assume is a URL for now
+ resp, err := client.Do(req)
+ if err != nil {
+ return nil, err
+ }
+
+ defer resp.Body.Close()
+
+ body, err := ioutil.ReadAll(resp.Body)
+ if err != nil {
+ return nil, err
+ }
+
+ // determine the image type and set the base64 type
+ contentType := resp.Header.Get("Content-Type")
+ if contentType == "" {
+ contentType = http.DetectContentType(body)
+ }
+
+ img := "data:" + contentType + ";base64," + utils.GetBase64StringFromData(body)
+ return &img, nil
+}
+
+func getStashPerformerImage(stashURL string, performerID string) (*string, error) {
+ return getImage(stashURL + "/performer/" + performerID + "/image")
+}
+
+func getStashSceneImage(stashURL string, sceneID string) (*string, error) {
+ return getImage(stashURL + "/scene/" + sceneID + "/screenshot")
+}
diff --git a/pkg/scraper/scrapers.go b/pkg/scraper/scrapers.go
index 7c7fcb105..c352dfd20 100644
--- a/pkg/scraper/scrapers.go
+++ b/pkg/scraper/scrapers.go
@@ -2,6 +2,7 @@ package scraper
import (
"errors"
+ "os"
"path/filepath"
"strconv"
@@ -21,7 +22,13 @@ func loadScrapers() ([]scraperConfig, error) {
scrapers = make([]scraperConfig, 0)
logger.Debugf("Reading scraper configs from %s", path)
- scraperFiles, err := filepath.Glob(filepath.Join(path, "*.yml"))
+ scraperFiles := []string{}
+ err := filepath.Walk(path, func(fp string, f os.FileInfo, err error) error {
+ if filepath.Ext(fp) == ".yml" {
+ scraperFiles = append(scraperFiles, fp)
+ }
+ return nil
+ })
if err != nil {
logger.Errorf("Error reading scraper configs: %s", err.Error())
@@ -32,7 +39,7 @@ func loadScrapers() ([]scraperConfig, error) {
scrapers = append(scrapers, GetFreeonesScraper())
for _, file := range scraperFiles {
- scraper, err := loadScraperFromYAML(file)
+ scraper, err := loadScraperFromYAMLFile(file)
if err != nil {
logger.Errorf("Error loading scraper %s: %s", file, err.Error())
} else {
@@ -108,7 +115,17 @@ func ScrapePerformer(scraperID string, scrapedPerformer models.ScrapedPerformerI
// find scraper with the provided id
s := findScraper(scraperID)
if s != nil {
- return s.ScrapePerformer(scrapedPerformer)
+ ret, err := s.ScrapePerformer(scrapedPerformer)
+ if err != nil {
+ return nil, err
+ }
+
+ // post-process - set the image if applicable
+ if err := setPerformerImage(ret); err != nil {
+ logger.Warnf("Could not set image using URL %s: %s", *ret.Image, err.Error())
+ }
+
+ return ret, nil
}
return nil, errors.New("Scraper with ID " + scraperID + " not found")
@@ -117,7 +134,17 @@ func ScrapePerformer(scraperID string, scrapedPerformer models.ScrapedPerformerI
func ScrapePerformerURL(url string) (*models.ScrapedPerformer, error) {
for _, s := range scrapers {
if s.matchesPerformerURL(url) {
- return s.ScrapePerformerURL(url)
+ ret, err := s.ScrapePerformerURL(url)
+ if err != nil {
+ return nil, err
+ }
+
+ // post-process - set the image if applicable
+ if err := setPerformerImage(ret); err != nil {
+ logger.Warnf("Could not set image using URL %s: %s", *ret.Image, err.Error())
+ }
+
+ return ret, nil
}
}
@@ -127,7 +154,7 @@ func ScrapePerformerURL(url string) (*models.ScrapedPerformer, error) {
func matchPerformer(p *models.ScrapedScenePerformer) error {
qb := models.NewPerformerQueryBuilder()
- performers, err := qb.FindByNames([]string{p.Name}, nil)
+ performers, err := qb.FindByNames([]string{p.Name}, nil, true)
if err != nil {
return err
@@ -146,7 +173,7 @@ func matchPerformer(p *models.ScrapedScenePerformer) error {
func matchStudio(s *models.ScrapedSceneStudio) error {
qb := models.NewStudioQueryBuilder()
- studio, err := qb.FindByName(s.Name, nil)
+ studio, err := qb.FindByName(s.Name, nil, true)
if err != nil {
return err
@@ -161,11 +188,29 @@ func matchStudio(s *models.ScrapedSceneStudio) error {
s.ID = &id
return nil
}
+func matchMovie(m *models.ScrapedSceneMovie) error {
+ qb := models.NewMovieQueryBuilder()
+
+ movies, err := qb.FindByNames([]string{m.Name}, nil, true)
+
+ if err != nil {
+ return err
+ }
+
+ if len(movies) != 1 {
+ // ignore - cannot match
+ return nil
+ }
+
+ id := strconv.Itoa(movies[0].ID)
+ m.ID = &id
+ return nil
+}
func matchTag(s *models.ScrapedSceneTag) error {
qb := models.NewTagQueryBuilder()
- tag, err := qb.FindByName(s.Name, nil)
+ tag, err := qb.FindByName(s.Name, nil, true)
if err != nil {
return err
@@ -189,6 +234,13 @@ func postScrapeScene(ret *models.ScrapedScene) error {
}
}
+ for _, p := range ret.Movies {
+ err := matchMovie(p)
+ if err != nil {
+ return err
+ }
+ }
+
for _, t := range ret.Tags {
err := matchTag(t)
if err != nil {
@@ -203,6 +255,11 @@ func postScrapeScene(ret *models.ScrapedScene) error {
}
}
+ // post-process - set the image if applicable
+ if err := setSceneImage(ret); err != nil {
+ logger.Warnf("Could not set image using URL %s: %s", *ret.Image, err.Error())
+ }
+
return nil
}
diff --git a/pkg/scraper/stash.go b/pkg/scraper/stash.go
index 3de82efae..92aed73d1 100644
--- a/pkg/scraper/stash.go
+++ b/pkg/scraper/stash.go
@@ -4,6 +4,7 @@ import (
"context"
"strconv"
+ "github.com/jinzhu/copier"
"github.com/shurcooL/graphql"
"github.com/stashapp/stash/pkg/models"
@@ -16,7 +17,7 @@ func getStashClient(c scraperTypeConfig) *graphql.Client {
type stashFindPerformerNamePerformer struct {
ID string `json:"id" graphql:"id"`
- Name string `json:"id" graphql:"name"`
+ Name string `json:"name" graphql:"name"`
}
func (p stashFindPerformerNamePerformer) toPerformer() *models.ScrapedPerformer {
@@ -67,12 +68,14 @@ func scrapePerformerFragmentStash(c scraperTypeConfig, scrapedPerformer models.S
client := getStashClient(c)
var q struct {
- FindPerformer *models.ScrapedPerformer `graphql:"findPerformer(id: $f)"`
+ FindPerformer *models.ScrapedPerformerStash `graphql:"findPerformer(id: $f)"`
}
+ performerID := *scrapedPerformer.URL
+
// get the id from the URL field
vars := map[string]interface{}{
- "f": *scrapedPerformer.URL,
+ "f": performerID,
}
err := client.Query(context.Background(), &q, vars)
@@ -80,7 +83,20 @@ func scrapePerformerFragmentStash(c scraperTypeConfig, scrapedPerformer models.S
return nil, err
}
- return q.FindPerformer, nil
+ // need to copy back to a scraped performer
+ ret := models.ScrapedPerformer{}
+ err = copier.Copy(&ret, q.FindPerformer)
+ if err != nil {
+ return nil, err
+ }
+
+ // get the performer image directly
+ ret.Image, err = getStashPerformerImage(c.scraperConfig.StashServer.URL, performerID)
+ if err != nil {
+ return nil, err
+ }
+
+ return &ret, nil
}
func scrapeSceneFragmentStash(c scraperTypeConfig, scene models.SceneUpdateInput) (*models.ScrapedScene, error) {
@@ -99,7 +115,7 @@ func scrapeSceneFragmentStash(c scraperTypeConfig, scene models.SceneUpdateInput
}
var q struct {
- FindScene *models.ScrapedScene `graphql:"findScene(checksum: $c)"`
+ FindScene *models.ScrapedSceneStash `graphql:"findScene(checksum: $c)"`
}
checksum := graphql.String(storedScene.Checksum)
@@ -128,5 +144,18 @@ func scrapeSceneFragmentStash(c scraperTypeConfig, scene models.SceneUpdateInput
}
}
- return q.FindScene, nil
+ // need to copy back to a scraped scene
+ ret := models.ScrapedScene{}
+ err = copier.Copy(&ret, q.FindScene)
+ if err != nil {
+ return nil, err
+ }
+
+	// get the scene image directly
+ ret.Image, err = getStashSceneImage(c.scraperConfig.StashServer.URL, q.FindScene.ID)
+ if err != nil {
+ return nil, err
+ }
+
+ return &ret, nil
}
diff --git a/pkg/scraper/xpath.go b/pkg/scraper/xpath.go
index 745e71437..8c2670870 100644
--- a/pkg/scraper/xpath.go
+++ b/pkg/scraper/xpath.go
@@ -1,7 +1,9 @@
package scraper
import (
+ "bytes"
"errors"
+ "net/http"
"net/url"
"reflect"
"regexp"
@@ -10,11 +12,17 @@ import (
"github.com/antchfx/htmlquery"
"golang.org/x/net/html"
+ "golang.org/x/net/html/charset"
"github.com/stashapp/stash/pkg/logger"
+ "github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
)
+// Timeout for the scrape http request. Includes transfer time. May want to make this
+// configurable at some point.
+const scrapeGetTimeout = time.Second * 30
+
type commonXPathConfig map[string]string
func (c commonXPathConfig) applyCommon(src string) string {
@@ -66,7 +74,12 @@ func (c xpathRegexConfig) apply(value string) string {
return value
}
- return re.ReplaceAllString(value, with)
+ ret := re.ReplaceAllString(value, with)
+
+ logger.Debugf(`Replace: '%s' with '%s'`, regex, with)
+ logger.Debugf("Before: %s", value)
+ logger.Debugf("After: %s", ret)
+ return ret
}
return value
@@ -135,12 +148,28 @@ func (c xpathScraperAttrConfig) getReplace() xpathRegexConfigs {
return ret
}
+func (c xpathScraperAttrConfig) getSubScraper() xpathScraperAttrConfig {
+ const subScraperKey = "subScraper"
+ val, _ := c[subScraperKey]
+
+ if val == nil {
+ return nil
+ }
+
+ asMap, _ := val.(map[interface{}]interface{})
+ if asMap != nil {
+ return xpathScraperAttrConfig(asMap)
+ }
+
+ return nil
+}
+
func (c xpathScraperAttrConfig) concatenateResults(nodes []*html.Node) string {
separator := c.getConcat()
result := []string{}
for _, elem := range nodes {
- text := htmlquery.InnerText(elem)
+ text := NodeText(elem)
text = commonPostProcess(text)
result = append(result, text)
@@ -174,10 +203,45 @@ func (c xpathScraperAttrConfig) replaceRegex(value string) string {
return replace.apply(value)
}
+func (c xpathScraperAttrConfig) applySubScraper(value string) string {
+ subScraper := c.getSubScraper()
+
+ if subScraper == nil {
+ return value
+ }
+
+ logger.Debugf("Sub-scraping for: %s", value)
+ doc, err := loadURL(value, nil)
+
+ if err != nil {
+ logger.Warnf("Error getting URL '%s' for sub-scraper: %s", value, err.Error())
+ return ""
+ }
+
+ found := runXPathQuery(doc, subScraper.getSelector(), nil)
+
+ if len(found) > 0 {
+ // check if we're concatenating the results into a single result
+ var result string
+ if subScraper.hasConcat() {
+ result = subScraper.concatenateResults(found)
+ } else {
+ result = NodeText(found[0])
+ result = commonPostProcess(result)
+ }
+
+ result = subScraper.postProcess(result)
+ return result
+ }
+
+ return ""
+}
+
func (c xpathScraperAttrConfig) postProcess(value string) string {
// perform regex replacements first
value = c.replaceRegex(value)
value = c.parseDate(value)
+ value = c.applySubScraper(value)
return value
}
@@ -219,7 +283,7 @@ func (s xpathScraperConfig) process(doc *html.Node, common commonXPathConfig) xP
if len(found) > 0 {
for i, elem := range found {
- text := htmlquery.InnerText(elem)
+ text := NodeText(elem)
text = commonPostProcess(text)
ret = ret.setKey(i, k, text)
@@ -239,7 +303,7 @@ func (s xpathScraperConfig) process(doc *html.Node, common commonXPathConfig) xP
ret = ret.setKey(i, k, result)
} else {
for i, elem := range found {
- text := htmlquery.InnerText(elem)
+ text := NodeText(elem)
text = commonPostProcess(text)
text = attrConfig.postProcess(text)
@@ -265,6 +329,7 @@ const (
XPathScraperConfigSceneTags = "Tags"
XPathScraperConfigScenePerformers = "Performers"
XPathScraperConfigSceneStudio = "Studio"
+ XPathScraperConfigSceneMovies = "Movies"
)
func (s xpathScraper) GetSceneSimple() xpathScraperConfig {
@@ -274,7 +339,7 @@ func (s xpathScraper) GetSceneSimple() xpathScraperConfig {
if mapped != nil {
for k, v := range mapped {
- if k != XPathScraperConfigSceneTags && k != XPathScraperConfigScenePerformers && k != XPathScraperConfigSceneStudio {
+ if k != XPathScraperConfigSceneTags && k != XPathScraperConfigScenePerformers && k != XPathScraperConfigSceneStudio && k != XPathScraperConfigSceneMovies {
ret[k] = v
}
}
@@ -313,6 +378,10 @@ func (s xpathScraper) GetSceneStudio() xpathScraperConfig {
return s.getSceneSubMap(XPathScraperConfigSceneStudio)
}
+func (s xpathScraper) GetSceneMovies() xpathScraperConfig {
+ return s.getSceneSubMap(XPathScraperConfigSceneMovies)
+}
+
func (s xpathScraper) scrapePerformer(doc *html.Node) (*models.ScrapedPerformer, error) {
var ret models.ScrapedPerformer
@@ -358,13 +427,16 @@ func (s xpathScraper) scrapeScene(doc *html.Node) (*models.ScrapedScene, error)
scenePerformersMap := s.GetScenePerformers()
sceneTagsMap := s.GetSceneTags()
sceneStudioMap := s.GetSceneStudio()
+ sceneMoviesMap := s.GetSceneMovies()
+ logger.Debug(`Processing scene:`)
results := sceneMap.process(doc, s.Common)
if len(results) > 0 {
results[0].apply(&ret)
// now apply the performers and tags
if scenePerformersMap != nil {
+ logger.Debug(`Processing scene performers:`)
performerResults := scenePerformersMap.process(doc, s.Common)
for _, p := range performerResults {
@@ -375,6 +447,7 @@ func (s xpathScraper) scrapeScene(doc *html.Node) (*models.ScrapedScene, error)
}
if sceneTagsMap != nil {
+ logger.Debug(`Processing scene tags:`)
tagResults := sceneTagsMap.process(doc, s.Common)
for _, p := range tagResults {
@@ -385,6 +458,7 @@ func (s xpathScraper) scrapeScene(doc *html.Node) (*models.ScrapedScene, error)
}
if sceneStudioMap != nil {
+ logger.Debug(`Processing scene studio:`)
studioResults := sceneStudioMap.process(doc, s.Common)
if len(studioResults) > 0 {
@@ -393,6 +467,18 @@ func (s xpathScraper) scrapeScene(doc *html.Node) (*models.ScrapedScene, error)
ret.Studio = studio
}
}
+
+ if sceneMoviesMap != nil {
+ logger.Debug(`Processing scene movies:`)
+ movieResults := sceneMoviesMap.process(doc, s.Common)
+
+ for _, p := range movieResults {
+ movie := &models.ScrapedSceneMovie{}
+ p.apply(movie)
+ ret.Movies = append(ret.Movies, movie)
+ }
+
+ }
}
return &ret, nil
@@ -433,10 +519,47 @@ func (r xPathResults) setKey(index int, key string, value string) xPathResults {
r = append(r, make(xPathResult))
}
+ logger.Debugf(`[%d][%s] = %s`, index, key, value)
r[index][key] = value
return r
}
+func loadURL(url string, c *scraperConfig) (*html.Node, error) {
+ client := &http.Client{
+ Timeout: scrapeGetTimeout,
+ }
+ req, err := http.NewRequest("GET", url, nil)
+ if err != nil {
+ return nil, err
+ }
+
+ userAgent := config.GetScraperUserAgent()
+ if userAgent != "" {
+ req.Header.Set("User-Agent", userAgent)
+ }
+
+ resp, err := client.Do(req)
+ if err != nil {
+ return nil, err
+ }
+ defer resp.Body.Close()
+
+ r, err := charset.NewReader(resp.Body, resp.Header.Get("Content-Type"))
+ if err != nil {
+ return nil, err
+ }
+
+ ret, err := html.Parse(r)
+
+ if err == nil && c != nil && c.DebugOptions != nil && c.DebugOptions.PrintHTML {
+ var b bytes.Buffer
+ html.Render(&b, ret)
+ logger.Infof("loadURL (%s) response: \n%s", url, b.String())
+ }
+
+ return ret, err
+}
+
func scrapePerformerURLXpath(c scraperTypeConfig, url string) (*models.ScrapedPerformer, error) {
scraper := c.scraperConfig.XPathScrapers[c.Scraper]
@@ -444,7 +567,7 @@ func scrapePerformerURLXpath(c scraperTypeConfig, url string) (*models.ScrapedPe
return nil, errors.New("xpath scraper with name " + c.Scraper + " not found in config")
}
- doc, err := htmlquery.LoadURL(url)
+ doc, err := loadURL(url, c.scraperConfig)
if err != nil {
return nil, err
@@ -460,7 +583,7 @@ func scrapeSceneURLXPath(c scraperTypeConfig, url string) (*models.ScrapedScene,
return nil, errors.New("xpath scraper with name " + c.Scraper + " not found in config")
}
- doc, err := htmlquery.LoadURL(url)
+ doc, err := loadURL(url, c.scraperConfig)
if err != nil {
return nil, err
@@ -484,7 +607,7 @@ func scrapePerformerNamesXPath(c scraperTypeConfig, name string) ([]*models.Scra
u := c.QueryURL
u = strings.Replace(u, placeholder, escapedName, -1)
- doc, err := htmlquery.LoadURL(u)
+ doc, err := loadURL(u, c.scraperConfig)
if err != nil {
return nil, err
@@ -492,3 +615,10 @@ func scrapePerformerNamesXPath(c scraperTypeConfig, name string) ([]*models.Scra
return scraper.scrapePerformers(doc)
}
+
+func NodeText(n *html.Node) string {
+ if n != nil && n.Type == html.CommentNode {
+ return htmlquery.OutputHTML(n, true)
+ }
+ return htmlquery.InnerText(n)
+}
diff --git a/pkg/utils/crypto.go b/pkg/utils/crypto.go
index 4fd911ccb..ab765b6a0 100644
--- a/pkg/utils/crypto.go
+++ b/pkg/utils/crypto.go
@@ -2,6 +2,7 @@ package utils
import (
"crypto/md5"
+ "crypto/rand"
"fmt"
"io"
"os"
@@ -31,3 +32,9 @@ func MD5FromFilePath(filePath string) (string, error) {
checksum := h.Sum(nil)
return fmt.Sprintf("%x", checksum), nil
}
+
+func GenerateRandomKey(l int) string {
+ b := make([]byte, l)
+ rand.Read(b)
+ return fmt.Sprintf("%x", b)
+}
diff --git a/pkg/utils/file.go b/pkg/utils/file.go
index e029c3073..b80e888c9 100644
--- a/pkg/utils/file.go
+++ b/pkg/utils/file.go
@@ -1,10 +1,12 @@
package utils
import (
+ "archive/zip"
"fmt"
"github.com/h2non/filetype"
"github.com/h2non/filetype/types"
"io/ioutil"
+ "math"
"os"
"os/user"
"path/filepath"
@@ -26,11 +28,8 @@ func FileExists(path string) (bool, error) {
_, err := os.Stat(path)
if err == nil {
return true, nil
- } else if os.IsNotExist(err) {
- return false, err
- } else {
- panic(err)
}
+ return false, err
}
// DirExists returns true if the given path exists and is a directory
@@ -66,7 +65,12 @@ func EnsureDir(path string) error {
return err
}
-// RemoveDir removes the given file path along with all of its contents
+// EnsureDirAll will create a directory at the given path along with any necessary parents if they don't already exist
+func EnsureDirAll(path string) error {
+ return os.MkdirAll(path, 0755)
+}
+
+// RemoveDir removes the given dir (if it exists) along with all of its contents
func RemoveDir(path string) error {
return os.RemoveAll(path)
}
@@ -96,15 +100,6 @@ func EmptyDir(path string) error {
// ListDir will return the contents of a given directory path as a string slice
func ListDir(path string) []string {
- if path == "" {
- path = GetHomeDirectory()
- }
-
- absolutePath, err := filepath.Abs(path)
- if err == nil {
- path = absolutePath
- }
-
files, err := ioutil.ReadDir(path)
if err != nil {
path = filepath.Dir(path)
@@ -133,3 +128,94 @@ func GetHomeDirectory() string {
}
return currentUser.HomeDir
}
+
// IsZipFileUncompressed returns true if the first regular file in the zip
// archive at path is stored without compression (zip.Store). Directories are
// skipped since they always use store-level compression.
func IsZipFileUncompressed(path string) (bool, error) {
	r, err := zip.OpenReader(path)
	if err != nil {
		// return the error to the caller instead of also printing it
		// to stdout; handling belongs at exactly one layer
		return false, err
	}
	defer r.Close()
	for _, f := range r.File {
		if f.FileInfo().IsDir() {
			continue
		}
		// the compression method of the first actual file decides
		return f.Method == zip.Store, nil
	}
	// archive contains no regular files
	return false, nil
}
+
+// humanize code taken from https://github.com/dustin/go-humanize and adjusted
+
// logn returns the logarithm of x in the given base.
func logn(x, base float64) float64 {
	return math.Log(x) / math.Log(base)
}
+
// HumanizeBytes returns a human readable string for a byte count using
// 1024-based units, e.g. 1536 -> "1.5 KB". Values below 10 are reported
// directly in bytes. (Adapted from github.com/dustin/go-humanize.)
func HumanizeBytes(s uint64) string {
	units := []string{"B", "KB", "MB", "GB", "TB", "PB", "EB"}
	if s < 10 {
		return fmt.Sprintf("%d B", s)
	}
	// pick the largest unit such that the scaled value is >= 1
	exp := math.Floor(math.Log(float64(s)) / math.Log(1024))
	scaled := math.Floor(float64(s)/math.Pow(1024, exp)*10+0.5) / 10
	// show one decimal place for small scaled values, none otherwise
	format := "%.0f %s"
	if scaled < 10 {
		format = "%.1f %s"
	}
	return fmt.Sprintf(format, scaled, units[int(exp)])
}
+
+// WriteFile writes file to path creating parent directories if needed
+func WriteFile(path string, file []byte) error {
+ pathErr := EnsureDirAll(filepath.Dir(path))
+ if pathErr != nil {
+ return fmt.Errorf("Cannot ensure path %s", pathErr)
+ }
+
+ err := ioutil.WriteFile(path, file, 0755)
+ if err != nil {
+ return fmt.Errorf("Write error for thumbnail %s: %s ", path, err)
+ }
+ return nil
+}
+
// GetIntraDir returns a string that can be passed to filepath.Join to
// implement directory depth, or "" on invalid arguments.
// E.g. for the pattern 0af63ce3c99162e9df23a997f62621c5 with depth 2 and
// length 3 it returns "0af/63c" (or "0af\63c", depending on OS), usable as
// filepath.Join(directory, intraDir, basename).
func GetIntraDir(pattern string, depth, length int) string {
	if depth < 1 || length < 1 || depth*length > len(pattern) {
		return ""
	}
	// slice the first depth*length characters of the pattern into
	// consecutive chunks of `length`, one per directory level
	parts := make([]string, depth)
	for i := 0; i < depth; i++ {
		parts[i] = pattern[i*length : (i+1)*length]
	}
	return filepath.Join(parts...)
}
+
+func GetDir(path string) string {
+ if path == "" {
+ path = GetHomeDirectory()
+ }
+
+ absolutePath, err := filepath.Abs(path)
+ if err == nil {
+ path = absolutePath
+ }
+ return absolutePath
+}
+
// GetParent returns a pointer to the parent directory of path, or nil when
// path has no parent (it is empty or ends in "/", which this helper treats
// as the root — NOTE(review): Windows-style roots like `C:\` are not
// detected; confirm callers only pass slash-separated paths).
func GetParent(path string) *string {
	if path == "" {
		// BUG FIX: indexing path[len(path)-1:] panicked on ""
		return nil
	}
	if path[len(path)-1:] == "/" {
		return nil
	}
	parentPath := filepath.Clean(path + "/..")
	return &parentPath
}
diff --git a/scripts/check-gofmt.sh b/scripts/check-gofmt.sh
new file mode 100644
index 000000000..7ea5015c8
--- /dev/null
+++ b/scripts/check-gofmt.sh
@@ -0,0 +1,44 @@
+#!/bin/sh
+
+# Copyright (c) 2012 The Go Authors. All rights reserved.
+
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
# Collect the Go files that were added/copied/modified relative to the
# develop branch, excluding vendored code. Nothing to check -> succeed.
gofiles=$(git diff --name-only --diff-filter=ACM develop -- '*.go' ':!vendor')
[ -z "$gofiles" ] && exit 0

# Ask gofmt which of those files are not canonically formatted
# ($gofiles is intentionally unquoted: it is a whitespace-separated list).
unformatted=$(gofmt -l $gofiles)
[ -z "$unformatted" ] && exit 0

# Some files are not gofmt'd. Print message and fail.

echo >&2 "Go files must be formatted with gofmt. Please run:"
for fn in $unformatted; do
	echo >&2 "  gofmt -w $PWD/$fn"
done

exit 1
diff --git a/scripts/cross-compile.sh b/scripts/cross-compile.sh
index daf9930ef..4601b8c85 100755
--- a/scripts/cross-compile.sh
+++ b/scripts/cross-compile.sh
@@ -4,11 +4,11 @@ DATE=`go run -mod=vendor scripts/getDate.go`
GITHASH=`git rev-parse --short HEAD`
STASH_VERSION=`git describe --tags --exclude latest_develop`
VERSION_FLAGS="-X 'github.com/stashapp/stash/pkg/api.version=$STASH_VERSION' -X 'github.com/stashapp/stash/pkg/api.buildstamp=$DATE' -X 'github.com/stashapp/stash/pkg/api.githash=$GITHASH'"
-SETUP="export GO111MODULE=on; export CGO_ENABLED=1;"
-WINDOWS="GOOS=windows GOARCH=amd64 CC=x86_64-w64-mingw32-gcc CXX=x86_64-w64-mingw32-g++ packr2 build -o dist/stash-win.exe -ldflags \"-extldflags '-static' $VERSION_FLAGS\" -tags extended -v -mod=vendor;"
-DARWIN="GOOS=darwin GOARCH=amd64 CC=o64-clang CXX=o64-clang++ packr2 build -o dist/stash-osx -ldflags \"$VERSION_FLAGS\" -tags extended -v -mod=vendor;"
-LINUX="packr2 build -o dist/stash-linux -ldflags \"$VERSION_FLAGS\" -v -mod=vendor;"
-RASPPI="GOOS=linux GOARCH=arm GOARM=5 CC=arm-linux-gnueabi-gcc packr2 build -o dist/stash-pi -ldflags \"$VERSION_FLAGS\" -v -mod=vendor;"
+SETUP="export GO111MODULE=on; export CGO_ENABLED=1; packr2;"
+WINDOWS="echo '=== Building Windows binary ==='; GOOS=windows GOARCH=amd64 CC=x86_64-w64-mingw32-gcc CXX=x86_64-w64-mingw32-g++ go build -o dist/stash-win.exe -ldflags \"-extldflags '-static' $VERSION_FLAGS\" -tags extended -v -mod=vendor;"
+DARWIN="echo '=== Building OSX binary ==='; GOOS=darwin GOARCH=amd64 CC=o64-clang CXX=o64-clang++ go build -o dist/stash-osx -ldflags \"$VERSION_FLAGS\" -tags extended -v -mod=vendor;"
+LINUX="echo '=== Building Linux binary ==='; go build -o dist/stash-linux -ldflags \"$VERSION_FLAGS\" -v -mod=vendor;"
+RASPPI="echo '=== Building Raspberry Pi binary ==='; GOOS=linux GOARCH=arm GOARM=5 CC=arm-linux-gnueabi-gcc go build -o dist/stash-pi -ldflags \"$VERSION_FLAGS\" -v -mod=vendor;"
COMMAND="$SETUP $WINDOWS $DARWIN $LINUX $RASPPI"
diff --git a/static/performer_male/noname_male_01.jpg b/static/performer_male/noname_male_01.jpg
new file mode 100644
index 000000000..f2c6fe51d
Binary files /dev/null and b/static/performer_male/noname_male_01.jpg differ
diff --git a/static/performer_male/noname_male_02.jpg b/static/performer_male/noname_male_02.jpg
new file mode 100644
index 000000000..93ad7ec9d
Binary files /dev/null and b/static/performer_male/noname_male_02.jpg differ
diff --git a/ui/login/login.css b/ui/login/login.css
new file mode 100644
index 000000000..e0ed5f797
--- /dev/null
+++ b/ui/login/login.css
@@ -0,0 +1,117 @@
+/* try to reflect the default css as much as possible */
+* {
+ box-sizing: border-box;
+}
+html {
+ font-size: 14px;
+}
+
+body {
+ background-color: #202b33;
+ color: #f5f8fa;
+ font-family: -apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"Noto Sans",sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji";
+ -webkit-font-smoothing: antialiased;
+ -moz-osx-font-smoothing: grayscale;
+ margin: 0;
+ padding: 0;
+ overflow-y: hidden;
+}
+
+h6 {
+ font-size: 1rem;
+ margin-top: 0;
+ margin-bottom: .5rem;
+ font-weight: 500;
+ line-height: 1.2;
+}
+
+button, input {
+ margin: 0;
+ font-family: inherit;
+ font-size: inherit;
+ line-height: inherit;
+}
+
+.card {
+ background-color: #30404d;
+ border-radius: 3px;
+ box-shadow: 0 0 0 1px rgba(16,22,26,.4), 0 0 0 rgba(16,22,26,0), 0 0 0 rgba(16,22,26,0);
+ padding: 20px;
+}
+
+.dialog {
+ display: flex;
+ align-items: center;
+ justify-content: center;
+
+ width: 100%;
+ height: 100vh;
+ padding-right: 15px;
+ padding-left: 15px;
+ margin-right: auto;
+ margin-left: auto;
+}
+
+.form-group {
+ margin-bottom: 1rem;
+}
+
+.form-control {
+ display: block;
+ width: 100%;
+ height: calc(1.5em + .75rem + 2px);
+ padding: .375rem .75rem;
+ font-size: 1rem;
+ font-weight: 400;
+ line-height: 1.5;
+ color: #495057;
+ background-clip: padding-box;
+ border: 1px solid #ced4da;
+ border-radius: .25rem;
+ -webkit-transition: border-color .15s ease-in-out,box-shadow .15s ease-in-out;
+ transition: border-color .15s ease-in-out,box-shadow .15s ease-in-out;
+}
+
+.text-input {
+ border: 0;
+ box-shadow: 0 0 0 0 rgba(19,124,189,0), 0 0 0 0 rgba(19,124,189,0), 0 0 0 0 rgba(19,124,189,0), inset 0 0 0 1px rgba(16,22,26,.3), inset 0 1px 1px rgba(16,22,26,.4);
+ color: #f5f8fa;
+}
+
+.text-input, .text-input:focus, .text-input[readonly] {
+ background-color: rgba(16,22,26,.3);
+}
+
+.btn {
+ display: inline-block;
+ font-weight: 400;
+ color: #212529;
+ text-align: center;
+ vertical-align: middle;
+ cursor: pointer;
+ -webkit-user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ user-select: none;
+ background-color: initial;
+ border: 1px solid transparent;
+ padding: .375rem .75rem;
+ font-size: 1rem;
+ line-height: 1.5;
+ border-radius: .25rem;
+ -webkit-transition: color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;
+ transition: color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;
+}
+
+.btn-primary {
+ color: #fff;
+ background-color: #137cbd;
+ border-color: #137cbd;
+}
+
+.login-error {
+ color: #db3737;
+ font-size: 80%;
+ font-weight: 500;
+ padding-bottom: 1rem;
+}
diff --git a/ui/login/login.html b/ui/login/login.html
new file mode 100644
index 000000000..c8ad61256
--- /dev/null
+++ b/ui/login/login.html
@@ -0,0 +1,40 @@
+
+
+
+
+
+ Login
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/ui/setup/migrate.html b/ui/setup/migrate.html
new file mode 100644
index 000000000..632f30144
--- /dev/null
+++ b/ui/setup/migrate.html
@@ -0,0 +1,37 @@
+
+
+
+
+ Stash
+
+
+
+
+
+
+
+
+
+ Your current stash database is schema version {{.ExistingVersion}} and needs to be migrated to version {{.MigrateVersion}}.
+ This version of Stash will not function without migrating the database. The schema migration process is not reversible. Once the migration is
+ performed, your database will be incompatible with previous versions of stash.
+
+
+
+ It is recommended that you backup your existing database before you migrate. We can do this for you, writing a backup to {{.BackupPath}} if required.
+
")),Object(s.a)(o.icon,"jw-idle-label"),o.icon.appendChild(l))}return o}var n,i,o;return function(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&ve(t,e)}(e,t),n=e,(i=[{key:"element",value:function(){return this.el}}])&&ge(n.prototype,i),o&&ge(n,o),e}(r.a);function xe(t,e){for(var n=0;n0&&void 0!==arguments[0]?arguments[0]:"",e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:"",i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:"";return'
")},Le={link:function(t){var e=t.link,n=t.title,i=t.logo;return'').concat(i).concat(n||"","")},info:function(t,e){return'")},share:function(t,e){return'")},keyboardShortcuts:function(t,e){return'")}},Be=n(23),_e=n(6),Ve=n(13);function Ae(t,e){for(var n=0;nJW Player '.concat(t,""),a={items:[{type:"info"},{title:Object(Ve.e)(i)?"".concat(o," ").concat(i):"".concat(i," ").concat(o),type:"link",featured:!0,showLogo:!0,link:"https://jwplayer.com/learn-more?e=".concat(Ne[n])}]},r=e.get("provider"),l=a.items;if(r&&r.name.indexOf("flash")>=0){var s="Flash Version "+Object(_e.a)();l.push({title:s,type:"link",link:"http://www.adobe.com/software/flash/about/"})}return this.shortcutsTooltip&&l.splice(l.length-1,0,{type:"keyboardShortcuts"}),a}},{key:"rightClick",value:function(t){if(this.lazySetup(),this.mouseOverContext)return!1;this.hideMenu(),this.showMenu(t),this.addHideMenuHandlers()}},{key:"getOffset",value:function(t){var e=Object(s.c)(this.wrapperElement),n=t.pageX-e.left,i=t.pageY-e.top;return this.model.get("touchMode")&&(i-=100),{x:n,y:i}}},{key:"showMenu",value:function(t){var e=this,n=this.getOffset(t);return this.el.style.left=n.x+"px",this.el.style.top=n.y+"px",this.outCount=0,Object(s.a)(this.playerContainer,"jw-flag-rightclick-open"),Object(s.a)(this.el,"jw-open"),clearTimeout(this._menuTimeout),this._menuTimeout=setTimeout((function(){return e.hideMenu()}),3e3),!1}},{key:"hideMenu",value:function(t){t&&this.el&&this.el.contains(t.target)||(Object(s.o)(this.playerContainer,"jw-flag-rightclick-open"),Object(s.o)(this.el,"jw-open"))}},{key:"lazySetup",value:function(){var t,e,n,i,o=this,a=(t=this.buildArray(),e=this.model.get("localization"),n=t.items,i=(void 0===n?[]:n).map((function(t){return Ee(t,e)})),'
'+'
'.concat(i.join(""),"
")+"
");if(this.el){if(this.html!==a){this.html=a;var r=He(a);Object(s.h)(this.el);for(var l=r.childNodes.length;l--;)this.el.appendChild(r.firstChild)}}else this.html=a,this.el=He(this.html),this.wrapperElement.appendChild(this.el),this.hideMenuHandler=function(t){return o.hideMenu(t)},this.overHandler=function(){o.mouseOverContext=!0},this.outHandler=function(t){o.mouseOverContext=!1,t.relatedTarget&&!o.el.contains(t.relatedTarget)&&++o.outCount>1&&o.hideMenu()},this.infoOverlayHandler=function(){o.mouseOverContext=!1,o.hideMenu(),o.infoOverlay.open()},this.shortcutsTooltipHandler=function(){o.mouseOverContext=!1,o.hideMenu(),o.shortcutsTooltip.open()}}},{key:"setup",value:function(t,e,n){this.wrapperElement=n,this.model=t,this.mouseOverContext=!1,this.playerContainer=e,this.ui=new u.a(n).on("longPress",this.rightClick,this)}},{key:"addHideMenuHandlers",value:function(){this.removeHideMenuHandlers(),this.wrapperElement.addEventListener("touchstart",this.hideMenuHandler),document.addEventListener("touchstart",this.hideMenuHandler),o.OS.mobile||(this.wrapperElement.addEventListener("click",this.hideMenuHandler),document.addEventListener("click",this.hideMenuHandler),this.el.addEventListener("mouseover",this.overHandler),this.el.addEventListener("mouseout",this.outHandler)),this.el.querySelector(".jw-info-overlay-item").addEventListener("click",this.infoOverlayHandler),this.shortcutsTooltip&&this.el.querySelector(".jw-shortcuts-item").addEventListener("click",this.shortcutsTooltipHandler)}},{key:"removeHideMenuHandlers",value:function(){this.wrapperElement&&(this.wrapperElement.removeEventListener("click",this.hideMenuHandler),this.wrapperElement.removeEventListener("touchstart",this.hideMenuHandler)),this.el&&(this.el.querySelector(".jw-info-overlay-item").removeEventListener("click",this.infoOverlayHandler),this.el.removeEventListener("mouseover",this.overHandler),this.el.removeEventListener("mouseout",this.outHandler),this.shortcutsTooltip&&this.el.querySelector(".jw-s
hortcuts-item").removeEventListener("click",this.shortcutsTooltipHandler)),document.removeEventListener("click",this.hideMenuHandler),document.removeEventListener("touchstart",this.hideMenuHandler)}},{key:"destroy",value:function(){clearTimeout(this._menuTimeout),this.removeHideMenuHandlers(),this.el&&(this.hideMenu(),this.hideMenuHandler=null,this.el=null),this.wrapperElement&&(this.wrapperElement.oncontextmenu=null,this.wrapperElement=null),this.model&&(this.model=null),this.ui&&(this.ui.destroy(),this.ui=null)}}])&&Ae(e.prototype,n),i&&Ae(e,i),t}(),Ie=function(t){return'")},Re=function(t){return'"},qe=function(t){return'"};function De(t){return(De="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function Ue(t,e){return!e||"object"!==De(e)&&"function"!=typeof e?function(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}(t):e}function Fe(t){return(Fe=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)})(t)}function We(t,e){return(We=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t})(t,e)}function Ze(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function Ke(t,e){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:Ie;Ze(this,t),this.el=Object(s.e)(i(e)),this.ui=new u.a(this.el).on("click tap enter",n,this)}return Xe(t,[{key:"destroy",value:function(){this.ui.destroy()}}]),t}(),Je=function(t){function e(t,n){var i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:qe;return Ze(this,e),Ue(this,Fe(e).call(this,t,n,i))}return function(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a 
function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&We(t,e)}(e,t),Xe(e,[{key:"activate",value:function(){Object(s.v)(this.el,"jw-settings-item-active",!0),this.el.setAttribute("aria-checked","true"),this.active=!0}},{key:"deactivate",value:function(){Object(s.v)(this.el,"jw-settings-item-active",!1),this.el.setAttribute("aria-checked","false"),this.active=!1}}]),e}(Ge),Qe=function(t,e){return t?'
"};function ht(e,t){for(var i=0;i'.concat(a.playback,"")),Object(l.a)(o.icon,"jw-idle-label"),o.icon.appendChild(s))}return o}var i,n,o;return function(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),t&&vt(e,t)}(t,e),i=t,(n=[{key:"element",value:function(){return this.el}}])&&jt(i.prototype,n),o&&jt(i,o),t}(r.a);function kt(e,t){for(var i=0;i0&&void 0!==arguments[0]?arguments[0]:"",t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:"",n=arguments.length>3&&void 0!==arguments[3]?arguments[3]:"";return'
")},At={link:function(e){var t=e.link,i=e.title,n=e.logo;return'').concat(n).concat(i||"","")},info:function(e,t){return'")},share:function(e,t){return'")},keyboardShortcuts:function(e,t){return'")}},Pt=i(23),zt=i(6),Lt=i(13);function Bt(e,t){for(var i=0;iJW Player '.concat(e,""),a={items:[{type:"info"},{title:Object(Lt.e)(n)?"".concat(o," ").concat(n):"".concat(n," ").concat(o),type:"link",featured:!0,showLogo:!0,link:"https://jwplayer.com/learn-more?e=".concat(It[i])}]},r=t.get("provider"),s=a.items;if(r&&r.name.indexOf("flash")>=0){var l="Flash Version "+Object(zt.a)();s.push({title:l,type:"link",link:"http://www.adobe.com/software/flash/about/"})}return this.shortcutsTooltip&&s.splice(s.length-1,0,{type:"keyboardShortcuts"}),a}},{key:"rightClick",value:function(e){if(this.lazySetup(),this.mouseOverContext)return!1;this.hideMenu(),this.showMenu(e),this.addHideMenuHandlers()}},{key:"getOffset",value:function(e){var t=Object(l.c)(this.wrapperElement),i=e.pageX-t.left,n=e.pageY-t.top;return this.model.get("touchMode")&&(n-=100),{x:i,y:n}}},{key:"showMenu",value:function(e){var t=this,i=this.getOffset(e);return this.el.style.left=i.x+"px",this.el.style.top=i.y+"px",this.outCount=0,Object(l.a)(this.playerContainer,"jw-flag-rightclick-open"),Object(l.a)(this.el,"jw-open"),clearTimeout(this._menuTimeout),this._menuTimeout=setTimeout((function(){return t.hideMenu()}),3e3),!1}},{key:"hideMenu",value:function(e){e&&this.el&&this.el.contains(e.target)||(Object(l.o)(this.playerContainer,"jw-flag-rightclick-open"),Object(l.o)(this.el,"jw-open"))}},{key:"lazySetup",value:function(){var e,t,i,n,o=this,a=(e=this.buildArray(),t=this.model.get("localization"),i=e.items,n=(void 0===i?[]:i).map((function(e){return Et(e,t)})),'
'+'
'.concat(n.join(""),"
")+"
");if(this.el){if(this.html!==a){this.html=a;var r=Rt(a);Object(l.h)(this.el);for(var s=r.childNodes.length;s--;)this.el.appendChild(r.firstChild)}}else this.html=a,this.el=Rt(this.html),this.wrapperElement.appendChild(this.el),this.hideMenuHandler=function(e){return o.hideMenu(e)},this.overHandler=function(){o.mouseOverContext=!0},this.outHandler=function(e){o.mouseOverContext=!1,e.relatedTarget&&!o.el.contains(e.relatedTarget)&&++o.outCount>1&&o.hideMenu()},this.infoOverlayHandler=function(){o.mouseOverContext=!1,o.hideMenu(),o.infoOverlay.open()},this.shortcutsTooltipHandler=function(){o.mouseOverContext=!1,o.hideMenu(),o.shortcutsTooltip.open()}}},{key:"setup",value:function(e,t,i){this.wrapperElement=i,this.model=e,this.mouseOverContext=!1,this.playerContainer=t,this.ui=new u.a(i).on("longPress",this.rightClick,this)}},{key:"addHideMenuHandlers",value:function(){this.removeHideMenuHandlers(),this.wrapperElement.addEventListener("touchstart",this.hideMenuHandler),document.addEventListener("touchstart",this.hideMenuHandler),o.OS.mobile||(this.wrapperElement.addEventListener("click",this.hideMenuHandler),document.addEventListener("click",this.hideMenuHandler),this.el.addEventListener("mouseover",this.overHandler),this.el.addEventListener("mouseout",this.outHandler)),this.el.querySelector(".jw-info-overlay-item").addEventListener("click",this.infoOverlayHandler),this.shortcutsTooltip&&this.el.querySelector(".jw-shortcuts-item").addEventListener("click",this.shortcutsTooltipHandler)}},{key:"removeHideMenuHandlers",value:function(){this.wrapperElement&&(this.wrapperElement.removeEventListener("click",this.hideMenuHandler),this.wrapperElement.removeEventListener("touchstart",this.hideMenuHandler)),this.el&&(this.el.querySelector(".jw-info-overlay-item").removeEventListener("click",this.infoOverlayHandler),this.el.removeEventListener("mouseover",this.overHandler),this.el.removeEventListener("mouseout",this.outHandler),this.shortcutsTooltip&&this.el.querySelector(".jw-s
hortcuts-item").removeEventListener("click",this.shortcutsTooltipHandler)),document.removeEventListener("click",this.hideMenuHandler),document.removeEventListener("touchstart",this.hideMenuHandler)}},{key:"destroy",value:function(){clearTimeout(this._menuTimeout),this.removeHideMenuHandlers(),this.el&&(this.hideMenu(),this.hideMenuHandler=null,this.el=null),this.wrapperElement&&(this.wrapperElement.oncontextmenu=null,this.wrapperElement=null),this.model&&(this.model=null),this.ui&&(this.ui.destroy(),this.ui=null)}}])&&Bt(t.prototype,i),n&&Bt(t,n),e}(),Nt=function(e){return'")},Ht=function(e){return'"},Ft=function(e){return'"};function Dt(e){return(Dt="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function qt(e,t){return!t||"object"!==Dt(t)&&"function"!=typeof t?function(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}(e):t}function Ut(e){return(Ut=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}function Wt(e,t){return(Wt=Object.setPrototypeOf||function(e,t){return e.__proto__=t,e})(e,t)}function Qt(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}function Yt(e,t){for(var i=0;i2&&void 0!==arguments[2]?arguments[2]:Nt;Qt(this,e),this.el=Object(l.e)(n(t)),this.ui=new u.a(this.el).on("click tap enter",i,this)}return Xt(e,[{key:"destroy",value:function(){this.ui.destroy()}}]),e}(),Zt=function(e){function t(e,i){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:Ft;return Qt(this,t),qt(this,Ut(t).call(this,e,i,n))}return function(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a 
function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),t&&Wt(e,t)}(t,e),Xt(t,[{key:"activate",value:function(){Object(l.v)(this.el,"jw-settings-item-active",!0),this.el.setAttribute("aria-checked","true"),this.active=!0}},{key:"deactivate",value:function(){Object(l.v)(this.el,"jw-settings-item-active",!1),this.el.setAttribute("aria-checked","false"),this.active=!1}}]),t}(Jt),Gt=function(e,t){return e?'
")},zt={link:function(e){var t=e.link,n=e.title,i=e.logo;return'').concat(i).concat(n||"","")},info:function(e,t){return'")},share:function(e,t){return'")},keyboardShortcuts:function(e,t){return'")}},Pt=n(23),At=n(6),Lt=n(13);function Rt(e,t){for(var n=0;nJW Player '.concat(e,""),a={items:[{type:"info"},{title:Object(Lt.e)(i)?"".concat(o," ").concat(i):"".concat(i," ").concat(o),type:"link",featured:!0,showLogo:!0,link:"https://jwplayer.com/learn-more?e=".concat(It[n])}]},r=t.get("provider"),s=a.items;if(r&&r.name.indexOf("flash")>=0){var l="Flash Version "+Object(At.a)();s.push({title:l,type:"link",link:"http://www.adobe.com/software/flash/about/"})}return this.shortcutsTooltip&&s.splice(s.length-1,0,{type:"keyboardShortcuts"}),a}},{key:"rightClick",value:function(e){if(this.lazySetup(),this.mouseOverContext)return!1;this.hideMenu(),this.showMenu(e),this.addHideMenuHandlers()}},{key:"getOffset",value:function(e){var t=Object(l.c)(this.wrapperElement),n=e.pageX-t.left,i=e.pageY-t.top;return this.model.get("touchMode")&&(i-=100),{x:n,y:i}}},{key:"showMenu",value:function(e){var t=this,n=this.getOffset(e);return this.el.style.left=n.x+"px",this.el.style.top=n.y+"px",this.outCount=0,Object(l.a)(this.playerContainer,"jw-flag-rightclick-open"),Object(l.a)(this.el,"jw-open"),clearTimeout(this._menuTimeout),this._menuTimeout=setTimeout((function(){return t.hideMenu()}),3e3),!1}},{key:"hideMenu",value:function(e){e&&this.el&&this.el.contains(e.target)||(Object(l.o)(this.playerContainer,"jw-flag-rightclick-open"),Object(l.o)(this.el,"jw-open"))}},{key:"lazySetup",value:function(){var e,t,n,i,o=this,a=(e=this.buildArray(),t=this.model.get("localization"),n=e.items,i=(void 0===n?[]:n).map((function(e){return _t(e,t)})),'
'+'
'.concat(i.join(""),"
")+"
");if(this.el){if(this.html!==a){this.html=a;var r=Bt(a);Object(l.h)(this.el);for(var s=r.childNodes.length;s--;)this.el.appendChild(r.firstChild)}}else this.html=a,this.el=Bt(this.html),this.wrapperElement.appendChild(this.el),this.hideMenuHandler=function(e){return o.hideMenu(e)},this.overHandler=function(){o.mouseOverContext=!0},this.outHandler=function(e){o.mouseOverContext=!1,e.relatedTarget&&!o.el.contains(e.relatedTarget)&&++o.outCount>1&&o.hideMenu()},this.infoOverlayHandler=function(){o.mouseOverContext=!1,o.hideMenu(),o.infoOverlay.open()},this.shortcutsTooltipHandler=function(){o.mouseOverContext=!1,o.hideMenu(),o.shortcutsTooltip.open()}}},{key:"setup",value:function(e,t,n){this.wrapperElement=n,this.model=e,this.mouseOverContext=!1,this.playerContainer=t,this.ui=new u.a(n).on("longPress",this.rightClick,this)}},{key:"addHideMenuHandlers",value:function(){this.removeHideMenuHandlers(),this.wrapperElement.addEventListener("touchstart",this.hideMenuHandler),document.addEventListener("touchstart",this.hideMenuHandler),o.OS.mobile||(this.wrapperElement.addEventListener("click",this.hideMenuHandler),document.addEventListener("click",this.hideMenuHandler),this.el.addEventListener("mouseover",this.overHandler),this.el.addEventListener("mouseout",this.outHandler)),this.el.querySelector(".jw-info-overlay-item").addEventListener("click",this.infoOverlayHandler),this.shortcutsTooltip&&this.el.querySelector(".jw-shortcuts-item").addEventListener("click",this.shortcutsTooltipHandler)}},{key:"removeHideMenuHandlers",value:function(){this.wrapperElement&&(this.wrapperElement.removeEventListener("click",this.hideMenuHandler),this.wrapperElement.removeEventListener("touchstart",this.hideMenuHandler)),this.el&&(this.el.querySelector(".jw-info-overlay-item").removeEventListener("click",this.infoOverlayHandler),this.el.removeEventListener("mouseover",this.overHandler),this.el.removeEventListener("mouseout",this.outHandler),this.shortcutsTooltip&&this.el.querySelector(".jw-s
hortcuts-item").removeEventListener("click",this.shortcutsTooltipHandler)),document.removeEventListener("click",this.hideMenuHandler),document.removeEventListener("touchstart",this.hideMenuHandler)}},{key:"destroy",value:function(){clearTimeout(this._menuTimeout),this.removeHideMenuHandlers(),this.el&&(this.hideMenu(),this.hideMenuHandler=null,this.el=null),this.wrapperElement&&(this.wrapperElement.oncontextmenu=null,this.wrapperElement=null),this.model&&(this.model=null),this.ui&&(this.ui.destroy(),this.ui=null)}}])&&Rt(t.prototype,n),i&&Rt(t,i),e}(),Nt=function(e){return'")},Ht=function(e){return'"},Ft=function(e){return'"};function qt(e){return(qt="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function Dt(e,t){return!t||"object"!==qt(t)&&"function"!=typeof t?function(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}(e):t}function Ut(e){return(Ut=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}function Wt(e,t){return(Wt=Object.setPrototypeOf||function(e,t){return e.__proto__=t,e})(e,t)}function Qt(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}function Yt(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:Nt;Qt(this,e),this.el=Object(l.e)(i(t)),this.ui=new u.a(this.el).on("click tap enter",n,this)}return Xt(e,[{key:"destroy",value:function(){this.ui.destroy()}}]),e}(),Jt=function(e){function t(e,n){var i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:Ft;return Qt(this,t),Dt(this,Ut(t).call(this,e,n,i))}return function(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a 
function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),t&&Wt(e,t)}(t,e),Xt(t,[{key:"activate",value:function(){Object(l.v)(this.el,"jw-settings-item-active",!0),this.el.setAttribute("aria-checked","true"),this.active=!0}},{key:"deactivate",value:function(){Object(l.v)(this.el,"jw-settings-item-active",!1),this.el.setAttribute("aria-checked","false"),this.active=!1}}]),t}(Kt),Gt=function(e,t){return e?'
"};function fe(t,e){for(var i=0;i'.concat(a.playback,"")),Object(l.a)(o.icon,"jw-idle-label"),o.icon.appendChild(s))}return o}var i,n,o;return function(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&ve(t,e)}(e,t),i=e,(n=[{key:"element",value:function(){return this.el}}])&&je(i.prototype,n),o&&je(i,o),e}(r.a);function ke(t,e){for(var i=0;i0&&void 0!==arguments[0]?arguments[0]:"",e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:"",n=arguments.length>3&&void 0!==arguments[3]?arguments[3]:"";return'
")},Ie={link:function(t){var e=t.link,i=t.title,n=t.logo;return'').concat(n).concat(i||"","")},info:function(t,e){return'")},share:function(t,e){return'")},keyboardShortcuts:function(t,e){return'")}},Le=i(23),Ae=i(6),Pe=i(13);function Re(t,e){for(var i=0;iJW Player '.concat(t,""),a={items:[{type:"info"},{title:Object(Pe.e)(n)?"".concat(o," ").concat(n):"".concat(n," ").concat(o),type:"link",featured:!0,showLogo:!0,link:"https://jwplayer.com/learn-more?e=".concat(ze[i])}]},r=e.get("provider"),s=a.items;if(r&&r.name.indexOf("flash")>=0){var l="Flash Version "+Object(Ae.a)();s.push({title:l,type:"link",link:"http://www.adobe.com/software/flash/about/"})}return this.shortcutsTooltip&&s.splice(s.length-1,0,{type:"keyboardShortcuts"}),a}},{key:"rightClick",value:function(t){if(this.lazySetup(),this.mouseOverContext)return!1;this.hideMenu(),this.showMenu(t),this.addHideMenuHandlers()}},{key:"getOffset",value:function(t){var e=Object(l.c)(this.wrapperElement),i=t.pageX-e.left,n=t.pageY-e.top;return this.model.get("touchMode")&&(n-=100),{x:i,y:n}}},{key:"showMenu",value:function(t){var e=this,i=this.getOffset(t);return this.el.style.left=i.x+"px",this.el.style.top=i.y+"px",this.outCount=0,Object(l.a)(this.playerContainer,"jw-flag-rightclick-open"),Object(l.a)(this.el,"jw-open"),clearTimeout(this._menuTimeout),this._menuTimeout=setTimeout((function(){return e.hideMenu()}),3e3),!1}},{key:"hideMenu",value:function(t){t&&this.el&&this.el.contains(t.target)||(Object(l.o)(this.playerContainer,"jw-flag-rightclick-open"),Object(l.o)(this.el,"jw-open"))}},{key:"lazySetup",value:function(){var t,e,i,n,o=this,a=(t=this.buildArray(),e=this.model.get("localization"),i=t.items,n=(void 0===i?[]:i).map((function(t){return Ee(t,e)})),'
'+'
'.concat(n.join(""),"
")+"
");if(this.el){if(this.html!==a){this.html=a;var r=Be(a);Object(l.h)(this.el);for(var s=r.childNodes.length;s--;)this.el.appendChild(r.firstChild)}}else this.html=a,this.el=Be(this.html),this.wrapperElement.appendChild(this.el),this.hideMenuHandler=function(t){return o.hideMenu(t)},this.overHandler=function(){o.mouseOverContext=!0},this.outHandler=function(t){o.mouseOverContext=!1,t.relatedTarget&&!o.el.contains(t.relatedTarget)&&++o.outCount>1&&o.hideMenu()},this.infoOverlayHandler=function(){o.mouseOverContext=!1,o.hideMenu(),o.infoOverlay.open()},this.shortcutsTooltipHandler=function(){o.mouseOverContext=!1,o.hideMenu(),o.shortcutsTooltip.open()}}},{key:"setup",value:function(t,e,i){this.wrapperElement=i,this.model=t,this.mouseOverContext=!1,this.playerContainer=e,this.ui=new u.a(i).on("longPress",this.rightClick,this)}},{key:"addHideMenuHandlers",value:function(){this.removeHideMenuHandlers(),this.wrapperElement.addEventListener("touchstart",this.hideMenuHandler),document.addEventListener("touchstart",this.hideMenuHandler),o.OS.mobile||(this.wrapperElement.addEventListener("click",this.hideMenuHandler),document.addEventListener("click",this.hideMenuHandler),this.el.addEventListener("mouseover",this.overHandler),this.el.addEventListener("mouseout",this.outHandler)),this.el.querySelector(".jw-info-overlay-item").addEventListener("click",this.infoOverlayHandler),this.shortcutsTooltip&&this.el.querySelector(".jw-shortcuts-item").addEventListener("click",this.shortcutsTooltipHandler)}},{key:"removeHideMenuHandlers",value:function(){this.wrapperElement&&(this.wrapperElement.removeEventListener("click",this.hideMenuHandler),this.wrapperElement.removeEventListener("touchstart",this.hideMenuHandler)),this.el&&(this.el.querySelector(".jw-info-overlay-item").removeEventListener("click",this.infoOverlayHandler),this.el.removeEventListener("mouseover",this.overHandler),this.el.removeEventListener("mouseout",this.outHandler),this.shortcutsTooltip&&this.el.querySelector(".jw-s
hortcuts-item").removeEventListener("click",this.shortcutsTooltipHandler)),document.removeEventListener("click",this.hideMenuHandler),document.removeEventListener("touchstart",this.hideMenuHandler)}},{key:"destroy",value:function(){clearTimeout(this._menuTimeout),this.removeHideMenuHandlers(),this.el&&(this.hideMenu(),this.hideMenuHandler=null,this.el=null),this.wrapperElement&&(this.wrapperElement.oncontextmenu=null,this.wrapperElement=null),this.model&&(this.model=null),this.ui&&(this.ui.destroy(),this.ui=null)}}])&&Re(e.prototype,i),n&&Re(e,n),t}(),Ne=function(t){return'")},He=function(t){return'"},Fe=function(t){return'"};function De(t){return(De="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function qe(t,e){return!e||"object"!==De(e)&&"function"!=typeof e?function(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}(t):e}function Ue(t){return(Ue=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)})(t)}function We(t,e){return(We=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t})(t,e)}function Qe(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function Ye(t,e){for(var i=0;i2&&void 0!==arguments[2]?arguments[2]:Ne;Qe(this,t),this.el=Object(l.e)(n(e)),this.ui=new u.a(this.el).on("click tap enter",i,this)}return Xe(t,[{key:"destroy",value:function(){this.ui.destroy()}}]),t}(),Ze=function(t){function e(t,i){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:Fe;return Qe(this,e),qe(this,Ue(e).call(this,t,i,n))}return function(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a 
function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&We(t,e)}(e,t),Xe(e,[{key:"activate",value:function(){Object(l.v)(this.el,"jw-settings-item-active",!0),this.el.setAttribute("aria-checked","true"),this.active=!0}},{key:"deactivate",value:function(){Object(l.v)(this.el,"jw-settings-item-active",!1),this.el.setAttribute("aria-checked","false"),this.active=!1}}]),e}(Je),Ge=function(t,e){return t?'
")},ze={link:function(t){var e=t.link,n=t.title,i=t.logo;return'').concat(i).concat(n||"","")},info:function(t,e){return'")},share:function(t,e){return'")},keyboardShortcuts:function(t,e){return'")}},Pe=n(23),Ae=n(6),Ie=n(13);function Re(t,e){for(var n=0;nJW Player '.concat(t,""),a={items:[{type:"info"},{title:Object(Ie.e)(i)?"".concat(o," ").concat(i):"".concat(i," ").concat(o),type:"link",featured:!0,showLogo:!0,link:"https://jwplayer.com/learn-more?e=".concat(Le[n])}]},r=e.get("provider"),s=a.items;if(r&&r.name.indexOf("flash")>=0){var l="Flash Version "+Object(Ae.a)();s.push({title:l,type:"link",link:"http://www.adobe.com/software/flash/about/"})}return this.shortcutsTooltip&&s.splice(s.length-1,0,{type:"keyboardShortcuts"}),a}},{key:"rightClick",value:function(t){if(this.lazySetup(),this.mouseOverContext)return!1;this.hideMenu(),this.showMenu(t),this.addHideMenuHandlers()}},{key:"getOffset",value:function(t){var e=Object(l.c)(this.wrapperElement),n=t.pageX-e.left,i=t.pageY-e.top;return this.model.get("touchMode")&&(i-=100),{x:n,y:i}}},{key:"showMenu",value:function(t){var e=this,n=this.getOffset(t);return this.el.style.left=n.x+"px",this.el.style.top=n.y+"px",this.outCount=0,Object(l.a)(this.playerContainer,"jw-flag-rightclick-open"),Object(l.a)(this.el,"jw-open"),clearTimeout(this._menuTimeout),this._menuTimeout=setTimeout((function(){return e.hideMenu()}),3e3),!1}},{key:"hideMenu",value:function(t){t&&this.el&&this.el.contains(t.target)||(Object(l.o)(this.playerContainer,"jw-flag-rightclick-open"),Object(l.o)(this.el,"jw-open"))}},{key:"lazySetup",value:function(){var t,e,n,i,o=this,a=(t=this.buildArray(),e=this.model.get("localization"),n=t.items,i=(void 0===n?[]:n).map((function(t){return Ee(t,e)})),'
'+'
'.concat(i.join(""),"
")+"
");if(this.el){if(this.html!==a){this.html=a;var r=Be(a);Object(l.h)(this.el);for(var s=r.childNodes.length;s--;)this.el.appendChild(r.firstChild)}}else this.html=a,this.el=Be(this.html),this.wrapperElement.appendChild(this.el),this.hideMenuHandler=function(t){return o.hideMenu(t)},this.overHandler=function(){o.mouseOverContext=!0},this.outHandler=function(t){o.mouseOverContext=!1,t.relatedTarget&&!o.el.contains(t.relatedTarget)&&++o.outCount>1&&o.hideMenu()},this.infoOverlayHandler=function(){o.mouseOverContext=!1,o.hideMenu(),o.infoOverlay.open()},this.shortcutsTooltipHandler=function(){o.mouseOverContext=!1,o.hideMenu(),o.shortcutsTooltip.open()}}},{key:"setup",value:function(t,e,n){this.wrapperElement=n,this.model=t,this.mouseOverContext=!1,this.playerContainer=e,this.ui=new u.a(n).on("longPress",this.rightClick,this)}},{key:"addHideMenuHandlers",value:function(){this.removeHideMenuHandlers(),this.wrapperElement.addEventListener("touchstart",this.hideMenuHandler),document.addEventListener("touchstart",this.hideMenuHandler),o.OS.mobile||(this.wrapperElement.addEventListener("click",this.hideMenuHandler),document.addEventListener("click",this.hideMenuHandler),this.el.addEventListener("mouseover",this.overHandler),this.el.addEventListener("mouseout",this.outHandler)),this.el.querySelector(".jw-info-overlay-item").addEventListener("click",this.infoOverlayHandler),this.shortcutsTooltip&&this.el.querySelector(".jw-shortcuts-item").addEventListener("click",this.shortcutsTooltipHandler)}},{key:"removeHideMenuHandlers",value:function(){this.wrapperElement&&(this.wrapperElement.removeEventListener("click",this.hideMenuHandler),this.wrapperElement.removeEventListener("touchstart",this.hideMenuHandler)),this.el&&(this.el.querySelector(".jw-info-overlay-item").removeEventListener("click",this.infoOverlayHandler),this.el.removeEventListener("mouseover",this.overHandler),this.el.removeEventListener("mouseout",this.outHandler),this.shortcutsTooltip&&this.el.querySelector(".jw-s
hortcuts-item").removeEventListener("click",this.shortcutsTooltipHandler)),document.removeEventListener("click",this.hideMenuHandler),document.removeEventListener("touchstart",this.hideMenuHandler)}},{key:"destroy",value:function(){clearTimeout(this._menuTimeout),this.removeHideMenuHandlers(),this.el&&(this.hideMenu(),this.hideMenuHandler=null,this.el=null),this.wrapperElement&&(this.wrapperElement.oncontextmenu=null,this.wrapperElement=null),this.model&&(this.model=null),this.ui&&(this.ui.destroy(),this.ui=null)}}])&&Re(e.prototype,n),i&&Re(e,i),t}(),Ne=function(t){return'")},He=function(t){return'"},Fe=function(t){return'"};function qe(t){return(qe="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function De(t,e){return!e||"object"!==qe(e)&&"function"!=typeof e?function(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}(t):e}function Ue(t){return(Ue=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)})(t)}function We(t,e){return(We=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t})(t,e)}function Qe(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function Ye(t,e){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:Ne;Qe(this,t),this.el=Object(l.e)(i(e)),this.ui=new u.a(this.el).on("click tap enter",n,this)}return Xe(t,[{key:"destroy",value:function(){this.ui.destroy()}}]),t}(),Je=function(t){function e(t,n){var i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:Fe;return Qe(this,e),De(this,Ue(e).call(this,t,n,i))}return function(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a 
function");t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),e&&We(t,e)}(e,t),Xe(e,[{key:"activate",value:function(){Object(l.v)(this.el,"jw-settings-item-active",!0),this.el.setAttribute("aria-checked","true"),this.active=!0}},{key:"deactivate",value:function(){Object(l.v)(this.el,"jw-settings-item-active",!1),this.el.setAttribute("aria-checked","false"),this.active=!1}}]),e}(Ke),Ge=function(t,e){return t?'