mirror of
https://github.com/stashapp/stash.git
synced 2026-05-04 18:41:22 +02:00
Merge pull request #1544 from stashapp/develop
Merge develop to master for 0.8
This commit is contained in:
commit
0c417ad439
1206 changed files with 184385 additions and 37484 deletions
51
.github/workflows/build.yml
vendored
51
.github/workflows/build.yml
vendored
|
|
@ -31,21 +31,62 @@ jobs:
|
|||
path: ui/v2.5/node_modules
|
||||
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/yarn.lock') }}
|
||||
|
||||
- name: Cache UI build
|
||||
uses: actions/cache@v2
|
||||
id: cache-ui
|
||||
env:
|
||||
cache-name: cache-ui
|
||||
with:
|
||||
path: ui/v2.5/build
|
||||
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/yarn.lock', 'ui/v2.5/public/**', 'ui/v2.5/src/**', 'graphql/**/*.graphql') }}
|
||||
|
||||
- name: Cache go build
|
||||
uses: actions/cache@v2
|
||||
env:
|
||||
cache-name: cache-go-cache
|
||||
with:
|
||||
path: .go-cache
|
||||
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/go.sum') }}
|
||||
|
||||
- name: Start build container
|
||||
run: |
|
||||
mkdir -p .go-cache
|
||||
docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated -w /stash $COMPILER_IMAGE tail -f /dev/null
|
||||
|
||||
- name: Pre-install
|
||||
run: docker run --rm --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated -w /stash $COMPILER_IMAGE /bin/bash -c "make pre-ui"
|
||||
run: docker exec -t build /bin/bash -c "make pre-ui"
|
||||
|
||||
- name: Generate
|
||||
run: docker run --rm --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated -w /stash $COMPILER_IMAGE /bin/bash -c "make generate"
|
||||
run: docker exec -t build /bin/bash -c "make generate"
|
||||
|
||||
- name: Validate UI
|
||||
# skip UI validation for pull requests if UI is unchanged
|
||||
if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }}
|
||||
run: docker exec -t build /bin/bash -c "make ui-validate"
|
||||
|
||||
# TODO: Replace with `make validate` once `revive` is bundled in COMPILER_IMAGE
|
||||
- name: Validate
|
||||
run: docker run --rm --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated -w /stash $COMPILER_IMAGE /bin/bash -c "make ui-validate fmt-check vet it"
|
||||
run: docker exec -t build /bin/bash -c "make fmt-check vet it"
|
||||
|
||||
- name: Build UI
|
||||
run: docker run --rm --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated -w /stash $COMPILER_IMAGE /bin/bash -c "make ui-only"
|
||||
# skip UI build for pull requests if UI is unchanged (UI was cached)
|
||||
# this means that the build version/time may be incorrect if the UI is
|
||||
# not changed in a pull request
|
||||
if: ${{ github.event_name != 'pull_request' || steps.cache-ui.outputs.cache-hit != 'true' }}
|
||||
run: docker exec -t build /bin/bash -c "make ui-only"
|
||||
|
||||
- name: Compile for all supported platforms
|
||||
run: ./scripts/cross-compile.sh
|
||||
run: |
|
||||
docker exec -t build /bin/bash -c "make packr"
|
||||
docker exec -t build /bin/bash -c "make cross-compile-windows"
|
||||
docker exec -t build /bin/bash -c "make cross-compile-osx"
|
||||
docker exec -t build /bin/bash -c "make cross-compile-linux"
|
||||
docker exec -t build /bin/bash -c "make cross-compile-linux-arm64v8"
|
||||
docker exec -t build /bin/bash -c "make cross-compile-linux-arm32v7"
|
||||
docker exec -t build /bin/bash -c "make cross-compile-pi"
|
||||
|
||||
- name: Cleanup build container
|
||||
run: docker rm -f -v build
|
||||
|
||||
- name: Generate checksums
|
||||
run: |
|
||||
|
|
|
|||
48
Makefile
48
Makefile
|
|
@ -22,6 +22,9 @@ ifdef OUTPUT
|
|||
OUTPUT := -o $(OUTPUT)
|
||||
endif
|
||||
|
||||
export CGO_ENABLED = 1
|
||||
export GO111MODULE = on
|
||||
|
||||
.PHONY: release pre-build install clean
|
||||
|
||||
release: generate ui build-release
|
||||
|
|
@ -41,7 +44,7 @@ endif
|
|||
|
||||
build: pre-build
|
||||
$(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/pkg/api.version=$(STASH_VERSION)' -X 'github.com/stashapp/stash/pkg/api.buildstamp=$(BUILD_DATE)' -X 'github.com/stashapp/stash/pkg/api.githash=$(GITHASH)')
|
||||
$(SET) CGO_ENABLED=1 $(SEPARATOR) go build $(OUTPUT) -mod=vendor -v -tags "sqlite_omit_load_extension osusergo netgo" -ldflags "$(LDFLAGS) $(EXTRA_LDFLAGS)"
|
||||
go build $(OUTPUT) -mod=vendor -v -tags "sqlite_omit_load_extension osusergo netgo" -ldflags "$(LDFLAGS) $(EXTRA_LDFLAGS)"
|
||||
|
||||
# strips debug symbols from the release build
|
||||
# consider -trimpath in go build if we move to go 1.13+
|
||||
|
|
@ -51,6 +54,49 @@ build-release: build
|
|||
build-release-static: EXTRA_LDFLAGS := -extldflags=-static -s -w
|
||||
build-release-static: build
|
||||
|
||||
# cross-compile- targets should be run within the compiler docker container
|
||||
cross-compile-windows: export GOOS := windows
|
||||
cross-compile-windows: export GOARCH := amd64
|
||||
cross-compile-windows: export CC := x86_64-w64-mingw32-gcc
|
||||
cross-compile-windows: export CXX := x86_64-w64-mingw32-g++
|
||||
cross-compile-windows: OUTPUT := -o dist/stash-win.exe
|
||||
cross-compile-windows: build-release-static
|
||||
|
||||
cross-compile-osx: export GOOS := darwin
|
||||
cross-compile-osx: export GOARCH := amd64
|
||||
cross-compile-osx: export CC := o64-clang
|
||||
cross-compile-osx: export CXX := o64-clang++
|
||||
cross-compile-osx: OUTPUT := -o dist/stash-osx
|
||||
# can't use static build for OSX
|
||||
cross-compile-osx: build-release
|
||||
|
||||
cross-compile-linux: export GOOS := linux
|
||||
cross-compile-linux: export GOARCH := amd64
|
||||
cross-compile-linux: OUTPUT := -o dist/stash-linux
|
||||
cross-compile-linux: build-release-static
|
||||
|
||||
cross-compile-linux-arm64v8: export GOOS := linux
|
||||
cross-compile-linux-arm64v8: export GOARCH := arm64
|
||||
cross-compile-linux-arm64v8: export CC := aarch64-linux-gnu-gcc
|
||||
cross-compile-linux-arm64v8: OUTPUT := -o dist/stash-linux-arm64v8
|
||||
cross-compile-linux-arm64v8: build-release-static
|
||||
|
||||
cross-compile-linux-arm32v7: export GOOS := linux
|
||||
cross-compile-linux-arm32v7: export GOARCH := arm
|
||||
cross-compile-linux-arm32v7: export GOARM := 7
|
||||
cross-compile-linux-arm32v7: export CC := arm-linux-gnueabihf-gcc
|
||||
cross-compile-linux-arm32v7: OUTPUT := -o dist/stash-linux-arm32v7
|
||||
cross-compile-linux-arm32v7: build-release-static
|
||||
|
||||
cross-compile-pi: export GOOS := linux
|
||||
cross-compile-pi: export GOARCH := arm
|
||||
cross-compile-pi: export GOARM := 6
|
||||
cross-compile-pi: export CC := arm-linux-gnueabi-gcc
|
||||
cross-compile-pi: OUTPUT := -o dist/stash-pi
|
||||
cross-compile-pi: build-release-static
|
||||
|
||||
cross-compile-all: cross-compile-windows cross-compile-osx cross-compile-linux cross-compile-linux-arm64v8 cross-compile-linux-arm32v7 cross-compile-pi
|
||||
|
||||
install:
|
||||
packr2 install
|
||||
|
||||
|
|
|
|||
13
README.md
13
README.md
|
|
@ -131,6 +131,7 @@ NOTE: The `make` command in Windows will be `mingw32-make` with MingW.
|
|||
* `make fmt-check` - Ensure changed files are formatted correctly
|
||||
* `make it` - Run the unit and integration tests
|
||||
* `make validate` - Run all of the tests and checks required to submit a PR
|
||||
* `make ui-start` - Runs the UI in development mode. Requires a running stash server to connect to. Stash port can be changed from the default of `9999` with environment variable `REACT_APP_PLATFORM_PORT`.
|
||||
|
||||
## Building a release
|
||||
|
||||
|
|
@ -145,3 +146,15 @@ where the app can be cross-compiled. This process is kicked off by CI via the `
|
|||
command to open a bash shell to the container to poke around:
|
||||
|
||||
`docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -i -t stashappdev/compiler:latest /bin/bash`
|
||||
|
||||
## Profiling
|
||||
|
||||
Stash can be profiled using the `--cpuprofile <output profile filename>` command line flag.
|
||||
|
||||
The resulting file can then be used with pprof as follows:
|
||||
|
||||
`go tool pprof <path to binary> <path to profile filename>`
|
||||
|
||||
With `graphviz` installed and in the path, a call graph can be generated with:
|
||||
|
||||
`go tool pprof -svg <path to binary> <path to profile filename> > <output svg file>`
|
||||
|
|
|
|||
|
|
@ -8,9 +8,12 @@ RUN if [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then BIN=stash-pi; \
|
|||
elif [ "$TARGETPLATFORM" = "linux/amd64" ]; then BIN=stash-linux; \
|
||||
fi; \
|
||||
mv $BIN /stash
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
RUN apt update && apt install -y python3 python-is-python3 python3-requests python3-requests-toolbelt python3-lxml python3-pip && pip3 install cloudscraper
|
||||
FROM ubuntu:20.04 as app
|
||||
run apt update && apt install -y python3 python3 python-is-python3 python3-requests ffmpeg && rm -rf /var/lib/apt/lists/*
|
||||
run apt update && apt install -y python3 python-is-python3 python3-requests python3-requests-toolbelt python3-lxml ffmpeg && rm -rf /var/lib/apt/lists/*
|
||||
COPY --from=prep /stash /usr/bin/
|
||||
COPY --from=prep /usr/local/lib/python3.8/dist-packages /usr/local/lib/python3.8/dist-packages
|
||||
|
||||
ENV STASH_CONFIG_FILE=/root/.stash/config.yml
|
||||
|
||||
|
|
|
|||
10
go.mod
10
go.mod
|
|
@ -3,9 +3,10 @@ module github.com/stashapp/stash
|
|||
require (
|
||||
github.com/99designs/gqlgen v0.12.2
|
||||
github.com/Yamashou/gqlgenc v0.0.0-20200902035953-4dbef3551953
|
||||
github.com/anacrolix/dms v1.2.2
|
||||
github.com/antchfx/htmlquery v1.2.3
|
||||
github.com/chromedp/cdproto v0.0.0-20200608134039-8a80cdaf865c
|
||||
github.com/chromedp/chromedp v0.5.3
|
||||
github.com/chromedp/cdproto v0.0.0-20210622022015-fe1827b46b84
|
||||
github.com/chromedp/chromedp v0.7.3
|
||||
github.com/corona10/goimagehash v1.0.3
|
||||
github.com/dgrijalva/jwt-go v3.2.0+incompatible
|
||||
github.com/disintegration/imaging v1.6.0
|
||||
|
|
@ -20,9 +21,11 @@ require (
|
|||
github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a
|
||||
github.com/jmoiron/sqlx v1.2.0
|
||||
github.com/json-iterator/go v1.1.9
|
||||
github.com/knq/sysutil v0.0.0-20191005231841-15668db23d08 // indirect
|
||||
github.com/mattn/go-sqlite3 v1.14.6
|
||||
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007
|
||||
github.com/remeh/sizedwaitgroup v1.0.0
|
||||
github.com/robertkrimen/otto v0.0.0-20200922221731-ef014fd054ac
|
||||
github.com/rs/cors v1.6.0
|
||||
github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f
|
||||
github.com/sirupsen/logrus v1.4.2
|
||||
|
|
@ -35,8 +38,9 @@ require (
|
|||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9
|
||||
golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb
|
||||
golang.org/x/net v0.0.0-20200822124328-c89045814202
|
||||
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd
|
||||
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22
|
||||
golang.org/x/tools v0.0.0-20200915031644-64986481280e // indirect
|
||||
gopkg.in/sourcemap.v1 v1.0.5 // indirect
|
||||
gopkg.in/yaml.v2 v2.3.0
|
||||
)
|
||||
|
||||
|
|
|
|||
51
go.sum
51
go.sum
|
|
@ -28,6 +28,7 @@ github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF0
|
|||
github.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA=
|
||||
github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk=
|
||||
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
|
||||
github.com/RoaringBitmap/roaring v0.4.7/go.mod h1:8khRDP4HmeXns4xIj9oGrKSz7XTQiJx2zgh7AcNke4w=
|
||||
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
|
||||
github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
|
||||
github.com/Yamashou/gqlgenc v0.0.0-20200902035953-4dbef3551953 h1:+iPJDL28FxZhEdtJ9qykrMt/oDiOvlzTa0zV06nUcFM=
|
||||
|
|
@ -41,6 +42,13 @@ github.com/agnivade/levenshtein v1.1.0/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVb
|
|||
github.com/ajg/form v0.0.0-20160822230020-523a5da1a92f/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY=
|
||||
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
|
||||
github.com/anacrolix/dms v1.2.2 h1:0mk2/DXNqa5KDDbaLgFPf3oMV6VCGdFNh3d/gt4oafM=
|
||||
github.com/anacrolix/dms v1.2.2/go.mod h1:msPKAoppoNRfrYplJqx63FZ+VipDZ4Xsj3KzIQxyU7k=
|
||||
github.com/anacrolix/envpprof v0.0.0-20180404065416-323002cec2fa/go.mod h1:KgHhUaQMc8cC0+cEflSgCFNFbKwi5h54gqtVn8yhP7c=
|
||||
github.com/anacrolix/envpprof v1.0.0/go.mod h1:KgHhUaQMc8cC0+cEflSgCFNFbKwi5h54gqtVn8yhP7c=
|
||||
github.com/anacrolix/ffprobe v1.0.0/go.mod h1:BIw+Bjol6CWjm/CRWrVLk2Vy+UYlkgmBZ05vpSYqZPw=
|
||||
github.com/anacrolix/missinggo v1.1.0/go.mod h1:MBJu3Sk/k3ZfGYcS7z18gwfu72Ey/xopPFJJbTi5yIo=
|
||||
github.com/anacrolix/tagflag v0.0.0-20180109131632-2146c8d41bf0/go.mod h1:1m2U/K6ZT+JZG0+bdMK6qauP49QT4wE5pmhJXOKKCHw=
|
||||
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
|
||||
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
|
||||
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
|
||||
|
|
@ -64,12 +72,21 @@ github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCS
|
|||
github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84=
|
||||
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
|
||||
github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g=
|
||||
github.com/bradfitz/iter v0.0.0-20140124041915-454541ec3da2/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo=
|
||||
github.com/bradfitz/iter v0.0.0-20190303215204-33e6a9893b0c/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo=
|
||||
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
|
||||
github.com/chromedp/cdproto v0.0.0-20200116234248-4da64dd111ac/go.mod h1:PfAWWKJqjlGFYJEidUM6aVIWPr0EpobeyVWEEmplX7g=
|
||||
github.com/chromedp/cdproto v0.0.0-20200608134039-8a80cdaf865c h1:qM1xzKK8kc93zKPkxK4iqtjksqDDrU6g9wGnr30jyLo=
|
||||
github.com/chromedp/cdproto v0.0.0-20200608134039-8a80cdaf865c/go.mod h1:E6LPWRdIJc11h/di5p0rwvRmUYbhGpBEH7ZbPfzDIOE=
|
||||
github.com/chromedp/cdproto v0.0.0-20210526005521-9e51b9051fd0/go.mod h1:At5TxYYdxkbQL0TSefRjhLE3Q0lgvqKKMSFUglJ7i1U=
|
||||
github.com/chromedp/cdproto v0.0.0-20210622022015-fe1827b46b84 h1:Xxl4imt7LA3SbkrlIH5mm+mbzsv0tpHomLASTPINlvQ=
|
||||
github.com/chromedp/cdproto v0.0.0-20210622022015-fe1827b46b84/go.mod h1:At5TxYYdxkbQL0TSefRjhLE3Q0lgvqKKMSFUglJ7i1U=
|
||||
github.com/chromedp/chromedp v0.5.3 h1:F9LafxmYpsQhWQBdCs+6Sret1zzeeFyHS5LkRF//Ffg=
|
||||
github.com/chromedp/chromedp v0.5.3/go.mod h1:YLdPtndaHQ4rCpSpBG+IPpy9JvX0VD+7aaLxYgYj28w=
|
||||
github.com/chromedp/chromedp v0.7.3 h1:FvgJICfjvXtDX+miuMUY0NHuY8zQvjS/TcEQEG6Ldzs=
|
||||
github.com/chromedp/chromedp v0.7.3/go.mod h1:9gC521Yzgrk078Ulv6KIgG7hJ2x9aWrxMBBobTFk30A=
|
||||
github.com/chromedp/sysutil v1.0.0 h1:+ZxhTpfpZlmchB58ih/LBHX52ky7w2VhQVKQMucy3Ic=
|
||||
github.com/chromedp/sysutil v1.0.0/go.mod h1:kgWmDdq8fTzXYcKIBqIYvRRTnYb9aNS9moAV0xufSww=
|
||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
|
||||
github.com/cockroachdb/cockroach-go v0.0.0-20181001143604-e0a95dfd547c/go.mod h1:XGLbWH/ujMcbPbhZq52Nv6UrCghb1yGn//133kEsvDk=
|
||||
|
|
@ -115,6 +132,8 @@ github.com/docker/docker v0.7.3-0.20190103212154-2b7e084dc98b/go.mod h1:eEKB0N0r
|
|||
github.com/docker/docker v0.7.3-0.20190108045446-77df18c24acf/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
|
||||
github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
|
||||
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
|
||||
github.com/dustin/go-humanize v0.0.0-20180421182945-02af3965c54e/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||
github.com/dustin/go-humanize v0.0.0-20180713052910-9f541cc9db5d/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||
github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
|
||||
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||
|
|
@ -133,6 +152,8 @@ github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM4
|
|||
github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
|
||||
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
|
||||
github.com/glycerine/go-unsnap-stream v0.0.0-20180323001048-9f0cb55181dd/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
|
||||
github.com/glycerine/goconvey v0.0.0-20180728074245-46e3a41ad493/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
|
||||
github.com/go-chi/chi v3.3.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ=
|
||||
github.com/go-chi/chi v4.0.2+incompatible h1:maB6vn6FqCxrpz4FqWdh4+lwpyZIQS7YEAUcHlgXVRs=
|
||||
github.com/go-chi/chi v4.0.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ=
|
||||
|
|
@ -354,10 +375,16 @@ github.com/gobuffalo/x v0.0.0-20181003152136-452098b06085/go.mod h1:WevpGD+5YOre
|
|||
github.com/gobuffalo/x v0.0.0-20181007152206-913e47c59ca7/go.mod h1:9rDPXaB3kXdKWzMc4odGQQdG2e2DIEmANy5aSJ9yesY=
|
||||
github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee h1:s+21KNqlpePfkah2I+gwHF8xmJWRjooY+5248k6m4A0=
|
||||
github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo=
|
||||
github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU=
|
||||
github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM=
|
||||
github.com/gobwas/pool v0.2.0 h1:QEmUOlnSjWtnpRGHF3SauEiOsy82Cup83Vf2LcMlnc8=
|
||||
github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=
|
||||
github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og=
|
||||
github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=
|
||||
github.com/gobwas/ws v1.0.2 h1:CoAavW/wd/kulfZmSIBt6p24n4j7tHgNVCjsfHVNUbo=
|
||||
github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM=
|
||||
github.com/gobwas/ws v1.1.0-rc.5 h1:QOAag7FoBaBYYHRqzqkhhd8fq5RTubvI4v3Ft/gDVVQ=
|
||||
github.com/gobwas/ws v1.1.0-rc.5/go.mod h1:nzvNcVha5eUziGrbxFCo6qFIojQHjJV5cLYIbezhfL0=
|
||||
github.com/gocql/gocql v0.0.0-20190301043612-f6df8288f9b4/go.mod h1:4Fw1eo5iaEhDUs8XyuhSVCVy52Jq3L+/3GJgYkwc+/0=
|
||||
github.com/gofrs/uuid v3.1.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
|
||||
github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
|
||||
|
|
@ -384,6 +411,7 @@ github.com/golang/snappy v0.0.0-20170215233205-553a64147049/go.mod h1:/XxbfmMg8l
|
|||
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/gomodule/redigo v2.0.0+incompatible/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4=
|
||||
github.com/google/btree v0.0.0-20180124185431-e89373fe6b4a/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||
|
|
@ -403,6 +431,7 @@ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+
|
|||
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gorilla/context v0.0.0-20160226214623-1ea25387ff6f/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
|
||||
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
|
||||
github.com/gorilla/mux v1.6.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
|
||||
|
|
@ -453,6 +482,7 @@ github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0m
|
|||
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
|
||||
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
|
||||
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
|
||||
github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo=
|
||||
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
|
||||
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
|
||||
github.com/jackc/fake v0.0.0-20150926172116-812a484cc733/go.mod h1:WrMFNQdiFJ80sQsxDoMokWK1W5TQtxBFNpzWTD84ibQ=
|
||||
|
|
@ -468,10 +498,13 @@ github.com/joho/godotenv v1.2.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqx
|
|||
github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=
|
||||
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
|
||||
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
|
||||
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
|
||||
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
|
||||
github.com/json-iterator/go v1.1.9 h1:9yzud/Ht36ygwatGx56VwCZtlI/2AD15T1X2sjSuGns=
|
||||
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
|
||||
github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
|
||||
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
|
||||
|
|
@ -508,6 +541,8 @@ github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czP
|
|||
github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs=
|
||||
github.com/mailru/easyjson v0.7.1 h1:mdxE1MF9o53iCb2Ghj1VfWvh7ZOwHpnVG/xwXrV90U8=
|
||||
github.com/mailru/easyjson v0.7.1/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs=
|
||||
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
|
||||
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
|
||||
github.com/markbates/deplist v1.0.4/go.mod h1:gRRbPbbuA8TmMiRvaOzUlRfzfjeCCBqX2A6arxN01MM=
|
||||
github.com/markbates/deplist v1.0.5/go.mod h1:gRRbPbbuA8TmMiRvaOzUlRfzfjeCCBqX2A6arxN01MM=
|
||||
github.com/markbates/going v1.0.2/go.mod h1:UWCk3zm0UKefHZ7l8BNqi26UyiEMniznk8naLdTcy6c=
|
||||
|
|
@ -569,6 +604,7 @@ github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9
|
|||
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||
github.com/mongodb/mongo-go-driver v0.3.0/go.mod h1:NK/HWDIIZkaYsnYa0hmtP443T5ELr0KDecmIioVuuyU=
|
||||
github.com/monoculum/formam v0.0.0-20180901015400-4e68be1d79ba/go.mod h1:RKgILGEJq24YyJ2ban8EO0RUVSJlF1pGsEvoLEACr/Q=
|
||||
github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg=
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||
github.com/nakagami/firebirdsql v0.0.0-20190310045651-3c02a58cfed8/go.mod h1:86wM1zFnC6/uDBfZGNwB65O+pR2OFi5q/YQaEUid1qA=
|
||||
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007 h1:Ohgj9L0EYOgXxkDp+bczlMBiulwmqYzQpvQNUdtt3oc=
|
||||
|
|
@ -593,6 +629,7 @@ github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJ
|
|||
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
|
||||
github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=
|
||||
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
|
||||
github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU=
|
||||
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
|
||||
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
|
||||
|
|
@ -621,6 +658,8 @@ github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40T
|
|||
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
|
||||
github.com/remeh/sizedwaitgroup v1.0.0 h1:VNGGFwNo/R5+MJBf6yrsr110p0m4/OX4S3DCy7Kyl5E=
|
||||
github.com/remeh/sizedwaitgroup v1.0.0/go.mod h1:3j2R4OIe/SeS6YDhICBy22RWjJC5eNCJ1V+9+NVNYlo=
|
||||
github.com/robertkrimen/otto v0.0.0-20200922221731-ef014fd054ac h1:kYPjbEN6YPYWWHI6ky1J813KzIq/8+Wg4TO4xU7A/KU=
|
||||
github.com/robertkrimen/otto v0.0.0-20200922221731-ef014fd054ac/go.mod h1:xvqspoSXJTIpemEonrMDFq6XzwHYYgToXWj5eRX1OtY=
|
||||
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
|
||||
github.com/rogpeppe/go-internal v1.0.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/rogpeppe/go-internal v1.1.0 h1:g0fH8RicVgNl+zVZDCDfbdWxAWoAEJyI7I3TZYXFiig=
|
||||
|
|
@ -641,6 +680,7 @@ github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0
|
|||
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
|
||||
github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8=
|
||||
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
|
||||
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
|
||||
github.com/serenize/snaker v0.0.0-20171204205717-a683aaf2d516/go.mod h1:Yow6lPLSAXx2ifx470yD/nUe22Dv5vBvxK/UK9UUTVs=
|
||||
|
|
@ -689,6 +729,7 @@ github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4
|
|||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
||||
github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s=
|
||||
github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=
|
||||
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
|
||||
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
|
||||
|
|
@ -736,6 +777,7 @@ github.com/tidwall/match v1.0.1/go.mod h1:LujAq0jyVjBy028G1WhWfIzbpQfMO8bBZ6Tyb0
|
|||
github.com/tidwall/pretty v0.0.0-20180105212114-65a9db5fad51/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
|
||||
github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
|
||||
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
|
||||
github.com/tinylib/msgp v1.0.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
|
||||
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
|
||||
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
|
||||
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
|
||||
|
|
@ -751,6 +793,7 @@ github.com/vektah/gqlparser/v2 v2.0.1 h1:xgl5abVnsd4hkN9rk65OJID9bfcLSMuTaTcZj77
|
|||
github.com/vektah/gqlparser/v2 v2.0.1/go.mod h1:SyUiHgLATUR8BiYURfTirrTcGpcE+4XkV2se04Px1Ms=
|
||||
github.com/vektra/mockery/v2 v2.2.1 h1:EYgPvxyYkm/0JKs62qlVc9pO+ljb8biPbDWabk5/PmI=
|
||||
github.com/vektra/mockery/v2 v2.2.1/go.mod h1:rBZUbbhMbiSX1WlCGsOgAi6xjuJGxB7KKbnoL0XNYW8=
|
||||
github.com/willf/bitset v1.1.9/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
|
||||
github.com/xanzy/go-gitlab v0.15.0/go.mod h1:8zdQa/ri1dfn8eS3Ir1SyfvOKlw7WBJ8DVThkpGiXrs=
|
||||
github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I=
|
||||
github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y=
|
||||
|
|
@ -850,6 +893,7 @@ golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73r
|
|||
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190415214537-1da14a5a36f2/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190424112056-4829fb13d2c6/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
|
|
@ -911,6 +955,7 @@ golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5h
|
|||
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190415145633-3fd5a3612ccd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190426135247-a129542de9ae h1:mQLHiymj/JXKnnjc62tb7nD5pZLs940/sXJu+Xp3DBA=
|
||||
golang.org/x/sys v0.0.0-20190426135247-a129542de9ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
|
|
@ -921,6 +966,10 @@ golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7w
|
|||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd h1:xhmwyvizuTgC2qz7ZlMluP20uW+C3Rm0FD/WLDX8884=
|
||||
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201207223542-d4d67f95c62d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210525143221-35b2ab0089ea/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22 h1:RqytpXGR1iVNX7psjB3ff8y7sNFinVFvkx1c8SjBkio=
|
||||
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
|
|
@ -1058,6 +1107,8 @@ gopkg.in/ini.v1 v1.51.0 h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno=
|
|||
gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||
gopkg.in/mail.v2 v2.0.0-20180731213649-a0242b2233b4/go.mod h1:htwXN1Qh09vZJ1NVKxQqHPBaCBbzKhp5GzuJEA4VJWw=
|
||||
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
|
||||
gopkg.in/sourcemap.v1 v1.0.5 h1:inv58fC9f9J3TK2Y2R1NPntXEn3/wjWHkonhIUODNTI=
|
||||
gopkg.in/sourcemap.v1 v1.0.5/go.mod h1:2RlvNNSMglmRrcvhfuzp4hQHwOtjxlbjX7UPY/GXb78=
|
||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
||||
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
|
||||
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
|
|
|
|||
|
|
@ -52,5 +52,7 @@ models:
|
|||
model: github.com/stashapp/stash/pkg/models.ScrapedMovie
|
||||
ScrapedMovieStudio:
|
||||
model: github.com/stashapp/stash/pkg/models.ScrapedMovieStudio
|
||||
SavedFilter:
|
||||
model: github.com/stashapp/stash/pkg/models.SavedFilter
|
||||
StashID:
|
||||
model: github.com/stashapp/stash/pkg/models.StashID
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ fragment ConfigGeneralData on ConfigGeneralResult {
|
|||
calculateMD5
|
||||
videoFileNamingAlgorithm
|
||||
parallelTasks
|
||||
previewAudio
|
||||
previewSegments
|
||||
previewSegmentDuration
|
||||
previewExcludeStart
|
||||
|
|
@ -53,6 +54,14 @@ fragment ConfigInterfaceData on ConfigInterfaceResult {
|
|||
cssEnabled
|
||||
language
|
||||
slideshowDelay
|
||||
handyKey
|
||||
}
|
||||
|
||||
fragment ConfigDLNAData on ConfigDLNAResult {
|
||||
serverName
|
||||
enabled
|
||||
whitelistedIPs
|
||||
interfaces
|
||||
}
|
||||
|
||||
fragment ConfigData on ConfigResult {
|
||||
|
|
@ -62,4 +71,7 @@ fragment ConfigData on ConfigResult {
|
|||
interface {
|
||||
...ConfigInterfaceData
|
||||
}
|
||||
dlna {
|
||||
...ConfigDLNAData
|
||||
}
|
||||
}
|
||||
|
|
|
|||
6
graphql/documents/data/filter.graphql
Normal file
6
graphql/documents/data/filter.graphql
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
fragment SavedFilterData on SavedFilter {
|
||||
id
|
||||
mode
|
||||
name
|
||||
filter
|
||||
}
|
||||
10
graphql/documents/data/job.graphql
Normal file
10
graphql/documents/data/job.graphql
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
fragment JobData on Job {
|
||||
id
|
||||
status
|
||||
subTasks
|
||||
description
|
||||
progress
|
||||
startTime
|
||||
endTime
|
||||
addTime
|
||||
}
|
||||
|
|
@ -12,10 +12,12 @@ fragment SceneMarkerData on SceneMarker {
|
|||
primary_tag {
|
||||
id
|
||||
name
|
||||
aliases
|
||||
}
|
||||
|
||||
tags {
|
||||
id
|
||||
name
|
||||
aliases
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ fragment SlimSceneData on Scene {
|
|||
organized
|
||||
path
|
||||
phash
|
||||
interactive
|
||||
|
||||
file {
|
||||
size
|
||||
|
|
@ -31,6 +32,7 @@ fragment SlimSceneData on Scene {
|
|||
vtt
|
||||
chapters_vtt
|
||||
sprite
|
||||
funscript
|
||||
}
|
||||
|
||||
scene_markers {
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ fragment SceneData on Scene {
|
|||
organized
|
||||
path
|
||||
phash
|
||||
interactive
|
||||
|
||||
file {
|
||||
size
|
||||
|
|
@ -30,6 +31,8 @@ fragment SceneData on Scene {
|
|||
webp
|
||||
vtt
|
||||
chapters_vtt
|
||||
sprite
|
||||
funscript
|
||||
}
|
||||
|
||||
scene_markers {
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
fragment SlimTagData on Tag {
|
||||
id
|
||||
name
|
||||
aliases
|
||||
image_path
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
fragment TagData on Tag {
|
||||
id
|
||||
name
|
||||
aliases
|
||||
image_path
|
||||
scene_count
|
||||
scene_marker_count
|
||||
|
|
|
|||
|
|
@ -18,6 +18,12 @@ mutation ConfigureInterface($input: ConfigInterfaceInput!) {
|
|||
}
|
||||
}
|
||||
|
||||
mutation ConfigureDLNA($input: ConfigDLNAInput!) {
|
||||
configureDLNA(input: $input) {
|
||||
...ConfigDLNAData
|
||||
}
|
||||
}
|
||||
|
||||
mutation GenerateAPIKey($input: GenerateAPIKeyInput!) {
|
||||
generateAPIKey(input: $input)
|
||||
}
|
||||
|
|
|
|||
15
graphql/documents/mutations/dlna.graphql
Normal file
15
graphql/documents/mutations/dlna.graphql
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
mutation EnableDLNA($input: EnableDLNAInput!) {
|
||||
enableDLNA(input: $input)
|
||||
}
|
||||
|
||||
mutation DisableDLNA($input: DisableDLNAInput!) {
|
||||
disableDLNA(input: $input)
|
||||
}
|
||||
|
||||
mutation AddTempDLNAIP($input: AddTempDLNAIPInput!) {
|
||||
addTempDLNAIP(input: $input)
|
||||
}
|
||||
|
||||
mutation RemoveTempDLNAIP($input: RemoveTempDLNAIPInput!) {
|
||||
removeTempDLNAIP(input: $input)
|
||||
}
|
||||
13
graphql/documents/mutations/filter.graphql
Normal file
13
graphql/documents/mutations/filter.graphql
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
mutation SaveFilter($input: SaveFilterInput!) {
|
||||
saveFilter(input: $input) {
|
||||
...SavedFilterData
|
||||
}
|
||||
}
|
||||
|
||||
mutation DestroySavedFilter($input: DestroyFilterInput!) {
|
||||
destroySavedFilter(input: $input)
|
||||
}
|
||||
|
||||
mutation SetDefaultFilter($input: SetDefaultFilterInput!) {
|
||||
setDefaultFilter(input: $input)
|
||||
}
|
||||
7
graphql/documents/mutations/job.graphql
Normal file
7
graphql/documents/mutations/job.graphql
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
mutation StopJob($job_id: ID!) {
|
||||
stopJob(job_id: $job_id)
|
||||
}
|
||||
|
||||
mutation StopAllJobs {
|
||||
stopAllJobs
|
||||
}
|
||||
|
|
@ -34,10 +34,6 @@ mutation MigrateHashNaming {
|
|||
migrateHashNaming
|
||||
}
|
||||
|
||||
mutation StopJob {
|
||||
stopJob
|
||||
}
|
||||
|
||||
mutation BackupDatabase($input: BackupDatabaseInput!) {
|
||||
backupDatabase(input: $input)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
mutation TagCreate($name: String!, $image: String) {
|
||||
tagCreate(input: { name: $name, image: $image }) {
|
||||
mutation TagCreate($input: TagCreateInput!) {
|
||||
tagCreate(input: $input) {
|
||||
...TagData
|
||||
}
|
||||
}
|
||||
|
|
@ -17,3 +17,9 @@ mutation TagUpdate($input: TagUpdateInput!) {
|
|||
...TagData
|
||||
}
|
||||
}
|
||||
|
||||
mutation TagsMerge($source: [ID!]!, $destination: ID!) {
|
||||
tagsMerge(input: { source: $source, destination: $destination }) {
|
||||
...TagData
|
||||
}
|
||||
}
|
||||
|
|
|
|||
11
graphql/documents/queries/dlna.graphql
Normal file
11
graphql/documents/queries/dlna.graphql
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
query DLNAStatus {
|
||||
dlnaStatus {
|
||||
running
|
||||
until
|
||||
recentIPAddresses
|
||||
allowedIPAddresses {
|
||||
ipAddress
|
||||
until
|
||||
}
|
||||
}
|
||||
}
|
||||
11
graphql/documents/queries/filter.graphql
Normal file
11
graphql/documents/queries/filter.graphql
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
query FindSavedFilters($mode: FilterMode!) {
|
||||
findSavedFilters(mode: $mode) {
|
||||
...SavedFilterData
|
||||
}
|
||||
}
|
||||
|
||||
query FindDefaultFilter($mode: FilterMode!) {
|
||||
findDefaultFilter(mode: $mode) {
|
||||
...SavedFilterData
|
||||
}
|
||||
}
|
||||
11
graphql/documents/queries/job.graphql
Normal file
11
graphql/documents/queries/job.graphql
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
query JobQueue {
|
||||
jobQueue {
|
||||
...JobData
|
||||
}
|
||||
}
|
||||
|
||||
query FindJob($input: FindJobInput!) {
|
||||
findJob(input: $input) {
|
||||
...JobData
|
||||
}
|
||||
}
|
||||
|
|
@ -33,6 +33,7 @@ query AllTagsForFilter {
|
|||
allTags {
|
||||
id
|
||||
name
|
||||
aliases
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -10,6 +10,12 @@ query Plugins {
|
|||
name
|
||||
description
|
||||
}
|
||||
|
||||
hooks {
|
||||
name
|
||||
description
|
||||
hooks
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,11 +1,3 @@
|
|||
query JobStatus {
|
||||
jobStatus {
|
||||
progress
|
||||
status
|
||||
message
|
||||
}
|
||||
}
|
||||
|
||||
query SystemStatus {
|
||||
systemStatus {
|
||||
databaseSchema
|
||||
|
|
|
|||
|
|
@ -1,8 +1,13 @@
|
|||
subscription MetadataUpdate {
|
||||
metadataUpdate {
|
||||
progress
|
||||
status
|
||||
message
|
||||
subscription JobsSubscribe {
|
||||
jobsSubscribe {
|
||||
type
|
||||
job {
|
||||
id
|
||||
status
|
||||
subTasks
|
||||
description
|
||||
progress
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -10,4 +15,8 @@ subscription LoggingSubscribe {
|
|||
loggingSubscribe {
|
||||
...LogEntryData
|
||||
}
|
||||
}
|
||||
|
||||
subscription ScanCompleteSubscribe {
|
||||
scanCompleteSubscribe
|
||||
}
|
||||
|
|
@ -1,5 +1,9 @@
|
|||
"""The query root for this schema"""
|
||||
type Query {
|
||||
# Filters
|
||||
findSavedFilters(mode: FilterMode!): [SavedFilter!]!
|
||||
findDefaultFilter(mode: FilterMode!): SavedFilter
|
||||
|
||||
"""Find a scene by ID or Checksum"""
|
||||
findScene(id: ID, checksum: String): Scene
|
||||
findSceneByHash(input: SceneHashInput!): Scene
|
||||
|
|
@ -106,9 +110,14 @@ type Query {
|
|||
"""Returns an array of paths for the given path"""
|
||||
directory(path: String): Directory!
|
||||
|
||||
# Metadata
|
||||
# System status
|
||||
systemStatus: SystemStatus!
|
||||
jobStatus: MetadataUpdateStatus!
|
||||
|
||||
# Job status
|
||||
jobQueue: [Job!]
|
||||
findJob(input: FindJobInput!): Job
|
||||
|
||||
dlnaStatus: DLNAStatus!
|
||||
|
||||
# Get everything
|
||||
|
||||
|
|
@ -192,10 +201,17 @@ type Mutation {
|
|||
tagUpdate(input: TagUpdateInput!): Tag
|
||||
tagDestroy(input: TagDestroyInput!): Boolean!
|
||||
tagsDestroy(ids: [ID!]!): Boolean!
|
||||
tagsMerge(input: TagsMergeInput!): Tag
|
||||
|
||||
# Saved filters
|
||||
saveFilter(input: SaveFilterInput!): SavedFilter!
|
||||
destroySavedFilter(input: DestroyFilterInput!): Boolean!
|
||||
setDefaultFilter(input: SetDefaultFilterInput!): Boolean!
|
||||
|
||||
"""Change general configuration options"""
|
||||
configureGeneral(input: ConfigGeneralInput!): ConfigGeneralResult!
|
||||
configureInterface(input: ConfigInterfaceInput!): ConfigInterfaceResult!
|
||||
configureDLNA(input: ConfigDLNAInput!): ConfigDLNAResult!
|
||||
|
||||
"""Generate and set (or clear) API key"""
|
||||
generateAPIKey(input: GenerateAPIKeyInput!): String!
|
||||
|
|
@ -204,31 +220,32 @@ type Mutation {
|
|||
exportObjects(input: ExportObjectsInput!): String
|
||||
|
||||
"""Performs an incremental import. Returns the job ID"""
|
||||
importObjects(input: ImportObjectsInput!): String!
|
||||
importObjects(input: ImportObjectsInput!): ID!
|
||||
|
||||
"""Start an full import. Completely wipes the database and imports from the metadata directory. Returns the job ID"""
|
||||
metadataImport: String!
|
||||
metadataImport: ID!
|
||||
"""Start a full export. Outputs to the metadata directory. Returns the job ID"""
|
||||
metadataExport: String!
|
||||
metadataExport: ID!
|
||||
"""Start a scan. Returns the job ID"""
|
||||
metadataScan(input: ScanMetadataInput!): String!
|
||||
metadataScan(input: ScanMetadataInput!): ID!
|
||||
"""Start generating content. Returns the job ID"""
|
||||
metadataGenerate(input: GenerateMetadataInput!): String!
|
||||
metadataGenerate(input: GenerateMetadataInput!): ID!
|
||||
"""Start auto-tagging. Returns the job ID"""
|
||||
metadataAutoTag(input: AutoTagMetadataInput!): String!
|
||||
metadataAutoTag(input: AutoTagMetadataInput!): ID!
|
||||
"""Clean metadata. Returns the job ID"""
|
||||
metadataClean(input: CleanMetadataInput!): String!
|
||||
metadataClean(input: CleanMetadataInput!): ID!
|
||||
"""Migrate generated files for the current hash naming"""
|
||||
migrateHashNaming: String!
|
||||
migrateHashNaming: ID!
|
||||
|
||||
"""Reload scrapers"""
|
||||
reloadScrapers: Boolean!
|
||||
|
||||
"""Run plugin task. Returns the job ID"""
|
||||
runPluginTask(plugin_id: ID!, task_name: String!, args: [PluginArgInput!]): String!
|
||||
runPluginTask(plugin_id: ID!, task_name: String!, args: [PluginArgInput!]): ID!
|
||||
reloadPlugins: Boolean!
|
||||
|
||||
stopJob: Boolean!
|
||||
stopJob(job_id: ID!): Boolean!
|
||||
stopAllJobs: Boolean!
|
||||
|
||||
"""Submit fingerprints to stash-box instance"""
|
||||
submitStashBoxFingerprints(input: StashBoxFingerprintSubmissionInput!): Boolean!
|
||||
|
|
@ -238,13 +255,24 @@ type Mutation {
|
|||
|
||||
"""Run batch performer tag task. Returns the job ID."""
|
||||
stashBoxBatchPerformerTag(input: StashBoxBatchPerformerTagInput!): String!
|
||||
|
||||
"""Enables DLNA for an optional duration. Has no effect if DLNA is enabled by default"""
|
||||
enableDLNA(input: EnableDLNAInput!): Boolean!
|
||||
"""Disables DLNA for an optional duration. Has no effect if DLNA is disabled by default"""
|
||||
disableDLNA(input: DisableDLNAInput!): Boolean!
|
||||
"""Enables an IP address for DLNA for an optional duration"""
|
||||
addTempDLNAIP(input: AddTempDLNAIPInput!): Boolean!
|
||||
"""Removes an IP address from the temporary DLNA whitelist"""
|
||||
removeTempDLNAIP(input: RemoveTempDLNAIPInput!): Boolean!
|
||||
}
|
||||
|
||||
type Subscription {
|
||||
"""Update from the metadata manager"""
|
||||
metadataUpdate: MetadataUpdateStatus!
|
||||
jobsSubscribe: JobStatusUpdate!
|
||||
|
||||
loggingSubscribe: [LogEntry!]!
|
||||
|
||||
scanCompleteSubscribe: Boolean!
|
||||
}
|
||||
|
||||
schema {
|
||||
|
|
|
|||
|
|
@ -47,6 +47,8 @@ input ConfigGeneralInput {
|
|||
videoFileNamingAlgorithm: HashAlgorithm!
|
||||
"""Number of parallel tasks to start during scan/generate"""
|
||||
parallelTasks: Int
|
||||
"""Include audio stream in previews"""
|
||||
previewAudio: Boolean!
|
||||
"""Number of segments in a preview file"""
|
||||
previewSegments: Int
|
||||
"""Preview segment duration, in seconds"""
|
||||
|
|
@ -116,6 +118,8 @@ type ConfigGeneralResult {
|
|||
videoFileNamingAlgorithm: HashAlgorithm!
|
||||
"""Number of parallel tasks to start during scan/generate"""
|
||||
parallelTasks: Int!
|
||||
"""Include audio stream in previews"""
|
||||
previewAudio: Boolean!
|
||||
"""Number of segments in a preview file"""
|
||||
previewSegments: Int!
|
||||
"""Preview segment duration, in seconds"""
|
||||
|
|
@ -190,6 +194,8 @@ input ConfigInterfaceInput {
|
|||
language: String
|
||||
"""Slideshow Delay"""
|
||||
slideshowDelay: Int
|
||||
"""Handy Connection Key"""
|
||||
handyKey: String
|
||||
}
|
||||
|
||||
type ConfigInterfaceResult {
|
||||
|
|
@ -214,12 +220,35 @@ type ConfigInterfaceResult {
|
|||
language: String
|
||||
"""Slideshow Delay"""
|
||||
slideshowDelay: Int
|
||||
"""Handy Connection Key"""
|
||||
handyKey: String
|
||||
}
|
||||
|
||||
input ConfigDLNAInput {
|
||||
serverName: String
|
||||
"""True if DLNA service should be enabled by default"""
|
||||
enabled: Boolean
|
||||
"""List of IPs whitelisted for DLNA service"""
|
||||
whitelistedIPs: [String!]
|
||||
"""List of interfaces to run DLNA on. Empty for all"""
|
||||
interfaces: [String!]
|
||||
}
|
||||
|
||||
type ConfigDLNAResult {
|
||||
serverName: String!
|
||||
"""True if DLNA service should be enabled by default"""
|
||||
enabled: Boolean!
|
||||
"""List of IPs whitelisted for DLNA service"""
|
||||
whitelistedIPs: [String!]!
|
||||
"""List of interfaces to run DLNA on. Empty for all"""
|
||||
interfaces: [String!]!
|
||||
}
|
||||
|
||||
"""All configuration settings"""
|
||||
type ConfigResult {
|
||||
general: ConfigGeneralResult!
|
||||
interface: ConfigInterfaceResult!
|
||||
dlna: ConfigDLNAResult!
|
||||
}
|
||||
|
||||
"""Directory structure of a path"""
|
||||
|
|
|
|||
35
graphql/schema/types/dlna.graphql
Normal file
35
graphql/schema/types/dlna.graphql
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
|
||||
|
||||
type DLNAIP {
|
||||
ipAddress: String!
|
||||
"""Time until IP will be no longer allowed/disallowed"""
|
||||
until: Time
|
||||
}
|
||||
|
||||
type DLNAStatus {
|
||||
running: Boolean!
|
||||
"""If not currently running, time until it will be started. If running, time until it will be stopped"""
|
||||
until: Time
|
||||
recentIPAddresses: [String!]!
|
||||
allowedIPAddresses: [DLNAIP!]!
|
||||
}
|
||||
|
||||
input EnableDLNAInput {
|
||||
"""Duration to enable, in minutes. 0 or null for indefinite."""
|
||||
duration: Int
|
||||
}
|
||||
|
||||
input DisableDLNAInput {
|
||||
"""Duration to enable, in minutes. 0 or null for indefinite."""
|
||||
duration: Int
|
||||
}
|
||||
|
||||
input AddTempDLNAIPInput {
|
||||
address: String!
|
||||
"""Duration to enable, in minutes. 0 or null for indefinite."""
|
||||
duration: Int
|
||||
}
|
||||
|
||||
input RemoveTempDLNAIPInput {
|
||||
address: String!
|
||||
}
|
||||
|
|
@ -29,6 +29,13 @@ enum ResolutionEnum {
|
|||
}
|
||||
|
||||
input PerformerFilterType {
|
||||
AND: PerformerFilterType
|
||||
OR: PerformerFilterType
|
||||
NOT: PerformerFilterType
|
||||
|
||||
name: StringCriterionInput
|
||||
details: StringCriterionInput
|
||||
|
||||
"""Filter by favorite"""
|
||||
filter_favorites: Boolean
|
||||
"""Filter by birth year"""
|
||||
|
|
@ -81,6 +88,8 @@ input PerformerFilterType {
|
|||
weight: IntCriterionInput
|
||||
"""Filter by death year"""
|
||||
death_year: IntCriterionInput
|
||||
"""Filter by studios where performer appears in scene/image/gallery"""
|
||||
studios: HierarchicalMultiCriterionInput
|
||||
}
|
||||
|
||||
input SceneMarkerFilterType {
|
||||
|
|
@ -99,6 +108,15 @@ input SceneFilterType {
|
|||
OR: SceneFilterType
|
||||
NOT: SceneFilterType
|
||||
|
||||
title: StringCriterionInput
|
||||
details: StringCriterionInput
|
||||
|
||||
"""Filter by file oshash"""
|
||||
oshash: StringCriterionInput
|
||||
"""Filter by file checksum"""
|
||||
checksum: StringCriterionInput
|
||||
"""Filter by file phash"""
|
||||
phash: StringCriterionInput
|
||||
"""Filter by path"""
|
||||
path: StringCriterionInput
|
||||
"""Filter by rating"""
|
||||
|
|
@ -116,7 +134,7 @@ input SceneFilterType {
|
|||
"""Filter to only include scenes missing this property"""
|
||||
is_missing: String
|
||||
"""Filter to only include scenes with this studio"""
|
||||
studios: MultiCriterionInput
|
||||
studios: HierarchicalMultiCriterionInput
|
||||
"""Filter to only include scenes with this movie"""
|
||||
movies: MultiCriterionInput
|
||||
"""Filter to only include scenes with these tags"""
|
||||
|
|
@ -133,11 +151,22 @@ input SceneFilterType {
|
|||
stash_id: StringCriterionInput
|
||||
"""Filter by url"""
|
||||
url: StringCriterionInput
|
||||
"""Filter by interactive"""
|
||||
interactive: Boolean
|
||||
}
|
||||
|
||||
input MovieFilterType {
|
||||
|
||||
name: StringCriterionInput
|
||||
director: StringCriterionInput
|
||||
synopsis: StringCriterionInput
|
||||
|
||||
"""Filter by duration (in seconds)"""
|
||||
duration: IntCriterionInput
|
||||
"""Filter by rating"""
|
||||
rating: IntCriterionInput
|
||||
"""Filter to only include movies with this studio"""
|
||||
studios: MultiCriterionInput
|
||||
studios: HierarchicalMultiCriterionInput
|
||||
"""Filter to only include movies missing this property"""
|
||||
is_missing: String
|
||||
"""Filter by url"""
|
||||
|
|
@ -145,6 +174,8 @@ input MovieFilterType {
|
|||
}
|
||||
|
||||
input StudioFilterType {
|
||||
name: StringCriterionInput
|
||||
details: StringCriterionInput
|
||||
"""Filter to only include studios with this parent studio"""
|
||||
parents: MultiCriterionInput
|
||||
"""Filter by StashID"""
|
||||
|
|
@ -168,6 +199,11 @@ input GalleryFilterType {
|
|||
OR: GalleryFilterType
|
||||
NOT: GalleryFilterType
|
||||
|
||||
title: StringCriterionInput
|
||||
details: StringCriterionInput
|
||||
|
||||
"""Filter by file checksum"""
|
||||
checksum: StringCriterionInput
|
||||
"""Filter by path"""
|
||||
path: StringCriterionInput
|
||||
"""Filter to only include galleries missing this property"""
|
||||
|
|
@ -181,7 +217,7 @@ input GalleryFilterType {
|
|||
"""Filter by average image resolution"""
|
||||
average_resolution: ResolutionEnum
|
||||
"""Filter to only include galleries with this studio"""
|
||||
studios: MultiCriterionInput
|
||||
studios: HierarchicalMultiCriterionInput
|
||||
"""Filter to only include galleries with these tags"""
|
||||
tags: MultiCriterionInput
|
||||
"""Filter by tag count"""
|
||||
|
|
@ -203,6 +239,12 @@ input TagFilterType {
|
|||
OR: TagFilterType
|
||||
NOT: TagFilterType
|
||||
|
||||
"""Filter by tag name"""
|
||||
name: StringCriterionInput
|
||||
|
||||
"""Filter by tag aliases"""
|
||||
aliases: StringCriterionInput
|
||||
|
||||
"""Filter to only include tags missing this property"""
|
||||
is_missing: String
|
||||
|
||||
|
|
@ -227,6 +269,10 @@ input ImageFilterType {
|
|||
OR: ImageFilterType
|
||||
NOT: ImageFilterType
|
||||
|
||||
title: StringCriterionInput
|
||||
|
||||
"""Filter by file checksum"""
|
||||
checksum: StringCriterionInput
|
||||
"""Filter by path"""
|
||||
path: StringCriterionInput
|
||||
"""Filter by rating"""
|
||||
|
|
@ -240,7 +286,7 @@ input ImageFilterType {
|
|||
"""Filter to only include images missing this property"""
|
||||
is_missing: String
|
||||
"""Filter to only include images with this studio"""
|
||||
studios: MultiCriterionInput
|
||||
studios: HierarchicalMultiCriterionInput
|
||||
"""Filter to only include images with these tags"""
|
||||
tags: MultiCriterionInput
|
||||
"""Filter by tag count"""
|
||||
|
|
@ -297,3 +343,47 @@ input GenderCriterionInput {
|
|||
value: GenderEnum
|
||||
modifier: CriterionModifier!
|
||||
}
|
||||
|
||||
input HierarchicalMultiCriterionInput {
|
||||
value: [ID!]
|
||||
modifier: CriterionModifier!
|
||||
depth: Int!
|
||||
}
|
||||
|
||||
enum FilterMode {
|
||||
SCENES,
|
||||
PERFORMERS,
|
||||
STUDIOS,
|
||||
GALLERIES,
|
||||
SCENE_MARKERS,
|
||||
MOVIES,
|
||||
TAGS,
|
||||
IMAGES,
|
||||
}
|
||||
|
||||
type SavedFilter {
|
||||
id: ID!
|
||||
mode: FilterMode!
|
||||
name: String!
|
||||
"""JSON-encoded filter string"""
|
||||
filter: String!
|
||||
}
|
||||
|
||||
input SaveFilterInput {
|
||||
"""provide ID to overwrite existing filter"""
|
||||
id: ID
|
||||
mode: FilterMode!
|
||||
name: String!
|
||||
"""JSON-encoded filter string"""
|
||||
filter: String!
|
||||
}
|
||||
|
||||
input DestroyFilterInput {
|
||||
id: ID!
|
||||
}
|
||||
|
||||
input SetDefaultFilterInput {
|
||||
mode: FilterMode!
|
||||
"""JSON-encoded filter string - null to clear"""
|
||||
filter: String
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,6 +9,10 @@ type Gallery {
|
|||
details: String
|
||||
rating: Int
|
||||
organized: Boolean!
|
||||
created_at: Time!
|
||||
updated_at: Time!
|
||||
file_mod_time: Time
|
||||
|
||||
scenes: [Scene!]!
|
||||
studio: Studio
|
||||
image_count: Int!
|
||||
|
|
|
|||
|
|
@ -6,6 +6,9 @@ type Image {
|
|||
o_counter: Int
|
||||
organized: Boolean!
|
||||
path: String!
|
||||
created_at: Time!
|
||||
updated_at: Time!
|
||||
file_mod_time: Time
|
||||
|
||||
file: ImageFileType! # Resolver
|
||||
paths: ImagePathsType! # Resolver
|
||||
|
|
|
|||
33
graphql/schema/types/job.graphql
Normal file
33
graphql/schema/types/job.graphql
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
enum JobStatus {
|
||||
READY
|
||||
RUNNING
|
||||
FINISHED
|
||||
STOPPING
|
||||
CANCELLED
|
||||
}
|
||||
|
||||
type Job {
|
||||
id: ID!
|
||||
status: JobStatus!
|
||||
subTasks: [String!]
|
||||
description: String!
|
||||
progress: Float
|
||||
startTime: Time
|
||||
endTime: Time
|
||||
addTime: Time!
|
||||
}
|
||||
|
||||
input FindJobInput {
|
||||
id: ID!
|
||||
}
|
||||
|
||||
enum JobStatusUpdateType {
|
||||
ADD
|
||||
REMOVE
|
||||
UPDATE
|
||||
}
|
||||
|
||||
type JobStatusUpdate {
|
||||
type: JobStatusUpdateType!
|
||||
job: Job!
|
||||
}
|
||||
|
|
@ -63,12 +63,6 @@ input AutoTagMetadataInput {
|
|||
tags: [String!]
|
||||
}
|
||||
|
||||
type MetadataUpdateStatus {
|
||||
progress: Float!
|
||||
status: String!
|
||||
message: String!
|
||||
}
|
||||
|
||||
input ExportObjectTypeInput {
|
||||
ids: [String!]
|
||||
all: Boolean
|
||||
|
|
|
|||
|
|
@ -11,6 +11,8 @@ type Movie {
|
|||
director: String
|
||||
synopsis: String
|
||||
url: String
|
||||
created_at: Time!
|
||||
updated_at: Time!
|
||||
|
||||
front_image_path: String # Resolver
|
||||
back_image_path: String # Resolver
|
||||
|
|
|
|||
|
|
@ -40,6 +40,8 @@ type Performer {
|
|||
death_date: String
|
||||
hair_color: String
|
||||
weight: Int
|
||||
created_at: Time!
|
||||
updated_at: Time!
|
||||
}
|
||||
|
||||
input PerformerCreateInput {
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ type Plugin {
|
|||
version: String
|
||||
|
||||
tasks: [PluginTask!]
|
||||
hooks: [PluginHook!]
|
||||
}
|
||||
|
||||
type PluginTask {
|
||||
|
|
@ -15,6 +16,13 @@ type PluginTask {
|
|||
plugin: Plugin!
|
||||
}
|
||||
|
||||
type PluginHook {
|
||||
name: String!
|
||||
description: String
|
||||
hooks: [String!]
|
||||
plugin: Plugin!
|
||||
}
|
||||
|
||||
type PluginResult {
|
||||
error: String
|
||||
result: String
|
||||
|
|
|
|||
|
|
@ -5,6 +5,8 @@ type SceneMarker {
|
|||
seconds: Float!
|
||||
primary_tag: Tag!
|
||||
tags: [Tag!]!
|
||||
created_at: Time!
|
||||
updated_at: Time!
|
||||
|
||||
"""The path to stream this marker"""
|
||||
stream: String! # Resolver
|
||||
|
|
|
|||
|
|
@ -17,6 +17,7 @@ type ScenePathsType {
|
|||
vtt: String # Resolver
|
||||
chapters_vtt: String # Resolver
|
||||
sprite: String # Resolver
|
||||
funscript: String # Resolver
|
||||
}
|
||||
|
||||
type SceneMovie {
|
||||
|
|
@ -37,6 +38,10 @@ type Scene {
|
|||
o_counter: Int
|
||||
path: String!
|
||||
phash: String
|
||||
interactive: Boolean!
|
||||
created_at: Time!
|
||||
updated_at: Time!
|
||||
file_mod_time: Time
|
||||
|
||||
file: SceneFileType! # Resolver
|
||||
paths: ScenePathsType! # Resolver
|
||||
|
|
|
|||
|
|
@ -13,6 +13,8 @@ type Studio {
|
|||
stash_ids: [StashID!]!
|
||||
rating: Int
|
||||
details: String
|
||||
created_at: Time!
|
||||
updated_at: Time!
|
||||
}
|
||||
|
||||
input StudioCreateInput {
|
||||
|
|
|
|||
|
|
@ -1,6 +1,9 @@
|
|||
type Tag {
|
||||
id: ID!
|
||||
name: String!
|
||||
aliases: [String!]!
|
||||
created_at: Time!
|
||||
updated_at: Time!
|
||||
|
||||
image_path: String # Resolver
|
||||
scene_count: Int # Resolver
|
||||
|
|
@ -12,6 +15,7 @@ type Tag {
|
|||
|
||||
input TagCreateInput {
|
||||
name: String!
|
||||
aliases: [String!]
|
||||
|
||||
"""This should be a URL or a base64 encoded data URL"""
|
||||
image: String
|
||||
|
|
@ -19,7 +23,8 @@ input TagCreateInput {
|
|||
|
||||
input TagUpdateInput {
|
||||
id: ID!
|
||||
name: String!
|
||||
name: String
|
||||
aliases: [String!]
|
||||
|
||||
"""This should be a URL or a base64 encoded data URL"""
|
||||
image: String
|
||||
|
|
@ -32,4 +37,9 @@ input TagDestroyInput {
|
|||
type FindTagsResultType {
|
||||
count: Int!
|
||||
tags: [Tag!]!
|
||||
}
|
||||
}
|
||||
|
||||
input TagsMergeInput {
|
||||
source: [ID!]!
|
||||
destination: ID!
|
||||
}
|
||||
|
|
|
|||
14
main.go
14
main.go
|
|
@ -2,6 +2,11 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"os/signal"
|
||||
"runtime/pprof"
|
||||
"syscall"
|
||||
|
||||
"github.com/stashapp/stash/pkg/api"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
|
||||
|
|
@ -12,9 +17,16 @@ import (
|
|||
func main() {
|
||||
manager.Initialize()
|
||||
api.Start()
|
||||
|
||||
// stop any profiling at exit
|
||||
defer pprof.StopCPUProfile()
|
||||
blockForever()
|
||||
}
|
||||
|
||||
func blockForever() {
|
||||
select {}
|
||||
// handle signals
|
||||
signals := make(chan os.Signal, 1)
|
||||
signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM)
|
||||
|
||||
<-signals
|
||||
}
|
||||
|
|
|
|||
|
|
@ -12,8 +12,8 @@ import (
|
|||
const updateInputField = "input"
|
||||
|
||||
func getArgumentMap(ctx context.Context) map[string]interface{} {
|
||||
rctx := graphql.GetResolverContext(ctx)
|
||||
reqCtx := graphql.GetRequestContext(ctx)
|
||||
rctx := graphql.GetFieldContext(ctx)
|
||||
reqCtx := graphql.GetOperationContext(ctx)
|
||||
return rctx.Field.ArgumentMap(reqCtx.Variables)
|
||||
}
|
||||
|
||||
|
|
@ -65,6 +65,15 @@ func (t changesetTranslator) hasField(field string) bool {
|
|||
return found
|
||||
}
|
||||
|
||||
func (t changesetTranslator) getFields() []string {
|
||||
var ret []string
|
||||
for k := range t.inputMap {
|
||||
ret = append(ret, k)
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func (t changesetTranslator) nullString(value *string, field string) *sql.NullString {
|
||||
if !t.hasField(field) {
|
||||
return nil
|
||||
|
|
|
|||
|
|
@ -5,13 +5,12 @@ package api
|
|||
type key int
|
||||
|
||||
const (
|
||||
galleryKey key = 0
|
||||
performerKey key = 1
|
||||
sceneKey key = 2
|
||||
studioKey key = 3
|
||||
movieKey key = 4
|
||||
ContextUser key = 5
|
||||
tagKey key = 6
|
||||
downloadKey key = 7
|
||||
imageKey key = 8
|
||||
galleryKey key = iota
|
||||
performerKey
|
||||
sceneKey
|
||||
studioKey
|
||||
movieKey
|
||||
tagKey
|
||||
downloadKey
|
||||
imageKey
|
||||
)
|
||||
|
|
|
|||
|
|
@ -7,10 +7,16 @@ import (
|
|||
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/plugin"
|
||||
)
|
||||
|
||||
type hookExecutor interface {
|
||||
ExecutePostHooks(ctx context.Context, id int, hookType plugin.HookTriggerEnum, input interface{}, inputFields []string)
|
||||
}
|
||||
|
||||
type Resolver struct {
|
||||
txnManager models.TransactionManager
|
||||
txnManager models.TransactionManager
|
||||
hookExecutor hookExecutor
|
||||
}
|
||||
|
||||
func (r *Resolver) Gallery() models.GalleryResolver {
|
||||
|
|
@ -208,13 +214,10 @@ func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([
|
|||
return err
|
||||
}
|
||||
_, hasKey := tags[markerPrimaryTag.ID]
|
||||
var sceneMarkerTag *models.SceneMarkerTag
|
||||
if !hasKey {
|
||||
sceneMarkerTag = &models.SceneMarkerTag{Tag: markerPrimaryTag}
|
||||
sceneMarkerTag := &models.SceneMarkerTag{Tag: markerPrimaryTag}
|
||||
tags[markerPrimaryTag.ID] = sceneMarkerTag
|
||||
keys = append(keys, markerPrimaryTag.ID)
|
||||
} else {
|
||||
sceneMarkerTag = tags[markerPrimaryTag.ID]
|
||||
}
|
||||
tags[markerPrimaryTag.ID].SceneMarkers = append(tags[markerPrimaryTag.ID].SceneMarkers, sceneMarker)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ package api
|
|||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/image"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
|
|
@ -153,3 +154,15 @@ func (r *galleryResolver) ImageCount(ctx context.Context, obj *models.Gallery) (
|
|||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *galleryResolver) CreatedAt(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
|
||||
return &obj.CreatedAt.Timestamp, nil
|
||||
}
|
||||
|
||||
func (r *galleryResolver) UpdatedAt(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
|
||||
return &obj.UpdatedAt.Timestamp, nil
|
||||
}
|
||||
|
||||
func (r *galleryResolver) FileModTime(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
|
||||
return &obj.FileModTime.Timestamp, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ package api
|
|||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/api/urlbuilders"
|
||||
"github.com/stashapp/stash/pkg/image"
|
||||
|
|
@ -91,3 +92,15 @@ func (r *imageResolver) Performers(ctx context.Context, obj *models.Image) (ret
|
|||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *imageResolver) CreatedAt(ctx context.Context, obj *models.Image) (*time.Time, error) {
|
||||
return &obj.CreatedAt.Timestamp, nil
|
||||
}
|
||||
|
||||
func (r *imageResolver) UpdatedAt(ctx context.Context, obj *models.Image) (*time.Time, error) {
|
||||
return &obj.UpdatedAt.Timestamp, nil
|
||||
}
|
||||
|
||||
func (r *imageResolver) FileModTime(ctx context.Context, obj *models.Image) (*time.Time, error) {
|
||||
return &obj.FileModTime.Timestamp, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ package api
|
|||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/api/urlbuilders"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
|
|
@ -123,3 +124,11 @@ func (r *movieResolver) SceneCount(ctx context.Context, obj *models.Movie) (ret
|
|||
|
||||
return &res, err
|
||||
}
|
||||
|
||||
func (r *movieResolver) CreatedAt(ctx context.Context, obj *models.Movie) (*time.Time, error) {
|
||||
return &obj.CreatedAt.Timestamp, nil
|
||||
}
|
||||
|
||||
func (r *movieResolver) UpdatedAt(ctx context.Context, obj *models.Movie) (*time.Time, error) {
|
||||
return &obj.UpdatedAt.Timestamp, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ package api
|
|||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/api/urlbuilders"
|
||||
"github.com/stashapp/stash/pkg/gallery"
|
||||
|
|
@ -245,3 +246,11 @@ func (r *performerResolver) Weight(ctx context.Context, obj *models.Performer) (
|
|||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (r *performerResolver) CreatedAt(ctx context.Context, obj *models.Performer) (*time.Time, error) {
|
||||
return &obj.CreatedAt.Timestamp, nil
|
||||
}
|
||||
|
||||
func (r *performerResolver) UpdatedAt(ctx context.Context, obj *models.Performer) (*time.Time, error) {
|
||||
return &obj.UpdatedAt.Timestamp, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ package api
|
|||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/api/urlbuilders"
|
||||
"github.com/stashapp/stash/pkg/manager/config"
|
||||
|
|
@ -87,6 +88,8 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
|
|||
vttPath := builder.GetSpriteVTTURL()
|
||||
spritePath := builder.GetSpriteURL()
|
||||
chaptersVttPath := builder.GetChaptersVTTURL()
|
||||
funscriptPath := builder.GetFunscriptURL()
|
||||
|
||||
return &models.ScenePathsType{
|
||||
Screenshot: &screenshotPath,
|
||||
Preview: &previewPath,
|
||||
|
|
@ -95,6 +98,7 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
|
|||
Vtt: &vttPath,
|
||||
ChaptersVtt: &chaptersVttPath,
|
||||
Sprite: &spritePath,
|
||||
Funscript: &funscriptPath,
|
||||
}, nil
|
||||
}
|
||||
|
||||
|
|
@ -157,8 +161,7 @@ func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*m
|
|||
}
|
||||
|
||||
if sceneIdx.Valid {
|
||||
var idx int
|
||||
idx = int(sceneIdx.Int64)
|
||||
idx := int(sceneIdx.Int64)
|
||||
sceneMovie.SceneIndex = &idx
|
||||
}
|
||||
|
||||
|
|
@ -212,3 +215,15 @@ func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string,
|
|||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (r *sceneResolver) CreatedAt(ctx context.Context, obj *models.Scene) (*time.Time, error) {
|
||||
return &obj.CreatedAt.Timestamp, nil
|
||||
}
|
||||
|
||||
func (r *sceneResolver) UpdatedAt(ctx context.Context, obj *models.Scene) (*time.Time, error) {
|
||||
return &obj.UpdatedAt.Timestamp, nil
|
||||
}
|
||||
|
||||
func (r *sceneResolver) FileModTime(ctx context.Context, obj *models.Scene) (*time.Time, error) {
|
||||
return &obj.FileModTime.Timestamp, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ package api
|
|||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/api/urlbuilders"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
|
|
@ -56,3 +57,11 @@ func (r *sceneMarkerResolver) Preview(ctx context.Context, obj *models.SceneMark
|
|||
sceneID := int(obj.SceneID.Int64)
|
||||
return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamPreviewURL(obj.ID), nil
|
||||
}
|
||||
|
||||
func (r *sceneMarkerResolver) CreatedAt(ctx context.Context, obj *models.SceneMarker) (*time.Time, error) {
|
||||
return &obj.CreatedAt.Timestamp, nil
|
||||
}
|
||||
|
||||
func (r *sceneMarkerResolver) UpdatedAt(ctx context.Context, obj *models.SceneMarker) (*time.Time, error) {
|
||||
return &obj.UpdatedAt.Timestamp, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ package api
|
|||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/api/urlbuilders"
|
||||
"github.com/stashapp/stash/pkg/gallery"
|
||||
|
|
@ -131,3 +132,11 @@ func (r *studioResolver) Details(ctx context.Context, obj *models.Studio) (*stri
|
|||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (r *studioResolver) CreatedAt(ctx context.Context, obj *models.Studio) (*time.Time, error) {
|
||||
return &obj.CreatedAt.Timestamp, nil
|
||||
}
|
||||
|
||||
func (r *studioResolver) UpdatedAt(ctx context.Context, obj *models.Studio) (*time.Time, error) {
|
||||
return &obj.UpdatedAt.Timestamp, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ package api
|
|||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/api/urlbuilders"
|
||||
"github.com/stashapp/stash/pkg/gallery"
|
||||
|
|
@ -9,6 +10,17 @@ import (
|
|||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *tagResolver) Aliases(ctx context.Context, obj *models.Tag) (ret []string, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Tag().GetAliases(obj.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, err
|
||||
}
|
||||
|
||||
func (r *tagResolver) SceneCount(ctx context.Context, obj *models.Tag) (ret *int, err error) {
|
||||
var count int
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
|
|
@ -74,3 +86,11 @@ func (r *tagResolver) ImagePath(ctx context.Context, obj *models.Tag) (*string,
|
|||
imagePath := urlbuilders.NewTagURLBuilder(baseURL, obj).GetTagImageURL()
|
||||
return &imagePath, nil
|
||||
}
|
||||
|
||||
func (r *tagResolver) CreatedAt(ctx context.Context, obj *models.Tag) (*time.Time, error) {
|
||||
return &obj.CreatedAt.Timestamp, nil
|
||||
}
|
||||
|
||||
func (r *tagResolver) UpdatedAt(ctx context.Context, obj *models.Tag) (*time.Time, error) {
|
||||
return &obj.UpdatedAt.Timestamp, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -88,6 +88,9 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
|
|||
if input.ParallelTasks != nil {
|
||||
c.Set(config.ParallelTasks, *input.ParallelTasks)
|
||||
}
|
||||
|
||||
c.Set(config.PreviewAudio, input.PreviewAudio)
|
||||
|
||||
if input.PreviewSegments != nil {
|
||||
c.Set(config.PreviewSegments, *input.PreviewSegments)
|
||||
}
|
||||
|
|
@ -246,6 +249,10 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.
|
|||
c.Set(config.CSSEnabled, *input.CSSEnabled)
|
||||
}
|
||||
|
||||
if input.HandyKey != nil {
|
||||
c.Set(config.HandyKey, *input.HandyKey)
|
||||
}
|
||||
|
||||
if err := c.Write(); err != nil {
|
||||
return makeConfigInterfaceResult(), err
|
||||
}
|
||||
|
|
@ -253,6 +260,37 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.
|
|||
return makeConfigInterfaceResult(), nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) ConfigureDlna(ctx context.Context, input models.ConfigDLNAInput) (*models.ConfigDLNAResult, error) {
|
||||
c := config.GetInstance()
|
||||
|
||||
if input.ServerName != nil {
|
||||
c.Set(config.DLNAServerName, *input.ServerName)
|
||||
}
|
||||
|
||||
c.Set(config.DLNADefaultIPWhitelist, input.WhitelistedIPs)
|
||||
|
||||
currentDLNAEnabled := c.GetDLNADefaultEnabled()
|
||||
if input.Enabled != nil && *input.Enabled != currentDLNAEnabled {
|
||||
c.Set(config.DLNADefaultEnabled, *input.Enabled)
|
||||
|
||||
// start/stop the DLNA service as needed
|
||||
dlnaService := manager.GetInstance().DLNAService
|
||||
if !*input.Enabled && dlnaService.IsRunning() {
|
||||
dlnaService.Stop(nil)
|
||||
} else if *input.Enabled && !dlnaService.IsRunning() {
|
||||
dlnaService.Start(nil)
|
||||
}
|
||||
}
|
||||
|
||||
c.Set(config.DLNAInterfaces, input.Interfaces)
|
||||
|
||||
if err := c.Write(); err != nil {
|
||||
return makeConfigDLNAResult(), err
|
||||
}
|
||||
|
||||
return makeConfigDLNAResult(), nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) GenerateAPIKey(ctx context.Context, input models.GenerateAPIKeyInput) (string, error) {
|
||||
c := config.GetInstance()
|
||||
|
||||
|
|
|
|||
42
pkg/api/resolver_mutation_dlna.go
Normal file
42
pkg/api/resolver_mutation_dlna.go
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *mutationResolver) EnableDlna(ctx context.Context, input models.EnableDLNAInput) (bool, error) {
|
||||
err := manager.GetInstance().DLNAService.Start(parseMinutes(input.Duration))
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) DisableDlna(ctx context.Context, input models.DisableDLNAInput) (bool, error) {
|
||||
manager.GetInstance().DLNAService.Stop(parseMinutes(input.Duration))
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) AddTempDlnaip(ctx context.Context, input models.AddTempDLNAIPInput) (bool, error) {
|
||||
manager.GetInstance().DLNAService.AddTempDLNAIP(input.Address, parseMinutes(input.Duration))
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) RemoveTempDlnaip(ctx context.Context, input models.RemoveTempDLNAIPInput) (bool, error) {
|
||||
ret := manager.GetInstance().DLNAService.RemoveTempDLNAIP(input.Address)
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func parseMinutes(minutes *int) *time.Duration {
|
||||
var ret *time.Duration
|
||||
if minutes != nil {
|
||||
d := time.Duration(*minutes) * time.Minute
|
||||
ret = &d
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
|
@ -10,9 +10,21 @@ import (
|
|||
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/plugin"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
func (r *mutationResolver) getGallery(ctx context.Context, id int) (ret *models.Gallery, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Gallery().Find(id)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) GalleryCreate(ctx context.Context, input models.GalleryCreateInput) (*models.Gallery, error) {
|
||||
// name must be provided
|
||||
if input.Title == "" {
|
||||
|
|
@ -90,7 +102,8 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input models.Galle
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return gallery, nil
|
||||
r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryCreatePost, input, nil)
|
||||
return r.getGallery(ctx, gallery.ID)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) updateGalleryPerformers(qb models.GalleryReaderWriter, galleryID int, performerIDs []string) error {
|
||||
|
|
@ -130,7 +143,9 @@ func (r *mutationResolver) GalleryUpdate(ctx context.Context, input models.Galle
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
// execute post hooks outside txn
|
||||
r.hookExecutor.ExecutePostHooks(ctx, ret.ID, plugin.GalleryUpdatePost, input, translator.getFields())
|
||||
return r.getGallery(ctx, ret.ID)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) GalleriesUpdate(ctx context.Context, input []*models.GalleryUpdateInput) (ret []*models.Gallery, err error) {
|
||||
|
|
@ -156,7 +171,23 @@ func (r *mutationResolver) GalleriesUpdate(ctx context.Context, input []*models.
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
// execute post hooks outside txn
|
||||
var newRet []*models.Gallery
|
||||
for i, gallery := range ret {
|
||||
translator := changesetTranslator{
|
||||
inputMap: inputMaps[i],
|
||||
}
|
||||
|
||||
r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryUpdatePost, input, translator.getFields())
|
||||
gallery, err = r.getGallery(ctx, gallery.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
newRet = append(newRet, gallery)
|
||||
}
|
||||
|
||||
return newRet, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) galleryUpdate(input models.GalleryUpdateInput, translator changesetTranslator, repo models.Repository) (*models.Gallery, error) {
|
||||
|
|
@ -314,7 +345,20 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input models.B
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
// execute post hooks outside of txn
|
||||
var newRet []*models.Gallery
|
||||
for _, gallery := range ret {
|
||||
r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryUpdatePost, input, translator.getFields())
|
||||
|
||||
gallery, err := r.getGallery(ctx, gallery.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
newRet = append(newRet, gallery)
|
||||
}
|
||||
|
||||
return newRet, nil
|
||||
}
|
||||
|
||||
func adjustGalleryPerformerIDs(qb models.GalleryReader, galleryID int, ids models.BulkUpdateIds) (ret []int, err error) {
|
||||
|
|
@ -438,6 +482,16 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
|
|||
}
|
||||
}
|
||||
|
||||
// call post hook after performing the other actions
|
||||
for _, gallery := range galleries {
|
||||
r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryDestroyPost, input, nil)
|
||||
}
|
||||
|
||||
// call image destroy post hook as well
|
||||
for _, img := range imgsToDelete {
|
||||
r.hookExecutor.ExecutePostHooks(ctx, img.ID, plugin.ImageDestroyPost, nil, nil)
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -8,9 +8,21 @@ import (
|
|||
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/plugin"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
func (r *mutationResolver) getImage(ctx context.Context, id int) (ret *models.Image, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Image().Find(id)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) ImageUpdate(ctx context.Context, input models.ImageUpdateInput) (ret *models.Image, err error) {
|
||||
translator := changesetTranslator{
|
||||
inputMap: getUpdateInputMap(ctx),
|
||||
|
|
@ -24,7 +36,9 @@ func (r *mutationResolver) ImageUpdate(ctx context.Context, input models.ImageUp
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
// execute post hooks outside txn
|
||||
r.hookExecutor.ExecutePostHooks(ctx, ret.ID, plugin.ImageUpdatePost, input, translator.getFields())
|
||||
return r.getImage(ctx, ret.ID)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*models.ImageUpdateInput) (ret []*models.Image, err error) {
|
||||
|
|
@ -50,7 +64,23 @@ func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*models.Ima
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
// execute post hooks outside txn
|
||||
var newRet []*models.Image
|
||||
for i, image := range ret {
|
||||
translator := changesetTranslator{
|
||||
inputMap: inputMaps[i],
|
||||
}
|
||||
|
||||
r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageUpdatePost, input, translator.getFields())
|
||||
image, err = r.getImage(ctx, image.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
newRet = append(newRet, image)
|
||||
}
|
||||
|
||||
return newRet, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) imageUpdate(input models.ImageUpdateInput, translator changesetTranslator, repo models.Repository) (*models.Image, error) {
|
||||
|
|
@ -202,7 +232,20 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input models.Bul
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
// execute post hooks outside of txn
|
||||
var newRet []*models.Image
|
||||
for _, image := range ret {
|
||||
r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageUpdatePost, input, translator.getFields())
|
||||
|
||||
image, err = r.getImage(ctx, image.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
newRet = append(newRet, image)
|
||||
}
|
||||
|
||||
return newRet, nil
|
||||
}
|
||||
|
||||
func adjustImageGalleryIDs(qb models.ImageReader, imageID int, ids models.BulkUpdateIds) (ret []int, err error) {
|
||||
|
|
@ -268,6 +311,9 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
|
|||
manager.DeleteImageFile(image)
|
||||
}
|
||||
|
||||
// call post hook after performing the other actions
|
||||
r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageDestroyPost, input, nil)
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
|
|
@ -315,6 +361,9 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
|
|||
if input.DeleteFile != nil && *input.DeleteFile {
|
||||
manager.DeleteImageFile(image)
|
||||
}
|
||||
|
||||
// call post hook after performing the other actions
|
||||
r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageDestroyPost, input, nil)
|
||||
}
|
||||
|
||||
return true, nil
|
||||
|
|
|
|||
23
pkg/api/resolver_mutation_job.go
Normal file
23
pkg/api/resolver_mutation_job.go
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strconv"
|
||||
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
)
|
||||
|
||||
func (r *mutationResolver) StopJob(ctx context.Context, jobID string) (bool, error) {
|
||||
idInt, err := strconv.Atoi(jobID)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
manager.GetInstance().JobManager.CancelJob(idInt)
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) StopAllJobs(ctx context.Context) (bool, error) {
|
||||
manager.GetInstance().JobManager.CancelAll()
|
||||
return true, nil
|
||||
}
|
||||
|
|
@ -4,6 +4,8 @@ import (
|
|||
"context"
|
||||
"io/ioutil"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
|
|
@ -15,16 +17,22 @@ import (
|
|||
)
|
||||
|
||||
func (r *mutationResolver) MetadataScan(ctx context.Context, input models.ScanMetadataInput) (string, error) {
|
||||
manager.GetInstance().Scan(input)
|
||||
return "todo", nil
|
||||
}
|
||||
jobID, err := manager.GetInstance().Scan(ctx, input)
|
||||
|
||||
func (r *mutationResolver) MetadataImport(ctx context.Context) (string, error) {
|
||||
if err := manager.GetInstance().Import(); err != nil {
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return "todo", nil
|
||||
return strconv.Itoa(jobID), nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) MetadataImport(ctx context.Context) (string, error) {
|
||||
jobID, err := manager.GetInstance().Import(ctx)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return strconv.Itoa(jobID), nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) ImportObjects(ctx context.Context, input models.ImportObjectsInput) (string, error) {
|
||||
|
|
@ -33,30 +41,26 @@ func (r *mutationResolver) ImportObjects(ctx context.Context, input models.Impor
|
|||
return "", err
|
||||
}
|
||||
|
||||
_, err = manager.GetInstance().RunSingleTask(t)
|
||||
jobID := manager.GetInstance().RunSingleTask(ctx, t)
|
||||
|
||||
return strconv.Itoa(jobID), nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) MetadataExport(ctx context.Context) (string, error) {
|
||||
jobID, err := manager.GetInstance().Export(ctx)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return "todo", nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) MetadataExport(ctx context.Context) (string, error) {
|
||||
if err := manager.GetInstance().Export(); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return "todo", nil
|
||||
return strconv.Itoa(jobID), nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) ExportObjects(ctx context.Context, input models.ExportObjectsInput) (*string, error) {
|
||||
t := manager.CreateExportTask(config.GetInstance().GetVideoFileNamingAlgorithm(), input)
|
||||
wg, err := manager.GetInstance().RunSingleTask(t)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
var wg sync.WaitGroup
|
||||
wg.Add(1)
|
||||
t.Start(&wg)
|
||||
|
||||
if t.DownloadHash != "" {
|
||||
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||
|
|
@ -71,38 +75,28 @@ func (r *mutationResolver) ExportObjects(ctx context.Context, input models.Expor
|
|||
}
|
||||
|
||||
func (r *mutationResolver) MetadataGenerate(ctx context.Context, input models.GenerateMetadataInput) (string, error) {
|
||||
manager.GetInstance().Generate(input)
|
||||
return "todo", nil
|
||||
jobID, err := manager.GetInstance().Generate(ctx, input)
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return strconv.Itoa(jobID), nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) MetadataAutoTag(ctx context.Context, input models.AutoTagMetadataInput) (string, error) {
|
||||
manager.GetInstance().AutoTag(input)
|
||||
return "todo", nil
|
||||
jobID := manager.GetInstance().AutoTag(ctx, input)
|
||||
return strconv.Itoa(jobID), nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) MetadataClean(ctx context.Context, input models.CleanMetadataInput) (string, error) {
|
||||
manager.GetInstance().Clean(input)
|
||||
return "todo", nil
|
||||
jobID := manager.GetInstance().Clean(ctx, input)
|
||||
return strconv.Itoa(jobID), nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) MigrateHashNaming(ctx context.Context) (string, error) {
|
||||
manager.GetInstance().MigrateHash()
|
||||
return "todo", nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) JobStatus(ctx context.Context) (*models.MetadataUpdateStatus, error) {
|
||||
status := manager.GetInstance().Status
|
||||
ret := models.MetadataUpdateStatus{
|
||||
Progress: status.Progress,
|
||||
Status: status.Status.String(),
|
||||
Message: "",
|
||||
}
|
||||
|
||||
return &ret, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) StopJob(ctx context.Context) (bool, error) {
|
||||
return manager.GetInstance().Status.Stop(), nil
|
||||
jobID := manager.GetInstance().MigrateHash(ctx)
|
||||
return strconv.Itoa(jobID), nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) BackupDatabase(ctx context.Context, input models.BackupDatabaseInput) (*string, error) {
|
||||
|
|
|
|||
|
|
@ -7,9 +7,21 @@ import (
|
|||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/plugin"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
func (r *mutationResolver) getMovie(ctx context.Context, id int) (ret *models.Movie, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Movie().Find(id)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCreateInput) (*models.Movie, error) {
|
||||
// generate checksum from movie name rather than image
|
||||
checksum := utils.MD5FromString(input.Name)
|
||||
|
|
@ -104,7 +116,8 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return movie, nil
|
||||
r.hookExecutor.ExecutePostHooks(ctx, movie.ID, plugin.MovieCreatePost, input, nil)
|
||||
return r.getMovie(ctx, movie.ID)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUpdateInput) (*models.Movie, error) {
|
||||
|
|
@ -203,7 +216,8 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return movie, nil
|
||||
r.hookExecutor.ExecutePostHooks(ctx, movie.ID, plugin.MovieUpdatePost, input, translator.getFields())
|
||||
return r.getMovie(ctx, movie.ID)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) MovieDestroy(ctx context.Context, input models.MovieDestroyInput) (bool, error) {
|
||||
|
|
@ -217,6 +231,9 @@ func (r *mutationResolver) MovieDestroy(ctx context.Context, input models.MovieD
|
|||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
r.hookExecutor.ExecutePostHooks(ctx, id, plugin.MovieDestroyPost, input, nil)
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
|
|
@ -238,5 +255,10 @@ func (r *mutationResolver) MoviesDestroy(ctx context.Context, movieIDs []string)
|
|||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
for _, id := range ids {
|
||||
r.hookExecutor.ExecutePostHooks(ctx, id, plugin.MovieDestroyPost, movieIDs, nil)
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,9 +9,21 @@ import (
|
|||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/performer"
|
||||
"github.com/stashapp/stash/pkg/plugin"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
func (r *mutationResolver) getPerformer(ctx context.Context, id int) (ret *models.Performer, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Performer().Find(id)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.PerformerCreateInput) (*models.Performer, error) {
|
||||
// generate checksum from performer name rather than image
|
||||
checksum := utils.MD5FromString(input.Name)
|
||||
|
|
@ -146,7 +158,8 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return performer, nil
|
||||
r.hookExecutor.ExecutePostHooks(ctx, performer.ID, plugin.PerformerCreatePost, input, nil)
|
||||
return r.getPerformer(ctx, performer.ID)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.PerformerUpdateInput) (*models.Performer, error) {
|
||||
|
|
@ -267,7 +280,8 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return p, nil
|
||||
r.hookExecutor.ExecutePostHooks(ctx, p.ID, plugin.PerformerUpdatePost, input, translator.getFields())
|
||||
return r.getPerformer(ctx, p.ID)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) updatePerformerTags(qb models.PerformerReaderWriter, performerID int, tagsIDs []string) error {
|
||||
|
|
@ -372,7 +386,20 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input models
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
// execute post hooks outside of txn
|
||||
var newRet []*models.Performer
|
||||
for _, performer := range ret {
|
||||
r.hookExecutor.ExecutePostHooks(ctx, performer.ID, plugin.ImageUpdatePost, input, translator.getFields())
|
||||
|
||||
performer, err = r.getPerformer(ctx, performer.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
newRet = append(newRet, performer)
|
||||
}
|
||||
|
||||
return newRet, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) PerformerDestroy(ctx context.Context, input models.PerformerDestroyInput) (bool, error) {
|
||||
|
|
@ -386,6 +413,9 @@ func (r *mutationResolver) PerformerDestroy(ctx context.Context, input models.Pe
|
|||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
r.hookExecutor.ExecutePostHooks(ctx, id, plugin.PerformerDestroyPost, input, nil)
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
|
|
@ -407,5 +437,10 @@ func (r *mutationResolver) PerformersDestroy(ctx context.Context, performerIDs [
|
|||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
for _, id := range ids {
|
||||
r.hookExecutor.ExecutePostHooks(ctx, id, plugin.PerformerDestroyPost, performerIDs, nil)
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,45 +2,20 @@ package api
|
|||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/manager/config"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/plugin/common"
|
||||
)
|
||||
|
||||
func (r *mutationResolver) RunPluginTask(ctx context.Context, pluginID string, taskName string, args []*models.PluginArgInput) (string, error) {
|
||||
currentUser := getCurrentUserID(ctx)
|
||||
|
||||
var cookie *http.Cookie
|
||||
var err error
|
||||
if currentUser != nil {
|
||||
cookie, err = createSessionCookie(*currentUser)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
}
|
||||
|
||||
config := config.GetInstance()
|
||||
serverConnection := common.StashServerConnection{
|
||||
Scheme: "http",
|
||||
Port: config.GetPort(),
|
||||
SessionCookie: cookie,
|
||||
Dir: config.GetConfigPath(),
|
||||
}
|
||||
|
||||
if HasTLSConfig() {
|
||||
serverConnection.Scheme = "https"
|
||||
}
|
||||
|
||||
manager.GetInstance().RunPluginTask(pluginID, taskName, args, serverConnection)
|
||||
m := manager.GetInstance()
|
||||
m.RunPluginTask(ctx, pluginID, taskName, args)
|
||||
return "todo", nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) ReloadPlugins(ctx context.Context) (bool, error) {
|
||||
err := manager.GetInstance().PluginCache.ReloadPlugins()
|
||||
err := manager.GetInstance().PluginCache.LoadPlugins()
|
||||
if err != nil {
|
||||
logger.Errorf("Error reading plugin configs: %s", err.Error())
|
||||
}
|
||||
|
|
|
|||
89
pkg/api/resolver_mutation_saved_filter.go
Normal file
89
pkg/api/resolver_mutation_saved_filter.go
Normal file
|
|
@ -0,0 +1,89 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *mutationResolver) SaveFilter(ctx context.Context, input models.SaveFilterInput) (ret *models.SavedFilter, err error) {
|
||||
if strings.TrimSpace(input.Name) == "" {
|
||||
return nil, errors.New("name must be non-empty")
|
||||
}
|
||||
|
||||
var id *int
|
||||
if input.ID != nil {
|
||||
idv, err := strconv.Atoi(*input.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
id = &idv
|
||||
}
|
||||
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
f := models.SavedFilter{
|
||||
Mode: input.Mode,
|
||||
Name: input.Name,
|
||||
Filter: input.Filter,
|
||||
}
|
||||
if id == nil {
|
||||
ret, err = repo.SavedFilter().Create(f)
|
||||
} else {
|
||||
f.ID = *id
|
||||
ret, err = repo.SavedFilter().Update(f)
|
||||
}
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return ret, err
|
||||
}
|
||||
|
||||
func (r *mutationResolver) DestroySavedFilter(ctx context.Context, input models.DestroyFilterInput) (bool, error) {
|
||||
id, err := strconv.Atoi(input.ID)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
return repo.SavedFilter().Destroy(id)
|
||||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) SetDefaultFilter(ctx context.Context, input models.SetDefaultFilterInput) (bool, error) {
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.SavedFilter()
|
||||
|
||||
if input.Filter == nil {
|
||||
// clearing
|
||||
def, err := qb.FindDefault(input.Mode)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if def != nil {
|
||||
return qb.Destroy(def.ID)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
_, err := qb.SetDefault(models.SavedFilter{
|
||||
Mode: input.Mode,
|
||||
Filter: *input.Filter,
|
||||
})
|
||||
|
||||
return err
|
||||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
|
@ -10,9 +10,21 @@ import (
|
|||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/manager/config"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/plugin"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
func (r *mutationResolver) getScene(ctx context.Context, id int) (ret *models.Scene, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Scene().Find(id)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) SceneUpdate(ctx context.Context, input models.SceneUpdateInput) (ret *models.Scene, err error) {
|
||||
translator := changesetTranslator{
|
||||
inputMap: getUpdateInputMap(ctx),
|
||||
|
|
@ -26,7 +38,8 @@ func (r *mutationResolver) SceneUpdate(ctx context.Context, input models.SceneUp
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
r.hookExecutor.ExecutePostHooks(ctx, ret.ID, plugin.SceneUpdatePost, input, translator.getFields())
|
||||
return r.getScene(ctx, ret.ID)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) ScenesUpdate(ctx context.Context, input []*models.SceneUpdateInput) (ret []*models.Scene, err error) {
|
||||
|
|
@ -52,7 +65,24 @@ func (r *mutationResolver) ScenesUpdate(ctx context.Context, input []*models.Sce
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
// execute post hooks outside of txn
|
||||
var newRet []*models.Scene
|
||||
for i, scene := range ret {
|
||||
translator := changesetTranslator{
|
||||
inputMap: inputMaps[i],
|
||||
}
|
||||
|
||||
r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneUpdatePost, input, translator.getFields())
|
||||
|
||||
scene, err = r.getScene(ctx, scene.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
newRet = append(newRet, scene)
|
||||
}
|
||||
|
||||
return newRet, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, translator changesetTranslator, repo models.Repository) (*models.Scene, error) {
|
||||
|
|
@ -281,7 +311,20 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.Bul
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
// execute post hooks outside of txn
|
||||
var newRet []*models.Scene
|
||||
for _, scene := range ret {
|
||||
r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneUpdatePost, input, translator.getFields())
|
||||
|
||||
scene, err = r.getScene(ctx, scene.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
newRet = append(newRet, scene)
|
||||
}
|
||||
|
||||
return newRet, nil
|
||||
}
|
||||
|
||||
func adjustIDs(existingIDs []int, updateIDs models.BulkUpdateIds) []int {
|
||||
|
|
@ -393,6 +436,9 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
|
|||
manager.DeleteSceneFile(scene)
|
||||
}
|
||||
|
||||
// call post hook after performing the other actions
|
||||
r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneDestroyPost, input, nil)
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
|
|
@ -406,6 +452,9 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
|
|||
sceneID, _ := strconv.Atoi(id)
|
||||
|
||||
scene, err := qb.Find(sceneID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if scene != nil {
|
||||
scenes = append(scenes, scene)
|
||||
}
|
||||
|
|
@ -439,11 +488,25 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
|
|||
if input.DeleteFile != nil && *input.DeleteFile {
|
||||
manager.DeleteSceneFile(scene)
|
||||
}
|
||||
|
||||
// call post hook after performing the other actions
|
||||
r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneDestroyPost, input, nil)
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) getSceneMarker(ctx context.Context, id int) (ret *models.SceneMarker, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.SceneMarker().Find(id)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input models.SceneMarkerCreateInput) (*models.SceneMarker, error) {
|
||||
primaryTagID, err := strconv.Atoi(input.PrimaryTagID)
|
||||
if err != nil {
|
||||
|
|
@ -470,7 +533,13 @@ func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input models.S
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return r.changeMarker(ctx, create, newSceneMarker, tagIDs)
|
||||
ret, err := r.changeMarker(ctx, create, newSceneMarker, tagIDs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
r.hookExecutor.ExecutePostHooks(ctx, ret.ID, plugin.SceneMarkerCreatePost, input, nil)
|
||||
return r.getSceneMarker(ctx, ret.ID)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input models.SceneMarkerUpdateInput) (*models.SceneMarker, error) {
|
||||
|
|
@ -504,7 +573,16 @@ func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input models.S
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return r.changeMarker(ctx, update, updatedSceneMarker, tagIDs)
|
||||
ret, err := r.changeMarker(ctx, update, updatedSceneMarker, tagIDs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
translator := changesetTranslator{
|
||||
inputMap: getUpdateInputMap(ctx),
|
||||
}
|
||||
r.hookExecutor.ExecutePostHooks(ctx, ret.ID, plugin.SceneMarkerUpdatePost, input, translator.getFields())
|
||||
return r.getSceneMarker(ctx, ret.ID)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (bool, error) {
|
||||
|
|
@ -541,6 +619,8 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b
|
|||
|
||||
postCommitFunc()
|
||||
|
||||
r.hookExecutor.ExecutePostHooks(ctx, markerID, plugin.SceneMarkerDestroyPost, id, nil)
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
|
|
@ -648,9 +728,9 @@ func (r *mutationResolver) SceneResetO(ctx context.Context, id string) (ret int,
|
|||
|
||||
func (r *mutationResolver) SceneGenerateScreenshot(ctx context.Context, id string, at *float64) (string, error) {
|
||||
if at != nil {
|
||||
manager.GetInstance().GenerateScreenshot(id, *at)
|
||||
manager.GetInstance().GenerateScreenshot(ctx, id, *at)
|
||||
} else {
|
||||
manager.GetInstance().GenerateDefaultScreenshot(id)
|
||||
manager.GetInstance().GenerateDefaultScreenshot(ctx, id)
|
||||
}
|
||||
|
||||
return "todo", nil
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ package api
|
|||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strconv"
|
||||
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/manager/config"
|
||||
|
|
@ -23,6 +24,6 @@ func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input
|
|||
}
|
||||
|
||||
func (r *mutationResolver) StashBoxBatchPerformerTag(ctx context.Context, input models.StashBoxBatchPerformerTagInput) (string, error) {
|
||||
manager.GetInstance().StashBoxBatchPerformerTag(input)
|
||||
return "todo", nil
|
||||
jobID := manager.GetInstance().StashBoxBatchPerformerTag(ctx, input)
|
||||
return strconv.Itoa(jobID), nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -8,9 +8,21 @@ import (
|
|||
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/plugin"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
func (r *mutationResolver) getStudio(ctx context.Context, id int) (ret *models.Studio, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Studio().Find(id)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) StudioCreate(ctx context.Context, input models.StudioCreateInput) (*models.Studio, error) {
|
||||
// generate checksum from studio name rather than image
|
||||
checksum := utils.MD5FromString(input.Name)
|
||||
|
|
@ -82,7 +94,8 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return studio, nil
|
||||
r.hookExecutor.ExecutePostHooks(ctx, studio.ID, plugin.StudioCreatePost, input, nil)
|
||||
return r.getStudio(ctx, studio.ID)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.StudioUpdateInput) (*models.Studio, error) {
|
||||
|
|
@ -162,7 +175,8 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return studio, nil
|
||||
r.hookExecutor.ExecutePostHooks(ctx, studio.ID, plugin.StudioUpdatePost, input, translator.getFields())
|
||||
return r.getStudio(ctx, studio.ID)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) StudioDestroy(ctx context.Context, input models.StudioDestroyInput) (bool, error) {
|
||||
|
|
@ -176,6 +190,9 @@ func (r *mutationResolver) StudioDestroy(ctx context.Context, input models.Studi
|
|||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
r.hookExecutor.ExecutePostHooks(ctx, id, plugin.StudioDestroyPost, input, nil)
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
|
|
@ -197,5 +214,10 @@ func (r *mutationResolver) StudiosDestroy(ctx context.Context, studioIDs []strin
|
|||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
for _, id := range ids {
|
||||
r.hookExecutor.ExecutePostHooks(ctx, id, plugin.StudioDestroyPost, studioIDs, nil)
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,11 +6,23 @@ import (
|
|||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/plugin"
|
||||
"github.com/stashapp/stash/pkg/tag"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
func (r *mutationResolver) getTag(ctx context.Context, id int) (ret *models.Tag, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Tag().Find(id)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreateInput) (*models.Tag, error) {
|
||||
// Populate a new tag from the input
|
||||
currentTime := time.Now()
|
||||
|
|
@ -31,24 +43,34 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate
|
|||
}
|
||||
}
|
||||
|
||||
// Start the transaction and save the tag
|
||||
var tag *models.Tag
|
||||
// Start the transaction and save the t
|
||||
var t *models.Tag
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Tag()
|
||||
|
||||
// ensure name is unique
|
||||
if err := manager.EnsureTagNameUnique(newTag, qb); err != nil {
|
||||
if err := tag.EnsureTagNameUnique(0, newTag.Name, qb); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tag, err = qb.Create(newTag)
|
||||
t, err = qb.Create(newTag)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(imageData) > 0 {
|
||||
if err := qb.UpdateImage(tag.ID, imageData); err != nil {
|
||||
if err := qb.UpdateImage(t.ID, imageData); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if len(input.Aliases) > 0 {
|
||||
if err := tag.EnsureAliasesUnique(t.ID, input.Aliases, qb); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := qb.UpdateAliases(t.ID, input.Aliases); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
|
@ -58,7 +80,8 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return tag, nil
|
||||
r.hookExecutor.ExecutePostHooks(ctx, t.ID, plugin.TagCreatePost, input, nil)
|
||||
return r.getTag(ctx, t.ID)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdateInput) (*models.Tag, error) {
|
||||
|
|
@ -68,12 +91,6 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
|
|||
return nil, err
|
||||
}
|
||||
|
||||
updatedTag := models.Tag{
|
||||
ID: tagID,
|
||||
Name: input.Name,
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: time.Now()},
|
||||
}
|
||||
|
||||
var imageData []byte
|
||||
|
||||
translator := changesetTranslator{
|
||||
|
|
@ -90,39 +107,56 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
|
|||
}
|
||||
|
||||
// Start the transaction and save the tag
|
||||
var tag *models.Tag
|
||||
var t *models.Tag
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Tag()
|
||||
|
||||
// ensure name is unique
|
||||
existing, err := qb.Find(tagID)
|
||||
t, err = qb.Find(tagID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if existing == nil {
|
||||
if t == nil {
|
||||
return fmt.Errorf("Tag with ID %d not found", tagID)
|
||||
}
|
||||
|
||||
if existing.Name != updatedTag.Name {
|
||||
if err := manager.EnsureTagNameUnique(updatedTag, qb); err != nil {
|
||||
return err
|
||||
}
|
||||
updatedTag := models.TagPartial{
|
||||
ID: tagID,
|
||||
UpdatedAt: &models.SQLiteTimestamp{Timestamp: time.Now()},
|
||||
}
|
||||
|
||||
tag, err = qb.Update(updatedTag)
|
||||
if input.Name != nil && t.Name != *input.Name {
|
||||
if err := tag.EnsureTagNameUnique(tagID, *input.Name, qb); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
updatedTag.Name = input.Name
|
||||
}
|
||||
|
||||
t, err = qb.Update(updatedTag)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(imageData) > 0 {
|
||||
if err := qb.UpdateImage(tag.ID, imageData); err != nil {
|
||||
if err := qb.UpdateImage(tagID, imageData); err != nil {
|
||||
return err
|
||||
}
|
||||
} else if imageIncluded {
|
||||
// must be unsetting
|
||||
if err := qb.DestroyImage(tag.ID); err != nil {
|
||||
if err := qb.DestroyImage(tagID); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if translator.hasField("aliases") {
|
||||
if err := tag.EnsureAliasesUnique(tagID, input.Aliases, qb); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := qb.UpdateAliases(tagID, input.Aliases); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
|
@ -132,7 +166,8 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return tag, nil
|
||||
r.hookExecutor.ExecutePostHooks(ctx, t.ID, plugin.TagUpdatePost, input, translator.getFields())
|
||||
return r.getTag(ctx, t.ID)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) TagDestroy(ctx context.Context, input models.TagDestroyInput) (bool, error) {
|
||||
|
|
@ -146,6 +181,9 @@ func (r *mutationResolver) TagDestroy(ctx context.Context, input models.TagDestr
|
|||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
r.hookExecutor.ExecutePostHooks(ctx, tagID, plugin.TagDestroyPost, input, nil)
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
|
|
@ -167,5 +205,51 @@ func (r *mutationResolver) TagsDestroy(ctx context.Context, tagIDs []string) (bo
|
|||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
for _, id := range ids {
|
||||
r.hookExecutor.ExecutePostHooks(ctx, id, plugin.TagDestroyPost, tagIDs, nil)
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) TagsMerge(ctx context.Context, input models.TagsMergeInput) (*models.Tag, error) {
|
||||
source, err := utils.StringSliceToIntSlice(input.Source)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
destination, err := strconv.Atoi(input.Destination)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(source) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var t *models.Tag
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Tag()
|
||||
|
||||
var err error
|
||||
t, err = qb.Find(destination)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if t == nil {
|
||||
return fmt.Errorf("Tag with ID %d not found", destination)
|
||||
}
|
||||
|
||||
if err = qb.Merge(source, destination); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return t, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ import (
|
|||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
"github.com/stashapp/stash/pkg/plugin"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
|
|
@ -15,7 +16,8 @@ import (
|
|||
// TODO - move this into a common area
|
||||
func newResolver() *Resolver {
|
||||
return &Resolver{
|
||||
txnManager: mocks.NewTransactionManager(),
|
||||
txnManager: mocks.NewTransactionManager(),
|
||||
hookExecutor: &mockHookExecutor{},
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -26,15 +28,47 @@ const existingTagID = 1
|
|||
const existingTagName = "existingTagName"
|
||||
const newTagID = 2
|
||||
|
||||
type mockHookExecutor struct{}
|
||||
|
||||
func (*mockHookExecutor) ExecutePostHooks(ctx context.Context, id int, hookType plugin.HookTriggerEnum, input interface{}, inputFields []string) {
|
||||
}
|
||||
|
||||
func TestTagCreate(t *testing.T) {
|
||||
r := newResolver()
|
||||
|
||||
tagRW := r.txnManager.(*mocks.TransactionManager).Tag().(*mocks.TagReaderWriter)
|
||||
tagRW.On("FindByName", existingTagName, true).Return(&models.Tag{
|
||||
ID: existingTagID,
|
||||
Name: existingTagName,
|
||||
}, nil).Once()
|
||||
tagRW.On("FindByName", errTagName, true).Return(nil, nil).Once()
|
||||
|
||||
pp := 1
|
||||
findFilter := &models.FindFilterType{
|
||||
PerPage: &pp,
|
||||
}
|
||||
|
||||
tagFilterForName := func(n string) *models.TagFilterType {
|
||||
return &models.TagFilterType{
|
||||
Name: &models.StringCriterionInput{
|
||||
Value: n,
|
||||
Modifier: models.CriterionModifierEquals,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
tagFilterForAlias := func(n string) *models.TagFilterType {
|
||||
return &models.TagFilterType{
|
||||
Aliases: &models.StringCriterionInput{
|
||||
Value: n,
|
||||
Modifier: models.CriterionModifierEquals,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
tagRW.On("Query", tagFilterForName(existingTagName), findFilter).Return([]*models.Tag{
|
||||
{
|
||||
ID: existingTagID,
|
||||
Name: existingTagName,
|
||||
},
|
||||
}, 1, nil).Once()
|
||||
tagRW.On("Query", tagFilterForName(errTagName), findFilter).Return(nil, 0, nil).Once()
|
||||
tagRW.On("Query", tagFilterForAlias(errTagName), findFilter).Return(nil, 0, nil).Once()
|
||||
|
||||
expectedErr := errors.New("TagCreate error")
|
||||
tagRW.On("Create", mock.AnythingOfType("models.Tag")).Return(nil, expectedErr)
|
||||
|
|
@ -55,11 +89,14 @@ func TestTagCreate(t *testing.T) {
|
|||
r = newResolver()
|
||||
tagRW = r.txnManager.(*mocks.TransactionManager).Tag().(*mocks.TagReaderWriter)
|
||||
|
||||
tagRW.On("FindByName", tagName, true).Return(nil, nil).Once()
|
||||
tagRW.On("Create", mock.AnythingOfType("models.Tag")).Return(&models.Tag{
|
||||
tagRW.On("Query", tagFilterForName(tagName), findFilter).Return(nil, 0, nil).Once()
|
||||
tagRW.On("Query", tagFilterForAlias(tagName), findFilter).Return(nil, 0, nil).Once()
|
||||
newTag := &models.Tag{
|
||||
ID: newTagID,
|
||||
Name: tagName,
|
||||
}, nil)
|
||||
}
|
||||
tagRW.On("Create", mock.AnythingOfType("models.Tag")).Return(newTag, nil)
|
||||
tagRW.On("Find", newTagID).Return(newTag, nil)
|
||||
|
||||
tag, err := r.Mutation().TagCreate(context.TODO(), models.TagCreateInput{
|
||||
Name: tagName,
|
||||
|
|
|
|||
|
|
@ -13,23 +13,32 @@ func (r *queryResolver) Configuration(ctx context.Context) (*models.ConfigResult
|
|||
}
|
||||
|
||||
func (r *queryResolver) Directory(ctx context.Context, path *string) (*models.Directory, error) {
|
||||
|
||||
directory := &models.Directory{}
|
||||
var err error
|
||||
|
||||
var dirPath = ""
|
||||
if path != nil {
|
||||
dirPath = *path
|
||||
}
|
||||
currentDir := utils.GetDir(dirPath)
|
||||
directories, err := utils.ListDir(currentDir)
|
||||
if err != nil {
|
||||
return directory, err
|
||||
}
|
||||
|
||||
return &models.Directory{
|
||||
Path: currentDir,
|
||||
Parent: utils.GetParent(currentDir),
|
||||
Directories: utils.ListDir(currentDir),
|
||||
}, nil
|
||||
directory.Path = currentDir
|
||||
directory.Parent = utils.GetParent(currentDir)
|
||||
directory.Directories = directories
|
||||
|
||||
return directory, err
|
||||
}
|
||||
|
||||
func makeConfigResult() *models.ConfigResult {
|
||||
return &models.ConfigResult{
|
||||
General: makeConfigGeneralResult(),
|
||||
Interface: makeConfigInterfaceResult(),
|
||||
Dlna: makeConfigDLNAResult(),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -53,6 +62,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
|
|||
CalculateMd5: config.IsCalculateMD5(),
|
||||
VideoFileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
|
||||
ParallelTasks: config.GetParallelTasks(),
|
||||
PreviewAudio: config.GetPreviewAudio(),
|
||||
PreviewSegments: config.GetPreviewSegments(),
|
||||
PreviewSegmentDuration: config.GetPreviewSegmentDuration(),
|
||||
PreviewExcludeStart: config.GetPreviewExcludeStart(),
|
||||
|
|
@ -94,6 +104,7 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
|
|||
cssEnabled := config.GetCSSEnabled()
|
||||
language := config.GetLanguage()
|
||||
slideshowDelay := config.GetSlideshowDelay()
|
||||
handyKey := config.GetHandyKey()
|
||||
|
||||
return &models.ConfigInterfaceResult{
|
||||
MenuItems: menuItems,
|
||||
|
|
@ -107,5 +118,17 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
|
|||
CSSEnabled: &cssEnabled,
|
||||
Language: &language,
|
||||
SlideshowDelay: &slideshowDelay,
|
||||
HandyKey: &handyKey,
|
||||
}
|
||||
}
|
||||
|
||||
func makeConfigDLNAResult() *models.ConfigDLNAResult {
|
||||
config := config.GetInstance()
|
||||
|
||||
return &models.ConfigDLNAResult{
|
||||
ServerName: config.GetDLNAServerName(),
|
||||
Enabled: config.GetDLNADefaultEnabled(),
|
||||
WhitelistedIPs: config.GetDLNADefaultIPWhitelist(),
|
||||
Interfaces: config.GetDLNAInterfaces(),
|
||||
}
|
||||
}
|
||||
|
|
|
|||
12
pkg/api/resolver_query_dlna.go
Normal file
12
pkg/api/resolver_query_dlna.go
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) DlnaStatus(ctx context.Context) (*models.DLNAStatus, error) {
|
||||
return manager.GetInstance().DLNAService.Status(), nil
|
||||
}
|
||||
|
|
@ -21,6 +21,9 @@ func (r *queryResolver) FindImage(ctx context.Context, id *string, checksum *str
|
|||
}
|
||||
|
||||
image, err = qb.Find(idInt)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else if checksum != nil {
|
||||
image, err = qb.FindByChecksum(*checksum)
|
||||
}
|
||||
|
|
|
|||
27
pkg/api/resolver_query_find_saved_filter.go
Normal file
27
pkg/api/resolver_query_find_saved_filter.go
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindSavedFilters(ctx context.Context, mode models.FilterMode) (ret []*models.SavedFilter, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.SavedFilter().FindByMode(mode)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return ret, err
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindDefaultFilter(ctx context.Context, mode models.FilterMode) (ret *models.SavedFilter, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.SavedFilter().FindDefault(mode)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return ret, err
|
||||
}
|
||||
|
|
@ -19,6 +19,9 @@ func (r *queryResolver) FindScene(ctx context.Context, id *string, checksum *str
|
|||
return err
|
||||
}
|
||||
scene, err = qb.Find(idInt)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else if checksum != nil {
|
||||
scene, err = qb.FindByChecksum(*checksum)
|
||||
}
|
||||
|
|
|
|||
52
pkg/api/resolver_query_job.go
Normal file
52
pkg/api/resolver_query_job.go
Normal file
|
|
@ -0,0 +1,52 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strconv"
|
||||
|
||||
"github.com/stashapp/stash/pkg/job"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) JobQueue(ctx context.Context) ([]*models.Job, error) {
|
||||
queue := manager.GetInstance().JobManager.GetQueue()
|
||||
|
||||
var ret []*models.Job
|
||||
for _, j := range queue {
|
||||
ret = append(ret, jobToJobModel(j))
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindJob(ctx context.Context, input models.FindJobInput) (*models.Job, error) {
|
||||
jobID, err := strconv.Atoi(input.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
j := manager.GetInstance().JobManager.GetJob(jobID)
|
||||
if j == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return jobToJobModel(*j), nil
|
||||
}
|
||||
|
||||
func jobToJobModel(j job.Job) *models.Job {
|
||||
ret := &models.Job{
|
||||
ID: strconv.Itoa(j.ID),
|
||||
Status: models.JobStatus(j.Status),
|
||||
Description: j.Description,
|
||||
SubTasks: j.Details,
|
||||
StartTime: j.StartTime,
|
||||
EndTime: j.EndTime,
|
||||
AddTime: j.AddTime,
|
||||
}
|
||||
|
||||
if j.Progress != -1 {
|
||||
ret.Progress = &j.Progress
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
|
@ -7,17 +7,6 @@ import (
|
|||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) JobStatus(ctx context.Context) (*models.MetadataUpdateStatus, error) {
|
||||
status := manager.GetInstance().Status
|
||||
ret := models.MetadataUpdateStatus{
|
||||
Progress: status.Progress,
|
||||
Status: status.Status.String(),
|
||||
Message: "",
|
||||
}
|
||||
|
||||
return &ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) SystemStatus(ctx context.Context) (*models.SystemStatus, error) {
|
||||
return manager.GetInstance().GetSystemStatus(), nil
|
||||
}
|
||||
|
|
|
|||
64
pkg/api/resolver_subscription_job.go
Normal file
64
pkg/api/resolver_subscription_job.go
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/job"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
type throttledUpdate struct {
|
||||
id int
|
||||
pendingUpdate *job.Job
|
||||
lastUpdate time.Time
|
||||
broadcastTimer *time.Timer
|
||||
killTimer *time.Timer
|
||||
}
|
||||
|
||||
func (tu *throttledUpdate) broadcast(output chan *models.JobStatusUpdate) {
|
||||
tu.lastUpdate = time.Now()
|
||||
output <- &models.JobStatusUpdate{
|
||||
Type: models.JobStatusUpdateTypeUpdate,
|
||||
Job: jobToJobModel(*tu.pendingUpdate),
|
||||
}
|
||||
|
||||
tu.broadcastTimer = nil
|
||||
tu.pendingUpdate = nil
|
||||
}
|
||||
|
||||
func makeJobStatusUpdate(t models.JobStatusUpdateType, j job.Job) *models.JobStatusUpdate {
|
||||
return &models.JobStatusUpdate{
|
||||
Type: t,
|
||||
Job: jobToJobModel(j),
|
||||
}
|
||||
}
|
||||
|
||||
func (r *subscriptionResolver) JobsSubscribe(ctx context.Context) (<-chan *models.JobStatusUpdate, error) {
|
||||
msg := make(chan *models.JobStatusUpdate, 100)
|
||||
|
||||
subscription := manager.GetInstance().JobManager.Subscribe(ctx)
|
||||
|
||||
go func() {
|
||||
for {
|
||||
select {
|
||||
case j := <-subscription.NewJob:
|
||||
msg <- makeJobStatusUpdate(models.JobStatusUpdateTypeAdd, j)
|
||||
case j := <-subscription.RemovedJob:
|
||||
msg <- makeJobStatusUpdate(models.JobStatusUpdateTypeRemove, j)
|
||||
case j := <-subscription.UpdatedJob:
|
||||
msg <- makeJobStatusUpdate(models.JobStatusUpdateTypeUpdate, j)
|
||||
case <-ctx.Done():
|
||||
close(msg)
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
return msg, nil
|
||||
}
|
||||
|
||||
func (r *subscriptionResolver) ScanCompleteSubscribe(ctx context.Context) (<-chan bool, error) {
|
||||
return manager.GetInstance().ScanSubscribe(ctx), nil
|
||||
}
|
||||
|
|
@ -1,40 +0,0 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *subscriptionResolver) MetadataUpdate(ctx context.Context) (<-chan *models.MetadataUpdateStatus, error) {
|
||||
msg := make(chan *models.MetadataUpdateStatus, 1)
|
||||
|
||||
ticker := time.NewTicker(5 * time.Second)
|
||||
|
||||
go func() {
|
||||
lastStatus := manager.TaskStatus{}
|
||||
for {
|
||||
select {
|
||||
case _ = <-ticker.C:
|
||||
thisStatus := manager.GetInstance().Status
|
||||
if thisStatus != lastStatus {
|
||||
ret := models.MetadataUpdateStatus{
|
||||
Progress: thisStatus.Progress,
|
||||
Status: thisStatus.Status.String(),
|
||||
Message: "",
|
||||
}
|
||||
msg <- &ret
|
||||
}
|
||||
lastStatus = thisStatus
|
||||
case <-ctx.Done():
|
||||
ticker.Stop()
|
||||
close(msg)
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
return msg, nil
|
||||
}
|
||||
|
|
@ -16,7 +16,8 @@ import (
|
|||
)
|
||||
|
||||
type sceneRoutes struct {
|
||||
txnManager models.TransactionManager
|
||||
txnManager models.TransactionManager
|
||||
sceneServer manager.SceneServer
|
||||
}
|
||||
|
||||
func (rs sceneRoutes) Routes() chi.Router {
|
||||
|
|
@ -37,6 +38,7 @@ func (rs sceneRoutes) Routes() chi.Router {
|
|||
r.Get("/preview", rs.Preview)
|
||||
r.Get("/webp", rs.Webp)
|
||||
r.Get("/vtt/chapter", rs.ChapterVtt)
|
||||
r.Get("/funscript", rs.Funscript)
|
||||
|
||||
r.Get("/scene_marker/{sceneMarkerId}/stream", rs.SceneMarkerStream)
|
||||
r.Get("/scene_marker/{sceneMarkerId}/preview", rs.SceneMarkerPreview)
|
||||
|
|
@ -69,12 +71,11 @@ func getSceneFileContainer(scene *models.Scene) ffmpeg.Container {
|
|||
|
||||
func (rs sceneRoutes) StreamDirect(w http.ResponseWriter, r *http.Request) {
|
||||
scene := r.Context().Value(sceneKey).(*models.Scene)
|
||||
fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm()
|
||||
|
||||
filepath := manager.GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.GetHash(fileNamingAlgo))
|
||||
manager.RegisterStream(filepath, &w)
|
||||
http.ServeFile(w, r, filepath)
|
||||
manager.WaitAndDeregisterStream(filepath, &w, r)
|
||||
ss := manager.SceneServer{
|
||||
TXNManager: rs.txnManager,
|
||||
}
|
||||
ss.StreamSceneDirect(scene, w, r)
|
||||
}
|
||||
|
||||
func (rs sceneRoutes) StreamMKV(w http.ResponseWriter, r *http.Request) {
|
||||
|
|
@ -178,20 +179,11 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, vi
|
|||
|
||||
func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) {
|
||||
scene := r.Context().Value(sceneKey).(*models.Scene)
|
||||
filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()))
|
||||
|
||||
// fall back to the scene image blob if the file isn't present
|
||||
screenshotExists, _ := utils.FileExists(filepath)
|
||||
if screenshotExists {
|
||||
http.ServeFile(w, r, filepath)
|
||||
} else {
|
||||
var cover []byte
|
||||
rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
|
||||
cover, _ = repo.Scene().GetCover(scene.ID)
|
||||
return nil
|
||||
})
|
||||
utils.ServeImage(cover, w, r)
|
||||
ss := manager.SceneServer{
|
||||
TXNManager: rs.txnManager,
|
||||
}
|
||||
ss.ServeScreenshot(scene, w, r)
|
||||
}
|
||||
|
||||
func (rs sceneRoutes) Preview(w http.ResponseWriter, r *http.Request) {
|
||||
|
|
@ -264,6 +256,12 @@ func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
|
|||
_, _ = w.Write([]byte(vtt))
|
||||
}
|
||||
|
||||
func (rs sceneRoutes) Funscript(w http.ResponseWriter, r *http.Request) {
|
||||
scene := r.Context().Value(sceneKey).(*models.Scene)
|
||||
funscript := utils.GetFunscriptPath(scene.Path)
|
||||
utils.ServeFileNoCache(w, r, funscript)
|
||||
}
|
||||
|
||||
func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) {
|
||||
scene := r.Context().Value(sceneKey).(*models.Scene)
|
||||
w.Header().Set("Content-Type", "text/vtt")
|
||||
|
|
|
|||
|
|
@ -14,7 +14,11 @@ import (
|
|||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/99designs/gqlgen/handler"
|
||||
gqlHandler "github.com/99designs/gqlgen/graphql/handler"
|
||||
gqlExtension "github.com/99designs/gqlgen/graphql/handler/extension"
|
||||
gqlLru "github.com/99designs/gqlgen/graphql/handler/lru"
|
||||
gqlTransport "github.com/99designs/gqlgen/graphql/handler/transport"
|
||||
gqlPlayground "github.com/99designs/gqlgen/graphql/playground"
|
||||
"github.com/go-chi/chi"
|
||||
"github.com/go-chi/chi/middleware"
|
||||
"github.com/gobuffalo/packr/v2"
|
||||
|
|
@ -25,6 +29,7 @@ import (
|
|||
"github.com/stashapp/stash/pkg/manager/config"
|
||||
"github.com/stashapp/stash/pkg/manager/paths"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/session"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
|
|
@ -37,11 +42,6 @@ var uiBox *packr.Box
|
|||
//var legacyUiBox *packr.Box
|
||||
var loginUIBox *packr.Box
|
||||
|
||||
const (
|
||||
ApiKeyHeader = "ApiKey"
|
||||
ApiKeyParameter = "apikey"
|
||||
)
|
||||
|
||||
func allowUnauthenticated(r *http.Request) bool {
|
||||
return strings.HasPrefix(r.URL.Path, "/login") || r.URL.Path == "/css"
|
||||
}
|
||||
|
|
@ -49,41 +49,26 @@ func allowUnauthenticated(r *http.Request) bool {
|
|||
func authenticateHandler() func(http.Handler) http.Handler {
|
||||
return func(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
c := config.GetInstance()
|
||||
ctx := r.Context()
|
||||
|
||||
// translate api key into current user, if present
|
||||
userID := ""
|
||||
apiKey := r.Header.Get(ApiKeyHeader)
|
||||
var err error
|
||||
|
||||
// try getting the api key as a query parameter
|
||||
if apiKey == "" {
|
||||
apiKey = r.URL.Query().Get(ApiKeyParameter)
|
||||
}
|
||||
|
||||
if apiKey != "" {
|
||||
// match against configured API and set userID to the
|
||||
// configured username. In future, we'll want to
|
||||
// get the username from the key.
|
||||
if c.GetAPIKey() != apiKey {
|
||||
w.Header().Add("WWW-Authenticate", `FormBased`)
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
userID, err := manager.GetInstance().SessionStore.Authenticate(w, r)
|
||||
if err != nil {
|
||||
if err != session.ErrUnauthorized {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
_, err = w.Write([]byte(err.Error()))
|
||||
if err != nil {
|
||||
logger.Error(err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
userID = c.GetUsername()
|
||||
} else {
|
||||
// handle session
|
||||
userID, err = getSessionUserID(w, r)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
w.Write([]byte(err.Error()))
|
||||
// unauthorized error
|
||||
w.Header().Add("WWW-Authenticate", `FormBased`)
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
c := config.GetInstance()
|
||||
ctx := r.Context()
|
||||
|
||||
// handle redirect if no user and user is required
|
||||
if userID == "" && c.HasCredentials() && !allowUnauthenticated(r) {
|
||||
// if we don't have a userID, then redirect
|
||||
|
|
@ -105,7 +90,7 @@ func authenticateHandler() func(http.Handler) http.Handler {
|
|||
return
|
||||
}
|
||||
|
||||
ctx = context.WithValue(ctx, ContextUser, userID)
|
||||
ctx = session.SetCurrentUserID(ctx, userID)
|
||||
|
||||
r = r.WithContext(ctx)
|
||||
|
||||
|
|
@ -114,6 +99,16 @@ func authenticateHandler() func(http.Handler) http.Handler {
|
|||
}
|
||||
}
|
||||
|
||||
func visitedPluginHandler() func(http.Handler) http.Handler {
|
||||
return func(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
// get the visited plugins and set them in the context
|
||||
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const loginEndPoint = "/login"
|
||||
|
||||
func Start() {
|
||||
|
|
@ -121,13 +116,15 @@ func Start() {
|
|||
//legacyUiBox = packr.New("UI Box", "../../ui/v1/dist/stash-frontend")
|
||||
loginUIBox = packr.New("Login UI Box", "../../ui/login")
|
||||
|
||||
initSessionStore()
|
||||
initialiseImages()
|
||||
|
||||
r := chi.NewRouter()
|
||||
|
||||
r.Use(middleware.Heartbeat("/healthz"))
|
||||
r.Use(authenticateHandler())
|
||||
visitedPluginHandler := manager.GetInstance().SessionStore.VisitedPluginHandler()
|
||||
r.Use(visitedPluginHandler)
|
||||
|
||||
r.Use(middleware.Recoverer)
|
||||
|
||||
c := config.GetInstance()
|
||||
|
|
@ -139,30 +136,51 @@ func Start() {
|
|||
r.Use(cors.AllowAll().Handler)
|
||||
r.Use(BaseURLMiddleware)
|
||||
|
||||
recoverFunc := handler.RecoverFunc(func(ctx context.Context, err interface{}) error {
|
||||
recoverFunc := func(ctx context.Context, err interface{}) error {
|
||||
logger.Error(err)
|
||||
debug.PrintStack()
|
||||
|
||||
message := fmt.Sprintf("Internal system error. Error <%v>", err)
|
||||
return errors.New(message)
|
||||
})
|
||||
websocketUpgrader := handler.WebsocketUpgrader(websocket.Upgrader{
|
||||
CheckOrigin: func(r *http.Request) bool {
|
||||
return true
|
||||
},
|
||||
})
|
||||
maxUploadSize := handler.UploadMaxSize(c.GetMaxUploadSize())
|
||||
websocketKeepAliveDuration := handler.WebsocketKeepAliveDuration(10 * time.Second)
|
||||
|
||||
txnManager := manager.GetInstance().TxnManager
|
||||
resolver := &Resolver{
|
||||
txnManager: txnManager,
|
||||
}
|
||||
|
||||
gqlHandler := handler.GraphQL(models.NewExecutableSchema(models.Config{Resolvers: resolver}), recoverFunc, websocketUpgrader, websocketKeepAliveDuration, maxUploadSize)
|
||||
txnManager := manager.GetInstance().TxnManager
|
||||
pluginCache := manager.GetInstance().PluginCache
|
||||
resolver := &Resolver{
|
||||
txnManager: txnManager,
|
||||
hookExecutor: pluginCache,
|
||||
}
|
||||
|
||||
r.Handle("/graphql", gqlHandler)
|
||||
r.Handle("/playground", handler.Playground("GraphQL playground", "/graphql"))
|
||||
gqlSrv := gqlHandler.New(models.NewExecutableSchema(models.Config{Resolvers: resolver}))
|
||||
gqlSrv.SetRecoverFunc(recoverFunc)
|
||||
gqlSrv.AddTransport(gqlTransport.Websocket{
|
||||
Upgrader: websocket.Upgrader{
|
||||
CheckOrigin: func(r *http.Request) bool {
|
||||
return true
|
||||
},
|
||||
},
|
||||
KeepAlivePingInterval: 10 * time.Second,
|
||||
})
|
||||
gqlSrv.AddTransport(gqlTransport.Options{})
|
||||
gqlSrv.AddTransport(gqlTransport.GET{})
|
||||
gqlSrv.AddTransport(gqlTransport.POST{})
|
||||
gqlSrv.AddTransport(gqlTransport.MultipartForm{
|
||||
MaxUploadSize: c.GetMaxUploadSize(),
|
||||
})
|
||||
|
||||
gqlSrv.SetQueryCache(gqlLru.New(1000))
|
||||
gqlSrv.Use(gqlExtension.Introspection{})
|
||||
|
||||
gqlHandlerFunc := func(w http.ResponseWriter, r *http.Request) {
|
||||
gqlSrv.ServeHTTP(w, r)
|
||||
}
|
||||
|
||||
// register GQL handler with plugin cache
|
||||
// chain the visited plugin handler
|
||||
manager.GetInstance().PluginCache.RegisterGQLHandler(visitedPluginHandler(http.HandlerFunc(gqlHandlerFunc)))
|
||||
|
||||
r.HandleFunc("/graphql", gqlHandlerFunc)
|
||||
r.HandleFunc("/playground", gqlPlayground.Handler("GraphQL playground", "/graphql"))
|
||||
|
||||
// session handlers
|
||||
r.Post(loginEndPoint, handleLogin)
|
||||
|
|
@ -280,7 +298,7 @@ func Start() {
|
|||
printLatestVersion()
|
||||
logger.Infof("stash is listening on " + address)
|
||||
logger.Infof("stash is running at https://" + displayAddress + "/")
|
||||
logger.Fatal(httpsServer.ListenAndServeTLS("", ""))
|
||||
logger.Error(httpsServer.ListenAndServeTLS("", ""))
|
||||
}()
|
||||
} else {
|
||||
server := &http.Server{
|
||||
|
|
@ -293,7 +311,7 @@ func Start() {
|
|||
printLatestVersion()
|
||||
logger.Infof("stash is listening on " + address)
|
||||
logger.Infof("stash is running at http://" + displayAddress + "/")
|
||||
logger.Fatal(server.ListenAndServe())
|
||||
logger.Error(server.ListenAndServe())
|
||||
}()
|
||||
}
|
||||
}
|
||||
|
|
@ -333,15 +351,6 @@ func makeTLSConfig() *tls.Config {
|
|||
return tlsConfig
|
||||
}
|
||||
|
||||
func HasTLSConfig() bool {
|
||||
ret, _ := utils.FileExists(paths.GetSSLCert())
|
||||
if ret {
|
||||
ret, _ = utils.FileExists(paths.GetSSLKey())
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
type contextKey struct {
|
||||
name string
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,15 +1,13 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"net/http"
|
||||
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/manager/config"
|
||||
|
||||
"github.com/gorilla/securecookie"
|
||||
"github.com/gorilla/sessions"
|
||||
"github.com/stashapp/stash/pkg/session"
|
||||
)
|
||||
|
||||
const cookieName = "session"
|
||||
|
|
@ -19,17 +17,11 @@ const userIDKey = "userID"
|
|||
|
||||
const returnURLParam = "returnURL"
|
||||
|
||||
var sessionStore = sessions.NewCookieStore(config.GetInstance().GetSessionStoreKey())
|
||||
|
||||
type loginTemplateData struct {
|
||||
URL string
|
||||
Error string
|
||||
}
|
||||
|
||||
func initSessionStore() {
|
||||
sessionStore.MaxAge(config.GetInstance().GetMaxSessionAge())
|
||||
}
|
||||
|
||||
func redirectToLogin(w http.ResponseWriter, returnURL string, loginError string) {
|
||||
data, _ := loginUIBox.Find("login.html")
|
||||
templ, err := template.New("Login").Parse(string(data))
|
||||
|
|
@ -59,22 +51,13 @@ func handleLogin(w http.ResponseWriter, r *http.Request) {
|
|||
url = "/"
|
||||
}
|
||||
|
||||
// ignore error - we want a new session regardless
|
||||
newSession, _ := sessionStore.Get(r, cookieName)
|
||||
|
||||
username := r.FormValue("username")
|
||||
password := r.FormValue("password")
|
||||
|
||||
// authenticate the user
|
||||
if !config.GetInstance().ValidateCredentials(username, password) {
|
||||
err := manager.GetInstance().SessionStore.Login(w, r)
|
||||
if err == session.ErrInvalidCredentials {
|
||||
// redirect back to the login page with an error
|
||||
redirectToLogin(w, url, "Username or password is invalid")
|
||||
return
|
||||
}
|
||||
|
||||
newSession.Values[userIDKey] = username
|
||||
|
||||
err := newSession.Save(r, w)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
|
|
@ -84,17 +67,7 @@ func handleLogin(w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
|
||||
func handleLogout(w http.ResponseWriter, r *http.Request) {
|
||||
session, err := sessionStore.Get(r, cookieName)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
delete(session.Values, userIDKey)
|
||||
session.Options.MaxAge = -1
|
||||
|
||||
err = session.Save(r, w)
|
||||
if err != nil {
|
||||
if err := manager.GetInstance().SessionStore.Logout(w, r); err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
|
@ -102,51 +75,3 @@ func handleLogout(w http.ResponseWriter, r *http.Request) {
|
|||
// redirect to the login page if credentials are required
|
||||
getLoginHandler(w, r)
|
||||
}
|
||||
|
||||
func getSessionUserID(w http.ResponseWriter, r *http.Request) (string, error) {
|
||||
session, err := sessionStore.Get(r, cookieName)
|
||||
// ignore errors and treat as an empty user id, so that we handle expired
|
||||
// cookie
|
||||
if err != nil {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
if !session.IsNew {
|
||||
val := session.Values[userIDKey]
|
||||
|
||||
// refresh the cookie
|
||||
err = session.Save(r, w)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
ret, _ := val.(string)
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
return "", nil
|
||||
}
|
||||
|
||||
func getCurrentUserID(ctx context.Context) *string {
|
||||
userCtxVal := ctx.Value(ContextUser)
|
||||
if userCtxVal != nil {
|
||||
currentUser := userCtxVal.(string)
|
||||
return ¤tUser
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func createSessionCookie(username string) (*http.Cookie, error) {
|
||||
session := sessions.NewSession(sessionStore, cookieName)
|
||||
session.Values[userIDKey] = username
|
||||
|
||||
encoded, err := securecookie.EncodeMulti(session.Name(), session.Values,
|
||||
sessionStore.Codecs...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return sessions.NewCookie(session.Name(), encoded, session.Options), nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -58,3 +58,7 @@ func (b SceneURLBuilder) GetSceneMarkerStreamURL(sceneMarkerID int) string {
|
|||
func (b SceneURLBuilder) GetSceneMarkerStreamPreviewURL(sceneMarkerID int) string {
|
||||
return b.BaseURL + "/scene/" + b.SceneID + "/scene_marker/" + strconv.Itoa(sceneMarkerID) + "/preview"
|
||||
}
|
||||
|
||||
func (b SceneURLBuilder) GetFunscriptURL() string {
|
||||
return b.BaseURL + "/scene/" + b.SceneID + "/funscript"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -459,7 +459,12 @@ func TestParseTagScenes(t *testing.T) {
|
|||
|
||||
for _, s := range tags {
|
||||
if err := withTxn(func(r models.Repository) error {
|
||||
return TagScenes(s, nil, r.Scene())
|
||||
aliases, err := r.Tag().GetAliases(s.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return TagScenes(s, nil, aliases, r.Scene())
|
||||
}); err != nil {
|
||||
t.Errorf("Error auto-tagging performers: %s", err)
|
||||
}
|
||||
|
|
@ -481,7 +486,7 @@ func TestParseTagScenes(t *testing.T) {
|
|||
t.Errorf("Error getting scene tags: %s", err.Error())
|
||||
}
|
||||
|
||||
// title is only set on scenes where we expect performer to be set
|
||||
// title is only set on scenes where we expect tag to be set
|
||||
if scene.Title.String == scene.Path && len(tags) == 0 {
|
||||
t.Errorf("Did not set tag '%s' for path '%s'", testName, scene.Path)
|
||||
} else if scene.Title.String != scene.Path && len(tags) > 0 {
|
||||
|
|
@ -604,7 +609,12 @@ func TestParseTagImages(t *testing.T) {
|
|||
|
||||
for _, s := range tags {
|
||||
if err := withTxn(func(r models.Repository) error {
|
||||
return TagImages(s, nil, r.Image())
|
||||
aliases, err := r.Tag().GetAliases(s.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return TagImages(s, nil, aliases, r.Image())
|
||||
}); err != nil {
|
||||
t.Errorf("Error auto-tagging performers: %s", err)
|
||||
}
|
||||
|
|
@ -749,7 +759,12 @@ func TestParseTagGalleries(t *testing.T) {
|
|||
|
||||
for _, s := range tags {
|
||||
if err := withTxn(func(r models.Repository) error {
|
||||
return TagGalleries(s, nil, r.Gallery())
|
||||
aliases, err := r.Tag().GetAliases(s.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return TagGalleries(s, nil, aliases, r.Gallery())
|
||||
}); err != nil {
|
||||
t.Errorf("Error auto-tagging performers: %s", err)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -25,37 +25,62 @@ func getMatchingTags(path string, tagReader models.TagReader) ([]*models.Tag, er
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func getTagTagger(p *models.Tag) tagger {
|
||||
return tagger{
|
||||
func getTagTaggers(p *models.Tag, aliases []string) []tagger {
|
||||
ret := []tagger{{
|
||||
ID: p.ID,
|
||||
Type: "tag",
|
||||
Name: p.Name,
|
||||
}}
|
||||
|
||||
for _, a := range aliases {
|
||||
ret = append(ret, tagger{
|
||||
ID: p.ID,
|
||||
Type: "tag",
|
||||
Name: a,
|
||||
})
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
// TagScenes searches for scenes whose path matches the provided tag name and tags the scene with the tag.
|
||||
func TagScenes(p *models.Tag, paths []string, rw models.SceneReaderWriter) error {
|
||||
t := getTagTagger(p)
|
||||
func TagScenes(p *models.Tag, paths []string, aliases []string, rw models.SceneReaderWriter) error {
|
||||
t := getTagTaggers(p, aliases)
|
||||
|
||||
return t.tagScenes(paths, rw, func(subjectID, otherID int) (bool, error) {
|
||||
return scene.AddTag(rw, otherID, subjectID)
|
||||
})
|
||||
for _, tt := range t {
|
||||
if err := tt.tagScenes(paths, rw, func(subjectID, otherID int) (bool, error) {
|
||||
return scene.AddTag(rw, otherID, subjectID)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// TagImages searches for images whose path matches the provided tag name and tags the image with the tag.
|
||||
func TagImages(p *models.Tag, paths []string, rw models.ImageReaderWriter) error {
|
||||
t := getTagTagger(p)
|
||||
func TagImages(p *models.Tag, paths []string, aliases []string, rw models.ImageReaderWriter) error {
|
||||
t := getTagTaggers(p, aliases)
|
||||
|
||||
return t.tagImages(paths, rw, func(subjectID, otherID int) (bool, error) {
|
||||
return image.AddTag(rw, otherID, subjectID)
|
||||
})
|
||||
for _, tt := range t {
|
||||
if err := tt.tagImages(paths, rw, func(subjectID, otherID int) (bool, error) {
|
||||
return image.AddTag(rw, otherID, subjectID)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// TagGalleries searches for galleries whose path matches the provided tag name and tags the gallery with the tag.
|
||||
func TagGalleries(p *models.Tag, paths []string, rw models.GalleryReaderWriter) error {
|
||||
t := getTagTagger(p)
|
||||
func TagGalleries(p *models.Tag, paths []string, aliases []string, rw models.GalleryReaderWriter) error {
|
||||
t := getTagTaggers(p, aliases)
|
||||
|
||||
return t.tagGalleries(paths, rw, func(subjectID, otherID int) (bool, error) {
|
||||
return gallery.AddTag(rw, otherID, subjectID)
|
||||
})
|
||||
for _, tt := range t {
|
||||
if err := tt.tagGalleries(paths, rw, func(subjectID, otherID int) (bool, error) {
|
||||
return gallery.AddTag(rw, otherID, subjectID)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -8,35 +8,67 @@ import (
|
|||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
type testTagCase struct {
|
||||
tagName string
|
||||
expectedRegex string
|
||||
aliasName string
|
||||
aliasRegex string
|
||||
}
|
||||
|
||||
var testTagCases = []testTagCase{
|
||||
{
|
||||
"tag name",
|
||||
`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*name(?:$|_|[^\w\d])`,
|
||||
"",
|
||||
"",
|
||||
},
|
||||
{
|
||||
"tag + name",
|
||||
`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
|
||||
"",
|
||||
"",
|
||||
},
|
||||
{
|
||||
"tag name",
|
||||
`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*name(?:$|_|[^\w\d])`,
|
||||
"alias name",
|
||||
`(?i)(?:^|_|[^\w\d])alias[.\-_ ]*name(?:$|_|[^\w\d])`,
|
||||
},
|
||||
{
|
||||
"tag + name",
|
||||
`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
|
||||
"alias + name",
|
||||
`(?i)(?:^|_|[^\w\d])alias[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
|
||||
},
|
||||
}
|
||||
|
||||
func TestTagScenes(t *testing.T) {
|
||||
type test struct {
|
||||
tagName string
|
||||
expectedRegex string
|
||||
}
|
||||
|
||||
tagNames := []test{
|
||||
{
|
||||
"tag name",
|
||||
`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*name(?:$|_|[^\w\d])`,
|
||||
},
|
||||
{
|
||||
"tag + name",
|
||||
`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, p := range tagNames {
|
||||
testTagScenes(t, p.tagName, p.expectedRegex)
|
||||
for _, p := range testTagCases {
|
||||
testTagScenes(t, p)
|
||||
}
|
||||
}
|
||||
|
||||
func testTagScenes(t *testing.T, tagName, expectedRegex string) {
|
||||
func testTagScenes(t *testing.T, tc testTagCase) {
|
||||
tagName := tc.tagName
|
||||
expectedRegex := tc.expectedRegex
|
||||
aliasName := tc.aliasName
|
||||
aliasRegex := tc.aliasRegex
|
||||
|
||||
mockSceneReader := &mocks.SceneReaderWriter{}
|
||||
|
||||
const tagID = 2
|
||||
|
||||
var aliases []string
|
||||
|
||||
testPathName := tagName
|
||||
if aliasName != "" {
|
||||
aliases = []string{aliasName}
|
||||
testPathName = aliasName
|
||||
}
|
||||
|
||||
matchingPaths, falsePaths := generateTestPaths(testPathName, "mp4")
|
||||
|
||||
var scenes []*models.Scene
|
||||
matchingPaths, falsePaths := generateTestPaths(tagName, "mp4")
|
||||
for i, p := range append(matchingPaths, falsePaths...) {
|
||||
scenes = append(scenes, &models.Scene{
|
||||
ID: i + 1,
|
||||
|
|
@ -64,7 +96,23 @@ func testTagScenes(t *testing.T, tagName, expectedRegex string) {
|
|||
PerPage: &perPage,
|
||||
}
|
||||
|
||||
mockSceneReader.On("Query", expectedSceneFilter, expectedFindFilter).Return(scenes, len(scenes), nil).Once()
|
||||
// if alias provided, then don't find by name
|
||||
onNameQuery := mockSceneReader.On("Query", expectedSceneFilter, expectedFindFilter)
|
||||
if aliasName == "" {
|
||||
onNameQuery.Return(scenes, len(scenes), nil).Once()
|
||||
} else {
|
||||
onNameQuery.Return(nil, 0, nil).Once()
|
||||
|
||||
expectedAliasFilter := &models.SceneFilterType{
|
||||
Organized: &organized,
|
||||
Path: &models.StringCriterionInput{
|
||||
Value: aliasRegex,
|
||||
Modifier: models.CriterionModifierMatchesRegex,
|
||||
},
|
||||
}
|
||||
|
||||
mockSceneReader.On("Query", expectedAliasFilter, expectedFindFilter).Return(scenes, len(scenes), nil).Once()
|
||||
}
|
||||
|
||||
for i := range matchingPaths {
|
||||
sceneID := i + 1
|
||||
|
|
@ -72,7 +120,7 @@ func testTagScenes(t *testing.T, tagName, expectedRegex string) {
|
|||
mockSceneReader.On("UpdateTags", sceneID, []int{tagID}).Return(nil).Once()
|
||||
}
|
||||
|
||||
err := TagScenes(&tag, nil, mockSceneReader)
|
||||
err := TagScenes(&tag, nil, aliases, mockSceneReader)
|
||||
|
||||
assert := assert.New(t)
|
||||
|
||||
|
|
@ -81,34 +129,31 @@ func testTagScenes(t *testing.T, tagName, expectedRegex string) {
|
|||
}
|
||||
|
||||
func TestTagImages(t *testing.T) {
|
||||
type test struct {
|
||||
tagName string
|
||||
expectedRegex string
|
||||
}
|
||||
|
||||
tagNames := []test{
|
||||
{
|
||||
"tag name",
|
||||
`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*name(?:$|_|[^\w\d])`,
|
||||
},
|
||||
{
|
||||
"tag + name",
|
||||
`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, p := range tagNames {
|
||||
testTagImages(t, p.tagName, p.expectedRegex)
|
||||
for _, p := range testTagCases {
|
||||
testTagImages(t, p)
|
||||
}
|
||||
}
|
||||
|
||||
func testTagImages(t *testing.T, tagName, expectedRegex string) {
|
||||
func testTagImages(t *testing.T, tc testTagCase) {
|
||||
tagName := tc.tagName
|
||||
expectedRegex := tc.expectedRegex
|
||||
aliasName := tc.aliasName
|
||||
aliasRegex := tc.aliasRegex
|
||||
|
||||
mockImageReader := &mocks.ImageReaderWriter{}
|
||||
|
||||
const tagID = 2
|
||||
|
||||
var aliases []string
|
||||
|
||||
testPathName := tagName
|
||||
if aliasName != "" {
|
||||
aliases = []string{aliasName}
|
||||
testPathName = aliasName
|
||||
}
|
||||
|
||||
var images []*models.Image
|
||||
matchingPaths, falsePaths := generateTestPaths(tagName, "mp4")
|
||||
matchingPaths, falsePaths := generateTestPaths(testPathName, "mp4")
|
||||
for i, p := range append(matchingPaths, falsePaths...) {
|
||||
images = append(images, &models.Image{
|
||||
ID: i + 1,
|
||||
|
|
@ -136,7 +181,23 @@ func testTagImages(t *testing.T, tagName, expectedRegex string) {
|
|||
PerPage: &perPage,
|
||||
}
|
||||
|
||||
mockImageReader.On("Query", expectedImageFilter, expectedFindFilter).Return(images, len(images), nil).Once()
|
||||
// if alias provided, then don't find by name
|
||||
onNameQuery := mockImageReader.On("Query", expectedImageFilter, expectedFindFilter)
|
||||
if aliasName == "" {
|
||||
onNameQuery.Return(images, len(images), nil).Once()
|
||||
} else {
|
||||
onNameQuery.Return(nil, 0, nil).Once()
|
||||
|
||||
expectedAliasFilter := &models.ImageFilterType{
|
||||
Organized: &organized,
|
||||
Path: &models.StringCriterionInput{
|
||||
Value: aliasRegex,
|
||||
Modifier: models.CriterionModifierMatchesRegex,
|
||||
},
|
||||
}
|
||||
|
||||
mockImageReader.On("Query", expectedAliasFilter, expectedFindFilter).Return(images, len(images), nil).Once()
|
||||
}
|
||||
|
||||
for i := range matchingPaths {
|
||||
imageID := i + 1
|
||||
|
|
@ -144,7 +205,7 @@ func testTagImages(t *testing.T, tagName, expectedRegex string) {
|
|||
mockImageReader.On("UpdateTags", imageID, []int{tagID}).Return(nil).Once()
|
||||
}
|
||||
|
||||
err := TagImages(&tag, nil, mockImageReader)
|
||||
err := TagImages(&tag, nil, aliases, mockImageReader)
|
||||
|
||||
assert := assert.New(t)
|
||||
|
||||
|
|
@ -153,34 +214,31 @@ func testTagImages(t *testing.T, tagName, expectedRegex string) {
|
|||
}
|
||||
|
||||
func TestTagGalleries(t *testing.T) {
|
||||
type test struct {
|
||||
tagName string
|
||||
expectedRegex string
|
||||
}
|
||||
|
||||
tagNames := []test{
|
||||
{
|
||||
"tag name",
|
||||
`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*name(?:$|_|[^\w\d])`,
|
||||
},
|
||||
{
|
||||
"tag + name",
|
||||
`(?i)(?:^|_|[^\w\d])tag[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\w\d])`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, p := range tagNames {
|
||||
testTagGalleries(t, p.tagName, p.expectedRegex)
|
||||
for _, p := range testTagCases {
|
||||
testTagGalleries(t, p)
|
||||
}
|
||||
}
|
||||
|
||||
func testTagGalleries(t *testing.T, tagName, expectedRegex string) {
|
||||
func testTagGalleries(t *testing.T, tc testTagCase) {
|
||||
tagName := tc.tagName
|
||||
expectedRegex := tc.expectedRegex
|
||||
aliasName := tc.aliasName
|
||||
aliasRegex := tc.aliasRegex
|
||||
|
||||
mockGalleryReader := &mocks.GalleryReaderWriter{}
|
||||
|
||||
const tagID = 2
|
||||
|
||||
var aliases []string
|
||||
|
||||
testPathName := tagName
|
||||
if aliasName != "" {
|
||||
aliases = []string{aliasName}
|
||||
testPathName = aliasName
|
||||
}
|
||||
|
||||
var galleries []*models.Gallery
|
||||
matchingPaths, falsePaths := generateTestPaths(tagName, "mp4")
|
||||
matchingPaths, falsePaths := generateTestPaths(testPathName, "mp4")
|
||||
for i, p := range append(matchingPaths, falsePaths...) {
|
||||
galleries = append(galleries, &models.Gallery{
|
||||
ID: i + 1,
|
||||
|
|
@ -208,7 +266,23 @@ func testTagGalleries(t *testing.T, tagName, expectedRegex string) {
|
|||
PerPage: &perPage,
|
||||
}
|
||||
|
||||
mockGalleryReader.On("Query", expectedGalleryFilter, expectedFindFilter).Return(galleries, len(galleries), nil).Once()
|
||||
// if alias provided, then don't find by name
|
||||
onNameQuery := mockGalleryReader.On("Query", expectedGalleryFilter, expectedFindFilter)
|
||||
if aliasName == "" {
|
||||
onNameQuery.Return(galleries, len(galleries), nil).Once()
|
||||
} else {
|
||||
onNameQuery.Return(nil, 0, nil).Once()
|
||||
|
||||
expectedAliasFilter := &models.GalleryFilterType{
|
||||
Organized: &organized,
|
||||
Path: &models.StringCriterionInput{
|
||||
Value: aliasRegex,
|
||||
Modifier: models.CriterionModifierMatchesRegex,
|
||||
},
|
||||
}
|
||||
|
||||
mockGalleryReader.On("Query", expectedAliasFilter, expectedFindFilter).Return(galleries, len(galleries), nil).Once()
|
||||
}
|
||||
|
||||
for i := range matchingPaths {
|
||||
galleryID := i + 1
|
||||
|
|
@ -216,7 +290,7 @@ func testTagGalleries(t *testing.T, tagName, expectedRegex string) {
|
|||
mockGalleryReader.On("UpdateTags", galleryID, []int{tagID}).Return(nil).Once()
|
||||
}
|
||||
|
||||
err := TagGalleries(&tag, nil, mockGalleryReader)
|
||||
err := TagGalleries(&tag, nil, aliases, mockGalleryReader)
|
||||
|
||||
assert := assert.New(t)
|
||||
|
||||
|
|
|
|||
|
|
@ -87,7 +87,13 @@ func getPathWords(path string) []string {
|
|||
var ret []string
|
||||
for _, w := range words {
|
||||
if len(w) > 1 {
|
||||
ret = append(ret, w)
|
||||
// #1450 - we need to open up the criteria for matching so that we
|
||||
// can match where path has no space between subject names -
|
||||
// ie name = "foo bar" - path = "foobar"
|
||||
// we post-match afterwards, so we can afford to be a little loose
|
||||
// with the query
|
||||
// just use the first two characters
|
||||
ret = append(ret, w[0:2])
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ import (
|
|||
var DB *sqlx.DB
|
||||
var WriteMu *sync.Mutex
|
||||
var dbPath string
|
||||
var appSchemaVersion uint = 22
|
||||
var appSchemaVersion uint = 25
|
||||
var databaseSchemaVersion uint
|
||||
|
||||
var (
|
||||
|
|
|
|||
1
pkg/database/migrations/23_scenes_interactive.up.sql
Normal file
1
pkg/database/migrations/23_scenes_interactive.up.sql
Normal file
|
|
@ -0,0 +1 @@
|
|||
ALTER TABLE `scenes` ADD COLUMN `interactive` boolean not null default '0';
|
||||
7
pkg/database/migrations/24_tag_aliases.up.sql
Normal file
7
pkg/database/migrations/24_tag_aliases.up.sql
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
CREATE TABLE `tag_aliases` (
|
||||
`tag_id` integer,
|
||||
`alias` varchar(255) NOT NULL,
|
||||
foreign key(`tag_id`) references `tags`(`id`) on delete CASCADE
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX `tag_aliases_alias_unique` on `tag_aliases` (`alias`);
|
||||
8
pkg/database/migrations/25_saved_filters.up.sql
Normal file
8
pkg/database/migrations/25_saved_filters.up.sql
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
CREATE TABLE `saved_filters` (
|
||||
`id` integer not null primary key autoincrement,
|
||||
`name` varchar(510) not null,
|
||||
`mode` varchar(255) not null,
|
||||
`filter` blob not null
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX `index_saved_filters_on_mode_name_unique` on `saved_filters` (`mode`, `name`);
|
||||
477
pkg/dlna/cd-service-desc.go
Normal file
477
pkg/dlna/cd-service-desc.go
Normal file
|
|
@ -0,0 +1,477 @@
|
|||
package dlna
|
||||
|
||||
// From: https://github.com/anacrolix/dms
|
||||
// Copyright (c) 2012, Matt Joiner <anacrolix@gmail.com>.
|
||||
// All rights reserved.
|
||||
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are met:
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above copyright
|
||||
// notice, this list of conditions and the following disclaimer in the
|
||||
// documentation and/or other materials provided with the distribution.
|
||||
// * Neither the name of the <organization> nor the
|
||||
// names of its contributors may be used to endorse or promote products
|
||||
// derived from this software without specific prior written permission.
|
||||
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
// DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
|
||||
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
const contentDirectoryServiceDescription = `<?xml version="1.0"?>
|
||||
<scpd xmlns="urn:schemas-upnp-org:service-1-0">
|
||||
<specVersion>
|
||||
<major>1</major>
|
||||
<minor>0</minor>
|
||||
</specVersion>
|
||||
<actionList>
|
||||
<action>
|
||||
<name>GetSearchCapabilities</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>SearchCaps</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>SearchCapabilities</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
<action>
|
||||
<name>GetSortCapabilities</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>SortCaps</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>SortCapabilities</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
<action>
|
||||
<name>GetSortExtensionCapabilities</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>SortExtensionCaps</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>SortExtensionCapabilities</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
<action>
|
||||
<name>GetFeatureList</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>FeatureList</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>FeatureList</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
<action>
|
||||
<name>GetSystemUpdateID</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>Id</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>SystemUpdateID</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
<action>
|
||||
<name>Browse</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>ObjectID</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_ObjectID</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>BrowseFlag</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_BrowseFlag</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>Filter</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_Filter</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>StartingIndex</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_Index</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>RequestedCount</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_Count</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>SortCriteria</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_SortCriteria</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>Result</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_Result</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>NumberReturned</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_Count</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>TotalMatches</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_Count</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>UpdateID</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_UpdateID</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
<action>
|
||||
<name>Search</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>ContainerID</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_ObjectID</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>SearchCriteria</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_SearchCriteria</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>Filter</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_Filter</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>StartingIndex</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_Index</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>RequestedCount</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_Count</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>SortCriteria</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_SortCriteria</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>Result</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_Result</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>NumberReturned</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_Count</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>TotalMatches</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_Count</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>UpdateID</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_UpdateID</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
<action>
|
||||
<name>CreateObject</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>ContainerID</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_ObjectID</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>Elements</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_Result</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>ObjectID</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_ObjectID</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>Result</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_Result</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
<action>
|
||||
<name>DestroyObject</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>ObjectID</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_ObjectID</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
<action>
|
||||
<name>UpdateObject</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>ObjectID</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_ObjectID</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>CurrentTagValue</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_TagValueList</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>NewTagValue</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_TagValueList</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
<action>
|
||||
<name>MoveObject</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>ObjectID</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_ObjectID</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>NewParentID</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_ObjectID</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>NewObjectID</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_ObjectID</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
<action>
|
||||
<name>ImportResource</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>SourceURI</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_URI</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>DestinationURI</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_URI</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>TransferID</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_TransferID</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
<action>
|
||||
<name>ExportResource</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>SourceURI</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_URI</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>DestinationURI</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_URI</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>TransferID</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_TransferID</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
<action>
|
||||
<name>StopTransferResource</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>TransferID</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_TransferID</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
<action>
|
||||
<name>DeleteResource</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>ResourceURI</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_URI</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
<action>
|
||||
<name>GetTransferProgress</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>TransferID</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_TransferID</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>TransferStatus</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_TransferStatus</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>TransferLength</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_TransferLength</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>TransferTotal</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_TransferTotal</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
<action>
|
||||
<name>CreateReference</name>
|
||||
<argumentList>
|
||||
<argument>
|
||||
<name>ContainerID</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_ObjectID</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>ObjectID</name>
|
||||
<direction>in</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_ObjectID</relatedStateVariable>
|
||||
</argument>
|
||||
<argument>
|
||||
<name>NewID</name>
|
||||
<direction>out</direction>
|
||||
<relatedStateVariable>A_ARG_TYPE_ObjectID</relatedStateVariable>
|
||||
</argument>
|
||||
</argumentList>
|
||||
</action>
|
||||
</actionList>
|
||||
<serviceStateTable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>SearchCapabilities</name>
|
||||
<dataType>string</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>SortCapabilities</name>
|
||||
<dataType>string</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>SortExtensionCapabilities</name>
|
||||
<dataType>string</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="yes">
|
||||
<name>SystemUpdateID</name>
|
||||
<dataType>ui4</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="yes">
|
||||
<name>ContainerUpdateIDs</name>
|
||||
<dataType>string</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="yes">
|
||||
<name>TransferIDs</name>
|
||||
<dataType>string</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>FeatureList</name>
|
||||
<dataType>string</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>A_ARG_TYPE_ObjectID</name>
|
||||
<dataType>string</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>A_ARG_TYPE_Result</name>
|
||||
<dataType>string</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>A_ARG_TYPE_SearchCriteria</name>
|
||||
<dataType>string</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>A_ARG_TYPE_BrowseFlag</name>
|
||||
<dataType>string</dataType>
|
||||
<allowedValueList>
|
||||
<allowedValue>BrowseMetadata</allowedValue>
|
||||
<allowedValue>BrowseDirectChildren</allowedValue>
|
||||
</allowedValueList>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>A_ARG_TYPE_Filter</name>
|
||||
<dataType>string</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>A_ARG_TYPE_SortCriteria</name>
|
||||
<dataType>string</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>A_ARG_TYPE_Index</name>
|
||||
<dataType>ui4</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>A_ARG_TYPE_Count</name>
|
||||
<dataType>ui4</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>A_ARG_TYPE_UpdateID</name>
|
||||
<dataType>ui4</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>A_ARG_TYPE_TransferID</name>
|
||||
<dataType>ui4</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>A_ARG_TYPE_TransferStatus</name>
|
||||
<dataType>string</dataType>
|
||||
<allowedValueList>
|
||||
<allowedValue>COMPLETED</allowedValue>
|
||||
<allowedValue>ERROR</allowedValue>
|
||||
<allowedValue>IN_PROGRESS</allowedValue>
|
||||
<allowedValue>STOPPED</allowedValue>
|
||||
</allowedValueList>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>A_ARG_TYPE_TransferLength</name>
|
||||
<dataType>string</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>A_ARG_TYPE_TransferTotal</name>
|
||||
<dataType>string</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>A_ARG_TYPE_TagValueList</name>
|
||||
<dataType>string</dataType>
|
||||
</stateVariable>
|
||||
<stateVariable sendEvents="no">
|
||||
<name>A_ARG_TYPE_URI</name>
|
||||
<dataType>uri</dataType>
|
||||
</stateVariable>
|
||||
</serviceStateTable>
|
||||
</scpd>`
|
||||
732
pkg/dlna/cds.go
Normal file
732
pkg/dlna/cds.go
Normal file
|
|
@ -0,0 +1,732 @@
|
|||
package dlna
|
||||
|
||||
// from https://github.com/rclone/rclone
|
||||
// Copyright (C) 2012 by Nick Craig-Wood http://www.craig-wood.com/nick/
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in
|
||||
// all copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
// THE SOFTWARE.
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/xml"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/anacrolix/dms/dlna"
|
||||
"github.com/anacrolix/dms/upnp"
|
||||
"github.com/anacrolix/dms/upnpav"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
var pageSize = 100
|
||||
|
||||
type browse struct {
|
||||
ObjectID string
|
||||
BrowseFlag string
|
||||
Filter string
|
||||
StartingIndex int
|
||||
RequestedCount int
|
||||
}
|
||||
|
||||
type contentDirectoryService struct {
|
||||
*Server
|
||||
upnp.Eventing
|
||||
txnManager models.TransactionManager
|
||||
}
|
||||
|
||||
func formatDurationSexagesimal(d time.Duration) string {
|
||||
ns := d % time.Second
|
||||
d /= time.Second
|
||||
s := d % 60
|
||||
d /= 60
|
||||
m := d % 60
|
||||
d /= 60
|
||||
h := d
|
||||
ret := fmt.Sprintf("%d:%02d:%02d.%09d", h, m, s, ns)
|
||||
ret = strings.TrimRight(ret, "0")
|
||||
ret = strings.TrimRight(ret, ".")
|
||||
return ret
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) updateIDString() string {
|
||||
return fmt.Sprintf("%d", uint32(os.Getpid()))
|
||||
}
|
||||
|
||||
func sceneToContainer(scene *models.Scene, parent string, host string) interface{} {
|
||||
// make stash server URL
|
||||
// TODO - fix this
|
||||
iconURI := (&url.URL{
|
||||
Scheme: "http",
|
||||
Host: host,
|
||||
Path: iconPath,
|
||||
RawQuery: url.Values{
|
||||
"scene": {strconv.Itoa(scene.ID)},
|
||||
"c": {"jpeg"},
|
||||
}.Encode(),
|
||||
}).String()
|
||||
|
||||
// Object goes first
|
||||
obj := upnpav.Object{
|
||||
ID: strconv.Itoa(scene.ID),
|
||||
Restricted: 1,
|
||||
ParentID: parent,
|
||||
Title: scene.GetTitle(),
|
||||
Class: "object.item.videoItem",
|
||||
Icon: iconURI,
|
||||
AlbumArtURI: iconURI,
|
||||
}
|
||||
|
||||
// Wrap up
|
||||
item := upnpav.Item{
|
||||
Object: obj,
|
||||
Res: make([]upnpav.Resource, 0, 1),
|
||||
}
|
||||
|
||||
mimeType := "video/mp4"
|
||||
size, _ := strconv.Atoi(scene.Size.String)
|
||||
|
||||
duration := int64(scene.Duration.Float64)
|
||||
|
||||
item.Res = append(item.Res, upnpav.Resource{
|
||||
URL: (&url.URL{
|
||||
Scheme: "http",
|
||||
Host: host,
|
||||
Path: resPath,
|
||||
RawQuery: url.Values{
|
||||
"scene": {strconv.Itoa(scene.ID)},
|
||||
}.Encode(),
|
||||
}).String(),
|
||||
ProtocolInfo: fmt.Sprintf("http-get:*:%s:%s", mimeType, dlna.ContentFeatures{
|
||||
SupportRange: true,
|
||||
}.String()),
|
||||
Bitrate: uint(scene.Bitrate.Int64),
|
||||
// TODO - make %d:%02d:%02d string
|
||||
Duration: formatDurationSexagesimal(time.Duration(duration) * time.Second),
|
||||
Size: uint64(size),
|
||||
// Resolution: resolution,
|
||||
})
|
||||
|
||||
item.Res = append(item.Res, upnpav.Resource{
|
||||
URL: iconURI,
|
||||
ProtocolInfo: "http-get:*:image/jpeg:DLNA.ORG_PN=JPEG_MED",
|
||||
})
|
||||
|
||||
return item
|
||||
}
|
||||
|
||||
// ContentDirectory object from ObjectID.
|
||||
func (me *contentDirectoryService) objectFromID(id string) (o object, err error) {
|
||||
o.Path, err = url.QueryUnescape(id)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
if o.Path == "0" {
|
||||
o.Path = "/"
|
||||
}
|
||||
// o.Path = path.Clean(o.Path)
|
||||
// if !path.IsAbs(o.Path) {
|
||||
// err = fmt.Errorf("bad ObjectID %v", o.Path)
|
||||
// return
|
||||
// }
|
||||
o.RootObjectPath = me.RootObjectPath
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func childPath(paths []string) []string {
|
||||
if len(paths) > 1 {
|
||||
return paths[1:]
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) Handle(action string, argsXML []byte, r *http.Request) (map[string]string, error) {
|
||||
host := r.Host
|
||||
// userAgent := r.UserAgent()
|
||||
switch action {
|
||||
case "GetSystemUpdateID":
|
||||
return map[string]string{
|
||||
"Id": me.updateIDString(),
|
||||
}, nil
|
||||
case "GetSortCapabilities":
|
||||
return map[string]string{
|
||||
"SortCaps": "dc:title",
|
||||
}, nil
|
||||
case "Browse":
|
||||
var browse browse
|
||||
if err := xml.Unmarshal([]byte(argsXML), &browse); err != nil {
|
||||
return nil, upnp.Errorf(upnp.ArgumentValueInvalidErrorCode, "cannot unmarshal browse argument: %s", err.Error())
|
||||
}
|
||||
|
||||
obj, err := me.objectFromID(browse.ObjectID)
|
||||
if err != nil {
|
||||
return nil, upnp.Errorf(upnpav.NoSuchObjectErrorCode, err.Error())
|
||||
}
|
||||
|
||||
switch browse.BrowseFlag {
|
||||
case "BrowseDirectChildren":
|
||||
return me.handleBrowseDirectChildren(obj, host)
|
||||
case "BrowseMetadata":
|
||||
return me.handleBrowseMetadata(obj, host)
|
||||
default:
|
||||
return nil, upnp.Errorf(upnp.ArgumentValueInvalidErrorCode, "unhandled browse flag: %v", browse.BrowseFlag)
|
||||
}
|
||||
case "GetSearchCapabilities":
|
||||
return map[string]string{
|
||||
"SearchCaps": "",
|
||||
}, nil
|
||||
// from https://github.com/rclone/rclone/blob/master/cmd/serve/dlna/cds.go
|
||||
// Samsung Extensions
|
||||
case "X_GetFeatureList":
|
||||
return map[string]string{
|
||||
"FeatureList": `<Features xmlns="urn:schemas-upnp-org:av:avs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="urn:schemas-upnp-org:av:avs http://www.upnp.org/schemas/av/avs.xsd">
|
||||
<Feature name="samsung.com_BASICVIEW" version="1">
|
||||
<container id="0" type="object.item.imageItem"/>
|
||||
<container id="0" type="object.item.audioItem"/>
|
||||
<container id="0" type="object.item.videoItem"/>
|
||||
</Feature>
|
||||
</Features>`}, nil
|
||||
case "X_SetBookmark":
|
||||
// just ignore
|
||||
return map[string]string{}, nil
|
||||
default:
|
||||
return nil, upnp.InvalidActionError
|
||||
}
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) handleBrowseDirectChildren(obj object, host string) (map[string]string, error) {
|
||||
// Read folder and return children
|
||||
// TODO: check if obj == 0 and return root objects
|
||||
// TODO: check if special path and return files
|
||||
|
||||
var objs []interface{}
|
||||
|
||||
if obj.IsRoot() {
|
||||
objs = getRootObjects()
|
||||
}
|
||||
|
||||
paths := strings.Split(obj.Path, "/")
|
||||
|
||||
// All videos
|
||||
if obj.Path == "all" {
|
||||
objs = me.getAllScenes(host)
|
||||
}
|
||||
|
||||
if strings.HasPrefix(obj.Path, "all/") {
|
||||
page := getPageFromID(paths)
|
||||
if page != nil {
|
||||
objs = me.getPageVideos(&models.SceneFilterType{}, "all", *page, host)
|
||||
}
|
||||
}
|
||||
|
||||
// Saved searches
|
||||
// if obj.Path == "saved-searches" {
|
||||
// var savedPlaylists []models.Playlist
|
||||
// db, _ := models.GetDB()
|
||||
// db.Where("is_deo_enabled = ?", true).Order("ordering asc").Find(&savedPlaylists)
|
||||
// db.Close()
|
||||
|
||||
// for _, playlist := range savedPlaylists {
|
||||
// objs = append(objs, upnpav.Container{Object: upnpav.Object{
|
||||
// ID: "saved-searches/" + strconv.Itoa(int(playlist.ID)),
|
||||
// Restricted: 1,
|
||||
// ParentID: "saved-searches",
|
||||
// Class: "object.container.storageFolder",
|
||||
// Title: playlist.Name,
|
||||
// }})
|
||||
// }
|
||||
// }
|
||||
|
||||
// if strings.HasPrefix(obj.Path, "saved-searches/") {
|
||||
// id := strings.Split(obj.Path, "/")
|
||||
|
||||
// var savedPlaylist models.Playlist
|
||||
// db, _ := models.GetDB()
|
||||
// db.Where("id = ?", id[1]).First(&savedPlaylist)
|
||||
// db.Close()
|
||||
|
||||
// var r models.RequestSceneList
|
||||
// if err := json.Unmarshal([]byte(savedPlaylist.SearchParams), &r); err == nil {
|
||||
// r.IsAccessible = optional.NewBool(true)
|
||||
// r.IsAvailable = optional.NewBool(true)
|
||||
// data := models.QueryScenesFull(r)
|
||||
|
||||
// for i := range data.Scenes {
|
||||
// objs = append(objs, me.sceneToContainer(data.Scenes[i], "sites/"+id[1], host))
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// Studios
|
||||
if obj.Path == "studios" {
|
||||
objs = me.getStudios()
|
||||
}
|
||||
|
||||
if strings.HasPrefix(obj.Path, "studios/") {
|
||||
objs = me.getStudioScenes(childPath(paths), host)
|
||||
}
|
||||
|
||||
// Tags
|
||||
if obj.Path == "tags" {
|
||||
objs = me.getTags()
|
||||
}
|
||||
|
||||
if strings.HasPrefix(obj.Path, "tags/") {
|
||||
objs = me.getTagScenes(childPath(paths), host)
|
||||
}
|
||||
|
||||
// Performers
|
||||
if obj.Path == "performers" {
|
||||
objs = me.getPerformers()
|
||||
}
|
||||
|
||||
if strings.HasPrefix(obj.Path, "performers/") {
|
||||
objs = me.getPerformerScenes(childPath(paths), host)
|
||||
}
|
||||
|
||||
// Movies
|
||||
if obj.Path == "movies" {
|
||||
objs = me.getMovies()
|
||||
}
|
||||
|
||||
if strings.HasPrefix(obj.Path, "movies/") {
|
||||
objs = me.getMovieScenes(childPath(paths), host)
|
||||
}
|
||||
|
||||
// Rating
|
||||
if obj.Path == "rating" {
|
||||
objs = me.getRating()
|
||||
}
|
||||
|
||||
if strings.HasPrefix(obj.Path, "rating/") {
|
||||
objs = me.getRatingScenes(childPath(paths), host)
|
||||
}
|
||||
|
||||
return makeBrowseResult(objs, me.updateIDString())
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) handleBrowseMetadata(obj object, host string) (map[string]string, error) {
|
||||
var objs []interface{}
|
||||
var updateID string
|
||||
|
||||
// if numeric, then must be scene, otherwise handle as if path
|
||||
sceneID, err := strconv.Atoi(obj.Path)
|
||||
if err != nil {
|
||||
// #1465 - handle root object
|
||||
if obj.IsRoot() {
|
||||
objs = getRootObject()
|
||||
} else {
|
||||
// HACK: just create a fake storage folder to return. The name won't
|
||||
// be correct, but hopefully the names returned from handleBrowseDirectChildren
|
||||
// will be used instead.
|
||||
objs = []interface{}{makeStorageFolder(obj.ID(), obj.ID(), obj.ParentID())}
|
||||
}
|
||||
|
||||
updateID = me.updateIDString()
|
||||
} else {
|
||||
var scene *models.Scene
|
||||
|
||||
if err := me.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
scene, err = r.Scene().Find(sceneID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
}
|
||||
|
||||
if scene != nil {
|
||||
upnpObject := sceneToContainer(scene, "-1", host)
|
||||
objs = []interface{}{upnpObject}
|
||||
|
||||
// http://upnp.org/specs/av/UPnP-av-ContentDirectory-v1-Service.pdf
|
||||
// maximum update ID is 2**32, then rolls back to 0
|
||||
const maxUpdateID int64 = 1 << 32
|
||||
updateID = fmt.Sprint(scene.UpdatedAt.Timestamp.Unix() % maxUpdateID)
|
||||
} else {
|
||||
return nil, upnp.Errorf(upnpav.NoSuchObjectErrorCode, "scene not found")
|
||||
}
|
||||
}
|
||||
|
||||
return makeBrowseResult(objs, updateID)
|
||||
}
|
||||
|
||||
func makeBrowseResult(objs []interface{}, updateID string) (map[string]string, error) {
|
||||
result, err := xml.Marshal(objs)
|
||||
if err != nil {
|
||||
return nil, upnp.Errorf(upnp.ActionFailedErrorCode, "could not marshal objects: %s", err.Error())
|
||||
}
|
||||
|
||||
return map[string]string{
|
||||
"TotalMatches": fmt.Sprint(len(objs)),
|
||||
"NumberReturned": fmt.Sprint(len(objs)),
|
||||
"Result": didl_lite(string(result)),
|
||||
"UpdateID": updateID,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func makeStorageFolder(id, title, parentID string) upnpav.Container {
|
||||
defaultChildCount := 1
|
||||
return upnpav.Container{
|
||||
Object: upnpav.Object{
|
||||
ID: id,
|
||||
Restricted: 1,
|
||||
ParentID: parentID,
|
||||
Class: "object.container.storageFolder",
|
||||
Title: title,
|
||||
},
|
||||
ChildCount: defaultChildCount,
|
||||
}
|
||||
}
|
||||
|
||||
func getRootObject() []interface{} {
|
||||
const rootID = "0"
|
||||
|
||||
return []interface{}{makeStorageFolder(rootID, "stash", "-1")}
|
||||
}
|
||||
|
||||
func getRootObjects() []interface{} {
|
||||
const rootID = "0"
|
||||
|
||||
var objs []interface{}
|
||||
|
||||
objs = append(objs, makeStorageFolder("all", "all", rootID))
|
||||
objs = append(objs, makeStorageFolder("performers", "performers", rootID))
|
||||
objs = append(objs, makeStorageFolder("tags", "tags", rootID))
|
||||
objs = append(objs, makeStorageFolder("studios", "studios", rootID))
|
||||
objs = append(objs, makeStorageFolder("movies", "movies", rootID))
|
||||
objs = append(objs, makeStorageFolder("rating", "rating", rootID))
|
||||
|
||||
return objs
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) getVideos(sceneFilter *models.SceneFilterType, parentID string, host string) []interface{} {
|
||||
var objs []interface{}
|
||||
|
||||
if err := me.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
sort := "title"
|
||||
findFilter := &models.FindFilterType{
|
||||
PerPage: &pageSize,
|
||||
Sort: &sort,
|
||||
}
|
||||
|
||||
scenes, total, err := r.Scene().Query(sceneFilter, findFilter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if total > pageSize {
|
||||
pager := scenePager{
|
||||
sceneFilter: sceneFilter,
|
||||
parentID: parentID,
|
||||
}
|
||||
|
||||
objs, err = pager.getPages(r, total)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
for _, s := range scenes {
|
||||
objs = append(objs, sceneToContainer(s, parentID, host))
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
}
|
||||
|
||||
return objs
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) getPageVideos(sceneFilter *models.SceneFilterType, parentID string, page int, host string) []interface{} {
|
||||
var objs []interface{}
|
||||
|
||||
if err := me.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
pager := scenePager{
|
||||
sceneFilter: sceneFilter,
|
||||
parentID: parentID,
|
||||
}
|
||||
|
||||
var err error
|
||||
objs, err = pager.getPageVideos(r, page, host)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
}
|
||||
|
||||
return objs
|
||||
}
|
||||
|
||||
func getPageFromID(paths []string) *int {
|
||||
i := utils.StrIndex(paths, "page")
|
||||
if i == -1 || i+1 >= len(paths) {
|
||||
return nil
|
||||
}
|
||||
|
||||
ret, err := strconv.Atoi(paths[i+1])
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
return &ret
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) getAllScenes(host string) []interface{} {
|
||||
return me.getVideos(&models.SceneFilterType{}, "all", host)
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) getStudios() []interface{} {
|
||||
var objs []interface{}
|
||||
|
||||
if err := me.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
studios, err := r.Studio().All()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, s := range studios {
|
||||
objs = append(objs, makeStorageFolder("studios/"+strconv.Itoa(s.ID), s.Name.String, "studios"))
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Errorf(err.Error())
|
||||
}
|
||||
|
||||
return objs
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) getStudioScenes(paths []string, host string) []interface{} {
|
||||
sceneFilter := &models.SceneFilterType{
|
||||
Studios: &models.HierarchicalMultiCriterionInput{
|
||||
Modifier: models.CriterionModifierIncludes,
|
||||
Value: []string{paths[0]},
|
||||
Depth: 0,
|
||||
},
|
||||
}
|
||||
|
||||
parentID := "studios/" + strings.Join(paths, "/")
|
||||
|
||||
page := getPageFromID(paths)
|
||||
if page != nil {
|
||||
return me.getPageVideos(sceneFilter, parentID, *page, host)
|
||||
}
|
||||
|
||||
return me.getVideos(sceneFilter, parentID, host)
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) getTags() []interface{} {
|
||||
var objs []interface{}
|
||||
|
||||
if err := me.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
tags, err := r.Tag().All()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, s := range tags {
|
||||
objs = append(objs, makeStorageFolder("tags/"+strconv.Itoa(s.ID), s.Name, "tags"))
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Errorf(err.Error())
|
||||
}
|
||||
|
||||
return objs
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) getTagScenes(paths []string, host string) []interface{} {
|
||||
sceneFilter := &models.SceneFilterType{
|
||||
Tags: &models.MultiCriterionInput{
|
||||
Modifier: models.CriterionModifierIncludes,
|
||||
Value: []string{paths[0]},
|
||||
},
|
||||
}
|
||||
|
||||
parentID := "tags/" + strings.Join(paths, "/")
|
||||
|
||||
page := getPageFromID(paths)
|
||||
if page != nil {
|
||||
return me.getPageVideos(sceneFilter, parentID, *page, host)
|
||||
}
|
||||
|
||||
return me.getVideos(sceneFilter, parentID, host)
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) getPerformers() []interface{} {
|
||||
var objs []interface{}
|
||||
|
||||
if err := me.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
performers, err := r.Performer().All()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, s := range performers {
|
||||
objs = append(objs, makeStorageFolder("performers/"+strconv.Itoa(s.ID), s.Name.String, "performers"))
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Errorf(err.Error())
|
||||
}
|
||||
|
||||
return objs
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) getPerformerScenes(paths []string, host string) []interface{} {
|
||||
sceneFilter := &models.SceneFilterType{
|
||||
Performers: &models.MultiCriterionInput{
|
||||
Modifier: models.CriterionModifierIncludes,
|
||||
Value: []string{paths[0]},
|
||||
},
|
||||
}
|
||||
|
||||
parentID := "performers/" + strings.Join(paths, "/")
|
||||
|
||||
page := getPageFromID(paths)
|
||||
if page != nil {
|
||||
return me.getPageVideos(sceneFilter, parentID, *page, host)
|
||||
}
|
||||
|
||||
return me.getVideos(sceneFilter, parentID, host)
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) getMovies() []interface{} {
|
||||
var objs []interface{}
|
||||
|
||||
if err := me.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
movies, err := r.Movie().All()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, s := range movies {
|
||||
objs = append(objs, makeStorageFolder("movies/"+strconv.Itoa(s.ID), s.Name.String, "movies"))
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Errorf(err.Error())
|
||||
}
|
||||
|
||||
return objs
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) getMovieScenes(paths []string, host string) []interface{} {
|
||||
sceneFilter := &models.SceneFilterType{
|
||||
Movies: &models.MultiCriterionInput{
|
||||
Modifier: models.CriterionModifierIncludes,
|
||||
Value: []string{paths[0]},
|
||||
},
|
||||
}
|
||||
|
||||
parentID := "movies/" + strings.Join(paths, "/")
|
||||
|
||||
page := getPageFromID(paths)
|
||||
if page != nil {
|
||||
return me.getPageVideos(sceneFilter, parentID, *page, host)
|
||||
}
|
||||
|
||||
return me.getVideos(sceneFilter, parentID, host)
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) getRating() []interface{} {
|
||||
var objs []interface{}
|
||||
|
||||
for r := 1; r <= 5; r++ {
|
||||
rStr := strconv.Itoa(r)
|
||||
objs = append(objs, makeStorageFolder("rating/"+rStr, rStr, "rating"))
|
||||
}
|
||||
|
||||
return objs
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) getRatingScenes(paths []string, host string) []interface{} {
|
||||
r, err := strconv.Atoi(paths[0])
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
sceneFilter := &models.SceneFilterType{
|
||||
Rating: &models.IntCriterionInput{
|
||||
Modifier: models.CriterionModifierEquals,
|
||||
Value: r,
|
||||
},
|
||||
}
|
||||
|
||||
parentID := "rating/" + strings.Join(paths, "/")
|
||||
|
||||
page := getPageFromID(paths)
|
||||
if page != nil {
|
||||
return me.getPageVideos(sceneFilter, parentID, *page, host)
|
||||
}
|
||||
|
||||
return me.getVideos(sceneFilter, parentID, host)
|
||||
}
|
||||
|
||||
// object represents a ContentDirectory object.
type object struct {
	Path           string // cleaned, absolute path for the object relative to the server
	RootObjectPath string
}

// FilePath returns the actual local filesystem path for the object.
func (o *object) FilePath() string {
	return filepath.Join(o.RootObjectPath, filepath.FromSlash(o.Path))
}

// ID returns the ObjectID for the object, used in various
// ContentDirectory actions. The root path ("/") maps to the canonical
// root ObjectID "0"; everything else is query-escaped so IDs never
// contain a raw slash.
func (o object) ID() string {
	if len(o.Path) == 1 {
		return "0"
	}
	return url.QueryEscape(o.Path)
}

// IsRoot reports whether the object is the root of the hierarchy.
func (o *object) IsRoot() bool {
	return o.Path == "/"
}

// ParentID returns the parent's ObjectID. Fortunately it can be deduced
// from the object's own path (for now); the root's parent is the
// conventional "-1".
func (o object) ParentID() string {
	if o.IsRoot() {
		return "-1"
	}
	parent := object{Path: path.Dir(o.Path), RootObjectPath: o.RootObjectPath}
	return parent.ID()
}
|
||||
82
pkg/dlna/cds_test.go
Normal file
82
pkg/dlna/cds_test.go
Normal file
|
|
@ -0,0 +1,82 @@
|
|||
package dlna
|
||||
|
||||
// From: https://github.com/anacrolix/dms
|
||||
// Copyright (c) 2012, Matt Joiner <anacrolix@gmail.com>.
|
||||
// All rights reserved.
|
||||
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are met:
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above copyright
|
||||
// notice, this list of conditions and the following disclaimer in the
|
||||
// documentation and/or other materials provided with the distribution.
|
||||
// * Neither the name of the <organization> nor the
|
||||
// names of its contributors may be used to endorse or promote products
|
||||
// derived from this software without specific prior written permission.
|
||||
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
// DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
|
||||
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestEscapeObjectID(t *testing.T) {
|
||||
o := object{
|
||||
Path: "/some/file",
|
||||
}
|
||||
id := o.ID()
|
||||
if strings.ContainsAny(id, "/") {
|
||||
t.Skip("may not work with some players: object IDs contain '/'")
|
||||
}
|
||||
}
|
||||
|
||||
func TestRootObjectID(t *testing.T) {
|
||||
if (object{Path: "/"}).ID() != "0" {
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func TestRootParentObjectID(t *testing.T) {
|
||||
if (object{Path: "/"}).ParentID() != "-1" {
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func testHandleBrowse(argsXML string) (map[string]string, error) {
|
||||
cds := contentDirectoryService{
|
||||
Server: &Server{},
|
||||
txnManager: mocks.NewTransactionManager(),
|
||||
}
|
||||
|
||||
r := &http.Request{}
|
||||
return cds.Handle("Browse", []byte(argsXML), r)
|
||||
}
|
||||
|
||||
func TestBrowseMetadataRoot(t *testing.T) {
|
||||
argsXML := `<u:Browse xmlns:u="urn:schemas-upnp-org:service:ContentDirectory:1"><ObjectID>0</ObjectID><BrowseFlag>BrowseMetadata</BrowseFlag><Filter>*</Filter><StartingIndex>0</StartingIndex><RequestedCount>0</RequestedCount><SortCriteria></SortCriteria></u:Browse>`
|
||||
_, err := testHandleBrowse(argsXML)
|
||||
|
||||
assert.Nil(t, err)
|
||||
}
|
||||
|
||||
func TestBrowseMetadataTags(t *testing.T) {
|
||||
argsXML := `<u:Browse xmlns:u="urn:schemas-upnp-org:service:ContentDirectory:1"><ObjectID>tags</ObjectID><BrowseFlag>BrowseMetadata</BrowseFlag><Filter>*</Filter><StartingIndex>0</StartingIndex><RequestedCount>0</RequestedCount><SortCriteria></SortCriteria></u:Browse>`
|
||||
_, err := testHandleBrowse(argsXML)
|
||||
|
||||
assert.Nil(t, err)
|
||||
}
|
||||
205
pkg/dlna/cm-service-desc.go
Normal file
205
pkg/dlna/cm-service-desc.go
Normal file
|
|
@ -0,0 +1,205 @@
|
|||
package dlna
|
||||
|
||||
// from https://github.com/rclone/rclone
|
||||
// Copyright (C) 2012 by Nick Craig-Wood http://www.craig-wood.com/nick/
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in
|
||||
// all copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
// THE SOFTWARE.
|
||||
|
||||
// connectionManagerServiceDescription is the SCPD (service description)
// XML served for the UPnP ConnectionManager:1 service. Only
// GetProtocolInfo is actually implemented by this server (see cms.go);
// the remaining actions and state variables are declared for spec
// completeness.
const connectionManagerServiceDescription = `<?xml version="1.0" encoding="UTF-8"?>
<scpd xmlns="urn:schemas-upnp-org:service-1-0">
<specVersion>
<major>1</major>
<minor>0</minor>
</specVersion>
<actionList>
<action>
<name>GetProtocolInfo</name>
<argumentList>
<argument>
<name>Source</name>
<direction>out</direction>
<relatedStateVariable>SourceProtocolInfo</relatedStateVariable>
</argument>
<argument>
<name>Sink</name>
<direction>out</direction>
<relatedStateVariable>SinkProtocolInfo</relatedStateVariable>
</argument>
</argumentList>
</action>
<action>
<name>PrepareForConnection</name>
<argumentList>
<argument>
<name>RemoteProtocolInfo</name>
<direction>in</direction>
<relatedStateVariable>A_ARG_TYPE_ProtocolInfo</relatedStateVariable>
</argument>
<argument>
<name>PeerConnectionManager</name>
<direction>in</direction>
<relatedStateVariable>A_ARG_TYPE_ConnectionManager</relatedStateVariable>
</argument>
<argument>
<name>PeerConnectionID</name>
<direction>in</direction>
<relatedStateVariable>A_ARG_TYPE_ConnectionID</relatedStateVariable>
</argument>
<argument>
<name>Direction</name>
<direction>in</direction>
<relatedStateVariable>A_ARG_TYPE_Direction</relatedStateVariable>
</argument>
<argument>
<name>ConnectionID</name>
<direction>out</direction>
<relatedStateVariable>A_ARG_TYPE_ConnectionID</relatedStateVariable>
</argument>
<argument>
<name>AVTransportID</name>
<direction>out</direction>
<relatedStateVariable>A_ARG_TYPE_AVTransportID</relatedStateVariable>
</argument>
<argument>
<name>RcsID</name>
<direction>out</direction>
<relatedStateVariable>A_ARG_TYPE_RcsID</relatedStateVariable>
</argument>
</argumentList>
</action>
<action>
<name>ConnectionComplete</name>
<argumentList>
<argument>
<name>ConnectionID</name>
<direction>in</direction>
<relatedStateVariable>A_ARG_TYPE_ConnectionID</relatedStateVariable>
</argument>
</argumentList>
</action>
<action>
<name>GetCurrentConnectionIDs</name>
<argumentList>
<argument>
<name>ConnectionIDs</name>
<direction>out</direction>
<relatedStateVariable>CurrentConnectionIDs</relatedStateVariable>
</argument>
</argumentList>
</action>
<action>
<name>GetCurrentConnectionInfo</name>
<argumentList>
<argument>
<name>ConnectionID</name>
<direction>in</direction>
<relatedStateVariable>A_ARG_TYPE_ConnectionID</relatedStateVariable>
</argument>
<argument>
<name>RcsID</name>
<direction>out</direction>
<relatedStateVariable>A_ARG_TYPE_RcsID</relatedStateVariable>
</argument>
<argument>
<name>AVTransportID</name>
<direction>out</direction>
<relatedStateVariable>A_ARG_TYPE_AVTransportID</relatedStateVariable>
</argument>
<argument>
<name>ProtocolInfo</name>
<direction>out</direction>
<relatedStateVariable>A_ARG_TYPE_ProtocolInfo</relatedStateVariable>
</argument>
<argument>
<name>PeerConnectionManager</name>
<direction>out</direction>
<relatedStateVariable>A_ARG_TYPE_ConnectionManager</relatedStateVariable>
</argument>
<argument>
<name>PeerConnectionID</name>
<direction>out</direction>
<relatedStateVariable>A_ARG_TYPE_ConnectionID</relatedStateVariable>
</argument>
<argument>
<name>Direction</name>
<direction>out</direction>
<relatedStateVariable>A_ARG_TYPE_Direction</relatedStateVariable>
</argument>
<argument>
<name>Status</name>
<direction>out</direction>
<relatedStateVariable>A_ARG_TYPE_ConnectionStatus</relatedStateVariable>
</argument>
</argumentList>
</action>
</actionList>
<serviceStateTable>
<stateVariable sendEvents="yes">
<name>SourceProtocolInfo</name>
<dataType>string</dataType>
</stateVariable>
<stateVariable sendEvents="yes">
<name>SinkProtocolInfo</name>
<dataType>string</dataType>
</stateVariable>
<stateVariable sendEvents="yes">
<name>CurrentConnectionIDs</name>
<dataType>string</dataType>
</stateVariable>
<stateVariable sendEvents="no">
<name>A_ARG_TYPE_ConnectionStatus</name>
<dataType>string</dataType>
<allowedValueList>
<allowedValue>OK</allowedValue>
<allowedValue>ContentFormatMismatch</allowedValue>
<allowedValue>InsufficientBandwidth</allowedValue>
<allowedValue>UnreliableChannel</allowedValue>
<allowedValue>Unknown</allowedValue>
</allowedValueList>
</stateVariable>
<stateVariable sendEvents="no">
<name>A_ARG_TYPE_ConnectionManager</name>
<dataType>string</dataType>
</stateVariable>
<stateVariable sendEvents="no">
<name>A_ARG_TYPE_Direction</name>
<dataType>string</dataType>
<allowedValueList>
<allowedValue>Input</allowedValue>
<allowedValue>Output</allowedValue>
</allowedValueList>
</stateVariable>
<stateVariable sendEvents="no">
<name>A_ARG_TYPE_ProtocolInfo</name>
<dataType>string</dataType>
</stateVariable>
<stateVariable sendEvents="no">
<name>A_ARG_TYPE_ConnectionID</name>
<dataType>i4</dataType>
</stateVariable>
<stateVariable sendEvents="no">
<name>A_ARG_TYPE_AVTransportID</name>
<dataType>i4</dataType>
</stateVariable>
<stateVariable sendEvents="no">
<name>A_ARG_TYPE_RcsID</name>
<dataType>i4</dataType>
</stateVariable>
</serviceStateTable>
</scpd>`
|
||||
47
pkg/dlna/cms.go
Normal file
47
pkg/dlna/cms.go
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
package dlna
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/anacrolix/dms/upnp"
|
||||
)
|
||||
|
||||
// from https://github.com/rclone/rclone
|
||||
// Copyright (C) 2012 by Nick Craig-Wood http://www.craig-wood.com/nick/
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in
|
||||
// all copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
// THE SOFTWARE.
|
||||
|
||||
// defaultProtocolInfo is the comma-separated list of DLNA protocol-info
// entries advertised by GetProtocolInfo as supported source formats.
const defaultProtocolInfo = "http-get:*:video/mpeg:*,http-get:*:video/mp4:*,http-get:*:video/vnd.dlna.mpeg-tts:*,http-get:*:video/avi:*,http-get:*:video/x-matroska:*,http-get:*:video/x-ms-wmv:*,http-get:*:video/wtv:*,http-get:*:audio/mpeg:*,http-get:*:audio/mp3:*,http-get:*:audio/mp4:*,http-get:*:audio/x-ms-wma*,http-get:*:audio/wav:*,http-get:*:audio/L16:*,http-get:*image/jpeg:*,http-get:*image/png:*,http-get:*image/gif:*,http-get:*image/tiff:*"
|
||||
|
||||
// connectionManagerService implements the UPnP ConnectionManager
// service on top of the DLNA Server.
type connectionManagerService struct {
	*Server
	upnp.Eventing
}
|
||||
|
||||
func (cms *connectionManagerService) Handle(action string, argsXML []byte, r *http.Request) (map[string]string, error) {
|
||||
switch action {
|
||||
case "GetProtocolInfo":
|
||||
return map[string]string{
|
||||
"Source": defaultProtocolInfo,
|
||||
"Sink": "",
|
||||
}, nil
|
||||
default:
|
||||
return nil, upnp.InvalidActionError
|
||||
}
|
||||
}
|
||||
699
pkg/dlna/dms.go
Normal file
699
pkg/dlna/dms.go
Normal file
|
|
@ -0,0 +1,699 @@
|
|||
package dlna
|
||||
|
||||
// Derived from: https://github.com/anacrolix/dms
|
||||
// Copyright (c) 2012, Matt Joiner <anacrolix@gmail.com>.
|
||||
// All rights reserved.
|
||||
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are met:
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above copyright
|
||||
// notice, this list of conditions and the following disclaimer in the
|
||||
// documentation and/or other materials provided with the distribution.
|
||||
// * Neither the name of the <organization> nor the
|
||||
// names of its contributors may be used to endorse or promote products
|
||||
// derived from this software without specific prior written permission.
|
||||
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
// DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
|
||||
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import (
	"bytes"
	"context"
	"crypto/md5"
	"encoding/xml"
	"fmt"
	"io"
	"io/ioutil"
	"net"
	"net/http"
	"net/http/pprof"
	"net/url"
	"path"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/anacrolix/dms/soap"
	"github.com/anacrolix/dms/ssdp"
	"github.com/anacrolix/dms/upnp"

	"github.com/stashapp/stash/pkg/logger"
	"github.com/stashapp/stash/pkg/models"
)
|
||||
|
||||
// Fixed identity strings and HTTP paths for the UPnP media server.
const (
	serverField                 = "Linux/3.4 DLNADOC/1.50 UPnP/1.0 DMS/1.0" // value of the Server HTTP header
	rootDeviceType              = "urn:schemas-upnp-org:device:MediaServer:1"
	rootDeviceModelName         = "dms 1.0xb"
	resPath                     = "/res"          // media resource path
	iconPath                    = "/icon"         // content icon path
	rootDescPath                = "/rootDesc.xml" // root device description
	contentDirectorySCPDURL     = "/scpd/ContentDirectory.xml"
	contentDirectoryEventSubURL = "/evt/ContentDirectory"
	serviceControlURL           = "/ctl" // SOAP control endpoint shared by all services
	deviceIconPath              = "/deviceIcon"
)
|
||||
|
||||
func makeDeviceUuid(unique string) string {
|
||||
h := md5.New()
|
||||
if _, err := io.WriteString(h, unique); err != nil {
|
||||
panic("makeDeviceUuid write failed: " + err.Error())
|
||||
}
|
||||
buf := h.Sum(nil)
|
||||
return upnp.FormatUUID(buf)
|
||||
}
|
||||
|
||||
// service groups a UPnP service definition with its SCPD XML
// description.
type service struct {
	upnp.Service
	SCPD string // service description XML served at SCPDURL
}
|
||||
|
||||
// services are the exposed UPnP AV services, each paired with its SCPD
// XML. SCPDURL is filled in by init.
var services = []*service{
	{
		Service: upnp.Service{
			ServiceType: "urn:schemas-upnp-org:service:ContentDirectory:1",
			ServiceId:   "urn:upnp-org:serviceId:ContentDirectory",
			EventSubURL: contentDirectoryEventSubURL,
			ControlURL:  serviceControlURL,
		},
		SCPD: contentDirectoryServiceDescription,
	},
	{
		Service: upnp.Service{
			ServiceType: "urn:schemas-upnp-org:service:ConnectionManager:1",
			ServiceId:   "urn:upnp-org:serviceId:ConnectionManager",
			ControlURL:  serviceControlURL,
		},
		SCPD: connectionManagerServiceDescription,
	},
	{
		Service: upnp.Service{
			ServiceType: "urn:microsoft.com:service:X_MS_MediaReceiverRegistrar:1",
			ServiceId:   "urn:microsoft.com:serviceId:X_MS_MediaReceiverRegistrar",
			ControlURL:  serviceControlURL,
		},
		SCPD: xmsMediaReceiverServiceDescription,
	},
}
|
||||
|
||||
// Derive each service's SCPD URL from its service ID.
// NOTE(review): this init is duplicated later in the file ("Set the
// SCPD serve paths."); both are idempotent, but one of the two can be
// removed.
func init() {
	for _, s := range services {
		p := path.Join("/scpd", s.ServiceId)
		s.SCPDURL = p
	}
}
|
||||
|
||||
// devices returns the UPnP device types exposed by this server.
func devices() []string {
	deviceTypes := []string{
		"urn:schemas-upnp-org:device:MediaServer:1",
	}
	return deviceTypes
}
|
||||
|
||||
func serviceTypes() (ret []string) {
|
||||
for _, s := range services {
|
||||
ret = append(ret, s.ServiceType)
|
||||
}
|
||||
return
|
||||
}
|
||||
// httpPort returns the TCP port the HTTP listener is bound to.
func (me *Server) httpPort() int {
	return me.HTTPConn.Addr().(*net.TCPAddr).Port
}
|
||||
|
||||
// serveHTTP serves the UPnP HTTP endpoints on me.HTTPConn until the
// listener fails or the server is closed. It returns nil when the error
// was caused by an intentional close, otherwise the serve error.
func (me *Server) serveHTTP() error {
	srv := &http.Server{
		Handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			if me.LogHeaders {
				logger.Debugf("%s %s", r.Method, r.RequestURI)
				for k, v := range r.Header {
					logger.Debugf("%s: %s", k, v)
				}
			}
			// Headers expected by UPnP control points.
			w.Header().Set("Ext", "")
			w.Header().Set("Server", serverField)
			// Wrap the writer so response headers can also be logged.
			me.httpServeMux.ServeHTTP(&mitmRespWriter{
				ResponseWriter: w,
				logHeader:      me.LogHeaders,
			}, r)
		}),
	}
	err := srv.Serve(me.HTTPConn)
	// Serve always returns a non-nil error; suppress it if the listener
	// was shut down deliberately.
	select {
	case <-me.closed:
		return nil
	default:
		return err
	}
}
|
||||
|
||||
// ssdpInterfaceFlags are the flags a network interface must have for
// SSDP to be expected to work on it (up and multicast-capable).
const ssdpInterfaceFlags = net.FlagUp | net.FlagMulticast
|
||||
|
||||
func (me *Server) doSSDP() {
|
||||
active := 0
|
||||
stopped := make(chan struct{})
|
||||
for _, if_ := range me.Interfaces {
|
||||
active++
|
||||
go func(if_ net.Interface) {
|
||||
defer func() {
|
||||
stopped <- struct{}{}
|
||||
}()
|
||||
me.ssdpInterface(if_)
|
||||
}(if_)
|
||||
}
|
||||
for active > 0 {
|
||||
<-stopped
|
||||
active--
|
||||
}
|
||||
}
|
||||
|
||||
// ssdpInterface runs an SSDP server on a single interface, blocking
// until the SSDP server stops or the DLNA server is closed. Expected
// failures on unusable interfaces are silently ignored.
func (me *Server) ssdpInterface(if_ net.Interface) {
	s := ssdp.Server{
		Interface: if_,
		Devices:   devices(),
		Services:  serviceTypes(),
		Location: func(ip net.IP) string {
			return me.location(ip)
		},
		Server:         serverField,
		UUID:           me.rootDeviceUUID,
		NotifyInterval: me.NotifyInterval,
	}
	if err := s.Init(); err != nil {
		if if_.Flags&ssdpInterfaceFlags != ssdpInterfaceFlags {
			// Didn't expect it to work anyway.
			return
		}
		if strings.Contains(err.Error(), "listen") {
			// OSX has a lot of dud interfaces. Failure to create a socket on
			// the interface are what we're expecting if the interface is no
			// good.
			return
		}
		logger.Errorf("error creating ssdp server on %s: %s", if_.Name, err)
		return
	}
	defer s.Close()
	logger.Debugf("started SSDP on %s", if_.Name)
	// Serve in the background so we can also react to server shutdown.
	stopped := make(chan struct{})
	go func() {
		defer close(stopped)
		if err := s.Serve(); err != nil {
			logger.Errorf("%q: %q\n", if_.Name, err)
		}
	}()
	select {
	case <-me.closed:
		// Returning will close the server.
	case <-stopped:
	}
}
|
||||
|
||||
var (
	// startTime is the process start time (set in init); used as the
	// modification time when serving static descriptors.
	startTime time.Time
)
|
||||
|
||||
// Icon is a device icon served to UPnP control points.
type Icon struct {
	Width, Height, Depth int
	Mimetype             string
	io.ReadSeeker // icon image data
}
|
||||
|
||||
// Server is the DLNA media server: it serves UPnP device/service
// descriptions and media over HTTP and announces itself via SSDP.
type Server struct {
	HTTPConn       net.Listener
	FriendlyName   string
	Interfaces     []net.Interface // interfaces to announce on via SSDP
	httpServeMux   *http.ServeMux
	RootObjectPath string
	rootDescXML    []byte // cached root device description document
	rootDeviceUUID string
	closed         chan struct{} // closed to signal shutdown
	ssdpStopped    chan struct{}
	// The service SOAP handler keyed by service URN.
	services   map[string]UPnPService
	LogHeaders bool // log HTTP request/response headers for debugging
	Icons      []Icon
	// Stall event subscription requests until they drop. A workaround for
	// some bad clients.
	StallEventSubscribe bool
	// Time interval between SSPD announces
	NotifyInterval time.Duration

	txnManager         models.TransactionManager
	sceneServer        sceneServer
	ipWhitelistManager *ipWhitelistManager
}
|
||||
|
||||
// UPnPService is a UPnP SOAP service: it handles SOAP actions and event
// subscription requests.
type UPnPService interface {
	Handle(action string, argsXML []byte, r *http.Request) (respArgs map[string]string, err error)
	Subscribe(callback []*url.URL, timeoutSeconds int) (sid string, actualTimeout int, err error)
	Unsubscribe(sid string) error
}
|
||||
|
||||
// Cache is a minimal key/value cache interface.
type Cache interface {
	Set(key interface{}, value interface{})
	Get(key interface{}) (value interface{}, ok bool)
}
|
||||
|
||||
// Record the process start time, used as the modification time for
// statically served content.
func init() {
	startTime = time.Now()
}
|
||||
|
||||
// xmlMarshalOrPanic marshals value as indented XML, panicking on
// failure (marshalling the fixed internal types is not expected to
// fail).
func xmlMarshalOrPanic(value interface{}) []byte {
	out, err := xml.MarshalIndent(value, "", " ")
	if err != nil {
		panic(fmt.Sprintf("xmlMarshalOrPanic failed to marshal %v: %s", value, err))
	}
	return out
}
|
||||
|
||||
// mitmRespWriter wraps an http.ResponseWriter so that the response
// status and headers can optionally be logged for debugging.
type mitmRespWriter struct {
	http.ResponseWriter
	loggedHeader bool // true once the header has been logged
	logHeader    bool // whether header logging is enabled
}
|
||||
|
||||
// WriteHeader logs the response header (if enabled) before writing it.
func (me *mitmRespWriter) WriteHeader(code int) {
	me.doLogHeader(code)
	me.ResponseWriter.WriteHeader(code)
}
|
||||
|
||||
func (me *mitmRespWriter) doLogHeader(code int) {
|
||||
if !me.logHeader {
|
||||
return
|
||||
}
|
||||
logger.Debugf("Response: %d", code)
|
||||
for k, v := range me.Header() {
|
||||
logger.Debugf("%s: %s", k, v)
|
||||
}
|
||||
me.loggedHeader = true
|
||||
}
|
||||
|
||||
// Write logs an implicit 200 header first if no header was logged yet,
// then writes the body.
func (me *mitmRespWriter) Write(b []byte) (int, error) {
	if !me.loggedHeader {
		me.doLogHeader(200)
	}
	return me.ResponseWriter.Write(b)
}
|
||||
|
||||
// CloseNotify forwards to the wrapped writer's CloseNotifier.
//
// Deprecated: the CloseNotifier interface predates Go's context package.
// New code should use Request.Context instead.
func (me *mitmRespWriter) CloseNotify() <-chan bool {
	return me.ResponseWriter.(http.CloseNotifier).CloseNotify()
}
|
||||
|
||||
// Set the SCPD serve paths.
// NOTE(review): this init duplicates the earlier init that sets
// SCPDURL; both are idempotent, but one of the two can be removed.
func init() {
	for _, s := range services {
		p := path.Join("/scpd", s.ServiceId)
		s.SCPDURL = p
	}
}
|
||||
|
||||
// Install handlers to serve SCPD for each UPnP service.
func handleSCPDs(mux *http.ServeMux) {
	for _, s := range services {
		// Convert the description once, into a per-iteration variable, so
		// each registered handler serves its own service's document.
		desc := []byte(s.SCPD)
		mux.HandleFunc(s.SCPDURL, func(w http.ResponseWriter, r *http.Request) {
			w.Header().Set("content-type", `text/xml; charset="utf-8"`)
			http.ServeContent(w, r, ".xml", startTime, bytes.NewReader(desc))
		})
	}
}
|
||||
|
||||
// Marshal SOAP response arguments into a response XML snippet: a
// <u:{Action}Response> element wrapping one child element per argument.
func marshalSOAPResponse(sa upnp.SoapAction, args map[string]string) []byte {
	soapArgs := make([]soap.Arg, 0, len(args))
	for name, value := range args {
		arg := soap.Arg{
			XMLName: xml.Name{Local: name},
			Value:   value,
		}
		soapArgs = append(soapArgs, arg)
	}
	response := fmt.Sprintf(
		`<u:%[1]sResponse xmlns:u="%[2]s">%[3]s</u:%[1]sResponse>`,
		sa.Action, sa.ServiceURN.String(), xmlMarshalOrPanic(soapArgs),
	)
	return []byte(response)
}
|
||||
|
||||
// Handle a SOAP request and return the response arguments or UPnP error.
// The action is dispatched to the service named by sa.Type; unknown
// services yield a UPnP error.
func (me *Server) soapActionResponse(sa upnp.SoapAction, actionRequestXML []byte, r *http.Request) (map[string]string, error) {
	service, found := me.services[sa.Type]
	if !found {
		// TODO: What's the invalid service error?!
		return nil, upnp.Errorf(upnp.InvalidActionErrorCode, "Invalid service: %s", sa.Type)
	}

	logger.Tracef("%s::Handle %s - %s", sa.Type, sa.Action, actionRequestXML)
	respArgs, err := service.Handle(sa.Action, actionRequestXML, r)
	if err == nil {
		logger.Tracef("< %v", respArgs)
	}
	return respArgs, err
}
|
||||
|
||||
// Handle a service control HTTP request.
// Rejects clients outside the IP whitelist, parses the SOAPACTION header
// and SOAP envelope, dispatches the action, and writes either the SOAP
// response (200) or a UPnP fault (500).
func (me *Server) serviceControlHandler(w http.ResponseWriter, r *http.Request) {
	// error deliberately ignored: RemoteAddr is a host:port set by net/http
	clientIp, _, _ := net.SplitHostPort(r.RemoteAddr)

	// NOTE(review): if clientIp fails to parse, ParseIP returns nil and ip
	// becomes the string "<nil>", which is then whitelist-checked — TODO
	// confirm this is intended.
	ip := net.ParseIP(clientIp).String()
	if !me.ipWhitelistManager.ipAllowed(ip) {
		// only log if we haven't seen it
		if !me.ipWhitelistManager.addRecent(ip) {
			logger.Infof("not allowed client %s", clientIp)
		}

		http.Error(w, "forbidden", http.StatusForbidden)
		return
	}

	soapActionString := r.Header.Get("SOAPACTION")
	soapAction, err := upnp.ParseActionHTTPHeader(soapActionString)
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	var env soap.Envelope
	if err := xml.NewDecoder(r.Body).Decode(&env); err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	// AwoX/1.1 UPnP/1.0 DLNADOC/1.50
	w.Header().Set("Content-Type", `text/xml; charset="utf-8"`)
	w.Header().Set("Ext", "")
	w.Header().Set("Server", serverField)
	// run the action; a handler error is converted into a SOAP fault body
	// served with status 500
	soapRespXML, code := func() ([]byte, int) {
		respArgs, err := me.soapActionResponse(soapAction, env.Body.Action, r)
		if err != nil {
			upnpErr := upnp.ConvertError(err)
			return xmlMarshalOrPanic(soap.NewFault("UPnPError", upnpErr)), 500
		}
		return marshalSOAPResponse(soapAction, respArgs), 200
	}()
	bodyStr := fmt.Sprintf(`<?xml version="1.0" encoding="utf-8" standalone="yes"?><s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/"><s:Body>%s</s:Body></s:Envelope>`, soapRespXML)
	w.WriteHeader(code)
	if _, err := w.Write([]byte(bodyStr)); err != nil {
		logger.Errorf(err.Error())
	}
}
|
||||
|
||||
// serveIcon serves the screenshot of the scene identified by the "scene"
// query parameter as the item's icon. Responds with an empty 200 body when
// the parameter is missing or does not resolve to a scene.
func (me *Server) serveIcon(w http.ResponseWriter, r *http.Request) {
	sceneId := r.URL.Query().Get("scene")
	if sceneId == "" {
		return
	}

	var scene *models.Scene
	err := me.txnManager.WithReadTxn(context.Background(), func(r models.ReaderRepository) error {
		idInt, err := strconv.Atoi(sceneId)
		if err != nil {
			// non-numeric id: treat as "not found" rather than failing the txn
			return nil
		}
		scene, _ = r.Scene().Find(idInt)
		return nil
	})
	// previously the transaction error was silently discarded; log it so
	// database failures serving icons are visible
	if err != nil {
		logger.Warnf("error in read transaction while serving icon: %s", err.Error())
	}

	if scene == nil {
		return
	}

	me.sceneServer.ServeScreenshot(scene, w, r)
}
|
||||
|
||||
// contentDirectoryInitialEvent sends the initial NOTIFY eventing message
// for a new ContentDirectory subscription (sid) to each callback URL,
// carrying the current SystemUpdateID value. Failures are logged per URL
// and do not abort notification of the remaining URLs.
func (me *Server) contentDirectoryInitialEvent(urls []*url.URL, sid string) {
	body := xmlMarshalOrPanic(upnp.PropertySet{
		Properties: []upnp.Property{
			{
				Variable: upnp.Variable{
					XMLName: xml.Name{
						Local: "SystemUpdateID",
					},
					Value: "0",
				},
			},
			// upnp.Property{
			// Variable: upnp.Variable{
			// XMLName: xml.Name{
			// Local: "ContainerUpdateIDs",
			// },
			// },
			// },
			// upnp.Property{
			// Variable: upnp.Variable{
			// XMLName: xml.Name{
			// Local: "TransferIDs",
			// },
			// },
			// },
		},
		Space: "urn:schemas-upnp-org:event-1-0",
	})
	body = append([]byte(`<?xml version="1.0"?>`+"\n"), body...)
	for _, _url := range urls {
		bodyReader := bytes.NewReader(body)
		req, err := http.NewRequest("NOTIFY", _url.String(), bodyReader)
		if err != nil {
			logger.Errorf("Could not create a request to notify %s: %s", _url.String(), err)
			continue
		}
		// headers are set via the map directly to keep them fully
		// upper-cased, bypassing net/http's canonicalization
		req.Header["CONTENT-TYPE"] = []string{`text/xml; charset="utf-8"`}
		req.Header["NT"] = []string{"upnp:event"}
		req.Header["NTS"] = []string{"upnp:propchange"}
		req.Header["SID"] = []string{sid}
		req.Header["SEQ"] = []string{"0"}
		// req.Header["TRANSFER-ENCODING"] = []string{"chunked"}
		// req.ContentLength = int64(bodyReader.Len())
		resp, err := http.DefaultClient.Do(req)
		if err != nil {
			logger.Errorf("Could not notify %s: %s", _url.String(), err)
			continue
		}
		// drain and debug-log any response body, then close so the
		// connection can be reused
		b, _ := ioutil.ReadAll(resp.Body)
		if len(b) > 0 {
			logger.Debug(string(b))
		}
		resp.Body.Close()
	}
}
|
||||
|
||||
// contentDirectoryEventSubHandler handles SUBSCRIBE requests on the
// ContentDirectory service's event endpoint. Eventing is only partially
// implemented; StallEventSubscribe works around a problematic client by
// never answering the request.
func (me *Server) contentDirectoryEventSubHandler(w http.ResponseWriter, r *http.Request) {
	if me.StallEventSubscribe {
		// I have an LG TV that doesn't like my eventing implementation.
		// Returning unimplemented (501?) errors, results in repeat subscribe
		// attempts which hits some kind of error count limit on the TV
		// causing it to forcefully disconnect. It also won't work if the CDS
		// service doesn't include an EventSubURL. The best thing I can do is
		// cause every attempt to subscribe to timeout on the TV end, which
		// reduces the error rate enough that the TV continues to operate
		// without eventing.
		//
		// I've not found a reliable way to identify this TV, since it and
		// others don't seem to include any client-identifying headers on
		// SUBSCRIBE requests.
		//
		// TODO: Get eventing to work with the problematic TV.
		t := time.Now()
		// hold the connection open, never responding, until the client
		// gives up
		<-r.Context().Done()
		logger.Debugf("stalled subscribe connection went away after %s", time.Since(t))
		return
	}
	// The following code is a work in progress. It partially implements
	// the spec on eventing but hasn't been completed as I have nothing to
	// test it with.
	service := me.services["ContentDirectory"]
	if r.Method == "SUBSCRIBE" && r.Header.Get("SID") == "" {
		// new subscription: register the callback URLs and issue a SID
		urls := upnp.ParseCallbackURLs(r.Header.Get("CALLBACK"))
		var timeout int
		// Sscanf error ignored: timeout stays 0 if the TIMEOUT header is
		// absent or malformed
		fmt.Sscanf(r.Header.Get("TIMEOUT"), "Second-%d", &timeout)
		sid, timeout, _ := service.Subscribe(urls, timeout)
		w.Header()["SID"] = []string{sid}
		w.Header()["TIMEOUT"] = []string{fmt.Sprintf("Second-%d", timeout)}
		// TODO: Shouldn't have to do this to get headers logged.
		w.WriteHeader(http.StatusOK)
		// send the initial property-set event shortly after replying
		go func() {
			time.Sleep(100 * time.Millisecond)
			me.contentDirectoryInitialEvent(urls, sid)
		}()
	} else if r.Method == "SUBSCRIBE" {
		// subscription renewal (SID present) is not supported
		http.Error(w, "meh", http.StatusPreconditionFailed)
	} else {
		logger.Debugf("unhandled event method: %s", r.Method)
	}
}
|
||||
|
||||
// initMux installs all of the DLNA server's HTTP handlers on mux: the root
// HTML page, ContentDirectory eventing, icons, direct scene streaming, the
// root device description, SCPD documents, SOAP control, device icons, and
// pprof debugging.
func (me *Server) initMux(mux *http.ServeMux) {
	mux.HandleFunc("/", func(resp http.ResponseWriter, req *http.Request) {
		resp.Header().Set("content-type", "text/html")
		err := rootTmpl.Execute(resp, struct {
			Readonly bool
			Path     string
		}{
			true,
			me.RootObjectPath,
		})
		if err != nil {
			logger.Errorf(err.Error())
		}
	})
	mux.HandleFunc(contentDirectoryEventSubURL, me.contentDirectoryEventSubHandler)
	mux.HandleFunc(iconPath, me.serveIcon)
	mux.HandleFunc(resPath, func(w http.ResponseWriter, r *http.Request) {
		sceneId := r.URL.Query().Get("scene")
		var scene *models.Scene
		// NOTE(review): the transaction error is discarded here; an invalid
		// id and a database failure are both treated as "scene not found".
		me.txnManager.WithReadTxn(context.Background(), func(r models.ReaderRepository) error {
			sceneIdInt, err := strconv.Atoi(sceneId)
			if err != nil {
				return nil
			}
			scene, _ = r.Scene().Find(sceneIdInt)
			return nil
		})

		if scene == nil {
			return
		}

		me.sceneServer.StreamSceneDirect(scene, w, r)
	})
	mux.HandleFunc(rootDescPath, func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("content-type", `text/xml; charset="utf-8"`)
		w.Header().Set("content-length", fmt.Sprint(len(me.rootDescXML)))
		w.Header().Set("server", serverField)
		w.Write(me.rootDescXML)
	})
	handleSCPDs(mux)
	mux.HandleFunc(serviceControlURL, me.serviceControlHandler)
	mux.HandleFunc("/debug/pprof/", pprof.Index)
	for i, di := range me.Icons {
		// BUG FIX: shadow the loop variable so each closure captures this
		// iteration's icon. Previously every icon handler shared the single
		// range variable and all icon URLs served the last icon's data.
		di := di
		mux.HandleFunc(fmt.Sprintf("%s/%d", deviceIconPath, i), func(w http.ResponseWriter, r *http.Request) {
			w.Header().Set("Content-Type", di.Mimetype)
			// NOTE(review): di.ReadSeeker is shared across requests, so
			// concurrent requests for the same icon would race on its seek
			// position — TODO confirm and consider serving from a byte slice.
			http.ServeContent(w, r, "", time.Time{}, di.ReadSeeker)
		})
	}
}
|
||||
|
||||
// initServices instantiates the UPnP services implemented by this server
// and indexes them by name for SOAP action dispatch (see soapActionResponse).
func (me *Server) initServices() {
	me.services = map[string]UPnPService{
		"ContentDirectory": &contentDirectoryService{
			Server:     me,
			txnManager: me.txnManager,
		},
		"ConnectionManager": &connectionManagerService{
			Server: me,
		},
		"X_MS_MediaReceiverRegistrar": &mediaReceiverRegistrarService{
			Server: me,
		},
	}
}
|
||||
|
||||
// Serve starts the DLNA server: it initializes the UPnP services, the HTTP
// listener and interface list, marshals the root device description
// document, starts SSDP advertisement in a background goroutine, and then
// blocks serving HTTP until the listener is closed (see Close).
func (me *Server) Serve() (err error) {
	me.initServices()
	me.closed = make(chan struct{})
	if me.HTTPConn == nil {
		// no listener supplied by the caller: listen on an OS-assigned port
		me.HTTPConn, err = net.Listen("tcp", "")
		if err != nil {
			return
		}
	}
	if me.Interfaces == nil {
		// default to every interface that is up and has a sane MTU.
		// Note: this err deliberately shadows the named return — an error
		// enumerating interfaces is logged, not fatal.
		ifs, err := net.Interfaces()
		if err != nil {
			logger.Errorf(err.Error())
		}
		var tmp []net.Interface
		for _, if_ := range ifs {
			if if_.Flags&net.FlagUp == 0 || if_.MTU <= 0 {
				continue
			}
			tmp = append(tmp, if_)
		}
		me.Interfaces = tmp
	}
	me.httpServeMux = http.NewServeMux()
	me.rootDeviceUUID = makeDeviceUuid(me.FriendlyName)
	// build the UPnP root device description served at rootDescPath
	me.rootDescXML, err = xml.MarshalIndent(
		upnp.DeviceDesc{
			SpecVersion: upnp.SpecVersion{Major: 1, Minor: 0},
			Device: upnp.Device{
				DeviceType:   rootDeviceType,
				FriendlyName: me.FriendlyName,
				Manufacturer: me.FriendlyName,
				ModelName:    rootDeviceModelName,
				UDN:          me.rootDeviceUUID,
				ServiceList: func() (ss []upnp.Service) {
					for _, s := range services {
						ss = append(ss, s.Service)
					}
					return
				}(),
				IconList: func() (ret []upnp.Icon) {
					for i, di := range me.Icons {
						ret = append(ret, upnp.Icon{
							Height:   di.Height,
							Width:    di.Width,
							Depth:    di.Depth,
							Mimetype: di.Mimetype,
							URL:      fmt.Sprintf("%s/%d", deviceIconPath, i),
						})
					}
					return
				}(),
			},
		},
		" ", " ")
	if err != nil {
		return
	}
	me.rootDescXML = append([]byte(`<?xml version="1.0"?>`), me.rootDescXML...)
	logger.Debug("HTTP srv on", me.HTTPConn.Addr())
	me.initMux(me.httpServeMux)
	me.ssdpStopped = make(chan struct{})
	// advertise via SSDP until doSSDP returns; Close waits on ssdpStopped
	go func() {
		me.doSSDP()
		close(me.ssdpStopped)
	}()
	return me.serveHTTP()
}
|
||||
|
||||
// Close shuts the server down: it signals shutdown via the closed channel,
// closes the HTTP listener (unblocking Serve), and waits for the SSDP
// goroutine to finish. Returns the listener's close error, if any.
func (me *Server) Close() (err error) {
	close(me.closed)
	err = me.HTTPConn.Close()
	<-me.ssdpStopped
	return
}
|
||||
|
||||
// didl_lite wraps chardata in a DIDL-Lite document element carrying the
// standard DIDL-Lite, Dublin Core, UPnP and DLNA namespace declarations.
func didl_lite(chardata string) string {
	const (
		opening = `<DIDL-Lite` +
			` xmlns:dc="http://purl.org/dc/elements/1.1/"` +
			` xmlns:upnp="urn:schemas-upnp-org:metadata-1-0/upnp/"` +
			` xmlns="urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/"` +
			` xmlns:dlna="urn:schemas-dlna-org:metadata-1-0/">`
		closing = `</DIDL-Lite>`
	)
	return opening + chardata + closing
}
|
||||
|
||||
// location returns the HTTP URL at which the root device description is
// served when reached via the given local IP address.
func (me *Server) location(ip net.IP) string {
	addr := net.TCPAddr{
		IP:   ip,
		Port: me.httpPort(),
	}
	u := url.URL{
		Scheme: "http",
		Host:   addr.String(),
		Path:   rootDescPath,
	}
	return u.String()
}
|
||||
47
pkg/dlna/html.go
Normal file
47
pkg/dlna/html.go
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
package dlna
|
||||
|
||||
// From: https://github.com/anacrolix/dms
|
||||
// Copyright (c) 2012, Matt Joiner <anacrolix@gmail.com>.
|
||||
// All rights reserved.
|
||||
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are met:
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above copyright
|
||||
// notice, this list of conditions and the following disclaimer in the
|
||||
// documentation and/or other materials provided with the distribution.
|
||||
// * Neither the name of the <organization> nor the
|
||||
// names of its contributors may be used to endorse or promote products
|
||||
// derived from this software without specific prior written permission.
|
||||
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
// DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
|
||||
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import (
|
||||
"html/template"
|
||||
)
|
||||
|
||||
var (
	// rootTmpl renders the minimal HTML form served at the DLNA server's
	// root path (see Server.initMux); parsed in init below.
	rootTmpl *template.Template
)
|
||||
|
||||
// Parse the root-page template at startup. template.Must panics on a parse
// error, which is appropriate for a compile-time-constant template. The
// form displays the media root path; in this server it is rendered with
// Readonly=true, so the input and submit button are disabled.
func init() {
	rootTmpl = template.Must(template.New("root").Parse(
		`<form method="post">
Path: <input type="text"
name="path"
{{if .Readonly}} readonly="readonly"{{end}}
value="{{.Path}}"
/>
<input type="submit" value="Update"{{if .Readonly}} disabled="disabled"{{end}}/>
</form>`))
}
|
||||
27
pkg/dlna/mrrs.go
Normal file
27
pkg/dlna/mrrs.go
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
package dlna
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/anacrolix/dms/upnp"
|
||||
)
|
||||
|
||||
// mediaReceiverRegistrarService implements the X_MS_MediaReceiverRegistrar
// UPnP service (registered under that name in Server.initServices). It
// embeds the Server for access to its state and upnp.Eventing for the
// Subscribe/Unsubscribe portion of the UPnPService interface.
type mediaReceiverRegistrarService struct {
	*Server
	upnp.Eventing
}
|
||||
|
||||
// Handle implements the X_MS_MediaReceiverRegistrar actions: authorization
// and validation queries always succeed, and device registration replies
// with the root device's UUID. Unknown actions yield a UPnP invalid-action
// error.
func (mrrs *mediaReceiverRegistrarService) Handle(action string, argsXML []byte, r *http.Request) (map[string]string, error) {
	if action == "IsAuthorized" || action == "IsValidated" {
		return map[string]string{"Result": "1"}, nil
	}
	if action == "RegisterDevice" {
		return map[string]string{"RegistrationRespMsg": mrrs.rootDeviceUUID}, nil
	}
	return nil, upnp.InvalidActionError
}
|
||||
80
pkg/dlna/paging.go
Normal file
80
pkg/dlna/paging.go
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
package dlna
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"strconv"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
// scenePager splits the results of a scene filter into fixed-size pages
// (pageSize) of browsable objects beneath a parent container.
type scenePager struct {
	sceneFilter *models.SceneFilterType // filter selecting the scenes to page over
	parentID    string                  // object ID of the containing folder
}
|
||||
|
||||
// getPageID returns the object ID for the given page number beneath this
// pager's parent container.
func (p *scenePager) getPageID(page int) string {
	return fmt.Sprintf("%s/page/%d", p.parentID, page)
}
|
||||
|
||||
// getPages returns one storage-folder object per page of results. Each
// folder is titled "Page N", and roughly every tenth page additionally gets
// a title prefix taken from the first scene on that page (the per-page
// query is expensive, so it is not done for every page).
func (p *scenePager) getPages(r models.ReaderRepository, total int) ([]interface{}, error) {
	var objs []interface{}

	// get the first scene of each page to set an appropriate title
	pages := int(math.Ceil(float64(total) / float64(pageSize)))

	singlePageSize := 1
	sort := "title"
	findFilter := &models.FindFilterType{
		PerPage: &singlePageSize,
		Sort:    &sort,
	}

	for page := 1; page <= pages; page++ {
		// TODO - this is really slow. Not sure if there's a better way
		title := fmt.Sprintf("Page %d", page)
		if pages <= 10 || (page-1)%(pages/10) == 0 {
			thisPage := ((page - 1) * pageSize) + 1
			findFilter.Page = &thisPage
			scenes, _, err := r.Scene().Query(p.sceneFilter, findFilter)
			if err != nil {
				return nil, err
			}

			// guard against an empty result (e.g. the scene count changed
			// since total was computed) — previously scenes[0] was indexed
			// unconditionally and could panic
			if len(scenes) > 0 {
				sceneTitle := scenes[0].GetTitle()

				// use the first three letters as a prefix
				if len(sceneTitle) > 3 {
					sceneTitle = sceneTitle[0:3]
				}

				title = title + fmt.Sprintf(" (%s...)", sceneTitle)
			}
		}

		objs = append(objs, makeStorageFolder(p.getPageID(page), title, p.parentID))
	}

	return objs, nil
}
|
||||
|
||||
func (p *scenePager) getPageVideos(r models.ReaderRepository, page int, host string) ([]interface{}, error) {
|
||||
var objs []interface{}
|
||||
|
||||
sort := "title"
|
||||
findFilter := &models.FindFilterType{
|
||||
PerPage: &pageSize,
|
||||
Page: &page,
|
||||
Sort: &sort,
|
||||
}
|
||||
|
||||
scenes, _, err := r.Scene().Query(p.sceneFilter, findFilter)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, s := range scenes {
|
||||
objs = append(objs, sceneToContainer(s, p.parentID, host))
|
||||
}
|
||||
|
||||
return objs, nil
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue