Makefile

@@ -7,21 +7,15 @@ ifeq (${SHELL}, cmd)
endif

ifdef IS_WIN_SHELL
SEPARATOR := &&
SET := set
RM := del /s /q
RMDIR := rmdir /s /q
PWD := $(shell echo %cd%)
else
SEPARATOR := ;
SET := export
RM := rm -f
RMDIR := rm -rf
endif

# set LDFLAGS environment variable to any extra ldflags required
# set OUTPUT to generate a specific binary name

LDFLAGS := $(LDFLAGS)
ifdef OUTPUT
OUTPUT := -o $(OUTPUT)

@@ -34,10 +28,16 @@ export CGO_ENABLED = 1
GO_BUILD_TAGS_WINDOWS := sqlite_omit_load_extension sqlite_stat4 osusergo
GO_BUILD_TAGS_DEFAULT = $(GO_BUILD_TAGS_WINDOWS) netgo

.PHONY: release pre-build
# set STASH_NOLEGACY environment variable or uncomment to disable legacy browser support
# STASH_NOLEGACY := true

# set STASH_SOURCEMAPS environment variable or uncomment to enable UI sourcemaps
# STASH_SOURCEMAPS := true

.PHONY: release
release: pre-ui generate ui build-release

.PHONY: pre-build
pre-build:
ifndef BUILD_DATE
	$(eval BUILD_DATE := $(shell go run -mod=vendor scripts/getDate.go))
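pre-build captures BUILD_DATE by running scripts/getDate.go. That script is not part of this diff; a minimal sketch of what such a helper plausibly looks like is shown below — the exact output format used by the real script is an assumption.

// scripts/getDate.go (illustrative sketch only; the real script may differ)
package main

import (
	"fmt"
	"time"
)

func main() {
	// Print the current date so the Makefile can capture it into BUILD_DATE.
	// The format string here is an assumption, not the repository's actual one.
	fmt.Print(time.Now().Format("2006-01-02 15:04:05"))
}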
@@ -55,29 +55,37 @@ ifndef OFFICIAL_BUILD
	$(eval OFFICIAL_BUILD := false)
endif

.PHONY: build-flags
build-flags: pre-build
	$(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/internal/api.buildstamp=$(BUILD_DATE)')
	$(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/internal/api.githash=$(GITHASH)')
	$(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/internal/api.version=$(STASH_VERSION)')
	$(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/internal/manager/config.officialBuild=$(OFFICIAL_BUILD)')
ifndef GO_BUILD_TAGS
	$(eval GO_BUILD_TAGS := $(GO_BUILD_TAGS_DEFAULT))
endif

	$(eval BUILD_FLAGS := -mod=vendor -v -tags "$(GO_BUILD_TAGS)" $(GO_BUILD_FLAGS) -ldflags "$(LDFLAGS) $(EXTRA_LDFLAGS)")

# NOTE: the build target still includes netgo because we cannot detect
# Windows easily from the Makefile.
build: pre-build
.PHONY: build
build: build-flags
build:
	$(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/internal/api.version=$(STASH_VERSION)' -X 'github.com/stashapp/stash/internal/api.buildstamp=$(BUILD_DATE)' -X 'github.com/stashapp/stash/internal/api.githash=$(GITHASH)')
	$(eval LDFLAGS := $(LDFLAGS) -X 'github.com/stashapp/stash/internal/manager/config.officialBuild=$(OFFICIAL_BUILD)')
	go build $(OUTPUT) -mod=vendor -v -tags "$(GO_BUILD_TAGS)" $(GO_BUILD_FLAGS) -ldflags "$(LDFLAGS) $(EXTRA_LDFLAGS) $(PLATFORM_SPECIFIC_LDFLAGS)" ./cmd/stash
	go build $(OUTPUT) $(BUILD_FLAGS) ./cmd/stash

# strips debug symbols from the release build
.PHONY: build-release
build-release: EXTRA_LDFLAGS := -s -w
build-release: GO_BUILD_FLAGS := -trimpath
build-release: build

.PHONY: build-release-static
build-release-static: EXTRA_LDFLAGS := -extldflags=-static -s -w
build-release-static: GO_BUILD_FLAGS := -trimpath
build-release-static: build

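The -X flags set in build-flags above overwrite package-level string variables at link time. Grounded in the flag paths shown (internal/api.buildstamp, .githash, .version and internal/manager/config.officialBuild), the receiving side looks roughly like the sketch below; the exact declarations and the accessor function are assumptions, not the repository's actual file.

// internal/api version info (illustrative sketch, not the repository's exact code)
package api

// Plain string variables so the linker can inject values via
// go build -ldflags "-X 'github.com/stashapp/stash/internal/api.version=v0.0.0' ...".
var (
	version    string
	buildstamp string
	githash    string
)

// GetVersion is a hypothetical accessor; the real API surface may differ.
func GetVersion() (string, string, string) {
	return version, buildstamp, githash
}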
# cross-compile- targets should be run within the compiler docker container
.PHONY: cross-compile-windows
cross-compile-windows: export GOOS := windows
cross-compile-windows: export GOARCH := amd64
cross-compile-windows: export CC := x86_64-w64-mingw32-gcc
@@ -86,6 +94,7 @@ cross-compile-windows: OUTPUT := -o dist/stash-win.exe
cross-compile-windows: GO_BUILD_TAGS := $(GO_BUILD_TAGS_WINDOWS)
cross-compile-windows: build-release-static

.PHONY: cross-compile-macos-intel
cross-compile-macos-intel: export GOOS := darwin
cross-compile-macos-intel: export GOARCH := amd64
cross-compile-macos-intel: export CC := o64-clang

@@ -95,6 +104,7 @@ cross-compile-macos-intel: GO_BUILD_TAGS := $(GO_BUILD_TAGS_DEFAULT)
# can't use static build for OSX
cross-compile-macos-intel: build-release

.PHONY: cross-compile-macos-applesilicon
cross-compile-macos-applesilicon: export GOOS := darwin
cross-compile-macos-applesilicon: export GOARCH := arm64
cross-compile-macos-applesilicon: export CC := oa64e-clang

@@ -104,6 +114,7 @@ cross-compile-macos-applesilicon: GO_BUILD_TAGS := $(GO_BUILD_TAGS_DEFAULT)
# can't use static build for OSX
cross-compile-macos-applesilicon: build-release

.PHONY: cross-compile-macos
cross-compile-macos:
	rm -rf dist/Stash.app dist/Stash-macos.zip
	make cross-compile-macos-applesilicon

@@ -118,18 +129,21 @@ cross-compile-macos:
	cd dist && zip -r Stash-macos.zip Stash.app && cd ..
	rm -rf dist/Stash.app

.PHONY: cross-compile-freebsd
cross-compile-freebsd: export GOOS := freebsd
cross-compile-freebsd: export GOARCH := amd64
cross-compile-freebsd: OUTPUT := -o dist/stash-freebsd
cross-compile-freebsd: GO_BUILD_TAGS += netgo
cross-compile-freebsd: build-release-static

.PHONY: cross-compile-linux
cross-compile-linux: export GOOS := linux
cross-compile-linux: export GOARCH := amd64
cross-compile-linux: OUTPUT := -o dist/stash-linux
cross-compile-linux: GO_BUILD_TAGS := $(GO_BUILD_TAGS_DEFAULT)
cross-compile-linux: build-release-static

.PHONY: cross-compile-linux-arm64v8
cross-compile-linux-arm64v8: export GOOS := linux
cross-compile-linux-arm64v8: export GOARCH := arm64
cross-compile-linux-arm64v8: export CC := aarch64-linux-gnu-gcc

@@ -137,6 +151,7 @@ cross-compile-linux-arm64v8: OUTPUT := -o dist/stash-linux-arm64v8
cross-compile-linux-arm64v8: GO_BUILD_TAGS := $(GO_BUILD_TAGS_DEFAULT)
cross-compile-linux-arm64v8: build-release-static

.PHONY: cross-compile-linux-arm32v7
cross-compile-linux-arm32v7: export GOOS := linux
cross-compile-linux-arm32v7: export GOARCH := arm
cross-compile-linux-arm32v7: export GOARM := 7

@@ -145,6 +160,7 @@ cross-compile-linux-arm32v7: OUTPUT := -o dist/stash-linux-arm32v7
cross-compile-linux-arm32v7: GO_BUILD_TAGS := $(GO_BUILD_TAGS_DEFAULT)
cross-compile-linux-arm32v7: build-release-static

.PHONY: cross-compile-linux-arm32v6
cross-compile-linux-arm32v6: export GOOS := linux
cross-compile-linux-arm32v6: export GOARCH := arm
cross-compile-linux-arm32v6: export GOARM := 6

@@ -153,6 +169,7 @@ cross-compile-linux-arm32v6: OUTPUT := -o dist/stash-linux-arm32v6
cross-compile-linux-arm32v6: GO_BUILD_TAGS := $(GO_BUILD_TAGS_DEFAULT)
cross-compile-linux-arm32v6: build-release-static

.PHONY: cross-compile-all
cross-compile-all:
	make cross-compile-windows
	make cross-compile-macos-intel

@@ -164,15 +181,16 @@ cross-compile-all:

.PHONY: touch-ui
touch-ui:
ifndef IS_WIN_SHELL
	@mkdir -p ui/v2.5/build
	@touch ui/v2.5/build/index.html
else
ifdef IS_WIN_SHELL
	@if not exist "ui\\v2.5\\build" mkdir ui\\v2.5\\build
	@type nul >> ui/v2.5/build/index.html
else
	@mkdir -p ui/v2.5/build
	@touch ui/v2.5/build/index.html
endif

# Regenerates GraphQL files
.PHONY: generate
generate: generate-backend generate-frontend

.PHONY: generate-frontend

@@ -219,14 +237,14 @@ generate-test-mocks:
# runs server
# sets the config file to use the local dev config
.PHONY: server-start
server-start: export STASH_CONFIG_FILE=config.yml
server-start:
ifndef IS_WIN_SHELL
	@mkdir -p .local
else
server-start: export STASH_CONFIG_FILE := config.yml
server-start: build-flags
ifdef IS_WIN_SHELL
	@if not exist ".local" mkdir .local
else
	@mkdir -p .local
endif
	cd .local && go run ../cmd/stash
	cd .local && go run $(BUILD_FLAGS) ../cmd/stash

# removes local dev config files
.PHONY: server-clean

@@ -239,18 +257,32 @@ server-clean:
pre-ui:
	cd ui/v2.5 && yarn install --frozen-lockfile

.PHONY: ui-env
ui-env: pre-build
	$(eval export VITE_APP_DATE := $(BUILD_DATE))
	$(eval export VITE_APP_GITHASH := $(GITHASH))
	$(eval export VITE_APP_STASH_VERSION := $(STASH_VERSION))
ifdef STASH_NOLEGACY
	$(eval export VITE_APP_NOLEGACY := true)
endif
ifdef STASH_SOURCEMAPS
	$(eval export VITE_APP_SOURCEMAPS := true)
endif

.PHONY: ui
ui: pre-build
	$(SET) VITE_APP_DATE="$(BUILD_DATE)" $(SEPARATOR) \
	$(SET) VITE_APP_GITHASH=$(GITHASH) $(SEPARATOR) \
	$(SET) VITE_APP_STASH_VERSION=$(STASH_VERSION) $(SEPARATOR) \
ui: ui-env
	cd ui/v2.5 && yarn build

.PHONY: ui-nolegacy
ui-nolegacy: STASH_NOLEGACY := true
ui-nolegacy: ui

.PHONY: ui-sourcemaps
ui-sourcemaps: STASH_SOURCEMAPS := true
ui-sourcemaps: ui

.PHONY: ui-start
ui-start: pre-build
	$(SET) VITE_APP_DATE="$(BUILD_DATE)" $(SEPARATOR) \
	$(SET) VITE_APP_GITHASH=$(GITHASH) $(SEPARATOR) \
	$(SET) VITE_APP_STASH_VERSION=$(STASH_VERSION) $(SEPARATOR) \
ui-start: ui-env
	cd ui/v2.5 && yarn start --host

.PHONY: fmt-ui

@@ -5,6 +5,7 @@ https://stashapp.cc
[Docker Hub](https://hub.docker.com/r/stashapp/stash 'DockerHub')
[Open Collective](https://opencollective.com/stashapp)
[Go Report Card](https://goreportcard.com/report/github.com/stashapp/stash)
[Matrix](https://matrix.to/#/#stashapp:unredacted.org)
[Discord](https://discord.gg/2TsNFKt)
[Latest release](https://github.com/stashapp/stash/releases/latest)
[Bounty issues](https://github.com/stashapp/stash/labels/bounty)

@@ -58,6 +59,7 @@ Check out our documentation on [Stash-Docs](https://docs.stashapp.cc) for inform
For more help you can:
* Check the in-app documentation, in the top right corner of the app (it's also mirrored on [Stash-Docs](https://docs.stashapp.cc/in-app-manual))
* Join the [Matrix space](https://matrix.to/#/#stashapp:unredacted.org)
* Join the [Discord server](https://discord.gg/2TsNFKt), where the community can offer support.
* Start a [discussion on GitHub](https://github.com/stashapp/stash/discussions)

@@ -34,7 +34,7 @@ func main() {
	}()

	go handleSignals()
	desktop.Start(manager.GetInstance(), &manager.FaviconProvider{UIBox: ui.UIBox})
	desktop.Start(manager.GetInstance(), &ui.FaviconProvider)

	blockForever()
}

go.mod

@@ -9,7 +9,7 @@ require (
	github.com/chromedp/chromedp v0.7.3
	github.com/corona10/goimagehash v1.0.3
	github.com/disintegration/imaging v1.6.0
	github.com/fvbommel/sortorder v1.0.2
	github.com/fvbommel/sortorder v1.1.0
	github.com/go-chi/chi v4.0.2+incompatible
	github.com/golang-jwt/jwt/v4 v4.0.0
	github.com/golang-migrate/migrate/v4 v4.15.0-beta.1

@@ -24,7 +24,6 @@ require (
	github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8
	github.com/remeh/sizedwaitgroup v1.0.0
	github.com/robertkrimen/otto v0.0.0-20200922221731-ef014fd054ac
	github.com/rs/cors v1.6.0
	github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f
	github.com/sirupsen/logrus v1.8.1
	github.com/spf13/afero v1.8.2 // indirect

@@ -48,6 +47,7 @@ require (
require (
	github.com/asticode/go-astisub v0.20.0
	github.com/doug-martin/goqu/v9 v9.18.0
	github.com/go-chi/cors v1.2.1
	github.com/go-chi/httplog v0.2.1
	github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4
	github.com/hashicorp/golang-lru v0.5.4
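Among these dependency changes, github.com/fvbommel/sortorder is bumped from v1.0.2 to v1.1.0; the package provides natural ("human") ordering of strings with embedded numbers. A minimal usage sketch, assuming the NaturalLess API is unchanged between the two versions:

package main

import (
	"fmt"
	"sort"

	"github.com/fvbommel/sortorder"
)

func main() {
	files := []string{"scene10.mp4", "scene2.mp4", "scene1.mp4"}
	// NaturalLess compares embedded numbers numerically, so "scene2" sorts before "scene10".
	sort.Slice(files, func(i, j int) bool { return sortorder.NaturalLess(files[i], files[j]) })
	fmt.Println(files) // [scene1.mp4 scene2.mp4 scene10.mp4]
}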
|
|||
8
go.sum
|
|
@ -233,8 +233,8 @@ github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMo
|
|||
github.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI=
|
||||
github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU=
|
||||
github.com/fsouza/fake-gcs-server v1.17.0/go.mod h1:D1rTE4YCyHFNa99oyJJ5HyclvN/0uQR+pM/VdlL83bw=
|
||||
github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM45eo=
|
||||
github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
|
||||
github.com/fvbommel/sortorder v1.1.0 h1:fUmoe+HLsBTctBDoaBwpQo5N+nrCp8g/BjKb/6ZQmYw=
|
||||
github.com/fvbommel/sortorder v1.1.0/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
|
||||
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
github.com/glycerine/go-unsnap-stream v0.0.0-20180323001048-9f0cb55181dd/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
|
||||
github.com/glycerine/goconvey v0.0.0-20180728074245-46e3a41ad493/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
|
||||
|
|
@ -242,6 +242,8 @@ github.com/go-chi/chi v4.0.2+incompatible h1:maB6vn6FqCxrpz4FqWdh4+lwpyZIQS7YEAU
|
|||
github.com/go-chi/chi v4.0.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ=
|
||||
github.com/go-chi/chi/v5 v5.0.0 h1:DBPx88FjZJH3FsICfDAfIfnb7XxKIYVGG6lOPlhENAg=
|
||||
github.com/go-chi/chi/v5 v5.0.0/go.mod h1:BBug9lr0cqtdAhsu6R4AAdvufI0/XBzAQSsUqJpoZOs=
|
||||
github.com/go-chi/cors v1.2.1 h1:xEC8UT3Rlp2QuWNEr4Fs/c2EAGVKBwy/1vHx3bppil4=
|
||||
github.com/go-chi/cors v1.2.1/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58=
|
||||
github.com/go-chi/httplog v0.2.1 h1:KgCtIUkYNlfIsUPzE3utxd1KDKOvCrnAKaqdo0rmrh0=
|
||||
github.com/go-chi/httplog v0.2.1/go.mod h1:JyHOFO9twSfGoTin/RoP25Lx2a9Btq10ug+sgxe0+bo=
|
||||
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
|
||||
|
|
@ -668,8 +670,6 @@ github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6L
|
|||
github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/rs/cors v1.6.0 h1:G9tHG9lebljV9mfp9SNPDL36nCDxmo3zTlAf1YgvzmI=
|
||||
github.com/rs/cors v1.6.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
|
||||
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
|
||||
github.com/rs/xid v1.3.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
|
||||
github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
|
||||
|
|
|
|||
|
|
@ -25,6 +25,7 @@ fragment ConfigGeneralData on ConfigGeneralResult {
|
|||
maxTranscodeSize
|
||||
maxStreamingTranscodeSize
|
||||
writeImageThumbnails
|
||||
createImageClipsFromVideos
|
||||
apiKey
|
||||
username
|
||||
password
|
||||
|
|
@ -99,6 +100,7 @@ fragment ConfigDLNAData on ConfigDLNAResult {
|
|||
enabled
|
||||
whitelistedIPs
|
||||
interfaces
|
||||
videoSortOrder
|
||||
}
|
||||
|
||||
fragment ConfigScrapingData on ConfigScrapingResult {
|
||||
|
|
@ -139,6 +141,7 @@ fragment ConfigDefaultSettingsData on ConfigDefaultSettingsResult {
|
|||
scanGenerateSprites
|
||||
scanGeneratePhashes
|
||||
scanGenerateThumbnails
|
||||
scanGenerateClipPreviews
|
||||
}
|
||||
|
||||
identify {
|
||||
|
|
@ -179,6 +182,7 @@ fragment ConfigDefaultSettingsData on ConfigDefaultSettingsResult {
|
|||
transcodes
|
||||
phashes
|
||||
interactiveHeatmapsSpeeds
|
||||
clipPreviews
|
||||
}
|
||||
|
||||
deleteFile
|
||||
|
|
|
|||
|
|
@ -43,4 +43,46 @@ fragment GalleryFileData on GalleryFile {
|
|||
type
|
||||
value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fragment VisualFileData on VisualFile {
|
||||
... on BaseFile {
|
||||
id
|
||||
path
|
||||
size
|
||||
mod_time
|
||||
fingerprints {
|
||||
type
|
||||
value
|
||||
}
|
||||
}
|
||||
... on ImageFile {
|
||||
id
|
||||
path
|
||||
size
|
||||
mod_time
|
||||
width
|
||||
height
|
||||
fingerprints {
|
||||
type
|
||||
value
|
||||
}
|
||||
}
|
||||
... on VideoFile {
|
||||
id
|
||||
path
|
||||
size
|
||||
mod_time
|
||||
duration
|
||||
video_codec
|
||||
audio_codec
|
||||
width
|
||||
height
|
||||
frame_rate
|
||||
bit_rate
|
||||
fingerprints {
|
||||
type
|
||||
value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -13,6 +13,7 @@ fragment SlimImageData on Image {
|
|||
|
||||
paths {
|
||||
thumbnail
|
||||
preview
|
||||
image
|
||||
}
|
||||
|
||||
|
|
@ -45,4 +46,8 @@ fragment SlimImageData on Image {
|
|||
favorite
|
||||
image_path
|
||||
}
|
||||
|
||||
visual_files {
|
||||
...VisualFileData
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -15,6 +15,7 @@ fragment ImageData on Image {
|
|||
|
||||
paths {
|
||||
thumbnail
|
||||
preview
|
||||
image
|
||||
}
|
||||
|
||||
|
|
@ -33,4 +34,8 @@ fragment ImageData on Image {
|
|||
performers {
|
||||
...PerformerData
|
||||
}
|
||||
|
||||
visual_files {
|
||||
...VisualFileData
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -16,6 +16,8 @@ fragment SlimPerformerData on Performer {
|
|||
eye_color
|
||||
height_cm
|
||||
fake_tits
|
||||
penis_length
|
||||
circumcised
|
||||
career_length
|
||||
tattoos
|
||||
piercings
|
||||
|
|
|
|||
|
|
@ -14,6 +14,8 @@ fragment PerformerData on Performer {
|
|||
height_cm
|
||||
measurements
|
||||
fake_tits
|
||||
penis_length
|
||||
circumcised
|
||||
career_length
|
||||
tattoos
|
||||
piercings
|
||||
|
|
@ -25,6 +27,8 @@ fragment PerformerData on Performer {
|
|||
image_count
|
||||
gallery_count
|
||||
movie_count
|
||||
performer_count
|
||||
o_counter
|
||||
|
||||
tags {
|
||||
...SlimTagData
|
||||
|
|
|
|||
|
|
@ -13,6 +13,8 @@ fragment ScrapedPerformerData on ScrapedPerformer {
|
|||
height
|
||||
measurements
|
||||
fake_tits
|
||||
penis_length
|
||||
circumcised
|
||||
career_length
|
||||
tattoos
|
||||
piercings
|
||||
|
|
@ -43,6 +45,8 @@ fragment ScrapedScenePerformerData on ScrapedPerformer {
|
|||
height
|
||||
measurements
|
||||
fake_tits
|
||||
penis_length
|
||||
circumcised
|
||||
career_length
|
||||
tattoos
|
||||
piercings
|
||||
|
|
|
|||
|
|
@@ -20,8 +20,8 @@ query FindScenesByPathRegex($filter: FindFilterType) {
  }
}

query FindDuplicateScenes($distance: Int) {
  findDuplicateScenes(distance: $distance) {
query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
  findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
    ...SlimSceneData
  }
}

|
@@ -14,8 +14,16 @@ type Query {

  findScenesByPathRegex(filter: FindFilterType): FindScenesResultType!

  """ Returns any groups of scenes that are perceptual duplicates within the queried distance """
  findDuplicateScenes(distance: Int): [[Scene!]!]!
  """
  Returns any groups of scenes that are perceptual duplicates within the queried distance
  and the difference between their duration is smaller than durationDiff
  """
  findDuplicateScenes(
    distance: Int,
    """Max difference in seconds between files in order to be considered for similarity matching.
    Fractional seconds are ok: 0.5 will mean only files that have durations within 0.5 seconds between them will be matched based on PHash distance."""
    duration_diff: Float
  ): [[Scene!]!]!

  """Return valid stream paths"""
  sceneStreams(id: ID): [SceneStreamEndpoint!]!
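The new duration_diff argument narrows duplicate detection to scenes whose file durations are close. A hedged sketch of calling this query over the /graphql endpoint with Go's standard library; the server address, the omission of authentication, and the minimal field selection are assumptions for illustration:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	query := `query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
	  findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) { id title }
	}`
	body, _ := json.Marshal(map[string]interface{}{
		"query":     query,
		"variables": map[string]interface{}{"distance": 0, "duration_diff": 0.5},
	})
	// Assumes a local stash instance; add an ApiKey header if authentication is enabled.
	resp, err := http.Post("http://localhost:9999/graphql", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	var out map[string]interface{}
	json.NewDecoder(resp.Body).Decode(&out)
	fmt.Println(out["data"])
}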
@@ -295,14 +303,14 @@ type Mutation {
  metadataClean(input: CleanMetadataInput!): ID!
  """Identifies scenes using scrapers. Returns the job ID"""
  metadataIdentify(input: IdentifyMetadataInput!): ID!

  """Migrate generated files for the current hash naming"""
  migrateHashNaming: ID!
  """Migrates legacy scene screenshot files into the blob storage"""
  migrateSceneScreenshots(input: MigrateSceneScreenshotsInput!): ID!
  """Migrates blobs from the old storage system to the current one"""
  migrateBlobs(input: MigrateBlobsInput!): ID!

  """Anonymise the database in a separate file. Optionally returns a link to download the database file"""
  anonymiseDatabase(input: AnonymiseDatabaseInput!): String

|
|||
|
|
@ -106,6 +106,8 @@ input ConfigGeneralInput {
|
|||
|
||||
"""Write image thumbnails to disk when generating on the fly"""
|
||||
writeImageThumbnails: Boolean
|
||||
"""Create Image Clips from Video extensions when Videos are disabled in Library"""
|
||||
createImageClipsFromVideos: Boolean
|
||||
"""Username"""
|
||||
username: String
|
||||
"""Password"""
|
||||
|
|
@ -215,6 +217,8 @@ type ConfigGeneralResult {
|
|||
|
||||
"""Write image thumbnails to disk when generating on the fly"""
|
||||
writeImageThumbnails: Boolean!
|
||||
"""Create Image Clips from Video extensions when Videos are disabled in Library"""
|
||||
createImageClipsFromVideos: Boolean!
|
||||
"""API Key"""
|
||||
apiKey: String!
|
||||
"""Username"""
|
||||
|
|
@ -431,6 +435,8 @@ input ConfigDLNAInput {
|
|||
whitelistedIPs: [String!]
|
||||
"""List of interfaces to run DLNA on. Empty for all"""
|
||||
interfaces: [String!]
|
||||
"""Order to sort videos"""
|
||||
videoSortOrder: String
|
||||
}
|
||||
|
||||
type ConfigDLNAResult {
|
||||
|
|
@ -441,6 +447,8 @@ type ConfigDLNAResult {
|
|||
whitelistedIPs: [String!]!
|
||||
"""List of interfaces to run DLNA on. Empty for all"""
|
||||
interfaces: [String!]!
|
||||
"""Order to sort videos"""
|
||||
videoSortOrder: String!
|
||||
}
|
||||
|
||||
input ConfigScrapingInput {
|
||||
|
|
|
|||
|
|
@ -73,12 +73,14 @@ type ImageFile implements BaseFile {
|
|||
fingerprints: [Fingerprint!]!
|
||||
|
||||
width: Int!
|
||||
height: Int!
|
||||
height: Int!
|
||||
|
||||
created_at: Time!
|
||||
updated_at: Time!
|
||||
}
|
||||
|
||||
union VisualFile = VideoFile | ImageFile
|
||||
|
||||
type GalleryFile implements BaseFile {
|
||||
id: ID!
|
||||
path: String!
|
||||
|
|
|
|||
|
|
@ -76,6 +76,10 @@ input PerformerFilterType {
|
|||
measurements: StringCriterionInput
|
||||
"""Filter by fake tits value"""
|
||||
fake_tits: StringCriterionInput
|
||||
"""Filter by penis length value"""
|
||||
penis_length: FloatCriterionInput
|
||||
"""Filter by ciricumcision"""
|
||||
circumcised: CircumcisionCriterionInput
|
||||
"""Filter by career length"""
|
||||
career_length: StringCriterionInput
|
||||
"""Filter by tattoos"""
|
||||
|
|
@ -98,6 +102,8 @@ input PerformerFilterType {
|
|||
image_count: IntCriterionInput
|
||||
"""Filter by gallery count"""
|
||||
gallery_count: IntCriterionInput
|
||||
"""Filter by o count"""
|
||||
o_counter: IntCriterionInput
|
||||
"""Filter by StashID"""
|
||||
stash_id: StringCriterionInput @deprecated(reason: "Use stash_id_endpoint instead")
|
||||
"""Filter by StashID"""
|
||||
|
|
@ -116,6 +122,8 @@ input PerformerFilterType {
|
|||
death_year: IntCriterionInput
|
||||
"""Filter by studios where performer appears in scene/image/gallery"""
|
||||
studios: HierarchicalMultiCriterionInput
|
||||
"""Filter by performers where performer appears with another performer in scene/image/gallery"""
|
||||
performers: MultiCriterionInput
|
||||
"""Filter by autotag ignore value"""
|
||||
ignore_auto_tag: Boolean
|
||||
"""Filter by birthdate"""
|
||||
|
|
@@ -165,7 +173,9 @@ input SceneFilterType {
  """Filter by file checksum"""
  checksum: StringCriterionInput
  """Filter by file phash"""
  phash: StringCriterionInput
  phash: StringCriterionInput @deprecated(reason: "Use phash_distance instead")
  """Filter by file phash distance"""
  phash_distance: PhashDistanceCriterionInput
  """Filter by path"""
  path: StringCriterionInput
  """Filter by file count"""
|
|
@ -499,20 +509,33 @@ input IntCriterionInput {
|
|||
modifier: CriterionModifier!
|
||||
}
|
||||
|
||||
input FloatCriterionInput {
|
||||
value: Float!
|
||||
value2: Float
|
||||
modifier: CriterionModifier!
|
||||
}
|
||||
|
||||
input MultiCriterionInput {
|
||||
value: [ID!]
|
||||
modifier: CriterionModifier!
|
||||
excludes: [ID!]
|
||||
}
|
||||
|
||||
input GenderCriterionInput {
|
||||
value: GenderEnum
|
||||
modifier: CriterionModifier!
|
||||
}
|
||||
|
||||
input CircumcisionCriterionInput {
|
||||
value: [CircumisedEnum!]
|
||||
modifier: CriterionModifier!
|
||||
}
|
||||
|
||||
input HierarchicalMultiCriterionInput {
|
||||
value: [ID!]
|
||||
modifier: CriterionModifier!
|
||||
depth: Int
|
||||
excludes: [ID!]
|
||||
}
|
||||
|
||||
input DateCriterionInput {
|
||||
|
|
@@ -527,6 +550,12 @@ input TimestampCriterionInput {
  modifier: CriterionModifier!
}

input PhashDistanceCriterionInput {
  value: String!
  modifier: CriterionModifier!
  distance: Int
}
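PhashDistanceCriterionInput is consumed through SceneFilterType.phash_distance (see the deprecation of phash above). A sketch of building that filter object in Go and serialising it as GraphQL variables; the surrounding findScenes query, the example hash value, and the exact CriterionModifier value accepted here are assumptions based on the schema shown:

package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors input PhashDistanceCriterionInput from the schema above.
type PhashDistanceCriterionInput struct {
	Value    string `json:"value"`
	Modifier string `json:"modifier"`
	Distance *int   `json:"distance,omitempty"`
}

func main() {
	d := 4
	sceneFilter := map[string]interface{}{
		"phash_distance": PhashDistanceCriterionInput{
			Value:    "8f3a9c0d12e45b67", // a hex-encoded perceptual hash (example value)
			Modifier: "EQUALS",
			Distance: &d,
		},
	}
	vars, _ := json.Marshal(map[string]interface{}{"scene_filter": sceneFilter})
	fmt.Println(string(vars)) // pass as GraphQL variables to a findScenes query
}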
|
||||
enum FilterMode {
|
||||
SCENES,
|
||||
PERFORMERS,
|
||||
|
|
|
|||
|
|
@ -16,8 +16,9 @@ type Image {
|
|||
|
||||
file_mod_time: Time @deprecated(reason: "Use files.mod_time")
|
||||
|
||||
file: ImageFileType! @deprecated(reason: "Use files.mod_time")
|
||||
files: [ImageFile!]!
|
||||
file: ImageFileType! @deprecated(reason: "Use visual_files")
|
||||
files: [ImageFile!]! @deprecated(reason: "Use visual_files")
|
||||
visual_files: [VisualFile!]!
|
||||
paths: ImagePathsType! # Resolver
|
||||
|
||||
galleries: [Gallery!]!
|
||||
|
|
@ -35,6 +36,7 @@ type ImageFileType {
|
|||
|
||||
type ImagePathsType {
|
||||
thumbnail: String # Resolver
|
||||
preview: String # Resolver
|
||||
image: String # Resolver
|
||||
}
|
||||
|
||||
|
|
@ -95,4 +97,4 @@ type FindImagesResultType {
|
|||
"""Total file size in bytes"""
|
||||
filesize: Float!
|
||||
images: [Image!]!
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,6 +14,7 @@ input GenerateMetadataInput {
|
|||
forceTranscodes: Boolean
|
||||
phashes: Boolean
|
||||
interactiveHeatmapsSpeeds: Boolean
|
||||
clipPreviews: Boolean
|
||||
|
||||
"""scene ids to generate for"""
|
||||
sceneIDs: [ID!]
|
||||
|
|
@ -49,6 +50,7 @@ type GenerateMetadataOptions {
|
|||
transcodes: Boolean
|
||||
phashes: Boolean
|
||||
interactiveHeatmapsSpeeds: Boolean
|
||||
clipPreviews: Boolean
|
||||
}
|
||||
|
||||
type GeneratePreviewOptions {
|
||||
|
|
@ -98,6 +100,8 @@ input ScanMetadataInput {
|
|||
scanGeneratePhashes: Boolean
|
||||
"""Generate image thumbnails during scan"""
|
||||
scanGenerateThumbnails: Boolean
|
||||
"""Generate image clip previews during scan"""
|
||||
scanGenerateClipPreviews: Boolean
|
||||
|
||||
"Filter options for the scan"
|
||||
filter: ScanMetaDataFilterInput
|
||||
|
|
@ -120,6 +124,8 @@ type ScanMetadataOptions {
|
|||
scanGeneratePhashes: Boolean!
|
||||
"""Generate image thumbnails during scan"""
|
||||
scanGenerateThumbnails: Boolean!
|
||||
"""Generate image clip previews during scan"""
|
||||
scanGenerateClipPreviews: Boolean!
|
||||
}
|
||||
|
||||
input CleanMetadataInput {
|
||||
|
|
|
|||
|
|
@@ -6,6 +6,11 @@ enum GenderEnum {
  INTERSEX
  NON_BINARY
}

enum CircumisedEnum {
  CUT
  UNCUT
}

type Performer {
  id: ID!
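On the Go side, schema enums like CircumisedEnum above are realised as gqlgen-generated string types (the resolver changes later in this diff already use models.GenderEnumFemale / models.GenderEnumMale). A sketch of the shape such generated code typically takes; the exact generated names and file are assumptions:

// Typical gqlgen output for an enum (illustrative; the generated file differs in detail).
package models

type CircumisedEnum string

const (
	CircumisedEnumCut   CircumisedEnum = "CUT"
	CircumisedEnumUncut CircumisedEnum = "UNCUT"
)

// IsValid reports whether the value is one of the declared enum members.
func (e CircumisedEnum) IsValid() bool {
	switch e {
	case CircumisedEnumCut, CircumisedEnumUncut:
		return true
	}
	return false
}

func (e CircumisedEnum) String() string { return string(e) }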
@ -24,6 +29,8 @@ type Performer {
|
|||
height_cm: Int
|
||||
measurements: String
|
||||
fake_tits: String
|
||||
penis_length: Float
|
||||
circumcised: CircumisedEnum
|
||||
career_length: String
|
||||
tattoos: String
|
||||
piercings: String
|
||||
|
|
@ -37,6 +44,8 @@ type Performer {
|
|||
scene_count: Int # Resolver
|
||||
image_count: Int # Resolver
|
||||
gallery_count: Int # Resolver
|
||||
performer_count: Int # Resolver
|
||||
o_counter: Int # Resolver
|
||||
scenes: [Scene!]!
|
||||
stash_ids: [StashID!]!
|
||||
# rating expressed as 1-5
|
||||
|
|
@ -67,6 +76,8 @@ input PerformerCreateInput {
|
|||
height_cm: Int
|
||||
measurements: String
|
||||
fake_tits: String
|
||||
penis_length: Float
|
||||
circumcised: CircumisedEnum
|
||||
career_length: String
|
||||
tattoos: String
|
||||
piercings: String
|
||||
|
|
@ -105,6 +116,8 @@ input PerformerUpdateInput {
|
|||
height_cm: Int
|
||||
measurements: String
|
||||
fake_tits: String
|
||||
penis_length: Float
|
||||
circumcised: CircumisedEnum
|
||||
career_length: String
|
||||
tattoos: String
|
||||
piercings: String
|
||||
|
|
@ -148,6 +161,8 @@ input BulkPerformerUpdateInput {
|
|||
height_cm: Int
|
||||
measurements: String
|
||||
fake_tits: String
|
||||
penis_length: Float
|
||||
circumcised: CircumisedEnum
|
||||
career_length: String
|
||||
tattoos: String
|
||||
piercings: String
|
||||
|
|
|
|||
|
|
@ -15,6 +15,8 @@ type ScrapedPerformer {
|
|||
height: String
|
||||
measurements: String
|
||||
fake_tits: String
|
||||
penis_length: String
|
||||
circumcised: String
|
||||
career_length: String
|
||||
tattoos: String
|
||||
piercings: String
|
||||
|
|
@ -48,6 +50,8 @@ input ScrapedPerformerInput {
|
|||
height: String
|
||||
measurements: String
|
||||
fake_tits: String
|
||||
penis_length: String
|
||||
circumcised: String
|
||||
career_length: String
|
||||
tattoos: String
|
||||
piercings: String
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import (
|
|||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path"
|
||||
"strings"
|
||||
|
||||
"github.com/stashapp/stash/internal/manager"
|
||||
|
|
@ -13,11 +14,6 @@ import (
|
|||
"github.com/stashapp/stash/pkg/session"
|
||||
)
|
||||
|
||||
const (
|
||||
loginEndPoint = "/login"
|
||||
logoutEndPoint = "/logout"
|
||||
)
|
||||
|
||||
const (
|
||||
tripwireActivatedErrMsg = "Stash is exposed to the public internet without authentication, and is not serving any more content to protect your privacy. " +
|
||||
"More information and fixes are available at https://docs.stashapp.cc/networking/authentication-required-when-accessing-stash-from-the-internet"
|
||||
|
|
@@ -30,7 +26,7 @@ const (

func allowUnauthenticated(r *http.Request) bool {
	// #2715 - allow access to UI files
	return strings.HasPrefix(r.URL.Path, loginEndPoint) || r.URL.Path == logoutEndPoint || r.URL.Path == "/css" || strings.HasPrefix(r.URL.Path, "/assets")
	return strings.HasPrefix(r.URL.Path, loginEndpoint) || r.URL.Path == logoutEndpoint || r.URL.Path == "/css" || strings.HasPrefix(r.URL.Path, "/assets")
}

func authenticateHandler() func(http.Handler) http.Handler {
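authenticateHandler (whose body continues in the next hunk) returns a standard func(http.Handler) http.Handler middleware. Its mounting point is not part of this diff; with go-chi, which go.mod pulls in, wiring of that kind typically looks like the sketch below — the router setup and the example route are assumptions, not the repository's actual server code:

package api

import (
	"net/http"

	"github.com/go-chi/chi"
)

// newRouterSketch is illustrative only; stash's real route registration lives elsewhere.
func newRouterSketch() http.Handler {
	r := chi.NewRouter()
	// Every request passes through the authentication middleware first.
	r.Use(authenticateHandler())
	r.Get("/healthz", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	})
	return r
}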
@ -38,38 +34,41 @@ func authenticateHandler() func(http.Handler) http.Handler {
|
|||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
c := config.GetInstance()
|
||||
|
||||
if !checkSecurityTripwireActivated(c, w) {
|
||||
// error if external access tripwire activated
|
||||
if accessErr := session.CheckExternalAccessTripwire(c); accessErr != nil {
|
||||
http.Error(w, tripwireActivatedErrMsg, http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
userID, err := manager.GetInstance().SessionStore.Authenticate(w, r)
|
||||
if err != nil {
|
||||
if errors.Is(err, session.ErrUnauthorized) {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
_, err = w.Write([]byte(err.Error()))
|
||||
if err != nil {
|
||||
logger.Error(err)
|
||||
}
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// unauthorized error
|
||||
w.Header().Add("WWW-Authenticate", `FormBased`)
|
||||
w.Header().Add("WWW-Authenticate", "FormBased")
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
if err := session.CheckAllowPublicWithoutAuth(c, r); err != nil {
|
||||
var externalAccess session.ExternalAccessError
|
||||
switch {
|
||||
case errors.As(err, &externalAccess):
|
||||
securityActivateTripwireAccessedFromInternetWithoutAuth(c, externalAccess, w)
|
||||
return
|
||||
default:
|
||||
var accessErr session.ExternalAccessError
|
||||
if errors.As(err, &accessErr) {
|
||||
session.LogExternalAccessError(accessErr)
|
||||
|
||||
err := c.ActivatePublicAccessTripwire(net.IP(accessErr).String())
|
||||
if err != nil {
|
||||
logger.Errorf("Error activating public access tripwire: %v", err)
|
||||
}
|
||||
|
||||
http.Error(w, externalAccessErrMsg, http.StatusForbidden)
|
||||
} else {
|
||||
logger.Errorf("Error checking external access security: %v", err)
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
ctx := r.Context()
|
||||
|
|
@ -77,15 +76,15 @@ func authenticateHandler() func(http.Handler) http.Handler {
|
|||
if c.HasCredentials() {
|
||||
// authentication is required
|
||||
if userID == "" && !allowUnauthenticated(r) {
|
||||
// authentication was not received, redirect
|
||||
// if graphql was requested, we just return a forbidden error
|
||||
if r.URL.Path == "/graphql" {
|
||||
w.Header().Add("WWW-Authenticate", `FormBased`)
|
||||
// if graphql or a non-webpage was requested, we just return a forbidden error
|
||||
ext := path.Ext(r.URL.Path)
|
||||
if r.URL.Path == gqlEndpoint || (ext != "" && ext != ".html") {
|
||||
w.Header().Add("WWW-Authenticate", "FormBased")
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
prefix := getProxyPrefix(r.Header)
|
||||
prefix := getProxyPrefix(r)
|
||||
|
||||
// otherwise redirect to the login page
|
||||
returnURL := url.URL{
|
||||
|
|
@ -95,7 +94,7 @@ func authenticateHandler() func(http.Handler) http.Handler {
|
|||
q := make(url.Values)
|
||||
q.Set(returnURLParam, returnURL.String())
|
||||
u := url.URL{
|
||||
Path: prefix + "/login",
|
||||
Path: prefix + loginEndpoint,
|
||||
RawQuery: q.Encode(),
|
||||
}
|
||||
http.Redirect(w, r, u.String(), http.StatusFound)
|
||||
|
|
@ -111,31 +110,3 @@ func authenticateHandler() func(http.Handler) http.Handler {
|
|||
})
|
||||
}
|
||||
}
|
||||
|
||||
func checkSecurityTripwireActivated(c *config.Instance, w http.ResponseWriter) bool {
|
||||
if accessErr := session.CheckExternalAccessTripwire(c); accessErr != nil {
|
||||
w.WriteHeader(http.StatusForbidden)
|
||||
_, err := w.Write([]byte(tripwireActivatedErrMsg))
|
||||
if err != nil {
|
||||
logger.Error(err)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func securityActivateTripwireAccessedFromInternetWithoutAuth(c *config.Instance, accessErr session.ExternalAccessError, w http.ResponseWriter) {
|
||||
session.LogExternalAccessError(accessErr)
|
||||
|
||||
err := c.ActivatePublicAccessTripwire(net.IP(accessErr).String())
|
||||
if err != nil {
|
||||
logger.Error(err)
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusForbidden)
|
||||
_, err = w.Write([]byte(externalAccessErrMsg))
|
||||
if err != nil {
|
||||
logger.Error(err)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -113,7 +113,6 @@ type LatestRelease struct {
}

func makeGithubRequest(ctx context.Context, url string, output interface{}) error {

	transport := &http.Transport{Proxy: http.ProxyFromEnvironment}

	client := &http.Client{

@@ -124,6 +123,7 @@ func makeGithubRequest(ctx context.Context, url string, output interface{}) erro
	req, _ := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)

	req.Header.Add("Accept", apiAcceptHeader) // gh api recommendation , send header with api version
	logger.Debugf("Github API request: %s", url)
	response, err := client.Do(req)

	if err != nil {

@@ -229,19 +229,39 @@ func GetLatestRelease(ctx context.Context) (*LatestRelease, error) {
}

func getReleaseHash(ctx context.Context, tagName string) (string, error) {
	url := apiTags
	tags := []githubTagResponse{}
	err := makeGithubRequest(ctx, url, &tags)
	if err != nil {
		return "", err
	// Start with a small page size if not searching for latest_develop
	perPage := 10
	if tagName == developmentTag {
		perPage = 100
	}

	for _, tag := range tags {
		if tag.Name == tagName {
			if len(tag.Commit.Sha) != 40 {
				return "", errors.New("invalid Github API response")
	// Limit to 5 pages, ie 500 tags - should be plenty
	for page := 1; page <= 5; {
		url := fmt.Sprintf("%s?per_page=%d&page=%d", apiTags, perPage, page)
		tags := []githubTagResponse{}
		err := makeGithubRequest(ctx, url, &tags)
		if err != nil {
			return "", err
		}

		for _, tag := range tags {
			if tag.Name == tagName {
				if len(tag.Commit.Sha) != 40 {
					return "", errors.New("invalid Github API response")
				}
				return tag.Commit.Sha, nil
			}
		return tag.Commit.Sha, nil
		}

		if len(tags) == 0 {
			break
		}

		// if not found in the first 10, search again on page 1 with the first 100
		if perPage == 10 {
			perPage = 100
		} else {
			page++
		}
	}

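The paging loop above relies only on tag.Name and tag.Commit.Sha, which map onto GitHub's "list repository tags" response. The githubTagResponse declaration itself is not shown in this hunk; it plausibly looks like the following sketch (the JSON field tags are assumptions):

// Plausible declaration of githubTagResponse (not shown in this diff).
type githubTagResponse struct {
	Name   string `json:"name"`
	Commit struct {
		Sha string `json:"sha"`
	} `json:"commit"`
}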
internal/api/error.go (new file)

@@ -0,0 +1,42 @@
package api

import (
	"context"
	"encoding/json"
	"errors"

	"github.com/99designs/gqlgen/graphql"
	"github.com/stashapp/stash/pkg/logger"
	"github.com/vektah/gqlparser/v2/gqlerror"
)

func gqlErrorHandler(ctx context.Context, e error) *gqlerror.Error {
	if !errors.Is(ctx.Err(), context.Canceled) {
		// log all errors - for now just log the error message
		// we can potentially add more context later
		fc := graphql.GetFieldContext(ctx)
		if fc != nil {
			logger.Errorf("%s: %v", fc.Path(), e)

			// log the args in debug level
			logger.DebugFunc(func() (string, []interface{}) {
				var args interface{}
				args = fc.Args

				s, _ := json.Marshal(args)
				if len(s) > 0 {
					args = string(s)
				}

				return "%s: %v", []interface{}{
					fc.Path(),
					args,
				}
			})
		}
	}

	// we may also want to transform the error message for the response
	// for now just return the original error
	return graphql.DefaultErrorPresenter(ctx, e)
}
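gqlErrorHandler has exactly the signature gqlgen expects from an error presenter, so it is presumably registered on the GraphQL server; that wiring is not part of this diff. A hedged sketch of how such a registration is normally done with gqlgen:

package api

import (
	"net/http"

	"github.com/99designs/gqlgen/graphql"
	"github.com/99designs/gqlgen/graphql/handler"
)

// Illustrative wiring only; the real server setup lives elsewhere in the repository.
func newGraphQLServerSketch(es graphql.ExecutableSchema) http.Handler {
	srv := handler.NewDefaultServer(es)
	// Route every resolver error through the presenter defined above.
	srv.SetErrorPresenter(gqlErrorHandler)
	return srv
}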
|
|
@ -87,7 +87,7 @@ func initialiseCustomImages() {
|
|||
}
|
||||
}
|
||||
|
||||
func getRandomPerformerImageUsingName(name string, gender models.GenderEnum, customPath string) ([]byte, error) {
|
||||
func getRandomPerformerImageUsingName(name string, gender *models.GenderEnum, customPath string) ([]byte, error) {
|
||||
var box *imageBox
|
||||
|
||||
// If we have a custom path, we should return a new box in the given path.
|
||||
|
|
@ -95,11 +95,16 @@ func getRandomPerformerImageUsingName(name string, gender models.GenderEnum, cus
|
|||
box = performerBoxCustom
|
||||
}
|
||||
|
||||
var g models.GenderEnum
|
||||
if gender != nil {
|
||||
g = *gender
|
||||
}
|
||||
|
||||
if box == nil {
|
||||
switch gender {
|
||||
case models.GenderEnumFemale:
|
||||
switch g {
|
||||
case models.GenderEnumFemale, models.GenderEnumTransgenderFemale:
|
||||
box = performerBox
|
||||
case models.GenderEnumMale:
|
||||
case models.GenderEnumMale, models.GenderEnumTransgenderMale:
|
||||
box = performerBoxMale
|
||||
default:
|
||||
box = performerBox
|
||||
|
|
|
|||
|
|
@ -12,42 +12,55 @@ import (
|
|||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *imageResolver) getPrimaryFile(ctx context.Context, obj *models.Image) (*file.ImageFile, error) {
|
||||
func convertImageFile(f *file.ImageFile) *ImageFile {
|
||||
ret := &ImageFile{
|
||||
ID: strconv.Itoa(int(f.ID)),
|
||||
Path: f.Path,
|
||||
Basename: f.Basename,
|
||||
ParentFolderID: strconv.Itoa(int(f.ParentFolderID)),
|
||||
ModTime: f.ModTime,
|
||||
Size: f.Size,
|
||||
Width: f.Width,
|
||||
Height: f.Height,
|
||||
CreatedAt: f.CreatedAt,
|
||||
UpdatedAt: f.UpdatedAt,
|
||||
Fingerprints: resolveFingerprints(f.Base()),
|
||||
}
|
||||
|
||||
if f.ZipFileID != nil {
|
||||
zipFileID := strconv.Itoa(int(*f.ZipFileID))
|
||||
ret.ZipFileID = &zipFileID
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func (r *imageResolver) getPrimaryFile(ctx context.Context, obj *models.Image) (file.VisualFile, error) {
|
||||
if obj.PrimaryFileID != nil {
|
||||
f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ret, ok := f.(*file.ImageFile)
|
||||
asFrame, ok := f.(file.VisualFile)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("file %T is not an image file", f)
|
||||
return nil, fmt.Errorf("file %T is not an frame", f)
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
return asFrame, nil
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (r *imageResolver) getFiles(ctx context.Context, obj *models.Image) ([]*file.ImageFile, error) {
|
||||
func (r *imageResolver) getFiles(ctx context.Context, obj *models.Image) ([]file.File, error) {
|
||||
fileIDs, err := loaders.From(ctx).ImageFiles.Load(obj.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs)
|
||||
ret := make([]*file.ImageFile, len(files))
|
||||
for i, bf := range files {
|
||||
f, ok := bf.(*file.ImageFile)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("file %T is not an image file", f)
|
||||
}
|
||||
|
||||
ret[i] = f
|
||||
}
|
||||
|
||||
return ret, firstError(errs)
|
||||
return files, firstError(errs)
|
||||
}
|
||||
|
||||
func (r *imageResolver) Title(ctx context.Context, obj *models.Image) (*string, error) {
|
||||
|
|
@ -65,9 +78,9 @@ func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*ImageFile
|
|||
return nil, nil
|
||||
}
|
||||
|
||||
width := f.Width
|
||||
height := f.Height
|
||||
size := f.Size
|
||||
width := f.GetWidth()
|
||||
height := f.GetHeight()
|
||||
size := f.Base().Size
|
||||
return &ImageFileType{
|
||||
Size: int(size),
|
||||
Width: width,
|
||||
|
|
@ -75,6 +88,32 @@ func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*ImageFile
|
|||
}, nil
|
||||
}
|
||||
|
||||
func convertVisualFile(f file.File) VisualFile {
|
||||
switch f := f.(type) {
|
||||
case *file.ImageFile:
|
||||
return convertImageFile(f)
|
||||
case *file.VideoFile:
|
||||
return convertVideoFile(f)
|
||||
default:
|
||||
panic(fmt.Sprintf("unknown file type %T", f))
|
||||
}
|
||||
}
|
||||
|
||||
func (r *imageResolver) VisualFiles(ctx context.Context, obj *models.Image) ([]VisualFile, error) {
|
||||
fileIDs, err := loaders.From(ctx).ImageFiles.Load(obj.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs)
|
||||
ret := make([]VisualFile, len(files))
|
||||
for i, f := range files {
|
||||
ret[i] = convertVisualFile(f)
|
||||
}
|
||||
|
||||
return ret, firstError(errs)
|
||||
}
|
||||
|
||||
func (r *imageResolver) Date(ctx context.Context, obj *models.Image) (*string, error) {
|
||||
if obj.Date != nil {
|
||||
result := obj.Date.String()
|
||||
|
|
@ -89,27 +128,18 @@ func (r *imageResolver) Files(ctx context.Context, obj *models.Image) ([]*ImageF
|
|||
return nil, err
|
||||
}
|
||||
|
||||
ret := make([]*ImageFile, len(files))
|
||||
var ret []*ImageFile
|
||||
|
||||
for i, f := range files {
|
||||
ret[i] = &ImageFile{
|
||||
ID: strconv.Itoa(int(f.ID)),
|
||||
Path: f.Path,
|
||||
Basename: f.Basename,
|
||||
ParentFolderID: strconv.Itoa(int(f.ParentFolderID)),
|
||||
ModTime: f.ModTime,
|
||||
Size: f.Size,
|
||||
Width: f.Width,
|
||||
Height: f.Height,
|
||||
CreatedAt: f.CreatedAt,
|
||||
UpdatedAt: f.UpdatedAt,
|
||||
Fingerprints: resolveFingerprints(f.Base()),
|
||||
for _, f := range files {
|
||||
// filter out non-image files
|
||||
imageFile, ok := f.(*file.ImageFile)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
if f.ZipFileID != nil {
|
||||
zipFileID := strconv.Itoa(int(*f.ZipFileID))
|
||||
ret[i].ZipFileID = &zipFileID
|
||||
}
|
||||
thisFile := convertImageFile(imageFile)
|
||||
|
||||
ret = append(ret, thisFile)
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
|
|
@ -121,7 +151,7 @@ func (r *imageResolver) FileModTime(ctx context.Context, obj *models.Image) (*ti
|
|||
return nil, err
|
||||
}
|
||||
if f != nil {
|
||||
return &f.ModTime, nil
|
||||
return &f.Base().ModTime, nil
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
|
|
@ -131,10 +161,12 @@ func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*ImagePat
|
|||
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||
builder := urlbuilders.NewImageURLBuilder(baseURL, obj)
|
||||
thumbnailPath := builder.GetThumbnailURL()
|
||||
previewPath := builder.GetPreviewURL()
|
||||
imagePath := builder.GetImageURL()
|
||||
return &ImagePathsType{
|
||||
Image: &imagePath,
|
||||
Thumbnail: &thumbnailPath,
|
||||
Preview: &previewPath,
|
||||
}, nil
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -86,33 +86,38 @@ func (r *movieResolver) Synopsis(ctx context.Context, obj *models.Movie) (*strin
|
|||
}
|
||||
|
||||
func (r *movieResolver) FrontImagePath(ctx context.Context, obj *models.Movie) (*string, error) {
|
||||
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||
frontimagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj).GetMovieFrontImageURL()
|
||||
return &frontimagePath, nil
|
||||
}
|
||||
|
||||
func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (*string, error) {
|
||||
// don't return any thing if there is no back image
|
||||
hasImage := false
|
||||
var hasImage bool
|
||||
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
|
||||
var err error
|
||||
hasImage, err = r.repository.Movie.HasBackImage(ctx, obj.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
hasImage, err = r.repository.Movie.HasFrontImage(ctx, obj.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||
imagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj).GetMovieFrontImageURL(hasImage)
|
||||
return &imagePath, nil
|
||||
}
|
||||
|
||||
func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (*string, error) {
|
||||
var hasImage bool
|
||||
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
|
||||
var err error
|
||||
hasImage, err = r.repository.Movie.HasBackImage(ctx, obj.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// don't return anything if there is no back image
|
||||
if !hasImage {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||
backimagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj).GetMovieBackImageURL()
|
||||
return &backimagePath, nil
|
||||
imagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj).GetMovieBackImageURL()
|
||||
return &imagePath, nil
|
||||
}
|
||||
|
||||
func (r *movieResolver) SceneCount(ctx context.Context, obj *models.Movie) (ret *int, err error) {
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ import (
|
|||
"github.com/stashapp/stash/pkg/gallery"
|
||||
"github.com/stashapp/stash/pkg/image"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/performer"
|
||||
)
|
||||
|
||||
// Checksum is deprecated
|
||||
|
|
@ -19,7 +20,7 @@ func (r *performerResolver) Checksum(ctx context.Context, obj *models.Performer)
|
|||
|
||||
func (r *performerResolver) Aliases(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||
if !obj.Aliases.Loaded() {
|
||||
if err := r.withTxn(ctx, func(ctx context.Context) error {
|
||||
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
|
||||
return obj.LoadAliases(ctx, r.repository.Performer)
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
|
|
@ -32,7 +33,7 @@ func (r *performerResolver) Aliases(ctx context.Context, obj *models.Performer)
|
|||
|
||||
func (r *performerResolver) AliasList(ctx context.Context, obj *models.Performer) ([]string, error) {
|
||||
if !obj.Aliases.Loaded() {
|
||||
if err := r.withTxn(ctx, func(ctx context.Context) error {
|
||||
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
|
||||
return obj.LoadAliases(ctx, r.repository.Performer)
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
|
|
@ -63,8 +64,17 @@ func (r *performerResolver) Birthdate(ctx context.Context, obj *models.Performer
|
|||
}
|
||||
|
||||
func (r *performerResolver) ImagePath(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||
var hasImage bool
|
||||
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
|
||||
var err error
|
||||
hasImage, err = r.repository.Performer.HasImage(ctx, obj.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||
imagePath := urlbuilders.NewPerformerURLBuilder(baseURL, obj).GetPerformerImageURL()
|
||||
imagePath := urlbuilders.NewPerformerURLBuilder(baseURL, obj).GetPerformerImageURL(hasImage)
|
||||
return &imagePath, nil
|
||||
}
|
||||
|
||||
|
|
@ -118,6 +128,24 @@ func (r *performerResolver) GalleryCount(ctx context.Context, obj *models.Perfor
|
|||
return &res, nil
|
||||
}
|
||||
|
||||
func (r *performerResolver) OCounter(ctx context.Context, obj *models.Performer) (ret *int, err error) {
|
||||
var res_scene int
|
||||
var res_image int
|
||||
var res int
|
||||
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
|
||||
res_scene, err = r.repository.Scene.OCountByPerformerID(ctx, obj.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
res_image, err = r.repository.Image.OCountByPerformerID(ctx, obj.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
res = res_scene + res_image
|
||||
return &res, nil
|
||||
}
|
||||
|
||||
func (r *performerResolver) Scenes(ctx context.Context, obj *models.Performer) (ret []*models.Scene, err error) {
|
||||
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
|
||||
ret, err = r.repository.Scene.FindByPerformerID(ctx, obj.ID)
|
||||
|
|
@ -181,3 +209,15 @@ func (r *performerResolver) MovieCount(ctx context.Context, obj *models.Performe
|
|||
|
||||
return &res, nil
|
||||
}
|
||||
|
||||
func (r *performerResolver) PerformerCount(ctx context.Context, obj *models.Performer) (ret *int, err error) {
|
||||
var res int
|
||||
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
|
||||
res, err = performer.CountByAppearsWith(ctx, r.repository.Performer, obj.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &res, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,6 +14,35 @@ import (
|
|||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
func convertVideoFile(f *file.VideoFile) *VideoFile {
|
||||
ret := &VideoFile{
|
||||
ID: strconv.Itoa(int(f.ID)),
|
||||
Path: f.Path,
|
||||
Basename: f.Basename,
|
||||
ParentFolderID: strconv.Itoa(int(f.ParentFolderID)),
|
||||
ModTime: f.ModTime,
|
||||
Format: f.Format,
|
||||
Size: f.Size,
|
||||
Duration: handleFloat64Value(f.Duration),
|
||||
VideoCodec: f.VideoCodec,
|
||||
AudioCodec: f.AudioCodec,
|
||||
Width: f.Width,
|
||||
Height: f.Height,
|
||||
FrameRate: handleFloat64Value(f.FrameRate),
|
||||
BitRate: int(f.BitRate),
|
||||
CreatedAt: f.CreatedAt,
|
||||
UpdatedAt: f.UpdatedAt,
|
||||
Fingerprints: resolveFingerprints(f.Base()),
|
||||
}
|
||||
|
||||
if f.ZipFileID != nil {
|
||||
zipFileID := strconv.Itoa(int(*f.ZipFileID))
|
||||
ret.ZipFileID = &zipFileID
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func (r *sceneResolver) getPrimaryFile(ctx context.Context, obj *models.Scene) (*file.VideoFile, error) {
|
||||
if obj.PrimaryFileID != nil {
|
||||
f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID)
|
||||
|
|
@ -112,30 +141,7 @@ func (r *sceneResolver) Files(ctx context.Context, obj *models.Scene) ([]*VideoF
|
|||
ret := make([]*VideoFile, len(files))
|
||||
|
||||
for i, f := range files {
|
||||
ret[i] = &VideoFile{
|
||||
ID: strconv.Itoa(int(f.ID)),
|
||||
Path: f.Path,
|
||||
Basename: f.Basename,
|
||||
ParentFolderID: strconv.Itoa(int(f.ParentFolderID)),
|
||||
ModTime: f.ModTime,
|
||||
Format: f.Format,
|
||||
Size: f.Size,
|
||||
Duration: handleFloat64Value(f.Duration),
|
||||
VideoCodec: f.VideoCodec,
|
||||
AudioCodec: f.AudioCodec,
|
||||
Width: f.Width,
|
||||
Height: f.Height,
|
||||
FrameRate: handleFloat64Value(f.FrameRate),
|
||||
BitRate: int(f.BitRate),
|
||||
CreatedAt: f.CreatedAt,
|
||||
UpdatedAt: f.UpdatedAt,
|
||||
Fingerprints: resolveFingerprints(f.Base()),
|
||||
}
|
||||
|
||||
if f.ZipFileID != nil {
|
||||
zipFileID := strconv.Itoa(int(*f.ZipFileID))
|
||||
ret[i].ZipFileID = &zipFileID
|
||||
}
|
||||
ret[i] = convertVideoFile(f)
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
|
|
@ -178,8 +184,8 @@ func formatFingerprint(fp interface{}) string {
|
|||
func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*ScenePathsType, error) {
|
||||
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||
config := manager.GetInstance().Config
|
||||
builder := urlbuilders.NewSceneURLBuilder(baseURL, obj.ID)
|
||||
screenshotPath := builder.GetScreenshotURL(obj.UpdatedAt)
|
||||
builder := urlbuilders.NewSceneURLBuilder(baseURL, obj)
|
||||
screenshotPath := builder.GetScreenshotURL()
|
||||
previewPath := builder.GetStreamPreviewURL()
|
||||
streamPath := builder.GetStreamURL(config.GetAPIKey()).String()
|
||||
webpPath := builder.GetStreamPreviewImageURL()
|
||||
|
|
@ -370,7 +376,7 @@ func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([]
|
|||
config := manager.GetInstance().Config
|
||||
|
||||
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||
builder := urlbuilders.NewSceneURLBuilder(baseURL, obj.ID)
|
||||
builder := urlbuilders.NewSceneURLBuilder(baseURL, obj)
|
||||
apiKey := config.GetAPIKey()
|
||||
|
||||
return manager.GetSceneStreamPaths(obj, builder.GetStreamURL(apiKey), config.GetMaxStreamingTranscodeSize())
|
||||
|
|
|
|||
|
|
@ -48,20 +48,17 @@ func (r *sceneMarkerResolver) Tags(ctx context.Context, obj *models.SceneMarker)
|
|||
|
||||
func (r *sceneMarkerResolver) Stream(ctx context.Context, obj *models.SceneMarker) (string, error) {
|
||||
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||
sceneID := int(obj.SceneID.Int64)
|
||||
return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamURL(obj.ID), nil
|
||||
return urlbuilders.NewSceneMarkerURLBuilder(baseURL, obj).GetStreamURL(), nil
|
||||
}
|
||||
|
||||
func (r *sceneMarkerResolver) Preview(ctx context.Context, obj *models.SceneMarker) (string, error) {
|
||||
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||
sceneID := int(obj.SceneID.Int64)
|
||||
return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamPreviewURL(obj.ID), nil
|
||||
return urlbuilders.NewSceneMarkerURLBuilder(baseURL, obj).GetPreviewURL(), nil
|
||||
}
|
||||
|
||||
func (r *sceneMarkerResolver) Screenshot(ctx context.Context, obj *models.SceneMarker) (string, error) {
|
||||
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||
sceneID := int(obj.SceneID.Int64)
|
||||
return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamScreenshotURL(obj.ID), nil
|
||||
return urlbuilders.NewSceneMarkerURLBuilder(baseURL, obj).GetScreenshotURL(), nil
|
||||
}
|
||||
|
||||
func (r *sceneMarkerResolver) CreatedAt(ctx context.Context, obj *models.SceneMarker) (*time.Time, error) {
|
||||
|
|
|
|||
|
|
@@ -27,9 +27,6 @@ func (r *studioResolver) URL(ctx context.Context, obj *models.Studio) (*string,
}

func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*string, error) {
	baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
	imagePath := urlbuilders.NewStudioURLBuilder(baseURL, obj).GetStudioImageURL()

	var hasImage bool
	if err := r.withReadTxn(ctx, func(ctx context.Context) error {
		var err error

@@ -39,11 +36,8 @@ func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*st
		return nil, err
	}

	// indicate that image is missing by setting default query param to true
	if !hasImage {
		imagePath += "?default=true"
	}

	baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
	imagePath := urlbuilders.NewStudioURLBuilder(baseURL, obj).GetStudioImageURL(hasImage)
	return &imagePath, nil
}
||||
|
|
|
|||
|
|
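With `hasImage` resolved inside the read transaction, the builder can append `default=true` and the image route can fall back to a bundled placeholder without a second lookup. A rough stand-in for the consuming handler (hypothetical names; the real routes live elsewhere in this changeset):

```go
package main

import "net/http"

// fallbackSVG stands in for the embedded default studio/tag image.
var fallbackSVG = []byte(`<svg xmlns="http://www.w3.org/2000/svg" width="1" height="1"/>`)

// imageHandler serves the stored image unless the URL was built with
// default=true, in which case it returns the static fallback directly.
func imageHandler(lookup func(r *http.Request) ([]byte, bool)) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Query().Get("default") == "true" {
			w.Header().Set("Content-Type", "image/svg+xml")
			_, _ = w.Write(fallbackSVG)
			return
		}
		if data, ok := lookup(r); ok {
			_, _ = w.Write(data)
			return
		}
		http.NotFound(w, r)
	}
}

func main() {
	http.Handle("/studio/image", imageHandler(func(*http.Request) ([]byte, bool) { return nil, false }))
	_ = http.ListenAndServe("127.0.0.1:8080", nil)
}
```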
@@ -111,8 +111,17 @@ func (r *tagResolver) PerformerCount(ctx context.Context, obj *models.Tag) (ret
}

func (r *tagResolver) ImagePath(ctx context.Context, obj *models.Tag) (*string, error) {
var hasImage bool
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
var err error
hasImage, err = r.repository.Tag.HasImage(ctx, obj.ID)
return err
}); err != nil {
return nil, err
}

baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
imagePath := urlbuilders.NewTagURLBuilder(baseURL, obj).GetTagImageURL()
imagePath := urlbuilders.NewTagURLBuilder(baseURL, obj).GetTagImageURL(hasImage)
return &imagePath, nil
}
@@ -218,6 +218,10 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen
c.Set(config.WriteImageThumbnails, *input.WriteImageThumbnails)
}

if input.CreateImageClipsFromVideos != nil {
c.Set(config.CreateImageClipsFromVideos, *input.CreateImageClipsFromVideos)
}

if input.GalleryCoverRegex != nil {

_, err := regexp.Compile(*input.GalleryCoverRegex)
@@ -228,8 +232,13 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen
c.Set(config.GalleryCoverRegex, *input.GalleryCoverRegex)
}

if input.Username != nil {
if input.Username != nil && *input.Username != c.GetUsername() {
c.Set(config.Username, input.Username)
if *input.Username == "" {
logger.Info("Username cleared")
} else {
logger.Info("Username changed")
}
}

if input.Password != nil {
@@ -238,6 +247,11 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen
currentPWHash := c.GetPasswordHash()

if *input.Password != currentPWHash {
if *input.Password == "" {
logger.Info("Password cleared")
} else {
logger.Info("Password changed")
}
c.SetPassword(*input.Password)
}
}
@@ -483,6 +497,10 @@ func (r *mutationResolver) ConfigureDlna(ctx context.Context, input ConfigDLNAIn
c.Set(config.DLNADefaultIPWhitelist, input.WhitelistedIPs)
}

if input.VideoSortOrder != nil {
c.Set(config.DLNAVideoSortOrder, input.VideoSortOrder)
}

currentDLNAEnabled := c.GetDLNADefaultEnabled()
if input.Enabled != nil && *input.Enabled != currentDLNAEnabled {
c.Set(config.DLNADefaultEnabled, *input.Enabled)
@ -10,6 +10,7 @@ import (
|
|||
"github.com/stashapp/stash/pkg/image"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/plugin"
|
||||
"github.com/stashapp/stash/pkg/sliceutil/intslice"
|
||||
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
|
@ -126,9 +127,9 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp
|
|||
}
|
||||
|
||||
// ensure that new primary file is associated with scene
|
||||
var f *file.ImageFile
|
||||
var f file.File
|
||||
for _, ff := range i.Files.List() {
|
||||
if ff.ID == converted {
|
||||
if ff.Base().ID == converted {
|
||||
f = ff
|
||||
}
|
||||
}
|
||||
|
|
@ -138,6 +139,8 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp
|
|||
}
|
||||
}
|
||||
|
||||
var updatedGalleryIDs []int
|
||||
|
||||
if translator.hasField("gallery_ids") {
|
||||
updatedImage.GalleryIDs, err = translateUpdateIDs(input.GalleryIds, models.RelationshipUpdateModeSet)
|
||||
if err != nil {
|
||||
|
|
@ -152,6 +155,8 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp
|
|||
if err := r.galleryService.ValidateImageGalleryChange(ctx, i, *updatedImage.GalleryIDs); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
updatedGalleryIDs = updatedImage.GalleryIDs.ImpactedIDs(i.GalleryIDs.List())
|
||||
}
|
||||
|
||||
if translator.hasField("performer_ids") {
|
||||
|
|
@ -174,6 +179,13 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp
|
|||
return nil, err
|
||||
}
|
||||
|
||||
// #3759 - update all impacted galleries
|
||||
for _, galleryID := range updatedGalleryIDs {
|
||||
if err := r.galleryService.Updated(ctx, galleryID); err != nil {
|
||||
return nil, fmt.Errorf("updating gallery %d: %w", galleryID, err)
|
||||
}
|
||||
}
|
||||
|
||||
return image, nil
|
||||
}
|
||||
|
||||
|
|
@ -223,6 +235,7 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU
|
|||
|
||||
// Start the transaction and save the image marker
|
||||
if err := r.withTxn(ctx, func(ctx context.Context) error {
|
||||
var updatedGalleryIDs []int
|
||||
qb := r.repository.Image
|
||||
|
||||
for _, imageID := range imageIDs {
|
||||
|
|
@ -244,6 +257,9 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU
|
|||
if err := r.galleryService.ValidateImageGalleryChange(ctx, i, *updatedImage.GalleryIDs); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
thisUpdatedGalleryIDs := updatedImage.GalleryIDs.ImpactedIDs(i.GalleryIDs.List())
|
||||
updatedGalleryIDs = intslice.IntAppendUniques(updatedGalleryIDs, thisUpdatedGalleryIDs)
|
||||
}
|
||||
|
||||
image, err := qb.UpdatePartial(ctx, imageID, updatedImage)
|
||||
|
|
@ -254,6 +270,13 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU
|
|||
ret = append(ret, image)
|
||||
}
|
||||
|
||||
// #3759 - update all impacted galleries
|
||||
for _, galleryID := range updatedGalleryIDs {
|
||||
if err := r.galleryService.Updated(ctx, galleryID); err != nil {
|
||||
return fmt.Errorf("updating gallery %d: %w", galleryID, err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
|
|
|
|||
|
|
@ -67,7 +67,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input PerformerC
|
|||
newPerformer.URL = *input.URL
|
||||
}
|
||||
if input.Gender != nil {
|
||||
newPerformer.Gender = *input.Gender
|
||||
newPerformer.Gender = input.Gender
|
||||
}
|
||||
if input.Birthdate != nil {
|
||||
d := models.NewDate(*input.Birthdate)
|
||||
|
|
@ -98,6 +98,12 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input PerformerC
|
|||
if input.FakeTits != nil {
|
||||
newPerformer.FakeTits = *input.FakeTits
|
||||
}
|
||||
if input.PenisLength != nil {
|
||||
newPerformer.PenisLength = input.PenisLength
|
||||
}
|
||||
if input.Circumcised != nil {
|
||||
newPerformer.Circumcised = input.Circumcised
|
||||
}
|
||||
if input.CareerLength != nil {
|
||||
newPerformer.CareerLength = *input.CareerLength
|
||||
}
|
||||
|
|
@ -222,6 +228,16 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input PerformerU
|
|||
|
||||
updatedPerformer.Ethnicity = translator.optionalString(input.Ethnicity, "ethnicity")
|
||||
updatedPerformer.FakeTits = translator.optionalString(input.FakeTits, "fake_tits")
|
||||
updatedPerformer.PenisLength = translator.optionalFloat64(input.PenisLength, "penis_length")
|
||||
|
||||
if translator.hasField("circumcised") {
|
||||
if input.Circumcised != nil {
|
||||
updatedPerformer.Circumcised = models.NewOptionalString(input.Circumcised.String())
|
||||
} else {
|
||||
updatedPerformer.Circumcised = models.NewOptionalStringPtr(nil)
|
||||
}
|
||||
}
|
||||
|
||||
updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length")
|
||||
updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos")
|
||||
updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings")
|
||||
|
|
@ -339,6 +355,16 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
|
|||
|
||||
updatedPerformer.Measurements = translator.optionalString(input.Measurements, "measurements")
|
||||
updatedPerformer.FakeTits = translator.optionalString(input.FakeTits, "fake_tits")
|
||||
updatedPerformer.PenisLength = translator.optionalFloat64(input.PenisLength, "penis_length")
|
||||
|
||||
if translator.hasField("circumcised") {
|
||||
if input.Circumcised != nil {
|
||||
updatedPerformer.Circumcised = models.NewOptionalString(input.Circumcised.String())
|
||||
} else {
|
||||
updatedPerformer.Circumcised = models.NewOptionalStringPtr(nil)
|
||||
}
|
||||
}
|
||||
|
||||
updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length")
|
||||
updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos")
|
||||
updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings")
|
||||
|
|
@@ -418,7 +444,7 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
// execute post hooks outside of txn
var newRet []*models.Performer
for _, performer := range ret {
r.hookExecutor.ExecutePostHooks(ctx, performer.ID, plugin.ImageUpdatePost, input, translator.getFields())
r.hookExecutor.ExecutePostHooks(ctx, performer.ID, plugin.PerformerUpdatePost, input, translator.getFields())

performer, err = r.getPerformer(ctx, performer.ID)
if err != nil {
@ -106,6 +106,7 @@ func makeConfigGeneralResult() *ConfigGeneralResult {
|
|||
MaxTranscodeSize: &maxTranscodeSize,
|
||||
MaxStreamingTranscodeSize: &maxStreamingTranscodeSize,
|
||||
WriteImageThumbnails: config.IsWriteImageThumbnails(),
|
||||
CreateImageClipsFromVideos: config.IsCreateImageClipsFromVideos(),
|
||||
GalleryCoverRegex: config.GetGalleryCoverRegex(),
|
||||
APIKey: config.GetAPIKey(),
|
||||
Username: config.GetUsername(),
|
||||
|
|
@ -202,6 +203,7 @@ func makeConfigDLNAResult() *ConfigDLNAResult {
|
|||
Enabled: config.GetDLNADefaultEnabled(),
|
||||
WhitelistedIPs: config.GetDLNADefaultIPWhitelist(),
|
||||
Interfaces: config.GetDLNAInterfaces(),
|
||||
VideoSortOrder: config.GetVideoSortOrder(),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@@ -220,13 +220,17 @@ func (r *queryResolver) ParseSceneFilenames(ctx context.Context, filter *models.
return ret, nil
}

func (r *queryResolver) FindDuplicateScenes(ctx context.Context, distance *int) (ret [][]*models.Scene, err error) {
func (r *queryResolver) FindDuplicateScenes(ctx context.Context, distance *int, durationDiff *float64) (ret [][]*models.Scene, err error) {
dist := 0
durDiff := -1.
if distance != nil {
dist = *distance
}
if durationDiff != nil {
durDiff = *durationDiff
}
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.Scene.FindDuplicates(ctx, dist)
ret, err = r.repository.Scene.FindDuplicates(ctx, dist, durDiff)
return err
}); err != nil {
return nil, err
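The resolver defaults the new duration tolerance to -1 when the argument is omitted, which by assumption disables duration matching inside the store's FindDuplicates (the actual comparison is not shown in this diff). A hedged sketch of what such a tolerance usually means when comparing two candidate durations:

```go
package main

import (
	"fmt"
	"math"
)

// withinDurationDiff reports whether two durations (in seconds) are close
// enough to be grouped as duplicates. By assumption here, a negative
// tolerance (the resolver's -1 default) disables the check entirely.
func withinDurationDiff(a, b, durationDiff float64) bool {
	if durationDiff < 0 {
		return true
	}
	return math.Abs(a-b) <= durationDiff
}

func main() {
	fmt.Println(withinDurationDiff(120.0, 121.5, 2))  // true
	fmt.Println(withinDurationDiff(120.0, 150.0, 2))  // false
	fmt.Println(withinDurationDiff(120.0, 150.0, -1)) // true: check disabled
}
```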
@@ -34,7 +34,7 @@ func (r *queryResolver) SceneStreams(ctx context.Context, id *string) ([]*manage
config := manager.GetInstance().Config

baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
builder := urlbuilders.NewSceneURLBuilder(baseURL, scene.ID)
builder := urlbuilders.NewSceneURLBuilder(baseURL, scene)
apiKey := config.GetAPIKey()

return manager.GetSceneStreamPaths(scene, builder.GetStreamURL(apiKey), config.GetMaxStreamingTranscodeSize())
@ -8,7 +8,6 @@ import (
|
|||
"net/http"
|
||||
"os/exec"
|
||||
"strconv"
|
||||
"syscall"
|
||||
|
||||
"github.com/go-chi/chi"
|
||||
"github.com/stashapp/stash/internal/manager"
|
||||
|
|
@ -19,6 +18,7 @@ import (
|
|||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/txn"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
type ImageFinder interface {
|
||||
|
|
@ -40,6 +40,7 @@ func (rs imageRoutes) Routes() chi.Router {
|
|||
|
||||
r.Get("/image", rs.Image)
|
||||
r.Get("/thumbnail", rs.Thumbnail)
|
||||
r.Get("/preview", rs.Preview)
|
||||
})
|
||||
|
||||
return r
|
||||
|
|
@ -51,12 +52,10 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
|
|||
img := r.Context().Value(imageKey).(*models.Image)
|
||||
filepath := manager.GetInstance().Paths.Generated.GetThumbnailPath(img.Checksum, models.DefaultGthumbWidth)
|
||||
|
||||
w.Header().Add("Cache-Control", "max-age=604800000")
|
||||
|
||||
// if the thumbnail doesn't exist, encode on the fly
|
||||
exists, _ := fsutil.FileExists(filepath)
|
||||
if exists {
|
||||
http.ServeFile(w, r, filepath)
|
||||
utils.ServeStaticFile(w, r, filepath)
|
||||
} else {
|
||||
const useDefault = true
|
||||
|
||||
|
|
@ -66,13 +65,19 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
encoder := image.NewThumbnailEncoder(manager.GetInstance().FFMPEG)
|
||||
clipPreviewOptions := image.ClipPreviewOptions{
|
||||
InputArgs: manager.GetInstance().Config.GetTranscodeInputArgs(),
|
||||
OutputArgs: manager.GetInstance().Config.GetTranscodeOutputArgs(),
|
||||
Preset: manager.GetInstance().Config.GetPreviewPreset().String(),
|
||||
}
|
||||
|
||||
encoder := image.NewThumbnailEncoder(manager.GetInstance().FFMPEG, manager.GetInstance().FFProbe, clipPreviewOptions)
|
||||
data, err := encoder.GetThumbnail(f, models.DefaultGthumbWidth)
|
||||
if err != nil {
|
||||
// don't log for unsupported image format
|
||||
// don't log for file not found - can optionally be logged in serveImage
|
||||
if !errors.Is(err, image.ErrNotSupportedForThumbnail) && !errors.Is(err, fs.ErrNotExist) {
|
||||
logger.Errorf("error generating thumbnail for %s: %v", f.Path, err)
|
||||
logger.Errorf("error generating thumbnail for %s: %v", f.Base().Path, err)
|
||||
|
||||
var exitErr *exec.ExitError
|
||||
if errors.As(err, &exitErr) {
|
||||
|
|
@@ -88,16 +93,24 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
// write the generated thumbnail to disk if enabled
if manager.GetInstance().Config.IsWriteImageThumbnails() {
logger.Debugf("writing thumbnail to disk: %s", img.Path)
if err := fsutil.WriteFile(filepath, data); err != nil {
logger.Errorf("error writing thumbnail for image %s: %v", img.Path, err)
if err := fsutil.WriteFile(filepath, data); err == nil {
utils.ServeStaticFile(w, r, filepath)
return
}
logger.Errorf("error writing thumbnail for image %s: %v", img.Path, err)
}
if n, err := w.Write(data); err != nil && !errors.Is(err, syscall.EPIPE) {
logger.Errorf("error serving thumbnail (wrote %v bytes out of %v): %v", n, len(data), err)
}
utils.ServeStaticContent(w, r, data)
}
}

func (rs imageRoutes) Preview(w http.ResponseWriter, r *http.Request) {
img := r.Context().Value(imageKey).(*models.Image)
filepath := manager.GetInstance().Paths.Generated.GetClipPreviewPath(img.Checksum, models.DefaultGthumbWidth)

// don't check if the preview exists - we'll just return a 404 if it doesn't
utils.ServeStaticFile(w, r, filepath)
}

func (rs imageRoutes) Image(w http.ResponseWriter, r *http.Request) {
i := r.Context().Value(imageKey).(*models.Image)
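These handlers switch from raw `w.Write`/`http.ServeFile` calls to `utils.ServeStaticFile`/`utils.ServeStaticContent`. Those helpers are not part of this diff; the sketch below only illustrates the likely idea behind them, namely routing bytes through `http.ServeContent` so Range and conditional requests are handled uniformly:

```go
package main

import (
	"bytes"
	"net/http"
	"time"
)

// serveStaticContent is a stand-in for a ServeStaticContent-style helper:
// delegating to http.ServeContent gives Content-Length, Range support and
// conditional-request handling for free.
func serveStaticContent(w http.ResponseWriter, r *http.Request, data []byte) {
	// An empty name disables extension-based Content-Type detection and a
	// zero time disables Last-Modified handling; callers can set
	// Content-Type themselves beforehand.
	http.ServeContent(w, r, "", time.Time{}, bytes.NewReader(data))
}

func main() {
	http.HandleFunc("/demo", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/plain")
		serveStaticContent(w, r, []byte("hello"))
	})
	_ = http.ListenAndServe("127.0.0.1:8080", nil)
}
```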
@ -109,7 +122,7 @@ func (rs imageRoutes) serveImage(w http.ResponseWriter, r *http.Request, i *mode
|
|||
const defaultImageImage = "image/image.svg"
|
||||
|
||||
if i.Files.Primary() != nil {
|
||||
err := i.Files.Primary().Serve(&file.OsFS{}, w, r)
|
||||
err := i.Files.Primary().Base().Serve(&file.OsFS{}, w, r)
|
||||
if err == nil {
|
||||
return
|
||||
}
|
||||
|
|
@ -131,8 +144,8 @@ func (rs imageRoutes) serveImage(w http.ResponseWriter, r *http.Request, i *mode
|
|||
// fall back to static image
|
||||
f, _ := static.Image.Open(defaultImageImage)
|
||||
defer f.Close()
|
||||
stat, _ := f.Stat()
|
||||
http.ServeContent(w, r, "image.svg", stat.ModTime(), f.(io.ReadSeeker))
|
||||
image, _ := io.ReadAll(f)
|
||||
utils.ServeImage(w, r, image)
|
||||
}
|
||||
|
||||
// endregion
|
||||
|
|
|
|||
|
|
@ -58,9 +58,7 @@ func (rs movieRoutes) FrontImage(w http.ResponseWriter, r *http.Request) {
|
|||
image, _ = utils.ProcessBase64Image(models.DefaultMovieImage)
|
||||
}
|
||||
|
||||
if err := utils.ServeImage(image, w, r); err != nil {
|
||||
logger.Warnf("error serving movie front image: %v", err)
|
||||
}
|
||||
utils.ServeImage(w, r, image)
|
||||
}
|
||||
|
||||
func (rs movieRoutes) BackImage(w http.ResponseWriter, r *http.Request) {
|
||||
|
|
@ -85,9 +83,7 @@ func (rs movieRoutes) BackImage(w http.ResponseWriter, r *http.Request) {
|
|||
image, _ = utils.ProcessBase64Image(models.DefaultMovieImage)
|
||||
}
|
||||
|
||||
if err := utils.ServeImage(image, w, r); err != nil {
|
||||
logger.Warnf("error serving movie back image: %v", err)
|
||||
}
|
||||
utils.ServeImage(w, r, image)
|
||||
}
|
||||
|
||||
func (rs movieRoutes) MovieCtx(next http.Handler) http.Handler {
|
||||
|
|
|
|||
|
|
@ -54,13 +54,11 @@ func (rs performerRoutes) Image(w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
}
|
||||
|
||||
if len(image) == 0 || defaultParam == "true" {
|
||||
if len(image) == 0 {
|
||||
image, _ = getRandomPerformerImageUsingName(performer.Name, performer.Gender, config.GetInstance().GetCustomPerformerImageLocation())
|
||||
}
|
||||
|
||||
if err := utils.ServeImage(image, w, r); err != nil {
|
||||
logger.Warnf("error serving performer image: %v", err)
|
||||
}
|
||||
utils.ServeImage(w, r, image)
|
||||
}
|
||||
|
||||
func (rs performerRoutes) PerformerCtx(next http.Handler) http.Handler {
|
||||
|
|
|
|||
|
|
@@ -88,24 +88,12 @@ func (rs sceneRoutes) Routes() chi.Router {
// region Handlers

func (rs sceneRoutes) StreamDirect(w http.ResponseWriter, r *http.Request) {

scene := r.Context().Value(sceneKey).(*models.Scene)
// #3526 - return 404 if the scene does not have any files
if scene.Path == "" {
w.WriteHeader(http.StatusNotFound)
return
ss := manager.SceneServer{
TxnManager: rs.txnManager,
SceneCoverGetter: rs.sceneFinder,
}

sceneHash := scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm())

filepath := manager.GetInstance().Paths.Scene.GetStreamPath(scene.Path, sceneHash)
streamRequestCtx := ffmpeg.NewStreamRequestContext(w, r)

// #2579 - hijacking and closing the connection here causes video playback to fail in Safari
// We trust that the request context will be closed, so we don't need to call Cancel on the
// returned context here.
_ = manager.GetInstance().ReadLockManager.ReadLock(streamRequestCtx, filepath)
http.ServeFile(w, r, filepath)
ss.StreamSceneDirect(scene, w, r)
}
func (rs sceneRoutes) StreamMp4(w http.ResponseWriter, r *http.Request) {
|
||||
|
|
@ -266,22 +254,16 @@ func (rs sceneRoutes) Preview(w http.ResponseWriter, r *http.Request) {
|
|||
scene := r.Context().Value(sceneKey).(*models.Scene)
|
||||
sceneHash := scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm())
|
||||
filepath := manager.GetInstance().Paths.Scene.GetVideoPreviewPath(sceneHash)
|
||||
serveFileNoCache(w, r, filepath)
|
||||
}
|
||||
|
||||
// serveFileNoCache serves the provided file, ensuring that the response
|
||||
// contains headers to prevent caching.
|
||||
func serveFileNoCache(w http.ResponseWriter, r *http.Request, filepath string) {
|
||||
w.Header().Add("Cache-Control", "no-cache")
|
||||
|
||||
http.ServeFile(w, r, filepath)
|
||||
utils.ServeStaticFile(w, r, filepath)
|
||||
}
|
||||
|
||||
func (rs sceneRoutes) Webp(w http.ResponseWriter, r *http.Request) {
|
||||
scene := r.Context().Value(sceneKey).(*models.Scene)
|
||||
sceneHash := scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm())
|
||||
filepath := manager.GetInstance().Paths.Scene.GetWebpPreviewPath(sceneHash)
|
||||
http.ServeFile(w, r, filepath)
|
||||
|
||||
utils.ServeStaticFile(w, r, filepath)
|
||||
}
|
||||
|
||||
func (rs sceneRoutes) getChapterVttTitle(ctx context.Context, marker *models.SceneMarker) (*string, error) {
|
||||
|
|
@ -355,7 +337,7 @@ func (rs sceneRoutes) VttChapter(w http.ResponseWriter, r *http.Request) {
|
|||
vtt := strings.Join(vttLines, "\n")
|
||||
|
||||
w.Header().Set("Content-Type", "text/vtt")
|
||||
_, _ = w.Write([]byte(vtt))
|
||||
utils.ServeStaticContent(w, r, []byte(vtt))
|
||||
}
|
||||
|
||||
func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) {
|
||||
|
|
@ -366,9 +348,10 @@ func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) {
|
|||
} else {
|
||||
sceneHash = chi.URLParam(r, "sceneHash")
|
||||
}
|
||||
w.Header().Set("Content-Type", "text/vtt")
|
||||
filepath := manager.GetInstance().Paths.Scene.GetSpriteVttFilePath(sceneHash)
|
||||
http.ServeFile(w, r, filepath)
|
||||
|
||||
w.Header().Set("Content-Type", "text/vtt")
|
||||
utils.ServeStaticFile(w, r, filepath)
|
||||
}
|
||||
|
||||
func (rs sceneRoutes) VttSprite(w http.ResponseWriter, r *http.Request) {
|
||||
|
|
@ -379,23 +362,24 @@ func (rs sceneRoutes) VttSprite(w http.ResponseWriter, r *http.Request) {
|
|||
} else {
|
||||
sceneHash = chi.URLParam(r, "sceneHash")
|
||||
}
|
||||
w.Header().Set("Content-Type", "image/jpeg")
|
||||
filepath := manager.GetInstance().Paths.Scene.GetSpriteImageFilePath(sceneHash)
|
||||
http.ServeFile(w, r, filepath)
|
||||
|
||||
utils.ServeStaticFile(w, r, filepath)
|
||||
}
|
||||
|
||||
func (rs sceneRoutes) Funscript(w http.ResponseWriter, r *http.Request) {
|
||||
s := r.Context().Value(sceneKey).(*models.Scene)
|
||||
funscript := video.GetFunscriptPath(s.Path)
|
||||
serveFileNoCache(w, r, funscript)
|
||||
filepath := video.GetFunscriptPath(s.Path)
|
||||
|
||||
utils.ServeStaticFile(w, r, filepath)
|
||||
}
|
||||
|
||||
func (rs sceneRoutes) InteractiveHeatmap(w http.ResponseWriter, r *http.Request) {
|
||||
scene := r.Context().Value(sceneKey).(*models.Scene)
|
||||
sceneHash := scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm())
|
||||
w.Header().Set("Content-Type", "image/png")
|
||||
filepath := manager.GetInstance().Paths.Scene.GetInteractiveHeatmapPath(sceneHash)
|
||||
http.ServeFile(w, r, filepath)
|
||||
|
||||
utils.ServeStaticFile(w, r, filepath)
|
||||
}
|
||||
|
||||
func (rs sceneRoutes) Caption(w http.ResponseWriter, r *http.Request, lang string, ext string) {
|
||||
|
|
@ -434,16 +418,17 @@ func (rs sceneRoutes) Caption(w http.ResponseWriter, r *http.Request, lang strin
|
|||
return
|
||||
}
|
||||
|
||||
var b bytes.Buffer
|
||||
err = sub.WriteToWebVTT(&b)
|
||||
var buf bytes.Buffer
|
||||
|
||||
err = sub.WriteToWebVTT(&buf)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "text/vtt")
|
||||
w.Header().Add("Cache-Control", "no-cache")
|
||||
_, _ = b.WriteTo(w)
|
||||
utils.ServeStaticContent(w, r, buf.Bytes())
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -483,7 +468,7 @@ func (rs sceneRoutes) SceneMarkerStream(w http.ResponseWriter, r *http.Request)
|
|||
}
|
||||
|
||||
filepath := manager.GetInstance().Paths.SceneMarkers.GetVideoPreviewPath(sceneHash, int(sceneMarker.Seconds))
|
||||
http.ServeFile(w, r, filepath)
|
||||
utils.ServeStaticFile(w, r, filepath)
|
||||
}
|
||||
|
||||
func (rs sceneRoutes) SceneMarkerPreview(w http.ResponseWriter, r *http.Request) {
|
||||
|
|
@ -516,12 +501,10 @@ func (rs sceneRoutes) SceneMarkerPreview(w http.ResponseWriter, r *http.Request)
|
|||
exists, _ := fsutil.FileExists(filepath)
|
||||
if !exists {
|
||||
w.Header().Set("Content-Type", "image/png")
|
||||
w.Header().Set("Cache-Control", "no-store")
|
||||
_, _ = w.Write(utils.PendingGenerateResource)
|
||||
return
|
||||
utils.ServeStaticContent(w, r, utils.PendingGenerateResource)
|
||||
} else {
|
||||
utils.ServeStaticFile(w, r, filepath)
|
||||
}
|
||||
|
||||
http.ServeFile(w, r, filepath)
|
||||
}
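The marker preview and screenshot routes keep serving a static placeholder until the generated asset exists on disk; the change above just routes both branches through the shared static-serving helpers. A minimal stand-in for the overall pattern (placeholder bytes and paths here are illustrative, not the real PendingGenerateResource):

```go
package main

import (
	"net/http"
	"os"
)

// pendingPNG stands in for the bundled "generation pending" image bytes.
var pendingPNG = []byte("placeholder")

// servePreview returns the generated file when it exists and a static
// placeholder otherwise, so the client always gets a response while
// generation is still pending.
func servePreview(w http.ResponseWriter, r *http.Request, path string) {
	if _, err := os.Stat(path); err != nil {
		w.Header().Set("Content-Type", "image/png")
		_, _ = w.Write(pendingPNG)
		return
	}
	http.ServeFile(w, r, path)
}

func main() {
	http.HandleFunc("/preview", func(w http.ResponseWriter, r *http.Request) {
		servePreview(w, r, "generated/preview.mp4")
	})
	_ = http.ListenAndServe("127.0.0.1:8080", nil)
}
```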
|
||||
|
||||
func (rs sceneRoutes) SceneMarkerScreenshot(w http.ResponseWriter, r *http.Request) {
|
||||
|
|
@ -554,12 +537,10 @@ func (rs sceneRoutes) SceneMarkerScreenshot(w http.ResponseWriter, r *http.Reque
|
|||
exists, _ := fsutil.FileExists(filepath)
|
||||
if !exists {
|
||||
w.Header().Set("Content-Type", "image/png")
|
||||
w.Header().Set("Cache-Control", "no-store")
|
||||
_, _ = w.Write(utils.PendingGenerateResource)
|
||||
return
|
||||
utils.ServeStaticContent(w, r, utils.PendingGenerateResource)
|
||||
} else {
|
||||
utils.ServeStaticFile(w, r, filepath)
|
||||
}
|
||||
|
||||
http.ServeFile(w, r, filepath)
|
||||
}
|
||||
|
||||
// endregion
|
||||
|
|
|
|||
|
|
@ -3,10 +3,12 @@ package api
|
|||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"io"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/go-chi/chi"
|
||||
"github.com/stashapp/stash/internal/static"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/studio"
|
||||
|
|
@ -55,12 +57,17 @@ func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
|
||||
if len(image) == 0 {
|
||||
image, _ = utils.ProcessBase64Image(models.DefaultStudioImage)
|
||||
const defaultStudioImage = "studio/studio.svg"
|
||||
|
||||
// fall back to static image
|
||||
f, _ := static.Studio.Open(defaultStudioImage)
|
||||
defer f.Close()
|
||||
stat, _ := f.Stat()
|
||||
http.ServeContent(w, r, "studio.svg", stat.ModTime(), f.(io.ReadSeeker))
|
||||
return
|
||||
}
|
||||
|
||||
if err := utils.ServeImage(image, w, r); err != nil {
|
||||
logger.Warnf("error serving studio image: %v", err)
|
||||
}
|
||||
utils.ServeImage(w, r, image)
|
||||
}
|
||||
|
||||
func (rs studioRoutes) StudioCtx(next http.Handler) http.Handler {
|
||||
|
|
|
|||
|
|
@ -3,10 +3,12 @@ package api
|
|||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"io"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/go-chi/chi"
|
||||
"github.com/stashapp/stash/internal/static"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/tag"
|
||||
|
|
@ -55,12 +57,17 @@ func (rs tagRoutes) Image(w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
|
||||
if len(image) == 0 {
|
||||
image = models.DefaultTagImage
|
||||
const defaultTagImage = "tag/tag.svg"
|
||||
|
||||
// fall back to static image
|
||||
f, _ := static.Tag.Open(defaultTagImage)
|
||||
defer f.Close()
|
||||
stat, _ := f.Stat()
|
||||
http.ServeContent(w, r, "tag.svg", stat.ModTime(), f.(io.ReadSeeker))
|
||||
return
|
||||
}
|
||||
|
||||
if err := utils.ServeImage(image, w, r); err != nil {
|
||||
logger.Warnf("error serving tag image: %v", err)
|
||||
}
|
||||
utils.ServeImage(w, r, image)
|
||||
}
|
||||
|
||||
func (rs tagRoutes) TagCtx(next http.Handler) http.Handler {
|
||||
|
|
|
|||
|
|
@ -27,17 +27,25 @@ import (
|
|||
"github.com/gorilla/websocket"
|
||||
"github.com/vearutop/statigz"
|
||||
|
||||
"github.com/go-chi/cors"
|
||||
"github.com/go-chi/httplog"
|
||||
"github.com/rs/cors"
|
||||
"github.com/stashapp/stash/internal/api/loaders"
|
||||
"github.com/stashapp/stash/internal/manager"
|
||||
"github.com/stashapp/stash/internal/manager/config"
|
||||
"github.com/stashapp/stash/pkg/fsutil"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/plugin"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"github.com/stashapp/stash/ui"
|
||||
)
|
||||
|
||||
const (
|
||||
loginEndpoint = "/login"
|
||||
logoutEndpoint = "/logout"
|
||||
gqlEndpoint = "/graphql"
|
||||
playgroundEndpoint = "/playground"
|
||||
)
|
||||
|
||||
var version string
|
||||
var buildstamp string
|
||||
var githash string
|
||||
|
|
@ -51,6 +59,7 @@ func Start() error {
|
|||
r := chi.NewRouter()
|
||||
|
||||
r.Use(middleware.Heartbeat("/healthz"))
|
||||
r.Use(cors.AllowAll().Handler)
|
||||
r.Use(authenticateHandler())
|
||||
visitedPluginHandler := manager.GetInstance().SessionStore.VisitedPluginHandler()
|
||||
r.Use(visitedPluginHandler)
|
||||
|
|
@ -67,7 +76,6 @@ func Start() error {
|
|||
r.Use(SecurityHeadersMiddleware)
|
||||
r.Use(middleware.DefaultCompress)
|
||||
r.Use(middleware.StripSlashes)
|
||||
r.Use(cors.AllowAll().Handler)
|
||||
r.Use(BaseURLMiddleware)
|
||||
|
||||
recoverFunc := func(ctx context.Context, err interface{}) error {
|
||||
|
|
@ -120,7 +128,10 @@ func Start() error {
|
|||
gqlSrv.SetQueryCache(gqlLru.New(1000))
|
||||
gqlSrv.Use(gqlExtension.Introspection{})
|
||||
|
||||
gqlSrv.SetErrorPresenter(gqlErrorHandler)
|
||||
|
||||
gqlHandlerFunc := func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Cache-Control", "no-store")
|
||||
gqlSrv.ServeHTTP(w, r)
|
||||
}
|
||||
|
||||
|
|
@ -130,14 +141,12 @@ func Start() error {
|
|||
gqlHandler := visitedPluginHandler(dataloaders.Middleware(http.HandlerFunc(gqlHandlerFunc)))
|
||||
manager.GetInstance().PluginCache.RegisterGQLHandler(gqlHandler)
|
||||
|
||||
r.HandleFunc("/graphql", gqlHandlerFunc)
|
||||
r.HandleFunc("/playground", gqlPlayground.Handler("GraphQL playground", "/graphql"))
|
||||
|
||||
// session handlers
|
||||
r.Post(loginEndPoint, handleLogin(loginUIBox))
|
||||
r.Get(logoutEndPoint, handleLogout(loginUIBox))
|
||||
|
||||
r.Get(loginEndPoint, getLoginHandler(loginUIBox))
|
||||
r.HandleFunc(gqlEndpoint, gqlHandlerFunc)
|
||||
r.HandleFunc(playgroundEndpoint, func(w http.ResponseWriter, r *http.Request) {
|
||||
setPageSecurityHeaders(w, r)
|
||||
endpoint := getProxyPrefix(r) + gqlEndpoint
|
||||
gqlPlayground.Handler("GraphQL playground", endpoint)(w, r)
|
||||
})
|
||||
|
||||
r.Mount("/performer", performerRoutes{
|
||||
txnManager: txnManager,
|
||||
|
|
@ -172,36 +181,17 @@ func Start() error {
|
|||
|
||||
r.HandleFunc("/css", cssHandler(c, pluginCache))
|
||||
r.HandleFunc("/javascript", javascriptHandler(c, pluginCache))
|
||||
r.HandleFunc("/customlocales", func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
if c.GetCustomLocalesEnabled() {
|
||||
// search for custom-locales.json in current directory, then $HOME/.stash
|
||||
fn := c.GetCustomLocalesPath()
|
||||
exists, _ := fsutil.FileExists(fn)
|
||||
if exists {
|
||||
http.ServeFile(w, r, fn)
|
||||
return
|
||||
}
|
||||
}
|
||||
_, _ = w.Write([]byte("{}"))
|
||||
})
|
||||
r.HandleFunc("/customlocales", customLocalesHandler(c))
|
||||
|
||||
r.HandleFunc("/login*", func(w http.ResponseWriter, r *http.Request) {
|
||||
ext := path.Ext(r.URL.Path)
|
||||
if ext == ".html" || ext == "" {
|
||||
prefix := getProxyPrefix(r.Header)
|
||||
staticLoginUI := statigz.FileServer(loginUIBox.(fs.ReadDirFS))
|
||||
|
||||
data := getLoginPage(loginUIBox)
|
||||
baseURLIndex := strings.Replace(string(data), "%BASE_URL%", prefix+"/", 2)
|
||||
_, _ = w.Write([]byte(baseURLIndex))
|
||||
} else {
|
||||
r.URL.Path = strings.Replace(r.URL.Path, loginEndPoint, "", 1)
|
||||
loginRoot, err := fs.Sub(loginUIBox, loginRootDir)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
http.FileServer(http.FS(loginRoot)).ServeHTTP(w, r)
|
||||
}
|
||||
r.Get(loginEndpoint, handleLogin(loginUIBox))
|
||||
r.Post(loginEndpoint, handleLoginPost(loginUIBox))
|
||||
r.Get(logoutEndpoint, handleLogout())
|
||||
r.HandleFunc(loginEndpoint+"/*", func(w http.ResponseWriter, r *http.Request) {
|
||||
r.URL.Path = strings.TrimPrefix(r.URL.Path, loginEndpoint)
|
||||
w.Header().Set("Cache-Control", "no-cache")
|
||||
staticLoginUI.ServeHTTP(w, r)
|
||||
})
|
||||
|
||||
// Serve static folders
|
||||
|
|
@ -213,12 +203,10 @@ func Start() error {
|
|||
}
|
||||
|
||||
customUILocation := c.GetCustomUILocation()
|
||||
static := statigz.FileServer(uiBox)
|
||||
staticUI := statigz.FileServer(uiBox.(fs.ReadDirFS))
|
||||
|
||||
// Serve the web app
|
||||
r.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) {
|
||||
const uiRootDir = "v2.5/build"
|
||||
|
||||
ext := path.Ext(r.URL.Path)
|
||||
|
||||
if customUILocation != "" {
|
||||
|
|
@ -232,29 +220,29 @@ func Start() error {
|
|||
|
||||
if ext == ".html" || ext == "" {
|
||||
themeColor := c.GetThemeColor()
|
||||
data, err := uiBox.ReadFile(uiRootDir + "/index.html")
|
||||
data, err := fs.ReadFile(uiBox, "index.html")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
indexHtml := string(data)
|
||||
|
||||
prefix := getProxyPrefix(r.Header)
|
||||
baseURLIndex := strings.ReplaceAll(string(data), "%COLOR%", themeColor)
|
||||
baseURLIndex = strings.ReplaceAll(baseURLIndex, "/%BASE_URL%", prefix)
|
||||
baseURLIndex = strings.Replace(baseURLIndex, "base href=\"/\"", fmt.Sprintf("base href=\"%s\"", prefix+"/"), 1)
|
||||
_, _ = w.Write([]byte(baseURLIndex))
|
||||
prefix := getProxyPrefix(r)
|
||||
indexHtml = strings.ReplaceAll(indexHtml, "%COLOR%", themeColor)
|
||||
indexHtml = strings.Replace(indexHtml, `<base href="/"`, fmt.Sprintf(`<base href="%s/"`, prefix), 1)
|
||||
|
||||
w.Header().Set("Content-Type", "text/html")
|
||||
setPageSecurityHeaders(w, r)
|
||||
|
||||
utils.ServeStaticContent(w, r, []byte(indexHtml))
|
||||
} else {
|
||||
isStatic, _ := path.Match("/static/*/*", r.URL.Path)
|
||||
isStatic, _ := path.Match("/assets/*", r.URL.Path)
|
||||
if isStatic {
|
||||
w.Header().Add("Cache-Control", "max-age=604800000")
|
||||
w.Header().Set("Cache-Control", "public, max-age=31536000, immutable")
|
||||
} else {
|
||||
w.Header().Set("Cache-Control", "no-cache")
|
||||
}
|
||||
|
||||
prefix := getProxyPrefix(r.Header)
|
||||
if prefix != "" {
|
||||
r.URL.Path = strings.TrimPrefix(r.URL.Path, prefix)
|
||||
}
|
||||
r.URL.Path = uiRootDir + r.URL.Path
|
||||
|
||||
static.ServeHTTP(w, r)
|
||||
staticUI.ServeHTTP(w, r)
|
||||
}
|
||||
})
|
||||
|
||||
|
|
@ -305,52 +293,34 @@ func Start() error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func copyFile(w io.Writer, path string) (time.Time, error) {
|
||||
func copyFile(w io.Writer, path string) error {
|
||||
f, err := os.Open(path)
|
||||
if err != nil {
|
||||
return time.Time{}, err
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
info, err := f.Stat()
|
||||
if err != nil {
|
||||
return time.Time{}, err
|
||||
}
|
||||
|
||||
_, err = io.Copy(w, f)
|
||||
|
||||
return info.ModTime(), err
|
||||
return err
|
||||
}
|
||||
|
||||
func serveFiles(w http.ResponseWriter, r *http.Request, name string, paths []string) {
|
||||
func serveFiles(w http.ResponseWriter, r *http.Request, paths []string) {
|
||||
buffer := bytes.Buffer{}
|
||||
|
||||
latestModTime := time.Time{}
|
||||
|
||||
for _, path := range paths {
|
||||
modTime, err := copyFile(&buffer, path)
|
||||
err := copyFile(&buffer, path)
|
||||
if err != nil {
|
||||
logger.Errorf("error serving file %s: %v", path, err)
|
||||
} else {
|
||||
if modTime.After(latestModTime) {
|
||||
latestModTime = modTime
|
||||
}
|
||||
buffer.Write([]byte("\n"))
|
||||
}
|
||||
buffer.Write([]byte("\n"))
|
||||
}
|
||||
|
||||
// Always revalidate with server
|
||||
w.Header().Set("Cache-Control", "no-cache")
|
||||
|
||||
bufferReader := bytes.NewReader(buffer.Bytes())
|
||||
http.ServeContent(w, r, name, latestModTime, bufferReader)
|
||||
utils.ServeStaticContent(w, r, buffer.Bytes())
|
||||
}
|
||||
|
||||
func cssHandler(c *config.Instance, pluginCache *plugin.Cache) func(w http.ResponseWriter, r *http.Request) {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
// concatenate with plugin css files
|
||||
w.Header().Set("Content-Type", "text/css")
|
||||
|
||||
// add plugin css files first
|
||||
var paths []string
|
||||
|
||||
|
|
@ -367,14 +337,13 @@ func cssHandler(c *config.Instance, pluginCache *plugin.Cache) func(w http.Respo
|
|||
}
|
||||
}
|
||||
|
||||
serveFiles(w, r, "custom.css", paths)
|
||||
w.Header().Set("Content-Type", "text/css")
|
||||
serveFiles(w, r, paths)
|
||||
}
|
||||
}
|
||||
|
||||
func javascriptHandler(c *config.Instance, pluginCache *plugin.Cache) func(w http.ResponseWriter, r *http.Request) {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "text/javascript")
|
||||
|
||||
// add plugin javascript files first
|
||||
var paths []string
|
||||
|
||||
|
|
@ -391,7 +360,33 @@ func javascriptHandler(c *config.Instance, pluginCache *plugin.Cache) func(w htt
|
|||
}
|
||||
}
|
||||
|
||||
serveFiles(w, r, "custom.js", paths)
|
||||
w.Header().Set("Content-Type", "text/javascript")
|
||||
serveFiles(w, r, paths)
|
||||
}
|
||||
}
|
||||
|
||||
func customLocalesHandler(c *config.Instance) func(w http.ResponseWriter, r *http.Request) {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
buffer := bytes.Buffer{}
|
||||
|
||||
if c.GetCustomLocalesEnabled() {
|
||||
// search for custom-locales.json in current directory, then $HOME/.stash
|
||||
path := c.GetCustomLocalesPath()
|
||||
exists, _ := fsutil.FileExists(path)
|
||||
if exists {
|
||||
err := copyFile(&buffer, path)
|
||||
if err != nil {
|
||||
logger.Errorf("error serving file %s: %v", path, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if buffer.Len() == 0 {
|
||||
buffer.Write([]byte("{}"))
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
utils.ServeStaticContent(w, r, buffer.Bytes())
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@@ -478,6 +473,47 @@ func makeTLSConfig(c *config.Instance) (*tls.Config, error) {
return tlsConfig, nil
}

func setPageSecurityHeaders(w http.ResponseWriter, r *http.Request) {
c := config.GetInstance()

defaultSrc := "data: 'self' 'unsafe-inline'"
connectSrc := "data: 'self'"
imageSrc := "data: *"
scriptSrc := "'self' 'unsafe-inline' 'unsafe-eval'"
styleSrc := "'self' 'unsafe-inline'"
mediaSrc := "blob: 'self'"

// Workaround Safari bug https://bugs.webkit.org/show_bug.cgi?id=201591
// Allows websocket requests to any origin
connectSrc += " ws: wss:"

// The graphql playground pulls its frontend from a cdn
if r.URL.Path == playgroundEndpoint {
connectSrc += " https://cdn.jsdelivr.net"
scriptSrc += " https://cdn.jsdelivr.net"
styleSrc += " https://cdn.jsdelivr.net"
}

if !c.IsNewSystem() && c.GetHandyKey() != "" {
connectSrc += " https://www.handyfeeling.com"
}

cspDirectives := fmt.Sprintf("default-src %s; connect-src %s; img-src %s; script-src %s; style-src %s; media-src %s;", defaultSrc, connectSrc, imageSrc, scriptSrc, styleSrc, mediaSrc)
cspDirectives += " worker-src blob:; child-src 'none'; object-src 'none'; form-action 'self';"

w.Header().Set("Referrer-Policy", "same-origin")
w.Header().Set("Content-Security-Policy", cspDirectives)
}

func SecurityHeadersMiddleware(next http.Handler) http.Handler {
fn := func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("X-Content-Type-Options", "nosniff")

next.ServeHTTP(w, r)
}
return http.HandlerFunc(fn)
}

type contextKey struct {
name string
}
@ -486,35 +522,6 @@ var (
|
|||
BaseURLCtxKey = &contextKey{"BaseURL"}
|
||||
)
|
||||
|
||||
func SecurityHeadersMiddleware(next http.Handler) http.Handler {
|
||||
fn := func(w http.ResponseWriter, r *http.Request) {
|
||||
c := config.GetInstance()
|
||||
connectableOrigins := "connect-src data: 'self'"
|
||||
|
||||
// Workaround Safari bug https://bugs.webkit.org/show_bug.cgi?id=201591
|
||||
// Allows websocket requests to any origin
|
||||
connectableOrigins += " ws: wss:"
|
||||
|
||||
// The graphql playground pulls its frontend from a cdn
|
||||
connectableOrigins += " https://cdn.jsdelivr.net "
|
||||
|
||||
if !c.IsNewSystem() && c.GetHandyKey() != "" {
|
||||
connectableOrigins += " https://www.handyfeeling.com"
|
||||
}
|
||||
connectableOrigins += "; "
|
||||
|
||||
cspDirectives := "default-src data: 'self' 'unsafe-inline';" + connectableOrigins + "img-src data: *; script-src 'self' https://cdn.jsdelivr.net 'unsafe-inline' 'unsafe-eval'; style-src 'self' https://cdn.jsdelivr.net 'unsafe-inline'; style-src-elem 'self' https://cdn.jsdelivr.net 'unsafe-inline'; media-src 'self' blob:; child-src 'none'; worker-src blob:; object-src 'none'; form-action 'self'"
|
||||
|
||||
w.Header().Set("Referrer-Policy", "same-origin")
|
||||
w.Header().Set("X-Content-Type-Options", "nosniff")
|
||||
w.Header().Set("X-XSS-Protection", "1")
|
||||
w.Header().Set("Content-Security-Policy", cspDirectives)
|
||||
|
||||
next.ServeHTTP(w, r)
|
||||
}
|
||||
return http.HandlerFunc(fn)
|
||||
}
|
||||
|
||||
func BaseURLMiddleware(next http.Handler) http.Handler {
|
||||
fn := func(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
|
|
@ -523,7 +530,7 @@ func BaseURLMiddleware(next http.Handler) http.Handler {
|
|||
if strings.Compare("https", r.URL.Scheme) == 0 || r.TLS != nil || r.Header.Get("X-Forwarded-Proto") == "https" {
|
||||
scheme = "https"
|
||||
}
|
||||
prefix := getProxyPrefix(r.Header)
|
||||
prefix := getProxyPrefix(r)
|
||||
|
||||
baseURL := scheme + "://" + r.Host + prefix
|
||||
|
||||
|
|
@@ -539,11 +546,6 @@ func BaseURLMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(fn)
}

func getProxyPrefix(headers http.Header) string {
prefix := ""
if headers.Get("X-Forwarded-Prefix") != "" {
prefix = strings.TrimRight(headers.Get("X-Forwarded-Prefix"), "/")
}

return prefix
func getProxyPrefix(r *http.Request) string {
return strings.TrimRight(r.Header.Get("X-Forwarded-Prefix"), "/")
}
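getProxyPrefix now reads X-Forwarded-Prefix straight from the request, so every URL the server builds (login redirects, the base href, the GraphQL endpoint) can be prefixed when the app sits behind a reverse proxy under a sub-path. A small standalone illustration of the header round-trip:

```go
package main

import (
	"fmt"
	"net/http"
	"strings"
)

const loginPath = "/login"

// getProxyPrefix mirrors the helper above: trailing slashes are trimmed so
// the prefix can be joined with absolute paths without doubling slashes.
func getProxyPrefix(r *http.Request) string {
	return strings.TrimRight(r.Header.Get("X-Forwarded-Prefix"), "/")
}

func main() {
	// A reverse proxy mounting the app under /stash would forward the prefix
	// like this (trailing slash on purpose to show the trimming).
	r, _ := http.NewRequest(http.MethodGet, "http://example.com/scenes", nil)
	r.Header.Set("X-Forwarded-Prefix", "/stash/")

	fmt.Println(getProxyPrefix(r) + loginPath) // prints: /stash/login
}
```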
@ -1,22 +1,25 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"embed"
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"io/fs"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/stashapp/stash/internal/manager"
|
||||
"github.com/stashapp/stash/internal/manager/config"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/session"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
const loginRootDir = "login"
|
||||
const returnURLParam = "returnURL"
|
||||
|
||||
func getLoginPage(loginUIBox embed.FS) []byte {
|
||||
data, err := loginUIBox.ReadFile(loginRootDir + "/login.html")
|
||||
func getLoginPage(loginUIBox fs.FS) []byte {
|
||||
data, err := fs.ReadFile(loginUIBox, "login.html")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
|
@ -28,42 +31,66 @@ type loginTemplateData struct {
|
|||
Error string
|
||||
}
|
||||
|
||||
func redirectToLogin(loginUIBox embed.FS, w http.ResponseWriter, returnURL string, loginError string) {
|
||||
data := getLoginPage(loginUIBox)
|
||||
templ, err := template.New("Login").Parse(string(data))
|
||||
func serveLoginPage(loginUIBox fs.FS, w http.ResponseWriter, r *http.Request, returnURL string, loginError string) {
|
||||
loginPage := string(getLoginPage(loginUIBox))
|
||||
prefix := getProxyPrefix(r)
|
||||
loginPage = strings.ReplaceAll(loginPage, "/%BASE_URL%", prefix)
|
||||
|
||||
templ, err := template.New("Login").Parse(loginPage)
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("error: %s", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
err = templ.Execute(w, loginTemplateData{URL: returnURL, Error: loginError})
|
||||
buffer := bytes.Buffer{}
|
||||
err = templ.Execute(&buffer, loginTemplateData{URL: returnURL, Error: loginError})
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("error: %s", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "text/html")
|
||||
setPageSecurityHeaders(w, r)
|
||||
|
||||
utils.ServeStaticContent(w, r, buffer.Bytes())
|
||||
}
|
||||
|
||||
func getLoginHandler(loginUIBox embed.FS) http.HandlerFunc {
|
||||
func handleLogin(loginUIBox fs.FS) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
returnURL := r.URL.Query().Get(returnURLParam)
|
||||
|
||||
if !config.GetInstance().HasCredentials() {
|
||||
http.Redirect(w, r, "/", http.StatusFound)
|
||||
if returnURL != "" {
|
||||
http.Redirect(w, r, returnURL, http.StatusFound)
|
||||
} else {
|
||||
prefix := getProxyPrefix(r)
|
||||
http.Redirect(w, r, prefix+"/", http.StatusFound)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
redirectToLogin(loginUIBox, w, r.URL.Query().Get(returnURLParam), "")
|
||||
serveLoginPage(loginUIBox, w, r, returnURL, "")
|
||||
}
|
||||
}
|
||||
|
||||
func handleLogin(loginUIBox embed.FS) http.HandlerFunc {
|
||||
func handleLoginPost(loginUIBox fs.FS) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
url := r.FormValue(returnURLParam)
|
||||
if url == "" {
|
||||
url = "/"
|
||||
url = getProxyPrefix(r) + "/"
|
||||
}
|
||||
|
||||
err := manager.GetInstance().SessionStore.Login(w, r)
|
||||
if errors.Is(err, session.ErrInvalidCredentials) {
|
||||
// redirect back to the login page with an error
|
||||
redirectToLogin(loginUIBox, w, url, "Username or password is invalid")
|
||||
if err != nil {
|
||||
// always log the error
|
||||
logger.Errorf("Error logging in: %v", err)
|
||||
}
|
||||
|
||||
var invalidCredentialsError *session.InvalidCredentialsError
|
||||
|
||||
if errors.As(err, &invalidCredentialsError) {
|
||||
// serve login page with an error
|
||||
serveLoginPage(loginUIBox, w, r, url, "Username or password is invalid")
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -76,7 +103,7 @@ func handleLogin(loginUIBox embed.FS) http.HandlerFunc {
|
|||
}
|
||||
}
|
||||
|
||||
func handleLogout(loginUIBox embed.FS) http.HandlerFunc {
|
||||
func handleLogout() http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
if err := manager.GetInstance().SessionStore.Logout(w, r); err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
|
|
@ -84,6 +111,11 @@ func handleLogout(loginUIBox embed.FS) http.HandlerFunc {
|
|||
}
|
||||
|
||||
// redirect to the login page if credentials are required
|
||||
getLoginHandler(loginUIBox)(w, r)
|
||||
prefix := getProxyPrefix(r)
|
||||
if config.GetInstance().HasCredentials() {
|
||||
http.Redirect(w, r, prefix+loginEndpoint, http.StatusFound)
|
||||
} else {
|
||||
http.Redirect(w, r, prefix+"/", http.StatusFound)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,19 +0,0 @@
|
|||
package urlbuilders
|
||||
|
||||
import "strconv"
|
||||
|
||||
type GalleryURLBuilder struct {
|
||||
BaseURL string
|
||||
GalleryID string
|
||||
}
|
||||
|
||||
func NewGalleryURLBuilder(baseURL string, galleryID int) GalleryURLBuilder {
|
||||
return GalleryURLBuilder{
|
||||
BaseURL: baseURL,
|
||||
GalleryID: strconv.Itoa(galleryID),
|
||||
}
|
||||
}
|
||||
|
||||
func (b GalleryURLBuilder) GetGalleryImageURL(fileIndex int) string {
|
||||
return b.BaseURL + "/gallery/" + b.GalleryID + "/" + strconv.Itoa(fileIndex)
|
||||
}
|
||||
|
|
@@ -3,12 +3,15 @@ package urlbuilders
import (
"strconv"

"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/models"
)

type ImageURLBuilder struct {
BaseURL string
ImageID string
Checksum string
UpdatedAt string
}

@@ -16,14 +19,23 @@ func NewImageURLBuilder(baseURL string, image *models.Image) ImageURLBuilder {
return ImageURLBuilder{
BaseURL: baseURL,
ImageID: strconv.Itoa(image.ID),
Checksum: image.Checksum,
UpdatedAt: strconv.FormatInt(image.UpdatedAt.Unix(), 10),
}
}

func (b ImageURLBuilder) GetImageURL() string {
return b.BaseURL + "/image/" + b.ImageID + "/image?" + b.UpdatedAt
return b.BaseURL + "/image/" + b.ImageID + "/image?t=" + b.UpdatedAt
}

func (b ImageURLBuilder) GetThumbnailURL() string {
return b.BaseURL + "/image/" + b.ImageID + "/thumbnail?" + b.UpdatedAt
return b.BaseURL + "/image/" + b.ImageID + "/thumbnail?t=" + b.UpdatedAt
}

func (b ImageURLBuilder) GetPreviewURL() string {
if exists, err := fsutil.FileExists(manager.GetInstance().Paths.Generated.GetClipPreviewPath(b.Checksum, models.DefaultGthumbWidth)); exists && err == nil {
return b.BaseURL + "/image/" + b.ImageID + "/preview?" + b.UpdatedAt
} else {
return ""
}
}
@ -19,10 +19,14 @@ func NewMovieURLBuilder(baseURL string, movie *models.Movie) MovieURLBuilder {
|
|||
}
|
||||
}
|
||||
|
||||
func (b MovieURLBuilder) GetMovieFrontImageURL() string {
|
||||
return b.BaseURL + "/movie/" + b.MovieID + "/frontimage?" + b.UpdatedAt
|
||||
func (b MovieURLBuilder) GetMovieFrontImageURL(hasImage bool) string {
|
||||
url := b.BaseURL + "/movie/" + b.MovieID + "/frontimage?t=" + b.UpdatedAt
|
||||
if !hasImage {
|
||||
url += "&default=true"
|
||||
}
|
||||
return url
|
||||
}
|
||||
|
||||
func (b MovieURLBuilder) GetMovieBackImageURL() string {
|
||||
return b.BaseURL + "/movie/" + b.MovieID + "/backimage?" + b.UpdatedAt
|
||||
return b.BaseURL + "/movie/" + b.MovieID + "/backimage?t=" + b.UpdatedAt
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,6 +20,10 @@ func NewPerformerURLBuilder(baseURL string, performer *models.Performer) Perform
|
|||
}
|
||||
}
|
||||
|
||||
func (b PerformerURLBuilder) GetPerformerImageURL() string {
|
||||
return b.BaseURL + "/performer/" + b.PerformerID + "/image?" + b.UpdatedAt
|
||||
func (b PerformerURLBuilder) GetPerformerImageURL(hasImage bool) string {
|
||||
url := b.BaseURL + "/performer/" + b.PerformerID + "/image?t=" + b.UpdatedAt
|
||||
if !hasImage {
|
||||
url += "&default=true"
|
||||
}
|
||||
return url
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,18 +4,21 @@ import (
|
|||
"fmt"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
type SceneURLBuilder struct {
|
||||
BaseURL string
|
||||
SceneID string
|
||||
BaseURL string
|
||||
SceneID string
|
||||
UpdatedAt string
|
||||
}
|
||||
|
||||
func NewSceneURLBuilder(baseURL string, sceneID int) SceneURLBuilder {
|
||||
func NewSceneURLBuilder(baseURL string, scene *models.Scene) SceneURLBuilder {
|
||||
return SceneURLBuilder{
|
||||
BaseURL: baseURL,
|
||||
SceneID: strconv.Itoa(sceneID),
|
||||
BaseURL: baseURL,
|
||||
SceneID: strconv.Itoa(scene.ID),
|
||||
UpdatedAt: strconv.FormatInt(scene.UpdatedAt.Unix(), 10),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -50,26 +53,14 @@ func (b SceneURLBuilder) GetSpriteURL(checksum string) string {
|
|||
return b.BaseURL + "/scene/" + checksum + "_sprite.jpg"
|
||||
}
|
||||
|
||||
func (b SceneURLBuilder) GetScreenshotURL(updateTime time.Time) string {
|
||||
return b.BaseURL + "/scene/" + b.SceneID + "/screenshot?" + strconv.FormatInt(updateTime.Unix(), 10)
|
||||
func (b SceneURLBuilder) GetScreenshotURL() string {
|
||||
return b.BaseURL + "/scene/" + b.SceneID + "/screenshot?t=" + b.UpdatedAt
|
||||
}
|
||||
|
||||
func (b SceneURLBuilder) GetChaptersVTTURL() string {
|
||||
return b.BaseURL + "/scene/" + b.SceneID + "/vtt/chapter"
|
||||
}
|
||||
|
||||
func (b SceneURLBuilder) GetSceneMarkerStreamURL(sceneMarkerID int) string {
|
||||
return b.BaseURL + "/scene/" + b.SceneID + "/scene_marker/" + strconv.Itoa(sceneMarkerID) + "/stream"
|
||||
}
|
||||
|
||||
func (b SceneURLBuilder) GetSceneMarkerStreamPreviewURL(sceneMarkerID int) string {
|
||||
return b.BaseURL + "/scene/" + b.SceneID + "/scene_marker/" + strconv.Itoa(sceneMarkerID) + "/preview"
|
||||
}
|
||||
|
||||
func (b SceneURLBuilder) GetSceneMarkerStreamScreenshotURL(sceneMarkerID int) string {
|
||||
return b.BaseURL + "/scene/" + b.SceneID + "/scene_marker/" + strconv.Itoa(sceneMarkerID) + "/screenshot"
|
||||
}
|
||||
|
||||
func (b SceneURLBuilder) GetFunscriptURL() string {
|
||||
return b.BaseURL + "/scene/" + b.SceneID + "/funscript"
|
||||
}
|
||||
|
|
|
|||
internal/api/urlbuilders/scene_markers.go (new file, 33 lines)

@@ -0,0 +1,33 @@
package urlbuilders

import (
"strconv"

"github.com/stashapp/stash/pkg/models"
)

type SceneMarkerURLBuilder struct {
BaseURL string
SceneID string
MarkerID string
}

func NewSceneMarkerURLBuilder(baseURL string, sceneMarker *models.SceneMarker) SceneMarkerURLBuilder {
return SceneMarkerURLBuilder{
BaseURL: baseURL,
SceneID: strconv.Itoa(int(sceneMarker.SceneID.Int64)),
MarkerID: strconv.Itoa(sceneMarker.ID),
}
}

func (b SceneMarkerURLBuilder) GetStreamURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/scene_marker/" + b.MarkerID + "/stream"
}

func (b SceneMarkerURLBuilder) GetPreviewURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/scene_marker/" + b.MarkerID + "/preview"
}

func (b SceneMarkerURLBuilder) GetScreenshotURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/scene_marker/" + b.MarkerID + "/screenshot"
}
@ -19,6 +19,10 @@ func NewStudioURLBuilder(baseURL string, studio *models.Studio) StudioURLBuilder
|
|||
}
|
||||
}
|
||||
|
||||
func (b StudioURLBuilder) GetStudioImageURL() string {
|
||||
return b.BaseURL + "/studio/" + b.StudioID + "/image?" + b.UpdatedAt
|
||||
func (b StudioURLBuilder) GetStudioImageURL(hasImage bool) string {
|
||||
url := b.BaseURL + "/studio/" + b.StudioID + "/image?t=" + b.UpdatedAt
|
||||
if !hasImage {
|
||||
url += "&default=true"
|
||||
}
|
||||
return url
|
||||
}
|
||||
|
|
|
|||
|
|
@ -19,6 +19,10 @@ func NewTagURLBuilder(baseURL string, tag *models.Tag) TagURLBuilder {
|
|||
}
|
||||
}
|
||||
|
||||
func (b TagURLBuilder) GetTagImageURL() string {
|
||||
return b.BaseURL + "/tag/" + b.TagID + "/image?" + b.UpdatedAt
|
||||
func (b TagURLBuilder) GetTagImageURL(hasImage bool) string {
|
||||
url := b.BaseURL + "/tag/" + b.TagID + "/image?t=" + b.UpdatedAt
|
||||
if !hasImage {
|
||||
url += "&default=true"
|
||||
}
|
||||
return url
|
||||
}
|
||||
|
|
|
|||
|
|
@ -440,14 +440,25 @@ func getRootObjects() []interface{} {
|
|||
return objs
|
||||
}
|
||||
|
||||
func getSortDirection(sceneFilter *models.SceneFilterType, sort string) models.SortDirectionEnum {
|
||||
direction := models.SortDirectionEnumDesc
|
||||
if sort == "title" {
|
||||
direction = models.SortDirectionEnumAsc
|
||||
}
|
||||
|
||||
return direction
|
||||
}
|
||||
|
||||
func (me *contentDirectoryService) getVideos(sceneFilter *models.SceneFilterType, parentID string, host string) []interface{} {
|
||||
var objs []interface{}
|
||||
|
||||
if err := txn.WithReadTxn(context.TODO(), me.txnManager, func(ctx context.Context) error {
|
||||
sort := "title"
|
||||
sort := me.VideoSortOrder
|
||||
direction := getSortDirection(sceneFilter, sort)
|
||||
findFilter := &models.FindFilterType{
|
||||
PerPage: &pageSize,
|
||||
Sort: &sort,
|
||||
PerPage: &pageSize,
|
||||
Sort: &sort,
|
||||
Direction: &direction,
|
||||
}
|
||||
|
||||
scenes, total, err := scene.QueryWithCount(ctx, me.repository.SceneFinder, sceneFilter, findFilter)
|
||||
|
|
@ -492,8 +503,10 @@ func (me *contentDirectoryService) getPageVideos(sceneFilter *models.SceneFilter
|
|||
parentID: parentID,
|
||||
}
|
||||
|
||||
sort := me.VideoSortOrder
|
||||
direction := getSortDirection(sceneFilter, sort)
|
||||
var err error
|
||||
objs, err = pager.getPageVideos(ctx, me.repository.SceneFinder, me.repository.FileFinder, page, host)
|
||||
objs, err = pager.getPageVideos(ctx, me.repository.SceneFinder, me.repository.FileFinder, page, host, sort, direction)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -276,6 +276,7 @@ type Server struct {
|
|||
repository Repository
|
||||
sceneServer sceneServer
|
||||
ipWhitelistManager *ipWhitelistManager
|
||||
VideoSortOrder string
|
||||
}
|
||||
|
||||
// UPnP SOAP service.
|
||||
|
|
|
|||
|
|
@ -60,14 +60,14 @@ func (p *scenePager) getPages(ctx context.Context, r scene.Queryer, total int) (
|
|||
return objs, nil
|
||||
}
|
||||
|
||||
func (p *scenePager) getPageVideos(ctx context.Context, r SceneFinder, f file.Finder, page int, host string) ([]interface{}, error) {
|
||||
func (p *scenePager) getPageVideos(ctx context.Context, r SceneFinder, f file.Finder, page int, host string, sort string, direction models.SortDirectionEnum) ([]interface{}, error) {
|
||||
var objs []interface{}
|
||||
|
||||
sort := "title"
|
||||
findFilter := &models.FindFilterType{
|
||||
PerPage: &pageSize,
|
||||
Page: &page,
|
||||
Sort: &sort,
|
||||
PerPage: &pageSize,
|
||||
Page: &page,
|
||||
Sort: &sort,
|
||||
Direction: &direction,
|
||||
}
|
||||
|
||||
scenes, err := scene.Query(ctx, r, p.sceneFilter, findFilter)
|
||||
|
|
|
|||
|
|
@ -45,6 +45,7 @@ type dmsConfig struct {
|
|||
LogHeaders bool
|
||||
StallEventSubscribe bool
|
||||
NotifyInterval time.Duration
|
||||
VideoSortOrder string
|
||||
}
|
||||
|
||||
type sceneServer interface {
|
||||
|
|
@ -56,6 +57,7 @@ type Config interface {
|
|||
GetDLNAInterfaces() []string
|
||||
GetDLNAServerName() string
|
||||
GetDLNADefaultIPWhitelist() []string
|
||||
GetVideoSortOrder() string
|
||||
}
|
||||
|
||||
type Service struct {
|
||||
|
|
@ -123,6 +125,7 @@ func (s *Service) init() error {
|
|||
FriendlyName: friendlyName,
|
||||
LogHeaders: false,
|
||||
NotifyInterval: 30 * time.Second,
|
||||
VideoSortOrder: s.config.GetVideoSortOrder(),
|
||||
}
|
||||
|
||||
interfaces, err := s.getInterfaces()
|
||||
|
|
@ -164,6 +167,7 @@ func (s *Service) init() error {
|
|||
// },
|
||||
StallEventSubscribe: dmsConfig.StallEventSubscribe,
|
||||
NotifyInterval: dmsConfig.NotifyInterval,
|
||||
VideoSortOrder: dmsConfig.VideoSortOrder,
|
||||
}
|
||||
|
||||
return nil
|
||||
|
|
|
|||
|
|
@ -65,7 +65,8 @@ func scrapedToPerformerInput(performer *models.ScrapedPerformer) models.Performe
|
|||
ret.DeathDate = &d
|
||||
}
|
||||
if performer.Gender != nil {
|
||||
ret.Gender = models.GenderEnum(*performer.Gender)
|
||||
v := models.GenderEnum(*performer.Gender)
|
||||
ret.Gender = &v
|
||||
}
|
||||
if performer.Ethnicity != nil {
|
||||
ret.Ethnicity = *performer.Ethnicity
|
||||
|
|
@ -97,6 +98,16 @@ func scrapedToPerformerInput(performer *models.ScrapedPerformer) models.Performe
|
|||
if performer.FakeTits != nil {
|
||||
ret.FakeTits = *performer.FakeTits
|
||||
}
|
||||
if performer.PenisLength != nil {
|
||||
h, err := strconv.ParseFloat(*performer.PenisLength, 64)
|
||||
if err == nil {
|
||||
ret.PenisLength = &h
|
||||
}
|
||||
}
|
||||
if performer.Circumcised != nil {
|
||||
v := models.CircumisedEnum(*performer.Circumcised)
|
||||
ret.Circumcised = &v
|
||||
}
|
||||
if performer.CareerLength != nil {
|
||||
ret.CareerLength = *performer.CareerLength
|
||||
}
|
||||
|
|
|
|||
|
|
@ -228,6 +228,10 @@ func Test_scrapedToPerformerInput(t *testing.T) {
|
|||
return &d
|
||||
}
|
||||
|
||||
genderPtr := func(g models.GenderEnum) *models.GenderEnum {
|
||||
return &g
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
performer *models.ScrapedPerformer
|
||||
|
|
@ -259,7 +263,7 @@ func Test_scrapedToPerformerInput(t *testing.T) {
|
|||
Name: name,
|
||||
Birthdate: dateToDatePtr(models.NewDate(*nextVal())),
|
||||
DeathDate: dateToDatePtr(models.NewDate(*nextVal())),
|
||||
Gender: models.GenderEnum(*nextVal()),
|
||||
Gender: genderPtr(models.GenderEnum(*nextVal())),
|
||||
Ethnicity: *nextVal(),
|
||||
Country: *nextVal(),
|
||||
EyeColor: *nextVal(),
|
||||
|
|
|
|||
|
|
@ -235,6 +235,13 @@ func (log *Logger) Tracef(format string, args ...interface{}) {
|
|||
log.addLogItem(l)
|
||||
}
|
||||
|
||||
func (log *Logger) TraceFunc(fn func() (string, []interface{})) {
|
||||
if log.logger.Level >= logrus.TraceLevel {
|
||||
msg, args := fn()
|
||||
log.Tracef(msg, args...)
|
||||
}
|
||||
}
|
||||
|
||||
func (log *Logger) Debug(args ...interface{}) {
|
||||
log.logger.Debug(args...)
|
||||
l := &LogItem{
|
||||
|
|
@ -253,6 +260,17 @@ func (log *Logger) Debugf(format string, args ...interface{}) {
|
|||
log.addLogItem(l)
|
||||
}
|
||||
|
||||
func (log *Logger) logFunc(level logrus.Level, logFn func(format string, args ...interface{}), fn func() (string, []interface{})) {
|
||||
if log.logger.Level >= level {
|
||||
msg, args := fn()
|
||||
logFn(msg, args...)
|
||||
}
|
||||
}
|
||||
|
||||
func (log *Logger) DebugFunc(fn func() (string, []interface{})) {
|
||||
log.logFunc(logrus.DebugLevel, log.logger.Debugf, fn)
|
||||
}
|
||||
|
||||
func (log *Logger) Info(args ...interface{}) {
|
||||
log.logger.Info(args...)
|
||||
l := &LogItem{
|
||||
|
|
@ -271,6 +289,10 @@ func (log *Logger) Infof(format string, args ...interface{}) {
|
|||
log.addLogItem(l)
|
||||
}
|
||||
|
||||
func (log *Logger) InfoFunc(fn func() (string, []interface{})) {
|
||||
log.logFunc(logrus.InfoLevel, log.logger.Infof, fn)
|
||||
}
|
||||
|
||||
func (log *Logger) Warn(args ...interface{}) {
|
||||
log.logger.Warn(args...)
|
||||
l := &LogItem{
|
||||
|
|
@ -289,6 +311,10 @@ func (log *Logger) Warnf(format string, args ...interface{}) {
|
|||
log.addLogItem(l)
|
||||
}
|
||||
|
||||
func (log *Logger) WarnFunc(fn func() (string, []interface{})) {
|
||||
log.logFunc(logrus.WarnLevel, log.logger.Warnf, fn)
|
||||
}
|
||||
|
||||
func (log *Logger) Error(args ...interface{}) {
|
||||
log.logger.Error(args...)
|
||||
l := &LogItem{
|
||||
|
|
@ -307,6 +333,10 @@ func (log *Logger) Errorf(format string, args ...interface{}) {
|
|||
log.addLogItem(l)
|
||||
}
|
||||
|
||||
func (log *Logger) ErrorFunc(fn func() (string, []interface{})) {
|
||||
log.logFunc(logrus.ErrorLevel, log.logger.Errorf, fn)
|
||||
}
|
||||
|
||||
func (log *Logger) Fatal(args ...interface{}) {
|
||||
log.logger.Fatal(args...)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -96,6 +96,9 @@ const (
|
|||
WriteImageThumbnails = "write_image_thumbnails"
|
||||
writeImageThumbnailsDefault = true
|
||||
|
||||
CreateImageClipsFromVideos = "create_image_clip_from_videos"
|
||||
createImageClipsFromVideosDefault = false
|
||||
|
||||
Host = "host"
|
||||
hostDefault = "0.0.0.0"
|
||||
|
||||
|
|
@ -210,6 +213,9 @@ const (
|
|||
DLNADefaultIPWhitelist = "dlna.default_whitelist"
|
||||
DLNAInterfaces = "dlna.interfaces"
|
||||
|
||||
DLNAVideoSortOrder = "dlna.video_sort_order"
|
||||
dlnaVideoSortOrderDefault = "title"
|
||||
|
||||
// Logging options
|
||||
LogFile = "logFile"
|
||||
LogOut = "logOut"
|
||||
|
|
@ -862,6 +868,10 @@ func (i *Instance) IsWriteImageThumbnails() bool {
|
|||
return i.getBool(WriteImageThumbnails)
|
||||
}
|
||||
|
||||
func (i *Instance) IsCreateImageClipsFromVideos() bool {
|
||||
return i.getBool(CreateImageClipsFromVideos)
|
||||
}
|
||||
|
||||
func (i *Instance) GetAPIKey() string {
|
||||
return i.getString(ApiKey)
|
||||
}
|
||||
|
|
@ -1370,6 +1380,17 @@ func (i *Instance) GetDLNAInterfaces() []string {
|
|||
return i.getStringSlice(DLNAInterfaces)
|
||||
}
|
||||
|
||||
// GetVideoSortOrder returns the sort order to display videos. If
|
||||
// empty, videos will be sorted by titles.
|
||||
func (i *Instance) GetVideoSortOrder() string {
|
||||
ret := i.getString(DLNAVideoSortOrder)
|
||||
if ret == "" {
|
||||
ret = dlnaVideoSortOrderDefault
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
// GetLogFile returns the filename of the file to output logs to.
|
||||
// An empty string means that file logging will be disabled.
|
||||
func (i *Instance) GetLogFile() string {
|
||||
|
|
@ -1499,6 +1520,7 @@ func (i *Instance) setDefaultValues(write bool) error {
|
|||
i.main.SetDefault(ThemeColor, DefaultThemeColor)
|
||||
|
||||
i.main.SetDefault(WriteImageThumbnails, writeImageThumbnailsDefault)
|
||||
i.main.SetDefault(CreateImageClipsFromVideos, createImageClipsFromVideosDefault)
|
||||
|
||||
i.main.SetDefault(Database, defaultDatabaseFilePath)
|
||||
|
||||
|
|
|
|||
|
|
@ -24,6 +24,7 @@ type flagStruct struct {
|
|||
configFilePath string
|
||||
cpuProfilePath string
|
||||
nobrowser bool
|
||||
helpFlag bool
|
||||
}
|
||||
|
||||
func GetInstance() *Instance {
|
||||
|
|
@ -40,6 +41,12 @@ func Initialize() (*Instance, error) {
|
|||
var err error
|
||||
initOnce.Do(func() {
|
||||
flags := initFlags()
|
||||
|
||||
if flags.helpFlag {
|
||||
pflag.Usage()
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
overrides := makeOverrideConfig()
|
||||
|
||||
_ = GetInstance()
|
||||
|
|
@ -126,6 +133,7 @@ func initFlags() flagStruct {
|
|||
pflag.StringVarP(&flags.configFilePath, "config", "c", "", "config file to use")
|
||||
pflag.StringVar(&flags.cpuProfilePath, "cpuprofile", "", "write cpu profile to file")
|
||||
pflag.BoolVar(&flags.nobrowser, "nobrowser", false, "Don't open a browser window after launch")
|
||||
pflag.BoolVarP(&flags.helpFlag, "help", "h", false, "show this help text and exit")
|
||||
|
||||
pflag.Parse()
|
||||
|
||||
|
|
|
|||
|
|
@ -19,6 +19,8 @@ type ScanMetadataOptions struct {
|
|||
ScanGeneratePhashes bool `json:"scanGeneratePhashes"`
|
||||
// Generate image thumbnails during scan
|
||||
ScanGenerateThumbnails bool `json:"scanGenerateThumbnails"`
|
||||
// Generate image thumbnails during scan
|
||||
ScanGenerateClipPreviews bool `json:"scanGenerateClipPreviews"`
|
||||
}
|
||||
|
||||
type AutoTagMetadataOptions struct {
|
||||
|
|
|
|||
|
|
@ -80,6 +80,7 @@ func (s *DownloadStore) Serve(hash string, w http.ResponseWriter, r *http.Reques
|
|||
if f.contentType != "" {
|
||||
w.Header().Add("Content-Type", f.contentType)
|
||||
}
|
||||
w.Header().Set("Cache-Control", "no-store")
|
||||
http.ServeFile(w, r, f.path)
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,28 +0,0 @@
|
|||
package manager
|
||||
|
||||
import (
|
||||
"embed"
|
||||
"runtime"
|
||||
)
|
||||
|
||||
const faviconDir = "v2.5/build/"
|
||||
|
||||
type FaviconProvider struct {
|
||||
UIBox embed.FS
|
||||
}
|
||||
|
||||
func (p *FaviconProvider) GetFavicon() []byte {
|
||||
if runtime.GOOS == "windows" {
|
||||
faviconPath := faviconDir + "favicon.ico"
|
||||
ret, _ := p.UIBox.ReadFile(faviconPath)
|
||||
return ret
|
||||
}
|
||||
|
||||
return p.GetFaviconPng()
|
||||
}
|
||||
|
||||
func (p *FaviconProvider) GetFaviconPng() []byte {
|
||||
faviconPath := faviconDir + "favicon.png"
|
||||
ret, _ := p.UIBox.ReadFile(faviconPath)
|
||||
return ret
|
||||
}
|
||||
|
|
@ -63,7 +63,7 @@ func (c *fingerprintCalculator) CalculateFingerprints(f *file.BaseFile, o file.O
|
|||
var ret []file.Fingerprint
|
||||
calculateMD5 := true
|
||||
|
||||
if isVideo(f.Basename) {
|
||||
if useAsVideo(f.Path) {
|
||||
var (
|
||||
fp *file.Fingerprint
|
||||
err error
|
||||
|
|
|
|||
|
|
@ -73,10 +73,11 @@ func (g *InteractiveHeatmapSpeedGenerator) Generate(funscriptPath string, heatma
|
|||
return fmt.Errorf("no valid actions in funscript")
|
||||
}
|
||||
|
||||
sceneDurationMilli := int64(sceneDuration * 1000)
|
||||
g.Funscript = funscript
|
||||
g.Funscript.UpdateIntensityAndSpeed()
|
||||
|
||||
err = g.RenderHeatmap(heatmapPath)
|
||||
err = g.RenderHeatmap(heatmapPath, sceneDurationMilli)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
@ -155,8 +156,8 @@ func (funscript *Script) UpdateIntensityAndSpeed() {
|
|||
}
|
||||
|
||||
// funscript needs to have intensity updated first
|
||||
func (g *InteractiveHeatmapSpeedGenerator) RenderHeatmap(heatmapPath string) error {
|
||||
gradient := g.Funscript.getGradientTable(g.NumSegments)
|
||||
func (g *InteractiveHeatmapSpeedGenerator) RenderHeatmap(heatmapPath string, sceneDurationMilli int64) error {
|
||||
gradient := g.Funscript.getGradientTable(g.NumSegments, sceneDurationMilli)
|
||||
|
||||
img := image.NewRGBA(image.Rect(0, 0, g.Width, g.Height))
|
||||
for x := 0; x < g.Width; x++ {
|
||||
|
|
@ -179,7 +180,7 @@ func (g *InteractiveHeatmapSpeedGenerator) RenderHeatmap(heatmapPath string) err
|
|||
}
|
||||
|
||||
// add 10 minute marks
|
||||
maxts := g.Funscript.Actions[len(g.Funscript.Actions)-1].At
|
||||
maxts := sceneDurationMilli
|
||||
const tick = 600000
|
||||
var ts int64 = tick
|
||||
c, _ := colorful.Hex("#000000")
|
||||
|
|
@ -242,7 +243,7 @@ func (gt GradientTable) GetYRange(t float64) [2]float64 {
|
|||
return gt[len(gt)-1].YRange
|
||||
}
|
||||
|
||||
func (funscript Script) getGradientTable(numSegments int) GradientTable {
|
||||
func (funscript Script) getGradientTable(numSegments int, sceneDurationMilli int64) GradientTable {
|
||||
const windowSize = 15
|
||||
const backfillThreshold = 500
|
||||
|
||||
|
|
@ -255,7 +256,7 @@ func (funscript Script) getGradientTable(numSegments int) GradientTable {
|
|||
gradient := make(GradientTable, numSegments)
|
||||
posList := []int{}
|
||||
|
||||
maxts := funscript.Actions[len(funscript.Actions)-1].At
|
||||
maxts := sceneDurationMilli
|
||||
|
||||
for _, a := range funscript.Actions {
|
||||
posList = append(posList, a.Pos)
|
||||
|
|
|
|||
|
|
@ -279,11 +279,11 @@ func initialize() error {
|
|||
}
|
||||
|
||||
func videoFileFilter(ctx context.Context, f file.File) bool {
|
||||
return isVideo(f.Base().Basename)
|
||||
return useAsVideo(f.Base().Path)
|
||||
}
|
||||
|
||||
func imageFileFilter(ctx context.Context, f file.File) bool {
|
||||
return isImage(f.Base().Basename)
|
||||
return useAsImage(f.Base().Path)
|
||||
}
|
||||
|
||||
func galleryFileFilter(ctx context.Context, f file.File) bool {
|
||||
|
|
@ -306,8 +306,10 @@ func makeScanner(db *sqlite.Database, pluginCache *plugin.Cache) *file.Scanner {
|
|||
Filter: file.FilterFunc(videoFileFilter),
|
||||
},
|
||||
&file.FilteredDecorator{
|
||||
Decorator: &file_image.Decorator{},
|
||||
Filter: file.FilterFunc(imageFileFilter),
|
||||
Decorator: &file_image.Decorator{
|
||||
FFProbe: instance.FFProbe,
|
||||
},
|
||||
Filter: file.FilterFunc(imageFileFilter),
|
||||
},
|
||||
},
|
||||
FingerprintCalculator: &fingerprintCalculator{instance.Config},
|
||||
|
|
@ -509,12 +511,8 @@ func (s *Manager) SetBlobStoreOptions() {
|
|||
}
|
||||
|
||||
func writeStashIcon() {
|
||||
p := FaviconProvider{
|
||||
UIBox: ui.UIBox,
|
||||
}
|
||||
|
||||
iconPath := filepath.Join(instance.Config.GetConfigPath(), "icon.png")
|
||||
err := os.WriteFile(iconPath, p.GetFaviconPng(), 0644)
|
||||
err := os.WriteFile(iconPath, ui.FaviconProvider.GetFaviconPng(), 0644)
|
||||
if err != nil {
|
||||
logger.Errorf("Couldn't write icon file: %s", err.Error())
|
||||
}
|
||||
|
|
|
|||
|
|
@ -15,6 +15,20 @@ import (
|
|||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func useAsVideo(pathname string) bool {
|
||||
if instance.Config.IsCreateImageClipsFromVideos() && config.StashConfigs.GetStashFromDirPath(instance.Config.GetStashPaths(), pathname).ExcludeVideo {
|
||||
return false
|
||||
}
|
||||
return isVideo(pathname)
|
||||
}
|
||||
|
||||
func useAsImage(pathname string) bool {
|
||||
if instance.Config.IsCreateImageClipsFromVideos() && config.StashConfigs.GetStashFromDirPath(instance.Config.GetStashPaths(), pathname).ExcludeVideo {
|
||||
return isImage(pathname) || isVideo(pathname)
|
||||
}
|
||||
return isImage(pathname)
|
||||
}
|
||||
|
||||
func isZip(pathname string) bool {
|
||||
gExt := config.GetInstance().GetGalleryExtensions()
|
||||
return fsutil.MatchExtension(pathname, gExt)
|
||||
|
|
|
|||
|
|
@ -15,7 +15,6 @@ import (
|
|||
type ImageReaderWriter interface {
|
||||
models.ImageReaderWriter
|
||||
image.FinderCreatorUpdater
|
||||
models.ImageFileLoader
|
||||
GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error)
|
||||
}
|
||||
|
||||
|
|
@ -114,4 +113,6 @@ type GalleryService interface {
|
|||
Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error)
|
||||
|
||||
ValidateImageGalleryChange(ctx context.Context, i *models.Image, updateIDs models.UpdateIDs) error
|
||||
|
||||
Updated(ctx context.Context, galleryID int) error
|
||||
}
|
||||
|
|
|
|||
|
|
@ -39,9 +39,15 @@ type SceneServer struct {
|
|||
}
|
||||
|
||||
func (s *SceneServer) StreamSceneDirect(scene *models.Scene, w http.ResponseWriter, r *http.Request) {
|
||||
fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm()
|
||||
// #3526 - return 404 if the scene does not have any files
|
||||
if scene.Path == "" {
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
return
|
||||
}
|
||||
|
||||
filepath := GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.GetHash(fileNamingAlgo))
|
||||
sceneHash := scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm())
|
||||
|
||||
filepath := GetInstance().Paths.Scene.GetStreamPath(scene.Path, sceneHash)
|
||||
streamRequestCtx := ffmpeg.NewStreamRequestContext(w, r)
|
||||
|
||||
// #2579 - hijacking and closing the connection here causes video playback to fail in Safari
|
||||
|
|
@ -69,11 +75,17 @@ func (s *SceneServer) ServeScreenshot(scene *models.Scene, w http.ResponseWriter
|
|||
if cover == nil {
|
||||
// fallback to legacy image if present
|
||||
if scene.Path != "" {
|
||||
filepath := GetInstance().Paths.Scene.GetLegacyScreenshotPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()))
|
||||
sceneHash := scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm())
|
||||
filepath := GetInstance().Paths.Scene.GetLegacyScreenshotPath(sceneHash)
|
||||
|
||||
// fall back to the scene image blob if the file isn't present
|
||||
screenshotExists, _ := fsutil.FileExists(filepath)
|
||||
if screenshotExists {
|
||||
if r.URL.Query().Has("t") {
|
||||
w.Header().Set("Cache-Control", "private, max-age=31536000, immutable")
|
||||
} else {
|
||||
w.Header().Set("Cache-Control", "no-cache")
|
||||
}
|
||||
http.ServeFile(w, r, filepath)
|
||||
return
|
||||
}
|
||||
|
|
@ -83,11 +95,8 @@ func (s *SceneServer) ServeScreenshot(scene *models.Scene, w http.ResponseWriter
|
|||
// should always be there
|
||||
f, _ := static.Scene.Open(defaultSceneImage)
|
||||
defer f.Close()
|
||||
stat, _ := f.Stat()
|
||||
http.ServeContent(w, r, "scene.svg", stat.ModTime(), f.(io.ReadSeeker))
|
||||
cover, _ = io.ReadAll(f)
|
||||
}
|
||||
|
||||
if err := utils.ServeImage(cover, w, r); err != nil {
|
||||
logger.Warnf("error serving screenshot image: %v", err)
|
||||
}
|
||||
utils.ServeImage(w, r, cover)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -88,7 +88,7 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL *url.URL, maxStrea
|
|||
|
||||
// convert StreamingResolutionEnum to ResolutionEnum
|
||||
maxStreamingResolution := models.ResolutionEnum(maxStreamingTranscodeSize)
|
||||
sceneResolution := pf.GetMinResolution()
|
||||
sceneResolution := file.GetMinResolution(pf)
|
||||
includeSceneStreamPath := func(streamingResolution models.StreamingResolutionEnum) bool {
|
||||
var minResolution int
|
||||
if streamingResolution == models.StreamingResolutionEnumOriginal {
|
||||
|
|
|
|||
|
|
@ -37,7 +37,7 @@ func (j *autoTagJob) Execute(ctx context.Context, progress *job.Progress) {
|
|||
j.autoTagSpecific(ctx, progress)
|
||||
}
|
||||
|
||||
logger.Infof("Finished autotag after %s", time.Since(begin).String())
|
||||
logger.Infof("Finished auto-tag after %s", time.Since(begin).String())
|
||||
}
|
||||
|
||||
func (j *autoTagJob) isFileBasedAutoTag(input AutoTagMetadataInput) bool {
|
||||
|
|
@ -84,32 +84,34 @@ func (j *autoTagJob) autoTagSpecific(ctx context.Context, progress *job.Progress
|
|||
if performerCount == 1 && performerIds[0] == wildcard {
|
||||
performerCount, err = performerQuery.Count(ctx)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error getting performer count: %v", err)
|
||||
return fmt.Errorf("getting performer count: %v", err)
|
||||
}
|
||||
}
|
||||
if studioCount == 1 && studioIds[0] == wildcard {
|
||||
studioCount, err = studioQuery.Count(ctx)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error getting studio count: %v", err)
|
||||
return fmt.Errorf("getting studio count: %v", err)
|
||||
}
|
||||
}
|
||||
if tagCount == 1 && tagIds[0] == wildcard {
|
||||
tagCount, err = tagQuery.Count(ctx)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error getting tag count: %v", err)
|
||||
return fmt.Errorf("getting tag count: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
if !job.IsCancelled(ctx) {
|
||||
logger.Errorf("auto-tag error: %v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
total := performerCount + studioCount + tagCount
|
||||
progress.SetTotal(total)
|
||||
|
||||
logger.Infof("Starting autotag of %d performers, %d studios, %d tags", performerCount, studioCount, tagCount)
|
||||
logger.Infof("Starting auto-tag of %d performers, %d studios, %d tags", performerCount, studioCount, tagCount)
|
||||
|
||||
j.autoTagPerformers(ctx, progress, input.Paths, performerIds)
|
||||
j.autoTagStudios(ctx, progress, input.Paths, studioIds)
|
||||
|
|
@ -142,7 +144,7 @@ func (j *autoTagJob) autoTagPerformers(ctx context.Context, progress *job.Progre
|
|||
PerPage: &perPage,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("error querying performers: %w", err)
|
||||
return fmt.Errorf("querying performers: %w", err)
|
||||
}
|
||||
} else {
|
||||
performerIdInt, err := strconv.Atoi(performerId)
|
||||
|
|
@ -167,11 +169,10 @@ func (j *autoTagJob) autoTagPerformers(ctx context.Context, progress *job.Progre
|
|||
|
||||
for _, performer := range performers {
|
||||
if job.IsCancelled(ctx) {
|
||||
logger.Info("Stopping due to user request")
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := func() error {
|
||||
err := func() error {
|
||||
r := j.txnManager
|
||||
if err := tagger.PerformerScenes(ctx, performer, paths, r.Scene); err != nil {
|
||||
return fmt.Errorf("processing scenes: %w", err)
|
||||
|
|
@ -184,8 +185,14 @@ func (j *autoTagJob) autoTagPerformers(ctx context.Context, progress *job.Progre
|
|||
}
|
||||
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return fmt.Errorf("error auto-tagging performer '%s': %s", performer.Name, err.Error())
|
||||
}()
|
||||
|
||||
if job.IsCancelled(ctx) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("tagging performer '%s': %s", performer.Name, err.Error())
|
||||
}
|
||||
|
||||
progress.Increment()
|
||||
|
|
@ -193,8 +200,12 @@ func (j *autoTagJob) autoTagPerformers(ctx context.Context, progress *job.Progre
|
|||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
continue
|
||||
logger.Errorf("auto-tag error: %v", err)
|
||||
}
|
||||
|
||||
if job.IsCancelled(ctx) {
|
||||
logger.Info("Stopping performer auto-tag due to user request")
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -225,17 +236,17 @@ func (j *autoTagJob) autoTagStudios(ctx context.Context, progress *job.Progress,
|
|||
PerPage: &perPage,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("error querying studios: %v", err)
|
||||
return fmt.Errorf("querying studios: %v", err)
|
||||
}
|
||||
} else {
|
||||
studioIdInt, err := strconv.Atoi(studioId)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error parsing studio id %s: %s", studioId, err.Error())
|
||||
return fmt.Errorf("parsing studio id %s: %s", studioId, err.Error())
|
||||
}
|
||||
|
||||
studio, err := studioQuery.Find(ctx, studioIdInt)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error finding studio id %s: %s", studioId, err.Error())
|
||||
return fmt.Errorf("finding studio id %s: %s", studioId, err.Error())
|
||||
}
|
||||
|
||||
if studio == nil {
|
||||
|
|
@ -247,11 +258,10 @@ func (j *autoTagJob) autoTagStudios(ctx context.Context, progress *job.Progress,
|
|||
|
||||
for _, studio := range studios {
|
||||
if job.IsCancelled(ctx) {
|
||||
logger.Info("Stopping due to user request")
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := func() error {
|
||||
err := func() error {
|
||||
aliases, err := r.Studio.GetAliases(ctx, studio.ID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("getting studio aliases: %w", err)
|
||||
|
|
@ -268,8 +278,14 @@ func (j *autoTagJob) autoTagStudios(ctx context.Context, progress *job.Progress,
|
|||
}
|
||||
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return fmt.Errorf("error auto-tagging studio '%s': %s", studio.Name.String, err.Error())
|
||||
}()
|
||||
|
||||
if job.IsCancelled(ctx) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("tagging studio '%s': %s", studio.Name.String, err.Error())
|
||||
}
|
||||
|
||||
progress.Increment()
|
||||
|
|
@ -277,8 +293,12 @@ func (j *autoTagJob) autoTagStudios(ctx context.Context, progress *job.Progress,
|
|||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
continue
|
||||
logger.Errorf("auto-tag error: %v", err)
|
||||
}
|
||||
|
||||
if job.IsCancelled(ctx) {
|
||||
logger.Info("Stopping studio auto-tag due to user request")
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -308,28 +328,27 @@ func (j *autoTagJob) autoTagTags(ctx context.Context, progress *job.Progress, pa
|
|||
PerPage: &perPage,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("error querying tags: %v", err)
|
||||
return fmt.Errorf("querying tags: %v", err)
|
||||
}
|
||||
} else {
|
||||
tagIdInt, err := strconv.Atoi(tagId)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error parsing tag id %s: %s", tagId, err.Error())
|
||||
return fmt.Errorf("parsing tag id %s: %s", tagId, err.Error())
|
||||
}
|
||||
|
||||
tag, err := tagQuery.Find(ctx, tagIdInt)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error finding tag id %s: %s", tagId, err.Error())
|
||||
return fmt.Errorf("finding tag id %s: %s", tagId, err.Error())
|
||||
}
|
||||
tags = append(tags, tag)
|
||||
}
|
||||
|
||||
for _, tag := range tags {
|
||||
if job.IsCancelled(ctx) {
|
||||
logger.Info("Stopping due to user request")
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := func() error {
|
||||
err := func() error {
|
||||
aliases, err := r.Tag.GetAliases(ctx, tag.ID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("getting tag aliases: %w", err)
|
||||
|
|
@ -346,8 +365,14 @@ func (j *autoTagJob) autoTagTags(ctx context.Context, progress *job.Progress, pa
|
|||
}
|
||||
|
||||
return nil
|
||||
}(); err != nil {
|
||||
return fmt.Errorf("error auto-tagging tag '%s': %s", tag.Name, err.Error())
|
||||
}()
|
||||
|
||||
if job.IsCancelled(ctx) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("tagging tag '%s': %s", tag.Name, err.Error())
|
||||
}
|
||||
|
||||
progress.Increment()
|
||||
|
|
@ -355,8 +380,12 @@ func (j *autoTagJob) autoTagTags(ctx context.Context, progress *job.Progress, pa
|
|||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
continue
|
||||
logger.Errorf("auto-tag error: %v", err)
|
||||
}
|
||||
|
||||
if job.IsCancelled(ctx) {
|
||||
logger.Info("Stopping tag auto-tag due to user request")
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -488,11 +517,13 @@ func (t *autoTagFilesTask) getCount(ctx context.Context, r Repository) (int, err
|
|||
return sceneCount + imageCount + galleryCount, nil
|
||||
}
|
||||
|
||||
func (t *autoTagFilesTask) processScenes(ctx context.Context, r Repository) error {
|
||||
func (t *autoTagFilesTask) processScenes(ctx context.Context, r Repository) {
|
||||
if job.IsCancelled(ctx) {
|
||||
return nil
|
||||
return
|
||||
}
|
||||
|
||||
logger.Info("Auto-tagging scenes...")
|
||||
|
||||
batchSize := 1000
|
||||
|
||||
findFilter := models.BatchFindFilter(batchSize)
|
||||
|
|
@ -506,12 +537,16 @@ func (t *autoTagFilesTask) processScenes(ctx context.Context, r Repository) erro
|
|||
scenes, err = scene.Query(ctx, r.Scene, sceneFilter, findFilter)
|
||||
return err
|
||||
}); err != nil {
|
||||
return fmt.Errorf("querying scenes: %w", err)
|
||||
if !job.IsCancelled(ctx) {
|
||||
logger.Errorf("error querying scenes for auto-tag: %w", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
for _, ss := range scenes {
|
||||
if job.IsCancelled(ctx) {
|
||||
return nil
|
||||
logger.Info("Stopping auto-tag due to user request")
|
||||
return
|
||||
}
|
||||
|
||||
tt := autoTagSceneTask{
|
||||
|
|
@ -541,15 +576,15 @@ func (t *autoTagFilesTask) processScenes(ctx context.Context, r Repository) erro
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (t *autoTagFilesTask) processImages(ctx context.Context, r Repository) error {
|
||||
func (t *autoTagFilesTask) processImages(ctx context.Context, r Repository) {
|
||||
if job.IsCancelled(ctx) {
|
||||
return nil
|
||||
return
|
||||
}
|
||||
|
||||
logger.Info("Auto-tagging images...")
|
||||
|
||||
batchSize := 1000
|
||||
|
||||
findFilter := models.BatchFindFilter(batchSize)
|
||||
|
|
@ -563,12 +598,16 @@ func (t *autoTagFilesTask) processImages(ctx context.Context, r Repository) erro
|
|||
images, err = image.Query(ctx, r.Image, imageFilter, findFilter)
|
||||
return err
|
||||
}); err != nil {
|
||||
return fmt.Errorf("querying images: %w", err)
|
||||
if !job.IsCancelled(ctx) {
|
||||
logger.Errorf("error querying images for auto-tag: %w", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
for _, ss := range images {
|
||||
if job.IsCancelled(ctx) {
|
||||
return nil
|
||||
logger.Info("Stopping auto-tag due to user request")
|
||||
return
|
||||
}
|
||||
|
||||
tt := autoTagImageTask{
|
||||
|
|
@ -598,15 +637,15 @@ func (t *autoTagFilesTask) processImages(ctx context.Context, r Repository) erro
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (t *autoTagFilesTask) processGalleries(ctx context.Context, r Repository) error {
|
||||
func (t *autoTagFilesTask) processGalleries(ctx context.Context, r Repository) {
|
||||
if job.IsCancelled(ctx) {
|
||||
return nil
|
||||
return
|
||||
}
|
||||
|
||||
logger.Info("Auto-tagging galleries...")
|
||||
|
||||
batchSize := 1000
|
||||
|
||||
findFilter := models.BatchFindFilter(batchSize)
|
||||
|
|
@ -620,12 +659,16 @@ func (t *autoTagFilesTask) processGalleries(ctx context.Context, r Repository) e
|
|||
galleries, _, err = r.Gallery.Query(ctx, galleryFilter, findFilter)
|
||||
return err
|
||||
}); err != nil {
|
||||
return fmt.Errorf("querying galleries: %w", err)
|
||||
if !job.IsCancelled(ctx) {
|
||||
logger.Errorf("error querying galleries for auto-tag: %w", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
for _, ss := range galleries {
|
||||
if job.IsCancelled(ctx) {
|
||||
return nil
|
||||
logger.Info("Stopping auto-tag due to user request")
|
||||
return
|
||||
}
|
||||
|
||||
tt := autoTagGalleryTask{
|
||||
|
|
@ -655,8 +698,6 @@ func (t *autoTagFilesTask) processGalleries(ctx context.Context, r Repository) e
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (t *autoTagFilesTask) process(ctx context.Context) {
|
||||
|
|
@ -668,35 +709,19 @@ func (t *autoTagFilesTask) process(ctx context.Context) {
|
|||
}
|
||||
|
||||
t.progress.SetTotal(total)
|
||||
logger.Infof("Starting autotag of %d files", total)
|
||||
logger.Infof("Starting auto-tag of %d files", total)
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Errorf("error getting count for autotag task: %v", err)
|
||||
if !job.IsCancelled(ctx) {
|
||||
logger.Errorf("error getting file count for auto-tag task: %v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
logger.Info("Autotagging scenes...")
|
||||
if err := t.processScenes(ctx, r); err != nil {
|
||||
logger.Errorf("error processing scenes: %w", err)
|
||||
return
|
||||
}
|
||||
|
||||
logger.Info("Autotagging images...")
|
||||
if err := t.processImages(ctx, r); err != nil {
|
||||
logger.Errorf("error processing images: %w", err)
|
||||
return
|
||||
}
|
||||
|
||||
logger.Info("Autotagging galleries...")
|
||||
if err := t.processGalleries(ctx, r); err != nil {
|
||||
logger.Errorf("error processing galleries: %w", err)
|
||||
return
|
||||
}
|
||||
|
||||
if job.IsCancelled(ctx) {
|
||||
logger.Info("Stopping due to user request")
|
||||
}
|
||||
t.processScenes(ctx, r)
|
||||
t.processImages(ctx, r)
|
||||
t.processGalleries(ctx, r)
|
||||
}
|
||||
|
||||
type autoTagSceneTask struct {
|
||||
|
|
@ -721,23 +746,25 @@ func (t *autoTagSceneTask) Start(ctx context.Context, wg *sync.WaitGroup) {
|
|||
|
||||
if t.performers {
|
||||
if err := autotag.ScenePerformers(ctx, t.scene, r.Scene, r.Performer, t.cache); err != nil {
|
||||
return fmt.Errorf("error tagging scene performers for %s: %v", t.scene.DisplayName(), err)
|
||||
return fmt.Errorf("tagging scene performers for %s: %v", t.scene.DisplayName(), err)
|
||||
}
|
||||
}
|
||||
if t.studios {
|
||||
if err := autotag.SceneStudios(ctx, t.scene, r.Scene, r.Studio, t.cache); err != nil {
|
||||
return fmt.Errorf("error tagging scene studio for %s: %v", t.scene.DisplayName(), err)
|
||||
return fmt.Errorf("tagging scene studio for %s: %v", t.scene.DisplayName(), err)
|
||||
}
|
||||
}
|
||||
if t.tags {
|
||||
if err := autotag.SceneTags(ctx, t.scene, r.Scene, r.Tag, t.cache); err != nil {
|
||||
return fmt.Errorf("error tagging scene tags for %s: %v", t.scene.DisplayName(), err)
|
||||
return fmt.Errorf("tagging scene tags for %s: %v", t.scene.DisplayName(), err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
if !job.IsCancelled(ctx) {
|
||||
logger.Errorf("auto-tag error: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -758,23 +785,25 @@ func (t *autoTagImageTask) Start(ctx context.Context, wg *sync.WaitGroup) {
|
|||
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
|
||||
if t.performers {
|
||||
if err := autotag.ImagePerformers(ctx, t.image, r.Image, r.Performer, t.cache); err != nil {
|
||||
return fmt.Errorf("error tagging image performers for %s: %v", t.image.DisplayName(), err)
|
||||
return fmt.Errorf("tagging image performers for %s: %v", t.image.DisplayName(), err)
|
||||
}
|
||||
}
|
||||
if t.studios {
|
||||
if err := autotag.ImageStudios(ctx, t.image, r.Image, r.Studio, t.cache); err != nil {
|
||||
return fmt.Errorf("error tagging image studio for %s: %v", t.image.DisplayName(), err)
|
||||
return fmt.Errorf("tagging image studio for %s: %v", t.image.DisplayName(), err)
|
||||
}
|
||||
}
|
||||
if t.tags {
|
||||
if err := autotag.ImageTags(ctx, t.image, r.Image, r.Tag, t.cache); err != nil {
|
||||
return fmt.Errorf("error tagging image tags for %s: %v", t.image.DisplayName(), err)
|
||||
return fmt.Errorf("tagging image tags for %s: %v", t.image.DisplayName(), err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
if !job.IsCancelled(ctx) {
|
||||
logger.Errorf("auto-tag error: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -795,22 +824,24 @@ func (t *autoTagGalleryTask) Start(ctx context.Context, wg *sync.WaitGroup) {
|
|||
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
|
||||
if t.performers {
|
||||
if err := autotag.GalleryPerformers(ctx, t.gallery, r.Gallery, r.Performer, t.cache); err != nil {
|
||||
return fmt.Errorf("error tagging gallery performers for %s: %v", t.gallery.DisplayName(), err)
|
||||
return fmt.Errorf("tagging gallery performers for %s: %v", t.gallery.DisplayName(), err)
|
||||
}
|
||||
}
|
||||
if t.studios {
|
||||
if err := autotag.GalleryStudios(ctx, t.gallery, r.Gallery, r.Studio, t.cache); err != nil {
|
||||
return fmt.Errorf("error tagging gallery studio for %s: %v", t.gallery.DisplayName(), err)
|
||||
return fmt.Errorf("tagging gallery studio for %s: %v", t.gallery.DisplayName(), err)
|
||||
}
|
||||
}
|
||||
if t.tags {
|
||||
if err := autotag.GalleryTags(ctx, t.gallery, r.Gallery, r.Tag, t.cache); err != nil {
|
||||
return fmt.Errorf("error tagging gallery tags for %s: %v", t.gallery.DisplayName(), err)
|
||||
return fmt.Errorf("tagging gallery tags for %s: %v", t.gallery.DisplayName(), err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
if !job.IsCancelled(ctx) {
|
||||
logger.Errorf("auto-tag error: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -201,9 +201,9 @@ func (f *cleanFilter) shouldCleanFile(path string, info fs.FileInfo, stash *conf
|
|||
switch {
|
||||
case info.IsDir() || fsutil.MatchExtension(path, f.zipExt):
|
||||
return f.shouldCleanGallery(path, stash)
|
||||
case fsutil.MatchExtension(path, f.vidExt):
|
||||
case useAsVideo(path):
|
||||
return f.shouldCleanVideoFile(path, stash)
|
||||
case fsutil.MatchExtension(path, f.imgExt):
|
||||
case useAsImage(path):
|
||||
return f.shouldCleanImage(path, stash)
|
||||
default:
|
||||
logger.Infof("File extension does not match any media extensions. Marking to clean: \"%s\"", path)
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ import (
|
|||
|
||||
"github.com/remeh/sizedwaitgroup"
|
||||
"github.com/stashapp/stash/internal/manager/config"
|
||||
"github.com/stashapp/stash/pkg/image"
|
||||
"github.com/stashapp/stash/pkg/job"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
|
|
@ -29,6 +30,7 @@ type GenerateMetadataInput struct {
|
|||
ForceTranscodes bool `json:"forceTranscodes"`
|
||||
Phashes bool `json:"phashes"`
|
||||
InteractiveHeatmapsSpeeds bool `json:"interactiveHeatmapsSpeeds"`
|
||||
ClipPreviews bool `json:"clipPreviews"`
|
||||
// scene ids to generate for
|
||||
SceneIDs []string `json:"sceneIDs"`
|
||||
// marker ids to generate for
|
||||
|
|
@ -69,6 +71,7 @@ type totalsGenerate struct {
|
|||
transcodes int64
|
||||
phashes int64
|
||||
interactiveHeatmapSpeeds int64
|
||||
clipPreviews int64
|
||||
|
||||
tasks int
|
||||
}
|
||||
|
|
@ -142,7 +145,38 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) {
|
|||
return
|
||||
}
|
||||
|
||||
logger.Infof("Generating %d covers %d sprites %d previews %d image previews %d markers %d transcodes %d phashes %d heatmaps & speeds", totals.covers, totals.sprites, totals.previews, totals.imagePreviews, totals.markers, totals.transcodes, totals.phashes, totals.interactiveHeatmapSpeeds)
|
||||
logMsg := "Generating"
|
||||
if j.input.Covers {
|
||||
logMsg += fmt.Sprintf(" %d covers", totals.covers)
|
||||
}
|
||||
if j.input.Sprites {
|
||||
logMsg += fmt.Sprintf(" %d sprites", totals.sprites)
|
||||
}
|
||||
if j.input.Previews {
|
||||
logMsg += fmt.Sprintf(" %d previews", totals.previews)
|
||||
}
|
||||
if j.input.ImagePreviews {
|
||||
logMsg += fmt.Sprintf(" %d image previews", totals.imagePreviews)
|
||||
}
|
||||
if j.input.Markers {
|
||||
logMsg += fmt.Sprintf(" %d markers", totals.markers)
|
||||
}
|
||||
if j.input.Transcodes {
|
||||
logMsg += fmt.Sprintf(" %d transcodes", totals.transcodes)
|
||||
}
|
||||
if j.input.Phashes {
|
||||
logMsg += fmt.Sprintf(" %d phashes", totals.phashes)
|
||||
}
|
||||
if j.input.InteractiveHeatmapsSpeeds {
|
||||
logMsg += fmt.Sprintf(" %d heatmaps & speeds", totals.interactiveHeatmapSpeeds)
|
||||
}
|
||||
if j.input.ClipPreviews {
|
||||
logMsg += fmt.Sprintf(" %d Image Clip Previews", totals.clipPreviews)
|
||||
}
|
||||
if logMsg == "Generating" {
|
||||
logMsg = "Nothing selected to generate"
|
||||
}
|
||||
logger.Infof(logMsg)
|
||||
|
||||
progress.SetTotal(int(totals.tasks))
|
||||
}()
|
||||
|
|
@ -226,6 +260,38 @@ func (j *GenerateJob) queueTasks(ctx context.Context, g *generate.Generator, que
|
|||
}
|
||||
}
|
||||
|
||||
*findFilter.Page = 1
|
||||
for more := j.input.ClipPreviews; more; {
|
||||
if job.IsCancelled(ctx) {
|
||||
return totals
|
||||
}
|
||||
|
||||
images, err := image.Query(ctx, j.txnManager.Image, nil, findFilter)
|
||||
if err != nil {
|
||||
logger.Errorf("Error encountered queuing files to scan: %s", err.Error())
|
||||
return totals
|
||||
}
|
||||
|
||||
for _, ss := range images {
|
||||
if job.IsCancelled(ctx) {
|
||||
return totals
|
||||
}
|
||||
|
||||
if err := ss.LoadFiles(ctx, j.txnManager.Image); err != nil {
|
||||
logger.Errorf("Error encountered queuing files to scan: %s", err.Error())
|
||||
return totals
|
||||
}
|
||||
|
||||
j.queueImageJob(g, ss, queue, &totals)
|
||||
}
|
||||
|
||||
if len(images) != batchSize {
|
||||
more = false
|
||||
} else {
|
||||
*findFilter.Page++
|
||||
}
|
||||
}
|
||||
|
||||
return totals
|
||||
}
|
||||
|
||||
|
|
@ -269,9 +335,10 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator,
|
|||
task := &GenerateCoverTask{
|
||||
txnManager: j.txnManager,
|
||||
Scene: *scene,
|
||||
Overwrite: j.overwrite,
|
||||
}
|
||||
|
||||
if j.overwrite || task.required(ctx) {
|
||||
if task.required(ctx) {
|
||||
totals.covers++
|
||||
totals.tasks++
|
||||
queue <- task
|
||||
|
|
@ -285,7 +352,7 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator,
|
|||
fileNamingAlgorithm: j.fileNamingAlgo,
|
||||
}
|
||||
|
||||
if j.overwrite || task.required() {
|
||||
if task.required() {
|
||||
totals.sprites++
|
||||
totals.tasks++
|
||||
queue <- task
|
||||
|
|
@ -309,21 +376,15 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator,
|
|||
}
|
||||
|
||||
if task.required() {
|
||||
addTask := false
|
||||
if j.overwrite || !task.doesVideoPreviewExist() {
|
||||
if task.videoPreviewRequired() {
|
||||
totals.previews++
|
||||
addTask = true
|
||||
}
|
||||
|
||||
if j.input.ImagePreviews && (j.overwrite || !task.doesImagePreviewExist()) {
|
||||
if task.imagePreviewRequired() {
|
||||
totals.imagePreviews++
|
||||
addTask = true
|
||||
}
|
||||
|
||||
if addTask {
|
||||
totals.tasks++
|
||||
queue <- task
|
||||
}
|
||||
totals.tasks++
|
||||
queue <- task
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -357,7 +418,7 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator,
|
|||
fileNamingAlgorithm: j.fileNamingAlgo,
|
||||
g: g,
|
||||
}
|
||||
if task.isTranscodeNeeded() {
|
||||
if task.required() {
|
||||
totals.transcodes++
|
||||
totals.tasks++
|
||||
queue <- task
|
||||
|
|
@ -375,7 +436,7 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator,
|
|||
Overwrite: j.overwrite,
|
||||
}
|
||||
|
||||
if task.shouldGenerate() {
|
||||
if task.required() {
|
||||
totals.phashes++
|
||||
totals.tasks++
|
||||
queue <- task
|
||||
|
|
@ -391,7 +452,7 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator,
|
|||
TxnManager: j.txnManager,
|
||||
}
|
||||
|
||||
if task.shouldGenerate() {
|
||||
if task.required() {
|
||||
totals.interactiveHeatmapSpeeds++
|
||||
totals.tasks++
|
||||
queue <- task
|
||||
|
|
@ -411,3 +472,16 @@ func (j *GenerateJob) queueMarkerJob(g *generate.Generator, marker *models.Scene
|
|||
totals.tasks++
|
||||
queue <- task
|
||||
}
|
||||
|
||||
func (j *GenerateJob) queueImageJob(g *generate.Generator, image *models.Image, queue chan<- Task, totals *totalsGenerate) {
|
||||
task := &GenerateClipPreviewTask{
|
||||
Image: *image,
|
||||
Overwrite: j.overwrite,
|
||||
}
|
||||
|
||||
if task.required() {
|
||||
totals.clipPreviews++
|
||||
totals.tasks++
|
||||
queue <- task
|
||||
}
|
||||
}
|
||||
|
|
|
|||
62
internal/manager/task_generate_clip_preview.go
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
package manager
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/stashapp/stash/pkg/file"
|
||||
"github.com/stashapp/stash/pkg/fsutil"
|
||||
"github.com/stashapp/stash/pkg/image"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
type GenerateClipPreviewTask struct {
|
||||
Image models.Image
|
||||
Overwrite bool
|
||||
}
|
||||
|
||||
func (t *GenerateClipPreviewTask) GetDescription() string {
|
||||
return fmt.Sprintf("Generating Preview for image Clip %s", t.Image.Path)
|
||||
}
|
||||
|
||||
func (t *GenerateClipPreviewTask) Start(ctx context.Context) {
|
||||
if !t.required() {
|
||||
return
|
||||
}
|
||||
|
||||
prevPath := GetInstance().Paths.Generated.GetClipPreviewPath(t.Image.Checksum, models.DefaultGthumbWidth)
|
||||
filePath := t.Image.Files.Primary().Base().Path
|
||||
|
||||
clipPreviewOptions := image.ClipPreviewOptions{
|
||||
InputArgs: GetInstance().Config.GetTranscodeInputArgs(),
|
||||
OutputArgs: GetInstance().Config.GetTranscodeOutputArgs(),
|
||||
Preset: GetInstance().Config.GetPreviewPreset().String(),
|
||||
}
|
||||
|
||||
encoder := image.NewThumbnailEncoder(GetInstance().FFMPEG, GetInstance().FFProbe, clipPreviewOptions)
|
||||
err := encoder.GetPreview(filePath, prevPath, models.DefaultGthumbWidth)
|
||||
if err != nil {
|
||||
logger.Errorf("getting preview for image %s: %w", filePath, err)
|
||||
return
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func (t *GenerateClipPreviewTask) required() bool {
|
||||
_, ok := t.Image.Files.Primary().(*file.VideoFile)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
if t.Overwrite {
|
||||
return true
|
||||
}
|
||||
|
||||
prevPath := GetInstance().Paths.Generated.GetClipPreviewPath(t.Image.Checksum, models.DefaultGthumbWidth)
|
||||
if exists, _ := fsutil.FileExists(prevPath); exists {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
|
@ -22,7 +22,7 @@ func (t *GenerateInteractiveHeatmapSpeedTask) GetDescription() string {
|
|||
}
|
||||
|
||||
func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
|
||||
if !t.shouldGenerate() {
|
||||
if !t.required() {
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -52,13 +52,18 @@ func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
|
|||
}
|
||||
}
|
||||
|
||||
func (t *GenerateInteractiveHeatmapSpeedTask) shouldGenerate() bool {
|
||||
func (t *GenerateInteractiveHeatmapSpeedTask) required() bool {
|
||||
primaryFile := t.Scene.Files.Primary()
|
||||
if primaryFile == nil || !primaryFile.Interactive {
|
||||
return false
|
||||
}
|
||||
|
||||
if t.Overwrite {
|
||||
return true
|
||||
}
|
||||
|
||||
sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)
|
||||
return !t.doesHeatmapExist(sceneHash) || primaryFile.InteractiveSpeed == nil || t.Overwrite
|
||||
return !t.doesHeatmapExist(sceneHash) || primaryFile.InteractiveSpeed == nil
|
||||
}
|
||||
|
||||
func (t *GenerateInteractiveHeatmapSpeedTask) doesHeatmapExist(sceneChecksum string) bool {
|
||||
|
|
|
|||
|
|
@ -24,7 +24,7 @@ func (t *GeneratePhashTask) GetDescription() string {
|
|||
}
|
||||
|
||||
func (t *GeneratePhashTask) Start(ctx context.Context) {
|
||||
if !t.shouldGenerate() {
|
||||
if !t.required() {
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -49,6 +49,10 @@ func (t *GeneratePhashTask) Start(ctx context.Context) {
|
|||
}
|
||||
}
|
||||
|
||||
func (t *GeneratePhashTask) shouldGenerate() bool {
|
||||
return t.Overwrite || t.File.Fingerprints.Get(file.FingerprintTypePhash) == nil
|
||||
func (t *GeneratePhashTask) required() bool {
|
||||
if t.Overwrite {
|
||||
return true
|
||||
}
|
||||
|
||||
return t.File.Fingerprints.Get(file.FingerprintTypePhash) == nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -30,13 +30,9 @@ func (t *GeneratePreviewTask) GetDescription() string {
|
|||
}
|
||||
|
||||
func (t *GeneratePreviewTask) Start(ctx context.Context) {
|
||||
if !t.Overwrite && !t.required() {
|
||||
return
|
||||
}
|
||||
|
||||
videoChecksum := t.Scene.GetHash(t.fileNamingAlgorithm)
|
||||
|
||||
if t.Overwrite || !t.doesVideoPreviewExist() {
|
||||
if t.videoPreviewRequired() {
|
||||
ffprobe := instance.FFProbe
|
||||
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path)
|
||||
if err != nil {
|
||||
|
|
@ -51,7 +47,7 @@ func (t *GeneratePreviewTask) Start(ctx context.Context) {
|
|||
}
|
||||
}
|
||||
|
||||
if t.ImagePreview && (t.Overwrite || !t.doesImagePreviewExist()) {
|
||||
if t.imagePreviewRequired() {
|
||||
if err := t.generateWebp(videoChecksum); err != nil {
|
||||
logger.Errorf("error generating preview webp: %v", err)
|
||||
logErrorOutput(err)
|
||||
|
|
@ -59,7 +55,7 @@ func (t *GeneratePreviewTask) Start(ctx context.Context) {
|
|||
}
|
||||
}
|
||||
|
||||
func (t GeneratePreviewTask) generateVideo(videoChecksum string, videoDuration float64, videoFrameRate float64) error {
|
||||
func (t *GeneratePreviewTask) generateVideo(videoChecksum string, videoDuration float64, videoFrameRate float64) error {
|
||||
videoFilename := t.Scene.Path
|
||||
useVsync2 := false
|
||||
|
||||
|
|
@ -78,12 +74,16 @@ func (t GeneratePreviewTask) generateVideo(videoChecksum string, videoDuration f
|
|||
return nil
|
||||
}
|
||||
|
||||
func (t GeneratePreviewTask) generateWebp(videoChecksum string) error {
|
||||
func (t *GeneratePreviewTask) generateWebp(videoChecksum string) error {
|
||||
videoFilename := t.Scene.Path
|
||||
return t.generator.PreviewWebp(context.TODO(), videoFilename, videoChecksum)
|
||||
}
|
||||
|
||||
func (t GeneratePreviewTask) required() bool {
|
||||
func (t *GeneratePreviewTask) required() bool {
|
||||
return t.videoPreviewRequired() || t.imagePreviewRequired()
|
||||
}
|
||||
|
||||
func (t *GeneratePreviewTask) videoPreviewRequired() bool {
|
||||
if t.Scene.Path == "" {
|
||||
return false
|
||||
}
|
||||
|
|
@ -92,12 +92,6 @@ func (t GeneratePreviewTask) required() bool {
|
|||
return true
|
||||
}
|
||||
|
||||
videoExists := t.doesVideoPreviewExist()
|
||||
imageExists := !t.ImagePreview || t.doesImagePreviewExist()
|
||||
return !imageExists || !videoExists
|
||||
}
|
||||
|
||||
func (t *GeneratePreviewTask) doesVideoPreviewExist() bool {
|
||||
sceneChecksum := t.Scene.GetHash(t.fileNamingAlgorithm)
|
||||
if sceneChecksum == "" {
|
||||
return false
|
||||
|
|
@ -108,10 +102,22 @@ func (t *GeneratePreviewTask) doesVideoPreviewExist() bool {
|
|||
t.videoPreviewExists = &videoExists
|
||||
}
|
||||
|
||||
return *t.videoPreviewExists
|
||||
return !*t.videoPreviewExists
|
||||
}
|
||||
|
||||
func (t *GeneratePreviewTask) doesImagePreviewExist() bool {
|
||||
func (t *GeneratePreviewTask) imagePreviewRequired() bool {
|
||||
if !t.ImagePreview {
|
||||
return false
|
||||
}
|
||||
|
||||
if t.Scene.Path == "" {
|
||||
return false
|
||||
}
|
||||
|
||||
if t.Overwrite {
|
||||
return true
|
||||
}
|
||||
|
||||
sceneChecksum := t.Scene.GetHash(t.fileNamingAlgorithm)
|
||||
if sceneChecksum == "" {
|
||||
return false
|
||||
|
|
@ -122,5 +128,5 @@ func (t *GeneratePreviewTask) doesImagePreviewExist() bool {
|
|||
t.imagePreviewExists = &imageExists
|
||||
}
|
||||
|
||||
return *t.imagePreviewExists
|
||||
return !*t.imagePreviewExists
|
||||
}
|
||||
|
|
|
|||
|
|
@ -25,8 +25,8 @@ func (t *GenerateCoverTask) Start(ctx context.Context) {
|
|||
|
||||
var required bool
|
||||
if err := t.txnManager.WithReadTxn(ctx, func(ctx context.Context) error {
|
||||
// don't generate the screenshot if it already exists
|
||||
required = t.required(ctx)
|
||||
|
||||
return t.Scene.LoadPrimaryFile(ctx, t.txnManager.File)
|
||||
}); err != nil {
|
||||
logger.Error(err)
|
||||
|
|
@ -92,7 +92,12 @@ func (t *GenerateCoverTask) Start(ctx context.Context) {
|
|||
}
|
||||
|
||||
// required returns true if the sprite needs to be generated
|
||||
func (t GenerateCoverTask) required(ctx context.Context) bool {
|
||||
// assumes in a transaction
|
||||
func (t *GenerateCoverTask) required(ctx context.Context) bool {
|
||||
if t.Scene.Path == "" {
|
||||
return false
|
||||
}
|
||||
|
||||
if t.Overwrite {
|
||||
return true
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ func (t *GenerateSpriteTask) GetDescription() string {
|
|||
}
|
||||
|
||||
func (t *GenerateSpriteTask) Start(ctx context.Context) {
|
||||
if !t.Overwrite && !t.required() {
|
||||
if !t.required() {
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -54,6 +54,11 @@ func (t GenerateSpriteTask) required() bool {
|
|||
if t.Scene.Path == "" {
|
||||
return false
|
||||
}
|
||||
|
||||
if t.Overwrite {
|
||||
return true
|
||||
}
|
||||
|
||||
sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)
|
||||
return !t.doesSpriteExist(sceneHash)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -141,8 +141,8 @@ func newHandlerRequiredFilter(c *config.Instance) *handlerRequiredFilter {
|
|||
|
||||
func (f *handlerRequiredFilter) Accept(ctx context.Context, ff file.File) bool {
|
||||
path := ff.Base().Path
|
||||
isVideoFile := fsutil.MatchExtension(path, f.vidExt)
|
||||
isImageFile := fsutil.MatchExtension(path, f.imgExt)
|
||||
isVideoFile := useAsVideo(path)
|
||||
isImageFile := useAsImage(path)
|
||||
isZipFile := fsutil.MatchExtension(path, f.zipExt)
|
||||
|
||||
var counter fileCounter
|
||||
|
|
@ -246,6 +246,7 @@ func newScanFilter(c *config.Instance, minModTime time.Time) *scanFilter {
|
|||
|
||||
func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool {
|
||||
if fsutil.IsPathInDir(f.generatedPath, path) {
|
||||
logger.Warnf("Skipping %q as it overlaps with the generated folder", path)
|
||||
return false
|
||||
}
|
||||
|
||||
|
|
@ -254,8 +255,8 @@ func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo)
|
|||
return false
|
||||
}
|
||||
|
||||
isVideoFile := fsutil.MatchExtension(path, f.vidExt)
|
||||
isImageFile := fsutil.MatchExtension(path, f.imgExt)
|
||||
isVideoFile := useAsVideo(path)
|
||||
isImageFile := useAsImage(path)
|
||||
isZipFile := fsutil.MatchExtension(path, f.zipExt)
|
||||
|
||||
// handle caption files
|
||||
|
|
@ -288,7 +289,7 @@ func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo)
|
|||
// shortcut: skip the directory entirely if it matches both exclusion patterns
|
||||
// add a trailing separator so that it correctly matches against patterns like path/.*
|
||||
pathExcludeTest := path + string(filepath.Separator)
|
||||
if (s.ExcludeVideo || matchFileRegex(pathExcludeTest, f.videoExcludeRegex)) && (s.ExcludeImage || matchFileRegex(pathExcludeTest, f.imageExcludeRegex)) {
|
||||
if (matchFileRegex(pathExcludeTest, f.videoExcludeRegex)) && (s.ExcludeImage || matchFileRegex(pathExcludeTest, f.imageExcludeRegex)) {
|
||||
logger.Debugf("Skipping directory %s as it matches video and image exclusion patterns", path)
|
||||
return false
|
||||
}
|
||||
|
|
@ -305,17 +306,14 @@ func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo)
|
|||
}
|
||||
|
||||
type scanConfig struct {
|
||||
isGenerateThumbnails bool
|
||||
isGenerateThumbnails bool
|
||||
isGenerateClipPreviews bool
|
||||
}
|
||||
|
||||
func (c *scanConfig) GetCreateGalleriesFromFolders() bool {
|
||||
return instance.Config.GetCreateGalleriesFromFolders()
|
||||
}
|
||||
|
||||
func (c *scanConfig) IsGenerateThumbnails() bool {
|
||||
return c.isGenerateThumbnails
|
||||
}
|
||||
|
||||
func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progress *job.Progress) []file.Handler {
|
||||
db := instance.Database
|
||||
pluginCache := instance.PluginCache
|
||||
|
|
@ -324,11 +322,16 @@ func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progre
|
|||
&file.FilteredHandler{
|
||||
Filter: file.FilterFunc(imageFileFilter),
|
||||
Handler: &image.ScanHandler{
|
||||
CreatorUpdater: db.Image,
|
||||
GalleryFinder: db.Gallery,
|
||||
ThumbnailGenerator: &imageThumbnailGenerator{},
|
||||
CreatorUpdater: db.Image,
|
||||
GalleryFinder: db.Gallery,
|
||||
ScanGenerator: &imageGenerators{
|
||||
input: options,
|
||||
taskQueue: taskQueue,
|
||||
progress: progress,
|
||||
},
|
||||
ScanConfig: &scanConfig{
|
||||
isGenerateThumbnails: options.ScanGenerateThumbnails,
|
||||
isGenerateThumbnails: options.ScanGenerateThumbnails,
|
||||
isGenerateClipPreviews: options.ScanGenerateClipPreviews,
|
||||
},
|
||||
PluginCache: pluginCache,
|
||||
Paths: instance.Paths,
|
||||
|
|
@ -361,35 +364,97 @@ func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progre
	}
}

type imageThumbnailGenerator struct{}
type imageGenerators struct {
	input ScanMetadataInput
	taskQueue *job.TaskQueue
	progress *job.Progress
}

func (g *imageThumbnailGenerator) GenerateThumbnail(ctx context.Context, i *models.Image, f *file.ImageFile) error {
func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f file.File) error {
	const overwrite = false

	progress := g.progress
	t := g.input
	path := f.Base().Path
	config := instance.Config
	sequentialScanning := config.GetSequentialScanning()

	if t.ScanGenerateThumbnails {
		// this should be quick, so always generate sequentially
		if err := g.generateThumbnail(ctx, i, f); err != nil {
			logger.Errorf("Error generating thumbnail for %s: %v", path, err)
		}
	}

	// avoid adding a task if the file isn't a video file
	_, isVideo := f.(*file.VideoFile)
	if isVideo && t.ScanGenerateClipPreviews {
		// this is a bit of a hack: the task requires files to be loaded, but
		// we don't really need to since we already have the file
		ii := *i
		ii.Files = models.NewRelatedFiles([]file.File{f})

		progress.AddTotal(1)
		previewsFn := func(ctx context.Context) {
			taskPreview := GenerateClipPreviewTask{
				Image: ii,
				Overwrite: overwrite,
			}

			taskPreview.Start(ctx)
			progress.Increment()
		}

		if sequentialScanning {
			previewsFn(ctx)
		} else {
			g.taskQueue.Add(fmt.Sprintf("Generating preview for %s", path), previewsFn)
		}
	}

	return nil
}

func (g *imageGenerators) generateThumbnail(ctx context.Context, i *models.Image, f file.File) error {
	thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum, models.DefaultGthumbWidth)
	exists, _ := fsutil.FileExists(thumbPath)
	if exists {
		return nil
	}

	if f.Height <= models.DefaultGthumbWidth && f.Width <= models.DefaultGthumbWidth {
	path := f.Base().Path

	asFrame, ok := f.(file.VisualFile)
	if !ok {
		return fmt.Errorf("file %s does not implement Frame", path)
	}

	if asFrame.GetHeight() <= models.DefaultGthumbWidth && asFrame.GetWidth() <= models.DefaultGthumbWidth {
		return nil
	}

	logger.Debugf("Generating thumbnail for %s", f.Path)
	logger.Debugf("Generating thumbnail for %s", path)

	encoder := image.NewThumbnailEncoder(instance.FFMPEG)
	clipPreviewOptions := image.ClipPreviewOptions{
		InputArgs: instance.Config.GetTranscodeInputArgs(),
		OutputArgs: instance.Config.GetTranscodeOutputArgs(),
		Preset: instance.Config.GetPreviewPreset().String(),
	}

	encoder := image.NewThumbnailEncoder(instance.FFMPEG, instance.FFProbe, clipPreviewOptions)
	data, err := encoder.GetThumbnail(f, models.DefaultGthumbWidth)

	if err != nil {
		// don't log for animated images
		if !errors.Is(err, image.ErrNotSupportedForThumbnail) {
			return fmt.Errorf("getting thumbnail for image %s: %w", f.Path, err)
			return fmt.Errorf("getting thumbnail for image %s: %w", path, err)
		}
		return nil
	}

	err = fsutil.WriteFile(thumbPath, data)
	if err != nil {
		return fmt.Errorf("writing thumbnail for image %s: %w", f.Path, err)
		return fmt.Errorf("writing thumbnail for image %s: %w", path, err)
	}

	return nil
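Generate above either runs clip-preview generation inline or defers it to the scan's task queue, depending on the sequential-scanning setting. The pattern in isolation looks roughly like this; it reuses the identifiers from the function above and abbreviates the closure body, so it is a sketch rather than the actual change.

	// Run-now-or-enqueue pattern used by Generate above (sketch only).
	work := func(ctx context.Context) {
		// ... generate the clip preview for this image ...
		progress.Increment()
	}

	progress.AddTotal(1)
	if sequentialScanning {
		work(ctx) // block the scan until generation finishes
	} else {
		taskQueue.Add(fmt.Sprintf("Generating preview for %s", path), work)
	}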
@ -490,6 +555,7 @@ func (g *sceneGenerators) Generate(ctx context.Context, s *models.Scene, f *file
	taskCover := GenerateCoverTask{
		Scene: *s,
		txnManager: instance.Repository,
		Overwrite: overwrite,
	}
	taskCover.Start(ctx)
	progress.Increment()
@ -119,24 +119,27 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) {
		aliases = []string{}
	}
	newPerformer := models.Performer{
		Aliases: models.NewRelatedStrings(aliases),
		Birthdate: getDate(performer.Birthdate),
		CareerLength: getString(performer.CareerLength),
		Country: getString(performer.Country),
		CreatedAt: currentTime,
		Ethnicity: getString(performer.Ethnicity),
		EyeColor: getString(performer.EyeColor),
		FakeTits: getString(performer.FakeTits),
		Gender: models.GenderEnum(getString(performer.Gender)),
		Height: getIntPtr(performer.Height),
		Weight: getIntPtr(performer.Weight),
		Instagram: getString(performer.Instagram),
		Measurements: getString(performer.Measurements),
		Name: *performer.Name,
		Piercings: getString(performer.Piercings),
		Tattoos: getString(performer.Tattoos),
		Twitter: getString(performer.Twitter),
		URL: getString(performer.URL),
		Aliases: models.NewRelatedStrings(aliases),
		Disambiguation: getString(performer.Disambiguation),
		Details: getString(performer.Details),
		Birthdate: getDate(performer.Birthdate),
		DeathDate: getDate(performer.DeathDate),
		CareerLength: getString(performer.CareerLength),
		Country: getString(performer.Country),
		CreatedAt: currentTime,
		Ethnicity: getString(performer.Ethnicity),
		EyeColor: getString(performer.EyeColor),
		HairColor: getString(performer.HairColor),
		FakeTits: getString(performer.FakeTits),
		Height: getIntPtr(performer.Height),
		Weight: getIntPtr(performer.Weight),
		Instagram: getString(performer.Instagram),
		Measurements: getString(performer.Measurements),
		Name: *performer.Name,
		Piercings: getString(performer.Piercings),
		Tattoos: getString(performer.Tattoos),
		Twitter: getString(performer.Twitter),
		URL: getString(performer.URL),
		StashIDs: models.NewRelatedStashIDs([]models.StashID{
			{
				Endpoint: t.box.Endpoint,
@ -146,6 +149,11 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) {
		UpdatedAt: currentTime,
	}

	if performer.Gender != nil {
		v := models.GenderEnum(getString(performer.Gender))
		newPerformer.Gender = &v
	}

	err := txn.WithTxn(ctx, instance.Repository, func(ctx context.Context) error {
		r := instance.Repository
		err := r.Performer.Create(ctx, &newPerformer)
@ -192,6 +200,10 @@ func (t *StashBoxPerformerTagTask) getPartial(performer *models.ScrapedPerformer
		value := getDate(performer.Birthdate)
		partial.Birthdate = models.NewOptionalDate(*value)
	}
	if performer.DeathDate != nil && *performer.DeathDate != "" && !excluded["deathdate"] {
		value := getDate(performer.DeathDate)
		partial.DeathDate = models.NewOptionalDate(*value)
	}
	if performer.CareerLength != nil && !excluded["career_length"] {
		partial.CareerLength = models.NewOptionalString(*performer.CareerLength)
	}
@ -204,6 +216,9 @@ func (t *StashBoxPerformerTagTask) getPartial(performer *models.ScrapedPerformer
	if performer.EyeColor != nil && !excluded["eye_color"] {
		partial.EyeColor = models.NewOptionalString(*performer.EyeColor)
	}
	if performer.HairColor != nil && !excluded["hair_color"] {
		partial.HairColor = models.NewOptionalString(*performer.HairColor)
	}
	if performer.FakeTits != nil && !excluded["fake_tits"] {
		partial.FakeTits = models.NewOptionalString(*performer.FakeTits)
	}
@ -231,6 +246,9 @@ func (t *StashBoxPerformerTagTask) getPartial(performer *models.ScrapedPerformer
	if excluded["name"] && performer.Name != nil {
		partial.Name = models.NewOptionalString(*performer.Name)
	}
	if performer.Disambiguation != nil && !excluded["disambiguation"] {
		partial.Disambiguation = models.NewOptionalString(*performer.Disambiguation)
	}
	if performer.Piercings != nil && !excluded["piercings"] {
		partial.Piercings = models.NewOptionalString(*performer.Piercings)
	}
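Every field in getPartial follows the same guard: copy the scraped value only when it is present and not excluded by the user. Written once as a helper, the pattern would look roughly like the sketch below; the helper is illustrative only, the change itself repeats these checks inline for each field.

	// Illustrative helper capturing the per-field guard used above.
	func setOptionalString(dst *models.OptionalString, src *string, field string, excluded map[string]bool) {
		if src != nil && !excluded[field] {
			*dst = models.NewOptionalString(*src)
		}
	}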
@ -101,7 +101,7 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {
// return true if transcode is needed
// used only when counting files to generate, doesn't affect the actual transcode generation
// if container is missing from DB it is treated as non supported in order not to delay the user
func (t *GenerateTranscodeTask) isTranscodeNeeded() bool {
func (t *GenerateTranscodeTask) required() bool {
	f := t.Scene.Files.Primary()
	if f == nil {
		return false
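The rename from isTranscodeNeeded to required lines up with how generate tasks are counted before they run. Presumably the caller does something like the following; the counting loop and the variable names are assumptions, since that code is not part of this excerpt.

	// Illustrative sketch: required() lets the caller count pending
	// transcodes before running them.
	if task.required() { // "task" is a hypothetical *GenerateTranscodeTask
		transcodeCount++ // hypothetical counter
	}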
@ -13,3 +13,9 @@ var Scene embed.FS
//go:embed image
var Image embed.FS

//go:embed tag
var Tag embed.FS

//go:embed studio
var Studio embed.FS
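These go:embed directives compile the default images into the binary. Reading one of them at runtime follows the standard embed.FS pattern; the path below comes from the studio.svg file added later in this diff, and error handling is omitted for brevity.

	// Reading an embedded default asset (sketch; error handling omitted).
	data, err := Studio.ReadFile("studio/studio.svg")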
BIN
internal/static/performer_male/Male01.png
Normal file
BIN
internal/static/performer_male/Male02.png
Normal file
BIN
internal/static/performer_male/Male03.png
Normal file
BIN
internal/static/performer_male/Male04.png
Normal file
BIN
internal/static/performer_male/Male05.png
Normal file
BIN
internal/static/performer_male/Male06.png
Normal file
7
internal/static/studio/studio.svg
Normal file
@ -0,0 +1,7 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="-352 -104 1280 720">
<!--!
Font Awesome Free 6.3.0 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) Copyright 2023 Fonticons, Inc.
Original from https://github.com/FortAwesome/Font-Awesome/blob/6.x/svgs/solid/video.svg
Modified to change color and viewbox
-->
<path d="M0 128C0 92.7 28.7 64 64 64H320c35.3 0 64 28.7 64 64V384c0 35.3-28.7 64-64 64H64c-35.3 0-64-28.7-64-64V128zM559.1 99.8c10.4 5.6 16.9 16.4 16.9 28.2V384c0 11.8-6.5 22.6-16.9 28.2s-23 5-32.9-1.6l-96-64L416 337.1V320 192 174.9l14.2-9.5 96-64c9.8-6.5 22.4-7.2 32.9-1.6z" style="fill:#ffffff;fill-opacity:1"/></svg>
67
internal/static/tag/tag.svg
Normal file
@ -0,0 +1,67 @@
<!--
Original Tag image from: https://github.com/FortAwesome/Font-Awesome/blob/6.x/svgs/solid/tag.svg
Modified to change color and rotate
Licensed under CC Attribution 4.0: https://fontawesome.com/license
-->
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="200"
height="200"
id="svg2"
version="1.1"
inkscape:version="0.48.4 r9939"
sodipodi:docname="tag.svg">
<defs
id="defs4" />
<sodipodi:namedview
id="base"
pagecolor="#000000"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageopacity="1"
inkscape:pageshadow="2"
inkscape:zoom="1"
inkscape:cx="181.77771"
inkscape:cy="279.72376"
inkscape:document-units="px"
inkscape:current-layer="layer1"
showgrid="false"
fit-margin-top="0"
fit-margin-left="0"
fit-margin-right="0"
fit-margin-bottom="0"
inkscape:window-width="1920"
inkscape:window-height="1017"
inkscape:window-x="-8"
inkscape:window-y="-8"
inkscape:window-maximized="1" />
<metadata
id="metadata7">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title></dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1"
transform="translate(-157.84358,-524.69522)">
<path
id="path2987"
d="m 229.94314,669.26549 -36.08466,-36.08466 c -4.68653,-4.68653 -4.68653,-12.28468 0,-16.97121 l 36.08466,-36.08467 a 12.000453,12.000453 0 0 1 8.4856,-3.5148 l 74.91443,0 c 6.62761,0 12.00041,5.3728 12.00041,12.00041 l 0,72.16933 c 0,6.62761 -5.3728,12.00041 -12.00041,12.00041 l -74.91443,0 a 12.000453,12.000453 0 0 1 -8.4856,-3.51481 z m -13.45639,-53.05587 c -4.68653,4.68653 -4.68653,12.28468 0,16.97121 4.68652,4.68652 12.28467,4.68652 16.9712,0 4.68653,-4.68653 4.68653,-12.28468 0,-16.97121 -4.68653,-4.68652 -12.28468,-4.68652 -16.9712,0 z"
inkscape:connector-curvature="0"
style="fill:#ffffff;fill-opacity:1" />
</g>
</svg>
@ -20,6 +20,7 @@ import (
	"github.com/stashapp/stash/pkg/fsutil"
	"github.com/stashapp/stash/pkg/logger"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/utils"

	"github.com/zencoder/go-dash/v3/mpd"
)
@ -455,7 +456,7 @@ func serveHLSManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request,
	fmt.Fprint(&buf, "#EXT-X-ENDLIST\n")

	w.Header().Set("Content-Type", MimeHLS)
	http.ServeContent(w, r, "", time.Time{}, bytes.NewReader(buf.Bytes()))
	utils.ServeStaticContent(w, r, buf.Bytes())
}

// serveDASHManifest serves a generated DASH manifest.
@ -546,7 +547,7 @@ func serveDASHManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request
	_ = m.Write(&buf)

	w.Header().Set("Content-Type", MimeDASH)
	http.ServeContent(w, r, "", time.Time{}, bytes.NewReader(buf.Bytes()))
	utils.ServeStaticContent(w, r, buf.Bytes())
}

func (sm *StreamManager) ServeManifest(w http.ResponseWriter, r *http.Request, streamType *StreamType, vf *file.VideoFile, resolution string) {
@ -561,9 +562,7 @@ func (sm *StreamManager) serveWaitingSegment(w http.ResponseWriter, r *http.Requ
	if err == nil {
		logger.Tracef("[transcode] streaming segment file %s", segment.file)
		w.Header().Set("Content-Type", segment.segmentType.MimeType)
		// Prevent caching as segments are generated on the fly
		w.Header().Add("Cache-Control", "no-cache")
		http.ServeFile(w, r, segment.path)
		utils.ServeStaticFile(w, r, segment.path)
	} else if !errors.Is(err, context.Canceled) {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
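Several handlers above now delegate to utils.ServeStaticContent and utils.ServeStaticFile instead of calling http.ServeContent or http.ServeFile directly. Their implementations are not part of this excerpt; conceptually such a helper wraps ServeContent so callers get consistent range and cache handling, roughly as in the sketch below, which is an assumption rather than the real code.

	// Rough idea only; the real utils helpers are not shown in this diff
	// and may set different headers.
	func serveStaticContent(w http.ResponseWriter, r *http.Request, data []byte) {
		w.Header().Set("Cache-Control", "no-cache")
		http.ServeContent(w, r, "", time.Time{}, bytes.NewReader(data))
	}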
@ -260,6 +260,7 @@ func (sm *StreamManager) getTranscodeStream(ctx *fsutil.LockContext, options Tra
	mimeType := options.StreamType.MimeType
	handler := func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Cache-Control", "no-store")
		w.Header().Set("Content-Type", mimeType)
		w.WriteHeader(http.StatusOK)
@ -10,6 +10,7 @@ var ErrUnsupportedFormat = errors.New("unsupported image format")
type ImageThumbnailOptions struct {
	InputFormat ffmpeg.ImageFormat
	OutputFormat ffmpeg.ImageFormat
	OutputPath string
	MaxDimensions int
	Quality int
@ -29,12 +30,15 @@ func ImageThumbnail(input string, options ImageThumbnailOptions) ffmpeg.Args {
		VideoFilter(videoFilter).
		VideoCodec(ffmpeg.VideoCodecMJpeg)

	args = append(args, "-frames:v", "1")

	if options.Quality > 0 {
		args = args.FixedQualityScaleVideo(options.Quality)
	}

	args = args.ImageFormat(ffmpeg.ImageFormatImage2Pipe).
		Output(options.OutputPath)
		Output(options.OutputPath).
		ImageFormat(options.OutputFormat)

	return args
}
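A caller fills ImageThumbnailOptions and passes the result to the ffmpeg runner. An illustrative call might look like the sketch below; the field values are made up, and the two format fields are omitted because their constant names are not shown in this excerpt.

	// Illustrative call only; values are hypothetical.
	args := ImageThumbnail("/tmp/input.png", ImageThumbnailOptions{
		OutputPath:    "/tmp/thumb.jpg",
		MaxDimensions: 512,
		Quality:       5,
	})
	_ = args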
@ -1,16 +1,13 @@
package file

import (
	"bytes"
	"context"
	"errors"
	"io"
	"io/fs"
	"net/http"
	"strconv"
	"syscall"
	"time"

	"github.com/stashapp/stash/pkg/logger"
)

// ID represents an ID of a file.
@ -119,8 +116,6 @@ func (f *BaseFile) Info(fs FS) (fs.FileInfo, error) {
}

func (f *BaseFile) Serve(fs FS, w http.ResponseWriter, r *http.Request) error {
	w.Header().Add("Cache-Control", "max-age=604800000") // 1 Week

	reader, err := f.Open(fs)
	if err != nil {
		return err
@ -128,23 +123,22 @@ func (f *BaseFile) Serve(fs FS, w http.ResponseWriter, r *http.Request) error {
	defer reader.Close()

	rsc, ok := reader.(io.ReadSeeker)
	content, ok := reader.(io.ReadSeeker)
	if !ok {
		// fallback to direct copy
		data, err := io.ReadAll(reader)
		if err != nil {
			return err
		}

		k, err := w.Write(data)
		if err != nil && !errors.Is(err, syscall.EPIPE) {
			logger.Warnf("error serving file (wrote %v bytes out of %v): %v", k, len(data), err)
		}

		return nil
		content = bytes.NewReader(data)
	}

	http.ServeContent(w, r, f.Basename, f.ModTime, rsc)
	if r.URL.Query().Has("t") {
		w.Header().Set("Cache-Control", "private, max-age=31536000, immutable")
	} else {
		w.Header().Set("Cache-Control", "no-cache")
	}
	http.ServeContent(w, r, f.Basename, f.ModTime, content)

	return nil
}
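The rewritten Serve method chooses its Cache-Control header from the request: when the URL carries a "t" query parameter (a cache-busting value), the response may be cached aggressively because a changed file is served under a new URL; otherwise the client must revalidate. A small usage sketch follows; the URL path, the file value f and the filesystem fsys are hypothetical, and the httptest package is assumed only for illustration.

	// Illustrative only: requests with a cache-busting "t" parameter get an
	// immutable Cache-Control header from Serve.
	req := httptest.NewRequest(http.MethodGet, "/image/123/thumbnail?t=1700000000", nil)
	rec := httptest.NewRecorder()
	_ = f.Serve(fsys, rec, req)
	// expected: Cache-Control: private, max-age=31536000, immutable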