Merge branch 'stashapp:develop' into develop

Commit 8dff48eda8 by SuperMudkip, 2025-11-26 12:22:32 -05:00, committed by GitHub
411 changed files with 28041 additions and 12106 deletions


@ -1,40 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: "[Bug Report] Short Form Subject (50 Chars or less)"
labels: bug report
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem please ensure that your screenshots are SFW or at least appropriately censored.
**Stash Version: (from Settings -> About):**
**Desktop (please complete the following information):**
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
- Version [e.g. 22]
**Smartphone (please complete the following information):**
- Device: [e.g. iPhone6]
- OS: [e.g. iOS8.1]
- Browser [e.g. stock browser, safari]
- Version [e.g. 22]
**Additional context**
Add any other context about the problem here.

.github/ISSUE_TEMPLATE/bug_report.yml (new file, 64 lines added)

@ -0,0 +1,64 @@
name: Bug Report
description: Create a report to help us fix the bug
labels: ["bug report"]
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to fill out this bug report!
- type: textarea
id: description
attributes:
label: Describe the bug
description: Provide a clear and concise description of what the bug is.
validations:
required: true
- type: textarea
id: reproduction
attributes:
label: Steps to reproduce
description: Detail the steps that would replicate this issue.
placeholder: |
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
validations:
required: true
- type: textarea
id: expected
attributes:
label: Expected behaviour
description: Provide a clear and concise description of what you expected to happen.
validations:
required: true
- type: textarea
id: context
attributes:
label: Screenshots or additional context
description: Provide any additional context and SFW screenshots here to help us solve this issue.
validations:
required: false
- type: input
id: stashversion
attributes:
label: Stash version
description: This can be found in Settings > About.
placeholder: (e.g. v0.28.1)
validations:
required: true
- type: input
id: devicedetails
attributes:
label: Device details
description: |
If this is an issue that occurs when using the Stash interface, please provide details of the device/browser used which presents the reported issue.
placeholder: (e.g. Firefox 97 (64-bit) on Windows 11)
validations:
required: false
- type: textarea
id: logs
attributes:
label: Relevant log output
description: Please copy and paste any relevant log output from Settings > Logs. This will be automatically formatted into code, so no need for backticks.
render: shell

.github/ISSUE_TEMPLATE/config.yml (new file, 11 lines added)

@ -0,0 +1,11 @@
blank_issues_enabled: false
contact_links:
- name: Community forum
url: https://discourse.stashapp.cc
about: Start a discussion on the community forum.
- name: Community Discord
url: https://discord.gg/Y8MNsvQBvZ
about: Chat with the community on Discord.
- name: Documentation
url: https://docs.stashapp.cc
about: Check out documentation for help and information.


@ -1,24 +0,0 @@
---
name: Discussion / Request for Commentary [RFC]
about: This is for issues that will be discussed and won't necessarily result directly
in commits or pull requests.
title: "[RFC] Short Form Title"
labels: help wanted
assignees: ''
---
<!-- Update or delete the title if you need to delegate your title gore to something
# Title
*### Scope*
<!-- describe the scope of your topic and your goals ideally within a single paragraph or TL;DR kind of summary so its easier for people to determine if they can contribute at a glance. -->
## Long Form
<!-- Only required if your scope and titles can't cover everything. -->
## Examples
<!-- if you can show a picture or video examples post them here, please ensure that you respect people's time and attention and understand that people are volunteering their time, so concision is ideal and considerate. -->
## Reference Reading
<!-- if there is any reference reading or documentation, please refer to it here. -->


@ -1,20 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: "[Feature] Short Form Title (50 chars or less.)"
labels: feature request
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.


@ -0,0 +1,44 @@
name: Feature Request
description: Request a new feature or idea to be added to Stash
labels: ["feature request"]
body:
- type: textarea
id: description
attributes:
label: Describe the feature you'd like
description: Provide a clear description of the feature you'd like implemented
validations:
required: true
- type: textarea
id: benefits
attributes:
label: Describe the benefits this would bring to existing users
description: |
Explain the measurable benefits this feature would achieve for existing users.
The benefits should be described in terms of outcomes for users, not specific implementations.
validations:
required: true
- type: textarea
id: already_possible
attributes:
label: Is there an existing way to achieve this goal?
description: |
Yes/No. If Yes, describe how your proposed feature differs from or improves upon the current method
validations:
required: true
- type: checkboxes
id: confirm-search
attributes:
label: Have you searched for an existing open/closed issue?
description: |
To help us keep these issues under control, please ensure you have first [searched our issue list](https://github.com/stashapp/stash/issues?q=is%3Aissue) for any existing issues that cover the core request or benefit of your proposal.
options:
- label: I have searched for existing issues and none cover the core request of my proposal
required: true
- type: textarea
id: context
attributes:
label: Additional context
description: Add any other context or screenshots about the feature request here.
validations:
required: false


@ -2,7 +2,10 @@ name: Build
on:
push:
branches: [ develop, master ]
branches:
- develop
- master
- 'releases/**'
pull_request:
release:
types: [ published ]
@ -12,7 +15,7 @@ concurrency:
cancel-in-progress: true
env:
COMPILER_IMAGE: stashapp/compiler:11
COMPILER_IMAGE: stashapp/compiler:12
jobs:
build:
@ -37,7 +40,7 @@ jobs:
cache-name: cache-node_modules
with:
path: ui/v2.5/node_modules
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/yarn.lock') }}
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/pnpm-lock.yaml') }}
- name: Cache UI build
uses: actions/cache@v3
@ -46,7 +49,7 @@ jobs:
cache-name: cache-ui
with:
path: ui/v2.5/build
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/yarn.lock', 'ui/v2.5/public/**', 'ui/v2.5/src/**', 'graphql/**/*.graphql') }}
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/pnpm-lock.yaml', 'ui/v2.5/public/**', 'ui/v2.5/src/**', 'graphql/**/*.graphql') }}
- name: Cache go build
uses: actions/cache@v3
@ -65,7 +68,7 @@ jobs:
docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated --env OFFICIAL_BUILD=${{ env.official-build }} -w /stash $COMPILER_IMAGE tail -f /dev/null
- name: Pre-install
run: docker exec -t build /bin/bash -c "make pre-ui"
run: docker exec -t build /bin/bash -c "make CI=1 pre-ui"
- name: Generate
run: docker exec -t build /bin/bash -c "make generate"


@ -6,10 +6,11 @@ on:
branches:
- master
- develop
- 'releases/**'
pull_request:
env:
COMPILER_IMAGE: stashapp/compiler:11
COMPILER_IMAGE: stashapp/compiler:12
jobs:
golangci:


@ -1,5 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="WEB_MODULE" version="4">
<component name="Go" enabled="true" />
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/certs" />
@ -10,4 +11,4 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>
</module>


@ -275,7 +275,7 @@ generate: generate-backend generate-ui
.PHONY: generate-ui
generate-ui:
cd ui/v2.5 && yarn run gqlgen
cd ui/v2.5 && npm run gqlgen
.PHONY: generate-backend
generate-backend: touch-ui
@ -338,9 +338,19 @@ server-clean:
# installs UI dependencies. Run when first cloning repository, or if UI
# dependencies have changed
# If CI is set, configures pnpm to use a local store to avoid
# putting .pnpm-store in /stash
# NOTE: to run in the docker build container, using the existing
# node_modules folder, rename the .modules.yaml to .modules.yaml.bak
# and a new one will be generated. This will need to be reversed after
# building.
.PHONY: pre-ui
pre-ui:
cd ui/v2.5 && yarn install --frozen-lockfile
ifdef CI
cd ui/v2.5 && pnpm config set store-dir ~/.pnpm-store && pnpm install --frozen-lockfile
else
cd ui/v2.5 && pnpm install --frozen-lockfile
endif
.PHONY: ui-env
ui-env: build-info
@ -359,7 +369,7 @@ ui: ui-only generate-login-locale
.PHONY: ui-only
ui-only: ui-env
cd ui/v2.5 && yarn build
cd ui/v2.5 && npm run build
.PHONY: zip-ui
zip-ui:
@ -368,20 +378,24 @@ zip-ui:
.PHONY: ui-start
ui-start: ui-env
cd ui/v2.5 && yarn start --host
cd ui/v2.5 && npm run start -- --host
.PHONY: fmt-ui
fmt-ui:
cd ui/v2.5 && yarn format
cd ui/v2.5 && npm run format
# runs all of the frontend PR-acceptance steps
.PHONY: validate-ui
validate-ui:
cd ui/v2.5 && yarn run validate
cd ui/v2.5 && npm run validate
# these targets run the same steps as fmt-ui and validate-ui, but only on files that have changed
fmt-ui-quick:
cd ui/v2.5 && yarn run prettier --write $$(git diff --name-only --relative --diff-filter d . ../../graphql)
cd ui/v2.5 && \
files=$$(git diff --name-only --relative --diff-filter d . ../../graphql); \
if [ -n "$$files" ]; then \
npm run prettier -- --write $$files; \
fi
# does not run tsc checks, as they are slow
validate-ui-quick:
@ -389,9 +403,9 @@ validate-ui-quick:
tsfiles=$$(git diff --name-only --relative --diff-filter d src | grep -e "\.tsx\?\$$"); \
scssfiles=$$(git diff --name-only --relative --diff-filter d src | grep "\.scss"); \
prettyfiles=$$(git diff --name-only --relative --diff-filter d . ../../graphql); \
if [ -n "$$tsfiles" ]; then yarn run eslint $$tsfiles; fi && \
if [ -n "$$scssfiles" ]; then yarn run stylelint $$scssfiles; fi && \
if [ -n "$$prettyfiles" ]; then yarn run prettier --check $$prettyfiles; fi
if [ -n "$$tsfiles" ]; then npm run eslint -- $$tsfiles; fi && \
if [ -n "$$scssfiles" ]; then npm run stylelint -- $$scssfiles; fi && \
if [ -n "$$prettyfiles" ]; then npm run prettier -- --check $$prettyfiles; fi
# runs all of the backend PR-acceptance steps
.PHONY: validate-backend


@ -9,7 +9,7 @@
[![GitHub release (latest by date)](https://img.shields.io/github/v/release/stashapp/stash?logo=github)](https://github.com/stashapp/stash/releases/latest)
[![GitHub issues by-label](https://img.shields.io/github/issues-raw/stashapp/stash/bounty)](https://github.com/stashapp/stash/labels/bounty)
### **Stash is a self-hosted webapp written in Go which organizes and serves your porn.**
### **Stash is a self-hosted webapp written in Go which organizes and serves your diverse content collection, catering to both your SFW and NSFW needs.**
![demo image](docs/readme_assets/demo_image.png)
* Stash gathers information about videos in your collection from the internet, and is extensible through the use of community-built plugins for a large number of content producers and sites.


@ -110,7 +110,7 @@ func main() {
// Logs only error level message to stderr.
func initLogTemp() *log.Logger {
l := log.NewLogger()
l.Init("", true, "Error")
l.Init("", true, "Error", 0)
logger.Logger = l
return l
@ -118,7 +118,7 @@ func initLogTemp() *log.Logger {
func initLog(cfg *config.Config) *log.Logger {
l := log.NewLogger()
l.Init(cfg.GetLogFile(), cfg.GetLogOut(), cfg.GetLogLevel())
l.Init(cfg.GetLogFile(), cfg.GetLogOut(), cfg.GetLogLevel(), cfg.GetLogFileMaxSize())
logger.Logger = l
return l
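The fourth argument added to `Init` comes from `cfg.GetLogFileMaxSize()` and pairs with the `gopkg.in/natefinch/lumberjack.v2` dependency added to go.mod further down. The following is a hedged sketch only, not Stash's actual log package: the `Logger` and `Init` bodies are assumptions, showing how a max-size parameter can drive lumberjack's size-based rotation in a logrus-backed logger.

```go
// Hypothetical sketch only: not Stash's real log package.
package log

import (
	"io"
	"os"

	"github.com/sirupsen/logrus"
	lumberjack "gopkg.in/natefinch/lumberjack.v2"
)

// Logger wraps logrus to mirror the l.Init(...) calls shown above.
type Logger struct {
	*logrus.Logger
}

func NewLogger() *Logger {
	return &Logger{logrus.New()}
}

// Init configures output, level, and optional size-based rotation.
// maxSizeMB is the rotation threshold in megabytes; 0 disables rotation.
func (l *Logger) Init(logFile string, logOut bool, level string, maxSizeMB int) {
	var writers []io.Writer
	if logOut {
		writers = append(writers, os.Stderr)
	}
	if logFile != "" {
		if maxSizeMB > 0 {
			// lumberjack rotates logFile once it grows past maxSizeMB megabytes.
			writers = append(writers, &lumberjack.Logger{
				Filename: logFile,
				MaxSize:  maxSizeMB,
			})
		} else if f, err := os.OpenFile(logFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o644); err == nil {
			// No rotation: plain append to the log file.
			writers = append(writers, f)
		}
	}
	if len(writers) > 0 {
		l.SetOutput(io.MultiWriter(writers...))
	}
	if lvl, err := logrus.ParseLevel(level); err == nil {
		l.SetLevel(lvl)
	}
}
```

Under this sketch, the `initLogTemp` call above (`l.Init("", true, "Error", 0)`) would log only to stderr with no rotation.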


@ -1,14 +1,16 @@
# This dockerfile should be built with `make docker-build` from the stash root.
# Build Frontend
FROM node:20-alpine AS frontend
FROM node:24-alpine AS frontend
RUN apk add --no-cache make git
## cache node_modules separately
COPY ./ui/v2.5/package.json ./ui/v2.5/yarn.lock /stash/ui/v2.5/
COPY ./ui/v2.5/package.json ./ui/v2.5/pnpm-lock.yaml /stash/ui/v2.5/
WORKDIR /stash
COPY Makefile /stash/
COPY ./graphql /stash/graphql/
COPY ./ui /stash/ui/
# pnpm install with npm
RUN npm install -g pnpm
RUN make pre-ui
RUN make generate-ui
ARG GITHASH


@ -5,11 +5,13 @@ ARG CUDA_VERSION=12.8.0
FROM node:20-alpine AS frontend
RUN apk add --no-cache make git
## cache node_modules separately
COPY ./ui/v2.5/package.json ./ui/v2.5/yarn.lock /stash/ui/v2.5/
COPY ./ui/v2.5/package.json ./ui/v2.5/pnpm-lock.yaml /stash/ui/v2.5/
WORKDIR /stash
COPY Makefile /stash/
COPY ./graphql /stash/graphql/
COPY ./ui /stash/ui/
# pnpm install with npm
RUN npm install -g pnpm
RUN make pre-ui
RUN make generate-ui
ARG GITHASH


@ -12,9 +12,8 @@ RUN if [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then BIN=stash-linux-arm32v6; \
FROM --platform=$TARGETPLATFORM alpine:latest AS app
COPY --from=binary /stash /usr/bin/
RUN apk add --no-cache ca-certificates python3 py3-requests py3-requests-toolbelt py3-lxml py3-pip ffmpeg ruby tzdata vips vips-tools \
&& pip install --user --break-system-packages mechanicalsoup cloudscraper stashapp-tools \
&& gem install faraday
RUN apk add --no-cache ca-certificates python3 py3-requests py3-requests-toolbelt py3-lxml py3-pip ffmpeg tzdata vips vips-tools \
&& pip install --break-system-packages mechanicalsoup cloudscraper stashapp-tools
ENV STASH_CONFIG_FILE=/root/.stash/config.yml
# Basic build-time metadata as defined at https://github.com/opencontainers/image-spec/blob/main/annotations.md#pre-defined-annotation-keys


@ -8,15 +8,11 @@ RUN mkdir -p /etc/apt/keyrings
ADD https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key nodesource.gpg.key
RUN cat nodesource.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && rm nodesource.gpg.key
RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
ADD https://dl.yarnpkg.com/debian/pubkey.gpg yarn.gpg
RUN cat yarn.gpg | gpg --dearmor -o /etc/apt/keyrings/yarn.gpg && rm yarn.gpg
RUN echo "deb [signed-by=/etc/apt/keyrings/yarn.gpg] https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_24.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
RUN apt-get update && \
apt-get install -y --no-install-recommends \
git make tar bash nodejs yarn zip \
git make tar bash nodejs zip \
clang llvm-dev cmake patch libxml2-dev uuid-dev libssl-dev xz-utils \
bzip2 gzip sed cpio libbz2-dev zlib1g-dev \
gcc-mingw-w64 \
@ -24,6 +20,9 @@ RUN apt-get update && \
gcc-aarch64-linux-gnu libc-dev-arm64-cross && \
rm -rf /var/lib/apt/lists/*;
# pnpm install with npm
RUN npm install -g pnpm
# FreeBSD cross-compilation setup
# https://github.com/smartmontools/docker-build/blob/6b8c92560d17d325310ba02d9f5a4b250cb0764a/Dockerfile#L66
ENV FREEBSD_VERSION 13.4


@ -1,6 +1,6 @@
user=stashapp
repo=compiler
version=11
version=12
latest:
docker build -t ${user}/${repo}:latest .


@ -5,7 +5,8 @@
* [Go](https://golang.org/dl/)
* [GolangCI](https://golangci-lint.run/) - A meta-linter which runs several linters in parallel
* To install, follow the [local installation instructions](https://golangci-lint.run/welcome/install/#local-installation)
* [Yarn](https://yarnpkg.com/en/docs/install) - Yarn package manager
* [nodejs](https://nodejs.org/en/download) - nodejs runtime
* corepack/[pnpm](https://pnpm.io/installation) - nodejs package manager (included with nodejs)
## Environment
@ -22,32 +23,22 @@ NOTE: The `make` command in Windows will be `mingw32-make` with MinGW. For examp
### macOS
1. If you don't have it already, install the [Homebrew package manager](https://brew.sh).
2. Install dependencies: `brew install go git yarn gcc make node ffmpeg`
2. Install dependencies: `brew install go git gcc make node ffmpeg`
### Linux
#### Arch Linux
1. Install dependencies: `sudo pacman -S go git yarn gcc make nodejs ffmpeg --needed`
1. Install dependencies: `sudo pacman -S go git gcc make nodejs ffmpeg --needed`
#### Ubuntu
1. Install dependencies: `sudo apt-get install golang git yarnpkg gcc nodejs ffmpeg -y`
1. Install dependencies: `sudo apt-get install golang git gcc nodejs ffmpeg -y`
### OpenBSD
1. Install dependencies `doas pkg_add gmake go git yarn node cmake`
2. Compile a custom ffmpeg from ports. The default ffmpeg in OpenBSD's packages is not compiled with WebP support, which is required by Stash.
- If you've already installed ffmpeg, uninstall it: `doas pkg_delete ffmpeg`
- If you haven't already, [fetch the ports tree and verify](https://www.openbsd.org/faq/ports/ports.html#PortsFetch).
- Find the ffmpeg port in `/usr/ports/graphics/ffmpeg`, and patch the Makefile to include libwebp
- Add `webp` to `WANTLIB`
- Add `graphics/libwebp` to the list in `LIB_DEPENDS`
- Add `-lwebp -lwebpdecoder -lwebpdemux -lwebpmux` to `LIBavcodec_EXTRALIBS`
- Add `--enable-libweb` to the list in `CONFIGURE_ARGS`
- If you've already built ffmpeg from ports before, you may need to also increment `REVISION`
- Run `doas make install`
- Follow the instructions below to build a release, but replace the final step `make build-release` with `gmake flags-release stash`, to [avoid the PIE buildmode](https://github.com/golang/go/issues/59866).
1. Install dependencies `doas pkg_add gmake go git node cmake ffmpeg`
2. Follow the instructions below to build a release, but replace the final step `make build-release` with `gmake flags-release stash`, to [avoid the PIE buildmode](https://github.com/golang/go/issues/59866).
NOTE: The `make` command in OpenBSD will be `gmake`. For example, `make pre-ui` will be `gmake pre-ui`.

go.mod (17 lines changed)

@ -15,7 +15,7 @@ require (
github.com/disintegration/imaging v1.6.2
github.com/dop251/goja v0.0.0-20231027120936-b396bb4c349d
github.com/doug-martin/goqu/v9 v9.18.0
github.com/go-chi/chi/v5 v5.0.12
github.com/go-chi/chi/v5 v5.2.2
github.com/go-chi/cors v1.2.1
github.com/go-chi/httplog v0.3.1
github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4
@ -32,7 +32,11 @@ require (
github.com/json-iterator/go v1.1.12
github.com/kermieisinthehouse/gosx-notifier v0.1.2
github.com/kermieisinthehouse/systray v1.2.4
github.com/knadh/koanf v1.5.0
github.com/knadh/koanf/parsers/yaml v1.1.0
github.com/knadh/koanf/providers/env v1.1.0
github.com/knadh/koanf/providers/file v1.2.0
github.com/knadh/koanf/providers/posflag v1.0.1
github.com/knadh/koanf/v2 v2.2.1
github.com/lucasb-eyer/go-colorful v1.2.0
github.com/mattn/go-sqlite3 v1.14.22
github.com/mitchellh/mapstructure v1.5.0
@ -42,7 +46,7 @@ require (
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd
github.com/sirupsen/logrus v1.9.3
github.com/spf13/cast v1.6.0
github.com/spf13/pflag v1.0.5
github.com/spf13/pflag v1.0.6
github.com/stretchr/testify v1.10.0
github.com/tidwall/gjson v1.16.0
github.com/vearutop/statigz v1.4.0
@ -59,6 +63,7 @@ require (
golang.org/x/text v0.25.0
golang.org/x/time v0.10.0
gopkg.in/guregu/null.v4 v4.0.0
gopkg.in/natefinch/lumberjack.v2 v2.2.1
gopkg.in/yaml.v2 v2.4.0
)
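Alongside the spf13/pflag bump, this commit adds the koanf v2 core plus its YAML parser and file/env/posflag providers. The following is a hypothetical composition sketch, not Stash's actual configuration code, showing the usual layering these modules enable: a YAML file as the base, with pflag-defined command-line flags merged on top via the posflag provider. All flag names and config keys here are invented for illustration.

```go
// Hypothetical sketch only: illustrates koanf + pflag wiring, not Stash's code.
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/knadh/koanf/parsers/yaml"
	"github.com/knadh/koanf/providers/file"
	"github.com/knadh/koanf/providers/posflag"
	"github.com/knadh/koanf/v2"
	"github.com/spf13/pflag"
)

func main() {
	k := koanf.New(".") // "." is the key-path delimiter

	// Base layer: values from a YAML config file, if one exists.
	if err := k.Load(file.Provider("config.yml"), yaml.Parser()); err != nil {
		log.Printf("no config file loaded: %v", err)
	}

	// Override layer: command-line flags defined with spf13/pflag.
	fs := pflag.NewFlagSet("example", pflag.ContinueOnError)
	fs.String("host", "0.0.0.0", "interface to bind to")
	fs.Int("port", 9999, "port to listen on")
	if err := fs.Parse(os.Args[1:]); err != nil {
		log.Fatal(err)
	}
	// posflag needs no parser; passing k lets it respect values already
	// loaded when a flag was left at its default.
	if err := k.Load(posflag.Provider(fs, ".", k), nil); err != nil {
		log.Fatal(err)
	}

	fmt.Println("host:", k.String("host"), "port:", k.Int("port"))
}
```

Each successive `Load` merges into the same `Koanf` instance, with later providers overriding earlier ones for matching keys, which is why the flag layer comes last.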
@ -72,9 +77,9 @@ require (
github.com/cpuguy83/go-md2man/v2 v2.0.7 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dlclark/regexp2 v1.7.0 // indirect
github.com/fsnotify/fsnotify v1.6.0 // indirect
github.com/fsnotify/fsnotify v1.9.0 // indirect
github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect
github.com/go-viper/mapstructure/v2 v2.2.1 // indirect
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
github.com/gobwas/httphead v0.1.0 // indirect
github.com/gobwas/pool v0.2.1 // indirect
github.com/gobwas/ws v1.3.0 // indirect
@ -86,6 +91,7 @@ require (
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/knadh/koanf/maps v0.1.2 // indirect
github.com/magiconair/properties v1.8.7 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
@ -114,6 +120,7 @@ require (
github.com/urfave/cli/v2 v2.27.6 // indirect
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
go.uber.org/atomic v1.11.0 // indirect
go.yaml.in/yaml/v3 v3.0.3 // indirect
golang.org/x/mod v0.24.0 // indirect
golang.org/x/sync v0.14.0 // indirect
golang.org/x/tools v0.33.0 // indirect

go.sum (117 lines changed)

@ -72,7 +72,6 @@ github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuy
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/anacrolix/dms v1.2.2 h1:0mk2/DXNqa5KDDbaLgFPf3oMV6VCGdFNh3d/gt4oafM=
github.com/anacrolix/dms v1.2.2/go.mod h1:msPKAoppoNRfrYplJqx63FZ+VipDZ4Xsj3KzIQxyU7k=
github.com/anacrolix/envpprof v0.0.0-20180404065416-323002cec2fa/go.mod h1:KgHhUaQMc8cC0+cEflSgCFNFbKwi5h54gqtVn8yhP7c=
@ -104,16 +103,6 @@ github.com/asticode/go-astisub v0.25.1 h1:RZMGfZPp7CXOkI6g+zCU7DRLuciGPGup921uKZ
github.com/asticode/go-astisub v0.25.1/go.mod h1:WTkuSzFB+Bp7wezuSf2Oxulj5A8zu2zLRVFf6bIFQK8=
github.com/asticode/go-astits v1.8.0 h1:rf6aiiGn/QhlFjNON1n5plqF3Fs025XLUwiQ0NB6oZg=
github.com/asticode/go-astits v1.8.0/go.mod h1:DkOWmBNQpnr9mv24KfZjq4JawCFX1FCqjLVGvO0DygQ=
github.com/aws/aws-sdk-go-v2 v1.9.2/go.mod h1:cK/D0BBs0b/oWPIcX/Z/obahJK1TT7IPVjy53i/mX/4=
github.com/aws/aws-sdk-go-v2/config v1.8.3/go.mod h1:4AEiLtAb8kLs7vgw2ZV3p2VZ1+hBavOc84hqxVNpCyw=
github.com/aws/aws-sdk-go-v2/credentials v1.4.3/go.mod h1:FNNC6nQZQUuyhq5aE5c7ata8o9e4ECGmS4lAXC7o1mQ=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.6.0/go.mod h1:gqlclDEZp4aqJOancXK6TN24aKhT0W0Ae9MHk3wzTMM=
github.com/aws/aws-sdk-go-v2/internal/ini v1.2.4/go.mod h1:ZcBrrI3zBKlhGFNYWvju0I3TR93I7YIgAfy82Fh4lcQ=
github.com/aws/aws-sdk-go-v2/service/appconfig v1.4.2/go.mod h1:FZ3HkCe+b10uFZZkFdvf98LHW21k49W8o8J366lqVKY=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.3.2/go.mod h1:72HRZDLMtmVQiLG2tLfQcaWLCssELvGl+Zf2WVxMmR8=
github.com/aws/aws-sdk-go-v2/service/sso v1.4.2/go.mod h1:NBvT9R1MEF+Ud6ApJKM0G+IkPchKS7p7c2YPKwHmBOk=
github.com/aws/aws-sdk-go-v2/service/sts v1.7.2/go.mod h1:8EzeIqfWt2wWT4rJVu3f21TfrhJ8AEMzVybRNSb/b4g=
github.com/aws/smithy-go v1.8.0/go.mod h1:SObp3lf9smib00L/v3U2eAKG8FyQ7iLrJnQiAmR5n+E=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
@ -185,7 +174,6 @@ github.com/dop251/goja_nodejs v0.0.0-20211022123610-8dd9abb0616d/go.mod h1:DngW8
github.com/doug-martin/goqu/v9 v9.18.0 h1:/6bcuEtAe6nsSMVK/M+fOiXUNfyFF3yYtE07DBPFMYY=
github.com/doug-martin/goqu/v9 v9.18.0/go.mod h1:nf0Wc2/hV3gYK9LiyqIrzBEVGlI8qW3GuDCEobC4wBQ=
github.com/dustin/go-humanize v0.0.0-20180421182945-02af3965c54e/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
@ -200,19 +188,17 @@ github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU=
github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=
github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw=
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/glycerine/go-unsnap-stream v0.0.0-20180323001048-9f0cb55181dd/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
github.com/glycerine/goconvey v0.0.0-20180728074245-46e3a41ad493/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
github.com/go-chi/chi/v5 v5.0.7/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
github.com/go-chi/chi/v5 v5.0.12 h1:9euLV5sTrTNTRUU9POmDUvfxyj6LAABLUcEWO+JJb4s=
github.com/go-chi/chi/v5 v5.0.12/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
github.com/go-chi/chi/v5 v5.2.2 h1:CMwsvRVTbXVytCk1Wd72Zy1LAsAh9GxMmSNWLHCG618=
github.com/go-chi/chi/v5 v5.2.2/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops=
github.com/go-chi/cors v1.2.1 h1:xEC8UT3Rlp2QuWNEr4Fs/c2EAGVKBwy/1vHx3bppil4=
github.com/go-chi/cors v1.2.1/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58=
github.com/go-chi/httplog v0.3.1 h1:uC3IUWCZagtbCinb3ypFh36SEcgd6StWw2Bu0XSXRtg=
@ -222,22 +208,18 @@ github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
github.com/go-ldap/ldap v3.0.2+incompatible/go.mod h1:qfd9rJvER9Q0/D/Sqn1DfHRoBp40uXYvFoEVrNEPqRc=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU=
github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-test/deep v1.0.2-0.20181118220953-042da051cf31/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 h1:qZNfIGkIANxGv/OqtnntR4DfOY2+BgwR60cAcu/i3SE=
github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4/go.mod h1:kW3HQ4UdaAyrUCSSDR4xUzBKW6O2iA4uHhk7AtyYp10=
github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss=
github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs=
github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU=
github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM=
github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og=
@ -288,7 +270,6 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS
github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/btree v0.0.0-20180124185431-e89373fe6b4a/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
@ -305,7 +286,6 @@ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@ -346,11 +326,9 @@ github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7Fsg
github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/hashicorp/consul/api v1.11.0/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M=
github.com/hashicorp/consul/api v1.12.0/go.mod h1:6pVBMo0ebnYdt2S3H87XhekM/HHrUoTD2XXb/VrZVy0=
github.com/hashicorp/consul/api v1.13.0/go.mod h1:ZlVrynguJKcYr54zGaDbaL3fOvKC9m72FhPvA8T35KQ=
github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
@ -358,8 +336,6 @@ github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brv
github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI=
github.com/hashicorp/go-hclog v0.8.0/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
@ -369,17 +345,12 @@ github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHh
github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA=
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/hashicorp/go-plugin v1.0.1/go.mod h1:++UyYGoz3o5w9ZzAdZxtQKrWWP+iqPBn3cQptSMzBuY=
github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
github.com/hashicorp/go-retryablehttp v0.5.4/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
github.com/hashicorp/go-rootcerts v1.0.1/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A=
github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-version v1.1.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
@ -394,14 +365,8 @@ github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOn
github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE=
github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk=
github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4=
github.com/hashicorp/vault/api v1.0.4/go.mod h1:gDcqh3WGcR1cpF5AJz/B1UFheUEneMoIospckxBxk6Q=
github.com/hashicorp/vault/sdk v0.1.13/go.mod h1:B+hVj7TpuQY1Y/GPbCpffmgd+tSEwvhkWnjtSYCaS2M=
github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM=
github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM=
github.com/hasura/go-graphql-client v0.13.1 h1:kKbjhxhpwz58usVl+Xvgah/TDha5K2akNTRQdsEHN6U=
github.com/hasura/go-graphql-client v0.13.1/go.mod h1:k7FF7h53C+hSNFRG3++DdVZWIuHdCaTbI7siTJ//zGQ=
github.com/hjson/hjson-go/v4 v4.0.0 h1:wlm6IYYqHjOdXH1gHev4VoXCaW20HdQAGCxdOEEg2cs=
github.com/hjson/hjson-go/v4 v4.0.0/go.mod h1:KaYt3bTw3zhBjYqnXkYywcYctk0A2nxeEFTse3rH13E=
github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo=
github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
@ -412,18 +377,12 @@ github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o=
github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY=
github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
@ -431,17 +390,25 @@ github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
github.com/kermieisinthehouse/gosx-notifier v0.1.2 h1:KV0KBeKK2B24kIHY7iK0jgS64Q05f4oB+hUZmsPodxQ=
github.com/kermieisinthehouse/gosx-notifier v0.1.2/go.mod h1:xyWT07azFtUOcHl96qMVvKhvKzsMcS7rKTHQyv8WTho=
github.com/kermieisinthehouse/systray v1.2.4 h1:pdH5vnl+KKjRrVCRU4g/2W1/0HVzuuJ6WXHlPPHYY6s=
github.com/kermieisinthehouse/systray v1.2.4/go.mod h1:axh6C/jNuSyC0QGtidZJURc9h+h41HNoMySoLVrhVR4=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/knadh/koanf v1.5.0 h1:q2TSd/3Pyc/5yP9ldIrSdIz26MCcyNQzW0pEAugLPNs=
github.com/knadh/koanf v1.5.0/go.mod h1:Hgyjp4y8v44hpZtPzs7JZfRAW5AhN7KfZcwv1RYggDs=
github.com/knadh/koanf/maps v0.1.2 h1:RBfmAW5CnZT+PJ1CVc1QSJKf4Xu9kxfQgYVQSu8hpbo=
github.com/knadh/koanf/maps v0.1.2/go.mod h1:npD/QZY3V6ghQDdcQzl1W4ICNVTkohC8E73eI2xW4yI=
github.com/knadh/koanf/parsers/yaml v1.1.0 h1:3ltfm9ljprAHt4jxgeYLlFPmUaunuCgu1yILuTXRdM4=
github.com/knadh/koanf/parsers/yaml v1.1.0/go.mod h1:HHmcHXUrp9cOPcuC+2wrr44GTUB0EC+PyfN3HZD9tFg=
github.com/knadh/koanf/providers/env v1.1.0 h1:U2VXPY0f+CsNDkvdsG8GcsnK4ah85WwWyJgef9oQMSc=
github.com/knadh/koanf/providers/env v1.1.0/go.mod h1:QhHHHZ87h9JxJAn2czdEl6pdkNnDh/JS1Vtsyt65hTY=
github.com/knadh/koanf/providers/file v1.2.0 h1:hrUJ6Y9YOA49aNu/RSYzOTFlqzXSCpmYIDXI7OJU6+U=
github.com/knadh/koanf/providers/file v1.2.0/go.mod h1:bp1PM5f83Q+TOUu10J/0ApLBd9uIzg+n9UgthfY+nRA=
github.com/knadh/koanf/providers/posflag v1.0.1 h1:EnMxHSrPkYCFnKgBUl5KBgrjed8gVFrcXDzaW4l/C6Y=
github.com/knadh/koanf/providers/posflag v1.0.1/go.mod h1:3Wn3+YG3f4ljzRyCUgIwH7G0sZ1pMjCOsNBovrbKmAk=
github.com/knadh/koanf/v2 v2.2.1 h1:jaleChtw85y3UdBnI0wCqcg1sj1gPoz6D3caGNHtrNE=
github.com/knadh/koanf/v2 v2.2.1/go.mod h1:PSFru3ufQgTsI7IF+95rf9s8XA1+aHxKuO/W+dPoHEY=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
@ -492,22 +459,17 @@ github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso=
github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI=
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI=
github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw=
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo=
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
@ -519,26 +481,20 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007 h1:Ohgj9L0EYOgXxkDp+bczlMBiulwmqYzQpvQNUdtt3oc=
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007/go.mod h1:wKCOWMb6iNlvKiOToY2cNuaovSXvIiv1zDi9QDR7aGQ=
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ=
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
github.com/npillmayer/nestext v0.1.3/go.mod h1:h2lrijH8jpicr25dFY+oAJLyzlya6jhnuG+zWp9L0Uk=
github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d h1:VhgPp6v9qf9Agr/56bj7Y/xa04UccTW04VP0Qed4vnQ=
github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d/go.mod h1:YUTz3bUH2ZwIWBy3CJBeOBEugqcmXREj14T+iG/4k4U=
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde h1:x0TT0RDC7UhAVbbWWBzr41ElhJx5tXPWkIHA2HWPRuw=
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE=
github.com/pelletier/go-toml v1.9.4 h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM=
github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU=
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
@ -555,24 +511,17 @@ github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSg
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4=
github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
github.com/remeh/sizedwaitgroup v1.0.0 h1:VNGGFwNo/R5+MJBf6yrsr110p0m4/OX4S3DCy7Kyl5E=
github.com/remeh/sizedwaitgroup v1.0.0/go.mod h1:3j2R4OIe/SeS6YDhICBy22RWjJC5eNCJ1V+9+NVNYlo=
github.com/rhnvrm/simples3 v0.6.1/go.mod h1:Y+3vYm2V7Y4VijFoJHHTrja6OgPrJ2cBti8dPGkC3sA=
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
@ -590,8 +539,6 @@ github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQD
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd h1:CmH9+J6ZSsIjUK3dcGsnCnO41eRBOnY12zwkn5qVwgc=
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc=
github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8=
github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig=
github.com/sagikazarmark/crypt v0.4.0/go.mod h1:ALv2SRj7GxYV4HO9elxH9nS6M9gW+xDNxqmyJ6RfDFM=
@ -600,7 +547,6 @@ github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
@ -621,8 +567,9 @@ github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM=
github.com/spf13/viper v1.10.1/go.mod h1:IGlFPqhNAPKRxohIzWpI5QEy4kuI7tcl5WvR+8qy1rU=
github.com/spf13/viper v1.16.0 h1:rGGH0XDZhdUOryiDWjmIvUSWpbNqisK8Wk0Vyefw8hc=
@ -683,11 +630,8 @@ github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5t
github.com/zencoder/go-dash/v3 v3.0.2 h1:oP1+dOh+Gp57PkvdCyMfbHtrHaxfl3w4kR3KBBbuqQE=
github.com/zencoder/go-dash/v3 v3.0.2/go.mod h1:30R5bKy1aUYY45yesjtZ9l8trNc2TwNqbS17WVQmCzk=
go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
go.etcd.io/etcd/api/v3 v3.5.4/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A=
go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
go.etcd.io/etcd/client/pkg/v3 v3.5.4/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs=
go.etcd.io/etcd/client/v3 v3.5.4/go.mod h1:ZaRkVgBZC+L+dLCjTcF1hRXpgZXQPOvnA/Ak/gq3kiY=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
@ -701,6 +645,8 @@ go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE=
go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=
go.yaml.in/yaml/v3 v3.0.3 h1:bXOww4E/J3f66rav3pX3m8w6jDE4knZjGOw8b5Y6iNE=
go.yaml.in/yaml/v3 v3.0.3/go.mod h1:tBHosrYAkRZjRAOREWbDnBXUf08JOwYq++0QNwQiWzI=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
@ -850,11 +796,9 @@ golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5h
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190129075346-302c3dd5f1cc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190415145633-3fd5a3612ccd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -866,12 +810,10 @@ golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -886,8 +828,6 @@ golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -895,7 +835,6 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -908,7 +847,6 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@ -931,7 +869,6 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@ -945,7 +882,6 @@ golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20181227161524-e6919f6577db/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
@ -1068,7 +1004,6 @@ google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCID
google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190404172233-64821d5d2107/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
@ -1132,11 +1067,9 @@ google.golang.org/genproto v0.0.0-20211129164237-f09f9a12af12/go.mod h1:5CzLGKJ6
google.golang.org/genproto v0.0.0-20211203200212-54befc351ae9/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.22.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
@ -1177,7 +1110,6 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/asn1-ber.v1 v1.0.0-20181015200546-f715ec2f112d/go.mod h1:cuepJuh7vyXfUyUwEgHQXw849cJrilpS5NeIjOWESAw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@ -1190,14 +1122,14 @@ gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ini.v1 v1.66.3/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/square/go-jose.v2 v2.3.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc=
gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
@ -1214,4 +1146,3 @@ honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc=

View file

@ -35,6 +35,8 @@ models:
model: github.com/stashapp/stash/internal/api.BoolMap
PluginConfigMap:
model: github.com/stashapp/stash/internal/api.PluginConfigMap
File:
model: github.com/stashapp/stash/internal/api.File
VideoFile:
fields:
# override float fields - #1572

View file

@ -6,6 +6,26 @@ type Query {
findDefaultFilter(mode: FilterMode!): SavedFilter
@deprecated(reason: "default filter now stored in UI config")
"Find a file by its id or path"
findFile(id: ID, path: String): BaseFile!
"Queries for Files"
findFiles(
file_filter: FileFilterType
filter: FindFilterType
ids: [ID!]
): FindFilesResultType!
"Find a file by its id or path"
findFolder(id: ID, path: String): Folder!
"Queries for Files"
findFolders(
folder_filter: FolderFilterType
filter: FindFilterType
ids: [ID!]
): FindFoldersResultType!
"Find a scene by ID or Checksum"
findScene(id: ID, checksum: String): Scene
findSceneByHash(input: SceneHashInput!): Scene
@ -308,6 +328,7 @@ type Mutation {
sceneMarkerCreate(input: SceneMarkerCreateInput!): SceneMarker
sceneMarkerUpdate(input: SceneMarkerUpdateInput!): SceneMarker
bulkSceneMarkerUpdate(input: BulkSceneMarkerUpdateInput!): [SceneMarker!]
sceneMarkerDestroy(id: ID!): Boolean!
sceneMarkersDestroy(ids: [ID!]!): Boolean!
@ -351,6 +372,7 @@ type Mutation {
studioUpdate(input: StudioUpdateInput!): Studio
studioDestroy(input: StudioDestroyInput!): Boolean!
studiosDestroy(ids: [ID!]!): Boolean!
bulkStudioUpdate(input: BulkStudioUpdateInput!): [Studio!]
movieCreate(input: MovieCreateInput!): Movie
@deprecated(reason: "Use groupCreate instead")

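As a usage illustration only (not part of the diff), a client query against the new findFiles field might look like the sketch below. The field and aggregate names are taken from the schema additions above; the operation name and variable names are arbitrary, and requesting only the aggregates you need matters because the resolver skips the ones that are not selected (see collectQueryFields later in this change).

package main

import "fmt"

// Illustrative client-side query for the findFiles field added above. Only the
// requested aggregates (count/duration/size here) are computed by the resolver.
const findFilesQuery = `
query FindFiles($filter: FindFilterType, $file_filter: FileFilterType) {
  findFiles(filter: $filter, file_filter: $file_filter) {
    count
    duration
    size
    files {
      id
      path
      basename
    }
  }
}`

func main() {
	fmt.Println(findFilesQuery)
}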
View file

@ -2,6 +2,8 @@ input SetupInput {
"Empty to indicate $HOME/.stash/config.yml default"
configLocation: String!
stashes: [StashConfigInput!]!
"True if SFW content mode is enabled"
sfwContentMode: Boolean
"Empty to indicate default"
databaseFile: String!
"Empty to indicate default"
@ -67,6 +69,8 @@ input ConfigGeneralInput {
databasePath: String
"Path to backup directory"
backupDirectoryPath: String
"Path to trash directory - if set, deleted files will be moved here instead of being permanently deleted"
deleteTrashPath: String
"Path to generated files"
generatedPath: String
"Path to import/export files"
@ -153,6 +157,8 @@ input ConfigGeneralInput {
logLevel: String
"Whether to log http access"
logAccess: Boolean
"Maximum log size"
logFileMaxSize: Int
"True if galleries should be created from folders with images"
createGalleriesFromFolders: Boolean
"Regex used to identify images as gallery covers"
@ -187,6 +193,8 @@ type ConfigGeneralResult {
databasePath: String!
"Path to backup directory"
backupDirectoryPath: String!
"Path to trash directory - if set, deleted files will be moved here instead of being permanently deleted"
deleteTrashPath: String!
"Path to generated files"
generatedPath: String!
"Path to import/export files"
@ -277,6 +285,8 @@ type ConfigGeneralResult {
logLevel: String!
"Whether to log http access"
logAccess: Boolean!
"Maximum log size"
logFileMaxSize: Int!
"Array of video file extensions"
videoExtensions: [String!]!
"Array of image file extensions"
@ -341,6 +351,9 @@ type ConfigImageLightboxResult {
}
input ConfigInterfaceInput {
"True if SFW content mode is enabled"
sfwContentMode: Boolean
"Ordered list of items that should be shown in the menu"
menuItems: [String!]
@ -407,6 +420,9 @@ type ConfigDisableDropdownCreate {
}
type ConfigInterfaceResult {
"True if SFW content mode is enabled"
sfwContentMode: Boolean!
"Ordered list of items that should be shown in the menu"
menuItems: [String!]

View file

@ -7,8 +7,11 @@ type Folder {
id: ID!
path: String!
parent_folder_id: ID
zip_file_id: ID
parent_folder_id: ID @deprecated(reason: "Use parent_folder instead")
zip_file_id: ID @deprecated(reason: "Use zip_file instead")
parent_folder: Folder
zip_file: BasicFile
mod_time: Time!
@ -21,8 +24,32 @@ interface BaseFile {
path: String!
basename: String!
parent_folder_id: ID!
zip_file_id: ID
parent_folder_id: ID! @deprecated(reason: "Use parent_folder instead")
zip_file_id: ID @deprecated(reason: "Use zip_file instead")
parent_folder: Folder!
zip_file: BasicFile
mod_time: Time!
size: Int64!
fingerprint(type: String!): String
fingerprints: [Fingerprint!]!
created_at: Time!
updated_at: Time!
}
type BasicFile implements BaseFile {
id: ID!
path: String!
basename: String!
parent_folder_id: ID! @deprecated(reason: "Use parent_folder instead")
zip_file_id: ID @deprecated(reason: "Use zip_file instead")
parent_folder: Folder!
zip_file: BasicFile
mod_time: Time!
size: Int64!
@ -39,8 +66,11 @@ type VideoFile implements BaseFile {
path: String!
basename: String!
parent_folder_id: ID!
zip_file_id: ID
parent_folder_id: ID! @deprecated(reason: "Use parent_folder instead")
zip_file_id: ID @deprecated(reason: "Use zip_file instead")
parent_folder: Folder!
zip_file: BasicFile
mod_time: Time!
size: Int64!
@ -66,8 +96,11 @@ type ImageFile implements BaseFile {
path: String!
basename: String!
parent_folder_id: ID!
zip_file_id: ID
parent_folder_id: ID! @deprecated(reason: "Use parent_folder instead")
zip_file_id: ID @deprecated(reason: "Use zip_file instead")
parent_folder: Folder!
zip_file: BasicFile
mod_time: Time!
size: Int64!
@ -75,6 +108,7 @@ type ImageFile implements BaseFile {
fingerprint(type: String!): String
fingerprints: [Fingerprint!]!
format: String!
width: Int!
height: Int!
@ -89,8 +123,11 @@ type GalleryFile implements BaseFile {
path: String!
basename: String!
parent_folder_id: ID!
zip_file_id: ID
parent_folder_id: ID! @deprecated(reason: "Use parent_folder instead")
zip_file_id: ID @deprecated(reason: "Use zip_file instead")
parent_folder: Folder!
zip_file: BasicFile
mod_time: Time!
size: Int64!
@ -125,3 +162,22 @@ input FileSetFingerprintsInput {
"only supplied fingerprint types will be modified"
fingerprints: [SetFingerprintsInput!]!
}
type FindFilesResultType {
count: Int!
"Total megapixels of any image files"
megapixels: Float!
"Total duration in seconds of any video files"
duration: Float!
"Total file size in bytes"
size: Int!
files: [BaseFile!]!
}
type FindFoldersResultType {
count: Int!
folders: [Folder!]!
}

View file

@ -330,6 +330,8 @@ input SceneFilterType {
groups_filter: GroupFilterType
"Filter by related markers that meet this criteria"
markers_filter: SceneMarkerFilterType
"Filter by related files that meet this criteria"
files_filter: FileFilterType
}
input MovieFilterType {
@ -401,6 +403,8 @@ input GroupFilterType {
created_at: TimestampCriterionInput
"Filter by last update time"
updated_at: TimestampCriterionInput
"Filter by o-counter"
o_counter: IntCriterionInput
"Filter by containing groups"
containing_groups: HierarchicalMultiCriterionInput
@ -534,6 +538,10 @@ input GalleryFilterType {
studios_filter: StudioFilterType
"Filter by related tags that meet this criteria"
tags_filter: TagFilterType
"Filter by related files that meet this criteria"
files_filter: FileFilterType
"Filter by related folders that meet this criteria"
folders_filter: FolderFilterType
}
input TagFilterType {
@ -679,6 +687,106 @@ input ImageFilterType {
studios_filter: StudioFilterType
"Filter by related tags that meet this criteria"
tags_filter: TagFilterType
"Filter by related files that meet this criteria"
files_filter: FileFilterType
}
input FileFilterType {
AND: FileFilterType
OR: FileFilterType
NOT: FileFilterType
path: StringCriterionInput
basename: StringCriterionInput
dir: StringCriterionInput
parent_folder: HierarchicalMultiCriterionInput
zip_file: MultiCriterionInput
"Filter by modification time"
mod_time: TimestampCriterionInput
"Filter files that have an exact match available"
duplicated: PHashDuplicationCriterionInput
"find files based on hash"
hashes: [FingerprintFilterInput!]
video_file_filter: VideoFileFilterInput
image_file_filter: ImageFileFilterInput
scene_count: IntCriterionInput
image_count: IntCriterionInput
gallery_count: IntCriterionInput
"Filter by related scenes that meet this criteria"
scenes_filter: SceneFilterType
"Filter by related images that meet this criteria"
images_filter: ImageFilterType
"Filter by related galleries that meet this criteria"
galleries_filter: GalleryFilterType
"Filter by creation time"
created_at: TimestampCriterionInput
"Filter by last update time"
updated_at: TimestampCriterionInput
}
input FolderFilterType {
AND: FolderFilterType
OR: FolderFilterType
NOT: FolderFilterType
path: StringCriterionInput
parent_folder: HierarchicalMultiCriterionInput
zip_file: MultiCriterionInput
"Filter by modification time"
mod_time: TimestampCriterionInput
gallery_count: IntCriterionInput
"Filter by files that meet this criteria"
files_filter: FileFilterType
"Filter by related galleries that meet this criteria"
galleries_filter: GalleryFilterType
"Filter by creation time"
created_at: TimestampCriterionInput
"Filter by last update time"
updated_at: TimestampCriterionInput
}
input VideoFileFilterInput {
resolution: ResolutionCriterionInput
orientation: OrientationCriterionInput
framerate: IntCriterionInput
bitrate: IntCriterionInput
format: StringCriterionInput
video_codec: StringCriterionInput
audio_codec: StringCriterionInput
"in seconds"
duration: IntCriterionInput
captions: StringCriterionInput
interactive: Boolean
interactive_speed: IntCriterionInput
}
input ImageFileFilterInput {
format: StringCriterionInput
resolution: ResolutionCriterionInput
orientation: OrientationCriterionInput
}
input FingerprintFilterInput {
type: String!
value: String!
"Hamming distance - defaults to 0"
distance: Int
}
enum CriterionModifier {

View file

@ -30,6 +30,7 @@ type Group {
performer_count(depth: Int): Int! # Resolver
sub_group_count(depth: Int): Int! # Resolver
scenes: [Scene!]!
o_counter: Int # Resolver
}
input GroupDescriptionInput {

View file

@ -42,6 +42,13 @@ input SceneMarkerUpdateInput {
tag_ids: [ID!]
}
input BulkSceneMarkerUpdateInput {
ids: [ID!]
title: String
primary_tag_id: ID
tag_ids: BulkUpdateIds
}
type FindSceneMarkersResultType {
count: Int!
scene_markers: [SceneMarker!]!

View file

@ -55,9 +55,14 @@ type ScrapedStudio {
"Set if studio matched"
stored_id: ID
name: String!
url: String
url: String @deprecated(reason: "use urls")
urls: [String!]
parent: ScrapedStudio
image: String
details: String
"Aliases must be comma-delimited to be parsed correctly"
aliases: String
tags: [ScrapedTag!]
remote_site_id: String
}
@ -66,6 +71,8 @@ type ScrapedTag {
"Set if tag matched"
stored_id: ID
name: String!
"Remote site ID, if applicable"
remote_site_id: String
}
type ScrapedScene {

View file

@ -1,7 +1,8 @@
type Studio {
id: ID!
name: String!
url: String
url: String @deprecated(reason: "Use urls")
urls: [String!]!
parent_studio: Studio
child_studios: [Studio!]!
aliases: [String!]!
@ -24,11 +25,13 @@ type Studio {
updated_at: Time!
groups: [Group!]!
movies: [Movie!]! @deprecated(reason: "use groups instead")
o_counter: Int
}
input StudioCreateInput {
name: String!
url: String
url: String @deprecated(reason: "Use urls")
urls: [String!]
parent_id: ID
"This should be a URL or a base64 encoded data URL"
image: String
@ -45,7 +48,8 @@ input StudioCreateInput {
input StudioUpdateInput {
id: ID!
name: String
url: String
url: String @deprecated(reason: "Use urls")
urls: [String!]
parent_id: ID
"This should be a URL or a base64 encoded data URL"
image: String
@ -59,6 +63,19 @@ input StudioUpdateInput {
ignore_auto_tag: Boolean
}
input BulkStudioUpdateInput {
ids: [ID!]!
url: String @deprecated(reason: "Use urls")
urls: BulkUpdateStrings
parent_id: ID
# rating expressed as 1-100
rating100: Int
favorite: Boolean
details: String
tag_ids: BulkUpdateIds
ignore_auto_tag: Boolean
}
input StudioDestroyInput {
id: ID!
}

View file

@ -9,6 +9,7 @@ type Tag {
created_at: Time!
updated_at: Time!
favorite: Boolean!
stash_ids: [StashID!]!
image_path: String # Resolver
scene_count(depth: Int): Int! # Resolver
scene_marker_count(depth: Int): Int! # Resolver
@ -35,6 +36,7 @@ input TagCreateInput {
favorite: Boolean
"This should be a URL or a base64 encoded data URL"
image: String
stash_ids: [StashIDInput!]
parent_ids: [ID!]
child_ids: [ID!]
@ -51,6 +53,7 @@ input TagUpdateInput {
favorite: Boolean
"This should be a URL or a base64 encoded data URL"
image: String
stash_ids: [StashIDInput!]
parent_ids: [ID!]
child_ids: [ID!]

View file

@ -13,6 +13,7 @@ fragment ImageFragment on Image {
fragment StudioFragment on Studio {
name
id
aliases
urls {
...URLFragment
}

View file

@ -7,8 +7,10 @@ import (
"fmt"
"io"
"net/http"
"os"
"regexp"
"runtime"
"strings"
"time"
"golang.org/x/sys/cpu"
@ -36,6 +38,24 @@ var stashReleases = func() map[string]string {
}
}
// isMacOSBundle checks if the application is running from within a macOS .app bundle
func isMacOSBundle() bool {
exec, err := os.Executable()
return err == nil && strings.Contains(exec, "Stash.app/")
}
// getWantedRelease determines which release variant to download based on platform and bundle type
func getWantedRelease(platform string) string {
release := stashReleases()[platform]
// On macOS, check if running from .app bundle
if runtime.GOOS == "darwin" && isMacOSBundle() {
return "Stash.app.zip"
}
return release
}
type githubReleasesResponse struct {
Url string
Assets_url string
@ -168,7 +188,7 @@ func GetLatestRelease(ctx context.Context) (*LatestRelease, error) {
}
platform := fmt.Sprintf("%s/%s", runtime.GOOS, arch)
wantedRelease := stashReleases()[platform]
wantedRelease := getWantedRelease(platform)
url := apiReleases
if build.IsDevelop() {

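A minimal standalone sketch (not part of the diff) of the selection logic added above: it mirrors isMacOSBundle and the darwin branch of getWantedRelease so the behaviour can be tried outside the updater. The printed strings are for illustration only.

package main

import (
	"fmt"
	"os"
	"runtime"
	"strings"
)

// runningFromAppBundle mirrors isMacOSBundle above: the executable path contains
// "Stash.app/" only when the binary was launched from inside the macOS app bundle.
func runningFromAppBundle() bool {
	exe, err := os.Executable()
	return err == nil && strings.Contains(exe, "Stash.app/")
}

func main() {
	platform := fmt.Sprintf("%s/%s", runtime.GOOS, runtime.GOARCH)
	if runtime.GOOS == "darwin" && runningFromAppBundle() {
		// matches the special case in getWantedRelease above
		fmt.Println("would download Stash.app.zip for", platform)
	} else {
		fmt.Println("would download the default release asset for", platform)
	}
}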
23
internal/api/fields.go Normal file
View file

@ -0,0 +1,23 @@
package api
import (
"context"
"github.com/99designs/gqlgen/graphql"
)
type queryFields []string
func collectQueryFields(ctx context.Context) queryFields {
fields := graphql.CollectAllFields(ctx)
return queryFields(fields)
}
func (f queryFields) Has(field string) bool {
for _, v := range f {
if v == field {
return true
}
}
return false
}

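A small sketch (not part of the diff) of how the helper above is used later in this change set: the resolver checks which result fields were requested and skips aggregates nobody asked for. The hard-coded field list stands in for what collectQueryFields would derive from the GraphQL selection set.

package main

import "fmt"

// stand-in for the queryFields helper above, outside the api package
type queryFields []string

func (f queryFields) Has(field string) bool {
	for _, v := range f {
		if v == field {
			return true
		}
	}
	return false
}

func main() {
	// collectQueryFields(ctx) would populate this from the incoming query;
	// here it is hard-coded for illustration.
	fields := queryFields{"count", "files"}

	fmt.Println("compute count:   ", fields.Has("count"))    // true
	fmt.Println("compute duration:", fields.Has("duration")) // false - aggregate can be skipped
}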
View file

@ -101,7 +101,7 @@ func initCustomPerformerImages(customPath string) {
}
}
func getDefaultPerformerImage(name string, gender *models.GenderEnum) []byte {
func getDefaultPerformerImage(name string, gender *models.GenderEnum, sfwMode bool) []byte {
// try the custom box first if we have one
if performerBoxCustom != nil {
ret, err := performerBoxCustom.GetRandomImageByName(name)
@ -111,6 +111,10 @@ func getDefaultPerformerImage(name string, gender *models.GenderEnum) []byte {
logger.Warnf("error loading custom default performer image: %v", err)
}
if sfwMode {
return static.ReadAll(static.DefaultSFWPerformerImage)
}
var g models.GenderEnum
if gender != nil {
g = *gender

View file

@ -10,6 +10,7 @@
//go:generate go run github.com/vektah/dataloaden TagLoader int *github.com/stashapp/stash/pkg/models.Tag
//go:generate go run github.com/vektah/dataloaden GroupLoader int *github.com/stashapp/stash/pkg/models.Group
//go:generate go run github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/models.FileID github.com/stashapp/stash/pkg/models.File
//go:generate go run github.com/vektah/dataloaden FolderLoader github.com/stashapp/stash/pkg/models.FolderID *github.com/stashapp/stash/pkg/models.Folder
//go:generate go run github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
@ -62,6 +63,7 @@ type Loaders struct {
TagByID *TagLoader
GroupByID *GroupLoader
FileByID *FileLoader
FolderByID *FolderLoader
}
type Middleware struct {
@ -117,6 +119,11 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch,
fetch: m.fetchFiles(ctx),
},
FolderByID: &FolderLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchFolders(ctx),
},
SceneFiles: &SceneFileIDsLoader{
wait: wait,
maxBatch: maxBatch,
@ -279,6 +286,17 @@ func (m Middleware) fetchFiles(ctx context.Context) func(keys []models.FileID) (
}
}
func (m Middleware) fetchFolders(ctx context.Context) func(keys []models.FolderID) ([]*models.Folder, []error) {
return func(keys []models.FolderID) (ret []*models.Folder, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Folder.FindMany(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([][]models.FileID, []error) {
return func(keys []int) (ret [][]models.FileID, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {

View file

@ -0,0 +1,224 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/models"
)
// FolderLoaderConfig captures the config to create a new FolderLoader
type FolderLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []models.FolderID) ([]*models.Folder, []error)
// Wait is how long to wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
MaxBatch int
}
// NewFolderLoader creates a new FolderLoader given a fetch, wait, and maxBatch
func NewFolderLoader(config FolderLoaderConfig) *FolderLoader {
return &FolderLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// FolderLoader batches and caches requests
type FolderLoader struct {
// this method provides the data for the loader
fetch func(keys []models.FolderID) ([]*models.Folder, []error)
// how long to wait before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[models.FolderID]*models.Folder
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *folderLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type folderLoaderBatch struct {
keys []models.FolderID
data []*models.Folder
error []error
closing bool
done chan struct{}
}
// Load a Folder by key, batching and caching will be applied automatically
func (l *FolderLoader) Load(key models.FolderID) (*models.Folder, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a Folder.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *FolderLoader) LoadThunk(key models.FolderID) func() (*models.Folder, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() (*models.Folder, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &folderLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() (*models.Folder, error) {
<-batch.done
var data *models.Folder
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// its convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *FolderLoader) LoadAll(keys []models.FolderID) ([]*models.Folder, []error) {
results := make([]func() (*models.Folder, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
folders := make([]*models.Folder, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
folders[i], errors[i] = thunk()
}
return folders, errors
}
// LoadAllThunk returns a function that when called will block waiting for a Folders.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *FolderLoader) LoadAllThunk(keys []models.FolderID) func() ([]*models.Folder, []error) {
results := make([]func() (*models.Folder, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([]*models.Folder, []error) {
folders := make([]*models.Folder, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
folders[i], errors[i] = thunk()
}
return folders, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *FolderLoader) Prime(key models.FolderID, value *models.Folder) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
// make a copy when writing to the cache, its easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := *value
l.unsafeSet(key, &cpy)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *FolderLoader) Clear(key models.FolderID) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *FolderLoader) unsafeSet(key models.FolderID, value *models.Folder) {
if l.cache == nil {
l.cache = map[models.FolderID]*models.Folder{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *folderLoaderBatch) keyIndex(l *FolderLoader, key models.FolderID) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *folderLoaderBatch) startTimer(l *FolderLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *folderLoaderBatch) end(l *FolderLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}

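As a rough usage sketch (not part of the diff, and assuming it sits alongside the generated code in the loaders package), the loader can be wired up with any fetch function; in the middleware above the fetch is Repository.Folder.FindMany, while here it is an in-memory stub. The demoFolderLoader name is hypothetical.

package loaders

import (
	"time"

	"github.com/stashapp/stash/pkg/models"
)

// demoFolderLoader shows the generated API in isolation: Load calls made within the
// wait window are collected and resolved by a single Fetch call.
func demoFolderLoader() (*models.Folder, error) {
	loader := NewFolderLoader(FolderLoaderConfig{
		Wait:     2 * time.Millisecond,
		MaxBatch: 100,
		Fetch: func(keys []models.FolderID) ([]*models.Folder, []error) {
			// a real fetch would look the folders up; return a nil entry per key here
			return make([]*models.Folder, len(keys)), nil
		},
	})

	return loader.Load(models.FolderID(1))
}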
View file

@ -4,6 +4,7 @@ import (
"fmt"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil"
)
type BaseFile interface {
@ -27,6 +28,29 @@ func convertVisualFile(f models.File) (VisualFile, error) {
}
}
func convertBaseFile(f models.File) BaseFile {
if f == nil {
return nil
}
switch f := f.(type) {
case BaseFile:
return f
case *models.VideoFile:
return &VideoFile{VideoFile: f}
case *models.ImageFile:
return &ImageFile{ImageFile: f}
case *models.BaseFile:
return &BasicFile{BaseFile: f}
default:
panic("unknown file type")
}
}
func convertBaseFiles(files []models.File) []BaseFile {
return sliceutil.Map(files, convertBaseFile)
}
type GalleryFile struct {
*models.BaseFile
}
@ -62,3 +86,15 @@ func (ImageFile) IsVisualFile() {}
func (f *ImageFile) Fingerprints() []models.Fingerprint {
return f.ImageFile.Fingerprints
}
type BasicFile struct {
*models.BaseFile
}
func (BasicFile) IsBaseFile() {}
func (BasicFile) IsVisualFile() {}
func (f *BasicFile) Fingerprints() []models.Fingerprint {
return f.BaseFile.Fingerprints
}

View file

@ -95,6 +95,12 @@ func (r *Resolver) VideoFile() VideoFileResolver {
func (r *Resolver) ImageFile() ImageFileResolver {
return &imageFileResolver{r}
}
func (r *Resolver) BasicFile() BasicFileResolver {
return &basicFileResolver{r}
}
func (r *Resolver) Folder() FolderResolver {
return &folderResolver{r}
}
func (r *Resolver) SavedFilter() SavedFilterResolver {
return &savedFilterResolver{r}
}
@ -125,6 +131,8 @@ type tagResolver struct{ *Resolver }
type galleryFileResolver struct{ *Resolver }
type videoFileResolver struct{ *Resolver }
type imageFileResolver struct{ *Resolver }
type basicFileResolver struct{ *Resolver }
type folderResolver struct{ *Resolver }
type savedFilterResolver struct{ *Resolver }
type pluginResolver struct{ *Resolver }
type configResultResolver struct{ *Resolver }

View file

@ -1,30 +1,80 @@
package api
import "context"
import (
"context"
func (r *galleryFileResolver) Fingerprint(ctx context.Context, obj *GalleryFile, type_ string) (*string, error) {
fp := obj.BaseFile.Fingerprints.For(type_)
if fp != nil {
v := fp.Value()
return &v, nil
"github.com/stashapp/stash/internal/api/loaders"
"github.com/stashapp/stash/pkg/models"
)
func fingerprintResolver(fp models.Fingerprints, type_ string) (*string, error) {
fingerprint := fp.For(type_)
if fingerprint != nil {
value := fingerprint.Value()
return &value, nil
}
return nil, nil
}
func (r *galleryFileResolver) Fingerprint(ctx context.Context, obj *GalleryFile, type_ string) (*string, error) {
return fingerprintResolver(obj.BaseFile.Fingerprints, type_)
}
func (r *imageFileResolver) Fingerprint(ctx context.Context, obj *ImageFile, type_ string) (*string, error) {
fp := obj.ImageFile.Fingerprints.For(type_)
if fp != nil {
v := fp.Value()
return &v, nil
}
return nil, nil
return fingerprintResolver(obj.ImageFile.Fingerprints, type_)
}
func (r *videoFileResolver) Fingerprint(ctx context.Context, obj *VideoFile, type_ string) (*string, error) {
fp := obj.VideoFile.Fingerprints.For(type_)
if fp != nil {
v := fp.Value()
return &v, nil
}
return nil, nil
return fingerprintResolver(obj.VideoFile.Fingerprints, type_)
}
func (r *basicFileResolver) Fingerprint(ctx context.Context, obj *BasicFile, type_ string) (*string, error) {
return fingerprintResolver(obj.BaseFile.Fingerprints, type_)
}
func (r *galleryFileResolver) ParentFolder(ctx context.Context, obj *GalleryFile) (*models.Folder, error) {
return loaders.From(ctx).FolderByID.Load(obj.ParentFolderID)
}
func (r *imageFileResolver) ParentFolder(ctx context.Context, obj *ImageFile) (*models.Folder, error) {
return loaders.From(ctx).FolderByID.Load(obj.ParentFolderID)
}
func (r *videoFileResolver) ParentFolder(ctx context.Context, obj *VideoFile) (*models.Folder, error) {
return loaders.From(ctx).FolderByID.Load(obj.ParentFolderID)
}
func (r *basicFileResolver) ParentFolder(ctx context.Context, obj *BasicFile) (*models.Folder, error) {
return loaders.From(ctx).FolderByID.Load(obj.ParentFolderID)
}
func zipFileResolver(ctx context.Context, zipFileID *models.FileID) (*BasicFile, error) {
if zipFileID == nil {
return nil, nil
}
f, err := loaders.From(ctx).FileByID.Load(*zipFileID)
if err != nil {
return nil, err
}
return &BasicFile{
BaseFile: f.Base(),
}, nil
}
func (r *galleryFileResolver) ZipFile(ctx context.Context, obj *GalleryFile) (*BasicFile, error) {
return zipFileResolver(ctx, obj.ZipFileID)
}
func (r *imageFileResolver) ZipFile(ctx context.Context, obj *ImageFile) (*BasicFile, error) {
return zipFileResolver(ctx, obj.ZipFileID)
}
func (r *videoFileResolver) ZipFile(ctx context.Context, obj *VideoFile) (*BasicFile, error) {
return zipFileResolver(ctx, obj.ZipFileID)
}
func (r *basicFileResolver) ZipFile(ctx context.Context, obj *BasicFile) (*BasicFile, error) {
return zipFileResolver(ctx, obj.ZipFileID)
}

View file

@ -0,0 +1,20 @@
package api
import (
"context"
"github.com/stashapp/stash/internal/api/loaders"
"github.com/stashapp/stash/pkg/models"
)
func (r *folderResolver) ParentFolder(ctx context.Context, obj *models.Folder) (*models.Folder, error) {
if obj.ParentFolderID == nil {
return nil, nil
}
return loaders.From(ctx).FolderByID.Load(*obj.ParentFolderID)
}
func (r *folderResolver) ZipFile(ctx context.Context, obj *models.Folder) (*BasicFile, error) {
return zipFileResolver(ctx, obj.ZipFileID)
}

View file

@ -204,3 +204,14 @@ func (r *groupResolver) Scenes(ctx context.Context, obj *models.Group) (ret []*m
return ret, nil
}
func (r *groupResolver) OCounter(ctx context.Context, obj *models.Group) (ret *int, err error) {
var count int
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
count, err = r.repository.Scene.OCountByGroupID(ctx, obj.ID)
return err
}); err != nil {
return nil, err
}
return &count, nil
}

View file

@ -40,6 +40,35 @@ func (r *studioResolver) Aliases(ctx context.Context, obj *models.Studio) ([]str
return obj.Aliases.List(), nil
}
func (r *studioResolver) URL(ctx context.Context, obj *models.Studio) (*string, error) {
if !obj.URLs.Loaded() {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
return obj.LoadURLs(ctx, r.repository.Studio)
}); err != nil {
return nil, err
}
}
urls := obj.URLs.List()
if len(urls) == 0 {
return nil, nil
}
return &urls[0], nil
}
func (r *studioResolver) Urls(ctx context.Context, obj *models.Studio) ([]string, error) {
if !obj.URLs.Loaded() {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
return obj.LoadURLs(ctx, r.repository.Studio)
}); err != nil {
return nil, err
}
}
return obj.URLs.List(), nil
}
func (r *studioResolver) Tags(ctx context.Context, obj *models.Studio) (ret []*models.Tag, err error) {
if !obj.TagIDs.Loaded() {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
@ -114,6 +143,24 @@ func (r *studioResolver) MovieCount(ctx context.Context, obj *models.Studio, dep
return r.GroupCount(ctx, obj, depth)
}
func (r *studioResolver) OCounter(ctx context.Context, obj *models.Studio) (ret *int, err error) {
var sceneCount int
var imageCount int
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
sceneCount, err = r.repository.Scene.OCountByStudioID(ctx, obj.ID)
if err != nil {
return err
}
imageCount, err = r.repository.Image.OCountByStudioID(ctx, obj.ID)
return err
}); err != nil {
return nil, err
}
total := sceneCount + imageCount
return &total, nil
}
func (r *studioResolver) ParentStudio(ctx context.Context, obj *models.Studio) (ret *models.Studio, err error) {
if obj.ParentID == nil {
return nil, nil

View file

@ -54,6 +54,16 @@ func (r *tagResolver) Aliases(ctx context.Context, obj *models.Tag) (ret []strin
return obj.Aliases.List(), nil
}
func (r *tagResolver) StashIds(ctx context.Context, obj *models.Tag) ([]*models.StashID, error) {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
return obj.LoadStashIDs(ctx, r.repository.Tag)
}); err != nil {
return nil, err
}
return stashIDsSliceToPtrSlice(obj.StashIDs.List()), nil
}
func (r *tagResolver) SceneCount(ctx context.Context, obj *models.Tag, depth *int) (ret int, err error) {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
ret, err = scene.CountByTagID(ctx, r.repository.Scene, obj.ID, depth)

View file

@ -150,6 +150,15 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen
c.SetString(config.BackupDirectoryPath, *input.BackupDirectoryPath)
}
existingDeleteTrashPath := c.GetDeleteTrashPath()
if input.DeleteTrashPath != nil && existingDeleteTrashPath != *input.DeleteTrashPath {
if err := validateDir(config.DeleteTrashPath, *input.DeleteTrashPath, true); err != nil {
return makeConfigGeneralResult(), err
}
c.SetString(config.DeleteTrashPath, *input.DeleteTrashPath)
}
existingGeneratedPath := c.GetGeneratedPath()
if input.GeneratedPath != nil && existingGeneratedPath != *input.GeneratedPath {
if err := validateDir(config.Generated, *input.GeneratedPath, false); err != nil {
@ -334,6 +343,10 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen
logger.SetLogLevel(*input.LogLevel)
}
if input.LogFileMaxSize != nil && *input.LogFileMaxSize != c.GetLogFileMaxSize() {
c.SetInt(config.LogFileMaxSize, *input.LogFileMaxSize)
}
if input.Excludes != nil {
for _, r := range input.Excludes {
_, err := regexp.Compile(r)
@ -445,6 +458,8 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen
func (r *mutationResolver) ConfigureInterface(ctx context.Context, input ConfigInterfaceInput) (*ConfigInterfaceResult, error) {
c := config.GetInstance()
r.setConfigBool(config.SFWContentMode, input.SfwContentMode)
if input.MenuItems != nil {
c.SetInterface(config.MenuItems, input.MenuItems)
}

View file

@ -149,7 +149,9 @@ func (r *mutationResolver) DeleteFiles(ctx context.Context, ids []string) (ret b
return false, fmt.Errorf("converting ids: %w", err)
}
fileDeleter := file.NewDeleter()
trashPath := manager.GetInstance().Config.GetDeleteTrashPath()
fileDeleter := file.NewDeleterWithTrash(trashPath)
destroyer := &file.ZipDestroyer{
FileDestroyer: r.repository.File,
FolderDestroyer: r.repository.Folder,

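The implementation of file.NewDeleterWithTrash is not part of this diff, so the following is only a minimal sketch of the behaviour the trash path is described as enabling: with a trash directory configured, files are moved into it rather than removed outright. The function and paths below are hypothetical.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// deleteOrTrash is a hypothetical stand-in for the deleter behaviour: remove outright
// when no trash path is configured, otherwise move the file into the trash directory.
// (A real implementation would also need to handle cross-device moves and name clashes.)
func deleteOrTrash(path, trashPath string) error {
	if trashPath == "" {
		return os.Remove(path)
	}
	return os.Rename(path, filepath.Join(trashPath, filepath.Base(path)))
}

func main() {
	if err := deleteOrTrash("/tmp/example.mp4", "/tmp/stash-trash"); err != nil {
		fmt.Println("delete failed:", err)
	}
}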
View file

@ -333,10 +333,12 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
return false, fmt.Errorf("converting ids: %w", err)
}
trashPath := manager.GetInstance().Config.GetDeleteTrashPath()
var galleries []*models.Gallery
var imgsDestroyed []*models.Image
fileDeleter := &image.FileDeleter{
Deleter: file.NewDeleter(),
Deleter: file.NewDeleterWithTrash(trashPath),
Paths: manager.GetInstance().Paths,
}

View file

@ -308,9 +308,11 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
return false, fmt.Errorf("converting id: %w", err)
}
trashPath := manager.GetInstance().Config.GetDeleteTrashPath()
var i *models.Image
fileDeleter := &image.FileDeleter{
Deleter: file.NewDeleter(),
Deleter: file.NewDeleterWithTrash(trashPath),
Paths: manager.GetInstance().Paths,
}
if err := r.withTxn(ctx, func(ctx context.Context) error {
@ -348,9 +350,11 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
return false, fmt.Errorf("converting ids: %w", err)
}
trashPath := manager.GetInstance().Config.GetDeleteTrashPath()
var images []*models.Image
fileDeleter := &image.FileDeleter{
Deleter: file.NewDeleter(),
Deleter: file.NewDeleterWithTrash(trashPath),
Paths: manager.GetInstance().Paths,
}
if err := r.withTxn(ctx, func(ctx context.Context) error {

View file

@ -428,10 +428,11 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
}
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
trashPath := manager.GetInstance().Config.GetDeleteTrashPath()
var s *models.Scene
fileDeleter := &scene.FileDeleter{
Deleter: file.NewDeleter(),
Deleter: file.NewDeleterWithTrash(trashPath),
FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths,
}
@ -482,9 +483,10 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
var scenes []*models.Scene
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
trashPath := manager.GetInstance().Config.GetDeleteTrashPath()
fileDeleter := &scene.FileDeleter{
Deleter: file.NewDeleter(),
Deleter: file.NewDeleterWithTrash(trashPath),
FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths,
}
@ -593,8 +595,9 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput
}
mgr := manager.GetInstance()
trashPath := mgr.Config.GetDeleteTrashPath()
fileDeleter := &scene.FileDeleter{
Deleter: file.NewDeleter(),
Deleter: file.NewDeleterWithTrash(trashPath),
FileNamingAlgo: mgr.Config.GetVideoFileNamingAlgorithm(),
Paths: mgr.Paths,
}
@ -736,9 +739,10 @@ func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input SceneMar
}
mgr := manager.GetInstance()
trashPath := mgr.Config.GetDeleteTrashPath()
fileDeleter := &scene.FileDeleter{
Deleter: file.NewDeleter(),
Deleter: file.NewDeleterWithTrash(trashPath),
FileNamingAlgo: mgr.Config.GetVideoFileNamingAlgorithm(),
Paths: mgr.Paths,
}
@ -820,6 +824,123 @@ func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input SceneMar
return r.getSceneMarker(ctx, markerID)
}
func (r *mutationResolver) BulkSceneMarkerUpdate(ctx context.Context, input BulkSceneMarkerUpdateInput) ([]*models.SceneMarker, error) {
ids, err := stringslice.StringSliceToIntSlice(input.Ids)
if err != nil {
return nil, fmt.Errorf("converting ids: %w", err)
}
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
// Populate the scene marker partial from the input
partial := models.NewSceneMarkerPartial()
partial.Title = translator.optionalString(input.Title, "title")
partial.PrimaryTagID, err = translator.optionalIntFromString(input.PrimaryTagID, "primary_tag_id")
if err != nil {
return nil, fmt.Errorf("converting primary tag id: %w", err)
}
partial.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids")
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
ret := []*models.SceneMarker{}
// Start the transaction and update the scene markers
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.SceneMarker
for _, id := range ids {
l := partial
if err := adjustMarkerPartialForTagExclusion(ctx, r.repository.SceneMarker, id, &l); err != nil {
return err
}
updated, err := qb.UpdatePartial(ctx, id, l)
if err != nil {
return err
}
ret = append(ret, updated)
}
return nil
}); err != nil {
return nil, err
}
// execute post hooks outside of txn
var newRet []*models.SceneMarker
for _, m := range ret {
r.hookExecutor.ExecutePostHooks(ctx, m.ID, hook.SceneMarkerUpdatePost, input, translator.getFields())
m, err = r.getSceneMarker(ctx, m.ID)
if err != nil {
return nil, err
}
newRet = append(newRet, m)
}
return newRet, nil
}
// adjustMarkerPartialForTagExclusion adjusts the SceneMarkerPartial to exclude the primary tag from tag updates.
func adjustMarkerPartialForTagExclusion(ctx context.Context, r models.SceneMarkerReader, id int, partial *models.SceneMarkerPartial) error {
if partial.TagIDs == nil && !partial.PrimaryTagID.Set {
return nil
}
// exclude primary tag from tag updates
var primaryTagID int
if partial.PrimaryTagID.Set {
primaryTagID = partial.PrimaryTagID.Value
} else {
existing, err := r.Find(ctx, id)
if err != nil {
return fmt.Errorf("finding existing primary tag id: %w", err)
}
primaryTagID = existing.PrimaryTagID
}
existingTagIDs, err := r.GetTagIDs(ctx, id)
if err != nil {
return fmt.Errorf("getting existing tag ids: %w", err)
}
tagIDAttr := partial.TagIDs
if tagIDAttr == nil {
tagIDAttr = &models.UpdateIDs{
IDs: existingTagIDs,
Mode: models.RelationshipUpdateModeSet,
}
}
newTagIDs := tagIDAttr.Apply(existingTagIDs)
// Remove primary tag from newTagIDs if present
newTagIDs = sliceutil.Exclude(newTagIDs, []int{primaryTagID})
if len(existingTagIDs) != len(newTagIDs) {
partial.TagIDs = &models.UpdateIDs{
IDs: newTagIDs,
Mode: models.RelationshipUpdateModeSet,
}
} else {
// no change to tags required
partial.TagIDs = nil
}
return nil
}
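A tiny illustration (not part of the diff) of the exclusion step above, using plain int slices: whatever tag list ends up applied, the primary tag must not appear in it. The helper below mirrors what the sliceutil.Exclude call is used for here.

package main

import "fmt"

// excludePrimary drops the primary tag id from the applied tag list, mirroring the
// sliceutil.Exclude call in adjustMarkerPartialForTagExclusion above.
func excludePrimary(tagIDs []int, primaryTagID int) []int {
	out := make([]int, 0, len(tagIDs))
	for _, id := range tagIDs {
		if id != primaryTagID {
			out = append(out, id)
		}
	}
	return out
}

func main() {
	fmt.Println(excludePrimary([]int{1, 2, 3}, 2)) // [1 3]
}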
func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (bool, error) {
return r.SceneMarkersDestroy(ctx, []string{id})
}
@ -832,9 +953,10 @@ func (r *mutationResolver) SceneMarkersDestroy(ctx context.Context, markerIDs []
var markers []*models.SceneMarker
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
trashPath := manager.GetInstance().Config.GetDeleteTrashPath()
fileDeleter := &scene.FileDeleter{
Deleter: file.NewDeleter(),
Deleter: file.NewDeleterWithTrash(trashPath),
FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths,
}

View file

@ -153,6 +153,14 @@ func (r *mutationResolver) makeSceneDraft(ctx context.Context, s *models.Scene,
return nil, err
}
// Load StashIDs for tags
tqb := r.repository.Tag
for _, t := range draft.Tags {
if err := t.LoadStashIDs(ctx, tqb); err != nil {
return nil, err
}
}
draft.Cover = cover
return draft, nil

View file

@ -33,7 +33,6 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
newStudio := models.NewStudio()
newStudio.Name = input.Name
newStudio.URL = translator.string(input.URL)
newStudio.Rating = input.Rating100
newStudio.Favorite = translator.bool(input.Favorite)
newStudio.Details = translator.string(input.Details)
@ -43,6 +42,15 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
var err error
newStudio.URLs = models.NewRelatedStrings([]string{})
if input.URL != nil {
newStudio.URLs.Add(*input.URL)
}
if input.Urls != nil {
newStudio.URLs.Add(input.Urls...)
}
newStudio.ParentID, err = translator.intPtrFromString(input.ParentID)
if err != nil {
return nil, fmt.Errorf("converting parent id: %w", err)
@ -106,7 +114,6 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
updatedStudio.ID = studioID
updatedStudio.Name = translator.optionalString(input.Name, "name")
updatedStudio.URL = translator.optionalString(input.URL, "url")
updatedStudio.Details = translator.optionalString(input.Details, "details")
updatedStudio.Rating = translator.optionalInt(input.Rating100, "rating100")
updatedStudio.Favorite = translator.optionalBool(input.Favorite, "favorite")
@ -124,6 +131,26 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
return nil, fmt.Errorf("converting tag ids: %w", err)
}
if translator.hasField("urls") {
// ensure the legacy url field is not also included in the input
if err := r.validateNoLegacyURLs(translator); err != nil {
return nil, err
}
updatedStudio.URLs = translator.updateStrings(input.Urls, "urls")
} else if translator.hasField("url") {
// handle legacy url field
legacyURLs := []string{}
if input.URL != nil {
legacyURLs = append(legacyURLs, *input.URL)
}
updatedStudio.URLs = &models.UpdateStrings{
Mode: models.RelationshipUpdateModeSet,
Values: legacyURLs,
}
}
// Process the base 64 encoded image string
var imageData []byte
imageIncluded := translator.hasField("image")
@ -163,6 +190,96 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
return r.getStudio(ctx, studioID)
}
func (r *mutationResolver) BulkStudioUpdate(ctx context.Context, input BulkStudioUpdateInput) ([]*models.Studio, error) {
ids, err := stringslice.StringSliceToIntSlice(input.Ids)
if err != nil {
return nil, fmt.Errorf("converting ids: %w", err)
}
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
// Populate the studio partial from the input
partial := models.NewStudioPartial()
partial.ParentID, err = translator.optionalIntFromString(input.ParentID, "parent_id")
if err != nil {
return nil, fmt.Errorf("converting parent id: %w", err)
}
if translator.hasField("urls") {
// ensure the legacy url field is not also included in the input
if err := r.validateNoLegacyURLs(translator); err != nil {
return nil, err
}
partial.URLs = translator.updateStringsBulk(input.Urls, "urls")
} else if translator.hasField("url") {
// handle legacy url field
legacyURLs := []string{}
if input.URL != nil {
legacyURLs = append(legacyURLs, *input.URL)
}
partial.URLs = &models.UpdateStrings{
Mode: models.RelationshipUpdateModeSet,
Values: legacyURLs,
}
}
partial.Favorite = translator.optionalBool(input.Favorite, "favorite")
partial.Rating = translator.optionalInt(input.Rating100, "rating100")
partial.Details = translator.optionalString(input.Details, "details")
partial.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag")
partial.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids")
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
ret := []*models.Studio{}
// Start the transaction and update the studios
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Studio
for _, id := range ids {
local := partial
local.ID = id
if err := studio.ValidateModify(ctx, local, qb); err != nil {
return err
}
updated, err := qb.UpdatePartial(ctx, local)
if err != nil {
return err
}
ret = append(ret, updated)
}
return nil
}); err != nil {
return nil, err
}
// execute post hooks outside of txn
var newRet []*models.Studio
for _, studio := range ret {
r.hookExecutor.ExecutePostHooks(ctx, studio.ID, hook.StudioUpdatePost, input, translator.getFields())
studio, err = r.getStudio(ctx, studio.ID)
if err != nil {
return nil, err
}
newRet = append(newRet, studio)
}
return newRet, nil
}
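A short sketch (not part of the diff) of the legacy-field handling above: when only the deprecated url field is supplied, it is translated into a set-mode replacement of the urls list, and a nil url clears the list.

package main

import "fmt"

// legacyURLToList mirrors how StudioUpdate/BulkStudioUpdate above translate the
// deprecated single url field into a full replacement value for urls.
func legacyURLToList(url *string) []string {
	urls := []string{}
	if url != nil {
		urls = append(urls, *url)
	}
	return urls
}

func main() {
	u := "https://example.com"
	fmt.Println(legacyURLToList(&u))  // [https://example.com]
	fmt.Println(legacyURLToList(nil)) // []
}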
func (r *mutationResolver) StudioDestroy(ctx context.Context, input StudioDestroyInput) (bool, error) {
id, err := strconv.Atoi(input.ID)
if err != nil {

View file

@ -39,6 +39,14 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput)
newTag.Description = translator.string(input.Description)
newTag.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag)
var stashIDInputs models.StashIDInputs
for _, sid := range input.StashIds {
if sid != nil {
stashIDInputs = append(stashIDInputs, *sid)
}
}
newTag.StashIDs = models.NewRelatedStashIDs(stashIDInputs.ToStashIDs())
var err error
newTag.ParentIDs, err = translator.relatedIds(input.ParentIds)
@ -110,6 +118,14 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput)
updatedTag.Aliases = translator.updateStrings(input.Aliases, "aliases")
var updateStashIDInputs models.StashIDInputs
for _, sid := range input.StashIds {
if sid != nil {
updateStashIDInputs = append(updateStashIDInputs, *sid)
}
}
updatedTag.StashIDs = translator.updateStashIDs(updateStashIDInputs, "stash_ids")
updatedTag.ParentIDs, err = translator.updateIds(input.ParentIds, "parent_ids")
if err != nil {
return nil, fmt.Errorf("converting parent tag ids: %w", err)

View file

@ -82,6 +82,7 @@ func makeConfigGeneralResult() *ConfigGeneralResult {
Stashes: config.GetStashPaths(),
DatabasePath: config.GetDatabasePath(),
BackupDirectoryPath: config.GetBackupDirectoryPath(),
DeleteTrashPath: config.GetDeleteTrashPath(),
GeneratedPath: config.GetGeneratedPath(),
MetadataPath: config.GetMetadataPath(),
ConfigFilePath: config.GetConfigFile(),
@ -115,6 +116,7 @@ func makeConfigGeneralResult() *ConfigGeneralResult {
LogOut: config.GetLogOut(),
LogLevel: config.GetLogLevel(),
LogAccess: config.GetLogAccess(),
LogFileMaxSize: config.GetLogFileMaxSize(),
VideoExtensions: config.GetVideoExtensions(),
ImageExtensions: config.GetImageExtensions(),
GalleryExtensions: config.GetGalleryExtensions(),
@ -162,6 +164,7 @@ func makeConfigInterfaceResult() *ConfigInterfaceResult {
disableDropdownCreate := config.GetDisableDropdownCreate()
return &ConfigInterfaceResult{
SfwContentMode: config.GetSFWContentMode(),
MenuItems: menuItems,
SoundOnPreview: &soundOnPreview,
WallShowTitle: &wallShowTitle,

View file

@ -0,0 +1,120 @@
package api
import (
"context"
"errors"
"strconv"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
func (r *queryResolver) FindFile(ctx context.Context, id *string, path *string) (BaseFile, error) {
var ret models.File
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
qb := r.repository.File
var err error
switch {
case id != nil:
idInt, err := strconv.Atoi(*id)
if err != nil {
return err
}
var files []models.File
files, err = qb.Find(ctx, models.FileID(idInt))
if err != nil {
return err
}
if len(files) > 0 {
ret = files[0]
}
case path != nil:
ret, err = qb.FindByPath(ctx, *path)
if err == nil && ret == nil {
return errors.New("file not found")
}
default:
return errors.New("either id or path must be provided")
}
return err
}); err != nil {
return nil, err
}
return convertBaseFile(ret), nil
}
func (r *queryResolver) FindFiles(
ctx context.Context,
fileFilter *models.FileFilterType,
filter *models.FindFilterType,
ids []string,
) (ret *FindFilesResultType, err error) {
var fileIDs []models.FileID
if len(ids) > 0 {
fileIDsInt, err := stringslice.StringSliceToIntSlice(ids)
if err != nil {
return nil, err
}
fileIDs = models.FileIDsFromInts(fileIDsInt)
}
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
var files []models.File
var err error
fields := collectQueryFields(ctx)
result := &models.FileQueryResult{}
if len(fileIDs) > 0 {
files, err = r.repository.File.Find(ctx, fileIDs...)
if err == nil {
result.Count = len(files)
for _, f := range files {
if asVideo, ok := f.(*models.VideoFile); ok {
result.TotalDuration += asVideo.Duration
}
if asImage, ok := f.(*models.ImageFile); ok {
result.Megapixels += asImage.Megapixels()
}
result.TotalSize += f.Base().Size
}
}
} else {
result, err = r.repository.File.Query(ctx, models.FileQueryOptions{
QueryOptions: models.QueryOptions{
FindFilter: filter,
Count: fields.Has("count"),
},
FileFilter: fileFilter,
TotalDuration: fields.Has("duration"),
Megapixels: fields.Has("megapixels"),
TotalSize: fields.Has("size"),
})
if err == nil {
files, err = result.Resolve(ctx)
}
}
if err != nil {
return err
}
ret = &FindFilesResultType{
Count: result.Count,
Files: convertBaseFiles(files),
Duration: result.TotalDuration,
Megapixels: result.Megapixels,
Size: int(result.TotalSize),
}
return nil
}); err != nil {
return nil, err
}
return ret, nil
}

View file

@ -0,0 +1,100 @@
package api
import (
"context"
"errors"
"strconv"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
func (r *queryResolver) FindFolder(ctx context.Context, id *string, path *string) (*models.Folder, error) {
var ret *models.Folder
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Folder
var err error
switch {
case id != nil:
idInt, err := strconv.Atoi(*id)
if err != nil {
return err
}
ret, err = qb.Find(ctx, models.FolderID(idInt))
if err != nil {
return err
}
case path != nil:
ret, err = qb.FindByPath(ctx, *path)
if err == nil && ret == nil {
return errors.New("folder not found")
}
default:
return errors.New("either id or path must be provided")
}
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *queryResolver) FindFolders(
ctx context.Context,
folderFilter *models.FolderFilterType,
filter *models.FindFilterType,
ids []string,
) (ret *FindFoldersResultType, err error) {
var folderIDs []models.FolderID
if len(ids) > 0 {
folderIDsInt, err := stringslice.StringSliceToIntSlice(ids)
if err != nil {
return nil, err
}
folderIDs = models.FolderIDsFromInts(folderIDsInt)
}
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
var folders []*models.Folder
var err error
fields := collectQueryFields(ctx)
result := &models.FolderQueryResult{}
if len(folderIDs) > 0 {
folders, err = r.repository.Folder.FindMany(ctx, folderIDs)
if err == nil {
result.Count = len(folders)
}
} else {
result, err = r.repository.Folder.Query(ctx, models.FolderQueryOptions{
QueryOptions: models.QueryOptions{
FindFilter: filter,
Count: fields.Has("count"),
},
FolderFilter: folderFilter,
})
if err == nil {
folders, err = result.Resolve(ctx)
}
}
if err != nil {
return err
}
ret = &FindFoldersResultType{
Count: result.Count,
Folders: folders,
}
return nil
}); err != nil {
return nil, err
}
return ret, nil
}

View file

@ -201,7 +201,7 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source scraper.So
}
// TODO - this should happen after any scene is scraped
if err := r.matchScenesRelationships(ctx, ret, *source.StashBoxEndpoint); err != nil {
if err := r.matchScenesRelationships(ctx, ret, b.Endpoint); err != nil {
return nil, err
}
default:
@ -245,7 +245,7 @@ func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source scraper.So
// just flatten the slice and pass it in
flat := sliceutil.Flatten(ret)
if err := r.matchScenesRelationships(ctx, flat, *source.StashBoxEndpoint); err != nil {
if err := r.matchScenesRelationships(ctx, flat, b.Endpoint); err != nil {
return nil, err
}
@ -335,7 +335,7 @@ func (r *queryResolver) ScrapeSingleStudio(ctx context.Context, source scraper.S
if len(ret) > 0 {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
for _, studio := range ret {
if err := match.ScrapedStudioHierarchy(ctx, r.repository.Studio, studio, *source.StashBoxEndpoint); err != nil {
if err := match.ScrapedStudioHierarchy(ctx, r.repository.Studio, studio, b.Endpoint); err != nil {
return err
}
}

View file

@ -18,9 +18,14 @@ type PerformerFinder interface {
GetImage(ctx context.Context, performerID int) ([]byte, error)
}
type sfwConfig interface {
GetSFWContentMode() bool
}
type performerRoutes struct {
routes
performerFinder PerformerFinder
sfwConfig sfwConfig
}
func (rs performerRoutes) Routes() chi.Router {
@ -54,7 +59,7 @@ func (rs performerRoutes) Image(w http.ResponseWriter, r *http.Request) {
}
if len(image) == 0 {
image = getDefaultPerformerImage(performer.Name, performer.Gender)
image = getDefaultPerformerImage(performer.Name, performer.Gender, rs.sfwConfig.GetSFWContentMode())
}
utils.ServeImage(w, r, image)

View file

@ -135,6 +135,13 @@ func marshalScrapedGroups(content []scraper.ScrapedContent) ([]*models.ScrapedGr
ret = append(ret, m)
case models.ScrapedGroup:
ret = append(ret, &m)
// it's possible that a scraper returns models.ScrapedMovie
case *models.ScrapedMovie:
g := m.ScrapedGroup()
ret = append(ret, &g)
case models.ScrapedMovie:
g := m.ScrapedGroup()
ret = append(ret, &g)
default:
return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedGroup", models.ErrConversion)
}

View file

@ -322,6 +322,7 @@ func (s *Server) getPerformerRoutes() chi.Router {
return performerRoutes{
routes: routes{txnManager: repo.TxnManager},
performerFinder: repo.Performer,
sfwConfig: s.manager.Config,
}.Routes()
}

View file

@ -9,12 +9,14 @@ import (
type GalleryURLBuilder struct {
BaseURL string
GalleryID string
UpdatedAt string
}
func NewGalleryURLBuilder(baseURL string, gallery *models.Gallery) GalleryURLBuilder {
return GalleryURLBuilder{
BaseURL: baseURL,
GalleryID: strconv.Itoa(gallery.ID),
UpdatedAt: strconv.FormatInt(gallery.UpdatedAt.Unix(), 10),
}
}
@ -23,5 +25,5 @@ func (b GalleryURLBuilder) GetPreviewURL() string {
}
func (b GalleryURLBuilder) GetCoverURL() string {
return b.BaseURL + "/gallery/" + b.GalleryID + "/cover"
return b.BaseURL + "/gallery/" + b.GalleryID + "/cover?t=" + b.UpdatedAt
}
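
The cover URL now carries the gallery's UpdatedAt as a cache-busting query parameter, so browsers refetch the cover after the gallery changes. A minimal standalone illustration of the scheme (sample values are made up):

package main

import (
	"fmt"
	"strconv"
	"time"
)

// coverURL mirrors GetCoverURL above: append the update timestamp as ?t=.
func coverURL(baseURL string, galleryID int, updatedAt time.Time) string {
	return baseURL + "/gallery/" + strconv.Itoa(galleryID) + "/cover?t=" +
		strconv.FormatInt(updatedAt.Unix(), 10)
}

func main() {
	fmt.Println(coverURL("http://localhost:9999", 42, time.Unix(1700000000, 0)))
	// http://localhost:9999/gallery/42/cover?t=1700000000
}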

View file

@ -153,6 +153,8 @@ func (g sceneRelationships) tags(ctx context.Context) ([]int, error) {
tagIDs = originalTagIDs
}
endpoint := g.result.source.RemoteSite
for _, t := range scraped {
if t.StoredID != nil {
// existing tag, just add it
@ -163,10 +165,9 @@ func (g sceneRelationships) tags(ctx context.Context) ([]int, error) {
tagIDs = sliceutil.AppendUnique(tagIDs, int(tagID))
} else if createMissing {
newTag := models.NewTag()
newTag.Name = t.Name
newTag := t.ToTag(endpoint, nil)
err := g.tagCreator.Create(ctx, &newTag)
err := g.tagCreator.Create(ctx, newTag)
if err != nil {
return nil, fmt.Errorf("error creating tag: %w", err)
}

View file

@ -3,12 +3,14 @@ package log
import (
"fmt"
"io"
"os"
"strings"
"sync"
"time"
"github.com/sirupsen/logrus"
lumberjack "gopkg.in/natefinch/lumberjack.v2"
)
type LogItem struct {
@ -41,8 +43,8 @@ func NewLogger() *Logger {
}
// Init initialises the logger based on a logging configuration
func (log *Logger) Init(logFile string, logOut bool, logLevel string) {
var file *os.File
func (log *Logger) Init(logFile string, logOut bool, logLevel string, logFileMaxSize int) {
var logger io.WriteCloser
customFormatter := new(logrus.TextFormatter)
customFormatter.TimestampFormat = "2006-01-02 15:04:05"
customFormatter.ForceColors = true
@ -57,30 +59,38 @@ func (log *Logger) Init(logFile string, logOut bool, logLevel string) {
// the access log colouring not being applied
_, _ = customFormatter.Format(logrus.NewEntry(log.logger))
// if size is 0, disable rotation
if logFile != "" {
var err error
file, err = os.OpenFile(logFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644)
if err != nil {
fmt.Printf("Could not open '%s' for log output due to error: %s\n", logFile, err.Error())
if logFileMaxSize == 0 {
var err error
logger, err = os.OpenFile(logFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644)
if err != nil {
fmt.Fprintf(os.Stderr, "unable to open log file %s: %v\n", logFile, err)
}
} else {
logger = &lumberjack.Logger{
Filename: logFile,
MaxSize: logFileMaxSize, // Megabytes
Compress: true,
}
}
}
if file != nil {
if logger != nil {
if logOut {
// log to file separately disabling colours
fileFormatter := new(logrus.TextFormatter)
fileFormatter.TimestampFormat = customFormatter.TimestampFormat
fileFormatter.FullTimestamp = customFormatter.FullTimestamp
log.logger.AddHook(&fileLogHook{
Writer: file,
Writer: logger,
Formatter: fileFormatter,
})
} else {
// logging to file only
// turn off the colouring for the file
customFormatter.ForceColors = false
log.logger.Out = file
log.logger.Out = logger
}
}

View file

@ -16,9 +16,9 @@ import (
"golang.org/x/crypto/bcrypt"
"github.com/knadh/koanf"
"github.com/knadh/koanf/parsers/yaml"
"github.com/knadh/koanf/providers/file"
"github.com/knadh/koanf/v2"
"github.com/stashapp/stash/internal/identify"
"github.com/stashapp/stash/pkg/fsutil"
@ -43,6 +43,9 @@ const (
Password = "password"
MaxSessionAge = "max_session_age"
// SFWContentMode mode config key
SFWContentMode = "sfw_content_mode"
FFMpegPath = "ffmpeg_path"
FFProbePath = "ffprobe_path"
@ -249,13 +252,15 @@ const (
DLNAPortDefault = 1338
// Logging options
LogFile = "logfile"
LogOut = "logout"
defaultLogOut = true
LogLevel = "loglevel"
defaultLogLevel = "Info"
LogAccess = "logaccess"
defaultLogAccess = true
LogFile = "logfile"
LogOut = "logout"
defaultLogOut = true
LogLevel = "loglevel"
defaultLogLevel = "Info"
LogAccess = "logaccess"
defaultLogAccess = true
LogFileMaxSize = "logfile_max_size"
defaultLogFileMaxSize = 0 // megabytes, default disabled
// Default settings
DefaultScanSettings = "defaults.scan_task"
@ -267,6 +272,9 @@ const (
DeleteGeneratedDefault = "defaults.delete_generated"
deleteGeneratedDefaultDefault = true
// Trash/Recycle Bin options
DeleteTrashPath = "delete_trash_path"
// Desktop Integration Options
NoBrowser = "nobrowser"
NoBrowserDefault = false
@ -287,7 +295,7 @@ var (
defaultVideoExtensions = []string{"m4v", "mp4", "mov", "wmv", "avi", "mpg", "mpeg", "rmvb", "rm", "flv", "asf", "mkv", "webm", "f4v"}
defaultImageExtensions = []string{"png", "jpg", "jpeg", "gif", "webp"}
defaultGalleryExtensions = []string{"zip", "cbz"}
defaultMenuItems = []string{"scenes", "images", "movies", "markers", "galleries", "performers", "studios", "tags"}
defaultMenuItems = []string{"scenes", "images", "groups", "markers", "galleries", "performers", "studios", "tags"}
)
type MissingConfigError struct {
@ -628,7 +636,15 @@ func (i *Config) getStringMapString(key string) map[string]string {
return ret
}
// GetStathPaths returns the configured stash library paths.
// GetSFWContentMode returns true if SFW content mode is enabled.
// Default performer images are changed to more agnostic images when enabled.
func (i *Config) GetSFWContentMode() bool {
i.RLock()
defer i.RUnlock()
return i.getBool(SFWContentMode)
}
// GetStashPaths returns the configured stash library paths.
// Works opposite to the usual case - it will return the override
// value only if the main value is not set.
func (i *Config) GetStashPaths() StashConfigs {
@ -1456,6 +1472,14 @@ func (i *Config) GetDeleteGeneratedDefault() bool {
return i.getBoolDefault(DeleteGeneratedDefault, deleteGeneratedDefaultDefault)
}
func (i *Config) GetDeleteTrashPath() string {
return i.getString(DeleteTrashPath)
}
func (i *Config) SetDeleteTrashPath(value string) {
i.SetString(DeleteTrashPath, value)
}
// GetDefaultIdentifySettings returns the default Identify task settings.
// Returns nil if the settings could not be unmarshalled, or if it
// has not been set.
@ -1625,6 +1649,16 @@ func (i *Config) GetLogAccess() bool {
return i.getBoolDefault(LogAccess, defaultLogAccess)
}
// GetLogFileMaxSize returns the maximum size of the log file in megabytes before lumberjack rotates it
func (i *Config) GetLogFileMaxSize() int {
value := i.getInt(LogFileMaxSize)
if value < 0 {
value = defaultLogFileMaxSize
}
return value
}
// Max allowed graphql upload size in megabytes
func (i *Config) GetMaxUploadSize() int64 {
i.RLock()

View file

@ -8,9 +8,9 @@ import (
"path/filepath"
"strings"
"github.com/knadh/koanf"
"github.com/knadh/koanf/providers/env"
"github.com/knadh/koanf/providers/posflag"
"github.com/knadh/koanf/v2"
"github.com/spf13/pflag"
"github.com/stashapp/stash/pkg/fsutil"

View file

@ -60,6 +60,10 @@ func generateRegexps(patterns []string) []*regexp.Regexp {
var fileRegexps []*regexp.Regexp
for _, pattern := range patterns {
if pattern == "" || pattern == " " {
logger.Warnf("Skipping empty exclude pattern")
continue
}
if !strings.HasPrefix(pattern, "(?i)") {
pattern = "(?i)" + pattern
}

View file

@ -28,7 +28,8 @@ type InteractiveHeatmapSpeedGenerator struct {
type Script struct {
// Version of Launchscript
Version string `json:"version"`
// #5600 - ignore version, don't validate type
Version json.RawMessage `json:"version"`
// Inverted causes up and down movement to be flipped.
Inverted bool `json:"inverted,omitempty"`
// Range is the percentage of a full stroke to use.
@ -40,7 +41,7 @@ type Script struct {
// Action is a move at a specific time.
type Action struct {
// At time in milliseconds the action should fire.
At int64 `json:"at"`
At float64 `json:"at"`
// Pos is the place in percent to move to.
Pos int `json:"pos"`
@ -109,8 +110,8 @@ func (g *InteractiveHeatmapSpeedGenerator) LoadFunscriptData(path string, sceneD
// trim actions with negative timestamps to avoid index range errors when generating heatmap
// #3181 - also trim actions that occur after the scene duration
loggedBadTimestamp := false
sceneDurationMilli := int64(sceneDuration * 1000)
isValid := func(x int64) bool {
sceneDurationMilli := sceneDuration * 1000
isValid := func(x float64) bool {
return x >= 0 && x < sceneDurationMilli
}
@ -132,7 +133,7 @@ func (g *InteractiveHeatmapSpeedGenerator) LoadFunscriptData(path string, sceneD
func (funscript *Script) UpdateIntensityAndSpeed() {
var t1, t2 int64
var t1, t2 float64
var p1, p2 int
var intensity float64
for i := range funscript.Actions {
@ -241,13 +242,13 @@ func (gt GradientTable) GetYRange(t float64) [2]float64 {
func (funscript Script) getGradientTable(numSegments int, sceneDurationMilli int64) GradientTable {
const windowSize = 15
const backfillThreshold = 500
const backfillThreshold = float64(500)
segments := make([]struct {
count int
intensity int
yRange [2]float64
at int64
at float64
}, numSegments)
gradient := make(GradientTable, numSegments)
posList := []int{}
@ -297,7 +298,7 @@ func (funscript Script) getGradientTable(numSegments int, sceneDurationMilli int
// Fill in gaps in segments
for i := 0; i < numSegments; i++ {
segmentTS := (maxts / int64(numSegments)) * int64(i)
segmentTS := float64((maxts / int64(numSegments)) * int64(i))
// Empty segment - fill it with the previous up to backfillThreshold ms
if segments[i].count == 0 {
@ -406,7 +407,8 @@ func ConvertFunscriptToCSV(funscriptPath string) ([]byte, error) {
pos = convertRange(pos, 0, funscript.Range, 0, 100)
}
buffer.WriteString(fmt.Sprintf("%d,%d\r\n", action.At, pos))
// I don't know whether the csv format requires int or float, so for now we'll use int
buffer.WriteString(fmt.Sprintf("%d,%d\r\n", int(math.Round(action.At)), pos))
}
return buffer.Bytes(), nil
}

View file

@ -262,6 +262,10 @@ func (s *Manager) Setup(ctx context.Context, input SetupInput) error {
cfg.SetString(config.Cache, input.CacheLocation)
}
if input.SFWContentMode {
cfg.SetBool(config.SFWContentMode, true)
}
if input.StoreBlobsInDatabase {
cfg.SetInterface(config.BlobsStorage, config.BlobStorageTypeDatabase)
} else {
@ -322,6 +326,11 @@ func (s *Manager) BackupDatabase(download bool) (string, string, error) {
backupPath = f.Name()
backupName = s.Database.DatabaseBackupPath("")
f.Close()
// delete the temp file so that the backup operation can create it
if err := os.Remove(backupPath); err != nil {
return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err)
}
} else {
backupDir := s.Config.GetBackupDirectoryPathOrDefault()
if backupDir != "" {

View file

@ -294,6 +294,7 @@ func (s *Manager) Clean(ctx context.Context, input CleanMetadataInput) int {
Handlers: []file.CleanHandler{
&cleanHandler{},
},
TrashPath: s.Config.GetDeleteTrashPath(),
}
j := cleanJob{

View file

@ -21,6 +21,7 @@ type SetupInput struct {
// Empty to indicate $HOME/.stash/config.yml default
ConfigLocation string `json:"configLocation"`
Stashes []*config.StashConfigInput `json:"stashes"`
SFWContentMode bool `json:"sfwContentMode"`
// Empty to indicate default
DatabaseFile string `json:"databaseFile"`
// Empty to indicate default

View file

@ -3,7 +3,9 @@ package manager
import (
"context"
"errors"
"mime"
"net/http"
"path/filepath"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/internal/static"
@ -46,14 +48,17 @@ func (s *SceneServer) StreamSceneDirect(scene *models.Scene, w http.ResponseWrit
sceneHash := scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm())
filepath := GetInstance().Paths.Scene.GetStreamPath(scene.Path, sceneHash)
fp := GetInstance().Paths.Scene.GetStreamPath(scene.Path, sceneHash)
streamRequestCtx := ffmpeg.NewStreamRequestContext(w, r)
// #2579 - hijacking and closing the connection here causes video playback to fail in Safari
// We trust that the request context will be closed, so we don't need to call Cancel on the
// returned context here.
_ = GetInstance().ReadLockManager.ReadLock(streamRequestCtx, filepath)
http.ServeFile(w, r, filepath)
_ = GetInstance().ReadLockManager.ReadLock(streamRequestCtx, fp)
_, filename := filepath.Split(fp)
contentDisposition := mime.FormatMediaType("inline", map[string]string{"filename": filename})
w.Header().Set("Content-Disposition", contentDisposition)
http.ServeFile(w, r, fp)
}
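
What the new Content-Disposition header looks like in isolation, using only the standard library; the path below is made up:

package main

import (
	"fmt"
	"mime"
	"path/filepath"
)

func main() {
	// The stream is served inline with the original file name attached,
	// matching the header built in StreamSceneDirect above.
	_, name := filepath.Split("/library/scenes/example scene.mp4")
	fmt.Println(mime.FormatMediaType("inline", map[string]string{"filename": name}))
	// inline; filename="example scene.mp4"
}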
func (s *SceneServer) ServeScreenshot(scene *models.Scene, w http.ResponseWriter, r *http.Request) {

View file

@ -107,6 +107,12 @@ func (t *GenerateMarkersTask) generateMarker(videoFile *models.VideoFile, scene
sceneHash := scene.GetHash(t.fileNamingAlgorithm)
seconds := float64(sceneMarker.Seconds)
// check if the marker is past the video duration
if seconds > float64(videoFile.Duration) {
logger.Warnf("[generator] scene marker at %.2f seconds exceeds video duration of %.2f seconds, skipping", seconds, float64(videoFile.Duration))
return
}
g := t.generator
if err := g.MarkerPreviewVideo(context.TODO(), videoFile.Path, sceneHash, seconds, sceneMarker.EndSeconds, instance.Config.GetPreviewAudio()); err != nil {

View file

@ -32,6 +32,7 @@ func (t *GenerateCoverTask) Start(ctx context.Context) {
return t.Scene.LoadPrimaryFile(ctx, r.File)
}); err != nil {
logger.Error(err)
return
}
if !required {

View file

@ -8,12 +8,13 @@ import (
"io/fs"
)
//go:embed performer performer_male scene image gallery tag studio group
//go:embed performer performer_male performer_sfw scene image gallery tag studio group
var data embed.FS
const (
Performer = "performer"
PerformerMale = "performer_male"
Performer = "performer"
PerformerMale = "performer_male"
DefaultSFWPerformerImage = "performer_sfw/performer.svg"
Scene = "scene"
DefaultSceneImage = "scene/scene.svg"

View file

@ -0,0 +1,7 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="-136 -284 720 1080">
<!--!
Font Awesome Free 6.7.2 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) Copyright 2024 Fonticons, Inc.
Original from https://github.com/FortAwesome/Font-Awesome/blob/6.x/svgs/solid/user.svg
Modified to change color and viewbox
-->
<path d="M224 256A128 128 0 1 0 224 0a128 128 0 1 0 0 256zm-45.7 48C79.8 304 0 383.8 0 482.3C0 498.7 13.3 512 29.7 512l388.6 0c16.4 0 29.7-13.3 29.7-29.7C448 383.8 368.2 304 269.7 304l-91.4 0z" style="fill:#ffffff;fill-opacity:1" /></svg>

View file

@ -5,9 +5,11 @@ import (
"context"
"fmt"
"math"
"os"
"regexp"
"strconv"
"strings"
"time"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
@ -27,6 +29,7 @@ var (
VideoCodecIVP9 = makeVideoCodec("VP9 Intel Quick Sync Video (QSV)", "vp9_qsv")
VideoCodecVVP9 = makeVideoCodec("VP9 VAAPI", "vp9_vaapi")
VideoCodecVVPX = makeVideoCodec("VP8 VAAPI", "vp8_vaapi")
VideoCodecRK264 = makeVideoCodec("H264 Rockchip MPP (rkmpp)", "h264_rkmpp")
)
const minHeight int = 480
@ -43,6 +46,7 @@ func (f *FFMpeg) InitHWSupport(ctx context.Context) {
VideoCodecI264C,
VideoCodecV264,
VideoCodecR264,
VideoCodecRK264,
VideoCodecIVP9,
VideoCodecVVP9,
VideoCodecM264,
@ -64,12 +68,32 @@ func (f *FFMpeg) InitHWSupport(ctx context.Context) {
args = args.Format("null")
args = args.Output("-")
cmd := f.Command(ctx, args)
// #6064 - add timeout to context to prevent hangs
const hwTestTimeoutSecondsDefault = 1
hwTestTimeoutSeconds := hwTestTimeoutSecondsDefault * time.Second
// allow timeout to be overridden with environment variable
if timeout := os.Getenv("STASH_HW_TEST_TIMEOUT"); timeout != "" {
if seconds, err := strconv.Atoi(timeout); err == nil {
hwTestTimeoutSeconds = time.Duration(seconds) * time.Second
}
}
testCtx, cancel := context.WithTimeout(ctx, hwTestTimeoutSeconds)
defer cancel()
cmd := f.Command(testCtx, args)
logger.Tracef("[InitHWSupport] Testing codec %s: %v", codec, cmd.Args)
var stderr bytes.Buffer
cmd.Stderr = &stderr
if err := cmd.Run(); err != nil {
if testCtx.Err() != nil {
logger.Debugf("[InitHWSupport] Codec %s test timed out after %d seconds", codec, hwTestTimeoutSeconds)
continue
}
errOutput := stderr.String()
if len(errOutput) == 0 {
@ -179,6 +203,19 @@ func (f *FFMpeg) hwDeviceInit(args Args, toCodec VideoCodec, fullhw bool) Args {
args = append(args, "-init_hw_device")
args = append(args, "videotoolbox=vt")
}
case VideoCodecRK264:
// Rockchip: always create rkmpp device and make it the filter device, so
// scale_rkrga and subsequent hwupload/hwmap operate in the right context.
args = append(args, "-init_hw_device")
args = append(args, "rkmpp=rk")
args = append(args, "-filter_hw_device")
args = append(args, "rk")
if fullhw {
args = append(args, "-hwaccel")
args = append(args, "rkmpp")
args = append(args, "-hwaccel_output_format")
args = append(args, "drm_prime")
}
}
return args
@ -211,6 +248,14 @@ func (f *FFMpeg) hwFilterInit(toCodec VideoCodec, fullhw bool) VideoFilter {
videoFilter = videoFilter.Append("format=nv12")
videoFilter = videoFilter.Append("hwupload")
}
case VideoCodecRK264:
// For Rockchip full-hw, do NOT pre-map to rkrga here. scale_rkrga can
// consume DRM_PRIME frames directly when filter_hw_device is set.
// For non-fullhw, keep a sane software format.
if !fullhw {
videoFilter = videoFilter.Append("format=nv12")
videoFilter = videoFilter.Append("hwupload")
}
}
return videoFilter
@ -288,6 +333,9 @@ func (f *FFMpeg) hwApplyFullHWFilter(args VideoFilter, codec VideoCodec, fullhw
if fullhw && f.version.Gteq(Version{major: 3, minor: 3}) { // Added in FFMpeg 3.3
args = args.Append("scale_qsv=format=nv12")
}
case VideoCodecRK264:
// For Rockchip, no extra mapping here. If there is no scale filter,
// leave frames in DRM_PRIME for the encoder.
}
return args
@ -315,6 +363,14 @@ func (f *FFMpeg) hwApplyScaleTemplate(sargs string, codec VideoCodec, match []in
}
case VideoCodecM264:
template = "scale_vt=$value"
case VideoCodecRK264:
// The original filter chain is a fallback for maximum compatibility:
// "scale_rkrga=$value:format=nv12,hwdownload,format=nv12,hwupload"
// It avoids hwmap(rkrga→rkmpp) failures (-38/-12) seen on some builds
// by downloading the scaled frame to system RAM and re-uploading it.
// The filter chain below uses a zero-copy approach, passing the hardware-scaled
// frame directly to the encoder. This is more efficient but may be less stable.
template = "scale_rkrga=$value"
default:
return VideoFilter(sargs)
}
@ -323,12 +379,15 @@ func (f *FFMpeg) hwApplyScaleTemplate(sargs string, codec VideoCodec, match []in
isIntel := codec == VideoCodecI264 || codec == VideoCodecI264C || codec == VideoCodecIVP9
// BUG: scale_vt doesn't call ff_scale_adjust_dimensions, thus cant accept negative size values
isApple := codec == VideoCodecM264
// Rockchip's scale_rkrga supports -1/-2; don't apply minus-one hack here.
return VideoFilter(templateReplaceScale(sargs, template, match, vf, isIntel || isApple))
}
// Returns the max resolution for a given codec, or a default
func (f *FFMpeg) hwCodecMaxRes(codec VideoCodec) (int, int) {
switch codec {
case VideoCodecRK264:
return 8192, 8192
case VideoCodecN264,
VideoCodecN264H,
VideoCodecI264,
@ -360,7 +419,8 @@ func (f *FFMpeg) hwCodecHLSCompatible() *VideoCodec {
VideoCodecI264C,
VideoCodecV264,
VideoCodecR264,
VideoCodecM264: // Note that the Apple encoder sucks at startup, thus HLS quality is crap
VideoCodecM264, // Note that the Apple encoder sucks at startup, thus HLS quality is crap
VideoCodecRK264:
return &element
}
}
@ -375,7 +435,8 @@ func (f *FFMpeg) hwCodecMP4Compatible() *VideoCodec {
VideoCodecN264H,
VideoCodecI264,
VideoCodecI264C,
VideoCodecM264:
VideoCodecM264,
VideoCodecRK264:
return &element
}
}
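
The probe-timeout pattern added above (#6064), shown in isolation: a context deadline around an external ffmpeg invocation, overridable via STASH_HW_TEST_TIMEOUT. The binary name, arguments and one-second default are illustrative.

package main

import (
	"context"
	"fmt"
	"os"
	"os/exec"
	"strconv"
	"time"
)

// probeWithTimeout bounds a single codec probe so a hung ffmpeg process
// cannot stall startup; the deadline can be raised via an environment variable.
func probeWithTimeout(args ...string) error {
	timeout := 1 * time.Second
	if v := os.Getenv("STASH_HW_TEST_TIMEOUT"); v != "" {
		if secs, err := strconv.Atoi(v); err == nil {
			timeout = time.Duration(secs) * time.Second
		}
	}

	ctx, cancel := context.WithTimeout(context.Background(), timeout)
	defer cancel()

	cmd := exec.CommandContext(ctx, "ffmpeg", args...)
	err := cmd.Run()
	if ctx.Err() == context.DeadlineExceeded {
		return fmt.Errorf("probe timed out after %s", timeout)
	}
	return err
}

func main() {
	if err := probeWithTimeout("-hide_banner", "-version"); err != nil {
		fmt.Println("probe failed:", err)
	}
}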

View file

@ -18,7 +18,8 @@ type Cleaner struct {
FS models.FS
Repository Repository
Handlers []CleanHandler
Handlers []CleanHandler
TrashPath string
}
type cleanJob struct {
@ -392,7 +393,7 @@ func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *models.Folder) bool
func (j *cleanJob) deleteFile(ctx context.Context, fileID models.FileID, fn string) {
// delete associated objects
fileDeleter := NewDeleter()
fileDeleter := NewDeleterWithTrash(j.TrashPath)
r := j.Repository
if err := r.WithTxn(ctx, func(ctx context.Context) error {
fileDeleter.RegisterHooks(ctx)
@ -410,7 +411,7 @@ func (j *cleanJob) deleteFile(ctx context.Context, fileID models.FileID, fn stri
func (j *cleanJob) deleteFolder(ctx context.Context, folderID models.FolderID, fn string) {
// delete associated objects
fileDeleter := NewDeleter()
fileDeleter := NewDeleterWithTrash(j.TrashPath)
r := j.Repository
if err := r.WithTxn(ctx, func(ctx context.Context) error {
fileDeleter.RegisterHooks(ctx)

View file

@ -58,20 +58,33 @@ func newRenamerRemoverImpl() renamerRemoverImpl {
// Deleter is used to safely delete files and directories from the filesystem.
// During a transaction, files and directories are marked for deletion using
// the Files and Dirs methods. This will rename the files/directories to be
// deleted. If the transaction is rolled back, then the files/directories can
// be restored to their original state with the Abort method. If the
// transaction is committed, the marked files are then deleted from the
// filesystem using the Complete method.
// the Files and Dirs methods. If TrashPath is set, files are moved to trash
// immediately. Otherwise, they are renamed with a .delete suffix. If the
// transaction is rolled back, then the files/directories can be restored to
// their original state with the Rollback method. If the transaction is
// committed, the marked files are then deleted from the filesystem using the
// Commit method.
type Deleter struct {
RenamerRemover RenamerRemover
files []string
dirs []string
TrashPath string // if set, files will be moved to this directory instead of being permanently deleted
trashedPaths map[string]string // map of original path -> trash path (only used when TrashPath is set)
}
func NewDeleter() *Deleter {
return &Deleter{
RenamerRemover: newRenamerRemoverImpl(),
TrashPath: "",
trashedPaths: make(map[string]string),
}
}
func NewDeleterWithTrash(trashPath string) *Deleter {
return &Deleter{
RenamerRemover: newRenamerRemoverImpl(),
TrashPath: trashPath,
trashedPaths: make(map[string]string),
}
}
@ -92,6 +105,17 @@ func (d *Deleter) RegisterHooks(ctx context.Context) {
// Abort should be called to restore marked files if this function returns an
// error.
func (d *Deleter) Files(paths []string) error {
return d.filesInternal(paths, false)
}
// FilesWithoutTrash designates files to be deleted, bypassing the trash directory.
// Files will be permanently deleted even if TrashPath is configured.
// This is useful for deleting generated files that can be easily recreated.
func (d *Deleter) FilesWithoutTrash(paths []string) error {
return d.filesInternal(paths, true)
}
func (d *Deleter) filesInternal(paths []string, bypassTrash bool) error {
for _, p := range paths {
// fail silently if the file does not exist
if _, err := d.RenamerRemover.Stat(p); err != nil {
@ -103,7 +127,7 @@ func (d *Deleter) Files(paths []string) error {
return fmt.Errorf("check file %q exists: %w", p, err)
}
if err := d.renameForDelete(p); err != nil {
if err := d.renameForDelete(p, bypassTrash); err != nil {
return fmt.Errorf("marking file %q for deletion: %w", p, err)
}
d.files = append(d.files, p)
@ -118,6 +142,17 @@ func (d *Deleter) Files(paths []string) error {
// Abort should be called to restore marked files/directories if this function returns an
// error.
func (d *Deleter) Dirs(paths []string) error {
return d.dirsInternal(paths, false)
}
// DirsWithoutTrash designates directories to be deleted, bypassing the trash directory.
// Directories will be permanently deleted even if TrashPath is configured.
// This is useful for deleting generated directories that can be easily recreated.
func (d *Deleter) DirsWithoutTrash(paths []string) error {
return d.dirsInternal(paths, true)
}
func (d *Deleter) dirsInternal(paths []string, bypassTrash bool) error {
for _, p := range paths {
// fail silently if the file does not exist
if _, err := d.RenamerRemover.Stat(p); err != nil {
@ -129,7 +164,7 @@ func (d *Deleter) Dirs(paths []string) error {
return fmt.Errorf("check directory %q exists: %w", p, err)
}
if err := d.renameForDelete(p); err != nil {
if err := d.renameForDelete(p, bypassTrash); err != nil {
return fmt.Errorf("marking directory %q for deletion: %w", p, err)
}
d.dirs = append(d.dirs, p)
@ -150,33 +185,65 @@ func (d *Deleter) Rollback() {
d.files = nil
d.dirs = nil
d.trashedPaths = make(map[string]string)
}
// Commit deletes all files marked for deletion and clears the marked list.
// When using trash, files have already been moved during renameForDelete, so
// this just clears the tracking. Otherwise, permanently delete the .delete files.
// Any errors encountered are logged. All files will be attempted, regardless
// of the errors encountered.
func (d *Deleter) Commit() {
for _, f := range d.files {
if err := d.RenamerRemover.Remove(f + deleteFileSuffix); err != nil {
logger.Warnf("Error deleting file %q: %v", f+deleteFileSuffix, err)
if d.TrashPath != "" {
// Files were already moved to trash during renameForDelete, just clear tracking
logger.Debugf("Commit: %d files and %d directories already in trash, clearing tracking", len(d.files), len(d.dirs))
} else {
// Permanently delete files and directories marked with .delete suffix
for _, f := range d.files {
if err := d.RenamerRemover.Remove(f + deleteFileSuffix); err != nil {
logger.Warnf("Error deleting file %q: %v", f+deleteFileSuffix, err)
}
}
}
for _, f := range d.dirs {
if err := d.RenamerRemover.RemoveAll(f + deleteFileSuffix); err != nil {
logger.Warnf("Error deleting directory %q: %v", f+deleteFileSuffix, err)
for _, f := range d.dirs {
if err := d.RenamerRemover.RemoveAll(f + deleteFileSuffix); err != nil {
logger.Warnf("Error deleting directory %q: %v", f+deleteFileSuffix, err)
}
}
}
d.files = nil
d.dirs = nil
d.trashedPaths = make(map[string]string)
}
func (d *Deleter) renameForDelete(path string) error {
func (d *Deleter) renameForDelete(path string, bypassTrash bool) error {
if d.TrashPath != "" && !bypassTrash {
// Move file to trash immediately
trashDest, err := fsutil.MoveToTrash(path, d.TrashPath)
if err != nil {
return err
}
d.trashedPaths[path] = trashDest
logger.Infof("Moved %q to trash at %s", path, trashDest)
return nil
}
// Standard behavior (also used when bypassing trash): rename with a .delete suffix
return d.RenamerRemover.Rename(path, path+deleteFileSuffix)
}
func (d *Deleter) renameForRestore(path string) error {
if d.TrashPath != "" {
// Restore file from trash
trashPath, ok := d.trashedPaths[path]
if !ok {
return fmt.Errorf("no trash path found for %q", path)
}
return d.RenamerRemover.Rename(trashPath, path)
}
// Standard behavior: restore from .delete suffix
return d.RenamerRemover.Rename(path+deleteFileSuffix, path)
}
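
A minimal sketch of driving the trash-aware Deleter directly, assuming it is exported from pkg/file as the surrounding Cleaner code suggests; in Stash itself these calls run inside a transaction via RegisterHooks. The paths are hypothetical.

package main

import (
	"log"

	"github.com/stashapp/stash/pkg/file"
)

func main() {
	// With a trash path set, Files moves the file into the trash directory
	// immediately; Rollback restores it, Commit just clears the tracking.
	d := file.NewDeleterWithTrash("/data/.stash-trash")
	if err := d.Files([]string{"/data/media/dupe.mp4"}); err != nil {
		d.Rollback() // restore anything already moved to trash
		log.Fatal(err)
	}
	d.Commit() // file stays in the trash; nothing left to remove
}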

View file

@ -107,7 +107,8 @@ func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*models.
info, err := d.Info()
if err != nil {
return fmt.Errorf("reading info for %q: %w", path, err)
logger.Errorf("reading info for %q: %v", path, err)
return nil
}
if !s.acceptEntry(ctx, path, info) {

View file

@ -4,6 +4,7 @@ import (
"errors"
"fmt"
"io"
"strings"
"github.com/rwcarlsen/goexif/exif"
"github.com/stashapp/stash/pkg/logger"
@ -33,7 +34,7 @@ func areDimensionsFlipped(fs models.FS, path string) (bool, error) {
x, err := exif.Decode(r)
if err != nil {
if errors.Is(err, io.EOF) {
if errors.Is(err, io.EOF) || strings.Contains(err.Error(), "failed to find exif") {
// no exif data
return false, nil
}

View file

@ -50,7 +50,7 @@ func (d *Decorator) Decorate(ctx context.Context, fs models.FS, f models.File) (
isClip := true
// This list is derived from ffmpegImageThumbnail in pkg/image/thumbnail. If one gets updated, the other should be as well
for _, item := range []string{"png", "mjpeg", "webp", "bmp"} {
for _, item := range []string{"png", "mjpeg", "webp", "bmp", "jpegxl"} {
if item == probe.VideoCodec {
isClip = false
}

View file

@ -239,7 +239,8 @@ func (s *scanJob) queueFileFunc(ctx context.Context, f models.FS, zipFile *scanF
info, err := d.Info()
if err != nil {
return fmt.Errorf("reading info for %q: %w", path, err)
logger.Errorf("reading info for %q: %v", path, err)
return nil
}
if !s.acceptEntry(ctx, path, info) {

43
pkg/fsutil/trash.go Normal file
View file

@ -0,0 +1,43 @@
package fsutil
import (
"fmt"
"os"
"path/filepath"
"time"
)
// MoveToTrash moves a file or directory to a custom trash directory.
// If a file with the same name already exists in the trash, a timestamp is appended.
// Returns the destination path where the file was moved to.
func MoveToTrash(sourcePath string, trashPath string) (string, error) {
// Get absolute path for the source
absSourcePath, err := filepath.Abs(sourcePath)
if err != nil {
return "", fmt.Errorf("failed to get absolute path: %w", err)
}
// Ensure trash directory exists
if err := os.MkdirAll(trashPath, 0755); err != nil {
return "", fmt.Errorf("failed to create trash directory: %w", err)
}
// Get the base name of the file/directory
baseName := filepath.Base(absSourcePath)
destPath := filepath.Join(trashPath, baseName)
// If a file with the same name already exists in trash, append timestamp
if _, err := os.Stat(destPath); err == nil {
ext := filepath.Ext(baseName)
nameWithoutExt := baseName[:len(baseName)-len(ext)]
timestamp := time.Now().Format("20060102-150405")
destPath = filepath.Join(trashPath, fmt.Sprintf("%s_%s%s", nameWithoutExt, timestamp, ext))
}
// Move the file to trash using SafeMove to support cross-filesystem moves
if err := SafeMove(absSourcePath, destPath); err != nil {
return "", fmt.Errorf("failed to move to trash: %w", err)
}
return destPath, nil
}
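
A rough usage sketch for MoveToTrash; the paths are hypothetical, and the trash directory is created on demand as shown above.

package main

import (
	"fmt"
	"log"

	"github.com/stashapp/stash/pkg/fsutil"
)

func main() {
	// If a file with the same base name is already in the trash,
	// MoveToTrash appends a timestamp to the destination name.
	dest, err := fsutil.MoveToTrash("/data/media/old-file.mp4", "/data/.stash-trash")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("moved to", dest)
}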

View file

@ -19,6 +19,7 @@ type FileDeleter struct {
}
// MarkGeneratedFiles marks for deletion the generated files for the provided image.
// Generated files bypass trash and are permanently deleted since they can be regenerated.
func (d *FileDeleter) MarkGeneratedFiles(image *models.Image) error {
var files []string
thumbPath := d.Paths.Generated.GetThumbnailPath(image.Checksum, models.DefaultGthumbWidth)
@ -32,7 +33,7 @@ func (d *FileDeleter) MarkGeneratedFiles(image *models.Image) error {
files = append(files, prevPath)
}
return d.Files(files)
return d.FilesWithoutTrash(files)
}
// Destroy destroys an image, optionally marking the file and generated files for deletion.

View file

@ -45,7 +45,7 @@ func (r SceneRelationships) MatchRelationships(ctx context.Context, s *models.Sc
}
for _, t := range s.Tags {
err := ScrapedTag(ctx, r.TagFinder, t)
err := ScrapedTag(ctx, r.TagFinder, t, endpoint)
if err != nil {
return err
}
@ -190,11 +190,29 @@ func ScrapedGroup(ctx context.Context, qb GroupNamesFinder, storedID *string, na
// ScrapedTag matches the provided tag with the tags
// in the database and sets the ID field if one is found.
func ScrapedTag(ctx context.Context, qb models.TagQueryer, s *models.ScrapedTag) error {
func ScrapedTag(ctx context.Context, qb models.TagQueryer, s *models.ScrapedTag, stashBoxEndpoint string) error {
if s.StoredID != nil {
return nil
}
// Check if a tag with the StashID already exists
if stashBoxEndpoint != "" && s.RemoteSiteID != nil {
if finder, ok := qb.(models.TagFinder); ok {
tags, err := finder.FindByStashID(ctx, models.StashID{
StashID: *s.RemoteSiteID,
Endpoint: stashBoxEndpoint,
})
if err != nil {
return err
}
if len(tags) > 0 {
id := strconv.Itoa(tags[0].ID)
s.StoredID = &id
return nil
}
}
}
t, err := tag.ByName(ctx, qb, s.Name)
if err != nil {

View file

@ -9,15 +9,35 @@ import (
type FileQueryOptions struct {
QueryOptions
FileFilter *FileFilterType
TotalDuration bool
Megapixels bool
TotalSize bool
}
type FileFilterType struct {
And *FileFilterType `json:"AND"`
Or *FileFilterType `json:"OR"`
Not *FileFilterType `json:"NOT"`
OperatorFilter[FileFilterType]
// Filter by path
Path *StringCriterionInput `json:"path"`
Basename *StringCriterionInput `json:"basename"`
Dir *StringCriterionInput `json:"dir"`
ParentFolder *HierarchicalMultiCriterionInput `json:"parent_folder"`
ZipFile *MultiCriterionInput `json:"zip_file"`
ModTime *TimestampCriterionInput `json:"mod_time"`
Duplicated *PHashDuplicationCriterionInput `json:"duplicated"`
Hashes []*FingerprintFilterInput `json:"hashes"`
VideoFileFilter *VideoFileFilterInput `json:"video_file_filter"`
ImageFileFilter *ImageFileFilterInput `json:"image_file_filter"`
SceneCount *IntCriterionInput `json:"scene_count"`
ImageCount *IntCriterionInput `json:"image_count"`
GalleryCount *IntCriterionInput `json:"gallery_count"`
ScenesFilter *SceneFilterType `json:"scenes_filter"`
ImagesFilter *ImageFilterType `json:"images_filter"`
GalleriesFilter *GalleryFilterType `json:"galleries_filter"`
CreatedAt *TimestampCriterionInput `json:"created_at"`
UpdatedAt *TimestampCriterionInput `json:"updated_at"`
}
func PathsFileFilter(paths []string) *FileFilterType {
@ -53,10 +73,10 @@ func PathsFileFilter(paths []string) *FileFilterType {
}
type FileQueryResult struct {
// can't use QueryResult because id type is wrong
IDs []FileID
Count int
QueryResult[FileID]
TotalDuration float64
Megapixels float64
TotalSize int64
getter FileGetter
files []File

View file

@ -200,3 +200,31 @@ type CustomFieldCriterionInput struct {
Value []any `json:"value"`
Modifier CriterionModifier `json:"modifier"`
}
type FingerprintFilterInput struct {
Type string `json:"type"`
Value string `json:"value"`
// Hamming distance - defaults to 0
Distance *int `json:"distance,omitempty"`
}
type VideoFileFilterInput struct {
Format *StringCriterionInput `json:"format,omitempty"`
Resolution *ResolutionCriterionInput `json:"resolution,omitempty"`
Orientation *OrientationCriterionInput `json:"orientation,omitempty"`
Framerate *IntCriterionInput `json:"framerate,omitempty"`
Bitrate *IntCriterionInput `json:"bitrate,omitempty"`
VideoCodec *StringCriterionInput `json:"video_codec,omitempty"`
AudioCodec *StringCriterionInput `json:"audio_codec,omitempty"`
// in seconds
Duration *IntCriterionInput `json:"duration,omitempty"`
Captions *StringCriterionInput `json:"captions,omitempty"`
Interactive *bool `json:"interactive,omitempty"`
InteractiveSpeed *IntCriterionInput `json:"interactive_speed,omitempty"`
}
type ImageFileFilterInput struct {
Format *StringCriterionInput `json:"format,omitempty"`
Resolution *ResolutionCriterionInput `json:"resolution,omitempty"`
Orientation *OrientationCriterionInput `json:"orientation,omitempty"`
}

92
pkg/models/folder.go Normal file
View file

@ -0,0 +1,92 @@
package models
import (
"context"
"path/filepath"
"strings"
)
type FolderQueryOptions struct {
QueryOptions
FolderFilter *FolderFilterType
TotalDuration bool
Megapixels bool
TotalSize bool
}
type FolderFilterType struct {
OperatorFilter[FolderFilterType]
Path *StringCriterionInput `json:"path,omitempty"`
Basename *StringCriterionInput `json:"basename,omitempty"`
// Filter by parent directory path
Dir *StringCriterionInput `json:"dir,omitempty"`
ParentFolder *HierarchicalMultiCriterionInput `json:"parent_folder,omitempty"`
ZipFile *MultiCriterionInput `json:"zip_file,omitempty"`
// Filter by modification time
ModTime *TimestampCriterionInput `json:"mod_time,omitempty"`
GalleryCount *IntCriterionInput `json:"gallery_count,omitempty"`
// Filter by files that meet this criteria
FilesFilter *FileFilterType `json:"files_filter,omitempty"`
// Filter by related galleries that meet this criteria
GalleriesFilter *GalleryFilterType `json:"galleries_filter,omitempty"`
// Filter by creation time
CreatedAt *TimestampCriterionInput `json:"created_at,omitempty"`
// Filter by last update time
UpdatedAt *TimestampCriterionInput `json:"updated_at,omitempty"`
}
func PathsFolderFilter(paths []string) *FileFilterType {
if paths == nil {
return nil
}
sep := string(filepath.Separator)
var ret *FileFilterType
var or *FileFilterType
for _, p := range paths {
newOr := &FileFilterType{}
if or != nil {
or.Or = newOr
} else {
ret = newOr
}
or = newOr
if !strings.HasSuffix(p, sep) {
p += sep
}
or.Path = &StringCriterionInput{
Modifier: CriterionModifierEquals,
Value: p + "%",
}
}
return ret
}
type FolderQueryResult struct {
QueryResult[FolderID]
getter FolderGetter
folders []*Folder
resolveErr error
}
func NewFolderQueryResult(folderGetter FolderGetter) *FolderQueryResult {
return &FolderQueryResult{
getter: folderGetter,
}
}
func (r *FolderQueryResult) Resolve(ctx context.Context) ([]*Folder, error) {
// cache results
if r.folders == nil && r.resolveErr == nil {
r.folders, r.resolveErr = r.getter.FindMany(ctx, r.IDs)
}
return r.folders, r.resolveErr
}
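
A simplified, standalone illustration of how PathsFolderFilter (earlier in this file) chains one path criterion per library path through Or links; the real code uses FileFilterType and StringCriterionInput rather than the toy struct below.

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// filter is a stand-in for FileFilterType, reduced to the fields needed
// to show the Or-chaining of "path starts with <library path>" criteria.
type filter struct {
	Path string
	Or   *filter
}

func pathsFilter(paths []string) *filter {
	sep := string(filepath.Separator)
	var ret, or *filter
	for _, p := range paths {
		newOr := &filter{}
		if or != nil {
			or.Or = newOr
		} else {
			ret = newOr
		}
		or = newOr
		if !strings.HasSuffix(p, sep) {
			p += sep
		}
		or.Path = p + "%"
	}
	return ret
}

func main() {
	for f := pathsFilter([]string{"/library/a", "/library/b"}); f != nil; f = f.Or {
		fmt.Println(f.Path) // /library/a/%  then  /library/b/%
	}
}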

View file

@ -59,6 +59,10 @@ type GalleryFilterType struct {
StudiosFilter *StudioFilterType `json:"studios_filter"`
// Filter by related tags that meet this criteria
TagsFilter *TagFilterType `json:"tags_filter"`
// Filter by related files that meet this criteria
FilesFilter *FileFilterType `json:"files_filter"`
// Filter by related folders that meet this criteria
FoldersFilter *FolderFilterType `json:"folders_filter"`
// Filter by created at
CreatedAt *TimestampCriterionInput `json:"created_at"`
// Filter by updated at

View file

@ -23,6 +23,8 @@ type GroupFilterType struct {
TagCount *IntCriterionInput `json:"tag_count"`
// Filter by date
Date *DateCriterionInput `json:"date"`
// Filter by O counter
OCounter *IntCriterionInput `json:"o_counter"`
// Filter by containing groups
ContainingGroups *HierarchicalMultiCriterionInput `json:"containing_groups"`
// Filter by sub groups

View file

@ -57,6 +57,8 @@ type ImageFilterType struct {
StudiosFilter *StudioFilterType `json:"studios_filter"`
// Filter by related tags that meet this criteria
TagsFilter *TagFilterType `json:"tags_filter"`
// Filter by related files that meet this criteria
FilesFilter *FileFilterType `json:"files_filter"`
// Filter by created at
CreatedAt *TimestampCriterionInput `json:"created_at"`
// Filter by updated at
@ -106,7 +108,7 @@ type ImageQueryOptions struct {
}
type ImageQueryResult struct {
QueryResult
QueryResult[int]
Megapixels float64
TotalSize float64

View file

@ -12,7 +12,7 @@ import (
type Studio struct {
Name string `json:"name,omitempty"`
URL string `json:"url,omitempty"`
URLs []string `json:"urls,omitempty"`
ParentStudio string `json:"parent_studio,omitempty"`
Image string `json:"image,omitempty"`
CreatedAt json.JSONTime `json:"created_at,omitempty"`
@ -24,6 +24,9 @@ type Studio struct {
StashIDs []models.StashID `json:"stash_ids,omitempty"`
Tags []string `json:"tags,omitempty"`
IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"`
// deprecated - for import only
URL string `json:"url,omitempty"`
}
func (s Studio) Filename() string {

View file

@ -6,20 +6,22 @@ import (
jsoniter "github.com/json-iterator/go"
"github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/json"
)
type Tag struct {
Name string `json:"name,omitempty"`
SortName string `json:"sort_name,omitempty"`
Description string `json:"description,omitempty"`
Favorite bool `json:"favorite,omitempty"`
Aliases []string `json:"aliases,omitempty"`
Image string `json:"image,omitempty"`
Parents []string `json:"parents,omitempty"`
IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"`
CreatedAt json.JSONTime `json:"created_at,omitempty"`
UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
Name string `json:"name,omitempty"`
SortName string `json:"sort_name,omitempty"`
Description string `json:"description,omitempty"`
Favorite bool `json:"favorite,omitempty"`
Aliases []string `json:"aliases,omitempty"`
Image string `json:"image,omitempty"`
Parents []string `json:"parents,omitempty"`
IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"`
StashIDs []models.StashID `json:"stash_ids,omitempty"`
CreatedAt json.JSONTime `json:"created_at,omitempty"`
UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
}
func (s Tag) Filename() string {

View file

@ -178,6 +178,52 @@ func (_m *FolderReaderWriter) FindByZipFileID(ctx context.Context, zipFileID mod
return r0, r1
}
// FindMany provides a mock function with given fields: ctx, id
func (_m *FolderReaderWriter) FindMany(ctx context.Context, id []models.FolderID) ([]*models.Folder, error) {
ret := _m.Called(ctx, id)
var r0 []*models.Folder
if rf, ok := ret.Get(0).(func(context.Context, []models.FolderID) []*models.Folder); ok {
r0 = rf(ctx, id)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Folder)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, []models.FolderID) error); ok {
r1 = rf(ctx, id)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// Query provides a mock function with given fields: ctx, options
func (_m *FolderReaderWriter) Query(ctx context.Context, options models.FolderQueryOptions) (*models.FolderQueryResult, error) {
ret := _m.Called(ctx, options)
var r0 *models.FolderQueryResult
if rf, ok := ret.Get(0).(func(context.Context, models.FolderQueryOptions) *models.FolderQueryResult); ok {
r0 = rf(ctx, options)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*models.FolderQueryResult)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, models.FolderQueryOptions) error); ok {
r1 = rf(ctx, options)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// Update provides a mock function with given fields: ctx, f
func (_m *FolderReaderWriter) Update(ctx context.Context, f *models.Folder) error {
ret := _m.Called(ctx, f)

View file

@ -594,6 +594,27 @@ func (_m *ImageReaderWriter) OCountByPerformerID(ctx context.Context, performerI
return r0, r1
}
// OCountByStudioID provides a mock function with given fields: ctx, studioID
func (_m *ImageReaderWriter) OCountByStudioID(ctx context.Context, studioID int) (int, error) {
ret := _m.Called(ctx, studioID)
var r0 int
if rf, ok := ret.Get(0).(func(context.Context, int) int); ok {
r0 = rf(ctx, studioID)
} else {
r0 = ret.Get(0).(int)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
r1 = rf(ctx, studioID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// Query provides a mock function with given fields: ctx, options
func (_m *ImageReaderWriter) Query(ctx context.Context, options models.ImageQueryOptions) (*models.ImageQueryResult, error) {
ret := _m.Called(ctx, options)

View file

@ -1141,6 +1141,27 @@ func (_m *SceneReaderWriter) HasCover(ctx context.Context, sceneID int) (bool, e
return r0, r1
}
// OCountByGroupID provides a mock function with given fields: ctx, groupID
func (_m *SceneReaderWriter) OCountByGroupID(ctx context.Context, groupID int) (int, error) {
ret := _m.Called(ctx, groupID)
var r0 int
if rf, ok := ret.Get(0).(func(context.Context, int) int); ok {
r0 = rf(ctx, groupID)
} else {
r0 = ret.Get(0).(int)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
r1 = rf(ctx, groupID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// OCountByPerformerID provides a mock function with given fields: ctx, performerID
func (_m *SceneReaderWriter) OCountByPerformerID(ctx context.Context, performerID int) (int, error) {
ret := _m.Called(ctx, performerID)
@ -1162,6 +1183,27 @@ func (_m *SceneReaderWriter) OCountByPerformerID(ctx context.Context, performerI
return r0, r1
}
// OCountByStudioID provides a mock function with given fields: ctx, studioID
func (_m *SceneReaderWriter) OCountByStudioID(ctx context.Context, studioID int) (int, error) {
ret := _m.Called(ctx, studioID)
var r0 int
if rf, ok := ret.Get(0).(func(context.Context, int) int); ok {
r0 = rf(ctx, studioID)
} else {
r0 = ret.Get(0).(int)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
r1 = rf(ctx, studioID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// PlayDuration provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) PlayDuration(ctx context.Context) (float64, error) {
ret := _m.Called(ctx)

View file

@ -360,6 +360,29 @@ func (_m *StudioReaderWriter) GetTagIDs(ctx context.Context, relatedID int) ([]i
return r0, r1
}
// GetURLs provides a mock function with given fields: ctx, relatedID
func (_m *StudioReaderWriter) GetURLs(ctx context.Context, relatedID int) ([]string, error) {
ret := _m.Called(ctx, relatedID)
var r0 []string
if rf, ok := ret.Get(0).(func(context.Context, int) []string); ok {
r0 = rf(ctx, relatedID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]string)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
r1 = rf(ctx, relatedID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// HasImage provides a mock function with given fields: ctx, studioID
func (_m *StudioReaderWriter) HasImage(ctx context.Context, studioID int) (bool, error) {
ret := _m.Called(ctx, studioID)

View file

@ -427,6 +427,29 @@ func (_m *TagReaderWriter) FindBySceneMarkerID(ctx context.Context, sceneMarkerI
return r0, r1
}
// FindByStashID provides a mock function with given fields: ctx, stashID
func (_m *TagReaderWriter) FindByStashID(ctx context.Context, stashID models.StashID) ([]*models.Tag, error) {
ret := _m.Called(ctx, stashID)
var r0 []*models.Tag
if rf, ok := ret.Get(0).(func(context.Context, models.StashID) []*models.Tag); ok {
r0 = rf(ctx, stashID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Tag)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, models.StashID) error); ok {
r1 = rf(ctx, stashID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// FindByStudioID provides a mock function with given fields: ctx, studioID
func (_m *TagReaderWriter) FindByStudioID(ctx context.Context, studioID int) ([]*models.Tag, error) {
ret := _m.Called(ctx, studioID)
@ -565,6 +588,29 @@ func (_m *TagReaderWriter) GetParentIDs(ctx context.Context, relatedID int) ([]i
return r0, r1
}
// GetStashIDs provides a mock function with given fields: ctx, relatedID
func (_m *TagReaderWriter) GetStashIDs(ctx context.Context, relatedID int) ([]models.StashID, error) {
ret := _m.Called(ctx, relatedID)
var r0 []models.StashID
if rf, ok := ret.Get(0).(func(context.Context, int) []models.StashID); ok {
r0 = rf(ctx, relatedID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]models.StashID)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
r1 = rf(ctx, relatedID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// HasImage provides a mock function with given fields: ctx, tagID
func (_m *TagReaderWriter) HasImage(ctx context.Context, tagID int) (bool, error) {
ret := _m.Called(ctx, tagID)

View file

@ -79,6 +79,14 @@ func (i FileID) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(i.String()))
}
func FileIDsFromInts(ids []int) []FileID {
ret := make([]FileID, len(ids))
for i, id := range ids {
ret[i] = FileID(id)
}
return ret
}
// DirEntry represents a file or directory in the file system.
type DirEntry struct {
ZipFileID *FileID `json:"zip_file_id"`
@ -252,6 +260,10 @@ func (f ImageFile) GetHeight() int {
return f.Height
}
func (f ImageFile) Megapixels() float64 {
return float64(f.Width*f.Height) / 1e6
}
func (f ImageFile) GetFormat() string {
return f.Format
}

View file

@ -35,6 +35,14 @@ func (i FolderID) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(i.String()))
}
func FolderIDsFromInts(ids []int) []FolderID {
ret := make([]FolderID, len(ids))
for i, id := range ids {
ret[i] = FolderID(id)
}
return ret
}
// Folder represents a folder in the file system.
type Folder struct {
ID FolderID `json:"id"`

View file

@ -30,6 +30,7 @@ type SceneMarkerPartial struct {
Seconds OptionalFloat64
EndSeconds OptionalFloat64
PrimaryTagID OptionalInt
TagIDs *UpdateIDs
SceneID OptionalInt
CreatedAt OptionalTime
UpdatedAt OptionalTime

View file

@ -14,10 +14,14 @@ type ScrapedStudio struct {
// Set if studio matched
StoredID *string `json:"stored_id"`
Name string `json:"name"`
URL *string `json:"url"`
URL *string `json:"url"` // deprecated
URLs []string `json:"urls"`
Parent *ScrapedStudio `json:"parent"`
Image *string `json:"image"`
Images []string `json:"images"`
Details *string `json:"details"`
Aliases *string `json:"aliases"`
Tags []*ScrapedTag `json:"tags"`
RemoteSiteID *string `json:"remote_site_id"`
}
@ -38,8 +42,28 @@ func (s *ScrapedStudio) ToStudio(endpoint string, excluded map[string]bool) *Stu
})
}
if s.URL != nil && !excluded["url"] {
ret.URL = *s.URL
// if URLs are provided, only use those
if len(s.URLs) > 0 {
if !excluded["urls"] {
ret.URLs = NewRelatedStrings(s.URLs)
}
} else {
urls := []string{}
if s.URL != nil && !excluded["url"] {
urls = append(urls, *s.URL)
}
if len(urls) > 0 {
ret.URLs = NewRelatedStrings(urls)
}
}
if s.Details != nil && !excluded["details"] {
ret.Details = *s.Details
}
if s.Aliases != nil && *s.Aliases != "" && !excluded["aliases"] {
ret.Aliases = NewRelatedStrings(stringslice.FromString(*s.Aliases, ","))
}
if s.Parent != nil && s.Parent.StoredID != nil && !excluded["parent"] && !excluded["parent_studio"] {
@ -74,8 +98,36 @@ func (s *ScrapedStudio) ToPartial(id string, endpoint string, excluded map[strin
ret.Name = NewOptionalString(s.Name)
}
if s.URL != nil && !excluded["url"] {
ret.URL = NewOptionalString(*s.URL)
if len(s.URLs) > 0 {
if !excluded["urls"] {
ret.URLs = &UpdateStrings{
Values: s.URLs,
Mode: RelationshipUpdateModeSet,
}
}
} else {
urls := []string{}
if s.URL != nil && !excluded["url"] {
urls = append(urls, *s.URL)
}
if len(urls) > 0 {
ret.URLs = &UpdateStrings{
Values: urls,
Mode: RelationshipUpdateModeSet,
}
}
}
if s.Details != nil && !excluded["details"] {
ret.Details = NewOptionalString(*s.Details)
}
if s.Aliases != nil && !excluded["aliases"] {
ret.Aliases = &UpdateStrings{
Values: stringslice.FromString(*s.Aliases, ","),
Mode: RelationshipUpdateModeSet,
}
}
if s.Parent != nil && !excluded["parent"] {
@ -395,12 +447,31 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool,
type ScrapedTag struct {
// Set if tag matched
StoredID *string `json:"stored_id"`
Name string `json:"name"`
StoredID *string `json:"stored_id"`
Name string `json:"name"`
RemoteSiteID *string `json:"remote_site_id"`
}
func (ScrapedTag) IsScrapedContent() {}
func (t *ScrapedTag) ToTag(endpoint string, excluded map[string]bool) *Tag {
currentTime := time.Now()
ret := NewTag()
ret.Name = t.Name
if t.RemoteSiteID != nil && endpoint != "" {
ret.StashIDs = NewRelatedStashIDs([]StashID{
{
Endpoint: endpoint,
StashID: *t.RemoteSiteID,
UpdatedAt: currentTime,
},
})
}
return &ret
}
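
A small sketch of the new ScrapedTag.ToTag helper; the remote ID and endpoint values are placeholders.

package main

import (
	"fmt"

	"github.com/stashapp/stash/pkg/models"
)

func main() {
	remoteID := "00000000-0000-0000-0000-000000000000" // hypothetical stash-box tag ID
	scraped := models.ScrapedTag{Name: "Example Tag", RemoteSiteID: &remoteID}

	// Endpoint value is illustrative; any configured stash-box endpoint applies.
	t := scraped.ToTag("https://stashdb.org/graphql", nil)

	// t.StashIDs now carries the endpoint/remote ID pair, so later scrapes can
	// match this tag by stash ID instead of by name.
	fmt.Println(t.Name)
}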
func ScrapedTagSortFunction(a, b *ScrapedTag) int {
return strings.Compare(strings.ToLower(a.Name), strings.ToLower(b.Name))
}
@ -462,6 +533,7 @@ type ScrapedGroup struct {
Date *string `json:"date"`
Rating *string `json:"rating"`
Director *string `json:"director"`
URL *string `json:"url"` // included for backward compatibility
URLs []string `json:"urls"`
Synopsis *string `json:"synopsis"`
Studio *ScrapedStudio `json:"studio"`

Some files were not shown because too many files have changed in this diff.