Mirror of https://github.com/stashapp/stash.git (synced 2025-12-07 00:43:12 +01:00)
Compare commits
122 commits
Commit SHA1s:

061d21dede, 88a149c085, d994df2900, 39fd8a6550, 877491e62b, 3d044896ad, 63e8830db4, 0bc4faef2a,
ee61fc879b, e02ef436a5, 41f0612025, 730e877e73, e213fde0cc, 69fd073d5d, 5f16547e58, 90dd0b58d8,
4017c42fe2, 49fd47562e, 84e24eb612, c6ae43c1d6, de8139cf1b, 0ca416f75a, 1bc32a3099, d1ee64d36f,
e052a431d1, 7e66ce8a49, 88747b962a, 97c01c70b3, a3ed381901, b3da730a05, e0c1d4c51d, 90d1b2df2d,
4ef3a605dd, f811590021, 0bd78f4b62, a8bb9ae4d3, d10995302d, d14053b570, ca357b9eb3, 6892c7151c,
d6a2953371, 50ad3c0778, dc520e2b2f, ecd9c6ec5b, ca8ee6bc2a, 5d02f916c2, e176cf5f71, 2cac7d5b20,
58b6833380, 68ebeda5c8, 2332401dbf, 33b59e02af, 367b96df0f, a31df336f8, 78aeb06f20, 2f65a1da3e,
51999135be, bb56b619f5, a590caa3d3, 0a05a0b45b, 9ef2169055, 1ec8d4afe5, 15db2da361, 892858a803,
bc91ca0a25, d743787bb3, 957c4fe1b5, e3b3fbbf63, c99825a453, a08d2e258a, b2c8f09585, 5e34df7b7b,
678b3de7c8, f434c1f529, 12a9a0b5f6, 34becdf436, d5b1046267, 2e766952dd, 1cc983fb5b, a76e515112,
1a9a62eae9, 638ebfc319, 53655e51c4, 289b698598, b4d148bdb0, 600cb15102, d52b6afd4a, 96a7e087f2,
20fa5d3146, 095e5d50ab, 42f76ca34f, a50a0d4289, 04fcf6f512, 7716c4dd87, 2925325e68, d831e4573c,
1b864f28f6, 8c4b607454, 2a2a730296, beee37bc38, 9be0cc3210, f2a787a2ba, 6cace4ff88, fa2fd31ac7,
1b2b4c5221, 336fa3b70e, 299e1ac1f9, fb7bd89834, f04be76224, db79cf9bb1, 90baa31ee3, 9b8300e882,
d70ff551d4, 1dccecc39c, 648875995c, 96b5a9448c, fda97e7f6c, 869cbd496b, 5049d6e5c9, 98df51755e,
947a17355c, 71e4071871
389 changed files with 22118 additions and 12437 deletions
13  .github/workflows/build.yml (vendored)

@@ -2,7 +2,10 @@ name: Build
 on:
   push:
-    branches: [ develop, master ]
+    branches:
+      - develop
+      - master
+      - 'releases/**'
   pull_request:
   release:
     types: [ published ]
@@ -12,7 +15,7 @@ concurrency:
   cancel-in-progress: true

 env:
-  COMPILER_IMAGE: stashapp/compiler:11
+  COMPILER_IMAGE: stashapp/compiler:12

 jobs:
   build:
@@ -37,7 +40,7 @@ jobs:
           cache-name: cache-node_modules
         with:
           path: ui/v2.5/node_modules
-          key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/yarn.lock') }}
+          key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/pnpm-lock.yaml') }}

       - name: Cache UI build
         uses: actions/cache@v3
@@ -46,7 +49,7 @@ jobs:
           cache-name: cache-ui
         with:
           path: ui/v2.5/build
-          key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/yarn.lock', 'ui/v2.5/public/**', 'ui/v2.5/src/**', 'graphql/**/*.graphql') }}
+          key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('ui/v2.5/pnpm-lock.yaml', 'ui/v2.5/public/**', 'ui/v2.5/src/**', 'graphql/**/*.graphql') }}

       - name: Cache go build
         uses: actions/cache@v3
@@ -65,7 +68,7 @@ jobs:
           docker run -d --name build --mount type=bind,source="$(pwd)",target=/stash,consistency=delegated --mount type=bind,source="$(pwd)/.go-cache",target=/root/.cache/go-build,consistency=delegated --env OFFICIAL_BUILD=${{ env.official-build }} -w /stash $COMPILER_IMAGE tail -f /dev/null

       - name: Pre-install
-        run: docker exec -t build /bin/bash -c "make pre-ui"
+        run: docker exec -t build /bin/bash -c "make CI=1 pre-ui"

       - name: Generate
         run: docker exec -t build /bin/bash -c "make generate"
3  .github/workflows/golangci-lint.yml (vendored)

@@ -6,10 +6,11 @@ on:
     branches:
       - master
       - develop
+      - 'releases/**'
   pull_request:

 env:
-  COMPILER_IMAGE: stashapp/compiler:11
+  COMPILER_IMAGE: stashapp/compiler:12

 jobs:
   golangci:
(IntelliJ module file — path not captured)

@@ -1,5 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <module type="WEB_MODULE" version="4">
+  <component name="Go" enabled="true" />
   <component name="NewModuleRootManager">
     <content url="file://$MODULE_DIR$">
       <excludeFolder url="file://$MODULE_DIR$/certs" />
@@ -10,4 +11,4 @@
     <orderEntry type="inheritedJdk" />
     <orderEntry type="sourceFolder" forTests="false" />
   </component>
 </module>
34  Makefile

@@ -275,7 +275,7 @@ generate: generate-backend generate-ui

 .PHONY: generate-ui
 generate-ui:
-	cd ui/v2.5 && yarn run gqlgen
+	cd ui/v2.5 && npm run gqlgen

 .PHONY: generate-backend
 generate-backend: touch-ui
@@ -338,9 +338,19 @@ server-clean:

 # installs UI dependencies. Run when first cloning repository, or if UI
 # dependencies have changed
+# If CI is set, configures pnpm to use a local store to avoid
+# putting .pnpm-store in /stash
+# NOTE: to run in the docker build container, using the existing
+# node_modules folder, rename the .modules.yaml to .modules.yaml.bak
+# and a new one will be generated. This will need to be reversed after
+# building.
 .PHONY: pre-ui
 pre-ui:
-	cd ui/v2.5 && yarn install --frozen-lockfile
+ifdef CI
+	cd ui/v2.5 && pnpm config set store-dir ~/.pnpm-store && pnpm install --frozen-lockfile
+else
+	cd ui/v2.5 && pnpm install --frozen-lockfile
+endif

 .PHONY: ui-env
 ui-env: build-info
@@ -359,7 +369,7 @@ ui: ui-only generate-login-locale

 .PHONY: ui-only
 ui-only: ui-env
-	cd ui/v2.5 && yarn build
+	cd ui/v2.5 && npm run build

 .PHONY: zip-ui
 zip-ui:
@@ -368,20 +378,24 @@ zip-ui:

 .PHONY: ui-start
 ui-start: ui-env
-	cd ui/v2.5 && yarn start --host
+	cd ui/v2.5 && npm run start -- --host

 .PHONY: fmt-ui
 fmt-ui:
-	cd ui/v2.5 && yarn format
+	cd ui/v2.5 && npm run format

 # runs all of the frontend PR-acceptance steps
 .PHONY: validate-ui
 validate-ui:
-	cd ui/v2.5 && yarn run validate
+	cd ui/v2.5 && npm run validate

 # these targets run the same steps as fmt-ui and validate-ui, but only on files that have changed
 fmt-ui-quick:
-	cd ui/v2.5 && yarn run prettier --write $$(git diff --name-only --relative --diff-filter d . ../../graphql)
+	cd ui/v2.5 && \
+	files=$$(git diff --name-only --relative --diff-filter d . ../../graphql); \
+	if [ -n "$$files" ]; then \
+		npm run prettier -- --write $$files; \
+	fi

 # does not run tsc checks, as they are slow
 validate-ui-quick:
@@ -389,9 +403,9 @@ validate-ui-quick:
 	tsfiles=$$(git diff --name-only --relative --diff-filter d src | grep -e "\.tsx\?\$$"); \
 	scssfiles=$$(git diff --name-only --relative --diff-filter d src | grep "\.scss"); \
 	prettyfiles=$$(git diff --name-only --relative --diff-filter d . ../../graphql); \
-	if [ -n "$$tsfiles" ]; then yarn run eslint $$tsfiles; fi && \
-	if [ -n "$$scssfiles" ]; then yarn run stylelint $$scssfiles; fi && \
-	if [ -n "$$prettyfiles" ]; then yarn run prettier --check $$prettyfiles; fi
+	if [ -n "$$tsfiles" ]; then npm run eslint -- $$tsfiles; fi && \
+	if [ -n "$$scssfiles" ]; then npm run stylelint -- $$scssfiles; fi && \
+	if [ -n "$$prettyfiles" ]; then npm run prettier -- --check $$prettyfiles; fi

 # runs all of the backend PR-acceptance steps
 .PHONY: validate-backend
77  README.md

@@ -9,8 +9,9 @@
 [](https://github.com/stashapp/stash/releases/latest)
 [](https://github.com/stashapp/stash/labels/bounty)

-### **Stash is a self-hosted webapp written in Go which organizes and serves your porn.**
+### **Stash is a self-hosted webapp written in Go which organizes and serves your diverse content collection, catering to both your SFW and NSFW needs.**

 * Stash gathers information about videos in your collection from the internet, and is extensible through the use of community-built plugins for a large number of content producers and sites.
 * Stash supports a wide variety of both video and image formats.
@@ -19,80 +20,88 @@

 You can [watch a SFW demo video](https://vimeo.com/545323354) to see it in action.

-For further information you can consult the [documentation](https://docs.stashapp.cc) or [read the in-app manual](ui/v2.5/src/docs/en).
+For further information you can consult the [documentation](https://docs.stashapp.cc) or access the in-app manual from within the application (also available at [docs.stashapp.cc/in-app-manual](https://docs.stashapp.cc/in-app-manual)).

 # Installing Stash

+Step-by-step instructions are available at [docs.stashapp.cc/installation](https://docs.stashapp.cc/installation/).
+
 #### Windows Users:

-As of version 0.27.0, Stash doesn't support anymore _Windows 7, 8, Server 2008 and Server 2012._
-Windows 10 or Server 2016 are at least required.
+As of version 0.27.0, Stash no longer supports _Windows 7, 8, Server 2008 and Server 2012._
+At least Windows 10 or Server 2016 is required.

 #### Mac Users:

-As of version 0.29.0, Stash requires at least _macOS 11 Big Sur._
-Stash can still be ran through docker on older versions of macOS
+As of version 0.29.0, Stash requires _macOS 11 Big Sur_ or later.
+Stash can still be run through docker on older versions of macOS.

 <img src="docs/readme_assets/windows_logo.svg" width="100%" height="75"> Windows | <img src="docs/readme_assets/mac_logo.svg" width="100%" height="75"> macOS | <img src="docs/readme_assets/linux_logo.svg" width="100%" height="75"> Linux | <img src="docs/readme_assets/docker_logo.svg" width="100%" height="75"> Docker
 :---:|:---:|:---:|:---:
 [Latest Release](https://github.com/stashapp/stash/releases/latest/download/stash-win.exe) <br /> <sup><sub>[Development Preview](https://github.com/stashapp/stash/releases/download/latest_develop/stash-win.exe)</sub></sup> | [Latest Release](https://github.com/stashapp/stash/releases/latest/download/Stash.app.zip) <br /> <sup><sub>[Development Preview](https://github.com/stashapp/stash/releases/download/latest_develop/Stash.app.zip)</sub></sup> | [Latest Release (amd64)](https://github.com/stashapp/stash/releases/latest/download/stash-linux) <br /> <sup><sub>[Development Preview (amd64)](https://github.com/stashapp/stash/releases/download/latest_develop/stash-linux)</sub></sup> <br /> [More Architectures...](https://github.com/stashapp/stash/releases/latest) | [Instructions](docker/production/README.md) <br /> <sup><sub>[Sample docker-compose.yml](docker/production/docker-compose.yml)</sub></sup>

-Download links for other platforms and architectures are available on the [Releases page](https://github.com/stashapp/stash/releases).
+Download links for other platforms and architectures are available on the [Releases](https://github.com/stashapp/stash/releases) page.

 ## First Run

 #### Windows/macOS Users: Security Prompt

-On Windows or macOS, running the app might present a security prompt since the binary isn't yet signed.
+On Windows or macOS, running the app might present a security prompt since the application binary isn't yet signed.

-On Windows, bypass this by clicking "more info" and then the "run anyway" button. On macOS, Control+Click the app, click "Open", and then "Open" again.
+- On Windows, bypass this by clicking "more info" and then the "run anyway" button.
+- On macOS, Control+Click the app, click "Open", and then "Open" again.

-#### FFmpeg
-Stash requires FFmpeg. If you don't have it installed, Stash will download a copy for you. It is recommended that Linux users install `ffmpeg` from their distro's package manager.
+#### ffmpeg
+
+Stash requires FFmpeg. If you don't have it installed, Stash will prompt you to download a copy during setup. It is recommended that Linux users install `ffmpeg` from their distro's package manager.

 # Usage

 ## Quickstart Guide
-Stash is a web-based application. Once the application is running, the interface is available (by default) from http://localhost:9999.
+
+Stash is a web-based application. Once the application is running, the interface is available (by default) from `http://localhost:9999`.

 On first run, Stash will prompt you for some configuration options and media directories to index, called "Scanning" in Stash. After scanning, your media will be available for browsing, curating, editing, and tagging.

 Stash can pull metadata (performers, tags, descriptions, studios, and more) directly from many sites through the use of [scrapers](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/docs/en/Manual/Scraping.md), which integrate directly into Stash. Identifying an entire collection will typically require a mix of multiple sources:
-- The project maintains [StashDB](https://stashdb.org/), a crowd-sourced repository of scene, studio, and performer information. Connecting it to Stash will allow you to automatically identify much of a typical media collection. It runs on our stash-box software and is primarily focused on mainstream digital scenes and studios. Instructions, invite codes, and more can be found in this guide to [Accessing StashDB](https://guidelines.stashdb.org/docs/faq_getting-started/stashdb/accessing-stashdb/).
+- The stashapp team maintains [StashDB](https://stashdb.org/), a crowd-sourced repository of scene, studio, and performer information. Connecting it to Stash will allow you to automatically identify much of a typical media collection. It runs on our stash-box software and is primarily focused on mainstream digital scenes and studios. Instructions, invite codes, and more can be found in this guide to [Accessing StashDB](https://guidelines.stashdb.org/docs/faq_getting-started/stashdb/accessing-stashdb/).
 - Several community-managed stash-box databases can also be connected to Stash in a similar manner. Each one serves a slightly different niche and follows their own methodology. A rundown of each stash-box, their differences, and the information you need to sign up can be found in this guide to [Accessing Stash-Boxes](https://guidelines.stashdb.org/docs/faq_getting-started/stashdb/accessing-stash-boxes/).
-- Many community-maintained scrapers can also be downloaded, installed, and updated from within Stash, allowing you to pull data from a wide range of other websites and databases. They can be found by navigating to Settings -> Metadata Providers -> Available Scrapers -> Community (stable). These can be trickier to use than a stash-box because every scraper works a little differently. For more information, please visit the [CommunityScrapers repository](https://github.com/stashapp/CommunityScrapers).
+- Many community-maintained scrapers can also be downloaded, installed, and updated from within Stash, allowing you to pull data from a wide range of other websites and databases. They can be found by navigating to `Settings → Metadata Providers → Available Scrapers → Community (stable)`. These can be trickier to use than a stash-box because every scraper works a little differently. For more information, please visit the [CommunityScrapers repository](https://github.com/stashapp/CommunityScrapers).
 - All of the above methods of scraping data into Stash are also covered in more detail in our [Guide to Scraping](https://docs.stashapp.cc/beginner-guides/guide-to-scraping/).

 <sub>[StashDB](http://stashdb.org) is the canonical instance of our open source metadata API, [stash-box](https://github.com/stashapp/stash-box).</sub>

 # Translation

 [](https://translate.codeberg.org/engage/stash/)

-Stash is available in 32 languages (so far!) and it could be in your language too. We use Weblate to coordinate community translations. If you want to help us translate Stash into your language, you can make an account at [Codeberg's Weblate](https://translate.codeberg.org/projects/stash/stash/) to get started contributing new languages or improving existing ones. Thanks!
+Stash is available in 32 languages (so far!) and it could be in your language too. We use Weblate to coordinate community translations. If you want to help us translate Stash, you can make an account at [Codeberg's Weblate](https://translate.codeberg.org/projects/stash/stash/) to contribute to new or existing languages. Thanks!

+The badge below shows the current translation status of Stash across all supported languages:
+
 [](https://translate.codeberg.org/engage/stash/)

-## Join Our Community
+# Support & Resources

-We are excited to announce that we have a new home for support, feature requests, and discussions related to Stash and its associated projects. Join our community on the [Discourse forum](https://discourse.stashapp.cc) to connect with other users, share your ideas, and get help from fellow enthusiasts.
+Need help or want to get involved? Start with the documentation, then reach out to the community if you need further assistance.

-# Support (FAQ)
-
-Check out our documentation on [Stash-Docs](https://docs.stashapp.cc) for information about the software, questions, guides, add-ons and more.
-
-For more help you can:
-* Check the in-app documentation, in the top right corner of the app (it's also mirrored on [Stash-Docs](https://docs.stashapp.cc/in-app-manual))
-* Join our [community forum](https://discourse.stashapp.cc)
-* Join the [Discord server](https://discord.gg/2TsNFKt)
-* Start a [discussion on GitHub](https://github.com/stashapp/stash/discussions)
-
-# Customization
-
-## Themes and CSS Customization
-
-There is a [directory of community-created themes](https://docs.stashapp.cc/themes/list) on Stash-Docs.
-
-You can also change the Stash interface to fit your desired style with various snippets from [Custom CSS snippets](https://docs.stashapp.cc/themes/custom-css-snippets).
+- Documentation
+  - Official docs: https://docs.stashapp.cc - official guides and troubleshooting.
+  - In-app manual: press <kbd>Shift</kbd> + <kbd>?</kbd> in the app or view the manual online: https://docs.stashapp.cc/in-app-manual.
+  - FAQ: https://discourse.stashapp.cc/c/support/faq/28 - common questions and answers.
+  - Community wiki: https://discourse.stashapp.cc/tags/c/community-wiki/22/stash - guides, how-to’s and tips.
+
+- Community & discussion
+  - Community forum: https://discourse.stashapp.cc - community support, feature requests and discussions.
+  - Discord: https://discord.gg/2TsNFKt - real-time chat and community support.
+  - GitHub discussions: https://github.com/stashapp/stash/discussions - community support and feature discussions.
+  - Lemmy community: https://discuss.online/c/stashapp - Reddit-style community space.
+
+- Community scrapers & plugins
+  - Metadata sources: https://docs.stashapp.cc/metadata-sources/
+  - Plugins: https://docs.stashapp.cc/plugins/
+  - Themes: https://docs.stashapp.cc/themes/
+  - Other projects: https://docs.stashapp.cc/other-projects/

 # For Developers
(Go source file — path not captured)

@@ -110,7 +110,7 @@ func main() {
 // Logs only error level message to stderr.
 func initLogTemp() *log.Logger {
 	l := log.NewLogger()
-	l.Init("", true, "Error")
+	l.Init("", true, "Error", 0)
 	logger.Logger = l

 	return l
@@ -118,7 +118,7 @@ func initLogTemp() *log.Logger {

 func initLog(cfg *config.Config) *log.Logger {
 	l := log.NewLogger()
-	l.Init(cfg.GetLogFile(), cfg.GetLogOut(), cfg.GetLogLevel())
+	l.Init(cfg.GetLogFile(), cfg.GetLogOut(), cfg.GetLogLevel(), cfg.GetLogFileMaxSize())
 	logger.Logger = l

 	return l
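Both `Init` calls above gain a fourth argument, and go.mod later in this comparison adds `gopkg.in/natefinch/lumberjack.v2`, which points at size-based log-file rotation. The sketch below is only an illustration of how a max-size value is commonly wired into lumberjack; `newRotatingWriter` and its parameters are hypothetical helpers for this example, not Stash's actual logger code.

```go
package main

import (
	"io"
	"log"
	"os"

	"gopkg.in/natefinch/lumberjack.v2"
)

// newRotatingWriter is a hypothetical helper for this example: given a file
// name and a positive maxSizeMB it returns a lumberjack writer that rotates
// the log once it grows past maxSizeMB megabytes; otherwise it falls back to
// plain stderr, which would correspond to passing 0 as the max size.
func newRotatingWriter(filename string, maxSizeMB int) io.Writer {
	if filename == "" || maxSizeMB <= 0 {
		return os.Stderr
	}
	return &lumberjack.Logger{
		Filename:   filename,
		MaxSize:    maxSizeMB, // megabytes before the file is rotated
		MaxBackups: 3,         // keep a few rotated files around
		Compress:   true,      // gzip old log files
	}
}

func main() {
	w := newRotatingWriter("stash.log", 10)
	log.New(w, "", log.LstdFlags).Println("logger initialised with rotation")
}
```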
(Dockerfile — path not captured)

@@ -1,14 +1,16 @@
 # This dockerfile should be built with `make docker-build` from the stash root.

 # Build Frontend
-FROM node:20-alpine AS frontend
+FROM node:24-alpine AS frontend
 RUN apk add --no-cache make git
 ## cache node_modules separately
-COPY ./ui/v2.5/package.json ./ui/v2.5/yarn.lock /stash/ui/v2.5/
+COPY ./ui/v2.5/package.json ./ui/v2.5/pnpm-lock.yaml /stash/ui/v2.5/
 WORKDIR /stash
 COPY Makefile /stash/
 COPY ./graphql /stash/graphql/
 COPY ./ui /stash/ui/
+# pnpm install with npm
+RUN npm install -g pnpm
 RUN make pre-ui
 RUN make generate-ui
 ARG GITHASH
(CUDA Dockerfile — path not captured)

@@ -5,11 +5,13 @@ ARG CUDA_VERSION=12.8.0
 FROM node:20-alpine AS frontend
 RUN apk add --no-cache make git
 ## cache node_modules separately
-COPY ./ui/v2.5/package.json ./ui/v2.5/yarn.lock /stash/ui/v2.5/
+COPY ./ui/v2.5/package.json ./ui/v2.5/pnpm-lock.yaml /stash/ui/v2.5/
 WORKDIR /stash
 COPY Makefile /stash/
 COPY ./graphql /stash/graphql/
 COPY ./ui /stash/ui/
+# pnpm install with npm
+RUN npm install -g pnpm
 RUN make pre-ui
 RUN make generate-ui
 ARG GITHASH
(Compiler image Dockerfile — path not captured)

@@ -8,15 +8,11 @@ RUN mkdir -p /etc/apt/keyrings

 ADD https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key nodesource.gpg.key
 RUN cat nodesource.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && rm nodesource.gpg.key
-RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
-
-ADD https://dl.yarnpkg.com/debian/pubkey.gpg yarn.gpg
-RUN cat yarn.gpg | gpg --dearmor -o /etc/apt/keyrings/yarn.gpg && rm yarn.gpg
-RUN echo "deb [signed-by=/etc/apt/keyrings/yarn.gpg] https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
+RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_24.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list

 RUN apt-get update && \
     apt-get install -y --no-install-recommends \
-    git make tar bash nodejs yarn zip \
+    git make tar bash nodejs zip \
     clang llvm-dev cmake patch libxml2-dev uuid-dev libssl-dev xz-utils \
     bzip2 gzip sed cpio libbz2-dev zlib1g-dev \
     gcc-mingw-w64 \
@@ -24,6 +20,9 @@ RUN apt-get update && \
     gcc-aarch64-linux-gnu libc-dev-arm64-cross && \
     rm -rf /var/lib/apt/lists/*;

+# pnpm install with npm
+RUN npm install -g pnpm
+
 # FreeBSD cross-compilation setup
 # https://github.com/smartmontools/docker-build/blob/6b8c92560d17d325310ba02d9f5a4b250cb0764a/Dockerfile#L66
 ENV FREEBSD_VERSION 13.4
(Compiler image Makefile — path not captured)

@@ -1,6 +1,6 @@
 user=stashapp
 repo=compiler
-version=11
+version=12

 latest:
 	docker build -t ${user}/${repo}:latest .
(Development documentation — path not captured)

@@ -5,7 +5,8 @@
 * [Go](https://golang.org/dl/)
 * [GolangCI](https://golangci-lint.run/) - A meta-linter which runs several linters in parallel
   * To install, follow the [local installation instructions](https://golangci-lint.run/welcome/install/#local-installation)
-* [Yarn](https://yarnpkg.com/en/docs/install) - Yarn package manager
+* [nodejs](https://nodejs.org/en/download) - nodejs runtime
+* corepack/[pnpm](https://pnpm.io/installation) - nodejs package manager (included with nodejs)

 ## Environment

@@ -22,32 +23,22 @@ NOTE: The `make` command in Windows will be `mingw32-make` with MinGW. For examp
 ### macOS

 1. If you don't have it already, install the [Homebrew package manager](https://brew.sh).
-2. Install dependencies: `brew install go git yarn gcc make node ffmpeg`
+2. Install dependencies: `brew install go git gcc make node ffmpeg`

 ### Linux

 #### Arch Linux

-1. Install dependencies: `sudo pacman -S go git yarn gcc make nodejs ffmpeg --needed`
+1. Install dependencies: `sudo pacman -S go git gcc make nodejs ffmpeg --needed`

 #### Ubuntu

-1. Install dependencies: `sudo apt-get install golang git yarnpkg gcc nodejs ffmpeg -y`
+1. Install dependencies: `sudo apt-get install golang git gcc nodejs ffmpeg -y`

 ### OpenBSD

-1. Install dependencies `doas pkg_add gmake go git yarn node cmake`
-2. Compile a custom ffmpeg from ports. The default ffmpeg in OpenBSD's packages is not compiled with WebP support, which is required by Stash.
-   - If you've already installed ffmpeg, uninstall it: `doas pkg_delete ffmpeg`
-   - If you haven't already, [fetch the ports tree and verify](https://www.openbsd.org/faq/ports/ports.html#PortsFetch).
-   - Find the ffmpeg port in `/usr/ports/graphics/ffmpeg`, and patch the Makefile to include libwebp
-     - Add `webp` to `WANTLIB`
-     - Add `graphics/libwebp` to the list in `LIB_DEPENDS`
-     - Add `-lwebp -lwebpdecoder -lwebpdemux -lwebpmux` to `LIBavcodec_EXTRALIBS`
-     - Add `--enable-libweb` to the list in `CONFIGURE_ARGS`
-     - If you've already built ffmpeg from ports before, you may need to also increment `REVISION`
-     - Run `doas make install`
-   - Follow the instructions below to build a release, but replace the final step `make build-release` with `gmake flags-release stash`, to [avoid the PIE buildmode](https://github.com/golang/go/issues/59866).
+1. Install dependencies `doas pkg_add gmake go git node cmake ffmpeg`
+2. Follow the instructions below to build a release, but replace the final step `make build-release` with `gmake flags-release stash`, to [avoid the PIE buildmode](https://github.com/golang/go/issues/59866).

 NOTE: The `make` command in OpenBSD will be `gmake`. For example, `make pre-ui` will be `gmake pre-ui`.
33  go.mod

@@ -15,7 +15,7 @@ require (
 	github.com/disintegration/imaging v1.6.2
 	github.com/dop251/goja v0.0.0-20231027120936-b396bb4c349d
 	github.com/doug-martin/goqu/v9 v9.18.0
-	github.com/go-chi/chi/v5 v5.0.12
+	github.com/go-chi/chi/v5 v5.2.2
 	github.com/go-chi/cors v1.2.1
 	github.com/go-chi/httplog v0.3.1
 	github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4
@@ -32,7 +32,11 @@ require (
 	github.com/json-iterator/go v1.1.12
 	github.com/kermieisinthehouse/gosx-notifier v0.1.2
 	github.com/kermieisinthehouse/systray v1.2.4
-	github.com/knadh/koanf v1.5.0
+	github.com/knadh/koanf/parsers/yaml v1.1.0
+	github.com/knadh/koanf/providers/env v1.1.0
+	github.com/knadh/koanf/providers/file v1.2.0
+	github.com/knadh/koanf/providers/posflag v1.0.1
+	github.com/knadh/koanf/v2 v2.2.1
 	github.com/lucasb-eyer/go-colorful v1.2.0
 	github.com/mattn/go-sqlite3 v1.14.22
 	github.com/mitchellh/mapstructure v1.5.0
@@ -42,7 +46,7 @@ require (
 	github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd
 	github.com/sirupsen/logrus v1.9.3
 	github.com/spf13/cast v1.6.0
-	github.com/spf13/pflag v1.0.5
+	github.com/spf13/pflag v1.0.6
 	github.com/stretchr/testify v1.10.0
 	github.com/tidwall/gjson v1.16.0
 	github.com/vearutop/statigz v1.4.0
@@ -51,14 +55,15 @@ require (
 	github.com/vektra/mockery/v2 v2.10.0
 	github.com/xWTF/chardet v0.0.0-20230208095535-c780f2ac244e
 	github.com/zencoder/go-dash/v3 v3.0.2
-	golang.org/x/crypto v0.38.0
+	golang.org/x/crypto v0.45.0
 	golang.org/x/image v0.18.0
-	golang.org/x/net v0.40.0
-	golang.org/x/sys v0.33.0
-	golang.org/x/term v0.32.0
-	golang.org/x/text v0.25.0
+	golang.org/x/net v0.47.0
+	golang.org/x/sys v0.38.0
+	golang.org/x/term v0.37.0
+	golang.org/x/text v0.31.0
 	golang.org/x/time v0.10.0
 	gopkg.in/guregu/null.v4 v4.0.0
+	gopkg.in/natefinch/lumberjack.v2 v2.2.1
 	gopkg.in/yaml.v2 v2.4.0
 )
@@ -72,9 +77,9 @@ require (
 	github.com/cpuguy83/go-md2man/v2 v2.0.7 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/dlclark/regexp2 v1.7.0 // indirect
-	github.com/fsnotify/fsnotify v1.6.0 // indirect
+	github.com/fsnotify/fsnotify v1.9.0 // indirect
 	github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect
-	github.com/go-viper/mapstructure/v2 v2.2.1 // indirect
+	github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
 	github.com/gobwas/httphead v0.1.0 // indirect
 	github.com/gobwas/pool v0.2.1 // indirect
 	github.com/gobwas/ws v1.3.0 // indirect
@@ -86,6 +91,7 @@ require (
 	github.com/hashicorp/hcl v1.0.0 // indirect
 	github.com/inconshreveable/mousetrap v1.1.0 // indirect
 	github.com/josharian/intern v1.0.0 // indirect
+	github.com/knadh/koanf/maps v0.1.2 // indirect
 	github.com/magiconair/properties v1.8.7 // indirect
 	github.com/mailru/easyjson v0.7.7 // indirect
 	github.com/mattn/go-colorable v0.1.14 // indirect
@@ -114,9 +120,10 @@ require (
 	github.com/urfave/cli/v2 v2.27.6 // indirect
 	github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
 	go.uber.org/atomic v1.11.0 // indirect
-	golang.org/x/mod v0.24.0 // indirect
-	golang.org/x/sync v0.14.0 // indirect
-	golang.org/x/tools v0.33.0 // indirect
+	go.yaml.in/yaml/v3 v3.0.3 // indirect
+	golang.org/x/mod v0.29.0 // indirect
+	golang.org/x/sync v0.18.0 // indirect
+	golang.org/x/tools v0.38.0 // indirect
 	gopkg.in/ini.v1 v1.67.0 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
 )
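The koanf change above replaces the monolithic v1 module with the split v2 layout: a core module plus separate parser and provider modules. As a rough sketch of how those new modules fit together — the `config.yml` path and the `STASH_` environment prefix are illustrative assumptions, not Stash's actual configuration wiring — a layered setup typically looks like this:

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/knadh/koanf/parsers/yaml"
	"github.com/knadh/koanf/providers/env"
	"github.com/knadh/koanf/providers/file"
	"github.com/knadh/koanf/providers/posflag"
	"github.com/knadh/koanf/v2"
	flag "github.com/spf13/pflag"
)

func main() {
	k := koanf.New(".") // "." is the key-path delimiter

	// Base values from a YAML file (illustrative path, parsed with the yaml parser module).
	if err := k.Load(file.Provider("config.yml"), yaml.Parser()); err != nil {
		log.Printf("no config file loaded: %v", err)
	}

	// Environment overrides, e.g. STASH_HOST becomes the key "host" (prefix is an assumption).
	_ = k.Load(env.Provider("STASH_", ".", func(s string) string {
		return strings.ToLower(strings.TrimPrefix(s, "STASH_"))
	}), nil)

	// Highest-priority overrides from command-line flags via the posflag provider.
	fs := flag.NewFlagSet("stash", flag.ExitOnError)
	fs.String("host", "0.0.0.0", "bind address")
	_ = fs.Parse(nil)
	_ = k.Load(posflag.Provider(fs, ".", k), nil)

	fmt.Println("host =", k.String("host"))
}
```

Providers loaded later override values loaded earlier, which gives the usual file → environment → flags precedence.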
149  go.sum

@@ -72,7 +72,6 @@ github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuy
 github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
 github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
 github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
-github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
 github.com/anacrolix/dms v1.2.2 h1:0mk2/DXNqa5KDDbaLgFPf3oMV6VCGdFNh3d/gt4oafM=
 github.com/anacrolix/dms v1.2.2/go.mod h1:msPKAoppoNRfrYplJqx63FZ+VipDZ4Xsj3KzIQxyU7k=
 github.com/anacrolix/envpprof v0.0.0-20180404065416-323002cec2fa/go.mod h1:KgHhUaQMc8cC0+cEflSgCFNFbKwi5h54gqtVn8yhP7c=
@@ -104,16 +103,6 @@ github.com/asticode/go-astisub v0.25.1 h1:RZMGfZPp7CXOkI6g+zCU7DRLuciGPGup921uKZ
 github.com/asticode/go-astisub v0.25.1/go.mod h1:WTkuSzFB+Bp7wezuSf2Oxulj5A8zu2zLRVFf6bIFQK8=
 github.com/asticode/go-astits v1.8.0 h1:rf6aiiGn/QhlFjNON1n5plqF3Fs025XLUwiQ0NB6oZg=
 github.com/asticode/go-astits v1.8.0/go.mod h1:DkOWmBNQpnr9mv24KfZjq4JawCFX1FCqjLVGvO0DygQ=
-github.com/aws/aws-sdk-go-v2 v1.9.2/go.mod h1:cK/D0BBs0b/oWPIcX/Z/obahJK1TT7IPVjy53i/mX/4=
-github.com/aws/aws-sdk-go-v2/config v1.8.3/go.mod h1:4AEiLtAb8kLs7vgw2ZV3p2VZ1+hBavOc84hqxVNpCyw=
-github.com/aws/aws-sdk-go-v2/credentials v1.4.3/go.mod h1:FNNC6nQZQUuyhq5aE5c7ata8o9e4ECGmS4lAXC7o1mQ=
-github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.6.0/go.mod h1:gqlclDEZp4aqJOancXK6TN24aKhT0W0Ae9MHk3wzTMM=
-github.com/aws/aws-sdk-go-v2/internal/ini v1.2.4/go.mod h1:ZcBrrI3zBKlhGFNYWvju0I3TR93I7YIgAfy82Fh4lcQ=
-github.com/aws/aws-sdk-go-v2/service/appconfig v1.4.2/go.mod h1:FZ3HkCe+b10uFZZkFdvf98LHW21k49W8o8J366lqVKY=
-github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.3.2/go.mod h1:72HRZDLMtmVQiLG2tLfQcaWLCssELvGl+Zf2WVxMmR8=
-github.com/aws/aws-sdk-go-v2/service/sso v1.4.2/go.mod h1:NBvT9R1MEF+Ud6ApJKM0G+IkPchKS7p7c2YPKwHmBOk=
-github.com/aws/aws-sdk-go-v2/service/sts v1.7.2/go.mod h1:8EzeIqfWt2wWT4rJVu3f21TfrhJ8AEMzVybRNSb/b4g=
-github.com/aws/smithy-go v1.8.0/go.mod h1:SObp3lf9smib00L/v3U2eAKG8FyQ7iLrJnQiAmR5n+E=
 github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
 github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
 github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
@@ -185,7 +174,6 @@ github.com/dop251/goja_nodejs v0.0.0-20211022123610-8dd9abb0616d/go.mod h1:DngW8
 github.com/doug-martin/goqu/v9 v9.18.0 h1:/6bcuEtAe6nsSMVK/M+fOiXUNfyFF3yYtE07DBPFMYY=
 github.com/doug-martin/goqu/v9 v9.18.0/go.mod h1:nf0Wc2/hV3gYK9LiyqIrzBEVGlI8qW3GuDCEobC4wBQ=
 github.com/dustin/go-humanize v0.0.0-20180421182945-02af3965c54e/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
-github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
 github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
 github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
 github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
@@ -200,19 +188,17 @@ github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E
 github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
 github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
 github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
-github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
 github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
 github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
-github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
 github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU=
-github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=
-github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw=
+github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
+github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
 github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
 github.com/glycerine/go-unsnap-stream v0.0.0-20180323001048-9f0cb55181dd/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
 github.com/glycerine/goconvey v0.0.0-20180728074245-46e3a41ad493/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
 github.com/go-chi/chi/v5 v5.0.7/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
-github.com/go-chi/chi/v5 v5.0.12 h1:9euLV5sTrTNTRUU9POmDUvfxyj6LAABLUcEWO+JJb4s=
-github.com/go-chi/chi/v5 v5.0.12/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
+github.com/go-chi/chi/v5 v5.2.2 h1:CMwsvRVTbXVytCk1Wd72Zy1LAsAh9GxMmSNWLHCG618=
+github.com/go-chi/chi/v5 v5.2.2/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops=
 github.com/go-chi/cors v1.2.1 h1:xEC8UT3Rlp2QuWNEr4Fs/c2EAGVKBwy/1vHx3bppil4=
 github.com/go-chi/cors v1.2.1/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58=
 github.com/go-chi/httplog v0.3.1 h1:uC3IUWCZagtbCinb3ypFh36SEcgd6StWw2Bu0XSXRtg=
@@ -222,22 +208,18 @@ github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2
 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
 github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
 github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
-github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
-github.com/go-ldap/ldap v3.0.2+incompatible/go.mod h1:qfd9rJvER9Q0/D/Sqn1DfHRoBp40uXYvFoEVrNEPqRc=
 github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
 github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
-github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
 github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU=
 github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
 github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
 github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
 github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
 github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
-github.com/go-test/deep v1.0.2-0.20181118220953-042da051cf31/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
 github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 h1:qZNfIGkIANxGv/OqtnntR4DfOY2+BgwR60cAcu/i3SE=
 github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4/go.mod h1:kW3HQ4UdaAyrUCSSDR4xUzBKW6O2iA4uHhk7AtyYp10=
-github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss=
-github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
+github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs=
+github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
 github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU=
 github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM=
 github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og=
@@ -288,7 +270,6 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS
 github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=
 github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
 github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
-github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
 github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
 github.com/google/btree v0.0.0-20180124185431-e89373fe6b4a/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
 github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
@@ -305,7 +286,6 @@ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
 github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
 github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
 github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@@ -346,11 +326,9 @@ github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7Fsg
 github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
 github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
 github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
-github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
 github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
 github.com/hashicorp/consul/api v1.11.0/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M=
 github.com/hashicorp/consul/api v1.12.0/go.mod h1:6pVBMo0ebnYdt2S3H87XhekM/HHrUoTD2XXb/VrZVy0=
-github.com/hashicorp/consul/api v1.13.0/go.mod h1:ZlVrynguJKcYr54zGaDbaL3fOvKC9m72FhPvA8T35KQ=
 github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms=
 github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
 github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
@@ -358,8 +336,6 @@ github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brv
 github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
|
||||||
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
|
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
|
||||||
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
|
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
|
||||||
github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI=
|
|
||||||
github.com/hashicorp/go-hclog v0.8.0/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
|
|
||||||
github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
|
github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
|
||||||
github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
|
github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
|
||||||
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
|
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
|
||||||
|
|
@ -369,17 +345,12 @@ github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHh
|
||||||
github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA=
|
github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA=
|
||||||
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
|
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
|
||||||
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
|
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
|
||||||
github.com/hashicorp/go-plugin v1.0.1/go.mod h1:++UyYGoz3o5w9ZzAdZxtQKrWWP+iqPBn3cQptSMzBuY=
|
|
||||||
github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
|
github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
|
||||||
github.com/hashicorp/go-retryablehttp v0.5.4/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
|
|
||||||
github.com/hashicorp/go-rootcerts v1.0.1/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
|
|
||||||
github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
|
github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
|
||||||
github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
|
github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
|
||||||
github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A=
|
|
||||||
github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
|
github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
|
||||||
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
||||||
github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
||||||
github.com/hashicorp/go-version v1.1.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
|
|
||||||
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||||
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||||
github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
|
github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
|
||||||
|
|
@ -394,14 +365,8 @@ github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOn
|
||||||
github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE=
|
github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE=
|
||||||
github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk=
|
github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk=
|
||||||
github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4=
|
github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4=
|
||||||
github.com/hashicorp/vault/api v1.0.4/go.mod h1:gDcqh3WGcR1cpF5AJz/B1UFheUEneMoIospckxBxk6Q=
|
|
||||||
github.com/hashicorp/vault/sdk v0.1.13/go.mod h1:B+hVj7TpuQY1Y/GPbCpffmgd+tSEwvhkWnjtSYCaS2M=
|
|
||||||
github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM=
|
|
||||||
github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM=
|
|
||||||
github.com/hasura/go-graphql-client v0.13.1 h1:kKbjhxhpwz58usVl+Xvgah/TDha5K2akNTRQdsEHN6U=
|
github.com/hasura/go-graphql-client v0.13.1 h1:kKbjhxhpwz58usVl+Xvgah/TDha5K2akNTRQdsEHN6U=
|
||||||
github.com/hasura/go-graphql-client v0.13.1/go.mod h1:k7FF7h53C+hSNFRG3++DdVZWIuHdCaTbI7siTJ//zGQ=
|
github.com/hasura/go-graphql-client v0.13.1/go.mod h1:k7FF7h53C+hSNFRG3++DdVZWIuHdCaTbI7siTJ//zGQ=
|
||||||
github.com/hjson/hjson-go/v4 v4.0.0 h1:wlm6IYYqHjOdXH1gHev4VoXCaW20HdQAGCxdOEEg2cs=
|
|
||||||
github.com/hjson/hjson-go/v4 v4.0.0/go.mod h1:KaYt3bTw3zhBjYqnXkYywcYctk0A2nxeEFTse3rH13E=
|
|
||||||
github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo=
|
github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo=
|
||||||
github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
|
github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
|
||||||
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
||||||
|
|
@ -412,18 +377,12 @@ github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2
|
||||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||||
github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
|
github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
|
||||||
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
|
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
|
||||||
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
|
|
||||||
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
|
|
||||||
github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o=
|
github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o=
|
||||||
github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY=
|
github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY=
|
||||||
github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=
|
|
||||||
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
|
|
||||||
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
|
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
|
||||||
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||||
github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
|
|
||||||
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
|
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
|
||||||
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||||
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
|
||||||
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||||
|
|
@ -431,17 +390,25 @@ github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1
|
||||||
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
|
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
|
||||||
github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||||
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
|
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
|
||||||
github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
|
|
||||||
github.com/kermieisinthehouse/gosx-notifier v0.1.2 h1:KV0KBeKK2B24kIHY7iK0jgS64Q05f4oB+hUZmsPodxQ=
|
github.com/kermieisinthehouse/gosx-notifier v0.1.2 h1:KV0KBeKK2B24kIHY7iK0jgS64Q05f4oB+hUZmsPodxQ=
|
||||||
github.com/kermieisinthehouse/gosx-notifier v0.1.2/go.mod h1:xyWT07azFtUOcHl96qMVvKhvKzsMcS7rKTHQyv8WTho=
|
github.com/kermieisinthehouse/gosx-notifier v0.1.2/go.mod h1:xyWT07azFtUOcHl96qMVvKhvKzsMcS7rKTHQyv8WTho=
|
||||||
github.com/kermieisinthehouse/systray v1.2.4 h1:pdH5vnl+KKjRrVCRU4g/2W1/0HVzuuJ6WXHlPPHYY6s=
|
github.com/kermieisinthehouse/systray v1.2.4 h1:pdH5vnl+KKjRrVCRU4g/2W1/0HVzuuJ6WXHlPPHYY6s=
|
||||||
github.com/kermieisinthehouse/systray v1.2.4/go.mod h1:axh6C/jNuSyC0QGtidZJURc9h+h41HNoMySoLVrhVR4=
|
github.com/kermieisinthehouse/systray v1.2.4/go.mod h1:axh6C/jNuSyC0QGtidZJURc9h+h41HNoMySoLVrhVR4=
|
||||||
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
|
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
|
||||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||||
github.com/knadh/koanf v1.5.0 h1:q2TSd/3Pyc/5yP9ldIrSdIz26MCcyNQzW0pEAugLPNs=
|
github.com/knadh/koanf/maps v0.1.2 h1:RBfmAW5CnZT+PJ1CVc1QSJKf4Xu9kxfQgYVQSu8hpbo=
|
||||||
github.com/knadh/koanf v1.5.0/go.mod h1:Hgyjp4y8v44hpZtPzs7JZfRAW5AhN7KfZcwv1RYggDs=
|
github.com/knadh/koanf/maps v0.1.2/go.mod h1:npD/QZY3V6ghQDdcQzl1W4ICNVTkohC8E73eI2xW4yI=
|
||||||
|
github.com/knadh/koanf/parsers/yaml v1.1.0 h1:3ltfm9ljprAHt4jxgeYLlFPmUaunuCgu1yILuTXRdM4=
|
||||||
|
github.com/knadh/koanf/parsers/yaml v1.1.0/go.mod h1:HHmcHXUrp9cOPcuC+2wrr44GTUB0EC+PyfN3HZD9tFg=
|
||||||
|
github.com/knadh/koanf/providers/env v1.1.0 h1:U2VXPY0f+CsNDkvdsG8GcsnK4ah85WwWyJgef9oQMSc=
|
||||||
|
github.com/knadh/koanf/providers/env v1.1.0/go.mod h1:QhHHHZ87h9JxJAn2czdEl6pdkNnDh/JS1Vtsyt65hTY=
|
||||||
|
github.com/knadh/koanf/providers/file v1.2.0 h1:hrUJ6Y9YOA49aNu/RSYzOTFlqzXSCpmYIDXI7OJU6+U=
|
||||||
|
github.com/knadh/koanf/providers/file v1.2.0/go.mod h1:bp1PM5f83Q+TOUu10J/0ApLBd9uIzg+n9UgthfY+nRA=
|
||||||
|
github.com/knadh/koanf/providers/posflag v1.0.1 h1:EnMxHSrPkYCFnKgBUl5KBgrjed8gVFrcXDzaW4l/C6Y=
|
||||||
|
github.com/knadh/koanf/providers/posflag v1.0.1/go.mod h1:3Wn3+YG3f4ljzRyCUgIwH7G0sZ1pMjCOsNBovrbKmAk=
|
||||||
|
github.com/knadh/koanf/v2 v2.2.1 h1:jaleChtw85y3UdBnI0wCqcg1sj1gPoz6D3caGNHtrNE=
|
||||||
|
github.com/knadh/koanf/v2 v2.2.1/go.mod h1:PSFru3ufQgTsI7IF+95rf9s8XA1+aHxKuO/W+dPoHEY=
|
||||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||||
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
|
||||||
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
|
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
|
||||||
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
|
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
|
||||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||||
|
|
@ -492,22 +459,17 @@ github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5
|
||||||
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
|
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
|
||||||
github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso=
|
github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso=
|
||||||
github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI=
|
github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI=
|
||||||
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
|
|
||||||
github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI=
|
github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI=
|
||||||
github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw=
|
|
||||||
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
|
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
|
||||||
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
|
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
|
||||||
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
|
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
|
||||||
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
||||||
github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
|
|
||||||
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
|
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
|
||||||
github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo=
|
|
||||||
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||||
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||||
github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||||
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
|
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
|
||||||
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||||
github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
|
|
||||||
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
|
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
|
||||||
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
|
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
|
||||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||||
|
|
@ -519,26 +481,20 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G
|
||||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||||
github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg=
|
github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg=
|
||||||
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||||
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
|
||||||
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007 h1:Ohgj9L0EYOgXxkDp+bczlMBiulwmqYzQpvQNUdtt3oc=
|
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007 h1:Ohgj9L0EYOgXxkDp+bczlMBiulwmqYzQpvQNUdtt3oc=
|
||||||
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007/go.mod h1:wKCOWMb6iNlvKiOToY2cNuaovSXvIiv1zDi9QDR7aGQ=
|
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007/go.mod h1:wKCOWMb6iNlvKiOToY2cNuaovSXvIiv1zDi9QDR7aGQ=
|
||||||
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ=
|
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ=
|
||||||
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
|
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
|
||||||
github.com/npillmayer/nestext v0.1.3/go.mod h1:h2lrijH8jpicr25dFY+oAJLyzlya6jhnuG+zWp9L0Uk=
|
|
||||||
github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d h1:VhgPp6v9qf9Agr/56bj7Y/xa04UccTW04VP0Qed4vnQ=
|
github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d h1:VhgPp6v9qf9Agr/56bj7Y/xa04UccTW04VP0Qed4vnQ=
|
||||||
github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d/go.mod h1:YUTz3bUH2ZwIWBy3CJBeOBEugqcmXREj14T+iG/4k4U=
|
github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d/go.mod h1:YUTz3bUH2ZwIWBy3CJBeOBEugqcmXREj14T+iG/4k4U=
|
||||||
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
|
|
||||||
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde h1:x0TT0RDC7UhAVbbWWBzr41ElhJx5tXPWkIHA2HWPRuw=
|
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde h1:x0TT0RDC7UhAVbbWWBzr41ElhJx5tXPWkIHA2HWPRuw=
|
||||||
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0=
|
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0=
|
||||||
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
|
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
|
||||||
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
|
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
|
||||||
github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE=
|
|
||||||
github.com/pelletier/go-toml v1.9.4 h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM=
|
|
||||||
github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
|
github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
|
||||||
github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
|
github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
|
||||||
github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
|
github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
|
||||||
github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU=
|
github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU=
|
||||||
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
|
|
||||||
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU=
|
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU=
|
||||||
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI=
|
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI=
|
||||||
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
|
|
@ -555,24 +511,17 @@ github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSg
|
||||||
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
|
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
|
||||||
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
|
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
|
||||||
github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
|
github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
|
||||||
github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
|
|
||||||
github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0=
|
|
||||||
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
|
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
|
||||||
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||||
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||||
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||||
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
|
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
|
||||||
github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4=
|
github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4=
|
||||||
github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
|
|
||||||
github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc=
|
|
||||||
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||||
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
|
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
|
||||||
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
|
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
|
||||||
github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
|
|
||||||
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
|
|
||||||
github.com/remeh/sizedwaitgroup v1.0.0 h1:VNGGFwNo/R5+MJBf6yrsr110p0m4/OX4S3DCy7Kyl5E=
|
github.com/remeh/sizedwaitgroup v1.0.0 h1:VNGGFwNo/R5+MJBf6yrsr110p0m4/OX4S3DCy7Kyl5E=
|
||||||
github.com/remeh/sizedwaitgroup v1.0.0/go.mod h1:3j2R4OIe/SeS6YDhICBy22RWjJC5eNCJ1V+9+NVNYlo=
|
github.com/remeh/sizedwaitgroup v1.0.0/go.mod h1:3j2R4OIe/SeS6YDhICBy22RWjJC5eNCJ1V+9+NVNYlo=
|
||||||
github.com/rhnvrm/simples3 v0.6.1/go.mod h1:Y+3vYm2V7Y4VijFoJHHTrja6OgPrJ2cBti8dPGkC3sA=
|
|
||||||
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
|
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
|
||||||
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||||
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
|
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
|
||||||
|
|
@ -590,8 +539,6 @@ github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQD
|
||||||
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd h1:CmH9+J6ZSsIjUK3dcGsnCnO41eRBOnY12zwkn5qVwgc=
|
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd h1:CmH9+J6ZSsIjUK3dcGsnCnO41eRBOnY12zwkn5qVwgc=
|
||||||
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk=
|
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk=
|
||||||
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
|
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
|
||||||
github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
|
|
||||||
github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc=
|
|
||||||
github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8=
|
github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8=
|
||||||
github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig=
|
github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig=
|
||||||
github.com/sagikazarmark/crypt v0.4.0/go.mod h1:ALv2SRj7GxYV4HO9elxH9nS6M9gW+xDNxqmyJ6RfDFM=
|
github.com/sagikazarmark/crypt v0.4.0/go.mod h1:ALv2SRj7GxYV4HO9elxH9nS6M9gW+xDNxqmyJ6RfDFM=
|
||||||
|
|
@ -600,7 +547,6 @@ github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=
|
||||||
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
|
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
|
||||||
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
||||||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||||
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
|
|
||||||
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
||||||
|
|
@ -621,8 +567,9 @@ github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
|
||||||
github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
|
github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
|
||||||
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
|
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
|
||||||
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
|
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
|
||||||
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
|
||||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||||
|
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
|
||||||
|
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||||
github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM=
|
github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM=
|
||||||
github.com/spf13/viper v1.10.1/go.mod h1:IGlFPqhNAPKRxohIzWpI5QEy4kuI7tcl5WvR+8qy1rU=
|
github.com/spf13/viper v1.10.1/go.mod h1:IGlFPqhNAPKRxohIzWpI5QEy4kuI7tcl5WvR+8qy1rU=
|
||||||
github.com/spf13/viper v1.16.0 h1:rGGH0XDZhdUOryiDWjmIvUSWpbNqisK8Wk0Vyefw8hc=
|
github.com/spf13/viper v1.16.0 h1:rGGH0XDZhdUOryiDWjmIvUSWpbNqisK8Wk0Vyefw8hc=
|
||||||
|
|
@ -683,11 +630,8 @@ github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5t
|
||||||
github.com/zencoder/go-dash/v3 v3.0.2 h1:oP1+dOh+Gp57PkvdCyMfbHtrHaxfl3w4kR3KBBbuqQE=
|
github.com/zencoder/go-dash/v3 v3.0.2 h1:oP1+dOh+Gp57PkvdCyMfbHtrHaxfl3w4kR3KBBbuqQE=
|
||||||
github.com/zencoder/go-dash/v3 v3.0.2/go.mod h1:30R5bKy1aUYY45yesjtZ9l8trNc2TwNqbS17WVQmCzk=
|
github.com/zencoder/go-dash/v3 v3.0.2/go.mod h1:30R5bKy1aUYY45yesjtZ9l8trNc2TwNqbS17WVQmCzk=
|
||||||
go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
|
go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
|
||||||
go.etcd.io/etcd/api/v3 v3.5.4/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A=
|
|
||||||
go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
|
go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
|
||||||
go.etcd.io/etcd/client/pkg/v3 v3.5.4/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
|
|
||||||
go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs=
|
go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs=
|
||||||
go.etcd.io/etcd/client/v3 v3.5.4/go.mod h1:ZaRkVgBZC+L+dLCjTcF1hRXpgZXQPOvnA/Ak/gq3kiY=
|
|
||||||
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
|
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
|
||||||
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
|
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
|
||||||
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
||||||
|
|
@ -701,6 +645,8 @@ go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE=
|
||||||
go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
|
go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
|
||||||
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
|
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
|
||||||
go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=
|
go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=
|
||||||
|
go.yaml.in/yaml/v3 v3.0.3 h1:bXOww4E/J3f66rav3pX3m8w6jDE4knZjGOw8b5Y6iNE=
|
||||||
|
go.yaml.in/yaml/v3 v3.0.3/go.mod h1:tBHosrYAkRZjRAOREWbDnBXUf08JOwYq++0QNwQiWzI=
|
||||||
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||||
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
|
|
@ -718,8 +664,8 @@ golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5y
|
||||||
golang.org/x/crypto v0.0.0-20211215165025-cf75a172585e/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
|
golang.org/x/crypto v0.0.0-20211215165025-cf75a172585e/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
|
||||||
golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||||
golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||||
golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8=
|
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
|
||||||
golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw=
|
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
|
||||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||||
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||||
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
||||||
|
|
@ -761,8 +707,8 @@ golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||||
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
|
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
|
||||||
golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
|
golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
|
||||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||||
golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU=
|
golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
|
||||||
golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww=
|
golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
|
||||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
|
|
@ -812,8 +758,8 @@ golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qx
|
||||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||||
golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws=
|
golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws=
|
||||||
golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY=
|
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
|
||||||
golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds=
|
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
|
||||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||||
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||||
|
|
@ -843,18 +789,16 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ
|
||||||
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ=
|
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
|
||||||
golang.org/x/sync v0.14.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||||
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20190129075346-302c3dd5f1cc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
|
||||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
|
||||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190415145633-3fd5a3612ccd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190415145633-3fd5a3612ccd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
|
@ -866,12 +810,10 @@ golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||||
golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
|
||||||
golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
|
||||||
golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
|
@ -886,8 +828,6 @@ golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||||
golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
|
||||||
golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
|
||||||
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
|
@ -895,7 +835,6 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||||
golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
 golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -908,7 +847,6 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w
 golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -931,21 +869,19 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
-golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
+golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
+golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
 golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ=
-golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg=
-golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ=
+golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
+golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254=
 golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/text v0.3.1-0.20181227161524-e6919f6577db/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
 golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
@@ -954,8 +890,8 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
 golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
 golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4=
-golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA=
+golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
+golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
 golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -1020,8 +956,8 @@ golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
 golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo=
 golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
 golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
-golang.org/x/tools v0.33.0 h1:4qz2S3zmRxbGIhDIAgjxvFutSvH5EfnsYrRBj0UI0bc=
-golang.org/x/tools v0.33.0/go.mod h1:CIJMaWEY88juyUfo7UbgPqbC8rU2OqfAV1h2Qp0oMYI=
+golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=
+golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -1068,7 +1004,6 @@ google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCID
 google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
 google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
 google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
-google.golang.org/genproto v0.0.0-20190404172233-64821d5d2107/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
 google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
 google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
 google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
@@ -1132,11 +1067,9 @@ google.golang.org/genproto v0.0.0-20211129164237-f09f9a12af12/go.mod h1:5CzLGKJ6
 google.golang.org/genproto v0.0.0-20211203200212-54befc351ae9/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
 google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
 google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
-google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
 google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
 google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
 google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
-google.golang.org/grpc v1.22.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
 google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
 google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
 google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
@@ -1177,7 +1110,6 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0
 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
 google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
 gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
-gopkg.in/asn1-ber.v1 v1.0.0-20181015200546-f715ec2f112d/go.mod h1:cuepJuh7vyXfUyUwEgHQXw849cJrilpS5NeIjOWESAw=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -1190,14 +1122,14 @@ gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
 gopkg.in/ini.v1 v1.66.3/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
 gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
 gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
-gopkg.in/square/go-jose.v2 v2.3.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
+gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc=
+gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc=
 gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
 gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
 gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
@@ -1214,4 +1146,3 @@ honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9
 rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
 rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
 rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
-sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc=

@@ -165,6 +165,12 @@ type Query {
     input: ScrapeSingleStudioInput!
   ): [ScrapedStudio!]!

+  "Scrape for a single tag"
+  scrapeSingleTag(
+    source: ScraperSourceInput!
+    input: ScrapeSingleTagInput!
+  ): [ScrapedTag!]!
+
   "Scrape for a single performer"
   scrapeSinglePerformer(
     source: ScraperSourceInput!
@@ -328,6 +334,7 @@ type Mutation {

   sceneMarkerCreate(input: SceneMarkerCreateInput!): SceneMarker
   sceneMarkerUpdate(input: SceneMarkerUpdateInput!): SceneMarker
+  bulkSceneMarkerUpdate(input: BulkSceneMarkerUpdateInput!): [SceneMarker!]
   sceneMarkerDestroy(id: ID!): Boolean!
   sceneMarkersDestroy(ids: [ID!]!): Boolean!

@@ -371,6 +378,7 @@ type Mutation {
   studioUpdate(input: StudioUpdateInput!): Studio
   studioDestroy(input: StudioDestroyInput!): Boolean!
   studiosDestroy(ids: [ID!]!): Boolean!
+  bulkStudioUpdate(input: BulkStudioUpdateInput!): [Studio!]

   movieCreate(input: MovieCreateInput!): Movie
     @deprecated(reason: "Use groupCreate instead")

@@ -2,6 +2,8 @@ input SetupInput {
   "Empty to indicate $HOME/.stash/config.yml default"
   configLocation: String!
   stashes: [StashConfigInput!]!
+  "True if SFW content mode is enabled"
+  sfwContentMode: Boolean
   "Empty to indicate default"
   databaseFile: String!
   "Empty to indicate default"
@@ -67,6 +69,8 @@ input ConfigGeneralInput {
   databasePath: String
   "Path to backup directory"
   backupDirectoryPath: String
+  "Path to trash directory - if set, deleted files will be moved here instead of being permanently deleted"
+  deleteTrashPath: String
   "Path to generated files"
   generatedPath: String
   "Path to import/export files"
@@ -153,6 +157,8 @@ input ConfigGeneralInput {
   logLevel: String
   "Whether to log http access"
   logAccess: Boolean
+  "Maximum log size"
+  logFileMaxSize: Int
   "True if galleries should be created from folders with images"
   createGalleriesFromFolders: Boolean
   "Regex used to identify images as gallery covers"
@@ -187,6 +193,8 @@ type ConfigGeneralResult {
   databasePath: String!
   "Path to backup directory"
   backupDirectoryPath: String!
+  "Path to trash directory - if set, deleted files will be moved here instead of being permanently deleted"
+  deleteTrashPath: String!
   "Path to generated files"
   generatedPath: String!
   "Path to import/export files"
@@ -277,6 +285,8 @@ type ConfigGeneralResult {
   logLevel: String!
   "Whether to log http access"
   logAccess: Boolean!
+  "Maximum log size"
+  logFileMaxSize: Int!
   "Array of video file extensions"
   videoExtensions: [String!]!
   "Array of image file extensions"
@@ -329,6 +339,7 @@ input ConfigImageLightboxInput {
   resetZoomOnNav: Boolean
   scrollMode: ImageLightboxScrollMode
   scrollAttemptsBeforeChange: Int
+  disableAnimation: Boolean
 }

 type ConfigImageLightboxResult {
@@ -338,9 +349,13 @@ type ConfigImageLightboxResult {
   resetZoomOnNav: Boolean
   scrollMode: ImageLightboxScrollMode
   scrollAttemptsBeforeChange: Int!
+  disableAnimation: Boolean
 }

 input ConfigInterfaceInput {
+  "True if SFW content mode is enabled"
+  sfwContentMode: Boolean
+
   "Ordered list of items that should be shown in the menu"
   menuItems: [String!]

@@ -407,6 +422,9 @@ type ConfigDisableDropdownCreate {
 }

 type ConfigInterfaceResult {
+  "True if SFW content mode is enabled"
+  sfwContentMode: Boolean!
+
   "Ordered list of items that should be shown in the menu"
   menuItems: [String!]

@@ -330,6 +330,8 @@ input SceneFilterType {
   groups_filter: GroupFilterType
   "Filter by related markers that meet this criteria"
   markers_filter: SceneMarkerFilterType
+  "Filter by related files that meet this criteria"
+  files_filter: FileFilterType
 }

 input MovieFilterType {
@@ -401,6 +403,8 @@ input GroupFilterType {
   created_at: TimestampCriterionInput
   "Filter by last update time"
   updated_at: TimestampCriterionInput
+  "Filter by o-counter"
+  o_counter: IntCriterionInput

   "Filter by containing groups"
   containing_groups: HierarchicalMultiCriterionInput
@@ -534,6 +538,10 @@ input GalleryFilterType {
   studios_filter: StudioFilterType
   "Filter by related tags that meet this criteria"
   tags_filter: TagFilterType
+  "Filter by related files that meet this criteria"
+  files_filter: FileFilterType
+  "Filter by related folders that meet this criteria"
+  folders_filter: FolderFilterType
 }

 input TagFilterType {
@@ -679,6 +687,8 @@ input ImageFilterType {
   studios_filter: StudioFilterType
   "Filter by related tags that meet this criteria"
   tags_filter: TagFilterType
+  "Filter by related files that meet this criteria"
+  files_filter: FileFilterType
 }

 input FileFilterType {

@@ -30,6 +30,7 @@ type Group {
   performer_count(depth: Int): Int! # Resolver
   sub_group_count(depth: Int): Int! # Resolver
   scenes: [Scene!]!
+  o_counter: Int # Resolver
 }

 input GroupDescriptionInput {

@@ -344,4 +344,6 @@ input CustomFieldsInput {
   full: Map
   "If populated, only the keys in this map will be updated"
   partial: Map
+  "Remove any keys in this list"
+  remove: [String!]
 }

@@ -42,6 +42,13 @@ input SceneMarkerUpdateInput {
   tag_ids: [ID!]
 }

+input BulkSceneMarkerUpdateInput {
+  ids: [ID!]
+  title: String
+  primary_tag_id: ID
+  tag_ids: BulkUpdateIds
+}
+
 type FindSceneMarkersResultType {
   count: Int!
   scene_markers: [SceneMarker!]!

@@ -55,9 +55,14 @@ type ScrapedStudio {
   "Set if studio matched"
   stored_id: ID
   name: String!
-  url: String
+  url: String @deprecated(reason: "use urls")
+  urls: [String!]
   parent: ScrapedStudio
   image: String
+  details: String
+  "Aliases must be comma-delimited to be parsed correctly"
+  aliases: String
+  tags: [ScrapedTag!]

   remote_site_id: String
 }
@@ -66,6 +71,8 @@ type ScrapedTag {
   "Set if tag matched"
   stored_id: ID
   name: String!
+  "Remote site ID, if applicable"
+  remote_site_id: String
 }

 type ScrapedScene {
@@ -191,6 +198,13 @@ input ScrapeSingleStudioInput {
   query: String
 }

+input ScrapeSingleTagInput {
+  """
+  Query can be either a name or a Stash ID
+  """
+  query: String
+}
+
 input ScrapeSinglePerformerInput {
   "Instructs to query by string"
   query: String
@@ -274,7 +288,10 @@ type StashBoxFingerprint {
   duration: Int!
 }

-"If neither ids nor names are set, tag all items"
+"""
+Accepts either ids, or a combination of names and stash_ids.
+If none are set, then all existing items will be tagged.
+"""
 input StashBoxBatchTagInput {
   "Stash endpoint to use for the tagging"
   endpoint: Int @deprecated(reason: "use stash_box_endpoint")
@@ -286,12 +303,17 @@ input StashBoxBatchTagInput {
   refresh: Boolean!
   "If batch adding studios, should their parent studios also be created?"
   createParent: Boolean!
-  "If set, only tag these ids"
+  """
+  IDs in stash of the items to update.
+  If set, names and stash_ids fields will be ignored.
+  """
   ids: [ID!]
-  "If set, only tag these names"
+  "Names of the items in the stash-box instance to search for and create"
   names: [String!]
-  "If set, only tag these performer ids"
+  "Stash IDs of the items in the stash-box instance to search for and create"
+  stash_ids: [String!]
+  "IDs in stash of the performers to update"
   performer_ids: [ID!] @deprecated(reason: "use ids")
-  "If set, only tag these performer names"
+  "Names of the performers in the stash-box instance to search for and create"
   performer_names: [String!] @deprecated(reason: "use names")
 }

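The ids / names / stash_ids behaviour documented above amounts to a simple precedence rule. Below is a minimal Go sketch of that rule, using a hypothetical struct that mirrors the StashBoxBatchTagInput fields; it illustrates the documented semantics only and is not the server's actual implementation.

package main

import "fmt"

// batchTagInput mirrors the documented StashBoxBatchTagInput fields (hypothetical struct for illustration).
type batchTagInput struct {
    IDs      []string // IDs in stash of the items to update
    Names    []string // names to search for in the stash-box instance
    StashIDs []string // stash IDs to search for in the stash-box instance
}

// selectionMode expresses the documented precedence: ids win over names/stash_ids,
// and an entirely empty input means "tag all existing items".
func selectionMode(in batchTagInput) string {
    switch {
    case len(in.IDs) > 0:
        return "update the given stash items; names and stash_ids are ignored"
    case len(in.Names) > 0 || len(in.StashIDs) > 0:
        return "search the stash-box instance for the given names/stash IDs and create matches"
    default:
        return "tag all existing items"
    }
}

func main() {
    fmt.Println(selectionMode(batchTagInput{IDs: []string{"12"}}))
    fmt.Println(selectionMode(batchTagInput{}))
}
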
@@ -1,7 +1,8 @@
 type Studio {
   id: ID!
   name: String!
-  url: String
+  url: String @deprecated(reason: "Use urls")
+  urls: [String!]!
   parent_studio: Studio
   child_studios: [Studio!]!
   aliases: [String!]!
@@ -24,11 +25,13 @@ type Studio {
   updated_at: Time!
   groups: [Group!]!
   movies: [Movie!]! @deprecated(reason: "use groups instead")
+  o_counter: Int
 }

 input StudioCreateInput {
   name: String!
-  url: String
+  url: String @deprecated(reason: "Use urls")
+  urls: [String!]
   parent_id: ID
   "This should be a URL or a base64 encoded data URL"
   image: String
@@ -45,7 +48,8 @@ input StudioCreateInput {
 input StudioUpdateInput {
   id: ID!
   name: String
-  url: String
+  url: String @deprecated(reason: "Use urls")
+  urls: [String!]
   parent_id: ID
   "This should be a URL or a base64 encoded data URL"
   image: String
@@ -59,6 +63,19 @@ input StudioUpdateInput {
   ignore_auto_tag: Boolean
 }

+input BulkStudioUpdateInput {
+  ids: [ID!]!
+  url: String @deprecated(reason: "Use urls")
+  urls: BulkUpdateStrings
+  parent_id: ID
+  # rating expressed as 1-100
+  rating100: Int
+  favorite: Boolean
+  details: String
+  tag_ids: BulkUpdateIds
+  ignore_auto_tag: Boolean
+}
+
 input StudioDestroyInput {
   id: ID!
 }

@@ -9,6 +9,7 @@ type Tag {
   created_at: Time!
   updated_at: Time!
   favorite: Boolean!
+  stash_ids: [StashID!]!
   image_path: String # Resolver
   scene_count(depth: Int): Int! # Resolver
   scene_marker_count(depth: Int): Int! # Resolver
@@ -35,6 +36,7 @@ input TagCreateInput {
   favorite: Boolean
   "This should be a URL or a base64 encoded data URL"
   image: String
+  stash_ids: [StashIDInput!]

   parent_ids: [ID!]
   child_ids: [ID!]
@@ -51,6 +53,7 @@ input TagUpdateInput {
   favorite: Boolean
   "This should be a URL or a base64 encoded data URL"
   image: String
+  stash_ids: [StashIDInput!]

   parent_ids: [ID!]
   child_ids: [ID!]

@@ -13,6 +13,7 @@ fragment ImageFragment on Image {
 fragment StudioFragment on Studio {
   name
   id
+  aliases
   urls {
     ...URLFragment
   }
@@ -169,6 +170,21 @@ query FindStudio($id: ID, $name: String) {
   }
 }

+query FindTag($id: ID, $name: String) {
+  findTag(id: $id, name: $name) {
+    ...TagFragment
+  }
+}
+
+query QueryTags($input: TagQueryInput!) {
+  queryTags(input: $input) {
+    count
+    tags {
+      ...TagFragment
+    }
+  }
+}
+
 mutation SubmitFingerprint($input: FingerprintSubmission!) {
   submitFingerprint(input: $input)
 }

@@ -98,7 +98,7 @@ func (t changesetTranslator) string(value *string) string {
        return ""
    }

-   return *value
+   return strings.TrimSpace(*value)
 }

 func (t changesetTranslator) optionalString(value *string, field string) models.OptionalString {
@@ -106,7 +106,12 @@ func (t changesetTranslator) optionalString(value *string, field string) models.
        return models.OptionalString{}
    }

-   return models.NewOptionalStringPtr(value)
+   if value == nil {
+       return models.NewOptionalStringPtr(nil)
+   }
+
+   trimmed := strings.TrimSpace(*value)
+   return models.NewOptionalString(trimmed)
 }

 func (t changesetTranslator) optionalDate(value *string, field string) (models.OptionalDate, error) {
@@ -318,8 +323,14 @@ func (t changesetTranslator) updateStrings(value []string, field string) *models
        return nil
    }

+   // Trim whitespace from each string
+   trimmedValues := make([]string, len(value))
+   for i, v := range value {
+       trimmedValues[i] = strings.TrimSpace(v)
+   }
+
    return &models.UpdateStrings{
-       Values: value,
+       Values: trimmedValues,
        Mode:   models.RelationshipUpdateModeSet,
    }
 }
@@ -329,8 +340,14 @@ func (t changesetTranslator) updateStringsBulk(value *BulkUpdateStrings, field s
        return nil
    }

+   // Trim whitespace from each string
+   trimmedValues := make([]string, len(value.Values))
+   for i, v := range value.Values {
+       trimmedValues[i] = strings.TrimSpace(v)
+   }
+
    return &models.UpdateStrings{
-       Values: value.Values,
+       Values: trimmedValues,
        Mode:   value.Mode,
    }
 }
@@ -448,7 +465,7 @@ func groupsDescriptionsFromGroupInput(input []*GroupDescriptionInput) ([]models.
            GroupID: gID,
        }
        if v.Description != nil {
-           ret[i].Description = *v.Description
+           ret[i].Description = strings.TrimSpace(*v.Description)
        }
    }

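Several resolver hunks later in this diff call a stringslice.TrimSpace helper on URL and alias slices; its definition is not part of this diff. A minimal sketch of what such a helper might look like, mirroring the per-element trimming added to updateStrings above (the package name and location are assumptions):

package stringslice

import "strings"

// TrimSpace returns a new slice with strings.TrimSpace applied to every element.
// Sketch only - the real helper in the stash codebase may differ in name or behaviour.
func TrimSpace(values []string) []string {
    ret := make([]string, len(values))
    for i, v := range values {
        ret[i] = strings.TrimSpace(v)
    }
    return ret
}
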
@@ -7,8 +7,10 @@ import (
    "fmt"
    "io"
    "net/http"
+   "os"
    "regexp"
    "runtime"
+   "strings"
    "time"

    "golang.org/x/sys/cpu"
@@ -36,6 +38,24 @@ var stashReleases = func() map[string]string {
    }
 }

+// isMacOSBundle checks if the application is running from within a macOS .app bundle
+func isMacOSBundle() bool {
+   exec, err := os.Executable()
+   return err == nil && strings.Contains(exec, "Stash.app/")
+}
+
+// getWantedRelease determines which release variant to download based on platform and bundle type
+func getWantedRelease(platform string) string {
+   release := stashReleases()[platform]
+
+   // On macOS, check if running from .app bundle
+   if runtime.GOOS == "darwin" && isMacOSBundle() {
+       return "Stash.app.zip"
+   }
+
+   return release
+}
+
 type githubReleasesResponse struct {
    Url        string
    Assets_url string
@@ -168,7 +188,7 @@ func GetLatestRelease(ctx context.Context) (*LatestRelease, error) {
    }

    platform := fmt.Sprintf("%s/%s", runtime.GOOS, arch)
-   wantedRelease := stashReleases()[platform]
+   wantedRelease := getWantedRelease(platform)

    url := apiReleases
    if build.IsDevelop() {

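The bundle detection added above keys off the executable path returned by os.Executable. The standalone sketch below shows the same strings.Contains check against example paths, so the behaviour can be seen without running inside a real bundle; the sample paths are illustrative only.

package main

import (
    "fmt"
    "strings"
)

// isBundlePath mirrors the isMacOSBundle check, but takes the path as a parameter
// so it can be exercised outside of a packaged build.
func isBundlePath(execPath string) bool {
    return strings.Contains(execPath, "Stash.app/")
}

func main() {
    fmt.Println(isBundlePath("/Applications/Stash.app/Contents/MacOS/stash")) // true
    fmt.Println(isBundlePath("/usr/local/bin/stash"))                         // false
}
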
internal/api/custom_fields.go (new file)
@@ -0,0 +1,12 @@
+package api
+
+import "github.com/stashapp/stash/pkg/models"
+
+func handleUpdateCustomFields(input models.CustomFieldsInput) models.CustomFieldsInput {
+   ret := input
+   // convert json.Numbers to int/float
+   ret.Full = convertMapJSONNumbers(ret.Full)
+   ret.Partial = convertMapJSONNumbers(ret.Partial)
+
+   return ret
+}

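handleUpdateCustomFields delegates to convertMapJSONNumbers, which is defined elsewhere in the package and not shown in this diff. A hedged sketch of what such a conversion pass could look like, assuming it walks the map and turns encoding/json Number values into int64 or float64; the real function may differ.

package main

import (
    "encoding/json"
    "fmt"
)

// convertNumbers is a sketch of a json.Number normalisation pass over a custom-fields map.
func convertNumbers(m map[string]interface{}) map[string]interface{} {
    if m == nil {
        return nil
    }
    out := make(map[string]interface{}, len(m))
    for k, v := range m {
        if n, ok := v.(json.Number); ok {
            if i, err := n.Int64(); err == nil {
                out[k] = i
                continue
            }
            if f, err := n.Float64(); err == nil {
                out[k] = f
                continue
            }
        }
        out[k] = v
    }
    return out
}

func main() {
    in := map[string]interface{}{"height": json.Number("180"), "rating": json.Number("4.5")}
    fmt.Println(convertNumbers(in))
}
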
@@ -26,6 +26,7 @@ var imageBoxExts = []string{
    ".gif",
    ".svg",
    ".webp",
+   ".avif",
 }

 func newImageBox(box fs.FS) (*imageBox, error) {
@@ -101,7 +102,7 @@ func initCustomPerformerImages(customPath string) {
    }
 }

-func getDefaultPerformerImage(name string, gender *models.GenderEnum) []byte {
+func getDefaultPerformerImage(name string, gender *models.GenderEnum, sfwMode bool) []byte {
    // try the custom box first if we have one
    if performerBoxCustom != nil {
        ret, err := performerBoxCustom.GetRandomImageByName(name)
@@ -111,6 +112,10 @@ func getDefaultPerformerImage(name string, gender *models.GenderEnum) []byte {
        logger.Warnf("error loading custom default performer image: %v", err)
    }

+   if sfwMode {
+       return static.ReadAll(static.DefaultSFWPerformerImage)
+   }
+
    var g models.GenderEnum
    if gender != nil {
        g = *gender

@@ -204,3 +204,14 @@ func (r *groupResolver) Scenes(ctx context.Context, obj *models.Group) (ret []*m

    return ret, nil
 }
+
+func (r *groupResolver) OCounter(ctx context.Context, obj *models.Group) (ret *int, err error) {
+   var count int
+   if err := r.withReadTxn(ctx, func(ctx context.Context) error {
+       count, err = r.repository.Scene.OCountByGroupID(ctx, obj.ID)
+       return err
+   }); err != nil {
+       return nil, err
+   }
+   return &count, nil
+}

@@ -40,6 +40,35 @@ func (r *studioResolver) Aliases(ctx context.Context, obj *models.Studio) ([]str
    return obj.Aliases.List(), nil
 }

+func (r *studioResolver) URL(ctx context.Context, obj *models.Studio) (*string, error) {
+   if !obj.URLs.Loaded() {
+       if err := r.withReadTxn(ctx, func(ctx context.Context) error {
+           return obj.LoadURLs(ctx, r.repository.Studio)
+       }); err != nil {
+           return nil, err
+       }
+   }
+
+   urls := obj.URLs.List()
+   if len(urls) == 0 {
+       return nil, nil
+   }
+
+   return &urls[0], nil
+}
+
+func (r *studioResolver) Urls(ctx context.Context, obj *models.Studio) ([]string, error) {
+   if !obj.URLs.Loaded() {
+       if err := r.withReadTxn(ctx, func(ctx context.Context) error {
+           return obj.LoadURLs(ctx, r.repository.Studio)
+       }); err != nil {
+           return nil, err
+       }
+   }
+
+   return obj.URLs.List(), nil
+}
+
 func (r *studioResolver) Tags(ctx context.Context, obj *models.Studio) (ret []*models.Tag, err error) {
    if !obj.TagIDs.Loaded() {
        if err := r.withReadTxn(ctx, func(ctx context.Context) error {
@@ -114,6 +143,24 @@ func (r *studioResolver) MovieCount(ctx context.Context, obj *models.Studio, dep
    return r.GroupCount(ctx, obj, depth)
 }

+func (r *studioResolver) OCounter(ctx context.Context, obj *models.Studio) (ret *int, err error) {
+   var res_scene int
+   var res_image int
+   var res int
+   if err := r.withReadTxn(ctx, func(ctx context.Context) error {
+       res_scene, err = r.repository.Scene.OCountByStudioID(ctx, obj.ID)
+       if err != nil {
+           return err
+       }
+       res_image, err = r.repository.Image.OCountByStudioID(ctx, obj.ID)
+       return err
+   }); err != nil {
+       return nil, err
+   }
+   res = res_scene + res_image
+   return &res, nil
+}
+
 func (r *studioResolver) ParentStudio(ctx context.Context, obj *models.Studio) (ret *models.Studio, err error) {
    if obj.ParentID == nil {
        return nil, nil

@@ -54,6 +54,16 @@ func (r *tagResolver) Aliases(ctx context.Context, obj *models.Tag) (ret []strin
    return obj.Aliases.List(), nil
 }

+func (r *tagResolver) StashIds(ctx context.Context, obj *models.Tag) ([]*models.StashID, error) {
+   if err := r.withReadTxn(ctx, func(ctx context.Context) error {
+       return obj.LoadStashIDs(ctx, r.repository.Tag)
+   }); err != nil {
+       return nil, err
+   }
+
+   return stashIDsSliceToPtrSlice(obj.StashIDs.List()), nil
+}
+
 func (r *tagResolver) SceneCount(ctx context.Context, obj *models.Tag, depth *int) (ret int, err error) {
    if err := r.withReadTxn(ctx, func(ctx context.Context) error {
        ret, err = scene.CountByTagID(ctx, r.repository.Scene, obj.ID, depth)

@@ -150,6 +150,15 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen
        c.SetString(config.BackupDirectoryPath, *input.BackupDirectoryPath)
    }

+   existingDeleteTrashPath := c.GetDeleteTrashPath()
+   if input.DeleteTrashPath != nil && existingDeleteTrashPath != *input.DeleteTrashPath {
+       if err := validateDir(config.DeleteTrashPath, *input.DeleteTrashPath, true); err != nil {
+           return makeConfigGeneralResult(), err
+       }
+
+       c.SetString(config.DeleteTrashPath, *input.DeleteTrashPath)
+   }
+
    existingGeneratedPath := c.GetGeneratedPath()
    if input.GeneratedPath != nil && existingGeneratedPath != *input.GeneratedPath {
        if err := validateDir(config.Generated, *input.GeneratedPath, false); err != nil {
@@ -334,6 +343,10 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen
        logger.SetLogLevel(*input.LogLevel)
    }

+   if input.LogFileMaxSize != nil && *input.LogFileMaxSize != c.GetLogFileMaxSize() {
+       c.SetInt(config.LogFileMaxSize, *input.LogFileMaxSize)
+   }
+
    if input.Excludes != nil {
        for _, r := range input.Excludes {
            _, err := regexp.Compile(r)
@@ -445,6 +458,8 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen
 func (r *mutationResolver) ConfigureInterface(ctx context.Context, input ConfigInterfaceInput) (*ConfigInterfaceResult, error) {
    c := config.GetInstance()

+   r.setConfigBool(config.SFWContentMode, input.SfwContentMode)
+
    if input.MenuItems != nil {
        c.SetInterface(config.MenuItems, input.MenuItems)
    }
@@ -478,6 +493,8 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input ConfigI
        r.setConfigString(config.ImageLightboxScrollModeKey, (*string)(options.ScrollMode))

        r.setConfigInt(config.ImageLightboxScrollAttemptsBeforeChange, options.ScrollAttemptsBeforeChange)
+
+       r.setConfigBool(config.ImageLightboxDisableAnimation, options.DisableAnimation)
    }

    if input.CSS != nil {

@@ -149,7 +149,9 @@ func (r *mutationResolver) DeleteFiles(ctx context.Context, ids []string) (ret b
        return false, fmt.Errorf("converting ids: %w", err)
    }

-   fileDeleter := file.NewDeleter()
+   trashPath := manager.GetInstance().Config.GetDeleteTrashPath()
+
+   fileDeleter := file.NewDeleterWithTrash(trashPath)
    destroyer := &file.ZipDestroyer{
        FileDestroyer:   r.repository.File,
        FolderDestroyer: r.repository.Folder,

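file.NewDeleterWithTrash replaces file.NewDeleter here, taking the trash directory configured via deleteTrashPath. Its implementation in pkg/file is not part of this hunk; the sketch below only illustrates the idea described in the schema docs - move a file into the trash directory when one is set, delete it outright otherwise. All names are illustrative, and a real implementation would also need to handle cross-device moves and name collisions.

package main

import (
    "fmt"
    "os"
    "path/filepath"
)

// trashDeleter sketches a deleter that honours an optional trash directory.
type trashDeleter struct {
    trashPath string // empty means delete permanently
}

// remove moves the file into the trash directory if one is configured, otherwise deletes it.
func (d trashDeleter) remove(path string) error {
    if d.trashPath == "" {
        return os.Remove(path)
    }
    dest := filepath.Join(d.trashPath, filepath.Base(path))
    return os.Rename(path, dest)
}

func main() {
    d := trashDeleter{trashPath: os.TempDir()}
    fmt.Println(d.remove("/nonexistent/file")) // prints a "no such file" error
}
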
@@ -6,6 +6,7 @@ import (
    "fmt"
    "os"
    "strconv"
+   "strings"

    "github.com/stashapp/stash/internal/manager"
    "github.com/stashapp/stash/pkg/file"
@@ -43,7 +44,7 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat
    // Populate a new gallery from the input
    newGallery := models.NewGallery()

-   newGallery.Title = input.Title
+   newGallery.Title = strings.TrimSpace(input.Title)
    newGallery.Code = translator.string(input.Code)
    newGallery.Details = translator.string(input.Details)
    newGallery.Photographer = translator.string(input.Photographer)
@@ -74,9 +75,9 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat
    }

    if input.Urls != nil {
-       newGallery.URLs = models.NewRelatedStrings(input.Urls)
+       newGallery.URLs = models.NewRelatedStrings(stringslice.TrimSpace(input.Urls))
    } else if input.URL != nil {
-       newGallery.URLs = models.NewRelatedStrings([]string{*input.URL})
+       newGallery.URLs = models.NewRelatedStrings([]string{strings.TrimSpace(*input.URL)})
    }

    // Start the transaction and save the gallery
@@ -333,10 +334,12 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
        return false, fmt.Errorf("converting ids: %w", err)
    }

+   trashPath := manager.GetInstance().Config.GetDeleteTrashPath()
+
    var galleries []*models.Gallery
    var imgsDestroyed []*models.Image
    fileDeleter := &image.FileDeleter{
-       Deleter: file.NewDeleter(),
+       Deleter: file.NewDeleterWithTrash(trashPath),
        Paths:   manager.GetInstance().Paths,
    }

@@ -4,6 +4,7 @@ import (
    "context"
    "fmt"
    "strconv"
+   "strings"

    "github.com/stashapp/stash/internal/static"
    "github.com/stashapp/stash/pkg/group"
@@ -21,7 +22,7 @@ func groupFromGroupCreateInput(ctx context.Context, input GroupCreateInput) (*mo
    // Populate a new group from the input
    newGroup := models.NewGroup()

-   newGroup.Name = input.Name
+   newGroup.Name = strings.TrimSpace(input.Name)
    newGroup.Aliases = translator.string(input.Aliases)
    newGroup.Duration = input.Duration
    newGroup.Rating = input.Rating100
@@ -55,7 +56,7 @@ func groupFromGroupCreateInput(ctx context.Context, input GroupCreateInput) (*mo
    }

    if input.Urls != nil {
-       newGroup.URLs = models.NewRelatedStrings(input.Urls)
+       newGroup.URLs = models.NewRelatedStrings(stringslice.TrimSpace(input.Urls))
    }

    return &newGroup, nil

@@ -308,9 +308,11 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
        return false, fmt.Errorf("converting id: %w", err)
    }

+   trashPath := manager.GetInstance().Config.GetDeleteTrashPath()
+
    var i *models.Image
    fileDeleter := &image.FileDeleter{
-       Deleter: file.NewDeleter(),
+       Deleter: file.NewDeleterWithTrash(trashPath),
        Paths:   manager.GetInstance().Paths,
    }
    if err := r.withTxn(ctx, func(ctx context.Context) error {
@@ -348,9 +350,11 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
        return false, fmt.Errorf("converting ids: %w", err)
    }

+   trashPath := manager.GetInstance().Config.GetDeleteTrashPath()
+
    var images []*models.Image
    fileDeleter := &image.FileDeleter{
-       Deleter: file.NewDeleter(),
+       Deleter: file.NewDeleterWithTrash(trashPath),
        Paths:   manager.GetInstance().Paths,
    }
    if err := r.withTxn(ctx, func(ctx context.Context) error {

@@ -4,6 +4,7 @@ import (
    "context"
    "fmt"
    "strconv"
+   "strings"

    "github.com/stashapp/stash/internal/static"
    "github.com/stashapp/stash/pkg/models"
@@ -32,7 +33,7 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInp
    // Populate a new group from the input
    newGroup := models.NewGroup()

-   newGroup.Name = input.Name
+   newGroup.Name = strings.TrimSpace(input.Name)
    newGroup.Aliases = translator.string(input.Aliases)
    newGroup.Duration = input.Duration
    newGroup.Rating = input.Rating100
@@ -56,9 +57,9 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInp
    }

    if input.Urls != nil {
-       newGroup.URLs = models.NewRelatedStrings(input.Urls)
+       newGroup.URLs = models.NewRelatedStrings(stringslice.TrimSpace(input.Urls))
    } else if input.URL != nil {
-       newGroup.URLs = models.NewRelatedStrings([]string{*input.URL})
+       newGroup.URLs = models.NewRelatedStrings([]string{strings.TrimSpace(*input.URL)})
    }

    // Process the base 64 encoded image string

@@ -4,6 +4,7 @@ import (
    "context"
    "fmt"
    "strconv"
+   "strings"

    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/performer"
@@ -37,9 +38,9 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
    // Populate a new performer from the input
    newPerformer := models.NewPerformer()

-   newPerformer.Name = input.Name
+   newPerformer.Name = strings.TrimSpace(input.Name)
    newPerformer.Disambiguation = translator.string(input.Disambiguation)
-   newPerformer.Aliases = models.NewRelatedStrings(input.AliasList)
+   newPerformer.Aliases = models.NewRelatedStrings(stringslice.TrimSpace(input.AliasList))
    newPerformer.Gender = input.Gender
    newPerformer.Ethnicity = translator.string(input.Ethnicity)
    newPerformer.Country = translator.string(input.Country)
@@ -62,17 +63,17 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per

    newPerformer.URLs = models.NewRelatedStrings([]string{})
    if input.URL != nil {
-       newPerformer.URLs.Add(*input.URL)
+       newPerformer.URLs.Add(strings.TrimSpace(*input.URL))
    }
    if input.Twitter != nil {
-       newPerformer.URLs.Add(utils.URLFromHandle(*input.Twitter, twitterURL))
+       newPerformer.URLs.Add(utils.URLFromHandle(strings.TrimSpace(*input.Twitter), twitterURL))
    }
    if input.Instagram != nil {
-       newPerformer.URLs.Add(utils.URLFromHandle(*input.Instagram, instagramURL))
+       newPerformer.URLs.Add(utils.URLFromHandle(strings.TrimSpace(*input.Instagram), instagramURL))
    }

    if input.Urls != nil {
-       newPerformer.URLs.Add(input.Urls...)
+       newPerformer.URLs.Add(stringslice.TrimSpace(input.Urls)...)
    }

    var err error
@@ -296,10 +297,7 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
        return nil, fmt.Errorf("converting tag ids: %w", err)
    }

-   updatedPerformer.CustomFields = input.CustomFields
-   // convert json.Numbers to int/float
-   updatedPerformer.CustomFields.Full = convertMapJSONNumbers(updatedPerformer.CustomFields.Full)
-   updatedPerformer.CustomFields.Partial = convertMapJSONNumbers(updatedPerformer.CustomFields.Partial)
+   updatedPerformer.CustomFields = handleUpdateCustomFields(input.CustomFields)

    var imageData []byte
    imageIncluded := translator.hasField("image")
@@ -416,6 +414,10 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
        return nil, fmt.Errorf("converting tag ids: %w", err)
    }

+   if input.CustomFields != nil {
+       updatedPerformer.CustomFields = handleUpdateCustomFields(*input.CustomFields)
+   }
+
    ret := []*models.Performer{}

    // Start the transaction and save the performers

@@ -32,7 +32,7 @@ func (r *mutationResolver) SaveFilter(ctx context.Context, input SaveFilterInput

 	f := models.SavedFilter{
 		Mode:         input.Mode,
-		Name:         input.Name,
+		Name:         strings.TrimSpace(input.Name),
 		FindFilter:   input.FindFilter,
 		ObjectFilter: input.ObjectFilter,
 		UIOptions:    input.UIOptions,
@@ -5,6 +5,7 @@ import (
 	"errors"
 	"fmt"
 	"strconv"
+	"strings"
 	"time"

 	"github.com/stashapp/stash/internal/manager"

@@ -62,9 +63,9 @@ func (r *mutationResolver) SceneCreate(ctx context.Context, input models.SceneCr
 	}

 	if input.Urls != nil {
-		newScene.URLs = models.NewRelatedStrings(input.Urls)
+		newScene.URLs = models.NewRelatedStrings(stringslice.TrimSpace(input.Urls))
 	} else if input.URL != nil {
-		newScene.URLs = models.NewRelatedStrings([]string{*input.URL})
+		newScene.URLs = models.NewRelatedStrings([]string{strings.TrimSpace(*input.URL)})
 	}

 	newScene.PerformerIDs, err = translator.relatedIds(input.PerformerIds)

@@ -428,10 +429,11 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
 	}

 	fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
+	trashPath := manager.GetInstance().Config.GetDeleteTrashPath()

 	var s *models.Scene
 	fileDeleter := &scene.FileDeleter{
-		Deleter:        file.NewDeleter(),
+		Deleter:        file.NewDeleterWithTrash(trashPath),
 		FileNamingAlgo: fileNamingAlgo,
 		Paths:          manager.GetInstance().Paths,
 	}

@@ -482,9 +484,10 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene

 	var scenes []*models.Scene
 	fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
+	trashPath := manager.GetInstance().Config.GetDeleteTrashPath()

 	fileDeleter := &scene.FileDeleter{
-		Deleter:        file.NewDeleter(),
+		Deleter:        file.NewDeleterWithTrash(trashPath),
 		FileNamingAlgo: fileNamingAlgo,
 		Paths:          manager.GetInstance().Paths,
 	}

@@ -593,8 +596,9 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput
 	}

 	mgr := manager.GetInstance()
+	trashPath := mgr.Config.GetDeleteTrashPath()
 	fileDeleter := &scene.FileDeleter{
-		Deleter:        file.NewDeleter(),
+		Deleter:        file.NewDeleterWithTrash(trashPath),
 		FileNamingAlgo: mgr.Config.GetVideoFileNamingAlgorithm(),
 		Paths:          mgr.Paths,
 	}
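The SceneDestroy, ScenesDestroy and SceneMerge hunks above (and the marker and clean hunks later in this diff) replace file.NewDeleter() with file.NewDeleterWithTrash(trashPath), fed by the new delete_trash_path setting. A minimal sketch of the presumed idea — an assumption about the behaviour, not the actual stash implementation — is that an empty trash path falls back to a plain remove, while a configured path moves the file there instead:

package main

import (
    "fmt"
    "os"
    "path/filepath"
)

// disposeFile illustrates the assumed trash-aware deletion: remove outright
// when no trash path is configured, otherwise move the file into that
// directory. A real implementation would also need to handle name clashes
// and cross-filesystem moves, which os.Rename alone cannot do.
func disposeFile(path, trashPath string) error {
    if trashPath == "" {
        return os.Remove(path)
    }
    dest := filepath.Join(trashPath, filepath.Base(path))
    return os.Rename(path, dest)
}

func main() {
    if err := disposeFile("/tmp/example.mp4", os.Getenv("STASH_TRASH_PATH")); err != nil {
        fmt.Println("dispose failed:", err)
    }
}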
@@ -650,7 +654,7 @@ func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input SceneMar
 	// Populate a new scene marker from the input
 	newMarker := models.NewSceneMarker()

-	newMarker.Title = input.Title
+	newMarker.Title = strings.TrimSpace(input.Title)
 	newMarker.Seconds = input.Seconds
 	newMarker.PrimaryTagID = primaryTagID
 	newMarker.SceneID = sceneID

@@ -736,9 +740,10 @@ func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input SceneMar
 	}

 	mgr := manager.GetInstance()
+	trashPath := mgr.Config.GetDeleteTrashPath()

 	fileDeleter := &scene.FileDeleter{
-		Deleter:        file.NewDeleter(),
+		Deleter:        file.NewDeleterWithTrash(trashPath),
 		FileNamingAlgo: mgr.Config.GetVideoFileNamingAlgorithm(),
 		Paths:          mgr.Paths,
 	}
@@ -820,6 +825,123 @@ func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input SceneMar
 	return r.getSceneMarker(ctx, markerID)
 }

+func (r *mutationResolver) BulkSceneMarkerUpdate(ctx context.Context, input BulkSceneMarkerUpdateInput) ([]*models.SceneMarker, error) {
+	ids, err := stringslice.StringSliceToIntSlice(input.Ids)
+	if err != nil {
+		return nil, fmt.Errorf("converting ids: %w", err)
+	}
+
+	translator := changesetTranslator{
+		inputMap: getUpdateInputMap(ctx),
+	}
+
+	// Populate performer from the input
+	partial := models.NewSceneMarkerPartial()
+
+	partial.Title = translator.optionalString(input.Title, "title")
+
+	partial.PrimaryTagID, err = translator.optionalIntFromString(input.PrimaryTagID, "primary_tag_id")
+	if err != nil {
+		return nil, fmt.Errorf("converting primary tag id: %w", err)
+	}
+
+	partial.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids")
+	if err != nil {
+		return nil, fmt.Errorf("converting tag ids: %w", err)
+	}
+
+	ret := []*models.SceneMarker{}
+
+	// Start the transaction and save the performers
+	if err := r.withTxn(ctx, func(ctx context.Context) error {
+		qb := r.repository.SceneMarker
+
+		for _, id := range ids {
+			l := partial
+
+			if err := adjustMarkerPartialForTagExclusion(ctx, r.repository.SceneMarker, id, &l); err != nil {
+				return err
+			}
+
+			updated, err := qb.UpdatePartial(ctx, id, l)
+			if err != nil {
+				return err
+			}
+
+			ret = append(ret, updated)
+		}
+
+		return nil
+	}); err != nil {
+		return nil, err
+	}
+
+	// execute post hooks outside of txn
+	var newRet []*models.SceneMarker
+	for _, m := range ret {
+		r.hookExecutor.ExecutePostHooks(ctx, m.ID, hook.SceneMarkerUpdatePost, input, translator.getFields())
+
+		m, err = r.getSceneMarker(ctx, m.ID)
+		if err != nil {
+			return nil, err
+		}
+
+		newRet = append(newRet, m)
+	}
+
+	return newRet, nil
+}
+
+// adjustMarkerPartialForTagExclusion adjusts the SceneMarkerPartial to exclude the primary tag from tag updates.
+func adjustMarkerPartialForTagExclusion(ctx context.Context, r models.SceneMarkerReader, id int, partial *models.SceneMarkerPartial) error {
+	if partial.TagIDs == nil && !partial.PrimaryTagID.Set {
+		return nil
+	}
+
+	// exclude primary tag from tag updates
+	var primaryTagID int
+	if partial.PrimaryTagID.Set {
+		primaryTagID = partial.PrimaryTagID.Value
+	} else {
+		existing, err := r.Find(ctx, id)
+		if err != nil {
+			return fmt.Errorf("finding existing primary tag id: %w", err)
+		}
+
+		primaryTagID = existing.PrimaryTagID
+	}
+
+	existingTagIDs, err := r.GetTagIDs(ctx, id)
+	if err != nil {
+		return fmt.Errorf("getting existing tag ids: %w", err)
+	}
+
+	tagIDAttr := partial.TagIDs
+
+	if tagIDAttr == nil {
+		tagIDAttr = &models.UpdateIDs{
+			IDs:  existingTagIDs,
+			Mode: models.RelationshipUpdateModeSet,
+		}
+	}
+
+	newTagIDs := tagIDAttr.Apply(existingTagIDs)
+	// Remove primary tag from newTagIDs if present
+	newTagIDs = sliceutil.Exclude(newTagIDs, []int{primaryTagID})
+
+	if len(existingTagIDs) != len(newTagIDs) {
+		partial.TagIDs = &models.UpdateIDs{
+			IDs:  newTagIDs,
+			Mode: models.RelationshipUpdateModeSet,
+		}
+	} else {
+		// no change to tags required
+		partial.TagIDs = nil
+	}
+
+	return nil
+}
+
 func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (bool, error) {
 	return r.SceneMarkersDestroy(ctx, []string{id})
 }

@@ -832,9 +954,10 @@ func (r *mutationResolver) SceneMarkersDestroy(ctx context.Context, markerIDs []

 	var markers []*models.SceneMarker
 	fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
+	trashPath := manager.GetInstance().Config.GetDeleteTrashPath()

 	fileDeleter := &scene.FileDeleter{
-		Deleter:        file.NewDeleter(),
+		Deleter:        file.NewDeleterWithTrash(trashPath),
 		FileNamingAlgo: fileNamingAlgo,
 		Paths:          manager.GetInstance().Paths,
 	}
@@ -39,7 +39,7 @@ func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input
 }

 func (r *mutationResolver) StashBoxBatchPerformerTag(ctx context.Context, input manager.StashBoxBatchTagInput) (string, error) {
-	b, err := resolveStashBoxBatchTagInput(input.Endpoint, input.StashBoxEndpoint)
+	b, err := resolveStashBoxBatchTagInput(input.Endpoint, input.StashBoxEndpoint) //nolint:staticcheck
 	if err != nil {
 		return "", err
 	}

@@ -49,7 +49,7 @@ func (r *mutationResolver) StashBoxBatchPerformerTag(ctx context.Context, input
 }

 func (r *mutationResolver) StashBoxBatchStudioTag(ctx context.Context, input manager.StashBoxBatchTagInput) (string, error) {
-	b, err := resolveStashBoxBatchTagInput(input.Endpoint, input.StashBoxEndpoint)
+	b, err := resolveStashBoxBatchTagInput(input.Endpoint, input.StashBoxEndpoint) //nolint:staticcheck
 	if err != nil {
 		return "", err
 	}

@@ -153,6 +153,14 @@ func (r *mutationResolver) makeSceneDraft(ctx context.Context, s *models.Scene,
 		return nil, err
 	}

+	// Load StashIDs for tags
+	tqb := r.repository.Tag
+	for _, t := range draft.Tags {
+		if err := t.LoadStashIDs(ctx, tqb); err != nil {
+			return nil, err
+		}
+	}
+
 	draft.Cover = cover

 	return draft, nil
@@ -4,6 +4,7 @@ import (
 	"context"
 	"fmt"
 	"strconv"
+	"strings"

 	"github.com/stashapp/stash/pkg/models"
 	"github.com/stashapp/stash/pkg/plugin/hook"

@@ -32,17 +33,25 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
 	// Populate a new studio from the input
 	newStudio := models.NewStudio()

-	newStudio.Name = input.Name
-	newStudio.URL = translator.string(input.URL)
+	newStudio.Name = strings.TrimSpace(input.Name)
 	newStudio.Rating = input.Rating100
 	newStudio.Favorite = translator.bool(input.Favorite)
 	newStudio.Details = translator.string(input.Details)
 	newStudio.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag)
-	newStudio.Aliases = models.NewRelatedStrings(input.Aliases)
+	newStudio.Aliases = models.NewRelatedStrings(stringslice.TrimSpace(input.Aliases))
 	newStudio.StashIDs = models.NewRelatedStashIDs(models.StashIDInputs(input.StashIds).ToStashIDs())

 	var err error

+	newStudio.URLs = models.NewRelatedStrings([]string{})
+	if input.URL != nil {
+		newStudio.URLs.Add(strings.TrimSpace(*input.URL))
+	}
+
+	if input.Urls != nil {
+		newStudio.URLs.Add(stringslice.TrimSpace(input.Urls)...)
+	}
+
 	newStudio.ParentID, err = translator.intPtrFromString(input.ParentID)
 	if err != nil {
 		return nil, fmt.Errorf("converting parent id: %w", err)
@@ -106,7 +115,6 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio

 	updatedStudio.ID = studioID
 	updatedStudio.Name = translator.optionalString(input.Name, "name")
-	updatedStudio.URL = translator.optionalString(input.URL, "url")
 	updatedStudio.Details = translator.optionalString(input.Details, "details")
 	updatedStudio.Rating = translator.optionalInt(input.Rating100, "rating100")
 	updatedStudio.Favorite = translator.optionalBool(input.Favorite, "favorite")

@@ -124,6 +132,26 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
 		return nil, fmt.Errorf("converting tag ids: %w", err)
 	}

+	if translator.hasField("urls") {
+		// ensure url not included in the input
+		if err := r.validateNoLegacyURLs(translator); err != nil {
+			return nil, err
+		}
+
+		updatedStudio.URLs = translator.updateStrings(input.Urls, "urls")
+	} else if translator.hasField("url") {
+		// handle legacy url field
+		legacyURLs := []string{}
+		if input.URL != nil {
+			legacyURLs = append(legacyURLs, *input.URL)
+		}
+
+		updatedStudio.URLs = &models.UpdateStrings{
+			Mode:   models.RelationshipUpdateModeSet,
+			Values: legacyURLs,
+		}
+	}
+
 	// Process the base 64 encoded image string
 	var imageData []byte
 	imageIncluded := translator.hasField("image")
@@ -163,6 +191,96 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
 	return r.getStudio(ctx, studioID)
 }

+func (r *mutationResolver) BulkStudioUpdate(ctx context.Context, input BulkStudioUpdateInput) ([]*models.Studio, error) {
+	ids, err := stringslice.StringSliceToIntSlice(input.Ids)
+	if err != nil {
+		return nil, fmt.Errorf("converting ids: %w", err)
+	}
+
+	translator := changesetTranslator{
+		inputMap: getUpdateInputMap(ctx),
+	}
+
+	// Populate performer from the input
+	partial := models.NewStudioPartial()
+
+	partial.ParentID, err = translator.optionalIntFromString(input.ParentID, "parent_id")
+	if err != nil {
+		return nil, fmt.Errorf("converting parent id: %w", err)
+	}
+
+	if translator.hasField("urls") {
+		// ensure url/twitter/instagram are not included in the input
+		if err := r.validateNoLegacyURLs(translator); err != nil {
+			return nil, err
+		}
+
+		partial.URLs = translator.updateStringsBulk(input.Urls, "urls")
+	} else if translator.hasField("url") {
+		// handle legacy url field
+		legacyURLs := []string{}
+		if input.URL != nil {
+			legacyURLs = append(legacyURLs, *input.URL)
+		}
+
+		partial.URLs = &models.UpdateStrings{
+			Mode:   models.RelationshipUpdateModeSet,
+			Values: legacyURLs,
+		}
+	}
+
+	partial.Favorite = translator.optionalBool(input.Favorite, "favorite")
+	partial.Rating = translator.optionalInt(input.Rating100, "rating100")
+	partial.Details = translator.optionalString(input.Details, "details")
+	partial.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag")
+
+	partial.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids")
+	if err != nil {
+		return nil, fmt.Errorf("converting tag ids: %w", err)
+	}
+
+	ret := []*models.Studio{}
+
+	// Start the transaction and save the performers
+	if err := r.withTxn(ctx, func(ctx context.Context) error {
+		qb := r.repository.Studio
+
+		for _, id := range ids {
+			local := partial
+			local.ID = id
+			if err := studio.ValidateModify(ctx, local, qb); err != nil {
+				return err
+			}
+
+			updated, err := qb.UpdatePartial(ctx, local)
+			if err != nil {
+				return err
+			}
+
+			ret = append(ret, updated)
+		}
+
+		return nil
+	}); err != nil {
+		return nil, err
+	}
+
+	// execute post hooks outside of txn
+	var newRet []*models.Studio
+	for _, studio := range ret {
+		r.hookExecutor.ExecutePostHooks(ctx, studio.ID, hook.StudioUpdatePost, input, translator.getFields())
+
+		studio, err = r.getStudio(ctx, studio.ID)
+		if err != nil {
+			return nil, err
+		}
+
+		newRet = append(newRet, studio)
+	}
+
+	return newRet, nil
+}
+
 func (r *mutationResolver) StudioDestroy(ctx context.Context, input StudioDestroyInput) (bool, error) {
 	id, err := strconv.Atoi(input.ID)
 	if err != nil {
@@ -4,6 +4,7 @@ import (
 	"context"
 	"fmt"
 	"strconv"
+	"strings"

 	"github.com/stashapp/stash/pkg/logger"
 	"github.com/stashapp/stash/pkg/models"

@@ -32,13 +33,21 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput)
 	// Populate a new tag from the input
 	newTag := models.NewTag()

-	newTag.Name = input.Name
+	newTag.Name = strings.TrimSpace(input.Name)
 	newTag.SortName = translator.string(input.SortName)
-	newTag.Aliases = models.NewRelatedStrings(input.Aliases)
+	newTag.Aliases = models.NewRelatedStrings(stringslice.TrimSpace(input.Aliases))
 	newTag.Favorite = translator.bool(input.Favorite)
 	newTag.Description = translator.string(input.Description)
 	newTag.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag)

+	var stashIDInputs models.StashIDInputs
+	for _, sid := range input.StashIds {
+		if sid != nil {
+			stashIDInputs = append(stashIDInputs, *sid)
+		}
+	}
+	newTag.StashIDs = models.NewRelatedStashIDs(stashIDInputs.ToStashIDs())
+
 	var err error

 	newTag.ParentIDs, err = translator.relatedIds(input.ParentIds)

@@ -110,6 +119,14 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput)

 	updatedTag.Aliases = translator.updateStrings(input.Aliases, "aliases")

+	var updateStashIDInputs models.StashIDInputs
+	for _, sid := range input.StashIds {
+		if sid != nil {
+			updateStashIDInputs = append(updateStashIDInputs, *sid)
+		}
+	}
+	updatedTag.StashIDs = translator.updateStashIDs(updateStashIDInputs, "stash_ids")
+
 	updatedTag.ParentIDs, err = translator.updateIds(input.ParentIds, "parent_ids")
 	if err != nil {
 		return nil, fmt.Errorf("converting parent tag ids: %w", err)
@@ -82,6 +82,7 @@ func makeConfigGeneralResult() *ConfigGeneralResult {
 		Stashes:             config.GetStashPaths(),
 		DatabasePath:        config.GetDatabasePath(),
 		BackupDirectoryPath: config.GetBackupDirectoryPath(),
+		DeleteTrashPath:     config.GetDeleteTrashPath(),
 		GeneratedPath:       config.GetGeneratedPath(),
 		MetadataPath:        config.GetMetadataPath(),
 		ConfigFilePath:      config.GetConfigFile(),

@@ -115,6 +116,7 @@ func makeConfigGeneralResult() *ConfigGeneralResult {
 		LogOut:            config.GetLogOut(),
 		LogLevel:          config.GetLogLevel(),
 		LogAccess:         config.GetLogAccess(),
+		LogFileMaxSize:    config.GetLogFileMaxSize(),
 		VideoExtensions:   config.GetVideoExtensions(),
 		ImageExtensions:   config.GetImageExtensions(),
 		GalleryExtensions: config.GetGalleryExtensions(),

@@ -162,6 +164,7 @@ func makeConfigInterfaceResult() *ConfigInterfaceResult {
 	disableDropdownCreate := config.GetDisableDropdownCreate()

 	return &ConfigInterfaceResult{
+		SfwContentMode: config.GetSFWContentMode(),
 		MenuItems:      menuItems,
 		SoundOnPreview: &soundOnPreview,
 		WallShowTitle:  &wallShowTitle,
@@ -29,7 +29,7 @@ func (r *queryResolver) FindFile(ctx context.Context, id *string, path *string)
 			ret = files[0]
 		}
 	case path != nil:
-		ret, err = qb.FindByPath(ctx, *path)
+		ret, err = qb.FindByPath(ctx, *path, true)
 		if err == nil && ret == nil {
 			return errors.New("file not found")
 		}

@@ -25,7 +25,7 @@ func (r *queryResolver) FindFolder(ctx context.Context, id *string, path *string
 			return err
 		}
 	case path != nil:
-		ret, err = qb.FindByPath(ctx, *path)
+		ret, err = qb.FindByPath(ctx, *path, true)
 		if err == nil && ret == nil {
 			return errors.New("folder not found")
 		}
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO - this should happen after any scene is scraped
|
// TODO - this should happen after any scene is scraped
|
||||||
if err := r.matchScenesRelationships(ctx, ret, *source.StashBoxEndpoint); err != nil {
|
if err := r.matchScenesRelationships(ctx, ret, b.Endpoint); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
default:
|
default:
|
||||||
|
|
@ -245,7 +245,7 @@ func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source scraper.So
|
||||||
// just flatten the slice and pass it in
|
// just flatten the slice and pass it in
|
||||||
flat := sliceutil.Flatten(ret)
|
flat := sliceutil.Flatten(ret)
|
||||||
|
|
||||||
if err := r.matchScenesRelationships(ctx, flat, *source.StashBoxEndpoint); err != nil {
|
if err := r.matchScenesRelationships(ctx, flat, b.Endpoint); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -335,7 +335,7 @@ func (r *queryResolver) ScrapeSingleStudio(ctx context.Context, source scraper.S
|
||||||
if len(ret) > 0 {
|
if len(ret) > 0 {
|
||||||
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
|
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
|
||||||
for _, studio := range ret {
|
for _, studio := range ret {
|
||||||
if err := match.ScrapedStudioHierarchy(ctx, r.repository.Studio, studio, *source.StashBoxEndpoint); err != nil {
|
if err := match.ScrapedStudioHierarchy(ctx, r.repository.Studio, studio, b.Endpoint); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -350,7 +350,46 @@ func (r *queryResolver) ScrapeSingleStudio(ctx context.Context, source scraper.S
|
||||||
return nil, nil
|
return nil, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, errors.New("stash_box_index must be set")
|
return nil, errors.New("stash_box_endpoint must be set")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *queryResolver) ScrapeSingleTag(ctx context.Context, source scraper.Source, input ScrapeSingleTagInput) ([]*models.ScrapedTag, error) {
|
||||||
|
if source.StashBoxIndex != nil || source.StashBoxEndpoint != nil {
|
||||||
|
b, err := resolveStashBox(source.StashBoxIndex, source.StashBoxEndpoint)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
client := r.newStashBoxClient(*b)
|
||||||
|
|
||||||
|
var ret []*models.ScrapedTag
|
||||||
|
out, err := client.QueryTag(ctx, *input.Query)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
} else if out != nil {
|
||||||
|
ret = append(ret, out...)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(ret) > 0 {
|
||||||
|
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
|
||||||
|
for _, tag := range ret {
|
||||||
|
if err := match.ScrapedTag(ctx, r.repository.Tag, tag, b.Endpoint); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return ret, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, errors.New("stash_box_endpoint must be set")
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source scraper.Source, input ScrapeSinglePerformerInput) ([]*models.ScrapedPerformer, error) {
|
func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source scraper.Source, input ScrapeSinglePerformerInput) ([]*models.ScrapedPerformer, error) {
|
||||||
|
|
|
||||||
|
|
@@ -18,9 +18,9 @@ type PerformerFinder interface {
 	GetImage(ctx context.Context, performerID int) ([]byte, error)
 }

+type sfwConfig interface {
+	GetSFWContentMode() bool
+}
+
 type performerRoutes struct {
 	routes
 	performerFinder PerformerFinder
+	sfwConfig       sfwConfig
 }

@@ -54,7 +59,7 @@ func (rs performerRoutes) Image(w http.ResponseWriter, r *http.Request) {
 	}

 	if len(image) == 0 {
-		image = getDefaultPerformerImage(performer.Name, performer.Gender)
+		image = getDefaultPerformerImage(performer.Name, performer.Gender, rs.sfwConfig.GetSFWContentMode())
 	}

 	utils.ServeImage(w, r, image)

@@ -322,6 +322,7 @@ func (s *Server) getPerformerRoutes() chi.Router {
 	return performerRoutes{
 		routes:          routes{txnManager: repo.TxnManager},
 		performerFinder: repo.Performer,
+		sfwConfig:       s.manager.Config,
 	}.Routes()
 }
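The performer routes now depend on a one-method sfwConfig interface rather than the whole configuration type, so the handler only sees the setting it actually reads. A small illustration of that pattern, using hypothetical names rather than the real stash types:

package main

import "fmt"

// sfwConfig mirrors the narrow interface added in the diff: the route only
// needs one config method, so it depends on that instead of the full config.
type sfwConfig interface {
    GetSFWContentMode() bool
}

// appConfig is a hypothetical stand-in for the real configuration struct;
// it satisfies sfwConfig implicitly.
type appConfig struct {
    sfw bool
}

func (c appConfig) GetSFWContentMode() bool { return c.sfw }

// pickImage shows the consumer side: it only sees the one method it needs.
func pickImage(cfg sfwConfig) string {
    if cfg.GetSFWContentMode() {
        return "neutral-placeholder.png"
    }
    return "default-performer.png"
}

func main() {
    fmt.Println(pickImage(appConfig{sfw: true})) // neutral-placeholder.png
}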
@@ -225,7 +225,7 @@ func createSceneFile(ctx context.Context, name string, folderStore models.Folder
 }

 func getOrCreateFolder(ctx context.Context, folderStore models.FolderFinderCreator, folderPath string) (*models.Folder, error) {
-	f, err := folderStore.FindByPath(ctx, folderPath)
+	f, err := folderStore.FindByPath(ctx, folderPath, true)
 	if err != nil {
 		return nil, fmt.Errorf("getting folder by path: %w", err)
 	}
|
||||||
package desktop
|
package desktop
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"runtime"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/kermieisinthehouse/systray"
|
"github.com/kermieisinthehouse/systray"
|
||||||
|
|
@ -20,7 +21,12 @@ func startSystray(exit chan int, faviconProvider FaviconProvider) {
|
||||||
// system is started from a non-terminal method, e.g. double-clicking an icon.
|
// system is started from a non-terminal method, e.g. double-clicking an icon.
|
||||||
c := config.GetInstance()
|
c := config.GetInstance()
|
||||||
if c.GetShowOneTimeMovedNotification() {
|
if c.GetShowOneTimeMovedNotification() {
|
||||||
SendNotification("Stash has moved!", "Stash now runs in your tray, instead of a terminal window.")
|
// Use platform-appropriate terminology
|
||||||
|
location := "tray"
|
||||||
|
if runtime.GOOS == "darwin" {
|
||||||
|
location = "menu bar"
|
||||||
|
}
|
||||||
|
SendNotification("Stash has moved!", "Stash now runs in your "+location+", instead of a terminal window.")
|
||||||
c.SetBool(config.ShowOneTimeMovedNotification, false)
|
c.SetBool(config.ShowOneTimeMovedNotification, false)
|
||||||
if err := c.Write(); err != nil {
|
if err := c.Write(); err != nil {
|
||||||
logger.Errorf("Error while writing configuration file: %v", err)
|
logger.Errorf("Error while writing configuration file: %v", err)
|
||||||
|
|
|
||||||
|
|
@@ -27,7 +27,7 @@ import (
 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 // THE SOFTWARE.

-const defaultProtocolInfo = "http-get:*:video/mpeg:*,http-get:*:video/mp4:*,http-get:*:video/vnd.dlna.mpeg-tts:*,http-get:*:video/avi:*,http-get:*:video/x-matroska:*,http-get:*:video/x-ms-wmv:*,http-get:*:video/wtv:*,http-get:*:audio/mpeg:*,http-get:*:audio/mp3:*,http-get:*:audio/mp4:*,http-get:*:audio/x-ms-wma*,http-get:*:audio/wav:*,http-get:*:audio/L16:*,http-get:*image/jpeg:*,http-get:*image/png:*,http-get:*image/gif:*,http-get:*image/tiff:*"
+const defaultProtocolInfo = "http-get:*:video/mpeg:*,http-get:*:video/mp4:*,http-get:*:video/vnd.dlna.mpeg-tts:*,http-get:*:video/avi:*,http-get:*:video/x-matroska:*,http-get:*:video/x-ms-wmv:*,http-get:*:video/wtv:*,http-get:*:audio/mpeg:*,http-get:*:audio/mp3:*,http-get:*:audio/mp4:*,http-get:*:audio/x-ms-wma*,http-get:*:audio/wav:*,http-get:*:audio/L16:*,http-get:*image/jpeg:*,http-get:*image/png:*,http-get:*image/gif:*,http-get:*image/tiff:*,http-get:*:image/avif:*"

 type connectionManagerService struct {
 	*Server
@@ -153,6 +153,8 @@ func (g sceneRelationships) tags(ctx context.Context) ([]int, error) {
 		tagIDs = originalTagIDs
 	}

+	endpoint := g.result.source.RemoteSite
+
 	for _, t := range scraped {
 		if t.StoredID != nil {
 			// existing tag, just add it

@@ -163,10 +165,9 @@ func (g sceneRelationships) tags(ctx context.Context) ([]int, error) {

 			tagIDs = sliceutil.AppendUnique(tagIDs, int(tagID))
 		} else if createMissing {
-			newTag := models.NewTag()
-			newTag.Name = t.Name
+			newTag := t.ToTag(endpoint, nil)

-			err := g.tagCreator.Create(ctx, &newTag)
+			err := g.tagCreator.Create(ctx, newTag)
 			if err != nil {
 				return nil, fmt.Errorf("error creating tag: %w", err)
 			}
@@ -3,12 +3,14 @@ package log

 import (
 	"fmt"
+	"io"
 	"os"
 	"strings"
 	"sync"
 	"time"

 	"github.com/sirupsen/logrus"
+	lumberjack "gopkg.in/natefinch/lumberjack.v2"
 )

 type LogItem struct {

@@ -41,8 +43,8 @@ func NewLogger() *Logger {
 }

 // Init initialises the logger based on a logging configuration
-func (log *Logger) Init(logFile string, logOut bool, logLevel string) {
-	var file *os.File
+func (log *Logger) Init(logFile string, logOut bool, logLevel string, logFileMaxSize int) {
+	var logger io.WriteCloser
 	customFormatter := new(logrus.TextFormatter)
 	customFormatter.TimestampFormat = "2006-01-02 15:04:05"
 	customFormatter.ForceColors = true

@@ -57,30 +59,38 @@ func (log *Logger) Init(logFile string, logOut bool, logLevel string) {
 	// the access log colouring not being applied
 	_, _ = customFormatter.Format(logrus.NewEntry(log.logger))

+	// if size is 0, disable rotation
 	if logFile != "" {
-		var err error
-		file, err = os.OpenFile(logFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644)
-		if err != nil {
-			fmt.Printf("Could not open '%s' for log output due to error: %s\n", logFile, err.Error())
+		if logFileMaxSize == 0 {
+			var err error
+			logger, err = os.OpenFile(logFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644)
+			if err != nil {
+				fmt.Fprintf(os.Stderr, "unable to open log file %s: %v\n", logFile, err)
+			}
+		} else {
+			logger = &lumberjack.Logger{
+				Filename: logFile,
+				MaxSize:  logFileMaxSize, // Megabytes
+				Compress: true,
+			}
 		}
 	}

-	if file != nil {
+	if logger != nil {
 		if logOut {
 			// log to file separately disabling colours
 			fileFormatter := new(logrus.TextFormatter)
 			fileFormatter.TimestampFormat = customFormatter.TimestampFormat
 			fileFormatter.FullTimestamp = customFormatter.FullTimestamp
 			log.logger.AddHook(&fileLogHook{
-				Writer:    file,
+				Writer:    logger,
 				Formatter: fileFormatter,
 			})
 		} else {
 			// logging to file only
 			// turn off the colouring for the file
 			customFormatter.ForceColors = false
-			log.logger.Out = file
+			log.logger.Out = logger
 		}
 	}
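The logger hunk above wires gopkg.in/natefinch/lumberjack.v2 in as the io.WriteCloser whenever logfile_max_size is non-zero. A standalone sketch of that library's usage, routed straight through logrus rather than through the hook-based setup stash uses:

package main

import (
    "github.com/sirupsen/logrus"
    lumberjack "gopkg.in/natefinch/lumberjack.v2"
)

func main() {
    log := logrus.New()

    // lumberjack.Logger is an io.WriteCloser that rotates the file once it
    // reaches MaxSize megabytes, matching how the diff wires it into Init
    // when logfile_max_size is non-zero. The filename here is a placeholder.
    log.SetOutput(&lumberjack.Logger{
        Filename: "stash.log",
        MaxSize:  10, // megabytes before rotation
        Compress: true,
    })

    log.Info("log output now rotates instead of growing without bound")
}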
@@ -16,9 +16,9 @@ import (

 	"golang.org/x/crypto/bcrypt"

-	"github.com/knadh/koanf"
 	"github.com/knadh/koanf/parsers/yaml"
 	"github.com/knadh/koanf/providers/file"
+	"github.com/knadh/koanf/v2"

 	"github.com/stashapp/stash/internal/identify"
 	"github.com/stashapp/stash/pkg/fsutil"

@@ -43,6 +43,9 @@ const (
 	Password      = "password"
 	MaxSessionAge = "max_session_age"

+	// SFWContentMode mode config key
+	SFWContentMode = "sfw_content_mode"
+
 	FFMpegPath  = "ffmpeg_path"
 	FFProbePath = "ffprobe_path"

@@ -206,6 +209,7 @@ const (
 	ImageLightboxResetZoomOnNav             = "image_lightbox.reset_zoom_on_nav"
 	ImageLightboxScrollModeKey              = "image_lightbox.scroll_mode"
 	ImageLightboxScrollAttemptsBeforeChange = "image_lightbox.scroll_attempts_before_change"
+	ImageLightboxDisableAnimation           = "image_lightbox.disable_animation"

 	UI = "ui"

@@ -249,13 +253,15 @@ const (
 	DLNAPortDefault = 1338

 	// Logging options
 	LogFile          = "logfile"
 	LogOut           = "logout"
 	defaultLogOut    = true
 	LogLevel         = "loglevel"
 	defaultLogLevel  = "Info"
 	LogAccess        = "logaccess"
 	defaultLogAccess = true
+	LogFileMaxSize        = "logfile_max_size"
+	defaultLogFileMaxSize = 0 // megabytes, default disabled

 	// Default settings
 	DefaultScanSettings = "defaults.scan_task"

@@ -267,6 +273,9 @@ const (
 	DeleteGeneratedDefault        = "defaults.delete_generated"
 	deleteGeneratedDefaultDefault = true

+	// Trash/Recycle Bin options
+	DeleteTrashPath = "delete_trash_path"
+
 	// Desktop Integration Options
 	NoBrowser        = "nobrowser"
 	NoBrowserDefault = false

@@ -285,7 +294,7 @@ const (
 // slice default values
 var (
 	defaultVideoExtensions   = []string{"m4v", "mp4", "mov", "wmv", "avi", "mpg", "mpeg", "rmvb", "rm", "flv", "asf", "mkv", "webm", "f4v"}
-	defaultImageExtensions   = []string{"png", "jpg", "jpeg", "gif", "webp"}
+	defaultImageExtensions   = []string{"png", "jpg", "jpeg", "gif", "webp", "avif"}
 	defaultGalleryExtensions = []string{"zip", "cbz"}
 	defaultMenuItems         = []string{"scenes", "images", "groups", "markers", "galleries", "performers", "studios", "tags"}
 )
@@ -628,7 +637,15 @@ func (i *Config) getStringMapString(key string) map[string]string {
 	return ret
 }

-// GetStathPaths returns the configured stash library paths.
+// GetSFW returns true if SFW mode is enabled.
+// Default performer images are changed to more agnostic images when enabled.
+func (i *Config) GetSFWContentMode() bool {
+	i.RLock()
+	defer i.RUnlock()
+	return i.getBool(SFWContentMode)
+}
+
+// GetStashPaths returns the configured stash library paths.
 // Works opposite to the usual case - it will return the override
 // value only if the main value is not set.
 func (i *Config) GetStashPaths() StashConfigs {

@@ -1280,6 +1297,10 @@ func (i *Config) GetImageLightboxOptions() ConfigImageLightboxResult {
 	if v := i.with(ImageLightboxScrollAttemptsBeforeChange); v != nil {
 		ret.ScrollAttemptsBeforeChange = v.Int(ImageLightboxScrollAttemptsBeforeChange)
 	}
+	if v := i.with(ImageLightboxDisableAnimation); v != nil {
+		value := v.Bool(ImageLightboxDisableAnimation)
+		ret.DisableAnimation = &value
+	}

 	return ret
 }

@@ -1456,6 +1477,14 @@ func (i *Config) GetDeleteGeneratedDefault() bool {
 	return i.getBoolDefault(DeleteGeneratedDefault, deleteGeneratedDefaultDefault)
 }

+func (i *Config) GetDeleteTrashPath() string {
+	return i.getString(DeleteTrashPath)
+}
+
+func (i *Config) SetDeleteTrashPath(value string) {
+	i.SetString(DeleteTrashPath, value)
+}
+
 // GetDefaultIdentifySettings returns the default Identify task settings.
 // Returns nil if the settings could not be unmarshalled, or if it
 // has not been set.

@@ -1625,6 +1654,16 @@ func (i *Config) GetLogAccess() bool {
 	return i.getBoolDefault(LogAccess, defaultLogAccess)
 }

+// GetLogFileMaxSize returns the maximum size of the log file in megabytes for lumberjack to rotate
+func (i *Config) GetLogFileMaxSize() int {
+	value := i.getInt(LogFileMaxSize)
+	if value < 0 {
+		value = defaultLogFileMaxSize
+	}
+
+	return value
+}
+
 // Max allowed graphql upload size in megabytes
 func (i *Config) GetMaxUploadSize() int64 {
 	i.RLock()
@@ -8,9 +8,9 @@ import (
 	"path/filepath"
 	"strings"

-	"github.com/knadh/koanf"
 	"github.com/knadh/koanf/providers/env"
 	"github.com/knadh/koanf/providers/posflag"
+	"github.com/knadh/koanf/v2"
 	"github.com/spf13/pflag"

 	"github.com/stashapp/stash/pkg/fsutil"
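Both config files swap github.com/knadh/koanf for the github.com/knadh/koanf/v2 module path; the provider and parser packages keep their old import paths. A minimal, standalone example of loading a YAML file with the v2 API (config.yml and the loglevel key are placeholders, not stash's actual wiring):

package main

import (
    "fmt"
    "log"

    "github.com/knadh/koanf/parsers/yaml"
    "github.com/knadh/koanf/providers/file"
    "github.com/knadh/koanf/v2"
)

func main() {
    // koanf v2 keeps the same New/Load API; only the core module path changes.
    k := koanf.New(".")
    if err := k.Load(file.Provider("config.yml"), yaml.Parser()); err != nil {
        log.Fatalf("loading config: %v", err)
    }
    fmt.Println("loglevel:", k.String("loglevel"))
}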
@@ -13,6 +13,7 @@ type ConfigImageLightboxResult struct {
 	ResetZoomOnNav             *bool                    `json:"resetZoomOnNav"`
 	ScrollMode                 *ImageLightboxScrollMode `json:"scrollMode"`
 	ScrollAttemptsBeforeChange int                      `json:"scrollAttemptsBeforeChange"`
+	DisableAnimation           *bool                    `json:"disableAnimation"`
 }

 type ImageLightboxDisplayMode string
@@ -219,8 +219,11 @@ func (s *Manager) Setup(ctx context.Context, input SetupInput) error {
 	// paths since they must not be relative. The config file property is
 	// resolved to an absolute path when stash is run normally, so convert
 	// relative paths to absolute paths during setup.
-	configFile, _ := filepath.Abs(input.ConfigLocation)
+	// #6287 - this should no longer be necessary since the ffmpeg code
+	// converts to absolute paths. Converting the config location to
+	// absolute means that scraper and plugin paths default to absolute
+	// which we don't want.
+	configFile := input.ConfigLocation
 	configDir := filepath.Dir(configFile)

 	if exists, _ := fsutil.DirExists(configDir); !exists {

@@ -262,6 +265,10 @@ func (s *Manager) Setup(ctx context.Context, input SetupInput) error {
 		cfg.SetString(config.Cache, input.CacheLocation)
 	}

+	if input.SFWContentMode {
+		cfg.SetBool(config.SFWContentMode, true)
+	}
+
 	if input.StoreBlobsInDatabase {
 		cfg.SetInterface(config.BlobsStorage, config.BlobStorageTypeDatabase)
 	} else {

@@ -322,6 +329,11 @@ func (s *Manager) BackupDatabase(download bool) (string, string, error) {
 		backupPath = f.Name()
 		backupName = s.Database.DatabaseBackupPath("")
 		f.Close()
+
+		// delete the temp file so that the backup operation can create it
+		if err := os.Remove(backupPath); err != nil {
+			return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err)
+		}
 	} else {
 		backupDir := s.Config.GetBackupDirectoryPathOrDefault()
 		if backupDir != "" {
@@ -294,6 +294,7 @@ func (s *Manager) Clean(ctx context.Context, input CleanMetadataInput) int {
 		Handlers: []file.CleanHandler{
 			&cleanHandler{},
 		},
+		TrashPath: s.Config.GetDeleteTrashPath(),
 	}

 	j := cleanJob{
@@ -364,9 +365,37 @@ func (s *Manager) MigrateHash(ctx context.Context) int {
 	return s.JobManager.Add(ctx, "Migrating scene hashes...", j)
 }

-// If neither ids nor names are set, tag all items
+// batchTagType indicates which batch tagging mode to use
+type batchTagType int
+
+const (
+	batchTagByIds batchTagType = iota
+	batchTagByNamesOrStashIds
+	batchTagAll
+)
+
+// getBatchTagType determines the batch tag mode based on the input
+func (input StashBoxBatchTagInput) getBatchTagType(hasPerformerFields bool) batchTagType {
+	switch {
+	case len(input.Ids) > 0:
+		return batchTagByIds
+	case hasPerformerFields && len(input.PerformerIds) > 0:
+		return batchTagByIds
+	case len(input.StashIDs) > 0 || len(input.Names) > 0:
+		return batchTagByNamesOrStashIds
+	case hasPerformerFields && len(input.PerformerNames) > 0:
+		return batchTagByNamesOrStashIds
+	default:
+		return batchTagAll
+	}
+}
+
+// Accepts either ids, or a combination of names and stash_ids.
+// If none are set, then all existing items will be tagged.
 type StashBoxBatchTagInput struct {
-	// Stash endpoint to use for the tagging - deprecated - use StashBoxEndpoint
+	// Stash endpoint to use for the tagging
+	//
+	// Deprecated: use StashBoxEndpoint
 	Endpoint *int `json:"endpoint"`
 	StashBoxEndpoint *string `json:"stash_box_endpoint"`
 	// Fields to exclude when executing the tagging
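getBatchTagType above picks a tagging mode by precedence: explicit ids first, then names or stash ids, otherwise everything. A simplified, self-contained stand-in that ignores the deprecated performer fields illustrates the same precedence:

package main

import "fmt"

// pickMode mirrors the selection logic of getBatchTagType in reduced form;
// the real function also consults the deprecated performer_ids/performer_names
// fields when hasPerformerFields is true.
func pickMode(ids, names, stashIDs []string) string {
    switch {
    case len(ids) > 0:
        return "by ids"
    case len(stashIDs) > 0 || len(names) > 0:
        return "by names or stash ids"
    default:
        return "all items"
    }
}

func main() {
    fmt.Println(pickMode([]string{"12"}, nil, nil))             // by ids
    fmt.Println(pickMode(nil, []string{"Some Performer"}, nil)) // by names or stash ids
    fmt.Println(pickMode(nil, nil, nil))                        // all items
}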
@ -375,128 +404,143 @@ type StashBoxBatchTagInput struct {
|
||||||
Refresh bool `json:"refresh"`
|
Refresh bool `json:"refresh"`
|
||||||
// If batch adding studios, should their parent studios also be created?
|
// If batch adding studios, should their parent studios also be created?
|
||||||
CreateParent bool `json:"createParent"`
|
CreateParent bool `json:"createParent"`
|
||||||
// If set, only tag these ids
|
// IDs in stash of the items to update.
|
||||||
|
// If set, names and stash_ids fields will be ignored.
|
||||||
Ids []string `json:"ids"`
|
Ids []string `json:"ids"`
|
||||||
// If set, only tag these names
|
// Names of the items in the stash-box instance to search for and create
|
||||||
Names []string `json:"names"`
|
Names []string `json:"names"`
|
||||||
// If set, only tag these performer ids
|
// Stash IDs of the items in the stash-box instance to search for and create
|
||||||
|
StashIDs []string `json:"stash_ids"`
|
||||||
|
// IDs in stash of the performers to update
|
||||||
//
|
//
|
||||||
// Deprecated: please use Ids
|
// Deprecated: use Ids
|
||||||
PerformerIds []string `json:"performer_ids"`
|
PerformerIds []string `json:"performer_ids"`
|
||||||
// If set, only tag these performer names
|
// Names of the performers in the stash-box instance to search for and create
|
||||||
//
|
//
|
||||||
// Deprecated: please use Names
|
// Deprecated: use Names
|
||||||
PerformerNames []string `json:"performer_names"`
|
PerformerNames []string `json:"performer_names"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (s *Manager) batchTagPerformersByIds(ctx context.Context, input StashBoxBatchTagInput, box *models.StashBox) ([]Task, error) {
	var tasks []Task

	err := s.Repository.WithTxn(ctx, func(ctx context.Context) error {
		performerQuery := s.Repository.Performer

		ids := input.Ids
		if len(ids) == 0 {
			ids = input.PerformerIds //nolint:staticcheck
		}

		for _, performerID := range ids {
			if id, err := strconv.Atoi(performerID); err == nil {
				performer, err := performerQuery.Find(ctx, id)
				if err != nil {
					return err
				}

				if err := performer.LoadStashIDs(ctx, performerQuery); err != nil {
					return fmt.Errorf("loading performer stash ids: %w", err)
				}

				hasStashID := performer.StashIDs.ForEndpoint(box.Endpoint) != nil
				if (input.Refresh && hasStashID) || (!input.Refresh && !hasStashID) {
					tasks = append(tasks, &stashBoxBatchPerformerTagTask{
						performer:      performer,
						box:            box,
						excludedFields: input.ExcludeFields,
					})
				}
			}
		}
		return nil
	})

	return tasks, err
}

func (s *Manager) batchTagPerformersByNamesOrStashIds(input StashBoxBatchTagInput, box *models.StashBox) []Task {
	var tasks []Task

	for i := range input.StashIDs {
		stashID := input.StashIDs[i]
		if len(stashID) > 0 {
			tasks = append(tasks, &stashBoxBatchPerformerTagTask{
				stashID:        &stashID,
				box:            box,
				excludedFields: input.ExcludeFields,
			})
		}
	}

	names := input.Names
	if len(names) == 0 {
		names = input.PerformerNames //nolint:staticcheck
	}

	for i := range names {
		name := names[i]
		if len(name) > 0 {
			tasks = append(tasks, &stashBoxBatchPerformerTagTask{
				name:           &name,
				box:            box,
				excludedFields: input.ExcludeFields,
			})
		}
	}

	return tasks
}

func (s *Manager) batchTagAllPerformers(ctx context.Context, input StashBoxBatchTagInput, box *models.StashBox) ([]Task, error) {
	var tasks []Task

	err := s.Repository.WithTxn(ctx, func(ctx context.Context) error {
		performerQuery := s.Repository.Performer
		var performers []*models.Performer
		var err error

		performers, err = performerQuery.FindByStashIDStatus(ctx, input.Refresh, box.Endpoint)
		if err != nil {
			return fmt.Errorf("error querying performers: %v", err)
		}

		for _, performer := range performers {
			if err := performer.LoadStashIDs(ctx, performerQuery); err != nil {
				return fmt.Errorf("error loading stash ids for performer %s: %v", performer.Name, err)
			}

			tasks = append(tasks, &stashBoxBatchPerformerTagTask{
				performer:      performer,
				box:            box,
				excludedFields: input.ExcludeFields,
			})
		}
		return nil
	})

	return tasks, err
}
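The Refresh flag above acts as a filter: with Refresh set, only items that already have a stash ID for the endpoint are re-tagged; without it, only items missing one are tagged. A condensed restatement of the same condition, for clarity:

	// Equivalent to (input.Refresh && hasStashID) || (!input.Refresh && !hasStashID)
	shouldTag := input.Refresh == hasStashID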
func (s *Manager) StashBoxBatchPerformerTag(ctx context.Context, box *models.StashBox, input StashBoxBatchTagInput) int {
	j := job.MakeJobExec(func(ctx context.Context, progress *job.Progress) error {
		logger.Infof("Initiating stash-box batch performer tag")

		var tasks []Task
		var err error

		switch input.getBatchTagType(true) {
		case batchTagByIds:
			tasks, err = s.batchTagPerformersByIds(ctx, input, box)
		case batchTagByNamesOrStashIds:
			tasks = s.batchTagPerformersByNamesOrStashIds(input, box)
		case batchTagAll:
			tasks, err = s.batchTagAllPerformers(ctx, input, box)
		}

		if err != nil {
			return err
		}

		if len(tasks) == 0 {

@@ -508,7 +552,7 @@ func (s *Manager) StashBoxBatchPerformerTag(ctx context.Context, box *models.Sta

		logger.Infof("Starting stash-box batch operation for %d performers", len(tasks))

		for _, task := range tasks {
			progress.ExecuteTask(task.GetDescription(), func() {
				task.Start(ctx)
			})

@@ -521,103 +565,116 @@ func (s *Manager) StashBoxBatchPerformerTag(ctx context.Context, box *models.Sta

	return s.JobManager.Add(ctx, "Batch stash-box performer tag...", j)
}
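A hypothetical call site for the job above (s is the *Manager instance and box a configured stash-box endpoint; both come from resolver code that is not part of this hunk):

	// Hypothetical usage; the returned value is the queued job ID.
	jobID := s.StashBoxBatchPerformerTag(ctx, box, StashBoxBatchTagInput{
		Names: []string{"Example Performer"},
	})
	logger.Infof("queued stash-box batch performer tag as job %d", jobID)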
func (s *Manager) batchTagStudiosByIds(ctx context.Context, input StashBoxBatchTagInput, box *models.StashBox) ([]Task, error) {
	var tasks []Task

	err := s.Repository.WithTxn(ctx, func(ctx context.Context) error {
		studioQuery := s.Repository.Studio

		for _, studioID := range input.Ids {
			if id, err := strconv.Atoi(studioID); err == nil {
				studio, err := studioQuery.Find(ctx, id)
				if err != nil {
					return err
				}

				if err := studio.LoadStashIDs(ctx, studioQuery); err != nil {
					return fmt.Errorf("loading studio stash ids: %w", err)
				}

				hasStashID := studio.StashIDs.ForEndpoint(box.Endpoint) != nil
				if (input.Refresh && hasStashID) || (!input.Refresh && !hasStashID) {
					tasks = append(tasks, &stashBoxBatchStudioTagTask{
						studio:         studio,
						createParent:   input.CreateParent,
						box:            box,
						excludedFields: input.ExcludeFields,
					})
				}
			}
		}
		return nil
	})

	return tasks, err
}

func (s *Manager) batchTagStudiosByNamesOrStashIds(input StashBoxBatchTagInput, box *models.StashBox) []Task {
	var tasks []Task

	for i := range input.StashIDs {
		stashID := input.StashIDs[i]
		if len(stashID) > 0 {
			tasks = append(tasks, &stashBoxBatchStudioTagTask{
				stashID:        &stashID,
				createParent:   input.CreateParent,
				box:            box,
				excludedFields: input.ExcludeFields,
			})
		}
	}

	for i := range input.Names {
		name := input.Names[i]
		if len(name) > 0 {
			tasks = append(tasks, &stashBoxBatchStudioTagTask{
				name:           &name,
				createParent:   input.CreateParent,
				box:            box,
				excludedFields: input.ExcludeFields,
			})
		}
	}

	return tasks
}

func (s *Manager) batchTagAllStudios(ctx context.Context, input StashBoxBatchTagInput, box *models.StashBox) ([]Task, error) {
	var tasks []Task

	err := s.Repository.WithTxn(ctx, func(ctx context.Context) error {
		studioQuery := s.Repository.Studio
		var studios []*models.Studio
		var err error

		studios, err = studioQuery.FindByStashIDStatus(ctx, input.Refresh, box.Endpoint)
		if err != nil {
			return fmt.Errorf("error querying studios: %v", err)
		}

		for _, studio := range studios {
			tasks = append(tasks, &stashBoxBatchStudioTagTask{
				studio:         studio,
				createParent:   input.CreateParent,
				box:            box,
				excludedFields: input.ExcludeFields,
			})
		}
		return nil
	})

	return tasks, err
}
func (s *Manager) StashBoxBatchStudioTag(ctx context.Context, box *models.StashBox, input StashBoxBatchTagInput) int {
	j := job.MakeJobExec(func(ctx context.Context, progress *job.Progress) error {
		logger.Infof("Initiating stash-box batch studio tag")

		var tasks []Task
		var err error

		switch input.getBatchTagType(false) {
		case batchTagByIds:
			tasks, err = s.batchTagStudiosByIds(ctx, input, box)
		case batchTagByNamesOrStashIds:
			tasks = s.batchTagStudiosByNamesOrStashIds(input, box)
		case batchTagAll:
			tasks, err = s.batchTagAllStudios(ctx, input, box)
		}

		if err != nil {
			return err
		}

		if len(tasks) == 0 {

@@ -629,7 +686,7 @@ func (s *Manager) StashBoxBatchStudioTag(ctx context.Context, box *models.StashB

		logger.Infof("Starting stash-box batch operation for %d studios", len(tasks))

		for _, task := range tasks {
			progress.ExecuteTask(task.GetDescription(), func() {
				task.Start(ctx)
			})
@@ -21,6 +21,7 @@ type SetupInput struct {

	// Empty to indicate $HOME/.stash/config.yml default
	ConfigLocation string `json:"configLocation"`
	Stashes []*config.StashConfigInput `json:"stashes"`
	SFWContentMode bool `json:"sfwContentMode"`
	// Empty to indicate default
	DatabaseFile string `json:"databaseFile"`
	// Empty to indicate default
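A sketch of a setup input carrying the new flag (field names follow the json tags above; all other fields are omitted here for brevity):

	// Hypothetical setup payload.
	input := SetupInput{
		ConfigLocation: "",   // default $HOME/.stash/config.yml
		SFWContentMode: true, // use the SFW placeholder images added in this change
	}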
@@ -4,6 +4,7 @@ import (

	"context"
	"errors"
	"fmt"
	"os/exec"

	"github.com/stashapp/stash/pkg/fsutil"
	"github.com/stashapp/stash/pkg/image"

@@ -20,6 +21,13 @@ func (t *GenerateImageThumbnailTask) GetDescription() string {

	return fmt.Sprintf("Generating Thumbnail for image %s", t.Image.Path)
}

func (t *GenerateImageThumbnailTask) logStderr(err error) {
	var exitErr *exec.ExitError
	if errors.As(err, &exitErr) {
		logger.Debugf("[generator] error output: %s", exitErr.Stderr)
	}
}

func (t *GenerateImageThumbnailTask) Start(ctx context.Context) {
	if !t.required() {
		return

@@ -46,14 +54,15 @@ func (t *GenerateImageThumbnailTask) Start(ctx context.Context) {

	if err != nil {
		// don't log for animated images
		if !errors.Is(err, image.ErrNotSupportedForThumbnail) {
			logger.Errorf("[generator] getting thumbnail for image %s: %s", path, err.Error())
			t.logStderr(err)
		}
		return
	}

	err = fsutil.WriteFile(thumbPath, data)
	if err != nil {
		logger.Errorf("[generator] writing thumbnail for image %s: %s", path, err.Error())
		return
	}
}
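The logStderr helper relies on standard library behaviour: when a command is run via Output() and exits non-zero, the returned *exec.ExitError carries the captured standard error. A standalone sketch of the same pattern (the ffmpeg invocation is purely illustrative):

	package main

	import (
		"errors"
		"fmt"
		"os/exec"
	)

	func main() {
		// Hypothetical failing command; any invocation run via Output() works the same way.
		_, err := exec.Command("ffmpeg", "-no-such-flag").Output()
		var exitErr *exec.ExitError
		if errors.As(err, &exitErr) {
			// Stderr is populated by Output() when standard error is not otherwise collected.
			fmt.Printf("stderr: %s\n", exitErr.Stderr)
		}
	}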
@@ -107,6 +107,12 @@ func (t *GenerateMarkersTask) generateMarker(videoFile *models.VideoFile, scene

	sceneHash := scene.GetHash(t.fileNamingAlgorithm)
	seconds := float64(sceneMarker.Seconds)

	// check if marker past duration
	if seconds > float64(videoFile.Duration) {
		logger.Warnf("[generator] scene marker at %.2f seconds exceeds video duration of %.2f seconds, skipping", seconds, float64(videoFile.Duration))
		return
	}

	g := t.generator

	if err := g.MarkerPreviewVideo(context.TODO(), videoFile.Path, sceneHash, seconds, sceneMarker.EndSeconds, instance.Config.GetPreviewAudio()); err != nil {
@@ -32,6 +32,7 @@ func (t *GenerateCoverTask) Start(ctx context.Context) {

		return t.Scene.LoadPrimaryFile(ctx, r.File)
	}); err != nil {
		logger.Error(err)
		return
	}

	if !required {
@@ -14,57 +14,33 @@ import (

	"github.com/stashapp/stash/pkg/studio"
)

// stashBoxBatchPerformerTagTask is used to tag or create performers from stash-box.
//
// Two modes of operation:
// - Update existing performer: set performer to update from stash-box data
// - Create new performer: set name or stashID to search stash-box and create locally
type stashBoxBatchPerformerTagTask struct {
	box            *models.StashBox
	name           *string
	stashID        *string
	performer      *models.Performer
	excludedFields []string
}

func (t *stashBoxBatchPerformerTagTask) getName() string {
	switch {
	case t.name != nil:
		return *t.name
	case t.stashID != nil:
		return *t.stashID
	case t.performer != nil:
		return t.performer.Name
	default:
		return ""
	}
}

func (t *stashBoxBatchPerformerTagTask) Start(ctx context.Context) {
	performer, err := t.findStashBoxPerformer(ctx)
	if err != nil {
		logger.Errorf("Error fetching performer data from stash-box: %v", err)

@@ -76,21 +52,18 @@ func (t *StashBoxBatchTagTask) stashBoxPerformerTag(ctx context.Context) {

		excluded[field] = true
	}

	if performer != nil {
		t.processMatchedPerformer(ctx, performer, excluded)
	} else {
		logger.Infof("No match found for %s", t.getName())
	}
}

func (t *stashBoxBatchPerformerTagTask) GetDescription() string {
	return fmt.Sprintf("Tagging performer %s from stash-box", t.getName())
}

func (t *stashBoxBatchPerformerTagTask) findStashBoxPerformer(ctx context.Context) (*models.ScrapedPerformer, error) {
	var performer *models.ScrapedPerformer
	var err error

@@ -98,7 +71,24 @@ func (t *StashBoxBatchTagTask) findStashBoxPerformer(ctx context.Context) (*mode

	client := stashbox.NewClient(*t.box, stashbox.ExcludeTagPatterns(instance.Config.GetScraperExcludeTagPatterns()))

	switch {
	case t.name != nil:
		performer, err = client.FindPerformerByName(ctx, *t.name)
	case t.stashID != nil:
		performer, err = client.FindPerformerByID(ctx, *t.stashID)

		if performer != nil && performer.RemoteMergedIntoId != nil {
			mergedPerformer, err := t.handleMergedPerformer(ctx, performer, client)
			if err != nil {
				return nil, err
			}

			if mergedPerformer != nil {
				logger.Infof("Performer id %s merged into %s, updating local performer", *t.stashID, *performer.RemoteMergedIntoId)
				performer = mergedPerformer
			}
		}
	case t.performer != nil: // tagging or updating existing performer
		var remoteID string
		if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
			qb := r.Performer

@@ -118,6 +108,7 @@ func (t *StashBoxBatchTagTask) findStashBoxPerformer(ctx context.Context) (*mode

		}); err != nil {
			return nil, err
		}

		if remoteID != "" {
			performer, err = client.FindPerformerByID(ctx, remoteID)

@@ -132,15 +123,10 @@ func (t *StashBoxBatchTagTask) findStashBoxPerformer(ctx context.Context) (*mode

				performer = mergedPerformer
			}
		} else {
			// find by performer name instead
			performer, err = client.FindPerformerByName(ctx, t.performer.Name)
		}
	}

	if performer != nil {

@@ -154,7 +140,7 @@ func (t *StashBoxBatchTagTask) findStashBoxPerformer(ctx context.Context) (*mode

	return performer, err
}

func (t *stashBoxBatchPerformerTagTask) handleMergedPerformer(ctx context.Context, performer *models.ScrapedPerformer, client *stashbox.Client) (mergedPerformer *models.ScrapedPerformer, err error) {
	mergedPerformer, err = client.FindPerformerByID(ctx, *performer.RemoteMergedIntoId)
	if err != nil {
		return nil, fmt.Errorf("loading merged performer %s from stashbox", *performer.RemoteMergedIntoId)

@@ -169,8 +155,7 @@ func (t *StashBoxBatchTagTask) handleMergedPerformer(ctx context.Context, perfor

	return mergedPerformer, nil
}

func (t *stashBoxBatchPerformerTagTask) processMatchedPerformer(ctx context.Context, p *models.ScrapedPerformer, excluded map[string]bool) {
	if t.performer != nil {
		storedID, _ := strconv.Atoi(*p.StoredID)

@@ -180,7 +165,6 @@ func (t *StashBoxBatchTagTask) processMatchedPerformer(ctx context.Context, p *m

			return
		}

		r := instance.Repository
		err = r.WithTxn(ctx, func(ctx context.Context) error {
			qb := r.Performer

@@ -226,8 +210,8 @@ func (t *StashBoxBatchTagTask) processMatchedPerformer(ctx context.Context, p *m

		} else {
			logger.Infof("Updated performer %s", *p.Name)
		}
	} else {
		// no existing performer, create a new one
		newPerformer := p.ToPerformer(t.box.Endpoint, excluded)
		image, err := p.GetImage(ctx, excluded)
		if err != nil {

@@ -263,7 +247,34 @@ func (t *StashBoxBatchTagTask) processMatchedPerformer(ctx context.Context, p *m

		}
	}
// stashBoxBatchStudioTagTask is used to tag or create studios from stash-box.
//
// Two modes of operation:
// - Update existing studio: set studio to update from stash-box data
// - Create new studio: set name or stashID to search stash-box and create locally
type stashBoxBatchStudioTagTask struct {
	box            *models.StashBox
	name           *string
	stashID        *string
	studio         *models.Studio
	createParent   bool
	excludedFields []string
}

func (t *stashBoxBatchStudioTagTask) getName() string {
	switch {
	case t.name != nil:
		return *t.name
	case t.stashID != nil:
		return *t.stashID
	case t.studio != nil:
		return t.studio.Name
	default:
		return ""
	}
}

func (t *stashBoxBatchStudioTagTask) Start(ctx context.Context) {
	studio, err := t.findStashBoxStudio(ctx)
	if err != nil {
		logger.Errorf("Error fetching studio data from stash-box: %v", err)

@@ -275,21 +286,18 @@ func (t *StashBoxBatchTagTask) stashBoxStudioTag(ctx context.Context) {

		excluded[field] = true
	}

	if studio != nil {
		t.processMatchedStudio(ctx, studio, excluded)
	} else {
		logger.Infof("No match found for %s", t.getName())
	}
}

func (t *stashBoxBatchStudioTagTask) GetDescription() string {
	return fmt.Sprintf("Tagging studio %s from stash-box", t.getName())
}

func (t *stashBoxBatchStudioTagTask) findStashBoxStudio(ctx context.Context) (*models.ScrapedStudio, error) {
	var studio *models.ScrapedStudio
	var err error

@@ -297,7 +305,12 @@ func (t *StashBoxBatchTagTask) findStashBoxStudio(ctx context.Context) (*models.

	client := stashbox.NewClient(*t.box, stashbox.ExcludeTagPatterns(instance.Config.GetScraperExcludeTagPatterns()))

	switch {
	case t.name != nil:
		studio, err = client.FindStudio(ctx, *t.name)
	case t.stashID != nil:
		studio, err = client.FindStudio(ctx, *t.stashID)
	case t.studio != nil:
		var remoteID string
		if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
			if !t.studio.StashIDs.Loaded() {

@@ -315,17 +328,13 @@ func (t *StashBoxBatchTagTask) findStashBoxStudio(ctx context.Context) (*models.

		}); err != nil {
			return nil, err
		}

		if remoteID != "" {
			studio, err = client.FindStudio(ctx, remoteID)
		} else {
			// find by studio name instead
			studio, err = client.FindStudio(ctx, t.studio.Name)
		}
	}

	if err := r.WithReadTxn(ctx, func(ctx context.Context) error {

@@ -343,8 +352,7 @@ func (t *StashBoxBatchTagTask) findStashBoxStudio(ctx context.Context) (*models.

	return studio, err
}

func (t *stashBoxBatchStudioTagTask) processMatchedStudio(ctx context.Context, s *models.ScrapedStudio, excluded map[string]bool) {
	if t.studio != nil {
		storedID, _ := strconv.Atoi(*s.StoredID)

@@ -361,7 +369,6 @@ func (t *StashBoxBatchTagTask) processMatchedStudio(ctx context.Context, s *mode

			return
		}

		r := instance.Repository
		err = r.WithTxn(ctx, func(ctx context.Context) error {
			qb := r.Studio

@@ -394,8 +401,8 @@ func (t *StashBoxBatchTagTask) processMatchedStudio(ctx context.Context, s *mode

		} else {
			logger.Infof("Updated studio %s", s.Name)
		}
	} else if s.Name != "" {
		// no existing studio, create a new one
		if s.Parent != nil && t.createParent {
			err := t.processParentStudio(ctx, s.Parent, excluded)
			if err != nil {

@@ -410,7 +417,6 @@ func (t *StashBoxBatchTagTask) processMatchedStudio(ctx context.Context, s *mode

			return
		}

		r := instance.Repository
		err = r.WithTxn(ctx, func(ctx context.Context) error {
			qb := r.Studio

@@ -439,9 +445,8 @@ func (t *StashBoxBatchTagTask) processMatchedStudio(ctx context.Context, s *mode

		}
	}

func (t *stashBoxBatchStudioTagTask) processParentStudio(ctx context.Context, parent *models.ScrapedStudio, excluded map[string]bool) error {
	if parent.StoredID == nil {
		newParentStudio := parent.ToStudio(t.box.Endpoint, excluded)

		image, err := parent.GetImage(ctx, excluded)

@@ -450,7 +455,6 @@ func (t *StashBoxBatchTagTask) processParentStudio(ctx context.Context, parent *

			return err
		}

		r := instance.Repository
		err = r.WithTxn(ctx, func(ctx context.Context) error {
			qb := r.Studio

@@ -476,7 +480,6 @@ func (t *StashBoxBatchTagTask) processParentStudio(ctx context.Context, parent *

		}
		return err
	} else {
		storedID, _ := strconv.Atoi(*parent.StoredID)

		image, err := parent.GetImage(ctx, excluded)

@@ -485,7 +488,6 @@ func (t *StashBoxBatchTagTask) processParentStudio(ctx context.Context, parent *

			return err
		}

		r := instance.Repository
		err = r.WithTxn(ctx, func(ctx context.Context) error {
			qb := r.Studio
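Both task types expose the same pair of accessors, so the job runner only needs a small task contract. The interface below is an assumption inferred from the calls visible in this diff (Start and GetDescription via progress.ExecuteTask), not a verbatim copy of the repository's definition:

	// Assumed shape of the Task interface used by the batch tag jobs above.
	type Task interface {
		Start(ctx context.Context)
		GetDescription() string
	}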
@@ -8,12 +8,13 @@ import (

	"io/fs"
)

//go:embed performer performer_male performer_sfw scene image gallery tag studio group
var data embed.FS

const (
	Performer = "performer"
	PerformerMale = "performer_male"
	DefaultSFWPerformerImage = "performer_sfw/performer.svg"

	Scene = "scene"
	DefaultSceneImage = "scene/scene.svg"

internal/static/performer_sfw/performer.svg (new file, 7 lines)
@@ -0,0 +1,7 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="-136 -284 720 1080">
<!--!
Font Awesome Free 6.7.2 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) Copyright 2024 Fonticons, Inc.
Original from https://github.com/FortAwesome/Font-Awesome/blob/6.x/svgs/solid/user.svg
Modified to change color and viewbox
-->
<path d="M224 256A128 128 0 1 0 224 0a128 128 0 1 0 0 256zm-45.7 48C79.8 304 0 383.8 0 482.3C0 498.7 13.3 512 29.7 512l388.6 0c16.4 0 29.7-13.3 29.7-29.7C448 383.8 368.2 304 269.7 304l-91.4 0z" style="fill:#ffffff;fill-opacity:1" /></svg>
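A minimal sketch of how the embedded SFW placeholder could be read through the embed.FS shown above; the helper that Stash actually uses to serve default images is not part of this hunk, so this is illustrative only:

	// Illustrative read of the embedded SFW placeholder.
	svg, err := data.ReadFile(DefaultSFWPerformerImage)
	if err != nil {
		// the asset is compiled into the binary, so this should not normally fail
		panic(err)
	}
	_ = svg // serve or cache the bytes as needed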
@@ -29,6 +29,7 @@ var (

	VideoCodecIVP9 = makeVideoCodec("VP9 Intel Quick Sync Video (QSV)", "vp9_qsv")
	VideoCodecVVP9 = makeVideoCodec("VP9 VAAPI", "vp9_vaapi")
	VideoCodecVVPX = makeVideoCodec("VP8 VAAPI", "vp8_vaapi")
	VideoCodecRK264 = makeVideoCodec("H264 Rockchip MPP (rkmpp)", "h264_rkmpp")
)

const minHeight int = 480

@@ -45,6 +46,7 @@ func (f *FFMpeg) InitHWSupport(ctx context.Context) {

		VideoCodecI264C,
		VideoCodecV264,
		VideoCodecR264,
		VideoCodecRK264,
		VideoCodecIVP9,
		VideoCodecVVP9,
		VideoCodecM264,

@@ -67,7 +69,7 @@ func (f *FFMpeg) InitHWSupport(ctx context.Context) {

		args = args.Output("-")

		// #6064 - add timeout to context to prevent hangs
		const hwTestTimeoutSecondsDefault = 10
		hwTestTimeoutSeconds := hwTestTimeoutSecondsDefault * time.Second

		// allow timeout to be overridden with environment variable

@@ -88,7 +90,7 @@ func (f *FFMpeg) InitHWSupport(ctx context.Context) {

		if err := cmd.Run(); err != nil {
			if testCtx.Err() != nil {
				logger.Debugf("[InitHWSupport] Codec %s test timed out after %s", codec, hwTestTimeoutSeconds)
				continue
			}

@@ -201,6 +203,19 @@ func (f *FFMpeg) hwDeviceInit(args Args, toCodec VideoCodec, fullhw bool) Args {

			args = append(args, "-init_hw_device")
			args = append(args, "videotoolbox=vt")
		}
	case VideoCodecRK264:
		// Rockchip: always create rkmpp device and make it the filter device, so
		// scale_rkrga and subsequent hwupload/hwmap operate in the right context.
		args = append(args, "-init_hw_device")
		args = append(args, "rkmpp=rk")
		args = append(args, "-filter_hw_device")
		args = append(args, "rk")
		if fullhw {
			args = append(args, "-hwaccel")
			args = append(args, "rkmpp")
			args = append(args, "-hwaccel_output_format")
			args = append(args, "drm_prime")
		}
	}

	return args

@@ -233,6 +248,14 @@ func (f *FFMpeg) hwFilterInit(toCodec VideoCodec, fullhw bool) VideoFilter {

			videoFilter = videoFilter.Append("format=nv12")
			videoFilter = videoFilter.Append("hwupload")
		}
	case VideoCodecRK264:
		// For Rockchip full-hw, do NOT pre-map to rkrga here. scale_rkrga can
		// consume DRM_PRIME frames directly when filter_hw_device is set.
		// For non-fullhw, keep a sane software format.
		if !fullhw {
			videoFilter = videoFilter.Append("format=nv12")
			videoFilter = videoFilter.Append("hwupload")
		}
	}

	return videoFilter

@@ -310,6 +333,9 @@ func (f *FFMpeg) hwApplyFullHWFilter(args VideoFilter, codec VideoCodec, fullhw

		if fullhw && f.version.Gteq(Version{major: 3, minor: 3}) { // Added in FFMpeg 3.3
			args = args.Append("scale_qsv=format=nv12")
		}
	case VideoCodecRK264:
		// For Rockchip, no extra mapping here. If there is no scale filter,
		// leave frames in DRM_PRIME for the encoder.
	}

	return args

@@ -337,6 +363,14 @@ func (f *FFMpeg) hwApplyScaleTemplate(sargs string, codec VideoCodec, match []in

	case VideoCodecM264:
		template = "scale_vt=$value"
	case VideoCodecRK264:
		// The original filter chain is a fallback for maximum compatibility:
		// "scale_rkrga=$value:format=nv12,hwdownload,format=nv12,hwupload"
		// It avoids hwmap(rkrga→rkmpp) failures (-38/-12) seen on some builds
		// by downloading the scaled frame to system RAM and re-uploading it.
		// The filter chain below uses a zero-copy approach, passing the hardware-scaled
		// frame directly to the encoder. This is more efficient but may be less stable.
		template = "scale_rkrga=$value"
	default:
		return VideoFilter(sargs)
	}

@@ -345,12 +379,15 @@ func (f *FFMpeg) hwApplyScaleTemplate(sargs string, codec VideoCodec, match []in

	isIntel := codec == VideoCodecI264 || codec == VideoCodecI264C || codec == VideoCodecIVP9
	// BUG: scale_vt doesn't call ff_scale_adjust_dimensions, thus cant accept negative size values
	isApple := codec == VideoCodecM264
	// Rockchip's scale_rkrga supports -1/-2; don't apply minus-one hack here.
	return VideoFilter(templateReplaceScale(sargs, template, match, vf, isIntel || isApple))
}

// Returns the max resolution for a given codec, or a default
func (f *FFMpeg) hwCodecMaxRes(codec VideoCodec) (int, int) {
	switch codec {
	case VideoCodecRK264:
		return 8192, 8192
	case VideoCodecN264,
		VideoCodecN264H,
		VideoCodecI264,

@@ -382,7 +419,8 @@ func (f *FFMpeg) hwCodecHLSCompatible() *VideoCodec {

		VideoCodecI264C,
		VideoCodecV264,
		VideoCodecR264,
		VideoCodecM264, // Note that the Apple encoder sucks at startup, thus HLS quality is crap
		VideoCodecRK264:
		return &element
	}
}

@@ -397,7 +435,8 @@ func (f *FFMpeg) hwCodecMP4Compatible() *VideoCodec {

		VideoCodecN264H,
		VideoCodecI264,
		VideoCodecI264C,
		VideoCodecM264,
		VideoCodecRK264:
		return &element
	}
}
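Putting the device init and scale template together, a Rockchip full-hw transcode ends up driven by arguments roughly like the slice below. This is illustrative only: the real command line is assembled by the Args builder in this package and contains many more flags, and the scale value shown is a placeholder for whatever the $value substitution produces.

	// Roughly what hwDeviceInit + hwApplyScaleTemplate contribute for RK264 full-hw.
	rkArgs := []string{
		"-init_hw_device", "rkmpp=rk",
		"-filter_hw_device", "rk",
		"-hwaccel", "rkmpp",
		"-hwaccel_output_format", "drm_prime",
		"-vf", "scale_rkrga=-2:480", // hypothetical substituted scale value
		"-c:v", "h264_rkmpp",
	}
	_ = rkArgs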
@@ -18,7 +18,8 @@ type Cleaner struct {

	FS models.FS
	Repository Repository

	Handlers []CleanHandler
	TrashPath string
}

type cleanJob struct {

@@ -392,7 +393,7 @@ func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *models.Folder) bool

func (j *cleanJob) deleteFile(ctx context.Context, fileID models.FileID, fn string) {
	// delete associated objects
	fileDeleter := NewDeleterWithTrash(j.TrashPath)
	r := j.Repository
	if err := r.WithTxn(ctx, func(ctx context.Context) error {
		fileDeleter.RegisterHooks(ctx)

@@ -410,7 +411,7 @@ func (j *cleanJob) deleteFile(ctx context.Context, fileID models.FileID, fn stri

func (j *cleanJob) deleteFolder(ctx context.Context, folderID models.FolderID, fn string) {
	// delete associated objects
	fileDeleter := NewDeleterWithTrash(j.TrashPath)
	r := j.Repository
	if err := r.WithTxn(ctx, func(ctx context.Context) error {
		fileDeleter.RegisterHooks(ctx)
@@ -58,20 +58,33 @@ func newRenamerRemoverImpl() renamerRemoverImpl {

// Deleter is used to safely delete files and directories from the filesystem.
// During a transaction, files and directories are marked for deletion using
// the Files and Dirs methods. If TrashPath is set, files are moved to trash
// immediately. Otherwise, they are renamed with a .delete suffix. If the
// transaction is rolled back, then the files/directories can be restored to
// their original state with the Rollback method. If the transaction is
// committed, the marked files are then deleted from the filesystem using the
// Commit method.
type Deleter struct {
	RenamerRemover RenamerRemover
	files []string
	dirs []string
	TrashPath string // if set, files will be moved to this directory instead of being permanently deleted
	trashedPaths map[string]string // map of original path -> trash path (only used when TrashPath is set)
}

func NewDeleter() *Deleter {
	return &Deleter{
		RenamerRemover: newRenamerRemoverImpl(),
		TrashPath: "",
		trashedPaths: make(map[string]string),
	}
}

func NewDeleterWithTrash(trashPath string) *Deleter {
	return &Deleter{
		RenamerRemover: newRenamerRemoverImpl(),
		TrashPath: trashPath,
		trashedPaths: make(map[string]string),
	}
}

@@ -92,6 +105,17 @@ func (d *Deleter) RegisterHooks(ctx context.Context) {

// Abort should be called to restore marked files if this function returns an
// error.
func (d *Deleter) Files(paths []string) error {
	return d.filesInternal(paths, false)
}

// FilesWithoutTrash designates files to be deleted, bypassing the trash directory.
// Files will be permanently deleted even if TrashPath is configured.
// This is useful for deleting generated files that can be easily recreated.
func (d *Deleter) FilesWithoutTrash(paths []string) error {
	return d.filesInternal(paths, true)
}

func (d *Deleter) filesInternal(paths []string, bypassTrash bool) error {
	for _, p := range paths {
		// fail silently if the file does not exist
		if _, err := d.RenamerRemover.Stat(p); err != nil {

@@ -103,7 +127,7 @@ func (d *Deleter) Files(paths []string) error {

			return fmt.Errorf("check file %q exists: %w", p, err)
		}

		if err := d.renameForDelete(p, bypassTrash); err != nil {
			return fmt.Errorf("marking file %q for deletion: %w", p, err)
		}
		d.files = append(d.files, p)

@@ -118,6 +142,17 @@ func (d *Deleter) Files(paths []string) error {

// Abort should be called to restore marked files/directories if this function returns an
// error.
func (d *Deleter) Dirs(paths []string) error {
	return d.dirsInternal(paths, false)
}

// DirsWithoutTrash designates directories to be deleted, bypassing the trash directory.
// Directories will be permanently deleted even if TrashPath is configured.
// This is useful for deleting generated directories that can be easily recreated.
func (d *Deleter) DirsWithoutTrash(paths []string) error {
	return d.dirsInternal(paths, true)
}

func (d *Deleter) dirsInternal(paths []string, bypassTrash bool) error {
	for _, p := range paths {
		// fail silently if the file does not exist
		if _, err := d.RenamerRemover.Stat(p); err != nil {

@@ -129,7 +164,7 @@ func (d *Deleter) Dirs(paths []string) error {

			return fmt.Errorf("check directory %q exists: %w", p, err)
		}

		if err := d.renameForDelete(p, bypassTrash); err != nil {
			return fmt.Errorf("marking directory %q for deletion: %w", p, err)
		}
		d.dirs = append(d.dirs, p)
@ -150,33 +185,65 @@ func (d *Deleter) Rollback() {
|
||||||
|
|
||||||
d.files = nil
|
d.files = nil
|
||||||
d.dirs = nil
|
d.dirs = nil
|
||||||
|
d.trashedPaths = make(map[string]string)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Commit deletes all files marked for deletion and clears the marked list.
|
// Commit deletes all files marked for deletion and clears the marked list.
|
||||||
|
// When using trash, files have already been moved during renameForDelete, so
|
||||||
|
// this just clears the tracking. Otherwise, permanently delete the .delete files.
|
||||||
// Any errors encountered are logged. All files will be attempted, regardless
|
// Any errors encountered are logged. All files will be attempted, regardless
|
||||||
// of the errors encountered.
|
// of the errors encountered.
|
||||||
func (d *Deleter) Commit() {
|
func (d *Deleter) Commit() {
|
||||||
for _, f := range d.files {
|
if d.TrashPath != "" {
|
||||||
if err := d.RenamerRemover.Remove(f + deleteFileSuffix); err != nil {
|
// Files were already moved to trash during renameForDelete, just clear tracking
|
||||||
logger.Warnf("Error deleting file %q: %v", f+deleteFileSuffix, err)
|
logger.Debugf("Commit: %d files and %d directories already in trash, clearing tracking", len(d.files), len(d.dirs))
|
||||||
|
} else {
|
||||||
|
// Permanently delete files and directories marked with .delete suffix
|
||||||
|
for _, f := range d.files {
|
||||||
|
if err := d.RenamerRemover.Remove(f + deleteFileSuffix); err != nil {
|
||||||
|
logger.Warnf("Error deleting file %q: %v", f+deleteFileSuffix, err)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
for _, f := range d.dirs {
|
for _, f := range d.dirs {
|
||||||
if err := d.RenamerRemover.RemoveAll(f + deleteFileSuffix); err != nil {
|
if err := d.RenamerRemover.RemoveAll(f + deleteFileSuffix); err != nil {
|
||||||
logger.Warnf("Error deleting directory %q: %v", f+deleteFileSuffix, err)
|
logger.Warnf("Error deleting directory %q: %v", f+deleteFileSuffix, err)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
d.files = nil
|
d.files = nil
|
||||||
d.dirs = nil
|
d.dirs = nil
|
||||||
|
d.trashedPaths = make(map[string]string)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Deleter) renameForDelete(path string) error {
|
func (d *Deleter) renameForDelete(path string, bypassTrash bool) error {
|
||||||
|
if d.TrashPath != "" && !bypassTrash {
|
||||||
|
// Move file to trash immediately
|
||||||
|
trashDest, err := fsutil.MoveToTrash(path, d.TrashPath)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
d.trashedPaths[path] = trashDest
|
||||||
|
logger.Infof("Moved %q to trash at %s", path, trashDest)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Standard behavior: rename with .delete suffix (or when bypassing trash)
|
||||||
return d.RenamerRemover.Rename(path, path+deleteFileSuffix)
|
return d.RenamerRemover.Rename(path, path+deleteFileSuffix)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Deleter) renameForRestore(path string) error {
|
func (d *Deleter) renameForRestore(path string) error {
|
||||||
|
if d.TrashPath != "" {
|
||||||
|
// Restore file from trash
|
||||||
|
trashPath, ok := d.trashedPaths[path]
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf("no trash path found for %q", path)
|
||||||
|
}
|
||||||
|
return d.RenamerRemover.Rename(trashPath, path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Standard behavior: restore from .delete suffix
|
||||||
return d.RenamerRemover.Rename(path+deleteFileSuffix, path)
|
return d.RenamerRemover.Rename(path+deleteFileSuffix, path)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
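The hunks above rework the Deleter around an optional trash directory: marking a file either renames it with the .delete suffix (the legacy behaviour) or moves it straight into TrashPath, and Commit/Rollback clean up accordingly. A minimal usage sketch of that flow; deleteSceneFiles is a hypothetical wrapper and not part of this changeset:

// Sketch only: assumes the Deleter API introduced above.
func deleteSceneFiles(trashDir string, mediaPaths, generatedPaths []string) error {
	d := NewDeleterWithTrash(trashDir)

	// Media files are moved into the trash directory immediately.
	if err := d.Files(mediaPaths); err != nil {
		d.Rollback() // restores anything already moved
		return err
	}

	// Generated artifacts can be recreated, so they bypass the trash.
	if err := d.FilesWithoutTrash(generatedPaths); err != nil {
		d.Rollback()
		return err
	}

	// With a trash path configured, Commit only clears the tracking state;
	// without one it removes the ".delete"-suffixed files for good.
	d.Commit()
	return nil
}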
@@ -15,7 +15,9 @@ import (
// Does not create any folders in the file system
func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreator, path string) (*models.Folder, error) {
	// get or create folder hierarchy
-	folder, err := fc.FindByPath(ctx, path)
+	// assume case sensitive when searching for the folder
+	const caseSensitive = true
+	folder, err := fc.FindByPath(ctx, path, caseSensitive)
	if err != nil {
		return nil, err
	}
@@ -2,8 +2,11 @@ package image
import (
	"context"
+	"errors"
	"fmt"
	"image"
+	"path/filepath"
+	"strings"

	_ "image/gif"
	_ "image/jpeg"

@@ -17,6 +20,8 @@ import (
	_ "golang.org/x/image/webp"
)

+var ErrUnsupportedAVIFInZip = errors.New("AVIF images in zip files is unsupported")
+
// Decorator adds image specific fields to a File.
type Decorator struct {
	FFProbe *ffmpeg.FFProbe

@@ -28,6 +33,10 @@ func (d *Decorator) Decorate(ctx context.Context, fs models.FS, f models.File) (
	// ignore clips in non-OsFS filesystems as ffprobe cannot read them
	// TODO - copy to temp file if not an OsFS
	if _, isOs := fs.(*file.OsFS); !isOs {
+		// AVIF images inside zip files are not supported
+		if strings.ToLower(filepath.Ext(base.Path)) == ".avif" {
+			return nil, fmt.Errorf("%w: %s", ErrUnsupportedAVIFInZip, base.Path)
+		}
		logger.Debugf("assuming ImageFile for non-OsFS file %q", base.Path)
		return decorateFallback(fs, f)
	}

@@ -50,7 +59,7 @@ func (d *Decorator) Decorate(ctx context.Context, fs models.FS, f models.File) (
	isClip := true
	// This list is derived from ffmpegImageThumbnail in pkg/image/thumbnail. If one gets updated, the other should be as well
-	for _, item := range []string{"png", "mjpeg", "webp", "bmp"} {
+	for _, item := range []string{"png", "mjpeg", "webp", "bmp", "jpegxl"} {
		if item == probe.VideoCodec {
			isClip = false
		}

@@ -67,6 +76,25 @@ func (d *Decorator) Decorate(ctx context.Context, fs models.FS, f models.File) (
		Height: probe.Height,
	}

+	// FFprobe has a known bug where it returns 0x0 dimensions for some animated WebP files
+	// Fall back to image.DecodeConfig in this case.
+	// See: https://trac.ffmpeg.org/ticket/4907
+	if ret.Width == 0 || ret.Height == 0 {
+		logger.Warnf("FFprobe returned invalid dimensions (%dx%d) for %q, trying fallback decoder", ret.Width, ret.Height, base.Path)
+		c, format, err := decodeConfig(fs, base.Path)
+		if err != nil {
+			logger.Warnf("Fallback decoder failed for %q: %s. Proceeding with original FFprobe result", base.Path, err)
+		} else {
+			ret.Width = c.Width
+			ret.Height = c.Height
+			// Update format if it differs (fallback decoder may be more accurate)
+			if format != "" && format != ret.Format {
+				logger.Debugf("Updating format from %q to %q for %q", ret.Format, format, base.Path)
+				ret.Format = format
+			}
+		}
+	}
+
	adjustForOrientation(fs, base.Path, ret)

	return ret, nil
@@ -120,7 +120,7 @@ func (i *Importer) baseFileJSONToBaseFile(ctx context.Context, baseJSON *jsonsch
func (i *Importer) populateZipFileID(ctx context.Context, f *models.DirEntry) error {
	zipFilePath := i.Input.DirEntry().ZipFile
	if zipFilePath != "" {
-		zf, err := i.ReaderWriter.FindByPath(ctx, zipFilePath)
+		zf, err := i.ReaderWriter.FindByPath(ctx, zipFilePath, true)
		if err != nil {
			return fmt.Errorf("error finding file by path %q: %v", zipFilePath, err)
		}

@@ -146,7 +146,7 @@ func (i *Importer) Name() string {

func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
	path := i.Input.DirEntry().Path
-	existing, err := i.ReaderWriter.FindByPath(ctx, path)
+	existing, err := i.ReaderWriter.FindByPath(ctx, path, true)
	if err != nil {
		return nil, err
	}

@@ -176,7 +176,7 @@ func (i *Importer) createFolderHierarchy(ctx context.Context, p string) (*models
}

func (i *Importer) getOrCreateFolder(ctx context.Context, path string, parent *models.Folder) (*models.Folder, error) {
-	folder, err := i.FolderStore.FindByPath(ctx, path)
+	folder, err := i.FolderStore.FindByPath(ctx, path, true)
	if err != nil {
		return nil, err
	}
@@ -443,7 +443,10 @@ func (s *scanJob) getFolderID(ctx context.Context, path string) (*models.FolderI
		return &v, nil
	}

-	ret, err := s.Repository.Folder.FindByPath(ctx, path)
+	// assume case sensitive when searching for the folder
+	const caseSensitive = true
+
+	ret, err := s.Repository.Folder.FindByPath(ctx, path, caseSensitive)
	if err != nil {
		return nil, err
	}

@@ -473,7 +476,10 @@ func (s *scanJob) getZipFileID(ctx context.Context, zipFile *scanFile) (*models.
		return &v, nil
	}

-	ret, err := s.Repository.File.FindByPath(ctx, path)
+	// assume case sensitive when searching for the zip file
+	const caseSensitive = true
+
+	ret, err := s.Repository.File.FindByPath(ctx, path, caseSensitive)
	if err != nil {
		return nil, fmt.Errorf("getting zip file ID for %q: %w", path, err)
	}

@@ -493,11 +499,26 @@ func (s *scanJob) handleFolder(ctx context.Context, file scanFile) error {
	defer s.incrementProgress(file)

	// determine if folder already exists in data store (by path)
-	f, err := s.Repository.Folder.FindByPath(ctx, path)
+	// assume case sensitive by default
+	f, err := s.Repository.Folder.FindByPath(ctx, path, true)
	if err != nil {
		return fmt.Errorf("checking for existing folder %q: %w", path, err)
	}

+	// #1426 / #6326 - if folder is in a case-insensitive filesystem, then try
+	// case insensitive searching
+	// assume case sensitive if in zip
+	if f == nil && file.ZipFileID == nil {
+		caseSensitive, _ := file.fs.IsPathCaseSensitive(file.Path)
+
+		if !caseSensitive {
+			f, err = s.Repository.Folder.FindByPath(ctx, path, false)
+			if err != nil {
+				return fmt.Errorf("checking for existing folder %q: %w", path, err)
+			}
+		}
+	}
+
	// if folder not exists, create it
	if f == nil {
		f, err = s.onNewFolder(ctx, file)
@@ -611,10 +632,18 @@ func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *mo
	// update if mod time is changed
	entryModTime := f.ModTime
	if !entryModTime.Equal(existing.ModTime) {
+		existing.Path = f.Path
		existing.ModTime = entryModTime
		update = true
	}

+	// #6326 - update if path has changed - should only happen if case is
+	// changed and filesystem is case insensitive
+	if existing.Path != f.Path {
+		existing.Path = f.Path
+		update = true
+	}
+
	// update if zip file ID has changed
	fZfID := f.ZipFileID
	existingZfID := existing.ZipFileID

@@ -647,15 +676,31 @@ func (s *scanJob) handleFile(ctx context.Context, f scanFile) error {
	defer s.incrementProgress(f)

	var ff models.File

	// don't use a transaction to check if new or existing
	if err := s.withDB(ctx, func(ctx context.Context) error {
		// determine if file already exists in data store
+		// assume case sensitive when searching for the file to begin with
		var err error
-		ff, err = s.Repository.File.FindByPath(ctx, f.Path)
+		ff, err = s.Repository.File.FindByPath(ctx, f.Path, true)
		if err != nil {
			return fmt.Errorf("checking for existing file %q: %w", f.Path, err)
		}

+		// #1426 / #6326 - if file is in a case-insensitive filesystem, then try
+		// case insensitive search
+		// assume case sensitive if in zip
+		if ff == nil && f.ZipFileID != nil {
+			caseSensitive, _ := f.fs.IsPathCaseSensitive(f.Path)
+
+			if !caseSensitive {
+				ff, err = s.Repository.File.FindByPath(ctx, f.Path, false)
+				if err != nil {
+					return fmt.Errorf("checking for existing file %q: %w", f.Path, err)
+				}
+			}
+		}
+
		if ff == nil {
			// returns a file only if it is actually new
			ff, err = s.onNewFile(ctx, f)
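Both the folder and the file paths above use the same two-step lookup: an exact, case-sensitive FindByPath first, then a case-insensitive retry only when the backing filesystem reports itself as case-insensitive. A hedged sketch of that pattern pulled out into a helper; the helper name and the narrowed pathFinder interface are illustrative only, and models.FS is assumed to expose IsPathCaseSensitive as used above:

// Sketch only: pathFinder narrows the repository interface used in these hunks.
type pathFinder interface {
	FindByPath(ctx context.Context, path string, caseSensitive bool) (models.File, error)
}

func findByPathAnyCase(ctx context.Context, repo pathFinder, fs models.FS, path string) (models.File, error) {
	// exact (case-sensitive) lookup first
	f, err := repo.FindByPath(ctx, path, true)
	if err != nil || f != nil {
		return f, err
	}

	// retry case-insensitively only when the filesystem itself ignores case
	if caseSensitive, _ := fs.IsPathCaseSensitive(path); !caseSensitive {
		return repo.FindByPath(ctx, path, false)
	}

	return nil, nil
}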
@@ -879,6 +924,7 @@ func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.F
	// #1426 - if file exists but is a case-insensitive match for the
	// original filename, and the filesystem is case-insensitive
	// then treat it as a move
+	// #6326 - this should now be handled earlier, and this shouldn't be necessary
	if caseSensitive, _ := fs.IsPathCaseSensitive(other.Base().Path); !caseSensitive {
		// treat as a move
		missing = append(missing, other)

@@ -1026,7 +1072,8 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing model
	path := base.Path

	fileModTime := f.ModTime
-	updated := !fileModTime.Equal(base.ModTime)
+	// #6326 - also force a rescan if the basename changed
+	updated := !fileModTime.Equal(base.ModTime) || base.Basename != f.Basename
	forceRescan := s.options.Rescan

	if !updated && !forceRescan {

@@ -1041,6 +1088,8 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing model
		logger.Infof("%s has been updated: rescanning", path)
	}

+	// #6326 - update basename in case it changed
+	base.Basename = f.Basename
	base.ModTime = fileModTime
	base.Size = f.Size
	base.UpdatedAt = time.Now()
@@ -97,7 +97,7 @@ func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manag
	captionPrefix := getCaptionPrefix(captionPath)
	if err := txn.WithTxn(ctx, txnMgr, func(ctx context.Context) error {
		var err error
-		files, er := fqb.FindAllByPath(ctx, captionPrefix+"*")
+		files, er := fqb.FindAllByPath(ctx, captionPrefix+"*", true)

		if er != nil {
			return fmt.Errorf("searching for scene %s: %w", captionPrefix, er)

43 pkg/fsutil/trash.go Normal file
@@ -0,0 +1,43 @@
+package fsutil
+
+import (
+	"fmt"
+	"os"
+	"path/filepath"
+	"time"
+)
+
+// MoveToTrash moves a file or directory to a custom trash directory.
+// If a file with the same name already exists in the trash, a timestamp is appended.
+// Returns the destination path where the file was moved to.
+func MoveToTrash(sourcePath string, trashPath string) (string, error) {
+	// Get absolute path for the source
+	absSourcePath, err := filepath.Abs(sourcePath)
+	if err != nil {
+		return "", fmt.Errorf("failed to get absolute path: %w", err)
+	}
+
+	// Ensure trash directory exists
+	if err := os.MkdirAll(trashPath, 0755); err != nil {
+		return "", fmt.Errorf("failed to create trash directory: %w", err)
+	}
+
+	// Get the base name of the file/directory
+	baseName := filepath.Base(absSourcePath)
+	destPath := filepath.Join(trashPath, baseName)
+
+	// If a file with the same name already exists in trash, append timestamp
+	if _, err := os.Stat(destPath); err == nil {
+		ext := filepath.Ext(baseName)
+		nameWithoutExt := baseName[:len(baseName)-len(ext)]
+		timestamp := time.Now().Format("20060102-150405")
+		destPath = filepath.Join(trashPath, fmt.Sprintf("%s_%s%s", nameWithoutExt, timestamp, ext))
+	}
+
+	// Move the file to trash using SafeMove to support cross-filesystem moves
+	if err := SafeMove(absSourcePath, destPath); err != nil {
+		return "", fmt.Errorf("failed to move to trash: %w", err)
+	}
+
+	return destPath, nil
+}
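MoveToTrash is the only exported addition in this new file; the Deleter above calls it when a trash path is configured. A small sketch of calling it directly (paths are illustrative, and fmt plus github.com/stashapp/stash/pkg/fsutil are assumed to be imported):

// Sketch only: demonstrates the MoveToTrash helper added above.
func trashExample() {
	dest, err := fsutil.MoveToTrash("/data/old-scene.mp4", "/data/.trash")
	if err != nil {
		fmt.Println("move failed:", err)
		return
	}
	// If /data/.trash/old-scene.mp4 already existed, dest carries a timestamp
	// suffix such as old-scene_20060102-150405.mp4.
	fmt.Println("moved to", dest)
}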
@@ -265,7 +265,7 @@ func (i *Importer) populateFilesFolder(ctx context.Context) error {

	for _, ref := range i.Input.ZipFiles {
		path := ref
-		f, err := i.FileFinder.FindByPath(ctx, path)
+		f, err := i.FileFinder.FindByPath(ctx, path, true)
		if err != nil {
			return fmt.Errorf("error finding file: %w", err)
		}

@@ -281,7 +281,7 @@ func (i *Importer) populateFilesFolder(ctx context.Context) error {

	if i.Input.FolderPath != "" {
		path := i.Input.FolderPath
-		f, err := i.FolderFinder.FindByPath(ctx, path)
+		f, err := i.FolderFinder.FindByPath(ctx, path, true)
		if err != nil {
			return fmt.Errorf("error finding folder: %w", err)
		}
@@ -19,6 +19,7 @@ type FileDeleter struct {
}

// MarkGeneratedFiles marks for deletion the generated files for the provided image.
+// Generated files bypass trash and are permanently deleted since they can be regenerated.
func (d *FileDeleter) MarkGeneratedFiles(image *models.Image) error {
	var files []string
	thumbPath := d.Paths.Generated.GetThumbnailPath(image.Checksum, models.DefaultGthumbWidth)

@@ -32,7 +33,7 @@ func (d *FileDeleter) MarkGeneratedFiles(image *models.Image) error {
		files = append(files, prevPath)
	}

-	return d.Files(files)
+	return d.FilesWithoutTrash(files)
}

// Destroy destroys an image, optionally marking the file and generated files for deletion.
@@ -110,7 +110,7 @@ func (i *Importer) populateFiles(ctx context.Context) error {

	for _, ref := range i.Input.Files {
		path := ref
-		f, err := i.FileFinder.FindByPath(ctx, path)
+		f, err := i.FileFinder.FindByPath(ctx, path, true)
		if err != nil {
			return fmt.Errorf("error finding file: %w", err)
		}
@@ -22,12 +22,8 @@ const ffmpegImageQuality = 5
var vipsPath string
var once sync.Once

-var (
-	// ErrNotSupportedForThumbnail is returned if the image format is not supported for thumbnail generation
-	ErrUnsupportedImageFormat = errors.New("unsupported image format")
-
-	// ErrNotSupportedForThumbnail is returned if the image format is not supported for thumbnail generation
-	ErrNotSupportedForThumbnail = errors.New("unsupported image format for thumbnail")
-)
+// ErrNotSupportedForThumbnail is returned if the image format is not supported for thumbnail generation
+var ErrNotSupportedForThumbnail = errors.New("unsupported image format for thumbnail")

type ThumbnailEncoder struct {
	FFMpeg *ffmpeg.FFMpeg

@@ -83,8 +79,9 @@ func (e *ThumbnailEncoder) GetThumbnail(f models.File, maxSize int) ([]byte, err

	data := buf.Bytes()

+	format := ""
	if imageFile, ok := f.(*models.ImageFile); ok {
-		format := imageFile.Format
+		format = imageFile.Format
		animated := imageFile.Format == formatGif

		// #2266 - if image is webp, then determine if it is animated

@@ -96,6 +93,19 @@ func (e *ThumbnailEncoder) GetThumbnail(f models.File, maxSize int) ([]byte, err
		if animated {
			return nil, fmt.Errorf("%w: %s", ErrNotSupportedForThumbnail, format)
		}
+
+		// AVIF cannot be read from stdin, must use file path
+		// AVIF in zip files is not supported
+		// Note: No Windows check needed here since we use file path, not stdin
+		if format == "avif" {
+			if f.Base().ZipFileID != nil {
+				return nil, fmt.Errorf("%w: AVIF in zip file", ErrNotSupportedForThumbnail)
+			}
+			if e.vips != nil {
+				return e.vips.ImageThumbnailPath(f.Base().Path, maxSize)
+			}
+			return e.ffmpegImageThumbnailPath(f.Base().Path, maxSize)
+		}
	}

	// Videofiles can only be thumbnailed with ffmpeg

@@ -104,11 +114,15 @@ func (e *ThumbnailEncoder) GetThumbnail(f models.File, maxSize int) ([]byte, err
	}

	// vips has issues loading files from stdin on Windows
-	if e.vips != nil && runtime.GOOS != "windows" {
-		return e.vips.ImageThumbnail(buf, maxSize)
-	} else {
-		return e.ffmpegImageThumbnail(buf, maxSize)
+	if e.vips != nil {
+		if runtime.GOOS == "windows" && f.Base().ZipFileID == nil {
+			return e.vips.ImageThumbnailPath(f.Base().Path, maxSize)
+		}
+		if runtime.GOOS != "windows" {
+			return e.vips.ImageThumbnail(buf, maxSize)
+		}
	}
+	return e.ffmpegImageThumbnail(buf, maxSize)
}

// GetPreview returns the preview clip of the provided image clip resized to
@@ -130,16 +144,32 @@ func (e *ThumbnailEncoder) GetPreview(inPath string, outPath string, maxSize int
}

func (e *ThumbnailEncoder) ffmpegImageThumbnail(image *bytes.Buffer, maxSize int) ([]byte, error) {
-	args := transcoder.ImageThumbnail("-", transcoder.ImageThumbnailOptions{
+	options := transcoder.ImageThumbnailOptions{
		OutputFormat:  ffmpeg.ImageFormatJpeg,
		OutputPath:    "-",
		MaxDimensions: maxSize,
		Quality:       ffmpegImageQuality,
-	})
+	}
+
+	args := transcoder.ImageThumbnail("-", options)

	return e.FFMpeg.GenerateOutput(context.TODO(), args, image)
}

+// ffmpegImageThumbnailPath generates a thumbnail from a file path (used for AVIF which can't be piped)
+func (e *ThumbnailEncoder) ffmpegImageThumbnailPath(inputPath string, maxSize int) ([]byte, error) {
+	options := transcoder.ImageThumbnailOptions{
+		OutputFormat:  ffmpeg.ImageFormatJpeg,
+		OutputPath:    "-",
+		MaxDimensions: maxSize,
+		Quality:       ffmpegImageQuality,
+	}
+
+	args := transcoder.ImageThumbnail(inputPath, options)
+
+	return e.FFMpeg.GenerateOutput(context.TODO(), args, nil)
+}
+
func (e *ThumbnailEncoder) getClipPreview(inPath string, outPath string, maxSize int, clipDuration float64, frameRate float64) error {
	var thumbFilter ffmpeg.VideoFilter
	thumbFilter = thumbFilter.ScaleMaxSize(maxSize)
|
@ -24,6 +24,38 @@ func (e *vipsEncoder) ImageThumbnail(image *bytes.Buffer, maxSize int) ([]byte,
|
||||||
return []byte(data), err
|
return []byte(data), err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ImageThumbnailPath generates a thumbnail from a file path instead of stdin.
|
||||||
|
// This is required for formats like AVIF that need random file access (seeking)
|
||||||
|
// which stdin cannot provide.
|
||||||
|
func (e *vipsEncoder) ImageThumbnailPath(path string, maxSize int) ([]byte, error) {
|
||||||
|
// vips thumbnail syntax: thumbnail input output width [options]
|
||||||
|
// Using .jpg[Q=70,strip] as output writes to stdout
|
||||||
|
args := []string{
|
||||||
|
"thumbnail",
|
||||||
|
path,
|
||||||
|
".jpg[Q=70,strip]",
|
||||||
|
fmt.Sprint(maxSize),
|
||||||
|
"--size", "down",
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd := exec.Command(string(*e), args...)
|
||||||
|
|
||||||
|
var stdout, stderr bytes.Buffer
|
||||||
|
cmd.Stdout = &stdout
|
||||||
|
cmd.Stderr = &stderr
|
||||||
|
|
||||||
|
if err := cmd.Start(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := cmd.Wait(); err != nil {
|
||||||
|
logger.Errorf("image encoder error when running command <%s>: %s", strings.Join(cmd.Args, " "), stderr.String())
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return stdout.Bytes(), nil
|
||||||
|
}
|
||||||
|
|
||||||
func (e *vipsEncoder) run(args []string, stdin *bytes.Buffer) (string, error) {
|
func (e *vipsEncoder) run(args []string, stdin *bytes.Buffer) (string, error) {
|
||||||
cmd := exec.Command(string(*e), args...)
|
cmd := exec.Command(string(*e), args...)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
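ImageThumbnailPath shells out to the vips CLI with the source path as an argument, so formats that need seekable input (notably AVIF) avoid the stdin pipe used by the other encoders. A sketch of a caller that saves the resulting JPEG bytes; writeThumbToDisk is illustrative, assumed to live in the same package as vipsEncoder, and assumes the os package is imported:

// Sketch only: saves the JPEG bytes produced by the path-based encoder.
func writeThumbToDisk(enc *vipsEncoder, srcPath, dstPath string, maxSize int) error {
	data, err := enc.ImageThumbnailPath(srcPath, maxSize)
	if err != nil {
		return err
	}
	// the encoder emits JPEG data on stdout regardless of the input format
	return os.WriteFile(dstPath, data, 0644)
}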
@@ -45,7 +45,7 @@ func (r SceneRelationships) MatchRelationships(ctx context.Context, s *models.Sc
	}

	for _, t := range s.Tags {
-		err := ScrapedTag(ctx, r.TagFinder, t)
+		err := ScrapedTag(ctx, r.TagFinder, t, endpoint)
		if err != nil {
			return err
		}

@@ -190,11 +190,29 @@ func ScrapedGroup(ctx context.Context, qb GroupNamesFinder, storedID *string, na

// ScrapedTag matches the provided tag with the tags
// in the database and sets the ID field if one is found.
-func ScrapedTag(ctx context.Context, qb models.TagQueryer, s *models.ScrapedTag) error {
+func ScrapedTag(ctx context.Context, qb models.TagQueryer, s *models.ScrapedTag, stashBoxEndpoint string) error {
	if s.StoredID != nil {
		return nil
	}

+	// Check if a tag with the StashID already exists
+	if stashBoxEndpoint != "" && s.RemoteSiteID != nil {
+		if finder, ok := qb.(models.TagFinder); ok {
+			tags, err := finder.FindByStashID(ctx, models.StashID{
+				StashID:  *s.RemoteSiteID,
+				Endpoint: stashBoxEndpoint,
+			})
+			if err != nil {
+				return err
+			}
+			if len(tags) > 0 {
+				id := strconv.Itoa(tags[0].ID)
+				s.StoredID = &id
+				return nil
+			}
+		}
+	}
+
	t, err := tag.ByName(ctx, qb, s.Name)

	if err != nil {
@@ -9,6 +9,8 @@ type CustomFieldsInput struct {
	Full map[string]interface{} `json:"full"`
	// If populated, only the keys in this map will be updated
	Partial map[string]interface{} `json:"partial"`
+	// Remove any keys in this list
+	Remove []string `json:"remove"`
}

type CustomFieldsReader interface {

@@ -59,6 +59,10 @@ type GalleryFilterType struct {
	StudiosFilter *StudioFilterType `json:"studios_filter"`
	// Filter by related tags that meet this criteria
	TagsFilter *TagFilterType `json:"tags_filter"`
+	// Filter by related files that meet this criteria
+	FilesFilter *FileFilterType `json:"files_filter"`
+	// Filter by related folders that meet this criteria
+	FoldersFilter *FolderFilterType `json:"folders_filter"`
	// Filter by created at
	CreatedAt *TimestampCriterionInput `json:"created_at"`
	// Filter by updated at

@@ -23,6 +23,8 @@ type GroupFilterType struct {
	TagCount *IntCriterionInput `json:"tag_count"`
	// Filter by date
	Date *DateCriterionInput `json:"date"`
+	// Filter by O counter
+	OCounter *IntCriterionInput `json:"o_counter"`
	// Filter by containing groups
	ContainingGroups *HierarchicalMultiCriterionInput `json:"containing_groups"`
	// Filter by sub groups

@@ -57,6 +57,8 @@ type ImageFilterType struct {
	StudiosFilter *StudioFilterType `json:"studios_filter"`
	// Filter by related tags that meet this criteria
	TagsFilter *TagFilterType `json:"tags_filter"`
+	// Filter by related files that meet this criteria
+	FilesFilter *FileFilterType `json:"files_filter"`
	// Filter by created at
	CreatedAt *TimestampCriterionInput `json:"created_at"`
	// Filter by updated at
@@ -12,7 +12,7 @@ import (

type Studio struct {
	Name         string        `json:"name,omitempty"`
-	URL          string        `json:"url,omitempty"`
+	URLs         []string      `json:"urls,omitempty"`
	ParentStudio string        `json:"parent_studio,omitempty"`
	Image        string        `json:"image,omitempty"`
	CreatedAt    json.JSONTime `json:"created_at,omitempty"`

@@ -24,6 +24,9 @@ type Studio struct {
	StashIDs      []models.StashID `json:"stash_ids,omitempty"`
	Tags          []string         `json:"tags,omitempty"`
	IgnoreAutoTag bool             `json:"ignore_auto_tag,omitempty"`
+
+	// deprecated - for import only
+	URL string `json:"url,omitempty"`
}

func (s Studio) Filename() string {
@@ -6,20 +6,22 @@ import (

	jsoniter "github.com/json-iterator/go"
	"github.com/stashapp/stash/pkg/fsutil"
+	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/models/json"
)

type Tag struct {
	Name          string           `json:"name,omitempty"`
	SortName      string           `json:"sort_name,omitempty"`
	Description   string           `json:"description,omitempty"`
	Favorite      bool             `json:"favorite,omitempty"`
	Aliases       []string         `json:"aliases,omitempty"`
	Image         string           `json:"image,omitempty"`
	Parents       []string         `json:"parents,omitempty"`
	IgnoreAutoTag bool             `json:"ignore_auto_tag,omitempty"`
+	StashIDs      []models.StashID `json:"stash_ids,omitempty"`
	CreatedAt     json.JSONTime    `json:"created_at,omitempty"`
	UpdatedAt     json.JSONTime    `json:"updated_at,omitempty"`
}

func (s Tag) Filename() string {
@@ -130,13 +130,13 @@ func (_m *FileReaderWriter) Find(ctx context.Context, id ...models.FileID) ([]mo
	return r0, r1
}

-// FindAllByPath provides a mock function with given fields: ctx, path
-func (_m *FileReaderWriter) FindAllByPath(ctx context.Context, path string) ([]models.File, error) {
-	ret := _m.Called(ctx, path)
+// FindAllByPath provides a mock function with given fields: ctx, path, caseSensitive
+func (_m *FileReaderWriter) FindAllByPath(ctx context.Context, path string, caseSensitive bool) ([]models.File, error) {
+	ret := _m.Called(ctx, path, caseSensitive)

	var r0 []models.File
-	if rf, ok := ret.Get(0).(func(context.Context, string) []models.File); ok {
-		r0 = rf(ctx, path)
+	if rf, ok := ret.Get(0).(func(context.Context, string, bool) []models.File); ok {
+		r0 = rf(ctx, path, caseSensitive)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]models.File)

@@ -144,8 +144,8 @@ func (_m *FileReaderWriter) FindAllByPath(ctx context.Context, path string) ([]m
	}

	var r1 error
-	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
-		r1 = rf(ctx, path)
+	if rf, ok := ret.Get(1).(func(context.Context, string, bool) error); ok {
+		r1 = rf(ctx, path, caseSensitive)
	} else {
		r1 = ret.Error(1)
	}

@@ -222,13 +222,13 @@ func (_m *FileReaderWriter) FindByFingerprint(ctx context.Context, fp models.Fin
	return r0, r1
}

-// FindByPath provides a mock function with given fields: ctx, path
-func (_m *FileReaderWriter) FindByPath(ctx context.Context, path string) (models.File, error) {
-	ret := _m.Called(ctx, path)
+// FindByPath provides a mock function with given fields: ctx, path, caseSensitive
+func (_m *FileReaderWriter) FindByPath(ctx context.Context, path string, caseSensitive bool) (models.File, error) {
+	ret := _m.Called(ctx, path, caseSensitive)

	var r0 models.File
-	if rf, ok := ret.Get(0).(func(context.Context, string) models.File); ok {
-		r0 = rf(ctx, path)
+	if rf, ok := ret.Get(0).(func(context.Context, string, bool) models.File); ok {
+		r0 = rf(ctx, path, caseSensitive)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(models.File)

@@ -236,8 +236,8 @@ func (_m *FileReaderWriter) FindByPath(ctx context.Context, path string) (models
	}

	var r1 error
-	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
-		r1 = rf(ctx, path)
+	if rf, ok := ret.Get(1).(func(context.Context, string, bool) error); ok {
+		r1 = rf(ctx, path, caseSensitive)
	} else {
		r1 = ret.Error(1)
	}
@@ -132,13 +132,13 @@ func (_m *FolderReaderWriter) FindByParentFolderID(ctx context.Context, parentFo
	return r0, r1
}

-// FindByPath provides a mock function with given fields: ctx, path
-func (_m *FolderReaderWriter) FindByPath(ctx context.Context, path string) (*models.Folder, error) {
-	ret := _m.Called(ctx, path)
+// FindByPath provides a mock function with given fields: ctx, path, caseSensitive
+func (_m *FolderReaderWriter) FindByPath(ctx context.Context, path string, caseSensitive bool) (*models.Folder, error) {
+	ret := _m.Called(ctx, path, caseSensitive)

	var r0 *models.Folder
-	if rf, ok := ret.Get(0).(func(context.Context, string) *models.Folder); ok {
-		r0 = rf(ctx, path)
+	if rf, ok := ret.Get(0).(func(context.Context, string, bool) *models.Folder); ok {
+		r0 = rf(ctx, path, caseSensitive)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*models.Folder)

@@ -146,8 +146,8 @@ func (_m *FolderReaderWriter) FindByPath(ctx context.Context, path string) (*mod
	}

	var r1 error
-	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
-		r1 = rf(ctx, path)
+	if rf, ok := ret.Get(1).(func(context.Context, string, bool) error); ok {
+		r1 = rf(ctx, path, caseSensitive)
	} else {
		r1 = ret.Error(1)
	}
@@ -594,6 +594,27 @@ func (_m *ImageReaderWriter) OCountByPerformerID(ctx context.Context, performerI
	return r0, r1
}

+// OCountByStudioID provides a mock function with given fields: ctx, studioID
+func (_m *ImageReaderWriter) OCountByStudioID(ctx context.Context, studioID int) (int, error) {
+	ret := _m.Called(ctx, studioID)
+
+	var r0 int
+	if rf, ok := ret.Get(0).(func(context.Context, int) int); ok {
+		r0 = rf(ctx, studioID)
+	} else {
+		r0 = ret.Get(0).(int)
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+		r1 = rf(ctx, studioID)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
// Query provides a mock function with given fields: ctx, options
func (_m *ImageReaderWriter) Query(ctx context.Context, options models.ImageQueryOptions) (*models.ImageQueryResult, error) {
	ret := _m.Called(ctx, options)
@@ -1141,6 +1141,27 @@ func (_m *SceneReaderWriter) HasCover(ctx context.Context, sceneID int) (bool, e
	return r0, r1
}

+// OCountByGroupID provides a mock function with given fields: ctx, groupID
+func (_m *SceneReaderWriter) OCountByGroupID(ctx context.Context, groupID int) (int, error) {
+	ret := _m.Called(ctx, groupID)
+
+	var r0 int
+	if rf, ok := ret.Get(0).(func(context.Context, int) int); ok {
+		r0 = rf(ctx, groupID)
+	} else {
+		r0 = ret.Get(0).(int)
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+		r1 = rf(ctx, groupID)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
// OCountByPerformerID provides a mock function with given fields: ctx, performerID
func (_m *SceneReaderWriter) OCountByPerformerID(ctx context.Context, performerID int) (int, error) {
	ret := _m.Called(ctx, performerID)

@@ -1162,6 +1183,27 @@ func (_m *SceneReaderWriter) OCountByPerformerID(ctx context.Context, performerI
	return r0, r1
}

+// OCountByStudioID provides a mock function with given fields: ctx, studioID
+func (_m *SceneReaderWriter) OCountByStudioID(ctx context.Context, studioID int) (int, error) {
+	ret := _m.Called(ctx, studioID)
+
+	var r0 int
+	if rf, ok := ret.Get(0).(func(context.Context, int) int); ok {
+		r0 = rf(ctx, studioID)
+	} else {
+		r0 = ret.Get(0).(int)
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+		r1 = rf(ctx, studioID)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
// PlayDuration provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) PlayDuration(ctx context.Context) (float64, error) {
	ret := _m.Called(ctx)
@@ -360,6 +360,29 @@ func (_m *StudioReaderWriter) GetTagIDs(ctx context.Context, relatedID int) ([]i
	return r0, r1
}

+// GetURLs provides a mock function with given fields: ctx, relatedID
+func (_m *StudioReaderWriter) GetURLs(ctx context.Context, relatedID int) ([]string, error) {
+	ret := _m.Called(ctx, relatedID)
+
+	var r0 []string
+	if rf, ok := ret.Get(0).(func(context.Context, int) []string); ok {
+		r0 = rf(ctx, relatedID)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]string)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+		r1 = rf(ctx, relatedID)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
// HasImage provides a mock function with given fields: ctx, studioID
func (_m *StudioReaderWriter) HasImage(ctx context.Context, studioID int) (bool, error) {
	ret := _m.Called(ctx, studioID)
@@ -427,6 +427,29 @@ func (_m *TagReaderWriter) FindBySceneMarkerID(ctx context.Context, sceneMarkerI
	return r0, r1
}

+// FindByStashID provides a mock function with given fields: ctx, stashID
+func (_m *TagReaderWriter) FindByStashID(ctx context.Context, stashID models.StashID) ([]*models.Tag, error) {
+	ret := _m.Called(ctx, stashID)
+
+	var r0 []*models.Tag
+	if rf, ok := ret.Get(0).(func(context.Context, models.StashID) []*models.Tag); ok {
+		r0 = rf(ctx, stashID)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]*models.Tag)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, models.StashID) error); ok {
+		r1 = rf(ctx, stashID)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
// FindByStudioID provides a mock function with given fields: ctx, studioID
func (_m *TagReaderWriter) FindByStudioID(ctx context.Context, studioID int) ([]*models.Tag, error) {
	ret := _m.Called(ctx, studioID)

@@ -565,6 +588,29 @@ func (_m *TagReaderWriter) GetParentIDs(ctx context.Context, relatedID int) ([]i
	return r0, r1
}

+// GetStashIDs provides a mock function with given fields: ctx, relatedID
+func (_m *TagReaderWriter) GetStashIDs(ctx context.Context, relatedID int) ([]models.StashID, error) {
+	ret := _m.Called(ctx, relatedID)
+
+	var r0 []models.StashID
+	if rf, ok := ret.Get(0).(func(context.Context, int) []models.StashID); ok {
+		r0 = rf(ctx, relatedID)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]models.StashID)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+		r1 = rf(ctx, relatedID)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
// HasImage provides a mock function with given fields: ctx, tagID
func (_m *TagReaderWriter) HasImage(ctx context.Context, tagID int) (bool, error) {
	ret := _m.Called(ctx, tagID)
@@ -30,6 +30,7 @@ type SceneMarkerPartial struct {
	Seconds      OptionalFloat64
	EndSeconds   OptionalFloat64
	PrimaryTagID OptionalInt
+	TagIDs       *UpdateIDs
	SceneID      OptionalInt
	CreatedAt    OptionalTime
	UpdatedAt    OptionalTime
@@ -14,10 +14,14 @@ type ScrapedStudio struct {
	// Set if studio matched
	StoredID *string `json:"stored_id"`
	Name     string  `json:"name"`
-	URL      *string `json:"url"`
+	URL      *string  `json:"url"` // deprecated
+	URLs     []string `json:"urls"`
	Parent   *ScrapedStudio `json:"parent"`
	Image    *string  `json:"image"`
	Images   []string `json:"images"`
+	Details  *string  `json:"details"`
+	Aliases  *string  `json:"aliases"`
+	Tags     []*ScrapedTag `json:"tags"`
	RemoteSiteID *string `json:"remote_site_id"`
}

@@ -26,9 +30,9 @@ func (ScrapedStudio) IsScrapedContent() {}

func (s *ScrapedStudio) ToStudio(endpoint string, excluded map[string]bool) *Studio {
	// Populate a new studio from the input
	ret := NewStudio()
-	ret.Name = s.Name
+	ret.Name = strings.TrimSpace(s.Name)

-	if s.RemoteSiteID != nil && endpoint != "" {
+	if s.RemoteSiteID != nil && endpoint != "" && *s.RemoteSiteID != "" {
		ret.StashIDs = NewRelatedStashIDs([]StashID{
			{
				Endpoint: endpoint,

@@ -38,8 +42,28 @@ func (s *ScrapedStudio) ToStudio(endpoint string, excluded map[string]bool) *Stu
		})
	}

-	if s.URL != nil && !excluded["url"] {
-		ret.URL = *s.URL
+	// if URLs are provided, only use those
+	if len(s.URLs) > 0 {
+		if !excluded["urls"] {
+			ret.URLs = NewRelatedStrings(s.URLs)
+		}
+	} else {
+		urls := []string{}
+		if s.URL != nil && !excluded["url"] {
+			urls = append(urls, *s.URL)
+		}
+
+		if len(urls) > 0 {
+			ret.URLs = NewRelatedStrings(urls)
+		}
+	}
+
+	if s.Details != nil && !excluded["details"] {
+		ret.Details = *s.Details
+	}
+
+	if s.Aliases != nil && *s.Aliases != "" && !excluded["aliases"] {
+		ret.Aliases = NewRelatedStrings(stringslice.FromString(*s.Aliases, ","))
	}

	if s.Parent != nil && s.Parent.StoredID != nil && !excluded["parent"] && !excluded["parent_studio"] {

@@ -71,11 +95,40 @@ func (s *ScrapedStudio) ToPartial(id string, endpoint string, excluded map[strin
	currentTime := time.Now()

	if s.Name != "" && !excluded["name"] {
-		ret.Name = NewOptionalString(s.Name)
+		ret.Name = NewOptionalString(strings.TrimSpace(s.Name))
	}

-	if s.URL != nil && !excluded["url"] {
-		ret.URL = NewOptionalString(*s.URL)
+	if len(s.URLs) > 0 {
+		if !excluded["urls"] {
+			ret.URLs = &UpdateStrings{
+				Values: stringslice.TrimSpace(s.URLs),
+				Mode:   RelationshipUpdateModeSet,
+			}
+		}
+	} else {
+		urls := []string{}
+		if s.URL != nil && !excluded["url"] {
+			urls = append(urls, strings.TrimSpace(*s.URL))
+		}
+
+		if len(urls) > 0 {
+			ret.URLs = &UpdateStrings{
+				Values: stringslice.TrimSpace(urls),
+				Mode:   RelationshipUpdateModeSet,
+			}
+		}
+	}
+
+	if s.Details != nil && !excluded["details"] {
+		ret.Details = NewOptionalString(strings.TrimSpace(*s.Details))
+	}
+
+	if s.Aliases != nil && *s.Aliases != "" && !excluded["aliases"] {
+		ret.Aliases = &UpdateStrings{
+			Values: stringslice.TrimSpace(stringslice.FromString(*s.Aliases, ",")),
+			Mode:   RelationshipUpdateModeSet,
+		}
	}

	if s.Parent != nil && !excluded["parent"] {

@@ -88,7 +141,7 @@ func (s *ScrapedStudio) ToPartial(id string, endpoint string, excluded map[strin
		}
	}

-	if s.RemoteSiteID != nil && endpoint != "" {
+	if s.RemoteSiteID != nil && endpoint != "" && *s.RemoteSiteID != "" {
		ret.StashIDs = &UpdateStashIDs{
			StashIDs: existingStashIDs,
			Mode:     RelationshipUpdateModeSet,
@ -145,10 +198,14 @@ func (ScrapedPerformer) IsScrapedContent() {}
|
||||||
func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool) *Performer {
|
func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool) *Performer {
|
||||||
ret := NewPerformer()
|
ret := NewPerformer()
|
||||||
currentTime := time.Now()
|
currentTime := time.Now()
|
||||||
ret.Name = *p.Name
|
ret.Name = strings.TrimSpace(*p.Name)
|
||||||
|
|
||||||
if p.Aliases != nil && !excluded["aliases"] {
|
if p.Aliases != nil && !excluded["aliases"] {
|
||||||
ret.Aliases = NewRelatedStrings(stringslice.FromString(*p.Aliases, ","))
|
aliases := stringslice.FromString(*p.Aliases, ",")
|
||||||
|
for i, alias := range aliases {
|
||||||
|
aliases[i] = strings.TrimSpace(alias)
|
||||||
|
}
|
||||||
|
ret.Aliases = NewRelatedStrings(aliases)
|
||||||
}
|
}
|
||||||
if p.Birthdate != nil && !excluded["birthdate"] {
|
if p.Birthdate != nil && !excluded["birthdate"] {
|
||||||
date, err := ParseDate(*p.Birthdate)
|
date, err := ParseDate(*p.Birthdate)
|
||||||
|
|
@ -249,7 +306,7 @@ func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if p.RemoteSiteID != nil && endpoint != "" {
|
if p.RemoteSiteID != nil && endpoint != "" && *p.RemoteSiteID != "" {
|
||||||
ret.StashIDs = NewRelatedStashIDs([]StashID{
|
ret.StashIDs = NewRelatedStashIDs([]StashID{
|
||||||
{
|
{
|
||||||
Endpoint: endpoint,
|
Endpoint: endpoint,
|
||||||
|
|
@ -378,7 +435,7 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if p.RemoteSiteID != nil && endpoint != "" {
|
if p.RemoteSiteID != nil && endpoint != "" && *p.RemoteSiteID != "" {
|
||||||
ret.StashIDs = &UpdateStashIDs{
|
ret.StashIDs = &UpdateStashIDs{
|
||||||
StashIDs: existingStashIDs,
|
StashIDs: existingStashIDs,
|
||||||
Mode: RelationshipUpdateModeSet,
|
Mode: RelationshipUpdateModeSet,
|
||||||
|
|
@ -395,12 +452,31 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool,
|
||||||
|
|
||||||
type ScrapedTag struct {
|
type ScrapedTag struct {
|
||||||
// Set if tag matched
|
// Set if tag matched
|
||||||
StoredID *string `json:"stored_id"`
|
StoredID *string `json:"stored_id"`
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
|
RemoteSiteID *string `json:"remote_site_id"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (ScrapedTag) IsScrapedContent() {}
|
func (ScrapedTag) IsScrapedContent() {}
|
||||||
|
|
||||||
|
func (t *ScrapedTag) ToTag(endpoint string, excluded map[string]bool) *Tag {
|
||||||
|
currentTime := time.Now()
|
||||||
|
ret := NewTag()
|
||||||
|
ret.Name = t.Name
|
||||||
|
|
||||||
|
if t.RemoteSiteID != nil && endpoint != "" && *t.RemoteSiteID != "" {
|
||||||
|
ret.StashIDs = NewRelatedStashIDs([]StashID{
|
||||||
|
{
|
||||||
|
Endpoint: endpoint,
|
||||||
|
StashID: *t.RemoteSiteID,
|
||||||
|
UpdatedAt: currentTime,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return &ret
|
||||||
|
}
|
||||||
|
|
||||||
func ScrapedTagSortFunction(a, b *ScrapedTag) int {
|
func ScrapedTagSortFunction(a, b *ScrapedTag) int {
|
||||||
return strings.Compare(strings.ToLower(a.Name), strings.ToLower(b.Name))
|
return strings.Compare(strings.ToLower(a.Name), strings.ToLower(b.Name))
|
||||||
}
|
}
|
||||||
|
|
@ -462,6 +538,7 @@ type ScrapedGroup struct {
|
||||||
Date *string `json:"date"`
|
Date *string `json:"date"`
|
||||||
Rating *string `json:"rating"`
|
Rating *string `json:"rating"`
|
||||||
Director *string `json:"director"`
|
Director *string `json:"director"`
|
||||||
|
URL *string `json:"url"` // included for backward compatibility
|
||||||
URLs []string `json:"urls"`
|
URLs []string `json:"urls"`
|
||||||
Synopsis *string `json:"synopsis"`
|
Synopsis *string `json:"synopsis"`
|
||||||
Studio *ScrapedStudio `json:"studio"`
|
Studio *ScrapedStudio `json:"studio"`
|
||||||
|
|
|
||||||
|
|
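
The hunks above move scraped studios (and, further down, groups) from a single URL field to a list of URLs, keeping the legacy field only as a backward-compatible fallback. A standalone sketch of that precedence rule, using simplified local types rather than the repository's models:

package main

import "fmt"

// scraped is a simplified stand-in for the scraped fields touched above.
type scraped struct {
	URL  *string  // legacy single URL, kept for backward compatibility
	URLs []string // preferred multi-valued field
}

// resolveURLs mirrors the precedence introduced above: if URLs is populated
// it wins outright; otherwise the legacy URL is promoted into a one-element list.
func resolveURLs(s scraped) []string {
	if len(s.URLs) > 0 {
		return s.URLs
	}
	var urls []string
	if s.URL != nil {
		urls = append(urls, *s.URL)
	}
	return urls
}

func main() {
	legacy := "https://example.org"
	fmt.Println(resolveURLs(scraped{URL: &legacy}))                           // [https://example.org]
	fmt.Println(resolveURLs(scraped{URLs: []string{"a", "b"}, URL: &legacy})) // [a b]
}

A scraper that still fills only the legacy field ends up producing a one-element list, so downstream code can deal exclusively with the list form.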
@@ -11,6 +11,7 @@ import (
 func Test_scrapedToStudioInput(t *testing.T) {
 	const name = "name"
 	url := "url"
+	url2 := "url2"
 	emptyEndpoint := ""
 	endpoint := "endpoint"
 	remoteSiteID := "remoteSiteID"

@@ -25,13 +26,33 @@ func Test_scrapedToStudioInput(t *testing.T) {
 			"set all",
 			&ScrapedStudio{
 				Name:         name,
+				URLs:         []string{url, url2},
 				URL:          &url,
 				RemoteSiteID: &remoteSiteID,
 			},
 			endpoint,
 			&Studio{
 				Name: name,
-				URL:  url,
+				URLs: NewRelatedStrings([]string{url, url2}),
+				StashIDs: NewRelatedStashIDs([]StashID{
+					{
+						Endpoint: endpoint,
+						StashID:  remoteSiteID,
+					},
+				}),
+			},
+		},
+		{
+			"set url instead of urls",
+			&ScrapedStudio{
+				Name:         name,
+				URL:          &url,
+				RemoteSiteID: &remoteSiteID,
+			},
+			endpoint,
+			&Studio{
+				Name: name,
+				URLs: NewRelatedStrings([]string{url}),
 				StashIDs: NewRelatedStashIDs([]StashID{
 					{
 						Endpoint: endpoint,

@@ -321,9 +342,12 @@ func TestScrapedStudio_ToPartial(t *testing.T) {
 			fullStudio,
 			stdArgs,
 			StudioPartial{
 				ID:   id,
 				Name: NewOptionalString(name),
-				URL:  NewOptionalString(url),
+				URLs: &UpdateStrings{
+					Values: []string{url},
+					Mode:   RelationshipUpdateModeSet,
+				},
 				ParentID: NewOptionalInt(parentStoredID),
 				StashIDs: &UpdateStashIDs{
 					StashIDs: append(existingStashIDs, StashID{
@@ -8,7 +8,6 @@ import (
 type Studio struct {
 	ID        int       `json:"id"`
 	Name      string    `json:"name"`
-	URL       string    `json:"url"`
 	ParentID  *int      `json:"parent_id"`
 	CreatedAt time.Time `json:"created_at"`
 	UpdatedAt time.Time `json:"updated_at"`

@@ -19,6 +18,7 @@ type Studio struct {
 	IgnoreAutoTag bool `json:"ignore_auto_tag"`
 
 	Aliases  RelatedStrings  `json:"aliases"`
+	URLs     RelatedStrings  `json:"urls"`
 	TagIDs   RelatedIDs      `json:"tag_ids"`
 	StashIDs RelatedStashIDs `json:"stash_ids"`
 }

@@ -35,7 +35,6 @@ func NewStudio() Studio {
 type StudioPartial struct {
 	ID   int
 	Name OptionalString
-	URL  OptionalString
 	ParentID OptionalInt
 	// Rating expressed in 1-100 scale
 	Rating OptionalInt

@@ -46,6 +45,7 @@ type StudioPartial struct {
 	IgnoreAutoTag OptionalBool
 
 	Aliases  *UpdateStrings
+	URLs     *UpdateStrings
 	TagIDs   *UpdateIDs
 	StashIDs *UpdateStashIDs
 }

@@ -63,6 +63,12 @@ func (s *Studio) LoadAliases(ctx context.Context, l AliasLoader) error {
 	})
 }
 
+func (s *Studio) LoadURLs(ctx context.Context, l URLLoader) error {
+	return s.URLs.load(func() ([]string, error) {
+		return l.GetURLs(ctx, s.ID)
+	})
+}
+
 func (s *Studio) LoadTagIDs(ctx context.Context, l TagIDLoader) error {
 	return s.TagIDs.load(func() ([]int, error) {
 		return l.GetTagIDs(ctx, s.ID)
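
Studio gains a lazily loaded URLs relationship plus a LoadURLs helper that mirrors LoadAliases and LoadTagIDs. A minimal standalone sketch of that load-on-demand pattern, using a simplified stand-in for the repository's RelatedStrings type (the real type may differ in detail):

package main

import "fmt"

// relatedStrings is a simplified stand-in for a lazily loaded string list.
type relatedStrings struct {
	loaded bool
	values []string
}

// load fetches the values on first use and caches them, which is the
// behaviour the LoadURLs helper above relies on.
func (r *relatedStrings) load(fn func() ([]string, error)) error {
	if r.loaded {
		return nil
	}
	v, err := fn()
	if err != nil {
		return err
	}
	r.values = v
	r.loaded = true
	return nil
}

func main() {
	var urls relatedStrings
	fetch := func() ([]string, error) { // stands in for l.GetURLs(ctx, s.ID)
		fmt.Println("hitting the database")
		return []string{"https://studio.example"}, nil
	}
	_ = urls.load(fetch)
	_ = urls.load(fetch) // already loaded; the fetch runs only once
	fmt.Println(urls.values)
}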
@@ -15,9 +15,10 @@ type Tag struct {
 	CreatedAt time.Time `json:"created_at"`
 	UpdatedAt time.Time `json:"updated_at"`
 
 	Aliases   RelatedStrings `json:"aliases"`
 	ParentIDs RelatedIDs     `json:"parent_ids"`
 	ChildIDs  RelatedIDs     `json:"tag_ids"`
+	StashIDs  RelatedStashIDs `json:"stash_ids"`
 }
 
 func NewTag() Tag {

@@ -46,6 +47,12 @@ func (s *Tag) LoadChildIDs(ctx context.Context, l TagRelationLoader) error {
 	})
 }
 
+func (s *Tag) LoadStashIDs(ctx context.Context, l StashIDLoader) error {
+	return s.StashIDs.load(func() ([]StashID, error) {
+		return l.GetStashIDs(ctx, s.ID)
+	})
+}
+
 type TagPartial struct {
 	Name     OptionalString
 	SortName OptionalString

@@ -58,6 +65,7 @@ type TagPartial struct {
 	Aliases   *UpdateStrings
 	ParentIDs *UpdateIDs
 	ChildIDs  *UpdateIDs
+	StashIDs  *UpdateStashIDs
 }
 
 func NewTagPartial() TagPartial {
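
Tags now carry stash IDs the same way studios and performers do, and the ScrapedTag.ToTag conversion added earlier in this diff records the scraping endpoint against that new relationship. A standalone sketch of the guard that decides whether an ID gets attached, with simplified local types in place of the repository's StashID model:

package main

import (
	"fmt"
	"time"
)

// stashID is a simplified stand-in for the StashID records attached above.
type stashID struct {
	Endpoint  string
	StashID   string
	UpdatedAt time.Time
}

// toTagStashIDs mirrors the guard in ToTag: only attach an ID when both the
// remote site ID and the endpoint are non-empty.
func toTagStashIDs(remoteSiteID *string, endpoint string) []stashID {
	if remoteSiteID == nil || endpoint == "" || *remoteSiteID == "" {
		return nil
	}
	return []stashID{{Endpoint: endpoint, StashID: *remoteSiteID, UpdatedAt: time.Now()}}
}

func main() {
	id := "abc123"
	fmt.Println(toTagStashIDs(&id, "https://stashdb.example/graphql")) // hypothetical endpoint
	fmt.Println(toTagStashIDs(&id, ""))                                // no endpoint: nothing attached
}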
@@ -13,9 +13,9 @@ type FileGetter interface {
 // FileFinder provides methods to find files.
 type FileFinder interface {
 	FileGetter
-	FindAllByPath(ctx context.Context, path string) ([]File, error)
+	FindAllByPath(ctx context.Context, path string, caseSensitive bool) ([]File, error)
 	FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]File, error)
-	FindByPath(ctx context.Context, path string) (File, error)
+	FindByPath(ctx context.Context, path string, caseSensitive bool) (File, error)
 	FindByFingerprint(ctx context.Context, fp Fingerprint) ([]File, error)
 	FindByZipFileID(ctx context.Context, zipFileID FileID) ([]File, error)
 	FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]File, error)
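
FindByPath and FindAllByPath on FileFinder (and FindByPath on FolderFinder just below) now take a caseSensitive flag, presumably so path lookups can match how the underlying filesystem treats case. A standalone sketch of what such a flag typically toggles; the repository's SQL-backed implementation will differ in detail:

package main

import (
	"fmt"
	"strings"
)

// matchPath illustrates a case-sensitivity toggle for path comparison.
func matchPath(stored, query string, caseSensitive bool) bool {
	if caseSensitive {
		return stored == query
	}
	return strings.EqualFold(stored, query)
}

func main() {
	fmt.Println(matchPath(`C:\Media\Clip.mp4`, `c:\media\clip.mp4`, true))  // false
	fmt.Println(matchPath(`C:\Media\Clip.mp4`, `c:\media\clip.mp4`, false)) // true
}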
@@ -12,7 +12,7 @@ type FolderGetter interface {
 type FolderFinder interface {
 	FolderGetter
 	FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*Folder, error)
-	FindByPath(ctx context.Context, path string) (*Folder, error)
+	FindByPath(ctx context.Context, path string, caseSensitive bool) (*Folder, error)
 	FindByZipFileID(ctx context.Context, zipFileID FileID) ([]*Folder, error)
 	FindByParentFolderID(ctx context.Context, parentFolderID FolderID) ([]*Folder, error)
 }
@@ -38,6 +38,7 @@ type ImageCounter interface {
 	CountByGalleryID(ctx context.Context, galleryID int) (int, error)
 	OCount(ctx context.Context) (int, error)
 	OCountByPerformerID(ctx context.Context, performerID int) (int, error)
+	OCountByStudioID(ctx context.Context, studioID int) (int, error)
 }
 
 // ImageCreator provides methods to create images.
Some files were not shown because too many files have changed in this diff