Merge pull request #1239 from stashapp/develop

Merge to master for 0.6
This commit is contained in:
WithoutPants 2021-03-29 12:05:59 +11:00 committed by GitHub
commit de538be79c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
237 changed files with 33728 additions and 19412 deletions

155
.github/workflows/build.yml vendored Normal file
View file

@ -0,0 +1,155 @@
name: Build
on:
push:
branches: [ develop, master ]
pull_request:
branches: [ develop ]
release:
types: [ published ]
jobs:
build:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- name: Checkout
run: git fetch --prune --unshallow --tags
- name: Set up Go
uses: actions/setup-go@v2
with:
go-version: 1.13
- name: Set up Node
uses: actions/setup-node@v2
with:
node-version: '12'
- name: Cache node modules
uses: actions/cache@v2
env:
cache-name: cache-node_modules
with:
path: ui/v2.5/node_modules
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-build-${{ env.cache-name }}-
${{ runner.os }}-build-
${{ runner.os }}-
- name: Pre-install
run: make pre-ui
- name: Generate
run: make generate
- name: Validate
run: make ui-validate fmt-check vet it
- name: Build UI
run: make ui-only
- name: Cross Compile
run: |
docker pull stashapp/compiler:4
./scripts/cross-compile.sh
- name: Generate checksums
run: |
git describe --tags --exclude latest_develop | tee CHECKSUMS_SHA1
sha1sum dist/stash-* | sed 's/dist\///g' | tee -a CHECKSUMS_SHA1
echo "STASH_VERSION=$(git describe --tags --exclude latest_develop)" >> $GITHUB_ENV
echo "RELEASE_DATE=$(date +'%Y-%m-%d %H:%M:%S %Z')" >> $GITHUB_ENV
- name: Upload Windows binary
# only upload binaries for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
uses: actions/upload-artifact@v2
with:
name: stash-win.exe
path: dist/stash-win.exe
- name: Upload OSX binary
# only upload binaries for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
uses: actions/upload-artifact@v2
with:
name: stash-osx
path: dist/stash-osx
- name: Upload Linux binary
# only upload binaries for pull requests
if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}}
uses: actions/upload-artifact@v2
with:
name: stash-linux
path: dist/stash-linux
- name: Update latest_develop tag
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
run: git tag -f latest_develop; git push -f --tags
- name: Development Release
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
uses: meeDamian/github-release@2.0
with:
token: "${{ secrets.GITHUB_TOKEN }}"
prerelease: true
allow_override: true
tag: latest_develop
name: "${{ env.STASH_VERSION }}: Latest development build"
body: "**${{ env.RELEASE_DATE }}**\n This is always the latest committed version on the develop branch. Use as your own risk!"
files: |
dist/stash-osx
dist/stash-win.exe
dist/stash-linux
dist/stash-linux-arm64v8
dist/stash-linux-arm32v7
dist/stash-pi
CHECKSUMS_SHA1
gzip: false
- name: Master release
if: ${{ github.event_name == 'release' && github.ref != 'refs/tags/latest_develop' }}
uses: meeDamian/github-release@2.0
with:
token: "${{ secrets.GITHUB_TOKEN }}"
files: |
dist/stash-osx
dist/stash-win.exe
dist/stash-linux
dist/stash-linux-arm64v8
dist/stash-linux-arm32v7
dist/stash-pi
CHECKSUMS_SHA1
gzip: false
- name: Development Docker
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
env:
DOCKER_CLI_EXPERIMENTAL: enabled
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
run: |
docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64
docker info
docker buildx create --name builder --use
docker buildx inspect --bootstrap
docker buildx ls
bash ./docker/ci/x86_64/docker_push.sh development
- name: Release Docker
if: ${{ github.event_name == 'release' && github.ref != 'refs/tags/latest_develop' }}
env:
DOCKER_CLI_EXPERIMENTAL: enabled
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
run: |
docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64
docker info
docker buildx create --name builder --use
docker buildx inspect --bootstrap
docker buildx ls
bash ./docker/ci/x86_64/docker_push.sh latest

1
.gitignore vendored
View file

@ -35,6 +35,7 @@ ui/v2.5/src/core/generated-*.tsx
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
.vscode
# Generated files
.idea/**/contentModel.xml

View file

@ -6,17 +6,24 @@
https://stashapp.cc
**Stash is a Go app which organizes and serves your porn.**
**Stash is a locally hosted web-based app written in Go which organizes and serves your porn.**
See a demo [here](https://vimeo.com/275537038) (password is stashapp).
* It can gather information about videos in your collection from the internet, and is extensible through the use of community-built plugins.
* It supports a wide variety of both video and image formats.
* You can tag videos and find them later.
* It provides statistics about performers, tags, studios and other things.
An in-app manual is available, and the manual pages can be viewed [here](https://github.com/stashapp/stash/tree/develop/ui/v2.5/src/docs/en).
You can [watch a demo video](https://vimeo.com/275537038) to see it in action (password is stashapp).
# Docker install
For further information you can [read the in-app manual](https://github.com/stashapp/stash/tree/develop/ui/v2.5/src/docs/en).
# Installing stash
## Docker install
Follow [this README.md in the docker directory](docker/production/README.md).
# Bare-metal Install
## Pre-Compiled Binaries
Stash supports macOS, Windows, and Linux. Download the [latest release here](https://github.com/stashapp/stash/releases).
@ -36,9 +43,18 @@ The `ffmpeg(.exe)` and `ffprobe(.exe)` files should be placed in `~/.stash` on m
# Usage
## Quickstart Guide
1) Download and install Stash and its dependencies
2) Run Stash. It will prompt you for some configuration options and a directory to index (you can also do this step afterward)
3) After configuration, launch your web browser and navigate to the URL shown within the Stash app.
**Note that Stash does not currently retrieve and organize information about your entire library automatically.** You will need to help it along through the use of [scrapers](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/docs/en/Scraping.md). The Stash community has developed scrapers for many popular data sources which can be downloaded and installed from [this repository](https://github.com/stashapp/CommunityScrapers).
The simplest way to tag a large number of files is by using the [Tagger](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/docs/en/Tagger.md), which uses filename keywords to help identify the file and pull in scene and performer information from our database. Note that this information is not comprehensive and you may need to use the scrapers to identify some of your media.
## CLI
Stash provides some command line options. See what is currently available by running `stash --help`.
Stash runs as a command-line app and local web server. There are some command-line options available, which you can see by running `stash --help`.
For example, to run Stash locally on port 80 (OSX / Linux): `stash --host 127.0.0.1 --port 80`
@ -52,17 +68,25 @@ This command would need customizing for your environment. [This link](https://s
Once you have a certificate and key file name them `stash.crt` and `stash.key` and place them in the `~/.stash` directory. Stash detects these and starts up using HTTPS rather than HTTP.
# FAQ
# Customization
> I'm unable to run the app on OSX or Linux
## Themes
There is a [directory of themes](https://github.com/stashapp/stash/wiki/Themes) on our Wiki, along with instructions on how to install them.
Try running `chmod u+x stash-osx` or `chmod u+x stash-linux` to make the file executable.
## CSS Customization
You can make the Stash interface fit your desired style with [Custom CSS snippets](https://github.com/stashapp/stash/wiki/Custom-CSS-snippets) and [CSS Tweaks](https://github.com/stashapp/stash/wiki/CSS-Tweaks).
> I have a question not answered here.
# Support
Join the [Discord server](https://discord.gg/2TsNFKt).
Answers to frequently asked questions can be found [on our Wiki](https://github.com/stashapp/stash/wiki/FAQ).
# Development
For issues not addressed there, there are a few options.
* Read the [Wiki](https://github.com/stashapp/stash/wiki)
* Check the in-app documentation (also available [here](https://github.com/stashapp/stash/tree/develop/ui/v2.5/src/docs/en))
* Join the [Discord server](https://discord.gg/2TsNFKt).
# Building From Source Code
## Install
@ -122,10 +146,3 @@ where the app can be cross-compiled. This process is kicked off by CI via the `
command to open a bash shell to the container to poke around:
`docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -i -t stashappdev/compiler:latest /bin/bash`
## Customization
You can make Stash interface fit your desired style with [Custom CSS snippets](https://github.com/stashapp/stash/wiki/Custom-CSS-snippets) and [CSS Tweaks](https://github.com/stashapp/stash/wiki/CSS-Tweaks).
[Stash Plex Theme](https://github.com/stashapp/stash/wiki/Stash-Plex-Theme) is a community created theme inspired by popular Plex Interface.

3
go.mod
View file

@ -18,7 +18,7 @@ require (
github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a
github.com/jmoiron/sqlx v1.2.0
github.com/json-iterator/go v1.1.9
github.com/mattn/go-sqlite3 v1.13.0
github.com/mattn/go-sqlite3 v1.14.6
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007
github.com/remeh/sizedwaitgroup v1.0.0
github.com/rs/cors v1.6.0
@ -33,6 +33,7 @@ require (
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9
golang.org/x/image v0.0.0-20190802002840-cff245a6509b
golang.org/x/net v0.0.0-20200822124328-c89045814202
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd
golang.org/x/tools v0.0.0-20200915031644-64986481280e // indirect
gopkg.in/yaml.v2 v2.3.0
)

2
go.sum
View file

@ -540,6 +540,8 @@ github.com/mattn/go-sqlite3 v1.10.0 h1:jbhqpg7tQe4SupckyijYiy0mJJ/pRyHvXf7JdWK86
github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.13.0 h1:LnJI81JidiW9r7pS/hXe6cFeO5EXNq7KbfvoJLRI69c=
github.com/mattn/go-sqlite3 v1.13.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg=
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/microcosm-cc/bluemonday v1.0.1/go.mod h1:hsXNsILzKxV+sX77C5b8FSuKF00vh2OMYv+xgHpAMF4=
github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc=

View file

@ -31,6 +31,7 @@ fragment ConfigGeneralData on ConfigGeneralResult {
excludes
imageExcludes
scraperUserAgent
scraperCertCheck
scraperCDPPath
stashBoxes {
name

View file

@ -3,6 +3,11 @@ fragment SlimPerformerData on Performer {
name
gender
image_path
favorite
tags {
id
name
}
stash_ids {
endpoint
stash_id

View file

@ -20,6 +20,11 @@ fragment PerformerData on Performer {
favorite
image_path
scene_count
tags {
...TagData
}
stash_ids {
stash_id
endpoint

View file

@ -15,6 +15,9 @@ fragment ScrapedPerformerData on ScrapedPerformer {
tattoos
piercings
aliases
tags {
...ScrapedSceneTagData
}
image
}
@ -36,6 +39,9 @@ fragment ScrapedScenePerformerData on ScrapedScenePerformer {
tattoos
piercings
aliases
tags {
...ScrapedSceneTagData
}
remote_site_id
images
}

View file

@ -4,4 +4,5 @@ fragment TagData on Tag {
image_path
scene_count
scene_marker_count
performer_count
}

View file

@ -16,6 +16,7 @@ mutation PerformerCreate(
$twitter: String,
$instagram: String,
$favorite: Boolean,
$tag_ids: [ID!],
$stash_ids: [StashIDInput!],
$image: String) {
@ -37,6 +38,7 @@ mutation PerformerCreate(
twitter: $twitter,
instagram: $instagram,
favorite: $favorite,
tag_ids: $tag_ids,
stash_ids: $stash_ids,
image: $image
}) {
@ -52,6 +54,14 @@ mutation PerformerUpdate(
}
}
mutation BulkPerformerUpdate(
$input: BulkPerformerUpdateInput!) {
bulkPerformerUpdate(input: $input) {
...PerformerData
}
}
mutation PerformerDestroy($id: ID!) {
performerDestroy(input: { id: $id })
}

View file

@ -13,24 +13,24 @@ query AllTags {
}
query AllPerformersForFilter {
allPerformersSlim {
allPerformers {
...SlimPerformerData
}
}
query AllStudiosForFilter {
allStudiosSlim {
allStudios {
...SlimStudioData
}
}
query AllMoviesForFilter {
allMoviesSlim {
allMovies {
...SlimMovieData
}
}
query AllTagsForFilter {
allTagsSlim {
allTags {
id
name
}

View file

@ -115,11 +115,6 @@ type Query {
# Get everything with minimal metadata
allPerformersSlim: [Performer!]!
allStudiosSlim: [Studio!]!
allMoviesSlim: [Movie!]!
allTagsSlim: [Tag!]!
# Version
version: Version!
@ -174,6 +169,7 @@ type Mutation {
performerUpdate(input: PerformerUpdateInput!): Performer
performerDestroy(input: PerformerDestroyInput!): Boolean!
performersDestroy(ids: [ID!]!): Boolean!
bulkPerformerUpdate(input: BulkPerformerUpdateInput!): [Performer!]
studioCreate(input: StudioCreateInput!): Studio
studioUpdate(input: StudioUpdateInput!): Studio

View file

@ -81,6 +81,8 @@ input ConfigGeneralInput {
scraperUserAgent: String
"""Scraper CDP path. Path to chrome executable or remote address"""
scraperCDPPath: String
"""Whether the scraper should check for invalid certificates"""
scraperCertCheck: Boolean!
"""Stash-box instances used for tagging"""
stashBoxes: [StashBoxInput!]!
}
@ -92,6 +94,10 @@ type ConfigGeneralResult {
databasePath: String!
"""Path to generated files"""
generatedPath: String!
"""Path to the config file used"""
configFilePath: String!
"""Path to scrapers"""
scrapersPath: String!
"""Path to cache"""
cachePath: String!
"""Whether to calculate MD5 checksums for scene video files"""
@ -144,6 +150,8 @@ type ConfigGeneralResult {
scraperUserAgent: String
"""Scraper CDP path. Path to chrome executable or remote address"""
scraperCDPPath: String
"""Whether the scraper should check for invalid certificates"""
scraperCertCheck: Boolean!
"""Stash-box instances used for tagging"""
stashBoxes: [StashBox!]!
}

View file

@ -59,6 +59,8 @@ input PerformerFilterType {
gender: GenderCriterionInput
"""Filter to only include performers missing this property"""
is_missing: String
"""Filter to only include performers with these tags"""
tags: MultiCriterionInput
"""Filter by StashID"""
stash_id: String
}
@ -75,6 +77,10 @@ input SceneMarkerFilterType {
}
input SceneFilterType {
AND: SceneFilterType
OR: SceneFilterType
NOT: SceneFilterType
"""Filter by path"""
path: StringCriterionInput
"""Filter by rating"""
@ -97,6 +103,8 @@ input SceneFilterType {
movies: MultiCriterionInput
"""Filter to only include scenes with these tags"""
tags: MultiCriterionInput
"""Filter to only include scenes with performers with these tags"""
performer_tags: MultiCriterionInput
"""Filter to only include scenes with these performers"""
performers: MultiCriterionInput
"""Filter by StashID"""
@ -132,23 +140,38 @@ input GalleryFilterType {
organized: Boolean
"""Filter by average image resolution"""
average_resolution: ResolutionEnum
"""Filter to only include scenes with this studio"""
"""Filter to only include galleries with this studio"""
studios: MultiCriterionInput
"""Filter to only include scenes with these tags"""
"""Filter to only include galleries with these tags"""
tags: MultiCriterionInput
"""Filter to only include scenes with these performers"""
"""Filter to only include galleries with performers with these tags"""
performer_tags: MultiCriterionInput
"""Filter to only include galleries with these performers"""
performers: MultiCriterionInput
"""Filter by number of images in this gallery"""
image_count: IntCriterionInput
}
input TagFilterType {
AND: TagFilterType
OR: TagFilterType
NOT: TagFilterType
"""Filter to only include tags missing this property"""
is_missing: String
"""Filter by number of scenes with this tag"""
scene_count: IntCriterionInput
"""Filter by number of images with this tag"""
image_count: IntCriterionInput
"""Filter by number of galleries with this tag"""
gallery_count: IntCriterionInput
"""Filter by number of performers with this tag"""
performer_count: IntCriterionInput
"""Filter by number of markers with this tag"""
marker_count: IntCriterionInput
}
@ -170,6 +193,8 @@ input ImageFilterType {
studios: MultiCriterionInput
"""Filter to only include images with these tags"""
tags: MultiCriterionInput
"""Filter to only include images with performers with these tags"""
performer_tags: MultiCriterionInput
"""Filter to only include images with these performers"""
performers: MultiCriterionInput
"""Filter to only include images with these galleries"""

View file

@ -33,15 +33,15 @@ input GeneratePreviewOptionsInput {
input ScanMetadataInput {
paths: [String!]
"""Set name, date, details from metadata (if present)"""
useFileMetadata: Boolean!
useFileMetadata: Boolean
"""Strip file extension from title"""
stripFileExtension: Boolean!
stripFileExtension: Boolean
"""Generate previews during scan"""
scanGeneratePreviews: Boolean!
scanGeneratePreviews: Boolean
"""Generate image previews during scan"""
scanGenerateImagePreviews: Boolean!
scanGenerateImagePreviews: Boolean
"""Generate sprites during scan"""
scanGenerateSprites: Boolean!
scanGenerateSprites: Boolean
}
input CleanMetadataInput {

View file

@ -27,6 +27,7 @@ type Performer {
piercings: String
aliases: String
favorite: Boolean!
tags: [Tag!]!
image_path: String # Resolver
scene_count: Int # Resolver
@ -52,6 +53,7 @@ input PerformerCreateInput {
twitter: String
instagram: String
favorite: Boolean
tag_ids: [ID!]
"""This should be base64 encoded"""
image: String
stash_ids: [StashIDInput!]
@ -76,11 +78,34 @@ input PerformerUpdateInput {
twitter: String
instagram: String
favorite: Boolean
tag_ids: [ID!]
"""This should be base64 encoded"""
image: String
stash_ids: [StashIDInput!]
}
input BulkPerformerUpdateInput {
clientMutationId: String
ids: [ID!]
url: String
gender: GenderEnum
birthdate: String
ethnicity: String
country: String
eye_color: String
height: String
measurements: String
fake_tits: String
career_length: String
tattoos: String
piercings: String
aliases: String
twitter: String
instagram: String
favorite: Boolean
tag_ids: BulkUpdateIds
}
input PerformerDestroyInput {
id: ID!
}

View file

@ -16,6 +16,8 @@ type ScrapedPerformer {
tattoos: String
piercings: String
aliases: String
# Should be ScrapedPerformerTag - but would be identical types
tags: [ScrapedSceneTag!]
"""This should be base64 encoded"""
image: String
@ -39,5 +41,6 @@ input ScrapedPerformerInput {
piercings: String
aliases: String
# not including tags for the input
# not including image for the input
}

View file

@ -45,6 +45,7 @@ type ScrapedScenePerformer {
tattoos: String
piercings: String
aliases: String
tags: [ScrapedSceneTag!]
remote_site_id: String
images: [String!]

View file

@ -5,6 +5,7 @@ type Tag {
image_path: String # Resolver
scene_count: Int # Resolver
scene_marker_count: Int # Resolver
performer_count: Int
}
input TagCreateInput {

View file

@ -10,6 +10,8 @@ import (
"runtime"
"time"
"golang.org/x/sys/cpu"
"github.com/stashapp/stash/pkg/logger"
)
@ -26,10 +28,12 @@ var ErrNoVersion = errors.New("no stash version")
var stashReleases = func() map[string]string {
return map[string]string{
"windows/amd64": "stash-win.exe",
"linux/amd64": "stash-linux",
"darwin/amd64": "stash-osx",
"linux/amd64": "stash-linux",
"windows/amd64": "stash-win.exe",
"linux/arm": "stash-pi",
"linux/arm64": "stash-linux-arm64v8",
"linux/armv7": "stash-linux-arm32v7",
}
}
@ -141,7 +145,13 @@ func makeGithubRequest(url string, output interface{}) error {
// which is the latest pre-release build.
func GetLatestVersion(shortHash bool) (latestVersion string, latestRelease string, err error) {
platform := fmt.Sprintf("%s/%s", runtime.GOOS, runtime.GOARCH)
arch := runtime.GOARCH // https://en.wikipedia.org/wiki/Comparison_of_ARM_cores
isARMv7 := cpu.ARM.HasNEON || cpu.ARM.HasVFPv3 || cpu.ARM.HasVFPv3D16 || cpu.ARM.HasVFPv4 // armv6 doesn't support any of these features
if arch == "arm" && isARMv7 {
arch = "armv7"
}
platform := fmt.Sprintf("%s/%s", runtime.GOOS, arch)
wantedRelease := stashReleases()[platform]
version, _, _ := GetVersion()

View file

@ -34,7 +34,7 @@ func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*models.Im
func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*models.ImagePathsType, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
builder := urlbuilders.NewImageURLBuilder(baseURL, obj.ID)
builder := urlbuilders.NewImageURLBuilder(baseURL, obj)
thumbnailPath := builder.GetThumbnailURL()
imagePath := builder.GetImageURL()
return &models.ImagePathsType{

View file

@ -84,13 +84,13 @@ func (r *movieResolver) Synopsis(ctx context.Context, obj *models.Movie) (*strin
func (r *movieResolver) FrontImagePath(ctx context.Context, obj *models.Movie) (*string, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
frontimagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj.ID).GetMovieFrontImageURL()
frontimagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj).GetMovieFrontImageURL()
return &frontimagePath, nil
}
func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (*string, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
backimagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj.ID).GetMovieBackImageURL()
backimagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj).GetMovieBackImageURL()
return &backimagePath, nil
}

View file

@ -134,10 +134,21 @@ func (r *performerResolver) Favorite(ctx context.Context, obj *models.Performer)
func (r *performerResolver) ImagePath(ctx context.Context, obj *models.Performer) (*string, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
imagePath := urlbuilders.NewPerformerURLBuilder(baseURL, obj.ID).GetPerformerImageURL()
imagePath := urlbuilders.NewPerformerURLBuilder(baseURL, obj).GetPerformerImageURL()
return &imagePath, nil
}
func (r *performerResolver) Tags(ctx context.Context, obj *models.Performer) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Tag().FindByPerformerID(obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *performerResolver) SceneCount(ctx context.Context, obj *models.Performer) (ret *int, err error) {
var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {

View file

@ -23,7 +23,7 @@ func (r *studioResolver) URL(ctx context.Context, obj *models.Studio) (*string,
func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*string, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
imagePath := urlbuilders.NewStudioURLBuilder(baseURL, obj.ID).GetStudioImageURL()
imagePath := urlbuilders.NewStudioURLBuilder(baseURL, obj).GetStudioImageURL()
var hasImage bool
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {

View file

@ -31,8 +31,20 @@ func (r *tagResolver) SceneMarkerCount(ctx context.Context, obj *models.Tag) (re
return &count, err
}
func (r *tagResolver) PerformerCount(ctx context.Context, obj *models.Tag) (ret *int, err error) {
var count int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
count, err = repo.Performer().CountByTagID(obj.ID)
return err
}); err != nil {
return nil, err
}
return &count, err
}
func (r *tagResolver) ImagePath(ctx context.Context, obj *models.Tag) (*string, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
imagePath := urlbuilders.NewTagURLBuilder(baseURL, obj.ID).GetTagImageURL()
imagePath := urlbuilders.NewTagURLBuilder(baseURL, obj).GetTagImageURL()
return &imagePath, nil
}

View file

@ -151,6 +151,8 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
refreshScraperCache = true
}
config.Set(config.ScraperCertCheck, input.ScraperCertCheck)
if input.StashBoxes != nil {
if err := config.ValidateStashBoxes(input.StashBoxes); err != nil {
return nil, err

View file

@ -26,7 +26,7 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr
// Process the base 64 encoded image string
if input.FrontImage != nil {
_, frontimageData, err = utils.ProcessBase64Image(*input.FrontImage)
frontimageData, err = utils.ProcessImageInput(*input.FrontImage)
if err != nil {
return nil, err
}
@ -34,7 +34,7 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr
// Process the base 64 encoded image string
if input.BackImage != nil {
_, backimageData, err = utils.ProcessBase64Image(*input.BackImage)
backimageData, err = utils.ProcessImageInput(*input.BackImage)
if err != nil {
return nil, err
}
@ -126,7 +126,7 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
var frontimageData []byte
frontImageIncluded := translator.hasField("front_image")
if input.FrontImage != nil {
_, frontimageData, err = utils.ProcessBase64Image(*input.FrontImage)
frontimageData, err = utils.ProcessImageInput(*input.FrontImage)
if err != nil {
return nil, err
}
@ -134,7 +134,7 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
backImageIncluded := translator.hasField("back_image")
var backimageData []byte
if input.BackImage != nil {
_, backimageData, err = utils.ProcessBase64Image(*input.BackImage)
backimageData, err = utils.ProcessImageInput(*input.BackImage)
if err != nil {
return nil, err
}
@ -189,7 +189,7 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
// HACK - if front image is null and back image is not null, then set the front image
// to the default image since we can't have a null front image and a non-null back image
if frontimageData == nil && backimageData != nil {
_, frontimageData, _ = utils.ProcessBase64Image(models.DefaultMovieImage)
frontimageData, _ = utils.ProcessImageInput(models.DefaultMovieImage)
}
if err := qb.UpdateImages(movie.ID, frontimageData, backimageData); err != nil {

View file

@ -18,7 +18,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
var err error
if input.Image != nil {
_, imageData, err = utils.ProcessBase64Image(*input.Image)
imageData, err = utils.ProcessImageInput(*input.Image)
}
if err != nil {
@ -94,6 +94,12 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
return err
}
if len(input.TagIds) > 0 {
if err := r.updatePerformerTags(qb, performer.ID, input.TagIds); err != nil {
return err
}
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdateImage(performer.ID, imageData); err != nil {
@ -133,7 +139,7 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
var err error
imageIncluded := translator.hasField("image")
if input.Image != nil {
_, imageData, err = utils.ProcessBase64Image(*input.Image)
imageData, err = utils.ProcessImageInput(*input.Image)
if err != nil {
return nil, err
}
@ -183,6 +189,13 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
return err
}
// Save the tags
if translator.hasField("tag_ids") {
if err := r.updatePerformerTags(qb, performer.ID, input.TagIds); err != nil {
return err
}
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdateImage(performer.ID, imageData); err != nil {
@ -211,6 +224,92 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
return performer, nil
}
func (r *mutationResolver) updatePerformerTags(qb models.PerformerReaderWriter, performerID int, tagsIDs []string) error {
ids, err := utils.StringSliceToIntSlice(tagsIDs)
if err != nil {
return err
}
return qb.UpdateTags(performerID, ids)
}
func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input models.BulkPerformerUpdateInput) ([]*models.Performer, error) {
performerIDs, err := utils.StringSliceToIntSlice(input.Ids)
if err != nil {
return nil, err
}
// Populate performer from the input
updatedTime := time.Now()
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
updatedPerformer := models.PerformerPartial{
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
}
updatedPerformer.URL = translator.nullString(input.URL, "url")
updatedPerformer.Birthdate = translator.sqliteDate(input.Birthdate, "birthdate")
updatedPerformer.Ethnicity = translator.nullString(input.Ethnicity, "ethnicity")
updatedPerformer.Country = translator.nullString(input.Country, "country")
updatedPerformer.EyeColor = translator.nullString(input.EyeColor, "eye_color")
updatedPerformer.Height = translator.nullString(input.Height, "height")
updatedPerformer.Measurements = translator.nullString(input.Measurements, "measurements")
updatedPerformer.FakeTits = translator.nullString(input.FakeTits, "fake_tits")
updatedPerformer.CareerLength = translator.nullString(input.CareerLength, "career_length")
updatedPerformer.Tattoos = translator.nullString(input.Tattoos, "tattoos")
updatedPerformer.Piercings = translator.nullString(input.Piercings, "piercings")
updatedPerformer.Aliases = translator.nullString(input.Aliases, "aliases")
updatedPerformer.Twitter = translator.nullString(input.Twitter, "twitter")
updatedPerformer.Instagram = translator.nullString(input.Instagram, "instagram")
updatedPerformer.Favorite = translator.nullBool(input.Favorite, "favorite")
if translator.hasField("gender") {
if input.Gender != nil {
updatedPerformer.Gender = &sql.NullString{String: input.Gender.String(), Valid: true}
} else {
updatedPerformer.Gender = &sql.NullString{String: "", Valid: false}
}
}
ret := []*models.Performer{}
// Start the transaction and save the performers
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Performer()
for _, performerID := range performerIDs {
updatedPerformer.ID = performerID
performer, err := qb.Update(updatedPerformer)
if err != nil {
return err
}
ret = append(ret, performer)
// Save the tags
if translator.hasField("tag_ids") {
tagIDs, err := adjustTagIDs(qb, performerID, *input.TagIds)
if err != nil {
return err
}
if err := qb.UpdateTags(performerID, tagIDs); err != nil {
return err
}
}
}
return nil
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *mutationResolver) PerformerDestroy(ctx context.Context, input models.PerformerDestroyInput) (bool, error) {
id, err := strconv.Atoi(input.ID)
if err != nil {

View file

@ -80,7 +80,7 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, translator
if input.CoverImage != nil && *input.CoverImage != "" {
var err error
_, coverImageData, err = utils.ProcessBase64Image(*input.CoverImage)
coverImageData, err = utils.ProcessImageInput(*input.CoverImage)
if err != nil {
return nil, err
}
@ -253,7 +253,7 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.Bul
// Save the tags
if translator.hasField("tag_ids") {
tagIDs, err := adjustSceneTagIDs(qb, sceneID, *input.TagIds)
tagIDs, err := adjustTagIDs(qb, sceneID, *input.TagIds)
if err != nil {
return err
}
@ -330,7 +330,11 @@ func adjustScenePerformerIDs(qb models.SceneReader, sceneID int, ids models.Bulk
return adjustIDs(ret, ids), nil
}
func adjustSceneTagIDs(qb models.SceneReader, sceneID int, ids models.BulkUpdateIds) (ret []int, err error) {
type tagIDsGetter interface {
GetTagIDs(id int) ([]int, error)
}
func adjustTagIDs(qb tagIDsGetter, sceneID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetTagIDs(sceneID)
if err != nil {
return nil, err

View file

@ -20,7 +20,7 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
// Process the base 64 encoded image string
if input.Image != nil {
_, imageData, err = utils.ProcessBase64Image(*input.Image)
imageData, err = utils.ProcessImageInput(*input.Image)
if err != nil {
return nil, err
}
@ -96,7 +96,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
imageIncluded := translator.hasField("image")
if input.Image != nil {
var err error
_, imageData, err = utils.ProcessBase64Image(*input.Image)
imageData, err = utils.ProcessImageInput(*input.Image)
if err != nil {
return nil, err
}

View file

@ -24,7 +24,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate
var err error
if input.Image != nil {
_, imageData, err = utils.ProcessBase64Image(*input.Image)
imageData, err = utils.ProcessImageInput(*input.Image)
if err != nil {
return nil, err
@ -82,7 +82,7 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
imageIncluded := translator.hasField("image")
if input.Image != nil {
_, imageData, err = utils.ProcessBase64Image(*input.Image)
imageData, err = utils.ProcessImageInput(*input.Image)
if err != nil {
return nil, err

View file

@ -46,6 +46,8 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
Stashes: config.GetStashPaths(),
DatabasePath: config.GetDatabasePath(),
GeneratedPath: config.GetGeneratedPath(),
ConfigFilePath: config.GetConfigFilePath(),
ScrapersPath: config.GetScrapersPath(),
CachePath: config.GetCachePath(),
CalculateMd5: config.IsCalculateMD5(),
VideoFileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
@ -71,6 +73,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
Excludes: config.GetExcludes(),
ImageExcludes: config.GetImageExcludes(),
ScraperUserAgent: &scraperUserAgent,
ScraperCertCheck: config.GetScraperCertCheck(),
ScraperCDPPath: &scraperCDPPath,
StashBoxes: config.GetStashBoxes(),
}

View file

@ -53,14 +53,3 @@ func (r *queryResolver) AllMovies(ctx context.Context) (ret []*models.Movie, err
return ret, nil
}
func (r *queryResolver) AllMoviesSlim(ctx context.Context) (ret []*models.Movie, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Movie().AllSlim()
return err
}); err != nil {
return nil, err
}
return ret, nil
}

View file

@ -52,14 +52,3 @@ func (r *queryResolver) AllPerformers(ctx context.Context) (ret []*models.Perfor
return ret, nil
}
func (r *queryResolver) AllPerformersSlim(ctx context.Context) (ret []*models.Performer, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Performer().AllSlim()
return err
}); err != nil {
return nil, err
}
return ret, nil
}

View file

@ -54,14 +54,3 @@ func (r *queryResolver) AllStudios(ctx context.Context) (ret []*models.Studio, e
return ret, nil
}
func (r *queryResolver) AllStudiosSlim(ctx context.Context) (ret []*models.Studio, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Studio().AllSlim()
return err
}); err != nil {
return nil, err
}
return ret, nil
}

View file

@ -53,14 +53,3 @@ func (r *queryResolver) AllTags(ctx context.Context) (ret []*models.Tag, err err
return ret, nil
}
func (r *queryResolver) AllTagsSlim(ctx context.Context) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Tag().AllSlim()
return err
}); err != nil {
return nil, err
}
return ret, nil
}

View file

@ -7,6 +7,7 @@ import (
"github.com/stashapp/stash/pkg/api/urlbuilders"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
)
@ -29,5 +30,5 @@ func (r *queryResolver) SceneStreams(ctx context.Context, id *string) ([]*models
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
builder := urlbuilders.NewSceneURLBuilder(baseURL, scene.ID)
return manager.GetSceneStreamPaths(scene, builder.GetStreamURL())
return manager.GetSceneStreamPaths(scene, builder.GetStreamURL(), config.GetMaxStreamingTranscodeSize())
}

View file

@ -14,6 +14,7 @@ import (
"runtime/debug"
"strconv"
"strings"
"time"
"github.com/99designs/gqlgen/handler"
"github.com/go-chi/chi"
@ -135,12 +136,14 @@ func Start() {
},
})
maxUploadSize := handler.UploadMaxSize(config.GetMaxUploadSize())
websocketKeepAliveDuration := handler.WebsocketKeepAliveDuration(10 * time.Second)
txnManager := manager.GetInstance().TxnManager
resolver := &Resolver{
txnManager: txnManager,
}
gqlHandler := handler.GraphQL(models.NewExecutableSchema(models.Config{Resolvers: resolver}), recoverFunc, websocketUpgrader, maxUploadSize)
gqlHandler := handler.GraphQL(models.NewExecutableSchema(models.Config{Resolvers: resolver}), recoverFunc, websocketUpgrader, websocketKeepAliveDuration, maxUploadSize)
r.Handle("/graphql", gqlHandler)
r.Handle("/playground", handler.Playground("GraphQL playground", "/graphql"))

View file

@ -1,25 +1,28 @@
package urlbuilders
import (
"github.com/stashapp/stash/pkg/models"
"strconv"
)
type ImageURLBuilder struct {
BaseURL string
ImageID string
BaseURL string
ImageID string
UpdatedAt string
}
func NewImageURLBuilder(baseURL string, imageID int) ImageURLBuilder {
func NewImageURLBuilder(baseURL string, image *models.Image) ImageURLBuilder {
return ImageURLBuilder{
BaseURL: baseURL,
ImageID: strconv.Itoa(imageID),
BaseURL: baseURL,
ImageID: strconv.Itoa(image.ID),
UpdatedAt: strconv.FormatInt(image.UpdatedAt.Timestamp.Unix(), 10),
}
}
func (b ImageURLBuilder) GetImageURL() string {
return b.BaseURL + "/image/" + b.ImageID + "/image"
return b.BaseURL + "/image/" + b.ImageID + "/image?" + b.UpdatedAt
}
func (b ImageURLBuilder) GetThumbnailURL() string {
return b.BaseURL + "/image/" + b.ImageID + "/thumbnail"
return b.BaseURL + "/image/" + b.ImageID + "/thumbnail?" + b.UpdatedAt
}
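The builders now take the full model and append its UpdatedAt timestamp as a query string, presumably so that browser-cached images and thumbnails are refetched whenever the underlying record changes. A minimal, self-contained sketch of that cache-busting effect (local stand-in types, not the stash packages):

```go
package main

import (
	"fmt"
	"strconv"
	"time"
)

// image is a stand-in for models.Image: only the fields the URL builder needs.
type image struct {
	ID        int
	UpdatedAt time.Time
}

// thumbnailURL mirrors GetThumbnailURL above: the UpdatedAt suffix changes
// whenever the image record is updated, invalidating any cached copy.
func thumbnailURL(baseURL string, img image) string {
	return baseURL + "/image/" + strconv.Itoa(img.ID) + "/thumbnail?" +
		strconv.FormatInt(img.UpdatedAt.Unix(), 10)
}

func main() {
	img := image{ID: 42, UpdatedAt: time.Now()}
	fmt.Println(thumbnailURL("http://localhost:9999", img))
	// e.g. http://localhost:9999/image/42/thumbnail?1617000000
}
```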

View file

@ -1,23 +1,28 @@
package urlbuilders
import "strconv"
import (
"github.com/stashapp/stash/pkg/models"
"strconv"
)
type MovieURLBuilder struct {
BaseURL string
MovieID string
BaseURL string
MovieID string
UpdatedAt string
}
func NewMovieURLBuilder(baseURL string, movieID int) MovieURLBuilder {
func NewMovieURLBuilder(baseURL string, movie *models.Movie) MovieURLBuilder {
return MovieURLBuilder{
BaseURL: baseURL,
MovieID: strconv.Itoa(movieID),
BaseURL: baseURL,
MovieID: strconv.Itoa(movie.ID),
UpdatedAt: strconv.FormatInt(movie.UpdatedAt.Timestamp.Unix(), 10),
}
}
func (b MovieURLBuilder) GetMovieFrontImageURL() string {
return b.BaseURL + "/movie/" + b.MovieID + "/frontimage"
return b.BaseURL + "/movie/" + b.MovieID + "/frontimage?" + b.UpdatedAt
}
func (b MovieURLBuilder) GetMovieBackImageURL() string {
return b.BaseURL + "/movie/" + b.MovieID + "/backimage"
return b.BaseURL + "/movie/" + b.MovieID + "/backimage?" + b.UpdatedAt
}

View file

@ -1,19 +1,24 @@
package urlbuilders
import "strconv"
import (
"github.com/stashapp/stash/pkg/models"
"strconv"
)
type PerformerURLBuilder struct {
BaseURL string
PerformerID string
UpdatedAt string
}
func NewPerformerURLBuilder(baseURL string, performerID int) PerformerURLBuilder {
func NewPerformerURLBuilder(baseURL string, performer *models.Performer) PerformerURLBuilder {
return PerformerURLBuilder{
BaseURL: baseURL,
PerformerID: strconv.Itoa(performerID),
PerformerID: strconv.Itoa(performer.ID),
UpdatedAt: strconv.FormatInt(performer.UpdatedAt.Timestamp.Unix(), 10),
}
}
func (b PerformerURLBuilder) GetPerformerImageURL() string {
return b.BaseURL + "/performer/" + b.PerformerID + "/image"
return b.BaseURL + "/performer/" + b.PerformerID + "/image?" + b.UpdatedAt
}

View file

@ -1,19 +1,24 @@
package urlbuilders
import "strconv"
import (
"github.com/stashapp/stash/pkg/models"
"strconv"
)
type StudioURLBuilder struct {
BaseURL string
StudioID string
BaseURL string
StudioID string
UpdatedAt string
}
func NewStudioURLBuilder(baseURL string, studioID int) StudioURLBuilder {
func NewStudioURLBuilder(baseURL string, studio *models.Studio) StudioURLBuilder {
return StudioURLBuilder{
BaseURL: baseURL,
StudioID: strconv.Itoa(studioID),
BaseURL: baseURL,
StudioID: strconv.Itoa(studio.ID),
UpdatedAt: strconv.FormatInt(studio.UpdatedAt.Timestamp.Unix(), 10),
}
}
func (b StudioURLBuilder) GetStudioImageURL() string {
return b.BaseURL + "/studio/" + b.StudioID + "/image"
return b.BaseURL + "/studio/" + b.StudioID + "/image?" + b.UpdatedAt
}

View file

@ -1,19 +1,24 @@
package urlbuilders
import "strconv"
import (
"github.com/stashapp/stash/pkg/models"
"strconv"
)
type TagURLBuilder struct {
BaseURL string
TagID string
BaseURL string
TagID string
UpdatedAt string
}
func NewTagURLBuilder(baseURL string, tagID int) TagURLBuilder {
func NewTagURLBuilder(baseURL string, tag *models.Tag) TagURLBuilder {
return TagURLBuilder{
BaseURL: baseURL,
TagID: strconv.Itoa(tagID),
BaseURL: baseURL,
TagID: strconv.Itoa(tag.ID),
UpdatedAt: strconv.FormatInt(tag.UpdatedAt.Timestamp.Unix(), 10),
}
}
func (b TagURLBuilder) GetTagImageURL() string {
return b.BaseURL + "/tag/" + b.TagID + "/image"
return b.BaseURL + "/tag/" + b.TagID + "/image?" + b.UpdatedAt
}

View file

@ -5,6 +5,7 @@ import (
"errors"
"fmt"
"os"
"sync"
"time"
"github.com/fvbommel/sortorder"
@ -20,8 +21,9 @@ import (
)
var DB *sqlx.DB
var WriteMu *sync.Mutex
var dbPath string
var appSchemaVersion uint = 18
var appSchemaVersion uint = 19
var databaseSchemaVersion uint
const sqlite3Driver = "sqlite3ex"
@ -63,6 +65,7 @@ func Initialize(databasePath string) bool {
const disableForeignKeys = false
DB = open(databasePath, disableForeignKeys)
WriteMu = &sync.Mutex{}
return false
}
@ -77,6 +80,7 @@ func open(databasePath string, disableForeignKeys bool) *sqlx.DB {
conn, err := sqlx.Open(sqlite3Driver, url)
conn.SetMaxOpenConns(25)
conn.SetMaxIdleConns(4)
conn.SetConnMaxLifetime(30 * time.Second)
if err != nil {
logger.Fatalf("db.Open(): %q\n", err)
}

View file

@ -0,0 +1,9 @@
CREATE TABLE `performers_tags` (
`performer_id` integer NOT NULL,
`tag_id` integer NOT NULL,
foreign key(`performer_id`) references `performers`(`id`) on delete CASCADE,
foreign key(`tag_id`) references `tags`(`id`) on delete CASCADE
);
CREATE INDEX `index_performers_tags_on_tag_id` on `performers_tags` (`tag_id`);
CREATE INDEX `index_performers_tags_on_performer_id` on `performers_tags` (`performer_id`);

View file

@ -59,6 +59,13 @@ func ZipFilename(zipFilename, filenameInZip string) string {
return zipFilename + zipSeparator + filenameInZip
}
// IsZipPath returns true if the path includes the zip separator byte,
// indicating it is within a zip file.
// TODO - this should be moved to utils
func IsZipPath(p string) bool {
return strings.Contains(p, zipSeparator)
}
type imageReadCloser struct {
src io.ReadCloser
zrc *zip.ReadCloser
@ -239,7 +246,7 @@ func Serve(w http.ResponseWriter, r *http.Request, path string) {
func IsCover(img *models.Image) bool {
_, fn := getFilePath(img.Path)
return fn == "cover.jpg"
return strings.HasSuffix(fn, "cover.jpg")
}
func GetTitle(s *models.Image) string {

34
pkg/image/image_test.go Normal file
View file

@ -0,0 +1,34 @@
package image
import (
"fmt"
"path/filepath"
"testing"
"github.com/stashapp/stash/pkg/models"
"github.com/stretchr/testify/assert"
)
func TestIsCover(t *testing.T) {
type test struct {
fn string
isCover bool
}
tests := []test{
{"cover.jpg", true},
{"covernot.jpg", false},
{"Cover.jpg", false},
{fmt.Sprintf("subDir%scover.jpg", string(filepath.Separator)), true},
{"endsWithcover.jpg", true},
{"cover.png", false},
}
assert := assert.New(t)
for _, tc := range tests {
img := &models.Image{
Path: tc.fn,
}
assert.Equal(tc.isCover, IsCover(img), "expected: %t for %s", tc.isCover, tc.fn)
}
}

View file

@ -29,6 +29,11 @@ var logBuffer []LogItem
// Init initialises the logger based on a logging configuration
func Init(logFile string, logOut bool, logLevel string) {
var file *os.File
customFormatter := new(logrus.TextFormatter)
customFormatter.TimestampFormat = "2006-01-02 15:04:05"
customFormatter.ForceColors = true
customFormatter.FullTimestamp = true
logger.SetFormatter(customFormatter)
if logFile != "" {
var err error

View file

@ -86,6 +86,7 @@ const SessionStoreKey = "session_store_key"
// scraping options
const ScrapersPath = "scrapers_path"
const ScraperUserAgent = "scraper_user_agent"
const ScraperCertCheck = "scraper_cert_check"
const ScraperCDPPath = "scraper_cdp_path"
// stash-box options
@ -145,6 +146,10 @@ func GetConfigPath() string {
return filepath.Dir(configFileUsed)
}
func GetConfigFilePath() string {
return viper.ConfigFileUsed()
}
func GetStashPaths() []*models.StashConfig {
var ret []*models.StashConfig
if err := viper.UnmarshalKey(Stash, &ret); err != nil || len(ret) == 0 {
@ -274,6 +279,17 @@ func GetScraperCDPPath() string {
return viper.GetString(ScraperCDPPath)
}
// GetScraperCertCheck returns true if the scraper should check for insecure
// certificates when fetching an image or a page.
func GetScraperCertCheck() bool {
ret := true
if viper.IsSet(ScraperCertCheck) {
ret = viper.GetBool(ScraperCertCheck)
}
return ret
}
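GetScraperCertCheck defaults to true, so existing setups keep verifying certificates. The diff does not show where the flag is consumed; the sketch below only illustrates the usual pattern for such a setting, with a hypothetical helper rather than the project's actual wiring:

```go
package scraper

import (
	"crypto/tls"
	"net/http"
)

// newScraperClient is hypothetical: it shows how a "check certificates" flag
// is commonly applied. When certCheck is false, invalid or self-signed
// certificates are accepted instead of failing the request.
func newScraperClient(certCheck bool) *http.Client {
	return &http.Client{
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: !certCheck},
		},
	}
}
```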
func GetStashBoxes() []*models.StashBox {
var boxes []*models.StashBox
viper.UnmarshalKey(StashBoxes, &boxes)

View file

@ -2,9 +2,9 @@ package jsonschema
import (
"fmt"
"github.com/json-iterator/go"
"os"
jsoniter "github.com/json-iterator/go"
"github.com/stashapp/stash/pkg/models"
)
@ -26,6 +26,7 @@ type Performer struct {
Piercings string `json:"piercings,omitempty"`
Aliases string `json:"aliases,omitempty"`
Favorite bool `json:"favorite,omitempty"`
Tags []string `json:"tags,omitempty"`
Image string `json:"image,omitempty"`
CreatedAt models.JSONTime `json:"created_at,omitempty"`
UpdatedAt models.JSONTime `json:"updated_at,omitempty"`

View file

@ -215,13 +215,13 @@ func (s *singleton) Scan(input models.ScanMetadataInput) {
task := ScanTask{
TxnManager: s.TxnManager,
FilePath: path,
UseFileMetadata: input.UseFileMetadata,
StripFileExtension: input.StripFileExtension,
UseFileMetadata: utils.IsTrue(input.UseFileMetadata),
StripFileExtension: utils.IsTrue(input.StripFileExtension),
fileNamingAlgorithm: fileNamingAlgo,
calculateMD5: calculateMD5,
GeneratePreview: input.ScanGeneratePreviews,
GenerateImagePreview: input.ScanGenerateImagePreviews,
GenerateSprite: input.ScanGenerateSprites,
GeneratePreview: utils.IsTrue(input.ScanGeneratePreviews),
GenerateImagePreview: utils.IsTrue(input.ScanGenerateImagePreviews),
GenerateSprite: utils.IsTrue(input.ScanGenerateSprites),
}
go task.Start(&wg)
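With ScanMetadataInput's booleans now nullable (see the schema change earlier in this diff), the scan task wraps each field in utils.IsTrue. That helper is not shown here; a sketch of the behaviour these call sites assume:

```go
package utils

// IsTrue treats a nil *bool (an omitted nullable GraphQL input) as false.
// Sketch only — the actual utils implementation is not part of this diff.
func IsTrue(b *bool) bool {
	return b != nil && *b
}
```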

View file

@ -194,7 +194,38 @@ func GetSceneFileContainer(scene *models.Scene) (ffmpeg.Container, error) {
return container, nil
}
func GetSceneStreamPaths(scene *models.Scene, directStreamURL string) ([]*models.SceneStreamEndpoint, error) {
func includeSceneStreamPath(scene *models.Scene, streamingResolution models.StreamingResolutionEnum, maxStreamingTranscodeSize models.StreamingResolutionEnum) bool {
// convert StreamingResolutionEnum to ResolutionEnum so we can get the min
// resolution
convertedRes := models.ResolutionEnum(streamingResolution)
minResolution := int64(convertedRes.GetMinResolution())
sceneResolution := scene.GetMinResolution()
// don't include if scene resolution is smaller than the streamingResolution
if sceneResolution != 0 && sceneResolution < minResolution {
return false
}
// if we always allow everything, then return true
if maxStreamingTranscodeSize == models.StreamingResolutionEnumOriginal {
return true
}
// convert StreamingResolutionEnum to ResolutionEnum
maxStreamingResolution := models.ResolutionEnum(maxStreamingTranscodeSize)
return int64(maxStreamingResolution.GetMinResolution()) >= minResolution
}
func makeStreamEndpoint(streamURL string, streamingResolution models.StreamingResolutionEnum, mimeType, label string) *models.SceneStreamEndpoint {
return &models.SceneStreamEndpoint{
URL: fmt.Sprintf("%s?resolution=%s", streamURL, streamingResolution.String()),
MimeType: &mimeType,
Label: &label,
}
}
func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreamingTranscodeSize models.StreamingResolutionEnum) ([]*models.SceneStreamEndpoint, error) {
if scene == nil {
return nil, fmt.Errorf("nil scene")
}
@ -248,107 +279,51 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string) ([]*models
// Note: These have the wrong mime type intentionally to allow jwplayer to select between mp4/webm
webmLabelFourK := "WEBM 4K (2160p)" // "FOUR_K"
webmLabelFullHD := "WEBM Full HD (1080p)" // "FULL_HD"
webmLabelStardardHD := "WEBM HD (720p)" // "STANDARD_HD"
webmLabelStandardHD := "WEBM HD (720p)" // "STANDARD_HD"
webmLabelStandard := "WEBM Standard (480p)" // "STANDARD"
webmLabelLow := "WEBM Low (240p)" // "LOW"
if !scene.Height.Valid || scene.Height.Int64 >= 2160 {
new := models.SceneStreamEndpoint{
URL: directStreamURL + ".webm?resolution=FOUR_K",
MimeType: &mimeMp4,
Label: &webmLabelFourK,
}
ret = append(ret, &new)
}
if !scene.Height.Valid || scene.Height.Int64 >= 1080 {
new := models.SceneStreamEndpoint{
URL: directStreamURL + ".webm?resolution=FULL_HD",
MimeType: &mimeMp4,
Label: &webmLabelFullHD,
}
ret = append(ret, &new)
}
if !scene.Height.Valid || scene.Height.Int64 >= 720 {
new := models.SceneStreamEndpoint{
URL: directStreamURL + ".webm?resolution=STANDARD_HD",
MimeType: &mimeMp4,
Label: &webmLabelStardardHD,
}
ret = append(ret, &new)
}
if !scene.Height.Valid || scene.Height.Int64 >= 480 {
new := models.SceneStreamEndpoint{
URL: directStreamURL + ".webm?resolution=STANDARD",
MimeType: &mimeMp4,
Label: &webmLabelStandard,
}
ret = append(ret, &new)
}
if !scene.Height.Valid || scene.Height.Int64 >= 240 {
new := models.SceneStreamEndpoint{
URL: directStreamURL + ".webm?resolution=LOW",
MimeType: &mimeMp4,
Label: &webmLabelLow,
}
ret = append(ret, &new)
}
// Setup up lower quality transcoding options (MP4)
mp4LabelFourK := "MP4 4K (2160p)" // "FOUR_K"
mp4LabelFullHD := "MP4 Full HD (1080p)" // "FULL_HD"
mp4LabelStardardHD := "MP4 HD (720p)" // "STANDARD_HD"
mp4LabelStandardHD := "MP4 HD (720p)" // "STANDARD_HD"
mp4LabelStandard := "MP4 Standard (480p)" // "STANDARD"
mp4LabelLow := "MP4 Low (240p)" // "LOW"
if !scene.Height.Valid || scene.Height.Int64 >= 2160 {
new := models.SceneStreamEndpoint{
URL: directStreamURL + ".mp4?resolution=FOUR_K",
MimeType: &mimeMp4,
Label: &mp4LabelFourK,
}
ret = append(ret, &new)
var webmStreams []*models.SceneStreamEndpoint
var mp4Streams []*models.SceneStreamEndpoint
webmURL := directStreamURL + ".webm"
mp4URL := directStreamURL + ".mp4"
if includeSceneStreamPath(scene, models.StreamingResolutionEnumFourK, maxStreamingTranscodeSize) {
webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumFourK, mimeMp4, webmLabelFourK))
mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumFourK, mimeMp4, mp4LabelFourK))
}
if !scene.Height.Valid || scene.Height.Int64 >= 1080 {
new := models.SceneStreamEndpoint{
URL: directStreamURL + ".mp4?resolution=FULL_HD",
MimeType: &mimeMp4,
Label: &mp4LabelFullHD,
}
ret = append(ret, &new)
if includeSceneStreamPath(scene, models.StreamingResolutionEnumFullHd, maxStreamingTranscodeSize) {
webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumFullHd, mimeMp4, webmLabelFullHD))
mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumFullHd, mimeMp4, mp4LabelFullHD))
}
if !scene.Height.Valid || scene.Height.Int64 >= 720 {
new := models.SceneStreamEndpoint{
URL: directStreamURL + ".mp4?resolution=STANDARD_HD",
MimeType: &mimeMp4,
Label: &mp4LabelStardardHD,
}
ret = append(ret, &new)
if includeSceneStreamPath(scene, models.StreamingResolutionEnumStandardHd, maxStreamingTranscodeSize) {
webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumStandardHd, mimeMp4, webmLabelStandardHD))
mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumStandardHd, mimeMp4, mp4LabelStandardHD))
}
if !scene.Height.Valid || scene.Height.Int64 >= 480 {
new := models.SceneStreamEndpoint{
URL: directStreamURL + ".mp4?resolution=STANDARD",
MimeType: &mimeMp4,
Label: &mp4LabelStandard,
}
ret = append(ret, &new)
if includeSceneStreamPath(scene, models.StreamingResolutionEnumStandard, maxStreamingTranscodeSize) {
webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumStandard, mimeMp4, webmLabelStandard))
mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumStandard, mimeMp4, mp4LabelStandard))
}
if !scene.Height.Valid || scene.Height.Int64 >= 240 {
new := models.SceneStreamEndpoint{
URL: directStreamURL + ".mp4?resolution=LOW",
MimeType: &mimeMp4,
Label: &mp4LabelLow,
}
ret = append(ret, &new)
if includeSceneStreamPath(scene, models.StreamingResolutionEnumLow, maxStreamingTranscodeSize) {
webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumLow, mimeMp4, webmLabelLow))
mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumLow, mimeMp4, mp4LabelLow))
}
ret = append(ret, webmStreams...)
ret = append(ret, mp4Streams...)
defaultStreams := []*models.SceneStreamEndpoint{
{
URL: directStreamURL + ".webm",

View file

@ -4,6 +4,7 @@ import (
"context"
"database/sql"
"fmt"
"path/filepath"
"strings"
"sync"
@ -38,13 +39,56 @@ func (t *AutoTagTask) getQueryRegex(name string) string {
return ret
}
func (t *AutoTagTask) getQueryFilter(regex string) *models.SceneFilterType {
organized := false
ret := &models.SceneFilterType{
Path: &models.StringCriterionInput{
Modifier: models.CriterionModifierMatchesRegex,
Value: "(?i)" + regex,
},
Organized: &organized,
}
sep := string(filepath.Separator)
var or *models.SceneFilterType
for _, p := range t.paths {
newOr := &models.SceneFilterType{}
if or == nil {
ret.And = newOr
} else {
or.Or = newOr
}
or = newOr
if !strings.HasSuffix(p, sep) {
p = p + sep
}
or.Path = &models.StringCriterionInput{
Modifier: models.CriterionModifierEquals,
Value: p + "%",
}
}
return ret
}
func (t *AutoTagTask) getFindFilter() *models.FindFilterType {
perPage := 0
return &models.FindFilterType{
PerPage: &perPage,
}
}
func (t *AutoTagPerformerTask) autoTagPerformer() {
regex := t.getQueryRegex(t.performer.Name.String)
if err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error {
qb := r.Scene()
scenes, err := qb.QueryForAutoTag(regex, t.paths)
scenes, _, err := qb.Query(t.getQueryFilter(regex), t.getFindFilter())
if err != nil {
return fmt.Errorf("Error querying scenes with regex '%s': %s", regex, err.Error())
@ -84,7 +128,7 @@ func (t *AutoTagStudioTask) autoTagStudio() {
if err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error {
qb := r.Scene()
scenes, err := qb.QueryForAutoTag(regex, t.paths)
scenes, _, err := qb.Query(t.getQueryFilter(regex), t.getFindFilter())
if err != nil {
return fmt.Errorf("Error querying scenes with regex '%s': %s", regex, err.Error())
@ -133,7 +177,7 @@ func (t *AutoTagTagTask) autoTagTag() {
if err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error {
qb := r.Scene()
scenes, err := qb.QueryForAutoTag(regex, t.paths)
scenes, _, err := qb.Query(t.getQueryFilter(regex), t.getFindFilter())
if err != nil {
return fmt.Errorf("Error querying scenes with regex '%s': %s", regex, err.Error())

View file

@ -725,6 +725,18 @@ func (t *ExportTask) exportPerformer(wg *sync.WaitGroup, jobChan <-chan *models.
continue
}
tags, err := repo.Tag().FindByPerformerID(p.ID)
if err != nil {
logger.Errorf("[performers] <%s> error getting performer tags: %s", p.Checksum, err.Error())
continue
}
newPerformerJSON.Tags = tag.GetNames(tags)
if t.includeDependencies {
t.tags.IDs = utils.IntAppendUniques(t.tags.IDs, tag.GetIDs(tags))
}
performerJSON, err := t.json.getPerformer(p.Checksum)
if err != nil {
logger.Debugf("[performers] error reading performer json: %s", err.Error())

View file

@ -209,6 +209,7 @@ func (t *ImportTask) ImportPerformers(ctx context.Context) {
readerWriter := r.Performer()
importer := &performer.Importer{
ReaderWriter: readerWriter,
TagWriter: r.Tag(),
Input: *performerJSON,
}

View file

@ -315,14 +315,22 @@ func (t *ScanTask) associateGallery(wg *sizedwaitgroup.SizedWaitGroup) {
scene, _ := sqb.FindByPath(scenePath)
// found related Scene
if scene != nil {
logger.Infof("associate: Gallery %s is related to scene: %d", t.FilePath, scene.ID)
if err := sqb.UpdateGalleries(scene.ID, []int{g.ID}); err != nil {
return err
sceneGalleries, _ := sqb.FindByGalleryID(g.ID) // check if gallery is already associated to the scene
isAssoc := false
for _, sg := range sceneGalleries {
if scene.ID == sg.ID {
isAssoc = true
break
}
}
if !isAssoc {
logger.Infof("associate: Gallery %s is related to scene: %d", t.FilePath, scene.ID)
if err := sqb.UpdateGalleries(scene.ID, []int{g.ID}); err != nil {
return err
}
}
}
}
return nil
}); err != nil {
logger.Error(err.Error())
@ -1044,6 +1052,12 @@ func walkFilesToScan(s *models.StashConfig, f filepath.WalkFunc) error {
excludeVidRegex := generateRegexps(config.GetExcludes())
excludeImgRegex := generateRegexps(config.GetImageExcludes())
// don't scan zip images directly
if image.IsZipPath(s.Path) {
logger.Warnf("Cannot rescan zip image %s. Rescan zip gallery instead.", s.Path)
return nil
}
generatedPath := config.GetGeneratedPath()
return utils.SymWalk(s.Path, func(path string, info os.FileInfo, err error) error {

View file

@ -0,0 +1,65 @@
package models
var resolutionMax = []int{
240,
360,
480,
540,
720,
1080,
1440,
1920,
2160,
2880,
3384,
4320,
0,
}
// GetMaxResolution returns the maximum width or height that media must be
// to qualify as this resolution. A return value of 0 means that there is no
// maximum.
func (r *ResolutionEnum) GetMaxResolution() int {
if !r.IsValid() {
return 0
}
// sanity check - length of arrays must be the same
if len(resolutionMax) != len(AllResolutionEnum) {
panic("resolutionMax array length != AllResolutionEnum array length")
}
for i, rr := range AllResolutionEnum {
if rr == *r {
return resolutionMax[i]
}
}
return 0
}
// GetMinResolution returns the minimum width or height that media must be
// to qualify as this resolution.
func (r *ResolutionEnum) GetMinResolution() int {
if !r.IsValid() {
return 0
}
// sanity check - length of arrays must be the same
if len(resolutionMax) != len(AllResolutionEnum) {
panic("resolutionMax array length != AllResolutionEnum array length")
}
// use the previous resolution max as this resolution min
for i, rr := range AllResolutionEnum {
if rr == *r {
if i > 0 {
return resolutionMax[i-1]
}
return 0
}
}
return 0
}

View file

@ -300,8 +300,8 @@ func (_m *GalleryReaderWriter) GetPerformerIDs(galleryID int) ([]int, error) {
return r0, r1
}
// GetTagIDs provides a mock function with given fields: galleryID
func (_m *GalleryReaderWriter) GetTagIDs(galleryID int) ([]int, error) {
// GetSceneIDs provides a mock function with given fields: galleryID
func (_m *GalleryReaderWriter) GetSceneIDs(galleryID int) ([]int, error) {
ret := _m.Called(galleryID)
var r0 []int
@ -323,8 +323,8 @@ func (_m *GalleryReaderWriter) GetTagIDs(galleryID int) ([]int, error) {
return r0, r1
}
// GetSceneIDs provides a mock function with given fields: galleryID
func (_m *GalleryReaderWriter) GetSceneIDs(galleryID int) ([]int, error) {
// GetTagIDs provides a mock function with given fields: galleryID
func (_m *GalleryReaderWriter) GetTagIDs(galleryID int) ([]int, error) {
ret := _m.Called(galleryID)
var r0 []int
@ -464,20 +464,6 @@ func (_m *GalleryReaderWriter) UpdatePerformers(galleryID int, performerIDs []in
return r0
}
// UpdateTags provides a mock function with given fields: galleryID, tagIDs
func (_m *GalleryReaderWriter) UpdateTags(galleryID int, tagIDs []int) error {
ret := _m.Called(galleryID, tagIDs)
var r0 error
if rf, ok := ret.Get(0).(func(int, []int) error); ok {
r0 = rf(galleryID, tagIDs)
} else {
r0 = ret.Error(0)
}
return r0
}
// UpdateScenes provides a mock function with given fields: galleryID, sceneIDs
func (_m *GalleryReaderWriter) UpdateScenes(galleryID int, sceneIDs []int) error {
ret := _m.Called(galleryID, sceneIDs)
@ -491,3 +477,17 @@ func (_m *GalleryReaderWriter) UpdateScenes(galleryID int, sceneIDs []int) error
return r0
}
// UpdateTags provides a mock function with given fields: galleryID, tagIDs
func (_m *GalleryReaderWriter) UpdateTags(galleryID int, tagIDs []int) error {
ret := _m.Called(galleryID, tagIDs)
var r0 error
if rf, ok := ret.Get(0).(func(int, []int) error); ok {
r0 = rf(galleryID, tagIDs)
} else {
r0 = ret.Error(0)
}
return r0
}

View file

@ -35,29 +35,6 @@ func (_m *MovieReaderWriter) All() ([]*models.Movie, error) {
return r0, r1
}
// AllSlim provides a mock function with given fields:
func (_m *MovieReaderWriter) AllSlim() ([]*models.Movie, error) {
ret := _m.Called()
var r0 []*models.Movie
if rf, ok := ret.Get(0).(func() []*models.Movie); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Movie)
}
}
var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// Count provides a mock function with given fields:
func (_m *MovieReaderWriter) Count() (int, error) {
ret := _m.Called()

View file

@ -35,29 +35,6 @@ func (_m *PerformerReaderWriter) All() ([]*models.Performer, error) {
return r0, r1
}
// AllSlim provides a mock function with given fields:
func (_m *PerformerReaderWriter) AllSlim() ([]*models.Performer, error) {
ret := _m.Called()
var r0 []*models.Performer
if rf, ok := ret.Get(0).(func() []*models.Performer); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Performer)
}
}
var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// Count provides a mock function with given fields:
func (_m *PerformerReaderWriter) Count() (int, error) {
ret := _m.Called()
@ -79,6 +56,27 @@ func (_m *PerformerReaderWriter) Count() (int, error) {
return r0, r1
}
// CountByTagID provides a mock function with given fields: tagID
func (_m *PerformerReaderWriter) CountByTagID(tagID int) (int, error) {
ret := _m.Called(tagID)
var r0 int
if rf, ok := ret.Get(0).(func(int) int); ok {
r0 = rf(tagID)
} else {
r0 = ret.Get(0).(int)
}
var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(tagID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// Create provides a mock function with given fields: newPerformer
func (_m *PerformerReaderWriter) Create(newPerformer models.Performer) (*models.Performer, error) {
ret := _m.Called(newPerformer)
@ -337,6 +335,29 @@ func (_m *PerformerReaderWriter) GetStashIDs(performerID int) ([]*models.StashID
return r0, r1
}
// GetTagIDs provides a mock function with given fields: sceneID
func (_m *PerformerReaderWriter) GetTagIDs(sceneID int) ([]int, error) {
ret := _m.Called(sceneID)
var r0 []int
if rf, ok := ret.Get(0).(func(int) []int); ok {
r0 = rf(sceneID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]int)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(sceneID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// Query provides a mock function with given fields: performerFilter, findFilter
func (_m *PerformerReaderWriter) Query(performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) {
ret := _m.Called(performerFilter, findFilter)
@ -440,3 +461,17 @@ func (_m *PerformerReaderWriter) UpdateStashIDs(performerID int, stashIDs []mode
return r0
}
// UpdateTags provides a mock function with given fields: sceneID, tagIDs
func (_m *PerformerReaderWriter) UpdateTags(sceneID int, tagIDs []int) error {
ret := _m.Called(sceneID, tagIDs)
var r0 error
if rf, ok := ret.Get(0).(func(int, []int) error); ok {
r0 = rf(sceneID, tagIDs)
} else {
r0 = ret.Error(0)
}
return r0
}

View file

@ -300,6 +300,29 @@ func (_m *SceneReaderWriter) FindByChecksum(checksum string) (*models.Scene, err
return r0, r1
}
// FindByGalleryID provides a mock function with given fields: performerID
func (_m *SceneReaderWriter) FindByGalleryID(performerID int) ([]*models.Scene, error) {
ret := _m.Called(performerID)
var r0 []*models.Scene
if rf, ok := ret.Get(0).(func(int) []*models.Scene); ok {
r0 = rf(performerID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Scene)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(performerID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// FindByMovieID provides a mock function with given fields: movieID
func (_m *SceneReaderWriter) FindByMovieID(movieID int) ([]*models.Scene, error) {
ret := _m.Called(movieID)
@ -392,29 +415,6 @@ func (_m *SceneReaderWriter) FindByPerformerID(performerID int) ([]*models.Scene
return r0, r1
}
// FindByGalleryID provides a mock function with given fields: galleryID
func (_m *SceneReaderWriter) FindByGalleryID(galleryID int) ([]*models.Scene, error) {
ret := _m.Called(galleryID)
var r0 []*models.Scene
if rf, ok := ret.Get(0).(func(int) []*models.Scene); ok {
r0 = rf(galleryID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Scene)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(galleryID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// FindMany provides a mock function with given fields: ids
func (_m *SceneReaderWriter) FindMany(ids []int) ([]*models.Scene, error) {
ret := _m.Called(ids)
@ -461,6 +461,29 @@ func (_m *SceneReaderWriter) GetCover(sceneID int) ([]byte, error) {
return r0, r1
}
// GetGalleryIDs provides a mock function with given fields: sceneID
func (_m *SceneReaderWriter) GetGalleryIDs(sceneID int) ([]int, error) {
ret := _m.Called(sceneID)
var r0 []int
if rf, ok := ret.Get(0).(func(int) []int); ok {
r0 = rf(sceneID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]int)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(sceneID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetMovies provides a mock function with given fields: sceneID
func (_m *SceneReaderWriter) GetMovies(sceneID int) ([]models.MoviesScenes, error) {
ret := _m.Called(sceneID)
@ -507,8 +530,31 @@ func (_m *SceneReaderWriter) GetPerformerIDs(sceneID int) ([]int, error) {
return r0, r1
}
// GetGalleryIDs provides a mock function with given fields: sceneID
func (_m *SceneReaderWriter) GetGalleryIDs(sceneID int) ([]int, error) {
// GetStashIDs provides a mock function with given fields: sceneID
func (_m *SceneReaderWriter) GetStashIDs(sceneID int) ([]*models.StashID, error) {
ret := _m.Called(sceneID)
var r0 []*models.StashID
if rf, ok := ret.Get(0).(func(int) []*models.StashID); ok {
r0 = rf(sceneID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.StashID)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(sceneID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetTagIDs provides a mock function with given fields: sceneID
func (_m *SceneReaderWriter) GetTagIDs(sceneID int) ([]int, error) {
ret := _m.Called(sceneID)
var r0 []int
@ -530,52 +576,6 @@ func (_m *SceneReaderWriter) GetGalleryIDs(sceneID int) ([]int, error) {
return r0, r1
}
// GetStashIDs provides a mock function with given fields: performerID
func (_m *SceneReaderWriter) GetStashIDs(performerID int) ([]*models.StashID, error) {
ret := _m.Called(performerID)
var r0 []*models.StashID
if rf, ok := ret.Get(0).(func(int) []*models.StashID); ok {
r0 = rf(performerID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.StashID)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(performerID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetTagIDs provides a mock function with given fields: imageID
func (_m *SceneReaderWriter) GetTagIDs(imageID int) ([]int, error) {
ret := _m.Called(imageID)
var r0 []int
if rf, ok := ret.Get(0).(func(int) []int); ok {
r0 = rf(imageID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]int)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(imageID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// IncrementOCounter provides a mock function with given fields: id
func (_m *SceneReaderWriter) IncrementOCounter(id int) (int, error) {
ret := _m.Called(id)
@ -766,6 +766,20 @@ func (_m *SceneReaderWriter) UpdateFull(updatedScene models.Scene) (*models.Scen
return r0, r1
}
// UpdateGalleries provides a mock function with given fields: sceneID, galleryIDs
func (_m *SceneReaderWriter) UpdateGalleries(sceneID int, galleryIDs []int) error {
ret := _m.Called(sceneID, galleryIDs)
var r0 error
if rf, ok := ret.Get(0).(func(int, []int) error); ok {
r0 = rf(sceneID, galleryIDs)
} else {
r0 = ret.Error(0)
}
return r0
}
// UpdateMovies provides a mock function with given fields: sceneID, movies
func (_m *SceneReaderWriter) UpdateMovies(sceneID int, movies []models.MoviesScenes) error {
ret := _m.Called(sceneID, movies)
@ -794,20 +808,6 @@ func (_m *SceneReaderWriter) UpdatePerformers(sceneID int, performerIDs []int) e
return r0
}
// UpdateGalleries provides a mock function with given fields: sceneID, galleryIDs
func (_m *SceneReaderWriter) UpdateGalleries(sceneID int, galleryIDs []int) error {
ret := _m.Called(sceneID, galleryIDs)
var r0 error
if rf, ok := ret.Get(0).(func(int, []int) error); ok {
r0 = rf(sceneID, galleryIDs)
} else {
r0 = ret.Error(0)
}
return r0
}
// UpdateStashIDs provides a mock function with given fields: sceneID, stashIDs
func (_m *SceneReaderWriter) UpdateStashIDs(sceneID int, stashIDs []models.StashID) error {
ret := _m.Called(sceneID, stashIDs)

View file

@ -35,29 +35,6 @@ func (_m *StudioReaderWriter) All() ([]*models.Studio, error) {
return r0, r1
}
// AllSlim provides a mock function with given fields:
func (_m *StudioReaderWriter) AllSlim() ([]*models.Studio, error) {
ret := _m.Called()
var r0 []*models.Studio
if rf, ok := ret.Get(0).(func() []*models.Studio); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Studio)
}
}
var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// Count provides a mock function with given fields:
func (_m *StudioReaderWriter) Count() (int, error) {
ret := _m.Called()

View file

@ -35,29 +35,6 @@ func (_m *TagReaderWriter) All() ([]*models.Tag, error) {
return r0, r1
}
// AllSlim provides a mock function with given fields:
func (_m *TagReaderWriter) AllSlim() ([]*models.Tag, error) {
ret := _m.Called()
var r0 []*models.Tag
if rf, ok := ret.Get(0).(func() []*models.Tag); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Tag)
}
}
var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// Count provides a mock function with given fields:
func (_m *TagReaderWriter) Count() (int, error) {
ret := _m.Called()
@ -245,6 +222,29 @@ func (_m *TagReaderWriter) FindByNames(names []string, nocase bool) ([]*models.T
return r0, r1
}
// FindByPerformerID provides a mock function with given fields: performerID
func (_m *TagReaderWriter) FindByPerformerID(performerID int) ([]*models.Tag, error) {
ret := _m.Called(performerID)
var r0 []*models.Tag
if rf, ok := ret.Get(0).(func(int) []*models.Tag); ok {
r0 = rf(performerID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Tag)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(performerID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// FindBySceneID provides a mock function with given fields: sceneID
func (_m *TagReaderWriter) FindBySceneID(sceneID int) ([]*models.Tag, error) {
ret := _m.Called(sceneID)

View file

@ -84,6 +84,14 @@ func (s Scene) GetHash(hashAlgorithm HashAlgorithm) string {
panic("unknown hash algorithm")
}
func (s Scene) GetMinResolution() int64 {
if s.Width.Int64 < s.Height.Int64 {
return s.Width.Int64
}
return s.Height.Int64
}
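Taken together with the ResolutionEnum bounds added earlier in this change, a scene's smaller dimension can be checked against a resolution's minimum and maximum. A minimal sketch, assuming FULL_HD is one of the generated enum values and that the bounds form a half-open range (both assumptions, not confirmed by this commit):
res := models.ResolutionEnum("FULL_HD") // assumed enum value
minRes := int64(res.GetMinResolution())
maxRes := int64(res.GetMaxResolution()) // 0 means no upper bound
d := scene.GetMinResolution()           // the smaller of width and height
qualifies := d >= minRes && (maxRes == 0 || d < maxRes)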
// SceneFileType represents the file metadata for a scene.
type SceneFileType struct {
Size *string `graphql:"size" json:"size"`

View file

@ -24,43 +24,45 @@ type ScrapedItem struct {
}
type ScrapedPerformer struct {
Name *string `graphql:"name" json:"name"`
Gender *string `graphql:"gender" json:"gender"`
URL *string `graphql:"url" json:"url"`
Twitter *string `graphql:"twitter" json:"twitter"`
Instagram *string `graphql:"instagram" json:"instagram"`
Birthdate *string `graphql:"birthdate" json:"birthdate"`
Ethnicity *string `graphql:"ethnicity" json:"ethnicity"`
Country *string `graphql:"country" json:"country"`
EyeColor *string `graphql:"eye_color" json:"eye_color"`
Height *string `graphql:"height" json:"height"`
Measurements *string `graphql:"measurements" json:"measurements"`
FakeTits *string `graphql:"fake_tits" json:"fake_tits"`
CareerLength *string `graphql:"career_length" json:"career_length"`
Tattoos *string `graphql:"tattoos" json:"tattoos"`
Piercings *string `graphql:"piercings" json:"piercings"`
Aliases *string `graphql:"aliases" json:"aliases"`
Image *string `graphql:"image" json:"image"`
Name *string `graphql:"name" json:"name"`
Gender *string `graphql:"gender" json:"gender"`
URL *string `graphql:"url" json:"url"`
Twitter *string `graphql:"twitter" json:"twitter"`
Instagram *string `graphql:"instagram" json:"instagram"`
Birthdate *string `graphql:"birthdate" json:"birthdate"`
Ethnicity *string `graphql:"ethnicity" json:"ethnicity"`
Country *string `graphql:"country" json:"country"`
EyeColor *string `graphql:"eye_color" json:"eye_color"`
Height *string `graphql:"height" json:"height"`
Measurements *string `graphql:"measurements" json:"measurements"`
FakeTits *string `graphql:"fake_tits" json:"fake_tits"`
CareerLength *string `graphql:"career_length" json:"career_length"`
Tattoos *string `graphql:"tattoos" json:"tattoos"`
Piercings *string `graphql:"piercings" json:"piercings"`
Aliases *string `graphql:"aliases" json:"aliases"`
Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"`
Image *string `graphql:"image" json:"image"`
}
// this type has no Image field
type ScrapedPerformerStash struct {
Name *string `graphql:"name" json:"name"`
Gender *string `graphql:"gender" json:"gender"`
URL *string `graphql:"url" json:"url"`
Twitter *string `graphql:"twitter" json:"twitter"`
Instagram *string `graphql:"instagram" json:"instagram"`
Birthdate *string `graphql:"birthdate" json:"birthdate"`
Ethnicity *string `graphql:"ethnicity" json:"ethnicity"`
Country *string `graphql:"country" json:"country"`
EyeColor *string `graphql:"eye_color" json:"eye_color"`
Height *string `graphql:"height" json:"height"`
Measurements *string `graphql:"measurements" json:"measurements"`
FakeTits *string `graphql:"fake_tits" json:"fake_tits"`
CareerLength *string `graphql:"career_length" json:"career_length"`
Tattoos *string `graphql:"tattoos" json:"tattoos"`
Piercings *string `graphql:"piercings" json:"piercings"`
Aliases *string `graphql:"aliases" json:"aliases"`
Name *string `graphql:"name" json:"name"`
Gender *string `graphql:"gender" json:"gender"`
URL *string `graphql:"url" json:"url"`
Twitter *string `graphql:"twitter" json:"twitter"`
Instagram *string `graphql:"instagram" json:"instagram"`
Birthdate *string `graphql:"birthdate" json:"birthdate"`
Ethnicity *string `graphql:"ethnicity" json:"ethnicity"`
Country *string `graphql:"country" json:"country"`
EyeColor *string `graphql:"eye_color" json:"eye_color"`
Height *string `graphql:"height" json:"height"`
Measurements *string `graphql:"measurements" json:"measurements"`
FakeTits *string `graphql:"fake_tits" json:"fake_tits"`
CareerLength *string `graphql:"career_length" json:"career_length"`
Tattoos *string `graphql:"tattoos" json:"tattoos"`
Piercings *string `graphql:"piercings" json:"piercings"`
Aliases *string `graphql:"aliases" json:"aliases"`
Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"`
}
type ScrapedScene struct {
@ -106,25 +108,26 @@ type ScrapedGalleryStash struct {
type ScrapedScenePerformer struct {
// Set if performer matched
ID *string `graphql:"id" json:"id"`
Name string `graphql:"name" json:"name"`
Gender *string `graphql:"gender" json:"gender"`
URL *string `graphql:"url" json:"url"`
Twitter *string `graphql:"twitter" json:"twitter"`
Instagram *string `graphql:"instagram" json:"instagram"`
Birthdate *string `graphql:"birthdate" json:"birthdate"`
Ethnicity *string `graphql:"ethnicity" json:"ethnicity"`
Country *string `graphql:"country" json:"country"`
EyeColor *string `graphql:"eye_color" json:"eye_color"`
Height *string `graphql:"height" json:"height"`
Measurements *string `graphql:"measurements" json:"measurements"`
FakeTits *string `graphql:"fake_tits" json:"fake_tits"`
CareerLength *string `graphql:"career_length" json:"career_length"`
Tattoos *string `graphql:"tattoos" json:"tattoos"`
Piercings *string `graphql:"piercings" json:"piercings"`
Aliases *string `graphql:"aliases" json:"aliases"`
RemoteSiteID *string `graphql:"remote_site_id" json:"remote_site_id"`
Images []string `graphql:"images" json:"images"`
ID *string `graphql:"id" json:"id"`
Name string `graphql:"name" json:"name"`
Gender *string `graphql:"gender" json:"gender"`
URL *string `graphql:"url" json:"url"`
Twitter *string `graphql:"twitter" json:"twitter"`
Instagram *string `graphql:"instagram" json:"instagram"`
Birthdate *string `graphql:"birthdate" json:"birthdate"`
Ethnicity *string `graphql:"ethnicity" json:"ethnicity"`
Country *string `graphql:"country" json:"country"`
EyeColor *string `graphql:"eye_color" json:"eye_color"`
Height *string `graphql:"height" json:"height"`
Measurements *string `graphql:"measurements" json:"measurements"`
FakeTits *string `graphql:"fake_tits" json:"fake_tits"`
CareerLength *string `graphql:"career_length" json:"career_length"`
Tattoos *string `graphql:"tattoos" json:"tattoos"`
Piercings *string `graphql:"piercings" json:"piercings"`
Aliases *string `graphql:"aliases" json:"aliases"`
Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"`
RemoteSiteID *string `graphql:"remote_site_id" json:"remote_site_id"`
Images []string `graphql:"images" json:"images"`
}
type ScrapedSceneStudio struct {

View file

@ -8,7 +8,6 @@ type MovieReader interface {
FindByNames(names []string, nocase bool) ([]*Movie, error)
All() ([]*Movie, error)
Count() (int, error)
AllSlim() ([]*Movie, error)
Query(movieFilter *MovieFilterType, findFilter *FindFilterType) ([]*Movie, int, error)
GetFrontImage(movieID int) ([]byte, error)
GetBackImage(movieID int) ([]byte, error)

View file

@ -8,12 +8,13 @@ type PerformerReader interface {
FindByImageID(imageID int) ([]*Performer, error)
FindByGalleryID(galleryID int) ([]*Performer, error)
FindByNames(names []string, nocase bool) ([]*Performer, error)
CountByTagID(tagID int) (int, error)
Count() (int, error)
All() ([]*Performer, error)
AllSlim() ([]*Performer, error)
Query(performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]*Performer, int, error)
GetImage(performerID int) ([]byte, error)
GetStashIDs(performerID int) ([]*StashID, error)
GetTagIDs(sceneID int) ([]int, error)
}
type PerformerWriter interface {
@ -24,6 +25,7 @@ type PerformerWriter interface {
UpdateImage(performerID int, image []byte) error
DestroyImage(performerID int) error
UpdateStashIDs(performerID int, stashIDs []StashID) error
UpdateTags(sceneID int, tagIDs []int) error
}
type PerformerReaderWriter interface {

View file

@ -21,7 +21,6 @@ type SceneReader interface {
CountMissingOSHash() (int, error)
Wall(q *string) ([]*Scene, error)
All() ([]*Scene, error)
QueryForAutoTag(regex string, pathPrefixes []string) ([]*Scene, error)
Query(sceneFilter *SceneFilterType, findFilter *FindFilterType) ([]*Scene, int, error)
GetCover(sceneID int) ([]byte, error)
GetMovies(sceneID int) ([]MoviesScenes, error)

View file

@ -7,7 +7,6 @@ type StudioReader interface {
FindByName(name string, nocase bool) (*Studio, error)
Count() (int, error)
All() ([]*Studio, error)
AllSlim() ([]*Studio, error)
Query(studioFilter *StudioFilterType, findFilter *FindFilterType) ([]*Studio, int, error)
GetImage(studioID int) ([]byte, error)
HasImage(studioID int) (bool, error)

View file

@ -4,6 +4,7 @@ type TagReader interface {
Find(id int) (*Tag, error)
FindMany(ids []int) ([]*Tag, error)
FindBySceneID(sceneID int) ([]*Tag, error)
FindByPerformerID(performerID int) ([]*Tag, error)
FindBySceneMarkerID(sceneMarkerID int) ([]*Tag, error)
FindByImageID(imageID int) ([]*Tag, error)
FindByGalleryID(galleryID int) ([]*Tag, error)
@ -11,7 +12,6 @@ type TagReader interface {
FindByNames(names []string, nocase bool) ([]*Tag, error)
Count() (int, error)
All() ([]*Tag, error)
AllSlim() ([]*Tag, error)
Query(tagFilter *TagFilterType, findFilter *FindFilterType) ([]*Tag, int, error)
GetImage(tagID int) ([]byte, error)
}

View file

@ -3,6 +3,7 @@ package performer
import (
"database/sql"
"fmt"
"strings"
"github.com/stashapp/stash/pkg/manager/jsonschema"
"github.com/stashapp/stash/pkg/models"
@ -10,16 +11,25 @@ import (
)
type Importer struct {
ReaderWriter models.PerformerReaderWriter
Input jsonschema.Performer
ReaderWriter models.PerformerReaderWriter
TagWriter models.TagReaderWriter
Input jsonschema.Performer
MissingRefBehaviour models.ImportMissingRefEnum
ID int
performer models.Performer
imageData []byte
tags []*models.Tag
}
func (i *Importer) PreImport() error {
i.performer = performerJSONToPerformer(i.Input)
if err := i.populateTags(); err != nil {
return err
}
var err error
if len(i.Input.Image) > 0 {
_, i.imageData, err = utils.ProcessBase64Image(i.Input.Image)
@ -31,7 +41,82 @@ func (i *Importer) PreImport() error {
return nil
}
func (i *Importer) populateTags() error {
if len(i.Input.Tags) > 0 {
tags, err := importTags(i.TagWriter, i.Input.Tags, i.MissingRefBehaviour)
if err != nil {
return err
}
i.tags = tags
}
return nil
}
func importTags(tagWriter models.TagReaderWriter, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) {
tags, err := tagWriter.FindByNames(names, false)
if err != nil {
return nil, err
}
var pluckedNames []string
for _, tag := range tags {
pluckedNames = append(pluckedNames, tag.Name)
}
missingTags := utils.StrFilter(names, func(name string) bool {
return !utils.StrInclude(pluckedNames, name)
})
if len(missingTags) > 0 {
if missingRefBehaviour == models.ImportMissingRefEnumFail {
return nil, fmt.Errorf("tags [%s] not found", strings.Join(missingTags, ", "))
}
if missingRefBehaviour == models.ImportMissingRefEnumCreate {
createdTags, err := createTags(tagWriter, missingTags)
if err != nil {
return nil, fmt.Errorf("error creating tags: %s", err.Error())
}
tags = append(tags, createdTags...)
}
// ignore if MissingRefBehaviour set to Ignore
}
return tags, nil
}
func createTags(tagWriter models.TagWriter, names []string) ([]*models.Tag, error) {
var ret []*models.Tag
for _, name := range names {
newTag := *models.NewTag(name)
created, err := tagWriter.Create(newTag)
if err != nil {
return nil, err
}
ret = append(ret, created)
}
return ret, nil
}
func (i *Importer) PostImport(id int) error {
if len(i.tags) > 0 {
var tagIDs []int
for _, t := range i.tags {
tagIDs = append(tagIDs, t.ID)
}
if err := i.ReaderWriter.UpdateTags(id, tagIDs); err != nil {
return fmt.Errorf("failed to associate tags: %s", err.Error())
}
}
if len(i.imageData) > 0 {
if err := i.ReaderWriter.UpdateImage(id, i.imageData); err != nil {
return fmt.Errorf("error setting performer image: %s", err.Error())

View file

@ -3,6 +3,8 @@ package performer
import (
"errors"
"github.com/stretchr/testify/mock"
"github.com/stashapp/stash/pkg/manager/jsonschema"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
@ -16,9 +18,15 @@ const invalidImage = "aW1hZ2VCeXRlcw&&"
const (
existingPerformerID = 100
existingTagID = 105
errTagsID = 106
existingPerformerName = "existingPerformerName"
performerNameErr = "performerNameErr"
existingTagName = "existingTagName"
existingTagErr = "existingTagErr"
missingTagName = "missingTagName"
)
func TestImporterName(t *testing.T) {
@ -53,6 +61,91 @@ func TestImporterPreImport(t *testing.T) {
assert.Equal(t, expectedPerformer, i.performer)
}
func TestImporterPreImportWithTag(t *testing.T) {
tagReaderWriter := &mocks.TagReaderWriter{}
i := Importer{
TagWriter: tagReaderWriter,
MissingRefBehaviour: models.ImportMissingRefEnumFail,
Input: jsonschema.Performer{
Tags: []string{
existingTagName,
},
},
}
tagReaderWriter.On("FindByNames", []string{existingTagName}, false).Return([]*models.Tag{
{
ID: existingTagID,
Name: existingTagName,
},
}, nil).Once()
tagReaderWriter.On("FindByNames", []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once()
err := i.PreImport()
assert.Nil(t, err)
assert.Equal(t, existingTagID, i.tags[0].ID)
i.Input.Tags = []string{existingTagErr}
err = i.PreImport()
assert.NotNil(t, err)
tagReaderWriter.AssertExpectations(t)
}
func TestImporterPreImportWithMissingTag(t *testing.T) {
tagReaderWriter := &mocks.TagReaderWriter{}
i := Importer{
TagWriter: tagReaderWriter,
Input: jsonschema.Performer{
Tags: []string{
missingTagName,
},
},
MissingRefBehaviour: models.ImportMissingRefEnumFail,
}
tagReaderWriter.On("FindByNames", []string{missingTagName}, false).Return(nil, nil).Times(3)
tagReaderWriter.On("Create", mock.AnythingOfType("models.Tag")).Return(&models.Tag{
ID: existingTagID,
}, nil)
err := i.PreImport()
assert.NotNil(t, err)
i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
err = i.PreImport()
assert.Nil(t, err)
i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
err = i.PreImport()
assert.Nil(t, err)
assert.Equal(t, existingTagID, i.tags[0].ID)
tagReaderWriter.AssertExpectations(t)
}
func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
tagReaderWriter := &mocks.TagReaderWriter{}
i := Importer{
TagWriter: tagReaderWriter,
Input: jsonschema.Performer{
Tags: []string{
missingTagName,
},
},
MissingRefBehaviour: models.ImportMissingRefEnumCreate,
}
tagReaderWriter.On("FindByNames", []string{missingTagName}, false).Return(nil, nil).Once()
tagReaderWriter.On("Create", mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error"))
err := i.PreImport()
assert.NotNil(t, err)
}
func TestImporterPostImport(t *testing.T) {
readerWriter := &mocks.PerformerReaderWriter{}
@ -111,6 +204,32 @@ func TestImporterFindExistingID(t *testing.T) {
readerWriter.AssertExpectations(t)
}
func TestImporterPostImportUpdateTags(t *testing.T) {
readerWriter := &mocks.PerformerReaderWriter{}
i := Importer{
ReaderWriter: readerWriter,
tags: []*models.Tag{
{
ID: existingTagID,
},
},
}
updateErr := errors.New("UpdateTags error")
readerWriter.On("UpdateTags", performerID, []int{existingTagID}).Return(nil).Once()
readerWriter.On("UpdateTags", errTagsID, mock.AnythingOfType("[]int")).Return(updateErr).Once()
err := i.PostImport(performerID)
assert.Nil(t, err)
err = i.PostImport(errTagsID)
assert.NotNil(t, err)
readerWriter.AssertExpectations(t)
}
func TestCreate(t *testing.T) {
readerWriter := &mocks.PerformerReaderWriter{}

View file

@ -158,10 +158,11 @@ type scraperDebugOptions struct {
}
type scraperCookies struct {
Name string `yaml:"Name"`
Value string `yaml:"Value"`
Domain string `yaml:"Domain"`
Path string `yaml:"Path"`
Name string `yaml:"Name"`
Value string `yaml:"Value"`
ValueRandom int `yaml:"ValueRandom"`
Domain string `yaml:"Domain"`
Path string `yaml:"Path"`
}
type cookieOptions struct {

View file

@ -13,6 +13,7 @@ import (
"github.com/chromedp/chromedp"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/utils"
)
// set cookies for the native http client
@ -32,7 +33,7 @@ func setCookies(jar *cookiejar.Jar, scraperConfig config) {
for _, cookie := range ckURL.Cookies {
httpCookie = &http.Cookie{
Name: cookie.Name,
Value: cookie.Value,
Value: getCookieValue(cookie),
Path: cookie.Path,
Domain: cookie.Domain,
}
@ -53,6 +54,13 @@ func setCookies(jar *cookiejar.Jar, scraperConfig config) {
}
}
func getCookieValue(cookie *scraperCookies) string {
if cookie.ValueRandom > 0 {
return utils.RandomSequence(cookie.ValueRandom)
}
return cookie.Value
}
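The new ValueRandom option replaces a fixed cookie value with a freshly generated random string each time the cookies are set. A minimal sketch with assumed values (the cookie name, length, and domain are illustrative, not from this commit):
cookie := &scraperCookies{
	Name:        "session",      // assumed cookie name
	ValueRandom: 32,             // generate a 32-character random value instead of using Value
	Domain:      ".example.com", // assumed domain
	Path:        "/",
}
value := getCookieValue(cookie) // equivalent to utils.RandomSequence(32)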
// print all cookies from the jar of the native http client
func printCookies(jar *cookiejar.Jar, scraperConfig config, msg string) {
driverOptions := scraperConfig.DriverOptions
@ -92,7 +100,7 @@ func setCDPCookies(driverOptions scraperDriverOptions) chromedp.Tasks {
for _, ckURL := range driverOptions.Cookies {
for _, cookie := range ckURL.Cookies {
success, err := network.SetCookie(cookie.Name, cookie.Value).
success, err := network.SetCookie(cookie.Name, getCookieValue(cookie)).
WithExpires(&expr).
WithDomain(cookie.Domain).
WithPath(cookie.Path).

View file

@ -31,33 +31,28 @@ xPathScrapers:
selector: //div[@id="search-result"]//div[@data-test="teaser-subject"]/a/@href
postProcess:
- replace:
- regex: ^
with: https://www.freeones.com
- regex: $
with: /profile
- regex: ^
with: https://www.freeones.com
- regex: /feed$
with: /bio
performerScraper:
performer:
Name:
Name:
selector: //h1
postProcess:
- replace:
- regex: \sBio\s*$
with: ""
URL:
selector: //a[span[text()="Profile"]]/@href
postProcess:
- replace:
- regex: ^
with: https://www.freeones.com
- regex: \sBio\s*$
with: ""
URL: //link[@rel="alternate" and @hreflang="x-default"]/@href
Twitter: //a[contains(@href,'twitter.com/')]/@href
Instagram: //a[contains(@href,'instagram.com/')]/@href
Birthdate:
selector: //span[contains(text(),'Born On')]
postProcess:
- replace:
- regex: Born On
with:
- regex: Born On
with:
- parseDate: January 2, 2006
Ethnicity:
selector: //a[@data-test="link_ethnicity"]/span/text()
@ -73,8 +68,8 @@ xPathScrapers:
selector: //span[text()='Height']/following-sibling::span/a
postProcess:
- replace:
- regex: \D+[\s\S]+
with: ""
- regex: \D+[\s\S]+
with: ""
- map:
Unknown: ""
Measurements:
@ -88,18 +83,18 @@ xPathScrapers:
postProcess:
- map:
Unknown: ""
Fake: Yes
Natural: No
Fake: "Yes"
Natural: "No"
CareerLength:
selector: //div[contains(@class,'timeline-horizontal')]//p[@class='m-0']
concat: "-"
Aliases: //p[@data-test='p_aliases']/text()
Tattoos:
Tattoos:
selector: //span[text()='Tattoos']/following-sibling::span/span
postProcess:
- map:
Unknown: ""
Piercings:
Piercings:
selector: //span[text()='Piercings']/following-sibling::span/span
postProcess:
- map:
@ -108,7 +103,7 @@ xPathScrapers:
selector: //div[contains(@class,'image-container')]//a/img/@src
Gender:
fixed: "Female"
# Last updated January 31, 2021
# Last updated March 24, 2021
`
func getFreeonesScraper() config {

View file

@ -1,11 +1,14 @@
package scraper
import (
"crypto/tls"
"fmt"
"io/ioutil"
"net/http"
"strings"
"time"
stashConfig "github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
@ -83,6 +86,8 @@ func setMovieBackImage(m *models.ScrapedMovie, globalConfig GlobalConfig) error
func getImage(url string, globalConfig GlobalConfig) (*string, error) {
client := &http.Client{
Transport: &http.Transport{ // ignore insecure certificates
TLSClientConfig: &tls.Config{InsecureSkipVerify: !stashConfig.GetScraperCertCheck()}},
Timeout: imageGetTimeout,
}
@ -109,6 +114,10 @@ func getImage(url string, globalConfig GlobalConfig) (*string, error) {
return nil, err
}
if resp.StatusCode >= 400 {
return nil, fmt.Errorf("http error %d", resp.StatusCode)
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)

View file

@ -52,7 +52,7 @@ func (s *jsonScraper) loadURL(url string) (string, error) {
if err != nil {
return "", err
}
logger.Infof("loadURL (%s)\n", url)
doc, err := ioutil.ReadAll(r)
if err != nil {
return "", err
@ -71,7 +71,8 @@ func (s *jsonScraper) loadURL(url string) (string, error) {
}
func (s *jsonScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) {
doc, scraper, err := s.scrapeURL(url)
u := replaceURL(url, s.scraper) // allow a URL Replace for performer by URL queries
doc, scraper, err := s.scrapeURL(u)
if err != nil {
return nil, err
}
@ -81,7 +82,8 @@ func (s *jsonScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer
}
func (s *jsonScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) {
doc, scraper, err := s.scrapeURL(url)
u := replaceURL(url, s.scraper) // allow a URL Replace for scene by URL queries
doc, scraper, err := s.scrapeURL(u)
if err != nil {
return nil, err
}
@ -91,7 +93,8 @@ func (s *jsonScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error)
}
func (s *jsonScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) {
doc, scraper, err := s.scrapeURL(url)
u := replaceURL(url, s.scraper) // allow a URL Replace for gallery by URL queries
doc, scraper, err := s.scrapeURL(u)
if err != nil {
return nil, err
}
@ -101,7 +104,8 @@ func (s *jsonScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, er
}
func (s *jsonScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) {
doc, scraper, err := s.scrapeURL(url)
u := replaceURL(url, s.scraper) // allow a URL Replace for movie by URL queries
doc, scraper, err := s.scrapeURL(u)
if err != nil {
return nil, err
}

View file

@ -94,10 +94,10 @@ func (s mappedConfig) postProcess(q mappedQuery, attrConfig mappedScraperAttrCon
type mappedSceneScraperConfig struct {
mappedConfig
Tags mappedConfig `yaml:"Tags"`
Performers mappedConfig `yaml:"Performers"`
Studio mappedConfig `yaml:"Studio"`
Movies mappedConfig `yaml:"Movies"`
Tags mappedConfig `yaml:"Tags"`
Performers mappedPerformerScraperConfig `yaml:"Performers"`
Studio mappedConfig `yaml:"Studio"`
Movies mappedConfig `yaml:"Movies"`
}
type _mappedSceneScraperConfig mappedSceneScraperConfig
@ -211,10 +211,54 @@ func (s *mappedGalleryScraperConfig) UnmarshalYAML(unmarshal func(interface{}) e
type mappedPerformerScraperConfig struct {
mappedConfig
Tags mappedConfig `yaml:"Tags"`
}
type _mappedPerformerScraperConfig mappedPerformerScraperConfig
const (
mappedScraperConfigPerformerTags = "Tags"
)
func (s *mappedPerformerScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
return unmarshal(&s.mappedConfig)
// HACK - unmarshal to map first, then remove the known performer sub-fields, then
// remarshal to yaml and pass that down to the base map
parentMap := make(map[string]interface{})
if err := unmarshal(parentMap); err != nil {
return err
}
// move the known sub-fields to a separate map
thisMap := make(map[string]interface{})
thisMap[mappedScraperConfigPerformerTags] = parentMap[mappedScraperConfigPerformerTags]
delete(parentMap, mappedScraperConfigPerformerTags)
// re-unmarshal the sub-fields
yml, err := yaml.Marshal(thisMap)
if err != nil {
return err
}
// needs to be a different type to prevent infinite recursion
c := _mappedPerformerScraperConfig{}
if err := yaml.Unmarshal(yml, &c); err != nil {
return err
}
*s = mappedPerformerScraperConfig(c)
yml, err = yaml.Marshal(parentMap)
if err != nil {
return err
}
if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil {
return err
}
return nil
}
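With the Tags sub-field split out during unmarshalling, a performer scraper can declare tag mappings alongside the performer attributes. A minimal sketch, embedded as a raw string the same way the tests embed scraper configs, with assumed selectors that are not part of this commit:
const examplePerformerScraperYAML = `
xPathScrapers:
  performerScraper:
    performer:
      Name: //h1
      Tags:
        Name: //div[@class="performer-tags"]/a
`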
type mappedMovieScraperConfig struct {
@ -647,9 +691,23 @@ func (s mappedScraper) scrapePerformer(q mappedQuery) (*models.ScrapedPerformer,
return nil, nil
}
performerTagsMap := performerMap.Tags
results := performerMap.process(q, s.Common)
if len(results) > 0 {
results[0].apply(&ret)
// now apply the tags
if performerTagsMap != nil {
logger.Debug(`Processing performer tags:`)
tagResults := performerTagsMap.process(q, s.Common)
for _, p := range tagResults {
tag := &models.ScrapedSceneTag{}
p.apply(tag)
ret.Tags = append(ret.Tags, tag)
}
}
}
return &ret, nil
@ -687,19 +745,34 @@ func (s mappedScraper) scrapeScene(q mappedQuery) (*models.ScrapedScene, error)
sceneStudioMap := sceneScraperConfig.Studio
sceneMoviesMap := sceneScraperConfig.Movies
scenePerformerTagsMap := scenePerformersMap.Tags
logger.Debug(`Processing scene:`)
results := sceneMap.process(q, s.Common)
if len(results) > 0 {
results[0].apply(&ret)
// process performer tags once
var performerTagResults mappedResults
if scenePerformerTagsMap != nil {
performerTagResults = scenePerformerTagsMap.process(q, s.Common)
}
// now apply the performers and tags
if scenePerformersMap != nil {
if scenePerformersMap.mappedConfig != nil {
logger.Debug(`Processing scene performers:`)
performerResults := scenePerformersMap.process(q, s.Common)
for _, p := range performerResults {
performer := &models.ScrapedScenePerformer{}
p.apply(performer)
for _, p := range performerTagResults {
tag := &models.ScrapedSceneTag{}
p.apply(tag)
ret.Tags = append(ret.Tags, tag)
}
ret.Performers = append(ret.Performers, performer)
}
}

View file

@ -17,6 +17,13 @@ func queryURLParametersFromScene(scene *models.Scene) queryURLParameters {
ret["oshash"] = scene.OSHash.String
ret["filename"] = filepath.Base(scene.Path)
ret["title"] = scene.Title.String
ret["url"] = scene.URL.String
return ret
}
func queryURLParameterFromURL(url string) queryURLParameters {
ret := make(queryURLParameters)
ret["url"] = url
return ret
}
@ -28,6 +35,7 @@ func queryURLParametersFromGallery(gallery *models.Gallery) queryURLParameters {
ret["filename"] = filepath.Base(gallery.Path.String)
}
ret["title"] = gallery.Title.String
ret["url"] = gallery.URL.String
return ret
}
@ -49,3 +57,14 @@ func (p queryURLParameters) constructURL(url string) string {
return ret
}
// replaceURL does a partial URL Replace (only the url parameter is used)
func replaceURL(url string, scraperConfig scraperTypeConfig) string {
u := url
queryURL := queryURLParameterFromURL(u)
if scraperConfig.QueryURLReplacements != nil {
queryURL.applyReplacements(scraperConfig.QueryURLReplacements)
u = queryURL.constructURL(scraperConfig.QueryURL)
}
return u
}

View file

@ -220,9 +220,11 @@ func (c Cache) ScrapePerformerURL(url string) (*models.ScrapedPerformer, error)
return nil, err
}
// post-process - set the image if applicable
if err := setPerformerImage(ret, c.globalConfig); err != nil {
logger.Warnf("Could not set image using URL %s: %s", *ret.Image, err.Error())
if ret != nil {
err = c.postScrapePerformer(ret)
if err != nil {
return nil, err
}
}
return ret, nil
@ -232,6 +234,49 @@ func (c Cache) ScrapePerformerURL(url string) (*models.ScrapedPerformer, error)
return nil, nil
}
func (c Cache) postScrapePerformer(ret *models.ScrapedPerformer) error {
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
tqb := r.Tag()
for _, t := range ret.Tags {
err := MatchScrapedSceneTag(tqb, t)
if err != nil {
return err
}
}
return nil
}); err != nil {
return err
}
// post-process - set the image if applicable
if err := setPerformerImage(ret, c.globalConfig); err != nil {
logger.Warnf("Could not set image using URL %s: %s", *ret.Image, err.Error())
}
return nil
}
func (c Cache) postScrapeScenePerformer(ret *models.ScrapedScenePerformer) error {
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
tqb := r.Tag()
for _, t := range ret.Tags {
err := MatchScrapedSceneTag(tqb, t)
if err != nil {
return err
}
}
return nil
}); err != nil {
return err
}
return nil
}
func (c Cache) postScrapeScene(ret *models.ScrapedScene) error {
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
pqb := r.Performer()
@ -240,8 +285,11 @@ func (c Cache) postScrapeScene(ret *models.ScrapedScene) error {
sqb := r.Studio()
for _, p := range ret.Performers {
err := MatchScrapedScenePerformer(pqb, p)
if err != nil {
if err := c.postScrapeScenePerformer(p); err != nil {
return err
}
if err := MatchScrapedScenePerformer(pqb, p); err != nil {
return err
}
}

View file

@ -30,6 +30,13 @@ func newScriptScraper(scraper scraperTypeConfig, config config, globalConfig Glo
func (s *scriptScraper) runScraperScript(inString string, out interface{}) error {
command := s.scraper.Script
if command[0] == "python" || command[0] == "python3" {
executable, err := findPythonExecutable()
if err == nil {
command[0] = executable
}
}
cmd := exec.Command(command[0], command[1:]...)
cmd.Dir = filepath.Dir(s.config.path)
@ -184,3 +191,19 @@ func (s *scriptScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, erro
return &ret, err
}
func findPythonExecutable() (string, error) {
_, err := exec.LookPath("python3")
if err != nil {
_, err = exec.LookPath("python")
if err != nil {
return "", err
}
return "python", nil
}
return "python3", nil
}

View file

@ -100,6 +100,13 @@ func (s *stashScraper) scrapePerformerByFragment(scrapedPerformer models.Scraped
return nil, err
}
if q.FindPerformer != nil {
// the ids of the tags must be nilled
for _, t := range q.FindPerformer.Tags {
t.ID = nil
}
}
// need to copy back to a scraped performer
ret := models.ScrapedPerformer{}
err = copier.Copy(&ret, q.FindPerformer)

View file

@ -322,6 +322,7 @@ func performerFragmentToScrapedScenePerformer(p graphql.PerformerFragment) *mode
Twitter: findURL(p.Urls, "TWITTER"),
RemoteSiteID: &id,
Images: images,
// TODO - tags not currently supported
// TODO - Image - should be returned as a set of URLs. Will need a
// graphql schema change to accommodate this. Leave off for now.
}

View file

@ -3,6 +3,7 @@ package scraper
import (
"bytes"
"context"
"crypto/tls"
"errors"
"fmt"
"io"
@ -22,6 +23,7 @@ import (
"golang.org/x/net/publicsuffix"
"github.com/stashapp/stash/pkg/logger"
stashConfig "github.com/stashapp/stash/pkg/manager/config"
)
// Timeout for the scrape http request. Includes transfer time. May want to make this
@ -49,6 +51,9 @@ func loadURL(url string, scraperConfig config, globalConfig GlobalConfig) (io.Re
printCookies(jar, scraperConfig, "Jar cookies set from scraper")
client := &http.Client{
Transport: &http.Transport{ // ignore insecure certificates
TLSClientConfig: &tls.Config{InsecureSkipVerify: !stashConfig.GetScraperCertCheck()},
},
Timeout: scrapeGetTimeout,
// defaultCheckRedirect code with max changed from 10 to 20
CheckRedirect: func(req *http.Request, via []*http.Request) error {
@ -74,6 +79,10 @@ func loadURL(url string, scraperConfig config, globalConfig GlobalConfig) (io.Re
if err != nil {
return nil, err
}
if resp.StatusCode >= 400 {
return nil, fmt.Errorf("http error %d", resp.StatusCode)
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)

View file

@ -52,7 +52,8 @@ func (s *xpathScraper) scrapeURL(url string) (*html.Node, *mappedScraper, error)
}
func (s *xpathScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) {
doc, scraper, err := s.scrapeURL(url)
u := replaceURL(url, s.scraper) // allow a URL Replace for performer by URL queries
doc, scraper, err := s.scrapeURL(u)
if err != nil {
return nil, err
}
@ -62,7 +63,8 @@ func (s *xpathScraper) scrapePerformerByURL(url string) (*models.ScrapedPerforme
}
func (s *xpathScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) {
doc, scraper, err := s.scrapeURL(url)
u := replaceURL(url, s.scraper) // allow a URL Replace for scene by URL queries
doc, scraper, err := s.scrapeURL(u)
if err != nil {
return nil, err
}
@ -72,7 +74,8 @@ func (s *xpathScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error
}
func (s *xpathScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) {
doc, scraper, err := s.scrapeURL(url)
u := replaceURL(url, s.scraper) // allow a URL Replace for gallery by URL queries
doc, scraper, err := s.scrapeURL(u)
if err != nil {
return nil, err
}
@ -82,7 +85,8 @@ func (s *xpathScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, e
}
func (s *xpathScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) {
doc, scraper, err := s.scrapeURL(url)
u := replaceURL(url, s.scraper) // allow a URL Replace for movie by URL queries
doc, scraper, err := s.scrapeURL(u)
if err != nil {
return nil, err
}

View file

@ -520,7 +520,7 @@ func makeSceneXPathConfig() mappedScraper {
performerConfig := make(mappedConfig)
performerConfig["Name"] = makeSimpleAttrConfig(`$performerElem/@data-mxptext`)
performerConfig["URL"] = makeSimpleAttrConfig(`$performerElem/@href`)
config.Performers = performerConfig
config.Performers.mappedConfig = performerConfig
studioConfig := make(mappedConfig)
studioConfig["Name"] = makeSimpleAttrConfig(`$studioElem`)
@ -730,7 +730,7 @@ xPathScrapers:
assert.Equal(t, "//title", sceneConfig.mappedConfig["Title"].Selector)
assert.Equal(t, "//tags", sceneConfig.Tags["Name"].Selector)
assert.Equal(t, "//movies", sceneConfig.Movies["Name"].Selector)
assert.Equal(t, "//performers", sceneConfig.Performers["Name"].Selector)
assert.Equal(t, "//performers", sceneConfig.Performers.mappedConfig["Name"].Selector)
assert.Equal(t, "//studio", sceneConfig.Studio["Name"].Selector)
postProcess := sceneConfig.mappedConfig["Title"].postProcessActions

403
pkg/sqlite/filter.go Normal file
View file

@ -0,0 +1,403 @@
package sqlite
import (
"errors"
"fmt"
"regexp"
"strings"
"github.com/stashapp/stash/pkg/models"
)
type sqlClause struct {
sql string
args []interface{}
}
func makeClause(sql string, args ...interface{}) sqlClause {
return sqlClause{
sql: sql,
args: args,
}
}
type criterionHandler interface {
handle(f *filterBuilder)
}
type criterionHandlerFunc func(f *filterBuilder)
type join struct {
table string
as string
onClause string
}
// equals returns true if the other join alias/table is equal to this one
func (j join) equals(o join) bool {
return j.alias() == o.alias()
}
// alias returns the as string, or the table if as is empty
func (j join) alias() string {
if j.as == "" {
return j.table
}
return j.as
}
func (j join) toSQL() string {
asStr := ""
if j.as != "" && j.as != j.table {
asStr = " AS " + j.as
}
return fmt.Sprintf("LEFT JOIN %s%s ON %s", j.table, asStr, j.onClause)
}
type joins []join
func (j *joins) add(newJoins ...join) {
// only add if not already joined
for _, newJoin := range newJoins {
for _, jj := range *j {
if jj.equals(newJoin) {
return
}
}
*j = append(*j, newJoin)
}
}
func (j *joins) toSQL() string {
var ret []string
for _, jj := range *j {
ret = append(ret, jj.toSQL())
}
return strings.Join(ret, " ")
}
type filterBuilder struct {
subFilter *filterBuilder
subFilterOp string
joins joins
whereClauses []sqlClause
havingClauses []sqlClause
err error
}
var errSubFilterAlreadySet error = errors.New(`sub-filter already set`)
// sub-filter operator values
var (
andOp = "AND"
orOp = "OR"
notOp = "AND NOT"
)
// and sets the sub-filter that will be ANDed with this one.
// Sets the error state if sub-filter is already set.
func (f *filterBuilder) and(a *filterBuilder) {
if f.subFilter != nil {
f.setError(errSubFilterAlreadySet)
return
}
f.subFilter = a
f.subFilterOp = andOp
}
// or sets the sub-filter that will be ORed with this one.
// Sets the error state if a sub-filter is already set.
func (f *filterBuilder) or(o *filterBuilder) {
if f.subFilter != nil {
f.setError(errSubFilterAlreadySet)
return
}
f.subFilter = o
f.subFilterOp = orOp
}
// not sets the sub-filter that will be AND NOTed with this one.
// Sets the error state if a sub-filter is already set.
func (f *filterBuilder) not(n *filterBuilder) {
if f.subFilter != nil {
f.setError(errSubFilterAlreadySet)
return
}
f.subFilter = n
f.subFilterOp = notOp
}
// addJoin adds a join to the filter. The join is expressed in SQL as:
// LEFT JOIN <table> [AS <as>] ON <onClause>
// The AS is omitted if as is empty.
// This method does not add a join if its alias/table name is already
// present in another existing join.
func (f *filterBuilder) addJoin(table, as, onClause string) {
newJoin := join{
table: table,
as: as,
onClause: onClause,
}
f.joins.add(newJoin)
}
// addWhere adds a where clause and arguments to the filter. Where clauses
// are ANDed together. Does not add anything if the provided string is empty.
func (f *filterBuilder) addWhere(sql string, args ...interface{}) {
if sql == "" {
return
}
f.whereClauses = append(f.whereClauses, makeClause(sql, args...))
}
// addHaving adds a having clause and arguments to the filter. Having clauses
// are ANDed together. Does not add anything if the provided string is empty.
func (f *filterBuilder) addHaving(sql string, args ...interface{}) {
if sql == "" {
return
}
f.havingClauses = append(f.havingClauses, makeClause(sql, args...))
}
func (f *filterBuilder) getSubFilterClause(clause, subFilterClause string) string {
ret := clause
if subFilterClause != "" {
var op string
if len(ret) > 0 {
op = " " + f.subFilterOp + " "
} else {
if f.subFilterOp == notOp {
op = "NOT "
}
}
ret += op + "(" + subFilterClause + ")"
}
return ret
}
// generateWhereClauses generates the SQL where clause for this filter.
// All where clauses within the filter are ANDed together. This is combined
// with the sub-filter, which will use the applicable operator (AND/OR/AND NOT).
func (f *filterBuilder) generateWhereClauses() (clause string, args []interface{}) {
clause, args = f.andClauses(f.whereClauses)
if f.subFilter != nil {
c, a := f.subFilter.generateWhereClauses()
if c != "" {
clause = f.getSubFilterClause(clause, c)
if len(a) > 0 {
args = append(args, a...)
}
}
}
return
}
// generateHavingClauses generates the SQL having clause for this filter.
// All having clauses within the filter are ANDed together. This is combined
// with the sub-filter, which will use the applicable operator (AND/OR/AND NOT).
func (f *filterBuilder) generateHavingClauses() (string, []interface{}) {
clause, args := f.andClauses(f.havingClauses)
if f.subFilter != nil {
c, a := f.subFilter.generateHavingClauses()
if c != "" {
clause = f.getSubFilterClause(clause, c)
if len(a) > 0 {
args = append(args, a...)
}
}
}
return clause, args
}
// getAllJoins returns all of the joins in this filter and any sub-filter(s).
// Redundant joins will not be duplicated in the return value.
func (f *filterBuilder) getAllJoins() joins {
var ret joins
ret.add(f.joins...)
if f.subFilter != nil {
subJoins := f.subFilter.getAllJoins()
if len(subJoins) > 0 {
ret.add(subJoins...)
}
}
return ret
}
// getError returns the error state on this filter, or on any sub-filter(s) if
// the error state is nil.
func (f *filterBuilder) getError() error {
if f.err != nil {
return f.err
}
if f.subFilter != nil {
return f.subFilter.getError()
}
return nil
}
// handleCriterion calls the handle function on the provided criterionHandler,
// providing itself.
func (f *filterBuilder) handleCriterion(handler criterionHandler) {
f.handleCriterionFunc(func(h *filterBuilder) {
handler.handle(h)
})
}
// handleCriterionFunc calls the provided criterion handler function providing
// itself.
func (f *filterBuilder) handleCriterionFunc(handler criterionHandlerFunc) {
handler(f)
}
func (f *filterBuilder) setError(e error) {
if f.err == nil {
f.err = e
}
}
func (f *filterBuilder) andClauses(input []sqlClause) (string, []interface{}) {
var clauses []string
var args []interface{}
for _, w := range input {
clauses = append(clauses, w.sql)
args = append(args, w.args...)
}
if len(clauses) > 0 {
c := strings.Join(clauses, " AND ")
if len(clauses) > 1 {
c = "(" + c + ")"
}
return c, args
}
return "", nil
}
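To show how the pieces above fit together, a minimal sketch of building a filter and generating its SQL fragments; the table, column, and parameter values are assumptions, not taken from this commit:
f := &filterBuilder{}
f.addJoin("performers_tags", "", "performers_tags.performer_id = performers.id")
f.addWhere("performers.name LIKE ?", "%foo%")

sub := &filterBuilder{}
sub.addWhere("performers.birthdate IS NOT NULL")
f.or(sub) // the sub-filter is ORed with the outer filter

// f.getAllJoins().toSQL() yields:
//   LEFT JOIN performers_tags ON performers_tags.performer_id = performers.id
where, args := f.generateWhereClauses()
// where: performers.name LIKE ? OR (performers.birthdate IS NOT NULL)
// args:  ["%foo%"]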
func stringCriterionHandler(c *models.StringCriterionInput, column string) criterionHandlerFunc {
return func(f *filterBuilder) {
if c != nil {
if modifier := c.Modifier; c.Modifier.IsValid() {
switch modifier {
case models.CriterionModifierIncludes:
clause, thisArgs := getSearchBinding([]string{column}, c.Value, false)
f.addWhere(clause, thisArgs...)
case models.CriterionModifierExcludes:
clause, thisArgs := getSearchBinding([]string{column}, c.Value, true)
f.addWhere(clause, thisArgs...)
case models.CriterionModifierEquals:
f.addWhere(column+" LIKE ?", c.Value)
case models.CriterionModifierNotEquals:
f.addWhere(column+" NOT LIKE ?", c.Value)
case models.CriterionModifierMatchesRegex:
if _, err := regexp.Compile(c.Value); err != nil {
f.setError(err)
return
}
f.addWhere(fmt.Sprintf("(%s IS NOT NULL AND %[1]s regexp ?)", column), c.Value)
case models.CriterionModifierNotMatchesRegex:
if _, err := regexp.Compile(c.Value); err != nil {
f.setError(err)
return
}
f.addWhere(fmt.Sprintf("(%s IS NULL OR %[1]s NOT regexp ?)", column), c.Value)
default:
clause, count := getSimpleCriterionClause(modifier, "?")
if count == 1 {
f.addWhere(column+" "+clause, c.Value)
} else {
f.addWhere(column + " " + clause)
}
}
}
}
}
}
func intCriterionHandler(c *models.IntCriterionInput, column string) criterionHandlerFunc {
return func(f *filterBuilder) {
if c != nil {
clause, count := getIntCriterionWhereClause(column, *c)
if count == 1 {
f.addWhere(clause, c.Value)
} else {
f.addWhere(clause)
}
}
}
}
func boolCriterionHandler(c *bool, column string) criterionHandlerFunc {
return func(f *filterBuilder) {
if c != nil {
var v string
if *c {
v = "1"
} else {
v = "0"
}
f.addWhere(column + " = " + v)
}
}
}
func stringLiteralCriterionHandler(v *string, column string) criterionHandlerFunc {
return func(f *filterBuilder) {
if v != nil {
f.addWhere(column+" = ?", v)
}
}
}
type multiCriterionHandlerBuilder struct {
primaryTable string
foreignTable string
joinTable string
primaryFK string
foreignFK string
// function that will be called to perform any necessary joins
addJoinsFunc func(f *filterBuilder)
}
func (m *multiCriterionHandlerBuilder) handler(criterion *models.MultiCriterionInput) criterionHandlerFunc {
return func(f *filterBuilder) {
if criterion != nil && len(criterion.Value) > 0 {
var args []interface{}
for _, tagID := range criterion.Value {
args = append(args, tagID)
}
if m.addJoinsFunc != nil {
m.addJoinsFunc(f)
}
whereClause, havingClause := getMultiCriterionClause(m.primaryTable, m.foreignTable, m.joinTable, m.primaryFK, m.foreignFK, criterion)
f.addWhere(whereClause, args...)
f.addHaving(havingClause)
}
}
}

View file

@ -0,0 +1,614 @@
package sqlite
import (
"errors"
"fmt"
"testing"
"github.com/stashapp/stash/pkg/models"
"github.com/stretchr/testify/assert"
)
func TestFilterBuilderAnd(t *testing.T) {
assert := assert.New(t)
f := &filterBuilder{}
other := &filterBuilder{}
newBuilder := &filterBuilder{}
// and should set the subFilter
f.and(other)
assert.Equal(other, f.subFilter)
assert.Nil(f.getError())
// and should set error if and is set
f.and(newBuilder)
assert.Equal(other, f.subFilter)
assert.Equal(errSubFilterAlreadySet, f.getError())
// and should set error if or is set
// and should not set subFilter if or is set
f = &filterBuilder{}
f.or(other)
f.and(newBuilder)
assert.Equal(other, f.subFilter)
assert.Equal(errSubFilterAlreadySet, f.getError())
// and should set error if not is set
// and should not set subFilter if not is set
f = &filterBuilder{}
f.not(other)
f.and(newBuilder)
assert.Equal(other, f.subFilter)
assert.Equal(errSubFilterAlreadySet, f.getError())
}
func TestFilterBuilderOr(t *testing.T) {
assert := assert.New(t)
f := &filterBuilder{}
other := &filterBuilder{}
newBuilder := &filterBuilder{}
// or should set the subFilter
f.or(other)
assert.Equal(other, f.subFilter)
assert.Nil(f.getError())
// or should set error if or is set
f.or(newBuilder)
assert.Equal(newBuilder, f.subFilter)
assert.Equal(errSubFilterAlreadySet, f.getError())
// or should set error if and is set
// or should not set subFilter if and is set
f = &filterBuilder{}
f.and(other)
f.or(newBuilder)
assert.Equal(other, f.subFilter)
assert.Equal(errSubFilterAlreadySet, f.getError())
// or should set error if not is set
// or should not set subFilter if not is set
f = &filterBuilder{}
f.not(other)
f.or(newBuilder)
assert.Equal(other, f.subFilter)
assert.Equal(errSubFilterAlreadySet, f.getError())
}
func TestFilterBuilderNot(t *testing.T) {
assert := assert.New(t)
f := &filterBuilder{}
other := &filterBuilder{}
newBuilder := &filterBuilder{}
// not should set the subFilter
f.not(other)
// ensure subFilter is set
assert.Equal(other, f.subFilter)
assert.Nil(f.getError())
// not should set error if not is set
f.not(newBuilder)
assert.Equal(newBuilder, f.subFilter)
assert.Equal(errSubFilterAlreadySet, f.getError())
// not should set error if and is set
// not should not set subFilter if and is set
f = &filterBuilder{}
f.and(other)
f.not(newBuilder)
assert.Equal(other, f.subFilter)
assert.Equal(errSubFilterAlreadySet, f.getError())
// not should set error if or is set
// not should not set subFilter if or is set
f = &filterBuilder{}
f.or(other)
f.not(newBuilder)
assert.Equal(other, f.subFilter)
assert.Equal(errSubFilterAlreadySet, f.getError())
}
func TestAddJoin(t *testing.T) {
assert := assert.New(t)
f := &filterBuilder{}
const (
table1Name = "table1Name"
table2Name = "table2Name"
as1Name = "as1"
as2Name = "as2"
onClause = "onClause1"
)
f.addJoin(table1Name, as1Name, onClause)
// ensure join is added
assert.Len(f.joins, 1)
assert.Equal(fmt.Sprintf("LEFT JOIN %s AS %s ON %s", table1Name, as1Name, onClause), f.joins[0].toSQL())
// ensure join with the same alias is not added
f.addJoin(table2Name, as1Name, onClause)
assert.Len(f.joins, 1)
// ensure same table with different alias can be added
f.addJoin(table1Name, as2Name, onClause)
assert.Len(f.joins, 2)
assert.Equal(fmt.Sprintf("LEFT JOIN %s AS %s ON %s", table1Name, as2Name, onClause), f.joins[1].toSQL())
// ensure table without alias can be added if tableName != existing alias/tableName
f.addJoin(table1Name, "", onClause)
assert.Len(f.joins, 3)
assert.Equal(fmt.Sprintf("LEFT JOIN %s ON %s", table1Name, onClause), f.joins[2].toSQL())
// ensure table with alias == table name of a join without alias is not added
f.addJoin(table2Name, table1Name, onClause)
assert.Len(f.joins, 3)
// ensure table without alias cannot be added if tableName == existing alias
f.addJoin(as2Name, "", onClause)
assert.Len(f.joins, 3)
// ensure AS is not used if same as table name
f.addJoin(table2Name, table2Name, onClause)
assert.Len(f.joins, 4)
assert.Equal(fmt.Sprintf("LEFT JOIN %s ON %s", table2Name, onClause), f.joins[3].toSQL())
}
func TestAddWhere(t *testing.T) {
assert := assert.New(t)
f := &filterBuilder{}
// ensure empty sql adds nothing
f.addWhere("")
assert.Len(f.whereClauses, 0)
const whereClause = "a = b"
var args = []interface{}{"1", "2"}
// ensure addWhere sets where clause and args
f.addWhere(whereClause, args...)
assert.Len(f.whereClauses, 1)
assert.Equal(whereClause, f.whereClauses[0].sql)
assert.Equal(args, f.whereClauses[0].args)
// ensure addWhere without args sets where clause
f.addWhere(whereClause)
assert.Len(f.whereClauses, 2)
assert.Equal(whereClause, f.whereClauses[1].sql)
assert.Len(f.whereClauses[1].args, 0)
}
func TestAddHaving(t *testing.T) {
assert := assert.New(t)
f := &filterBuilder{}
// ensure empty sql adds nothing
f.addHaving("")
assert.Len(f.havingClauses, 0)
const havingClause = "a = b"
var args = []interface{}{"1", "2"}
// ensure addHaving sets having clause and args
f.addHaving(havingClause, args...)
assert.Len(f.havingClauses, 1)
assert.Equal(havingClause, f.havingClauses[0].sql)
assert.Equal(args, f.havingClauses[0].args)
// ensure addHaving without args sets having clause
f.addHaving(havingClause)
assert.Len(f.havingClauses, 2)
assert.Equal(havingClause, f.havingClauses[1].sql)
assert.Len(f.havingClauses[1].args, 0)
}
func TestGenerateWhereClauses(t *testing.T) {
assert := assert.New(t)
f := &filterBuilder{}
const clause1 = "a = 1"
const clause2 = "b = 2"
const clause3 = "c = 3"
const arg1 = "1"
const arg2 = "2"
const arg3 = "3"
// ensure single where clause is generated correctly
f.addWhere(clause1)
r, rArgs := f.generateWhereClauses()
assert.Equal(clause1, r)
assert.Len(rArgs, 0)
// ensure multiple where clauses are surrounded with parenthesis and
// ANDed together
f.addWhere(clause2, arg1, arg2)
r, rArgs = f.generateWhereClauses()
assert.Equal(fmt.Sprintf("(%s AND %s)", clause1, clause2), r)
assert.Len(rArgs, 2)
// ensure empty subfilter is not added to generated where clause
sf := &filterBuilder{}
f.and(sf)
r, rArgs = f.generateWhereClauses()
assert.Equal(fmt.Sprintf("(%s AND %s)", clause1, clause2), r)
assert.Len(rArgs, 2)
// ensure sub-filter is generated correctly
sf.addWhere(clause3, arg3)
r, rArgs = f.generateWhereClauses()
assert.Equal(fmt.Sprintf("(%s AND %s) AND (%s)", clause1, clause2, clause3), r)
assert.Len(rArgs, 3)
// ensure OR sub-filter is generated correctly
f = &filterBuilder{}
f.addWhere(clause1)
f.addWhere(clause2, arg1, arg2)
f.or(sf)
r, rArgs = f.generateWhereClauses()
assert.Equal(fmt.Sprintf("(%s AND %s) OR (%s)", clause1, clause2, clause3), r)
assert.Len(rArgs, 3)
// ensure NOT sub-filter is generated correctly
f = &filterBuilder{}
f.addWhere(clause1)
f.addWhere(clause2, arg1, arg2)
f.not(sf)
r, rArgs = f.generateWhereClauses()
assert.Equal(fmt.Sprintf("(%s AND %s) AND NOT (%s)", clause1, clause2, clause3), r)
assert.Len(rArgs, 3)
// ensure empty filter with ANDed sub-filter does not include AND
f = &filterBuilder{}
f.and(sf)
r, rArgs = f.generateWhereClauses()
assert.Equal(fmt.Sprintf("(%s)", clause3), r)
assert.Len(rArgs, 1)
// ensure empty filter with ORed sub-filter does not include OR
f = &filterBuilder{}
f.or(sf)
r, rArgs = f.generateWhereClauses()
assert.Equal(fmt.Sprintf("(%s)", clause3), r)
assert.Len(rArgs, 1)
// ensure empty filter with NOTed sub-filter does not include AND
f = &filterBuilder{}
f.not(sf)
r, rArgs = f.generateWhereClauses()
assert.Equal(fmt.Sprintf("NOT (%s)", clause3), r)
assert.Len(rArgs, 1)
// (clause1) AND ((clause2) OR (clause3))
f = &filterBuilder{}
f.addWhere(clause1)
sf2 := &filterBuilder{}
sf2.addWhere(clause2, arg1, arg2)
f.and(sf2)
sf2.or(sf)
r, rArgs = f.generateWhereClauses()
assert.Equal(fmt.Sprintf("%s AND (%s OR (%s))", clause1, clause2, clause3), r)
assert.Len(rArgs, 3)
}
func TestGenerateHavingClauses(t *testing.T) {
assert := assert.New(t)
f := &filterBuilder{}
const clause1 = "a = 1"
const clause2 = "b = 2"
const clause3 = "c = 3"
const arg1 = "1"
const arg2 = "2"
const arg3 = "3"
// ensure single Having clause is generated correctly
f.addHaving(clause1)
r, rArgs := f.generateHavingClauses()
assert.Equal(clause1, r)
assert.Len(rArgs, 0)
// ensure multiple Having clauses are surrounded with parenthesis and
// ANDed together
f.addHaving(clause2, arg1, arg2)
r, rArgs = f.generateHavingClauses()
assert.Equal("("+clause1+" AND "+clause2+")", r)
assert.Len(rArgs, 2)
// ensure empty subfilter is not added to generated Having clause
sf := &filterBuilder{}
f.and(sf)
r, rArgs = f.generateHavingClauses()
assert.Equal("("+clause1+" AND "+clause2+")", r)
assert.Len(rArgs, 2)
// ensure sub-filter is generated correctly
sf.addHaving(clause3, arg3)
r, rArgs = f.generateHavingClauses()
assert.Equal("("+clause1+" AND "+clause2+") AND ("+clause3+")", r)
assert.Len(rArgs, 3)
// ensure OR sub-filter is generated correctly
f = &filterBuilder{}
f.addHaving(clause1)
f.addHaving(clause2, arg1, arg2)
f.or(sf)
r, rArgs = f.generateHavingClauses()
assert.Equal("("+clause1+" AND "+clause2+") OR ("+clause3+")", r)
assert.Len(rArgs, 3)
// ensure NOT sub-filter is generated correctly
f = &filterBuilder{}
f.addHaving(clause1)
f.addHaving(clause2, arg1, arg2)
f.not(sf)
r, rArgs = f.generateHavingClauses()
assert.Equal("("+clause1+" AND "+clause2+") AND NOT ("+clause3+")", r)
assert.Len(rArgs, 3)
}
func TestGetAllJoins(t *testing.T) {
assert := assert.New(t)
f := &filterBuilder{}
const (
table1Name = "table1Name"
table2Name = "table2Name"
as1Name = "as1"
as2Name = "as2"
onClause = "onClause1"
)
f.addJoin(table1Name, as1Name, onClause)
// ensure join is returned
joins := f.getAllJoins()
assert.Len(joins, 1)
assert.Equal(fmt.Sprintf("LEFT JOIN %s AS %s ON %s", table1Name, as1Name, onClause), joins[0].toSQL())
// ensure joins in sub-filter are returned
subFilter := &filterBuilder{}
f.and(subFilter)
subFilter.addJoin(table2Name, as2Name, onClause)
joins = f.getAllJoins()
assert.Len(joins, 2)
assert.Equal(fmt.Sprintf("LEFT JOIN %s AS %s ON %s", table2Name, as2Name, onClause), joins[1].toSQL())
// ensure redundant joins are not returned
subFilter.addJoin(as1Name, "", onClause)
joins = f.getAllJoins()
assert.Len(joins, 2)
}
func TestGetError(t *testing.T) {
assert := assert.New(t)
f := &filterBuilder{}
subFilter := &filterBuilder{}
f.and(subFilter)
expectedErr := errors.New("test error")
expectedErr2 := errors.New("test error2")
f.err = expectedErr
subFilter.err = expectedErr2
// ensure getError returns the top-level error state
assert.Equal(expectedErr, f.getError())
// ensure getError returns sub-filter error state if top-level error
// is nil
f.err = nil
assert.Equal(expectedErr2, f.getError())
// ensure getError returns nil if all error states are nil
subFilter.err = nil
assert.Nil(f.getError())
}
func TestStringCriterionHandlerIncludes(t *testing.T) {
assert := assert.New(t)
const column = "column"
const value1 = "two words"
const quotedValue = `"two words"`
f := &filterBuilder{}
f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{
Modifier: models.CriterionModifierIncludes,
Value: value1,
}, column))
assert.Len(f.whereClauses, 1)
assert.Equal(fmt.Sprintf("(%[1]s LIKE ? OR %[1]s LIKE ?)", column), f.whereClauses[0].sql)
assert.Len(f.whereClauses[0].args, 2)
assert.Equal("%two%", f.whereClauses[0].args[0])
assert.Equal("%words%", f.whereClauses[0].args[1])
f = &filterBuilder{}
f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{
Modifier: models.CriterionModifierIncludes,
Value: quotedValue,
}, column))
assert.Len(f.whereClauses, 1)
assert.Equal(fmt.Sprintf("(%[1]s LIKE ?)", column), f.whereClauses[0].sql)
assert.Len(f.whereClauses[0].args, 1)
assert.Equal("%two words%", f.whereClauses[0].args[0])
}
func TestStringCriterionHandlerExcludes(t *testing.T) {
assert := assert.New(t)
const column = "column"
const value1 = "two words"
const quotedValue = `"two words"`
f := &filterBuilder{}
f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{
Modifier: models.CriterionModifierExcludes,
Value: value1,
}, column))
assert.Len(f.whereClauses, 1)
assert.Equal(fmt.Sprintf("(%[1]s NOT LIKE ? AND %[1]s NOT LIKE ?)", column), f.whereClauses[0].sql)
assert.Len(f.whereClauses[0].args, 2)
assert.Equal("%two%", f.whereClauses[0].args[0])
assert.Equal("%words%", f.whereClauses[0].args[1])
f = &filterBuilder{}
f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{
Modifier: models.CriterionModifierExcludes,
Value: quotedValue,
}, column))
assert.Len(f.whereClauses, 1)
assert.Equal(fmt.Sprintf("(%[1]s NOT LIKE ?)", column), f.whereClauses[0].sql)
assert.Len(f.whereClauses[0].args, 1)
assert.Equal("%two words%", f.whereClauses[0].args[0])
}
func TestStringCriterionHandlerEquals(t *testing.T) {
assert := assert.New(t)
const column = "column"
const value1 = "two words"
f := &filterBuilder{}
f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{
Modifier: models.CriterionModifierEquals,
Value: value1,
}, column))
assert.Len(f.whereClauses, 1)
assert.Equal(fmt.Sprintf("%[1]s LIKE ?", column), f.whereClauses[0].sql)
assert.Len(f.whereClauses[0].args, 1)
assert.Equal(value1, f.whereClauses[0].args[0])
}
func TestStringCriterionHandlerNotEquals(t *testing.T) {
assert := assert.New(t)
const column = "column"
const value1 = "two words"
f := &filterBuilder{}
f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{
Modifier: models.CriterionModifierNotEquals,
Value: value1,
}, column))
assert.Len(f.whereClauses, 1)
assert.Equal(fmt.Sprintf("%[1]s NOT LIKE ?", column), f.whereClauses[0].sql)
assert.Len(f.whereClauses[0].args, 1)
assert.Equal(value1, f.whereClauses[0].args[0])
}
func TestStringCriterionHandlerMatchesRegex(t *testing.T) {
assert := assert.New(t)
const column = "column"
const validValue = "two words"
const invalidValue = "*two words"
f := &filterBuilder{}
f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{
Modifier: models.CriterionModifierMatchesRegex,
Value: validValue,
}, column))
assert.Len(f.whereClauses, 1)
assert.Equal(fmt.Sprintf("(%s IS NOT NULL AND %[1]s regexp ?)", column), f.whereClauses[0].sql)
assert.Len(f.whereClauses[0].args, 1)
assert.Equal(validValue, f.whereClauses[0].args[0])
// ensure invalid regex sets error state
f = &filterBuilder{}
f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{
Modifier: models.CriterionModifierMatchesRegex,
Value: invalidValue,
}, column))
assert.NotNil(f.getError())
}
func TestStringCriterionHandlerNotMatchesRegex(t *testing.T) {
assert := assert.New(t)
const column = "column"
const validValue = "two words"
const invalidValue = "*two words"
f := &filterBuilder{}
f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{
Modifier: models.CriterionModifierNotMatchesRegex,
Value: validValue,
}, column))
assert.Len(f.whereClauses, 1)
assert.Equal(fmt.Sprintf("(%s IS NULL OR %[1]s NOT regexp ?)", column), f.whereClauses[0].sql)
assert.Len(f.whereClauses[0].args, 1)
assert.Equal(validValue, f.whereClauses[0].args[0])
// ensure invalid regex sets error state
f = &filterBuilder{}
f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{
Modifier: models.CriterionModifierNotMatchesRegex,
Value: invalidValue,
}, column))
assert.NotNil(f.getError())
}
func TestStringCriterionHandlerIsNull(t *testing.T) {
assert := assert.New(t)
const column = "column"
f := &filterBuilder{}
f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{
Modifier: models.CriterionModifierIsNull,
}, column))
assert.Len(f.whereClauses, 1)
assert.Equal(fmt.Sprintf("%[1]s IS NULL", column), f.whereClauses[0].sql)
assert.Len(f.whereClauses[0].args, 0)
}
func TestStringCriterionHandlerNotNull(t *testing.T) {
assert := assert.New(t)
const column = "column"
f := &filterBuilder{}
f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{
Modifier: models.CriterionModifierNotNull,
}, column))
assert.Len(f.whereClauses, 1)
assert.Equal(fmt.Sprintf("%[1]s IS NOT NULL", column), f.whereClauses[0].sql)
assert.Len(f.whereClauses[0].args, 0)
}

View file

@ -233,7 +233,7 @@ func (qb *galleryQueryBuilder) Query(galleryFilter *models.GalleryFilterType, fi
}
query.body += " LEFT JOIN tags on tags_join.tag_id = tags.id"
whereClause, havingClause := getMultiCriterionClause("galleries", "tags", "tags_join", "gallery_id", "tag_id", tagsFilter)
whereClause, havingClause := getMultiCriterionClause("galleries", "tags", "galleries_tags", "gallery_id", "tag_id", tagsFilter)
query.addWhere(whereClause)
query.addHaving(havingClause)
}
@ -244,7 +244,7 @@ func (qb *galleryQueryBuilder) Query(galleryFilter *models.GalleryFilterType, fi
}
query.body += " LEFT JOIN performers ON performers_join.performer_id = performers.id"
whereClause, havingClause := getMultiCriterionClause("galleries", "performers", "performers_join", "gallery_id", "performer_id", performersFilter)
whereClause, havingClause := getMultiCriterionClause("galleries", "performers", "performers_galleries", "gallery_id", "performer_id", performersFilter)
query.addWhere(whereClause)
query.addHaving(havingClause)
}
@ -259,6 +259,8 @@ func (qb *galleryQueryBuilder) Query(galleryFilter *models.GalleryFilterType, fi
query.addHaving(havingClause)
}
handleGalleryPerformerTagsCriterion(&query, galleryFilter.PerformerTags)
query.sortAndPagination = qb.getGallerySort(findFilter) + getPagination(findFilter)
idsResult, countResult, err := query.executeFind()
if err != nil {
@ -344,6 +346,31 @@ func (qb *galleryQueryBuilder) handleAverageResolutionFilter(query *queryBuilder
}
}
func handleGalleryPerformerTagsCriterion(query *queryBuilder, performerTagsFilter *models.MultiCriterionInput) {
if performerTagsFilter != nil && len(performerTagsFilter.Value) > 0 {
for _, tagID := range performerTagsFilter.Value {
query.addArg(tagID)
}
query.body += " LEFT JOIN performers_tags AS performer_tags_join on performers_join.performer_id = performer_tags_join.performer_id"
if performerTagsFilter.Modifier == models.CriterionModifierIncludes {
// includes any of the provided ids
query.addWhere("performer_tags_join.tag_id IN " + getInBinding(len(performerTagsFilter.Value)))
} else if performerTagsFilter.Modifier == models.CriterionModifierIncludesAll {
// includes all of the provided ids
query.addWhere("performer_tags_join.tag_id IN " + getInBinding(len(performerTagsFilter.Value)))
query.addHaving(fmt.Sprintf("count(distinct performer_tags_join.tag_id) IS %d", len(performerTagsFilter.Value)))
} else if performerTagsFilter.Modifier == models.CriterionModifierExcludes {
query.addWhere(fmt.Sprintf(`not exists
(select performers_galleries.performer_id from performers_galleries
left join performers_tags on performers_tags.performer_id = performers_galleries.performer_id where
performers_galleries.gallery_id = galleries.id AND
performers_tags.tag_id in %s)`, getInBinding(len(performerTagsFilter.Value))))
}
}
}
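// Example for handleGalleryPerformerTagsCriterion (sketch): INCLUDES with two
// tag ids adds "performer_tags_join.tag_id IN (?, ?)" (one placeholder per
// id); INCLUDES_ALL adds the same IN clause plus a HAVING count(distinct ...)
// check; EXCLUDES uses a NOT EXISTS subquery over performers_galleries and
// performers_tags so that galleries with any matching performer tag are
// dropped. The join assumes performers_join has already been added by the
// performers LEFT JOIN earlier in Query.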
func (qb *galleryQueryBuilder) getGallerySort(findFilter *models.FindFilterType) string {
var sort string
var direction string

View file

@ -3,6 +3,7 @@
package sqlite_test
import (
"strconv"
"testing"
"github.com/stretchr/testify/assert"
@ -272,6 +273,307 @@ func TestGalleryQueryIsMissingScene(t *testing.T) {
})
}
func queryGallery(t *testing.T, sqb models.GalleryReader, galleryFilter *models.GalleryFilterType, findFilter *models.FindFilterType) []*models.Gallery {
galleries, _, err := sqb.Query(galleryFilter, findFilter)
if err != nil {
t.Errorf("Error querying gallery: %s", err.Error())
}
return galleries
}
func TestGalleryQueryIsMissingStudio(t *testing.T) {
withTxn(func(r models.Repository) error {
sqb := r.Gallery()
isMissing := "studio"
galleryFilter := models.GalleryFilterType{
IsMissing: &isMissing,
}
q := getGalleryStringValue(galleryIdxWithStudio, titleField)
findFilter := models.FindFilterType{
Q: &q,
}
galleries := queryGallery(t, sqb, &galleryFilter, &findFilter)
assert.Len(t, galleries, 0)
findFilter.Q = nil
galleries = queryGallery(t, sqb, &galleryFilter, &findFilter)
// ensure none of the ids equal the one with studio
for _, gallery := range galleries {
assert.NotEqual(t, galleryIDs[galleryIdxWithStudio], gallery.ID)
}
return nil
})
}
func TestGalleryQueryIsMissingPerformers(t *testing.T) {
withTxn(func(r models.Repository) error {
sqb := r.Gallery()
isMissing := "performers"
galleryFilter := models.GalleryFilterType{
IsMissing: &isMissing,
}
q := getGalleryStringValue(galleryIdxWithPerformer, titleField)
findFilter := models.FindFilterType{
Q: &q,
}
galleries := queryGallery(t, sqb, &galleryFilter, &findFilter)
assert.Len(t, galleries, 0)
findFilter.Q = nil
galleries = queryGallery(t, sqb, &galleryFilter, &findFilter)
assert.True(t, len(galleries) > 0)
// ensure none of the ids equal the one with performer
for _, gallery := range galleries {
assert.NotEqual(t, galleryIDs[galleryIdxWithPerformer], gallery.ID)
}
return nil
})
}
func TestGalleryQueryIsMissingTags(t *testing.T) {
withTxn(func(r models.Repository) error {
sqb := r.Gallery()
isMissing := "tags"
galleryFilter := models.GalleryFilterType{
IsMissing: &isMissing,
}
q := getGalleryStringValue(galleryIdxWithTwoTags, titleField)
findFilter := models.FindFilterType{
Q: &q,
}
galleries := queryGallery(t, sqb, &galleryFilter, &findFilter)
assert.Len(t, galleries, 0)
findFilter.Q = nil
galleries = queryGallery(t, sqb, &galleryFilter, &findFilter)
assert.True(t, len(galleries) > 0)
return nil
})
}
func TestGalleryQueryPerformers(t *testing.T) {
withTxn(func(r models.Repository) error {
sqb := r.Gallery()
performerCriterion := models.MultiCriterionInput{
Value: []string{
strconv.Itoa(performerIDs[performerIdxWithGallery]),
strconv.Itoa(performerIDs[performerIdx1WithGallery]),
},
Modifier: models.CriterionModifierIncludes,
}
galleryFilter := models.GalleryFilterType{
Performers: &performerCriterion,
}
galleries := queryGallery(t, sqb, &galleryFilter, nil)
assert.Len(t, galleries, 2)
// ensure ids are correct
for _, gallery := range galleries {
assert.True(t, gallery.ID == galleryIDs[galleryIdxWithPerformer] || gallery.ID == galleryIDs[galleryIdxWithTwoPerformers])
}
performerCriterion = models.MultiCriterionInput{
Value: []string{
strconv.Itoa(performerIDs[performerIdx1WithGallery]),
strconv.Itoa(performerIDs[performerIdx2WithGallery]),
},
Modifier: models.CriterionModifierIncludesAll,
}
galleries = queryGallery(t, sqb, &galleryFilter, nil)
assert.Len(t, galleries, 1)
assert.Equal(t, galleryIDs[galleryIdxWithTwoPerformers], galleries[0].ID)
performerCriterion = models.MultiCriterionInput{
Value: []string{
strconv.Itoa(performerIDs[performerIdx1WithGallery]),
},
Modifier: models.CriterionModifierExcludes,
}
q := getGalleryStringValue(galleryIdxWithTwoPerformers, titleField)
findFilter := models.FindFilterType{
Q: &q,
}
galleries = queryGallery(t, sqb, &galleryFilter, &findFilter)
assert.Len(t, galleries, 0)
return nil
})
}
func TestGalleryQueryTags(t *testing.T) {
withTxn(func(r models.Repository) error {
sqb := r.Gallery()
tagCriterion := models.MultiCriterionInput{
Value: []string{
strconv.Itoa(tagIDs[tagIdxWithGallery]),
strconv.Itoa(tagIDs[tagIdx1WithGallery]),
},
Modifier: models.CriterionModifierIncludes,
}
galleryFilter := models.GalleryFilterType{
Tags: &tagCriterion,
}
galleries := queryGallery(t, sqb, &galleryFilter, nil)
assert.Len(t, galleries, 2)
// ensure ids are correct
for _, gallery := range galleries {
assert.True(t, gallery.ID == galleryIDs[galleryIdxWithTag] || gallery.ID == galleryIDs[galleryIdxWithTwoTags])
}
tagCriterion = models.MultiCriterionInput{
Value: []string{
strconv.Itoa(tagIDs[tagIdx1WithGallery]),
strconv.Itoa(tagIDs[tagIdx2WithGallery]),
},
Modifier: models.CriterionModifierIncludesAll,
}
galleries = queryGallery(t, sqb, &galleryFilter, nil)
assert.Len(t, galleries, 1)
assert.Equal(t, galleryIDs[galleryIdxWithTwoTags], galleries[0].ID)
tagCriterion = models.MultiCriterionInput{
Value: []string{
strconv.Itoa(tagIDs[tagIdx1WithGallery]),
},
Modifier: models.CriterionModifierExcludes,
}
q := getGalleryStringValue(galleryIdxWithTwoTags, titleField)
findFilter := models.FindFilterType{
Q: &q,
}
galleries = queryGallery(t, sqb, &galleryFilter, &findFilter)
assert.Len(t, galleries, 0)
return nil
})
}
func TestGalleryQueryStudio(t *testing.T) {
withTxn(func(r models.Repository) error {
sqb := r.Gallery()
studioCriterion := models.MultiCriterionInput{
Value: []string{
strconv.Itoa(studioIDs[studioIdxWithGallery]),
},
Modifier: models.CriterionModifierIncludes,
}
galleryFilter := models.GalleryFilterType{
Studios: &studioCriterion,
}
galleries := queryGallery(t, sqb, &galleryFilter, nil)
assert.Len(t, galleries, 1)
// ensure id is correct
assert.Equal(t, galleryIDs[galleryIdxWithStudio], galleries[0].ID)
studioCriterion = models.MultiCriterionInput{
Value: []string{
strconv.Itoa(studioIDs[studioIdxWithGallery]),
},
Modifier: models.CriterionModifierExcludes,
}
q := getGalleryStringValue(galleryIdxWithStudio, titleField)
findFilter := models.FindFilterType{
Q: &q,
}
galleries = queryGallery(t, sqb, &galleryFilter, &findFilter)
assert.Len(t, galleries, 0)
return nil
})
}
func TestGalleryQueryPerformerTags(t *testing.T) {
withTxn(func(r models.Repository) error {
sqb := r.Gallery()
tagCriterion := models.MultiCriterionInput{
Value: []string{
strconv.Itoa(tagIDs[tagIdxWithPerformer]),
strconv.Itoa(tagIDs[tagIdx1WithPerformer]),
},
Modifier: models.CriterionModifierIncludes,
}
galleryFilter := models.GalleryFilterType{
PerformerTags: &tagCriterion,
}
galleries := queryGallery(t, sqb, &galleryFilter, nil)
assert.Len(t, galleries, 2)
// ensure ids are correct
for _, gallery := range galleries {
assert.True(t, gallery.ID == galleryIDs[galleryIdxWithPerformerTag] || gallery.ID == galleryIDs[galleryIdxWithPerformerTwoTags])
}
tagCriterion = models.MultiCriterionInput{
Value: []string{
strconv.Itoa(tagIDs[tagIdx1WithPerformer]),
strconv.Itoa(tagIDs[tagIdx2WithPerformer]),
},
Modifier: models.CriterionModifierIncludesAll,
}
galleries = queryGallery(t, sqb, &galleryFilter, nil)
assert.Len(t, galleries, 1)
assert.Equal(t, galleryIDs[galleryIdxWithPerformerTwoTags], galleries[0].ID)
tagCriterion = models.MultiCriterionInput{
Value: []string{
strconv.Itoa(tagIDs[tagIdx1WithPerformer]),
},
Modifier: models.CriterionModifierExcludes,
}
q := getGalleryStringValue(galleryIdxWithPerformerTwoTags, titleField)
findFilter := models.FindFilterType{
Q: &q,
}
galleries = queryGallery(t, sqb, &galleryFilter, &findFilter)
assert.Len(t, galleries, 0)
return nil
})
}
// TODO Count
// TODO All
// TODO Query

View file

@ -297,7 +297,7 @@ func (qb *imageQueryBuilder) Query(imageFilter *models.ImageFilterType, findFilt
case "SIX_K":
query.addWhere("(MIN(images.height, images.width) >= 3384 AND MIN(images.height, images.width) < 4320)")
case "EIGHT_K":
query.addWhere("(MIN(images.height, images.width) >= 4320")
query.addWhere("MIN(images.height, images.width) >= 4320")
}
}
}
@ -360,6 +360,8 @@ func (qb *imageQueryBuilder) Query(imageFilter *models.ImageFilterType, findFilt
query.addHaving(havingClause)
}
handleImagePerformerTagsCriterion(&query, imageFilter.PerformerTags)
query.sortAndPagination = qb.getImageSort(findFilter) + getPagination(findFilter)
idsResult, countResult, err := query.executeFind()
if err != nil {
@ -379,6 +381,31 @@ func (qb *imageQueryBuilder) Query(imageFilter *models.ImageFilterType, findFilt
return images, countResult, nil
}
func handleImagePerformerTagsCriterion(query *queryBuilder, performerTagsFilter *models.MultiCriterionInput) {
if performerTagsFilter != nil && len(performerTagsFilter.Value) > 0 {
for _, tagID := range performerTagsFilter.Value {
query.addArg(tagID)
}
query.body += " LEFT JOIN performers_tags AS performer_tags_join on performers_join.performer_id = performer_tags_join.performer_id"
if performerTagsFilter.Modifier == models.CriterionModifierIncludes {
// includes any of the provided ids
query.addWhere("performer_tags_join.tag_id IN " + getInBinding(len(performerTagsFilter.Value)))
} else if performerTagsFilter.Modifier == models.CriterionModifierIncludesAll {
// includes all of the provided ids
query.addWhere("performer_tags_join.tag_id IN " + getInBinding(len(performerTagsFilter.Value)))
query.addHaving(fmt.Sprintf("count(distinct performer_tags_join.tag_id) IS %d", len(performerTagsFilter.Value)))
} else if performerTagsFilter.Modifier == models.CriterionModifierExcludes {
query.addWhere(fmt.Sprintf(`not exists
(select performers_images.performer_id from performers_images
left join performers_tags on performers_tags.performer_id = performers_images.performer_id where
performers_images.image_id = images.id AND
performers_tags.tag_id in %s)`, getInBinding(len(performerTagsFilter.Value))))
}
}
}
func (qb *imageQueryBuilder) getImageSort(findFilter *models.FindFilterType) string {
if findFilter == nil {
return " ORDER BY images.path ASC "

View file

@ -619,6 +619,70 @@ func TestImageQueryStudio(t *testing.T) {
})
}
func queryImages(t *testing.T, sqb models.ImageReader, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) []*models.Image {
images, _, err := sqb.Query(imageFilter, findFilter)
if err != nil {
t.Errorf("Error querying images: %s", err.Error())
}
return images
}
func TestImageQueryPerformerTags(t *testing.T) {
withTxn(func(r models.Repository) error {
sqb := r.Image()
tagCriterion := models.MultiCriterionInput{
Value: []string{
strconv.Itoa(tagIDs[tagIdxWithPerformer]),
strconv.Itoa(tagIDs[tagIdx1WithPerformer]),
},
Modifier: models.CriterionModifierIncludes,
}
imageFilter := models.ImageFilterType{
PerformerTags: &tagCriterion,
}
images := queryImages(t, sqb, &imageFilter, nil)
assert.Len(t, images, 2)
// ensure ids are correct
for _, image := range images {
assert.True(t, image.ID == imageIDs[imageIdxWithPerformerTag] || image.ID == imageIDs[imageIdxWithPerformerTwoTags])
}
tagCriterion = models.MultiCriterionInput{
Value: []string{
strconv.Itoa(tagIDs[tagIdx1WithPerformer]),
strconv.Itoa(tagIDs[tagIdx2WithPerformer]),
},
Modifier: models.CriterionModifierIncludesAll,
}
images = queryImages(t, sqb, &imageFilter, nil)
assert.Len(t, images, 1)
assert.Equal(t, imageIDs[imageIdxWithPerformerTwoTags], images[0].ID)
tagCriterion = models.MultiCriterionInput{
Value: []string{
strconv.Itoa(tagIDs[tagIdx1WithPerformer]),
},
Modifier: models.CriterionModifierExcludes,
}
q := getImageStringValue(imageIdxWithPerformerTwoTags, titleField)
findFilter := models.FindFilterType{
Q: &q,
}
images = queryImages(t, sqb, &imageFilter, &findFilter)
assert.Len(t, images, 0)
return nil
})
}
func TestImageQuerySorting(t *testing.T) {
withTxn(func(r models.Repository) error {
sort := titleField

View file

@ -114,10 +114,6 @@ func (qb *movieQueryBuilder) All() ([]*models.Movie, error) {
return qb.queryMovies(selectAll("movies")+qb.getMovieSort(nil), nil)
}
func (qb *movieQueryBuilder) AllSlim() ([]*models.Movie, error) {
return qb.queryMovies("SELECT movies.id, movies.name FROM movies "+qb.getMovieSort(nil), nil)
}
func (qb *movieQueryBuilder) Query(movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) ([]*models.Movie, int, error) {
if findFilter == nil {
findFilter = &models.FindFilterType{}

View file

@ -11,6 +11,13 @@ import (
const performerTable = "performers"
const performerIDColumn = "performer_id"
const performersTagsTable = "performers_tags"
var countPerformersForTagQuery = `
SELECT tag_id AS id FROM performers_tags
WHERE performers_tags.tag_id = ?
GROUP BY performers_tags.performer_id
`
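// Grouping by performer_id yields one row per performer carrying the tag, so
// wrapping this statement with buildCountQuery (as CountByTagID does below)
// counts the performers associated with the given tag id.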
type performerQueryBuilder struct {
repository
@ -153,6 +160,11 @@ func (qb *performerQueryBuilder) FindByNames(names []string, nocase bool) ([]*mo
return qb.queryPerformers(query, args)
}
func (qb *performerQueryBuilder) CountByTagID(tagID int) (int, error) {
args := []interface{}{tagID}
return qb.runCountQuery(qb.buildCountQuery(countPerformersForTagQuery), args)
}
func (qb *performerQueryBuilder) Count() (int, error) {
return qb.runCountQuery(qb.buildCountQuery("SELECT performers.id FROM performers"), nil)
}
@ -161,10 +173,6 @@ func (qb *performerQueryBuilder) All() ([]*models.Performer, error) {
return qb.queryPerformers(selectAll("performers")+qb.getPerformerSort(nil), nil)
}
func (qb *performerQueryBuilder) AllSlim() ([]*models.Performer, error) {
return qb.queryPerformers("SELECT performers.id, performers.name, performers.gender FROM performers "+qb.getPerformerSort(nil), nil)
}
func (qb *performerQueryBuilder) Query(performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) {
if performerFilter == nil {
performerFilter = &models.PerformerFilterType{}
@ -250,6 +258,18 @@ func (qb *performerQueryBuilder) Query(performerFilter *models.PerformerFilterTy
// TODO - need better handling of aliases
query.handleStringCriterionInput(performerFilter.Aliases, tableName+".aliases")
if tagsFilter := performerFilter.Tags; tagsFilter != nil && len(tagsFilter.Value) > 0 {
for _, tagID := range tagsFilter.Value {
query.addArg(tagID)
}
query.body += ` left join performers_tags as tags_join on tags_join.performer_id = performers.id
LEFT JOIN tags on tags_join.tag_id = tags.id`
whereClause, havingClause := getMultiCriterionClause("performers", "tags", "performers_tags", "performer_id", "tag_id", tagsFilter)
query.addWhere(whereClause)
query.addHaving(havingClause)
}
query.sortAndPagination = qb.getPerformerSort(findFilter) + getPagination(findFilter)
idsResult, countResult, err := query.executeFind()
if err != nil {
@ -361,6 +381,26 @@ func (qb *performerQueryBuilder) queryPerformers(query string, args []interface{
return []*models.Performer(ret), nil
}
func (qb *performerQueryBuilder) tagsRepository() *joinRepository {
return &joinRepository{
repository: repository{
tx: qb.tx,
tableName: performersTagsTable,
idColumn: performerIDColumn,
},
fkColumn: tagIDColumn,
}
}
func (qb *performerQueryBuilder) GetTagIDs(id int) ([]int, error) {
return qb.tagsRepository().getIDs(id)
}
func (qb *performerQueryBuilder) UpdateTags(id int, tagIDs []int) error {
// Delete the existing joins and then create new ones
return qb.tagsRepository().replace(id, tagIDs)
}
func (qb *performerQueryBuilder) imageRepository() *imageRepository {
return &imageRepository{
repository: repository{

View file

@ -5,6 +5,7 @@ package sqlite_test
import (
"database/sql"
"fmt"
"strconv"
"strings"
"testing"
"time"
@ -44,6 +45,14 @@ func TestPerformerFindBySceneID(t *testing.T) {
}
func TestPerformerFindByNames(t *testing.T) {
getNames := func(p []*models.Performer) []string {
var ret []string
for _, pp := range p {
ret = append(ret, pp.Name.String)
}
return ret
}
withTxn(func(r models.Repository) error {
var names []string
@ -72,19 +81,20 @@ func TestPerformerFindByNames(t *testing.T) {
if err != nil {
t.Errorf("Error finding performers: %s", err.Error())
}
assert.Len(t, performers, 2) // performerIdxWithScene and performerIdx1WithScene
assert.Equal(t, performerNames[performerIdxWithScene], performers[0].Name.String)
assert.Equal(t, performerNames[performerIdx1WithScene], performers[1].Name.String)
retNames := getNames(performers)
assert.Equal(t, names, retNames)
performers, err = pqb.FindByNames(names, true) // find performers by names (2 names, nocase)
if err != nil {
t.Errorf("Error finding performers: %s", err.Error())
}
assert.Len(t, performers, 4) // performerIdxWithScene and performerIdxWithDupName , performerIdx1WithScene and performerIdx1WithDupName
assert.Equal(t, performerNames[performerIdxWithScene], performers[0].Name.String)
assert.Equal(t, performerNames[performerIdx1WithScene], performers[1].Name.String)
assert.Equal(t, performerNames[performerIdx1WithDupName], performers[2].Name.String)
assert.Equal(t, performerNames[performerIdxWithDupName], performers[3].Name.String)
retNames = getNames(performers)
assert.Equal(t, []string{
performerNames[performerIdxWithScene],
performerNames[performerIdx1WithScene],
performerNames[performerIdx1WithDupName],
performerNames[performerIdxWithDupName],
}, retNames)
return nil
})
@ -218,6 +228,109 @@ func verifyPerformerAge(t *testing.T, ageCriterion models.IntCriterionInput) {
})
}
func TestPerformerQueryCareerLength(t *testing.T) {
const value = "2005"
careerLengthCriterion := models.StringCriterionInput{
Value: value,
Modifier: models.CriterionModifierEquals,
}
verifyPerformerCareerLength(t, careerLengthCriterion)
careerLengthCriterion.Modifier = models.CriterionModifierNotEquals
verifyPerformerCareerLength(t, careerLengthCriterion)
careerLengthCriterion.Modifier = models.CriterionModifierMatchesRegex
verifyPerformerCareerLength(t, careerLengthCriterion)
careerLengthCriterion.Modifier = models.CriterionModifierNotMatchesRegex
verifyPerformerCareerLength(t, careerLengthCriterion)
}
func verifyPerformerCareerLength(t *testing.T, criterion models.StringCriterionInput) {
withTxn(func(r models.Repository) error {
qb := r.Performer()
performerFilter := models.PerformerFilterType{
CareerLength: &criterion,
}
performers, _, err := qb.Query(&performerFilter, nil)
if err != nil {
t.Errorf("Error querying performer: %s", err.Error())
}
for _, performer := range performers {
cl := performer.CareerLength
verifyNullString(t, cl, criterion)
}
return nil
})
}
func queryPerformers(t *testing.T, qb models.PerformerReader, performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) []*models.Performer {
performers, _, err := qb.Query(performerFilter, findFilter)
if err != nil {
t.Errorf("Error querying performers: %s", err.Error())
}
return performers
}
func TestPerformerQueryTags(t *testing.T) {
withTxn(func(r models.Repository) error {
sqb := r.Performer()
tagCriterion := models.MultiCriterionInput{
Value: []string{
strconv.Itoa(tagIDs[tagIdxWithPerformer]),
strconv.Itoa(tagIDs[tagIdx1WithPerformer]),
},
Modifier: models.CriterionModifierIncludes,
}
performerFilter := models.PerformerFilterType{
Tags: &tagCriterion,
}
// ensure ids are correct
performers := queryPerformers(t, sqb, &performerFilter, nil)
assert.Len(t, performers, 2)
for _, performer := range performers {
assert.True(t, performer.ID == performerIDs[performerIdxWithTag] || performer.ID == performerIDs[performerIdxWithTwoTags])
}
tagCriterion = models.MultiCriterionInput{
Value: []string{
strconv.Itoa(tagIDs[tagIdx1WithPerformer]),
strconv.Itoa(tagIDs[tagIdx2WithPerformer]),
},
Modifier: models.CriterionModifierIncludesAll,
}
performers = queryPerformers(t, sqb, &performerFilter, nil)
assert.Len(t, performers, 1)
assert.Equal(t, performerIDs[performerIdxWithTwoTags], performers[0].ID)
tagCriterion = models.MultiCriterionInput{
Value: []string{
strconv.Itoa(tagIDs[tagIdx1WithPerformer]),
},
Modifier: models.CriterionModifierExcludes,
}
q := getSceneStringValue(performerIdxWithTwoTags, titleField)
findFilter := models.FindFilterType{
Q: &q,
}
performers = queryPerformers(t, sqb, &performerFilter, &findFilter)
assert.Len(t, performers, 0)
return nil
})
}
func TestPerformerStashIDs(t *testing.T) {
if err := withTxn(func(r models.Repository) error {
qb := r.Performer()

View file

@ -1,6 +1,7 @@
package sqlite
import (
"fmt"
"regexp"
"github.com/stashapp/stash/pkg/models"
@ -11,6 +12,7 @@ type queryBuilder struct {
body string
joins joins
whereClauses []string
havingClauses []string
args []interface{}
@ -25,7 +27,10 @@ func (qb queryBuilder) executeFind() ([]int, int, error) {
return nil, 0, qb.err
}
return qb.repository.executeFindQuery(qb.body, qb.args, qb.sortAndPagination, qb.whereClauses, qb.havingClauses)
body := qb.body
body += qb.joins.toSQL()
return qb.repository.executeFindQuery(body, qb.args, qb.sortAndPagination, qb.whereClauses, qb.havingClauses)
}
func (qb *queryBuilder) addWhere(clauses ...string) {
@ -48,6 +53,48 @@ func (qb *queryBuilder) addArg(args ...interface{}) {
qb.args = append(qb.args, args...)
}
func (qb *queryBuilder) join(table, as, onClause string) {
newJoin := join{
table: table,
as: as,
onClause: onClause,
}
qb.joins.add(newJoin)
}
func (qb *queryBuilder) addJoins(joins ...join) {
qb.joins.add(joins...)
}
func (qb *queryBuilder) addFilter(f *filterBuilder) {
err := f.getError()
if err != nil {
qb.err = err
return
}
clause, args := f.generateWhereClauses()
if len(clause) > 0 {
qb.addWhere(clause)
}
if len(args) > 0 {
qb.addArg(args...)
}
clause, args = f.generateHavingClauses()
if len(clause) > 0 {
qb.addHaving(clause)
}
if len(args) > 0 {
qb.addArg(args...)
}
qb.addJoins(f.getAllJoins()...)
}
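// Example for addFilter (sketch): given a filterBuilder whose
// generateWhereClauses returns ("(a = ? AND b = ?)", [1, 2]) and which carries
// one join, addFilter appends the clause to whereClauses, both values to args,
// and the join to the query's joins. WHERE arguments are appended before
// HAVING arguments, so handlers should bind their values in clause order.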
func (qb *queryBuilder) handleIntCriterionInput(c *models.IntCriterionInput, column string) {
if c != nil {
clause, count := getIntCriterionWhereClause(column, *c)
@ -81,15 +128,19 @@ func (qb *queryBuilder) handleStringCriterionInput(c *models.StringCriterionInpu
qb.err = err
return
}
qb.addWhere(column + " regexp ?")
qb.addWhere(fmt.Sprintf("(%s IS NOT NULL AND %[1]s regexp ?)", column))
qb.addArg(c.Value)
case models.CriterionModifierNotMatchesRegex:
if _, err := regexp.Compile(c.Value); err != nil {
qb.err = err
return
}
qb.addWhere(column + " NOT regexp ?")
qb.addWhere(fmt.Sprintf("(%s IS NULL OR %[1]s NOT regexp ?)", column))
qb.addArg(c.Value)
case models.CriterionModifierIsNull:
qb.addWhere("(" + column + " IS NULL OR TRIM(" + column + ") = '')")
case models.CriterionModifierNotNull:
qb.addWhere("(" + column + " IS NOT NULL AND TRIM(" + column + ") != '')")
default:
clause, count := getSimpleCriterionClause(modifier, "?")
qb.addWhere(column + " " + clause)

View file

@ -273,6 +273,18 @@ func (r *repository) newQuery() queryBuilder {
}
}
func (r *repository) join(j joiner, as string, parentIDCol string) {
t := r.tableName
if as != "" {
t = as
}
j.addJoin(r.tableName, as, fmt.Sprintf("%s.%s = %s", t, r.idColumn, parentIDCol))
}
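// Example for join (sketch): on a repository configured with tableName
// "performers_scenes" and idColumn "scene_id", join(f, "performers_join",
// "scenes.id") adds
//
//	LEFT JOIN performers_scenes AS performers_join ON performers_join.scene_id = scenes.id
//
// to the joiner; with an empty alias the ON clause references the table name
// directly.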
type joiner interface {
addJoin(table, as, onClause string)
}
type joinRepository struct {
repository
fkColumn string

View file

@ -3,8 +3,7 @@ package sqlite
import (
"database/sql"
"fmt"
"path/filepath"
"strings"
"strconv"
"github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/models"
@ -290,51 +289,71 @@ func (qb *sceneQueryBuilder) All() ([]*models.Scene, error) {
return qb.queryScenes(selectAll(sceneTable)+qb.getSceneSort(nil), nil)
}
// QueryForAutoTag queries for scenes whose paths match the provided regex and
// are optionally within the provided path. Excludes organized scenes.
// TODO - this should be replaced with Query once it can perform multiple
// filters on the same field.
func (qb *sceneQueryBuilder) QueryForAutoTag(regex string, pathPrefixes []string) ([]*models.Scene, error) {
var args []interface{}
body := selectDistinctIDs("scenes") + ` WHERE
scenes.path regexp ? AND
scenes.organized = 0`
func illegalFilterCombination(type1, type2 string) error {
return fmt.Errorf("cannot have %s and %s in the same filter", type1, type2)
}
args = append(args, "(?i)"+regex)
func (qb *sceneQueryBuilder) validateFilter(sceneFilter *models.SceneFilterType) error {
const and = "AND"
const or = "OR"
const not = "NOT"
var pathClauses []string
for _, p := range pathPrefixes {
pathClauses = append(pathClauses, "scenes.path like ?")
sep := string(filepath.Separator)
if !strings.HasSuffix(p, sep) {
p = p + sep
if sceneFilter.And != nil {
if sceneFilter.Or != nil {
return illegalFilterCombination(and, or)
}
args = append(args, p+"%")
}
if len(pathClauses) > 0 {
body += " AND (" + strings.Join(pathClauses, " OR ") + ")"
}
idsResult, err := qb.runIdsQuery(body, args)
if err != nil {
return nil, err
}
var scenes []*models.Scene
for _, id := range idsResult {
scene, err := qb.Find(id)
if err != nil {
return nil, err
if sceneFilter.Not != nil {
return illegalFilterCombination(and, not)
}
scenes = append(scenes, scene)
return qb.validateFilter(sceneFilter.And)
}
return scenes, nil
if sceneFilter.Or != nil {
if sceneFilter.Not != nil {
return illegalFilterCombination(or, not)
}
return qb.validateFilter(sceneFilter.Or)
}
if sceneFilter.Not != nil {
return qb.validateFilter(sceneFilter.Not)
}
return nil
}
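// Example for validateFilter (sketch): combining And and Or at the same level
// is rejected, while nested sub-filters are validated recursively.
//
//	err := qb.validateFilter(&models.SceneFilterType{
//		And: &models.SceneFilterType{},
//		Or:  &models.SceneFilterType{},
//	})
//	// err: "cannot have AND and OR in the same filter"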
func (qb *sceneQueryBuilder) makeFilter(sceneFilter *models.SceneFilterType) *filterBuilder {
query := &filterBuilder{}
if sceneFilter.And != nil {
query.and(qb.makeFilter(sceneFilter.And))
}
if sceneFilter.Or != nil {
query.or(qb.makeFilter(sceneFilter.Or))
}
if sceneFilter.Not != nil {
query.not(qb.makeFilter(sceneFilter.Not))
}
query.handleCriterionFunc(stringCriterionHandler(sceneFilter.Path, "scenes.path"))
query.handleCriterionFunc(intCriterionHandler(sceneFilter.Rating, "scenes.rating"))
query.handleCriterionFunc(intCriterionHandler(sceneFilter.OCounter, "scenes.o_counter"))
query.handleCriterionFunc(boolCriterionHandler(sceneFilter.Organized, "scenes.organized"))
query.handleCriterionFunc(durationCriterionHandler(sceneFilter.Duration, "scenes.duration"))
query.handleCriterionFunc(resolutionCriterionHandler(sceneFilter.Resolution, "scenes.height", "scenes.width"))
query.handleCriterionFunc(hasMarkersCriterionHandler(sceneFilter.HasMarkers))
query.handleCriterionFunc(sceneIsMissingCriterionHandler(qb, sceneFilter.IsMissing))
query.handleCriterionFunc(sceneTagsCriterionHandler(qb, sceneFilter.Tags))
query.handleCriterionFunc(scenePerformersCriterionHandler(qb, sceneFilter.Performers))
query.handleCriterionFunc(sceneStudioCriterionHandler(qb, sceneFilter.Studios))
query.handleCriterionFunc(sceneMoviesCriterionHandler(qb, sceneFilter.Movies))
query.handleCriterionFunc(sceneStashIDsHandler(qb, sceneFilter.StashID))
query.handleCriterionFunc(scenePerformerTagsCriterionHandler(qb, sceneFilter.PerformerTags))
return query
}
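// Example for makeFilter (sketch): a filter such as
//
//	organized := true
//	f := &models.SceneFilterType{
//		Rating:    &models.IntCriterionInput{Value: 5, Modifier: models.CriterionModifierEquals},
//		Organized: &organized,
//	}
//
// yields a filterBuilder whose generated WHERE is roughly
// "(scenes.rating = ? AND scenes.organized = 1)"; And/Or/Not sub-filters are
// built recursively and attached via and()/or()/not() before the criterion
// handlers run.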
func (qb *sceneQueryBuilder) Query(sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, int, error) {
@ -348,152 +367,21 @@ func (qb *sceneQueryBuilder) Query(sceneFilter *models.SceneFilterType, findFilt
query := qb.newQuery()
query.body = selectDistinctIDs(sceneTable)
query.body += `
left join scene_markers on scene_markers.scene_id = scenes.id
left join performers_scenes as performers_join on performers_join.scene_id = scenes.id
left join movies_scenes as movies_join on movies_join.scene_id = scenes.id
left join studios as studio on studio.id = scenes.studio_id
left join scenes_galleries as galleries_join on galleries_join.scene_id = scenes.id
left join scenes_tags as tags_join on tags_join.scene_id = scenes.id
left join scene_stash_ids on scene_stash_ids.scene_id = scenes.id
`
if q := findFilter.Q; q != nil && *q != "" {
query.join("scene_markers", "", "scene_markers.scene_id = scenes.id")
searchColumns := []string{"scenes.title", "scenes.details", "scenes.path", "scenes.oshash", "scenes.checksum", "scene_markers.title"}
clause, thisArgs := getSearchBinding(searchColumns, *q, false)
query.addWhere(clause)
query.addArg(thisArgs...)
}
query.handleStringCriterionInput(sceneFilter.Path, "scenes.path")
query.handleIntCriterionInput(sceneFilter.Rating, "scenes.rating")
query.handleIntCriterionInput(sceneFilter.OCounter, "scenes.o_counter")
if Organized := sceneFilter.Organized; Organized != nil {
var organized string
if *Organized == true {
organized = "1"
} else {
organized = "0"
}
query.addWhere("scenes.organized = " + organized)
if err := qb.validateFilter(sceneFilter); err != nil {
return nil, 0, err
}
filter := qb.makeFilter(sceneFilter)
if durationFilter := sceneFilter.Duration; durationFilter != nil {
clause, thisArgs := getDurationWhereClause(*durationFilter)
query.addWhere(clause)
query.addArg(thisArgs...)
}
if resolutionFilter := sceneFilter.Resolution; resolutionFilter != nil {
if resolution := resolutionFilter.String(); resolutionFilter.IsValid() {
switch resolution {
case "VERY_LOW":
query.addWhere("MIN(scenes.height, scenes.width) < 240")
case "LOW":
query.addWhere("(MIN(scenes.height, scenes.width) >= 240 AND MIN(scenes.height, scenes.width) < 360)")
case "R360P":
query.addWhere("(MIN(scenes.height, scenes.width) >= 360 AND MIN(scenes.height, scenes.width) < 480)")
case "STANDARD":
query.addWhere("(MIN(scenes.height, scenes.width) >= 480 AND MIN(scenes.height, scenes.width) < 540)")
case "WEB_HD":
query.addWhere("(MIN(scenes.height, scenes.width) >= 540 AND MIN(scenes.height, scenes.width) < 720)")
case "STANDARD_HD":
query.addWhere("(MIN(scenes.height, scenes.width) >= 720 AND MIN(scenes.height, scenes.width) < 1080)")
case "FULL_HD":
query.addWhere("(MIN(scenes.height, scenes.width) >= 1080 AND MIN(scenes.height, scenes.width) < 1440)")
case "QUAD_HD":
query.addWhere("(MIN(scenes.height, scenes.width) >= 1440 AND MIN(scenes.height, scenes.width) < 1920)")
case "VR_HD":
query.addWhere("(MIN(scenes.height, scenes.width) >= 1920 AND MIN(scenes.height, scenes.width) < 2160)")
case "FOUR_K":
query.addWhere("(MIN(scenes.height, scenes.width) >= 2160 AND MIN(scenes.height, scenes.width) < 2880)")
case "FIVE_K":
query.addWhere("(MIN(scenes.height, scenes.width) >= 2880 AND MIN(scenes.height, scenes.width) < 3384)")
case "SIX_K":
query.addWhere("(MIN(scenes.height, scenes.width) >= 3384 AND MIN(scenes.height, scenes.width) < 4320)")
case "EIGHT_K":
query.addWhere("(MIN(scenes.height, scenes.width) >= 4320")
}
}
}
if hasMarkersFilter := sceneFilter.HasMarkers; hasMarkersFilter != nil {
if strings.Compare(*hasMarkersFilter, "true") == 0 {
query.addHaving("count(scene_markers.scene_id) > 0")
} else {
query.addWhere("scene_markers.id IS NULL")
}
}
if isMissingFilter := sceneFilter.IsMissing; isMissingFilter != nil && *isMissingFilter != "" {
switch *isMissingFilter {
case "galleries":
query.addWhere("galleries_join.scene_id IS NULL")
case "studio":
query.addWhere("scenes.studio_id IS NULL")
case "movie":
query.addWhere("movies_join.scene_id IS NULL")
case "performers":
query.addWhere("performers_join.scene_id IS NULL")
case "date":
query.addWhere("scenes.date IS \"\" OR scenes.date IS \"0001-01-01\"")
case "tags":
query.addWhere("tags_join.scene_id IS NULL")
case "stash_id":
query.addWhere("scene_stash_ids.scene_id IS NULL")
default:
query.addWhere("(scenes." + *isMissingFilter + " IS NULL OR TRIM(scenes." + *isMissingFilter + ") = '')")
}
}
if tagsFilter := sceneFilter.Tags; tagsFilter != nil && len(tagsFilter.Value) > 0 {
for _, tagID := range tagsFilter.Value {
query.addArg(tagID)
}
query.body += " LEFT JOIN tags on tags_join.tag_id = tags.id"
whereClause, havingClause := getMultiCriterionClause("scenes", "tags", "scenes_tags", "scene_id", "tag_id", tagsFilter)
query.addWhere(whereClause)
query.addHaving(havingClause)
}
if performersFilter := sceneFilter.Performers; performersFilter != nil && len(performersFilter.Value) > 0 {
for _, performerID := range performersFilter.Value {
query.addArg(performerID)
}
query.body += " LEFT JOIN performers ON performers_join.performer_id = performers.id"
whereClause, havingClause := getMultiCriterionClause("scenes", "performers", "performers_scenes", "scene_id", "performer_id", performersFilter)
query.addWhere(whereClause)
query.addHaving(havingClause)
}
if studiosFilter := sceneFilter.Studios; studiosFilter != nil && len(studiosFilter.Value) > 0 {
for _, studioID := range studiosFilter.Value {
query.addArg(studioID)
}
whereClause, havingClause := getMultiCriterionClause("scenes", "studio", "", "", "studio_id", studiosFilter)
query.addWhere(whereClause)
query.addHaving(havingClause)
}
if moviesFilter := sceneFilter.Movies; moviesFilter != nil && len(moviesFilter.Value) > 0 {
for _, movieID := range moviesFilter.Value {
query.addArg(movieID)
}
query.body += " LEFT JOIN movies ON movies_join.movie_id = movies.id"
whereClause, havingClause := getMultiCriterionClause("scenes", "movies", "movies_scenes", "scene_id", "movie_id", moviesFilter)
query.addWhere(whereClause)
query.addHaving(havingClause)
}
if stashIDFilter := sceneFilter.StashID; stashIDFilter != nil {
query.addWhere("scene_stash_ids.stash_id = ?")
query.addArg(stashIDFilter)
}
query.addFilter(filter)
query.sortAndPagination = qb.getSceneSort(findFilter) + getPagination(findFilter)
@ -522,7 +410,16 @@ func appendClause(clauses []string, clause string) []string {
return clauses
}
func getDurationWhereClause(durationFilter models.IntCriterionInput) (string, []interface{}) {
func durationCriterionHandler(durationFilter *models.IntCriterionInput, column string) criterionHandlerFunc {
return func(f *filterBuilder) {
if durationFilter != nil {
clause, thisArgs := getDurationWhereClause(*durationFilter, column)
f.addWhere(clause, thisArgs...)
}
}
}
func getDurationWhereClause(durationFilter models.IntCriterionInput, column string) (string, []interface{}) {
// special case for duration. We accept duration as seconds as int but the
// field is floating point. Change the equals filter to return a range
// between x and x + 1
@ -532,16 +429,16 @@ func getDurationWhereClause(durationFilter models.IntCriterionInput) (string, []
value := durationFilter.Value
if durationFilter.Modifier == models.CriterionModifierEquals {
clause = "scenes.duration >= ? AND scenes.duration < ?"
clause = fmt.Sprintf("%[1]s >= ? AND %[1]s < ?", column)
args = append(args, value)
args = append(args, value+1)
} else if durationFilter.Modifier == models.CriterionModifierNotEquals {
clause = "(scenes.duration < ? OR scenes.duration >= ?)"
clause = fmt.Sprintf("(%[1]s < ? OR %[1]s >= ?)", column)
args = append(args, value)
args = append(args, value+1)
} else {
var count int
clause, count = getIntCriterionWhereClause("scenes.duration", durationFilter)
clause, count = getIntCriterionWhereClause(column, durationFilter)
if count == 1 {
args = append(args, value)
}
@ -550,6 +447,179 @@ func getDurationWhereClause(durationFilter models.IntCriterionInput) (string, []
return clause, args
}
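// Example for getDurationWhereClause (sketch): an EQUALS filter with value 120
// on column "scenes.duration" returns
// "scenes.duration >= ? AND scenes.duration < ?" with args [120, 121], so a
// whole number of seconds matches the entire floating-point second; other
// modifiers defer to getIntCriterionWhereClause.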
func resolutionCriterionHandler(resolution *models.ResolutionEnum, heightColumn string, widthColumn string) criterionHandlerFunc {
return func(f *filterBuilder) {
if resolution != nil && resolution.IsValid() {
min := resolution.GetMinResolution()
max := resolution.GetMaxResolution()
widthHeight := fmt.Sprintf("MIN(%s, %s)", widthColumn, heightColumn)
if min > 0 {
f.addWhere(widthHeight + " >= " + strconv.Itoa(min))
}
if max > 0 {
f.addWhere(widthHeight + " < " + strconv.Itoa(max))
}
}
}
}
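// Example for resolutionCriterionHandler (sketch): assuming FULL_HD maps to
// the 1080-1440 range used by the previous inline implementation above, the
// handler adds "MIN(scenes.width, scenes.height) >= 1080" and
// "MIN(scenes.width, scenes.height) < 1440"; an open-ended range omits the
// missing bound.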
func hasMarkersCriterionHandler(hasMarkers *string) criterionHandlerFunc {
return func(f *filterBuilder) {
if hasMarkers != nil {
f.addJoin("scene_markers", "", "scene_markers.scene_id = scenes.id")
if *hasMarkers == "true" {
f.addHaving("count(scene_markers.scene_id) > 0")
} else {
f.addWhere("scene_markers.id IS NULL")
}
}
}
}
func sceneIsMissingCriterionHandler(qb *sceneQueryBuilder, isMissing *string) criterionHandlerFunc {
return func(f *filterBuilder) {
if isMissing != nil && *isMissing != "" {
switch *isMissing {
case "galleries":
qb.galleriesRepository().join(f, "galleries_join", "scenes.id")
f.addWhere("galleries_join.scene_id IS NULL")
case "studio":
f.addWhere("scenes.studio_id IS NULL")
case "movie":
qb.moviesRepository().join(f, "movies_join", "scenes.id")
f.addWhere("movies_join.scene_id IS NULL")
case "performers":
qb.performersRepository().join(f, "performers_join", "scenes.id")
f.addWhere("performers_join.scene_id IS NULL")
case "date":
f.addWhere("scenes.date IS \"\" OR scenes.date IS \"0001-01-01\"")
case "tags":
qb.tagsRepository().join(f, "tags_join", "scenes.id")
f.addWhere("tags_join.scene_id IS NULL")
case "stash_id":
qb.stashIDRepository().join(f, "scene_stash_ids", "scenes.id")
f.addWhere("scene_stash_ids.scene_id IS NULL")
default:
f.addWhere("(scenes." + *isMissing + " IS NULL OR TRIM(scenes." + *isMissing + ") = '')")
}
}
}
}
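// Example for sceneIsMissingCriterionHandler (sketch): isMissing "performers"
// joins performers_scenes as performers_join on scenes.id and adds
// "performers_join.scene_id IS NULL", i.e. scenes with no performer rows;
// unrecognised values fall back to a NULL-or-blank check on the named scenes
// column.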
func (qb *sceneQueryBuilder) getMultiCriterionHandlerBuilder(foreignTable, joinTable, foreignFK string, addJoinsFunc func(f *filterBuilder)) multiCriterionHandlerBuilder {
return multiCriterionHandlerBuilder{
primaryTable: sceneTable,
foreignTable: foreignTable,
joinTable: joinTable,
primaryFK: sceneIDColumn,
foreignFK: foreignFK,
addJoinsFunc: addJoinsFunc,
}
}
func sceneTagsCriterionHandler(qb *sceneQueryBuilder, tags *models.MultiCriterionInput) criterionHandlerFunc {
addJoinsFunc := func(f *filterBuilder) {
qb.tagsRepository().join(f, "tags_join", "scenes.id")
f.addJoin("tags", "", "tags_join.tag_id = tags.id")
}
h := qb.getMultiCriterionHandlerBuilder(tagTable, scenesTagsTable, tagIDColumn, addJoinsFunc)
return h.handler(tags)
}
func scenePerformersCriterionHandler(qb *sceneQueryBuilder, performers *models.MultiCriterionInput) criterionHandlerFunc {
addJoinsFunc := func(f *filterBuilder) {
qb.performersRepository().join(f, "performers_join", "scenes.id")
f.addJoin("performers", "", "performers_join.performer_id = performers.id")
}
h := qb.getMultiCriterionHandlerBuilder(performerTable, performersScenesTable, performerIDColumn, addJoinsFunc)
return h.handler(performers)
}
func sceneStudioCriterionHandler(qb *sceneQueryBuilder, studios *models.MultiCriterionInput) criterionHandlerFunc {
addJoinsFunc := func(f *filterBuilder) {
f.addJoin("studios", "studio", "studio.id = scenes.studio_id")
}
h := qb.getMultiCriterionHandlerBuilder("studio", "", studioIDColumn, addJoinsFunc)
return h.handler(studios)
}
func sceneMoviesCriterionHandler(qb *sceneQueryBuilder, movies *models.MultiCriterionInput) criterionHandlerFunc {
addJoinsFunc := func(f *filterBuilder) {
qb.moviesRepository().join(f, "movies_join", "scenes.id")
f.addJoin("movies", "", "movies_join.movie_id = movies.id")
}
h := qb.getMultiCriterionHandlerBuilder(movieTable, moviesScenesTable, "movie_id", addJoinsFunc)
return h.handler(movies)
}
func sceneStashIDsHandler(qb *sceneQueryBuilder, stashID *string) criterionHandlerFunc {
return func(f *filterBuilder) {
if stashID != nil && *stashID != "" {
qb.stashIDRepository().join(f, "scene_stash_ids", "scenes.id")
stringLiteralCriterionHandler(stashID, "scene_stash_ids.stash_id")(f)
}
}
}
func scenePerformerTagsCriterionHandler(qb *sceneQueryBuilder, performerTagsFilter *models.MultiCriterionInput) criterionHandlerFunc {
return func(f *filterBuilder) {
if performerTagsFilter != nil && len(performerTagsFilter.Value) > 0 {
qb.performersRepository().join(f, "performers_join", "scenes.id")
f.addJoin("performers_tags", "performer_tags_join", "performers_join.performer_id = performer_tags_join.performer_id")
var args []interface{}
for _, tagID := range performerTagsFilter.Value {
args = append(args, tagID)
}
if performerTagsFilter.Modifier == models.CriterionModifierIncludes {
// includes any of the provided ids
f.addWhere("performer_tags_join.tag_id IN "+getInBinding(len(performerTagsFilter.Value)), args...)
} else if performerTagsFilter.Modifier == models.CriterionModifierIncludesAll {
// includes all of the provided ids
f.addWhere("performer_tags_join.tag_id IN "+getInBinding(len(performerTagsFilter.Value)), args...)
f.addHaving(fmt.Sprintf("count(distinct performer_tags_join.tag_id) IS %d", len(performerTagsFilter.Value)))
} else if performerTagsFilter.Modifier == models.CriterionModifierExcludes {
f.addWhere(fmt.Sprintf(`not exists
(select performers_scenes.performer_id from performers_scenes
left join performers_tags on performers_tags.performer_id = performers_scenes.performer_id where
performers_scenes.scene_id = scenes.id AND
performers_tags.tag_id in %s)`, getInBinding(len(performerTagsFilter.Value))), args...)
}
}
}
}
func handleScenePerformerTagsCriterion(query *queryBuilder, performerTagsFilter *models.MultiCriterionInput) {
if performerTagsFilter != nil && len(performerTagsFilter.Value) > 0 {
for _, tagID := range performerTagsFilter.Value {
query.addArg(tagID)
}
query.body += " LEFT JOIN performers_tags AS performer_tags_join on performers_join.performer_id = performer_tags_join.performer_id"
if performerTagsFilter.Modifier == models.CriterionModifierIncludes {
// includes any of the provided ids
query.addWhere("performer_tags_join.tag_id IN " + getInBinding(len(performerTagsFilter.Value)))
} else if performerTagsFilter.Modifier == models.CriterionModifierIncludesAll {
// includes all of the provided ids
query.addWhere("performer_tags_join.tag_id IN " + getInBinding(len(performerTagsFilter.Value)))
query.addHaving(fmt.Sprintf("count(distinct performer_tags_join.tag_id) IS %d", len(performerTagsFilter.Value)))
} else if performerTagsFilter.Modifier == models.CriterionModifierExcludes {
query.addWhere(fmt.Sprintf(`not exists
(select performers_scenes.performer_id from performers_scenes
left join performers_tags on performers_tags.performer_id = performers_scenes.performer_id where
performers_scenes.scene_id = scenes.id AND
performers_tags.tag_id in %s)`, getInBinding(len(performerTagsFilter.Value))))
}
}
}
func (qb *sceneQueryBuilder) getSceneSort(findFilter *models.FindFilterType) string {
if findFilter == nil {
return " ORDER BY scenes.path, scenes.date ASC "

Some files were not shown because too many files have changed in this diff.