diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 000000000..c343fa522 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,155 @@ +name: Build + +on: + push: + branches: [ develop, master ] + pull_request: + branches: [ develop ] + release: + types: [ published ] + +jobs: + build: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v2 + + - name: Checkout + run: git fetch --prune --unshallow --tags + + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.13 + + - name: Set up Node + uses: actions/setup-node@v2 + with: + node-version: '12' + + - name: Cache node modules + uses: actions/cache@v2 + env: + cache-name: cache-node_modules + with: + path: ui/v2.5/node_modules + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}- + ${{ runner.os }}-build- + ${{ runner.os }}- + + - name: Pre-install + run: make pre-ui + + - name: Generate + run: make generate + + - name: Validate + run: make ui-validate fmt-check vet it + + - name: Build UI + run: make ui-only + + - name: Cross Compile + run: | + docker pull stashapp/compiler:4 + ./scripts/cross-compile.sh + + - name: Generate checksums + run: | + git describe --tags --exclude latest_develop | tee CHECKSUMS_SHA1 + sha1sum dist/stash-* | sed 's/dist\///g' | tee -a CHECKSUMS_SHA1 + echo "STASH_VERSION=$(git describe --tags --exclude latest_develop)" >> $GITHUB_ENV + echo "RELEASE_DATE=$(date +'%Y-%m-%d %H:%M:%S %Z')" >> $GITHUB_ENV + + - name: Upload Windows binary + # only upload binaries for pull requests + if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}} + uses: actions/upload-artifact@v2 + with: + name: stash-win.exe + path: dist/stash-win.exe + + - name: Upload OSX binary + # only upload binaries for pull requests + if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}} + uses: actions/upload-artifact@v2 + with: + name: stash-osx + path: dist/stash-osx + + - name: Upload Linux binary + # only upload binaries for pull requests + if: ${{ github.event_name == 'pull_request' && github.base_ref != 'refs/heads/develop' && github.base_ref != 'refs/heads/master'}} + uses: actions/upload-artifact@v2 + with: + name: stash-linux + path: dist/stash-linux + + - name: Update latest_develop tag + if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }} + run: git tag -f latest_develop; git push -f --tags + + - name: Development Release + if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }} + uses: meeDamian/github-release@2.0 + with: + token: "${{ secrets.GITHUB_TOKEN }}" + prerelease: true + allow_override: true + tag: latest_develop + name: "${{ env.STASH_VERSION }}: Latest development build" + body: "**${{ env.RELEASE_DATE }}**\n This is always the latest committed version on the develop branch. Use at your own risk!"
+ files: | + dist/stash-osx + dist/stash-win.exe + dist/stash-linux + dist/stash-linux-arm64v8 + dist/stash-linux-arm32v7 + dist/stash-pi + CHECKSUMS_SHA1 + gzip: false + + - name: Master release + if: ${{ github.event_name == 'release' && github.ref != 'refs/tags/latest_develop' }} + uses: meeDamian/github-release@2.0 + with: + token: "${{ secrets.GITHUB_TOKEN }}" + files: | + dist/stash-osx + dist/stash-win.exe + dist/stash-linux + dist/stash-linux-arm64v8 + dist/stash-linux-arm32v7 + dist/stash-pi + CHECKSUMS_SHA1 + gzip: false + + - name: Development Docker + if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }} + env: + DOCKER_CLI_EXPERIMENTAL: enabled + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} + run: | + docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64 + docker info + docker buildx create --name builder --use + docker buildx inspect --bootstrap + docker buildx ls + bash ./docker/ci/x86_64/docker_push.sh development + + - name: Release Docker + if: ${{ github.event_name == 'release' && github.ref != 'refs/tags/latest_develop' }} + env: + DOCKER_CLI_EXPERIMENTAL: enabled + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} + run: | + docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64 + docker info + docker buildx create --name builder --use + docker buildx inspect --bootstrap + docker buildx ls + bash ./docker/ci/x86_64/docker_push.sh latest diff --git a/.gitignore b/.gitignore index 822b8ce50..d6c2eea2c 100644 --- a/.gitignore +++ b/.gitignore @@ -35,6 +35,7 @@ ui/v2.5/src/core/generated-*.tsx .idea/**/usage.statistics.xml .idea/**/dictionaries .idea/**/shelf +.vscode # Generated files .idea/**/contentModel.xml diff --git a/.travis.yml b/.travis.yml.disabled similarity index 100% rename from .travis.yml rename to .travis.yml.disabled diff --git a/README.md b/README.md index b0f3a995e..6d0513996 100644 --- a/README.md +++ b/README.md @@ -6,17 +6,24 @@ https://stashapp.cc -**Stash is a Go app which organizes and serves your porn.** +**Stash is a locally hosted web-based app written in Go which organizes and serves your porn.** -See a demo [here](https://vimeo.com/275537038) (password is stashapp). +* It can gather information about videos in your collection from the internet, and is extensible through the use of community-built plugins. +* It supports a wide variety of both video and image formats. +* You can tag videos and find them later. +* It provides statistics about performers, tags, studios and other things. -An in-app manual is available, and the manual pages can be viewed [here](https://github.com/stashapp/stash/tree/develop/ui/v2.5/src/docs/en). +You can [watch a demo video](https://vimeo.com/275537038) to see it in action (password is stashapp). -# Docker install +For further information you can [read the in-app manual](https://github.com/stashapp/stash/tree/develop/ui/v2.5/src/docs/en). + +# Installing Stash + +## Docker install Follow [this README.md in the docker directory.](docker/production/README.md) -# Bare-metal Install +## Pre-Compiled Binaries Stash supports macOS, Windows, and Linux. Download the [latest release here](https://github.com/stashapp/stash/releases). @@ -36,9 +43,18 @@ The `ffmpeg(.exe)` and `ffprobe(.exe)` files should be placed in `~/.stash` on m # Usage +## Quickstart Guide +1) Download and install Stash and its dependencies. +2) Run Stash.
It will prompt you for some configuration options and a directory to index (you can also do this step afterward) +3) After configuration, launch your web browser and navigate to the URL shown within the Stash app. + +**Note that Stash does not currently retrieve and organize information about your entire library automatically.** You will need to help it along through the use of [scrapers](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/docs/en/Scraping.md). The Stash community has developed scrapers for many popular data sources which can be downloaded and installed from [this repository](https://github.com/stashapp/CommunityScrapers). + +The simplest way to tag a large number of files is by using the [Tagger](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/docs/en/Tagger.md), which uses filename keywords to help identify the file and pull in scene and performer information from our database. Note that this information is not comprehensive and you may need to use the scrapers to identify some of your media. + ## CLI -Stash provides some command line options. See what is currently available by running `stash --help`. +Stash runs as a command-line app and local web server. There are some command-line options available, which you can see by running `stash --help`. For example, to run stash locally on port 80 run it like this (OSX / Linux) `stash --host 127.0.0.1 --port 80` @@ -52,17 +68,25 @@ This command would need customizing for your environment. [This link](https://s Once you have a certificate and key file name them `stash.crt` and `stash.key` and place them in the `~/.stash` directory. Stash detects these and starts up using HTTPS rather than HTTP. -# FAQ +# Customization -> I'm unable to run the app on OSX or Linux +## Themes +There is a [directory of themes](https://github.com/stashapp/stash/wiki/Themes) on our Wiki, along with instructions on how to install them. -Try running `chmod u+x stash-osx` or `chmod u+x stash-linux` to make the file executable. +## CSS Customization +You can make the Stash interface fit your desired style with [Custom CSS snippets](https://github.com/stashapp/stash/wiki/Custom-CSS-snippets) and [CSS Tweaks](https://github.com/stashapp/stash/wiki/CSS-Tweaks). -> I have a question not answered here. +# Support -Join the [Discord server](https://discord.gg/2TsNFKt). +Answers to frequently asked questions can be found [on our Wiki](https://github.com/stashapp/stash/wiki/FAQ). -# Development +For issues not addressed there, there are a few options. + +* Read the [Wiki](https://github.com/stashapp/stash/wiki) +* Check the in-app documentation (also available [here](https://github.com/stashapp/stash/tree/develop/ui/v2.5/src/docs/en)) +* Join the [Discord server](https://discord.gg/2TsNFKt). + +# Building From Source Code ## Install @@ -122,10 +146,3 @@ where the app can be cross-compiled. This process is kicked off by CI via the ` command to open a bash shell to the container to poke around: `docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -i -t stashappdev/compiler:latest /bin/bash` - -## Customization - -You can make Stash interface fit your desired style with [Custom CSS snippets](https://github.com/stashapp/stash/wiki/Custom-CSS-snippets) and [CSS Tweaks](https://github.com/stashapp/stash/wiki/CSS-Tweaks). - -[Stash Plex Theme](https://github.com/stashapp/stash/wiki/Stash-Plex-Theme) is a community created theme inspired by popular Plex Interface.
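As an aside on the HTTPS behaviour noted in the README section above (Stash switches from HTTP to HTTPS when `stash.crt` and `stash.key` are found in `~/.stash`), the following is a generic Go sketch of that detection pattern. It is not Stash's actual startup code; the port and handler here are illustrative placeholders.

```go
package main

import (
	"log"
	"net/http"
	"os"
	"path/filepath"
)

func main() {
	home, _ := os.UserHomeDir()
	certPath := filepath.Join(home, ".stash", "stash.crt")
	keyPath := filepath.Join(home, ".stash", "stash.key")

	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("ok"))
	})

	// If both certificate and key exist, serve HTTPS; otherwise fall back to HTTP.
	_, certErr := os.Stat(certPath)
	_, keyErr := os.Stat(keyPath)
	if certErr == nil && keyErr == nil {
		log.Fatal(http.ListenAndServeTLS(":9999", certPath, keyPath, mux))
	}
	log.Fatal(http.ListenAndServe(":9999", mux))
}
```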
- diff --git a/go.mod b/go.mod index d27e7bb41..50213e41c 100644 --- a/go.mod +++ b/go.mod @@ -18,7 +18,7 @@ require ( github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a github.com/jmoiron/sqlx v1.2.0 github.com/json-iterator/go v1.1.9 - github.com/mattn/go-sqlite3 v1.13.0 + github.com/mattn/go-sqlite3 v1.14.6 github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007 github.com/remeh/sizedwaitgroup v1.0.0 github.com/rs/cors v1.6.0 @@ -33,6 +33,7 @@ require ( golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 golang.org/x/image v0.0.0-20190802002840-cff245a6509b golang.org/x/net v0.0.0-20200822124328-c89045814202 + golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd golang.org/x/tools v0.0.0-20200915031644-64986481280e // indirect gopkg.in/yaml.v2 v2.3.0 ) diff --git a/go.sum b/go.sum index ba64d66f2..744bb3712 100644 --- a/go.sum +++ b/go.sum @@ -540,6 +540,8 @@ github.com/mattn/go-sqlite3 v1.10.0 h1:jbhqpg7tQe4SupckyijYiy0mJJ/pRyHvXf7JdWK86 github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.13.0 h1:LnJI81JidiW9r7pS/hXe6cFeO5EXNq7KbfvoJLRI69c= github.com/mattn/go-sqlite3 v1.13.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= +github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg= +github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/microcosm-cc/bluemonday v1.0.1/go.mod h1:hsXNsILzKxV+sX77C5b8FSuKF00vh2OMYv+xgHpAMF4= github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc= diff --git a/graphql/documents/data/config.graphql b/graphql/documents/data/config.graphql index cd7dc6835..07330d63b 100644 --- a/graphql/documents/data/config.graphql +++ b/graphql/documents/data/config.graphql @@ -31,6 +31,7 @@ fragment ConfigGeneralData on ConfigGeneralResult { excludes imageExcludes scraperUserAgent + scraperCertCheck scraperCDPPath stashBoxes { name diff --git a/graphql/documents/data/performer-slim.graphql b/graphql/documents/data/performer-slim.graphql index 09aeb5c16..603744d33 100644 --- a/graphql/documents/data/performer-slim.graphql +++ b/graphql/documents/data/performer-slim.graphql @@ -3,6 +3,11 @@ fragment SlimPerformerData on Performer { name gender image_path + favorite + tags { + id + name + } stash_ids { endpoint stash_id diff --git a/graphql/documents/data/performer.graphql b/graphql/documents/data/performer.graphql index 24ce512ad..253412b8a 100644 --- a/graphql/documents/data/performer.graphql +++ b/graphql/documents/data/performer.graphql @@ -20,6 +20,11 @@ fragment PerformerData on Performer { favorite image_path scene_count + + tags { + ...TagData + } + stash_ids { stash_id endpoint diff --git a/graphql/documents/data/scrapers.graphql b/graphql/documents/data/scrapers.graphql index fe2bf7f7b..b4397cdf3 100644 --- a/graphql/documents/data/scrapers.graphql +++ b/graphql/documents/data/scrapers.graphql @@ -15,6 +15,9 @@ fragment ScrapedPerformerData on ScrapedPerformer { tattoos piercings aliases + tags { + ...ScrapedSceneTagData + } image } @@ -36,6 +39,9 @@ fragment ScrapedScenePerformerData on ScrapedScenePerformer { tattoos piercings aliases + tags { + ...ScrapedSceneTagData + } remote_site_id images } diff --git a/graphql/documents/data/tag.graphql b/graphql/documents/data/tag.graphql index 650f52a56..3a0e84e1c 100644 --- a/graphql/documents/data/tag.graphql 
+++ b/graphql/documents/data/tag.graphql @@ -4,4 +4,5 @@ fragment TagData on Tag { image_path scene_count scene_marker_count + performer_count } diff --git a/graphql/documents/mutations/performer.graphql b/graphql/documents/mutations/performer.graphql index 0f29c12b4..e4ccf442e 100644 --- a/graphql/documents/mutations/performer.graphql +++ b/graphql/documents/mutations/performer.graphql @@ -16,6 +16,7 @@ mutation PerformerCreate( $twitter: String, $instagram: String, $favorite: Boolean, + $tag_ids: [ID!], $stash_ids: [StashIDInput!], $image: String) { @@ -37,6 +38,7 @@ mutation PerformerCreate( twitter: $twitter, instagram: $instagram, favorite: $favorite, + tag_ids: $tag_ids, stash_ids: $stash_ids, image: $image }) { @@ -52,6 +54,14 @@ mutation PerformerUpdate( } } +mutation BulkPerformerUpdate( + $input: BulkPerformerUpdateInput!) { + + bulkPerformerUpdate(input: $input) { + ...PerformerData + } +} + mutation PerformerDestroy($id: ID!) { performerDestroy(input: { id: $id }) } diff --git a/graphql/documents/queries/misc.graphql b/graphql/documents/queries/misc.graphql index 89983d5ba..3df25042e 100644 --- a/graphql/documents/queries/misc.graphql +++ b/graphql/documents/queries/misc.graphql @@ -13,24 +13,24 @@ query AllTags { } query AllPerformersForFilter { - allPerformersSlim { + allPerformers { ...SlimPerformerData } } query AllStudiosForFilter { - allStudiosSlim { + allStudios { ...SlimStudioData } } query AllMoviesForFilter { - allMoviesSlim { + allMovies { ...SlimMovieData } } query AllTagsForFilter { - allTagsSlim { + allTags { id name } diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql index 9364d69a5..7286f4591 100644 --- a/graphql/schema/schema.graphql +++ b/graphql/schema/schema.graphql @@ -115,11 +115,6 @@ type Query { # Get everything with minimal metadata - allPerformersSlim: [Performer!]! - allStudiosSlim: [Studio!]! - allMoviesSlim: [Movie!]! - allTagsSlim: [Tag!]! - # Version version: Version! @@ -174,6 +169,7 @@ type Mutation { performerUpdate(input: PerformerUpdateInput!): Performer performerDestroy(input: PerformerDestroyInput!): Boolean! performersDestroy(ids: [ID!]!): Boolean! + bulkPerformerUpdate(input: BulkPerformerUpdateInput!): [Performer!] studioCreate(input: StudioCreateInput!): Studio studioUpdate(input: StudioUpdateInput!): Studio diff --git a/graphql/schema/types/config.graphql b/graphql/schema/types/config.graphql index 5967a2625..24b243138 100644 --- a/graphql/schema/types/config.graphql +++ b/graphql/schema/types/config.graphql @@ -81,6 +81,8 @@ input ConfigGeneralInput { scraperUserAgent: String """Scraper CDP path. Path to chrome executable or remote address""" scraperCDPPath: String + """Whether the scraper should check for invalid certificates""" + scraperCertCheck: Boolean! """Stash-box instances used for tagging""" stashBoxes: [StashBoxInput!]! } @@ -92,6 +94,10 @@ type ConfigGeneralResult { databasePath: String! """Path to generated files""" generatedPath: String! + """Path to the config file used""" + configFilePath: String! + """Path to scrapers""" + scrapersPath: String! """Path to cache""" cachePath: String! """Whether to calculate MD5 checksums for scene video files""" @@ -144,6 +150,8 @@ type ConfigGeneralResult { scraperUserAgent: String """Scraper CDP path. Path to chrome executable or remote address""" scraperCDPPath: String + """Whether the scraper should check for invalid certificates""" + scraperCertCheck: Boolean! """Stash-box instances used for tagging""" stashBoxes: [StashBox!]! 
} diff --git a/graphql/schema/types/filters.graphql b/graphql/schema/types/filters.graphql index d03aa062b..750cb6c89 100644 --- a/graphql/schema/types/filters.graphql +++ b/graphql/schema/types/filters.graphql @@ -59,6 +59,8 @@ input PerformerFilterType { gender: GenderCriterionInput """Filter to only include performers missing this property""" is_missing: String + """Filter to only include performers with these tags""" + tags: MultiCriterionInput """Filter by StashID""" stash_id: String } @@ -75,6 +77,10 @@ input SceneMarkerFilterType { } input SceneFilterType { + AND: SceneFilterType + OR: SceneFilterType + NOT: SceneFilterType + """Filter by path""" path: StringCriterionInput """Filter by rating""" @@ -97,6 +103,8 @@ input SceneFilterType { movies: MultiCriterionInput """Filter to only include scenes with these tags""" tags: MultiCriterionInput + """Filter to only include scenes with performers with these tags""" + performer_tags: MultiCriterionInput """Filter to only include scenes with these performers""" performers: MultiCriterionInput """Filter by StashID""" @@ -132,23 +140,38 @@ input GalleryFilterType { organized: Boolean """Filter by average image resolution""" average_resolution: ResolutionEnum - """Filter to only include scenes with this studio""" + """Filter to only include galleries with this studio""" studios: MultiCriterionInput - """Filter to only include scenes with these tags""" + """Filter to only include galleries with these tags""" tags: MultiCriterionInput - """Filter to only include scenes with these performers""" + """Filter to only include galleries with performers with these tags""" + performer_tags: MultiCriterionInput + """Filter to only include galleries with these performers""" performers: MultiCriterionInput """Filter by number of images in this gallery""" image_count: IntCriterionInput } input TagFilterType { + AND: TagFilterType + OR: TagFilterType + NOT: TagFilterType + """Filter to only include tags missing this property""" is_missing: String """Filter by number of scenes with this tag""" scene_count: IntCriterionInput + """Filter by number of images with this tag""" + image_count: IntCriterionInput + + """Filter by number of galleries with this tag""" + gallery_count: IntCriterionInput + + """Filter by number of performers with this tag""" + performer_count: IntCriterionInput + """Filter by number of markers with this tag""" marker_count: IntCriterionInput } @@ -170,6 +193,8 @@ input ImageFilterType { studios: MultiCriterionInput """Filter to only include images with these tags""" tags: MultiCriterionInput + """Filter to only include images with performers with these tags""" + performer_tags: MultiCriterionInput """Filter to only include images with these performers""" performers: MultiCriterionInput """Filter to only include images with these galleries""" diff --git a/graphql/schema/types/metadata.graphql b/graphql/schema/types/metadata.graphql index d00e2846c..f83f3ad78 100644 --- a/graphql/schema/types/metadata.graphql +++ b/graphql/schema/types/metadata.graphql @@ -33,15 +33,15 @@ input GeneratePreviewOptionsInput { input ScanMetadataInput { paths: [String!] """Set name, date, details from metadata (if present)""" - useFileMetadata: Boolean! + useFileMetadata: Boolean """Strip file extension from title""" - stripFileExtension: Boolean! + stripFileExtension: Boolean """Generate previews during scan""" - scanGeneratePreviews: Boolean! + scanGeneratePreviews: Boolean """Generate image previews during scan""" - scanGenerateImagePreviews: Boolean! 
+ scanGenerateImagePreviews: Boolean """Generate sprites during scan""" - scanGenerateSprites: Boolean! + scanGenerateSprites: Boolean } input CleanMetadataInput { diff --git a/graphql/schema/types/performer.graphql b/graphql/schema/types/performer.graphql index fd5c08fc4..1324f4f81 100644 --- a/graphql/schema/types/performer.graphql +++ b/graphql/schema/types/performer.graphql @@ -27,6 +27,7 @@ type Performer { piercings: String aliases: String favorite: Boolean! + tags: [Tag!]! image_path: String # Resolver scene_count: Int # Resolver @@ -52,6 +53,7 @@ input PerformerCreateInput { twitter: String instagram: String favorite: Boolean + tag_ids: [ID!] """This should be base64 encoded""" image: String stash_ids: [StashIDInput!] @@ -76,11 +78,34 @@ input PerformerUpdateInput { twitter: String instagram: String favorite: Boolean + tag_ids: [ID!] """This should be base64 encoded""" image: String stash_ids: [StashIDInput!] } +input BulkPerformerUpdateInput { + clientMutationId: String + ids: [ID!] + url: String + gender: GenderEnum + birthdate: String + ethnicity: String + country: String + eye_color: String + height: String + measurements: String + fake_tits: String + career_length: String + tattoos: String + piercings: String + aliases: String + twitter: String + instagram: String + favorite: Boolean + tag_ids: BulkUpdateIds +} + input PerformerDestroyInput { id: ID! } diff --git a/graphql/schema/types/scraped-performer.graphql b/graphql/schema/types/scraped-performer.graphql index d991ed327..eadc19160 100644 --- a/graphql/schema/types/scraped-performer.graphql +++ b/graphql/schema/types/scraped-performer.graphql @@ -16,6 +16,8 @@ type ScrapedPerformer { tattoos: String piercings: String aliases: String + # Should be ScrapedPerformerTag - but would be identical types + tags: [ScrapedSceneTag!] """This should be base64 encoded""" image: String @@ -39,5 +41,6 @@ input ScrapedPerformerInput { piercings: String aliases: String + # not including tags for the input # not including image for the input } \ No newline at end of file diff --git a/graphql/schema/types/scraper.graphql b/graphql/schema/types/scraper.graphql index 7066ac4b4..65a474600 100644 --- a/graphql/schema/types/scraper.graphql +++ b/graphql/schema/types/scraper.graphql @@ -45,6 +45,7 @@ type ScrapedScenePerformer { tattoos: String piercings: String aliases: String + tags: [ScrapedSceneTag!] remote_site_id: String images: [String!] 
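The new `BulkPerformerUpdateInput` above reuses `BulkUpdateIds` for `tag_ids`, the same mechanism bulk scene updates use to add, remove, or set IDs on top of each entity's existing list (see `adjustTagIDs` later in this diff). A self-contained Go sketch of that idea follows; the type and mode names are illustrative stand-ins, not the real `models` definitions.

```go
package main

import "fmt"

// Stand-ins for the real bulk-update ID input; names are assumptions.
type bulkUpdateMode int

const (
	modeSet bulkUpdateMode = iota
	modeAdd
	modeRemove
)

type bulkUpdateIDs struct {
	IDs  []int
	Mode bulkUpdateMode
}

// adjust mirrors the idea behind adjustTagIDs: start from the entity's
// current tag IDs and apply the bulk set/add/remove operation.
func adjust(current []int, upd bulkUpdateIDs) []int {
	switch upd.Mode {
	case modeSet:
		return upd.IDs
	case modeAdd:
		out := append([]int{}, current...)
		for _, id := range upd.IDs {
			if !contains(out, id) {
				out = append(out, id)
			}
		}
		return out
	case modeRemove:
		var out []int
		for _, id := range current {
			if !contains(upd.IDs, id) {
				out = append(out, id)
			}
		}
		return out
	}
	return current
}

func contains(s []int, v int) bool {
	for _, x := range s {
		if x == v {
			return true
		}
	}
	return false
}

func main() {
	existing := []int{1, 2, 3}
	fmt.Println(adjust(existing, bulkUpdateIDs{IDs: []int{3, 4}, Mode: modeAdd}))    // [1 2 3 4]
	fmt.Println(adjust(existing, bulkUpdateIDs{IDs: []int{2}, Mode: modeRemove}))    // [1 3]
}
```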
diff --git a/graphql/schema/types/tag.graphql b/graphql/schema/types/tag.graphql index 68fb53e81..2cb6765c8 100644 --- a/graphql/schema/types/tag.graphql +++ b/graphql/schema/types/tag.graphql @@ -5,6 +5,7 @@ type Tag { image_path: String # Resolver scene_count: Int # Resolver scene_marker_count: Int # Resolver + performer_count: Int } input TagCreateInput { diff --git a/pkg/api/check_version.go b/pkg/api/check_version.go index 11f7d1b44..659e01ec7 100644 --- a/pkg/api/check_version.go +++ b/pkg/api/check_version.go @@ -10,6 +10,8 @@ import ( "runtime" "time" + "golang.org/x/sys/cpu" + "github.com/stashapp/stash/pkg/logger" ) @@ -26,10 +28,12 @@ var ErrNoVersion = errors.New("no stash version") var stashReleases = func() map[string]string { return map[string]string{ - "windows/amd64": "stash-win.exe", - "linux/amd64": "stash-linux", "darwin/amd64": "stash-osx", + "linux/amd64": "stash-linux", + "windows/amd64": "stash-win.exe", "linux/arm": "stash-pi", + "linux/arm64": "stash-linux-arm64v8", + "linux/armv7": "stash-linux-arm32v7", } } @@ -141,7 +145,13 @@ func makeGithubRequest(url string, output interface{}) error { // which is the latest pre-release build. func GetLatestVersion(shortHash bool) (latestVersion string, latestRelease string, err error) { - platform := fmt.Sprintf("%s/%s", runtime.GOOS, runtime.GOARCH) + arch := runtime.GOARCH // https://en.wikipedia.org/wiki/Comparison_of_ARM_cores + isARMv7 := cpu.ARM.HasNEON || cpu.ARM.HasVFPv3 || cpu.ARM.HasVFPv3D16 || cpu.ARM.HasVFPv4 // armv6 doesn't support any of these features + if arch == "arm" && isARMv7 { + arch = "armv7" + } + + platform := fmt.Sprintf("%s/%s", runtime.GOOS, arch) wantedRelease := stashReleases()[platform] version, _, _ := GetVersion() diff --git a/pkg/api/resolver_model_image.go b/pkg/api/resolver_model_image.go index d3cf4c066..893198f1c 100644 --- a/pkg/api/resolver_model_image.go +++ b/pkg/api/resolver_model_image.go @@ -34,7 +34,7 @@ func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*models.Im func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*models.ImagePathsType, error) { baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - builder := urlbuilders.NewImageURLBuilder(baseURL, obj.ID) + builder := urlbuilders.NewImageURLBuilder(baseURL, obj) thumbnailPath := builder.GetThumbnailURL() imagePath := builder.GetImageURL() return &models.ImagePathsType{ diff --git a/pkg/api/resolver_model_movie.go b/pkg/api/resolver_model_movie.go index 137113ed7..afd82ab8a 100644 --- a/pkg/api/resolver_model_movie.go +++ b/pkg/api/resolver_model_movie.go @@ -84,13 +84,13 @@ func (r *movieResolver) Synopsis(ctx context.Context, obj *models.Movie) (*strin func (r *movieResolver) FrontImagePath(ctx context.Context, obj *models.Movie) (*string, error) { baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - frontimagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj.ID).GetMovieFrontImageURL() + frontimagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj).GetMovieFrontImageURL() return &frontimagePath, nil } func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (*string, error) { baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - backimagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj.ID).GetMovieBackImageURL() + backimagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj).GetMovieBackImageURL() return &backimagePath, nil } diff --git a/pkg/api/resolver_model_performer.go b/pkg/api/resolver_model_performer.go index ccbcbc58a..cef67c22a 100644 --- 
a/pkg/api/resolver_model_performer.go +++ b/pkg/api/resolver_model_performer.go @@ -134,10 +134,21 @@ func (r *performerResolver) Favorite(ctx context.Context, obj *models.Performer) func (r *performerResolver) ImagePath(ctx context.Context, obj *models.Performer) (*string, error) { baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - imagePath := urlbuilders.NewPerformerURLBuilder(baseURL, obj.ID).GetPerformerImageURL() + imagePath := urlbuilders.NewPerformerURLBuilder(baseURL, obj).GetPerformerImageURL() return &imagePath, nil } +func (r *performerResolver) Tags(ctx context.Context, obj *models.Performer) (ret []*models.Tag, err error) { + if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { + ret, err = repo.Tag().FindByPerformerID(obj.ID) + return err + }); err != nil { + return nil, err + } + + return ret, nil +} + func (r *performerResolver) SceneCount(ctx context.Context, obj *models.Performer) (ret *int, err error) { var res int if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { diff --git a/pkg/api/resolver_model_studio.go b/pkg/api/resolver_model_studio.go index dac4e265e..1f866b004 100644 --- a/pkg/api/resolver_model_studio.go +++ b/pkg/api/resolver_model_studio.go @@ -23,7 +23,7 @@ func (r *studioResolver) URL(ctx context.Context, obj *models.Studio) (*string, func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*string, error) { baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - imagePath := urlbuilders.NewStudioURLBuilder(baseURL, obj.ID).GetStudioImageURL() + imagePath := urlbuilders.NewStudioURLBuilder(baseURL, obj).GetStudioImageURL() var hasImage bool if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { diff --git a/pkg/api/resolver_model_tag.go b/pkg/api/resolver_model_tag.go index 08cb50819..1cbb3acf3 100644 --- a/pkg/api/resolver_model_tag.go +++ b/pkg/api/resolver_model_tag.go @@ -31,8 +31,20 @@ func (r *tagResolver) SceneMarkerCount(ctx context.Context, obj *models.Tag) (re return &count, err } +func (r *tagResolver) PerformerCount(ctx context.Context, obj *models.Tag) (ret *int, err error) { + var count int + if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { + count, err = repo.Performer().CountByTagID(obj.ID) + return err + }); err != nil { + return nil, err + } + + return &count, err +} + func (r *tagResolver) ImagePath(ctx context.Context, obj *models.Tag) (*string, error) { baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - imagePath := urlbuilders.NewTagURLBuilder(baseURL, obj.ID).GetTagImageURL() + imagePath := urlbuilders.NewTagURLBuilder(baseURL, obj).GetTagImageURL() return &imagePath, nil } diff --git a/pkg/api/resolver_mutation_configure.go b/pkg/api/resolver_mutation_configure.go index a1051e218..34b416094 100644 --- a/pkg/api/resolver_mutation_configure.go +++ b/pkg/api/resolver_mutation_configure.go @@ -151,6 +151,8 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co refreshScraperCache = true } + config.Set(config.ScraperCertCheck, input.ScraperCertCheck) + if input.StashBoxes != nil { if err := config.ValidateStashBoxes(input.StashBoxes); err != nil { return nil, err diff --git a/pkg/api/resolver_mutation_movie.go b/pkg/api/resolver_mutation_movie.go index 320ed49da..3672fd47e 100644 --- a/pkg/api/resolver_mutation_movie.go +++ b/pkg/api/resolver_mutation_movie.go @@ -26,7 +26,7 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr // Process the base 64 encoded image string if 
input.FrontImage != nil { - _, frontimageData, err = utils.ProcessBase64Image(*input.FrontImage) + frontimageData, err = utils.ProcessImageInput(*input.FrontImage) if err != nil { return nil, err } @@ -34,7 +34,7 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr // Process the base 64 encoded image string if input.BackImage != nil { - _, backimageData, err = utils.ProcessBase64Image(*input.BackImage) + backimageData, err = utils.ProcessImageInput(*input.BackImage) if err != nil { return nil, err } @@ -126,7 +126,7 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp var frontimageData []byte frontImageIncluded := translator.hasField("front_image") if input.FrontImage != nil { - _, frontimageData, err = utils.ProcessBase64Image(*input.FrontImage) + frontimageData, err = utils.ProcessImageInput(*input.FrontImage) if err != nil { return nil, err } @@ -134,7 +134,7 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp backImageIncluded := translator.hasField("back_image") var backimageData []byte if input.BackImage != nil { - _, backimageData, err = utils.ProcessBase64Image(*input.BackImage) + backimageData, err = utils.ProcessImageInput(*input.BackImage) if err != nil { return nil, err } @@ -189,7 +189,7 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp // HACK - if front image is null and back image is not null, then set the front image // to the default image since we can't have a null front image and a non-null back image if frontimageData == nil && backimageData != nil { - _, frontimageData, _ = utils.ProcessBase64Image(models.DefaultMovieImage) + frontimageData, _ = utils.ProcessImageInput(models.DefaultMovieImage) } if err := qb.UpdateImages(movie.ID, frontimageData, backimageData); err != nil { diff --git a/pkg/api/resolver_mutation_performer.go b/pkg/api/resolver_mutation_performer.go index 790ce4cda..69eb5832c 100644 --- a/pkg/api/resolver_mutation_performer.go +++ b/pkg/api/resolver_mutation_performer.go @@ -18,7 +18,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per var err error if input.Image != nil { - _, imageData, err = utils.ProcessBase64Image(*input.Image) + imageData, err = utils.ProcessImageInput(*input.Image) } if err != nil { @@ -94,6 +94,12 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per return err } + if len(input.TagIds) > 0 { + if err := r.updatePerformerTags(qb, performer.ID, input.TagIds); err != nil { + return err + } + } + // update image table if len(imageData) > 0 { if err := qb.UpdateImage(performer.ID, imageData); err != nil { @@ -133,7 +139,7 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per var err error imageIncluded := translator.hasField("image") if input.Image != nil { - _, imageData, err = utils.ProcessBase64Image(*input.Image) + imageData, err = utils.ProcessImageInput(*input.Image) if err != nil { return nil, err } @@ -183,6 +189,13 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per return err } + // Save the tags + if translator.hasField("tag_ids") { + if err := r.updatePerformerTags(qb, performer.ID, input.TagIds); err != nil { + return err + } + } + // update image table if len(imageData) > 0 { if err := qb.UpdateImage(performer.ID, imageData); err != nil { @@ -211,6 +224,92 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per return performer, nil } 
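`PerformerUpdate` above only saves tags when `translator.hasField("tag_ids")` reports that the field was actually present in the raw GraphQL input, which is how partial updates tell an omitted field apart from an explicit null. A rough, self-contained sketch of that idea follows; it is not the real `changesetTranslator` implementation.

```go
package main

import "fmt"

// inputMap plays the role of getUpdateInputMap(ctx): the raw keys the client
// actually sent, before they were decoded into a typed input struct.
type translator struct {
	inputMap map[string]interface{}
}

// hasField reports whether the client supplied the field at all, even if its
// value was null. Absent fields are left untouched; present-but-null fields
// clear the stored value.
func (t translator) hasField(name string) bool {
	_, found := t.inputMap[name]
	return found
}

func main() {
	// Simulates an update where the client sent tag_ids (as null) but never
	// mentioned favorite.
	tr := translator{inputMap: map[string]interface{}{
		"id":      "42",
		"tag_ids": nil,
	}}

	fmt.Println(tr.hasField("tag_ids"))  // true  -> tags will be updated (cleared)
	fmt.Println(tr.hasField("favorite")) // false -> favorite is left unchanged
}
```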
+func (r *mutationResolver) updatePerformerTags(qb models.PerformerReaderWriter, performerID int, tagsIDs []string) error { + ids, err := utils.StringSliceToIntSlice(tagsIDs) + if err != nil { + return err + } + return qb.UpdateTags(performerID, ids) +} + +func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input models.BulkPerformerUpdateInput) ([]*models.Performer, error) { + performerIDs, err := utils.StringSliceToIntSlice(input.Ids) + if err != nil { + return nil, err + } + + // Populate performer from the input + updatedTime := time.Now() + + translator := changesetTranslator{ + inputMap: getUpdateInputMap(ctx), + } + + updatedPerformer := models.PerformerPartial{ + UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime}, + } + + updatedPerformer.URL = translator.nullString(input.URL, "url") + updatedPerformer.Birthdate = translator.sqliteDate(input.Birthdate, "birthdate") + updatedPerformer.Ethnicity = translator.nullString(input.Ethnicity, "ethnicity") + updatedPerformer.Country = translator.nullString(input.Country, "country") + updatedPerformer.EyeColor = translator.nullString(input.EyeColor, "eye_color") + updatedPerformer.Height = translator.nullString(input.Height, "height") + updatedPerformer.Measurements = translator.nullString(input.Measurements, "measurements") + updatedPerformer.FakeTits = translator.nullString(input.FakeTits, "fake_tits") + updatedPerformer.CareerLength = translator.nullString(input.CareerLength, "career_length") + updatedPerformer.Tattoos = translator.nullString(input.Tattoos, "tattoos") + updatedPerformer.Piercings = translator.nullString(input.Piercings, "piercings") + updatedPerformer.Aliases = translator.nullString(input.Aliases, "aliases") + updatedPerformer.Twitter = translator.nullString(input.Twitter, "twitter") + updatedPerformer.Instagram = translator.nullString(input.Instagram, "instagram") + updatedPerformer.Favorite = translator.nullBool(input.Favorite, "favorite") + + if translator.hasField("gender") { + if input.Gender != nil { + updatedPerformer.Gender = &sql.NullString{String: input.Gender.String(), Valid: true} + } else { + updatedPerformer.Gender = &sql.NullString{String: "", Valid: false} + } + } + + ret := []*models.Performer{} + + // Start the transaction and save the scene marker + if err := r.withTxn(ctx, func(repo models.Repository) error { + qb := repo.Performer() + + for _, performerID := range performerIDs { + updatedPerformer.ID = performerID + + performer, err := qb.Update(updatedPerformer) + if err != nil { + return err + } + + ret = append(ret, performer) + + // Save the tags + if translator.hasField("tag_ids") { + tagIDs, err := adjustTagIDs(qb, performerID, *input.TagIds) + if err != nil { + return err + } + + if err := qb.UpdateTags(performerID, tagIDs); err != nil { + return err + } + } + } + + return nil + }); err != nil { + return nil, err + } + + return ret, nil +} + func (r *mutationResolver) PerformerDestroy(ctx context.Context, input models.PerformerDestroyInput) (bool, error) { id, err := strconv.Atoi(input.ID) if err != nil { diff --git a/pkg/api/resolver_mutation_scene.go b/pkg/api/resolver_mutation_scene.go index 5a0ba525f..ba73aecb1 100644 --- a/pkg/api/resolver_mutation_scene.go +++ b/pkg/api/resolver_mutation_scene.go @@ -80,7 +80,7 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, translator if input.CoverImage != nil && *input.CoverImage != "" { var err error - _, coverImageData, err = utils.ProcessBase64Image(*input.CoverImage) + coverImageData, err = 
utils.ProcessImageInput(*input.CoverImage) if err != nil { return nil, err } @@ -253,7 +253,7 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.Bul // Save the tags if translator.hasField("tag_ids") { - tagIDs, err := adjustSceneTagIDs(qb, sceneID, *input.TagIds) + tagIDs, err := adjustTagIDs(qb, sceneID, *input.TagIds) if err != nil { return err } @@ -330,7 +330,11 @@ func adjustScenePerformerIDs(qb models.SceneReader, sceneID int, ids models.Bulk return adjustIDs(ret, ids), nil } -func adjustSceneTagIDs(qb models.SceneReader, sceneID int, ids models.BulkUpdateIds) (ret []int, err error) { +type tagIDsGetter interface { + GetTagIDs(id int) ([]int, error) +} + +func adjustTagIDs(qb tagIDsGetter, sceneID int, ids models.BulkUpdateIds) (ret []int, err error) { ret, err = qb.GetTagIDs(sceneID) if err != nil { return nil, err diff --git a/pkg/api/resolver_mutation_studio.go b/pkg/api/resolver_mutation_studio.go index bf1b5f312..82be5d1e9 100644 --- a/pkg/api/resolver_mutation_studio.go +++ b/pkg/api/resolver_mutation_studio.go @@ -20,7 +20,7 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio // Process the base 64 encoded image string if input.Image != nil { - _, imageData, err = utils.ProcessBase64Image(*input.Image) + imageData, err = utils.ProcessImageInput(*input.Image) if err != nil { return nil, err } @@ -96,7 +96,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio imageIncluded := translator.hasField("image") if input.Image != nil { var err error - _, imageData, err = utils.ProcessBase64Image(*input.Image) + imageData, err = utils.ProcessImageInput(*input.Image) if err != nil { return nil, err } diff --git a/pkg/api/resolver_mutation_tag.go b/pkg/api/resolver_mutation_tag.go index 90ee479d1..df11e06d0 100644 --- a/pkg/api/resolver_mutation_tag.go +++ b/pkg/api/resolver_mutation_tag.go @@ -24,7 +24,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate var err error if input.Image != nil { - _, imageData, err = utils.ProcessBase64Image(*input.Image) + imageData, err = utils.ProcessImageInput(*input.Image) if err != nil { return nil, err @@ -82,7 +82,7 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate imageIncluded := translator.hasField("image") if input.Image != nil { - _, imageData, err = utils.ProcessBase64Image(*input.Image) + imageData, err = utils.ProcessImageInput(*input.Image) if err != nil { return nil, err diff --git a/pkg/api/resolver_query_configuration.go b/pkg/api/resolver_query_configuration.go index c848375ea..cd4bbaff4 100644 --- a/pkg/api/resolver_query_configuration.go +++ b/pkg/api/resolver_query_configuration.go @@ -46,6 +46,8 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult { Stashes: config.GetStashPaths(), DatabasePath: config.GetDatabasePath(), GeneratedPath: config.GetGeneratedPath(), + ConfigFilePath: config.GetConfigFilePath(), + ScrapersPath: config.GetScrapersPath(), CachePath: config.GetCachePath(), CalculateMd5: config.IsCalculateMD5(), VideoFileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(), @@ -71,6 +73,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult { Excludes: config.GetExcludes(), ImageExcludes: config.GetImageExcludes(), ScraperUserAgent: &scraperUserAgent, + ScraperCertCheck: config.GetScraperCertCheck(), ScraperCDPPath: &scraperCDPPath, StashBoxes: config.GetStashBoxes(), } diff --git a/pkg/api/resolver_query_find_movie.go 
b/pkg/api/resolver_query_find_movie.go index 16a024260..1a66c2461 100644 --- a/pkg/api/resolver_query_find_movie.go +++ b/pkg/api/resolver_query_find_movie.go @@ -53,14 +53,3 @@ func (r *queryResolver) AllMovies(ctx context.Context) (ret []*models.Movie, err return ret, nil } - -func (r *queryResolver) AllMoviesSlim(ctx context.Context) (ret []*models.Movie, err error) { - if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { - ret, err = repo.Movie().AllSlim() - return err - }); err != nil { - return nil, err - } - - return ret, nil -} diff --git a/pkg/api/resolver_query_find_performer.go b/pkg/api/resolver_query_find_performer.go index 741d5f094..32cc46891 100644 --- a/pkg/api/resolver_query_find_performer.go +++ b/pkg/api/resolver_query_find_performer.go @@ -52,14 +52,3 @@ func (r *queryResolver) AllPerformers(ctx context.Context) (ret []*models.Perfor return ret, nil } - -func (r *queryResolver) AllPerformersSlim(ctx context.Context) (ret []*models.Performer, err error) { - if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { - ret, err = repo.Performer().AllSlim() - return err - }); err != nil { - return nil, err - } - - return ret, nil -} diff --git a/pkg/api/resolver_query_find_studio.go b/pkg/api/resolver_query_find_studio.go index 6df2ecae7..71677cb35 100644 --- a/pkg/api/resolver_query_find_studio.go +++ b/pkg/api/resolver_query_find_studio.go @@ -54,14 +54,3 @@ func (r *queryResolver) AllStudios(ctx context.Context) (ret []*models.Studio, e return ret, nil } - -func (r *queryResolver) AllStudiosSlim(ctx context.Context) (ret []*models.Studio, err error) { - if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { - ret, err = repo.Studio().AllSlim() - return err - }); err != nil { - return nil, err - } - - return ret, nil -} diff --git a/pkg/api/resolver_query_find_tag.go b/pkg/api/resolver_query_find_tag.go index c373c42bd..e44366361 100644 --- a/pkg/api/resolver_query_find_tag.go +++ b/pkg/api/resolver_query_find_tag.go @@ -53,14 +53,3 @@ func (r *queryResolver) AllTags(ctx context.Context) (ret []*models.Tag, err err return ret, nil } - -func (r *queryResolver) AllTagsSlim(ctx context.Context) (ret []*models.Tag, err error) { - if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { - ret, err = repo.Tag().AllSlim() - return err - }); err != nil { - return nil, err - } - - return ret, nil -} diff --git a/pkg/api/resolver_query_scene.go b/pkg/api/resolver_query_scene.go index bffbcfd6f..64110e70d 100644 --- a/pkg/api/resolver_query_scene.go +++ b/pkg/api/resolver_query_scene.go @@ -7,6 +7,7 @@ import ( "github.com/stashapp/stash/pkg/api/urlbuilders" "github.com/stashapp/stash/pkg/manager" + "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/models" ) @@ -29,5 +30,5 @@ func (r *queryResolver) SceneStreams(ctx context.Context, id *string) ([]*models baseURL, _ := ctx.Value(BaseURLCtxKey).(string) builder := urlbuilders.NewSceneURLBuilder(baseURL, scene.ID) - return manager.GetSceneStreamPaths(scene, builder.GetStreamURL()) + return manager.GetSceneStreamPaths(scene, builder.GetStreamURL(), config.GetMaxStreamingTranscodeSize()) } diff --git a/pkg/api/server.go b/pkg/api/server.go index 0fc201cc6..331e203f3 100644 --- a/pkg/api/server.go +++ b/pkg/api/server.go @@ -14,6 +14,7 @@ import ( "runtime/debug" "strconv" "strings" + "time" "github.com/99designs/gqlgen/handler" "github.com/go-chi/chi" @@ -135,12 +136,14 @@ func Start() { }, }) maxUploadSize := 
handler.UploadMaxSize(config.GetMaxUploadSize()) + websocketKeepAliveDuration := handler.WebsocketKeepAliveDuration(10 * time.Second) txnManager := manager.GetInstance().TxnManager resolver := &Resolver{ txnManager: txnManager, } - gqlHandler := handler.GraphQL(models.NewExecutableSchema(models.Config{Resolvers: resolver}), recoverFunc, websocketUpgrader, maxUploadSize) + + gqlHandler := handler.GraphQL(models.NewExecutableSchema(models.Config{Resolvers: resolver}), recoverFunc, websocketUpgrader, websocketKeepAliveDuration, maxUploadSize) r.Handle("/graphql", gqlHandler) r.Handle("/playground", handler.Playground("GraphQL playground", "/graphql")) diff --git a/pkg/api/urlbuilders/image.go b/pkg/api/urlbuilders/image.go index e81dd446e..9594a4530 100644 --- a/pkg/api/urlbuilders/image.go +++ b/pkg/api/urlbuilders/image.go @@ -1,25 +1,28 @@ package urlbuilders import ( + "github.com/stashapp/stash/pkg/models" "strconv" ) type ImageURLBuilder struct { - BaseURL string - ImageID string + BaseURL string + ImageID string + UpdatedAt string } -func NewImageURLBuilder(baseURL string, imageID int) ImageURLBuilder { +func NewImageURLBuilder(baseURL string, image *models.Image) ImageURLBuilder { return ImageURLBuilder{ - BaseURL: baseURL, - ImageID: strconv.Itoa(imageID), + BaseURL: baseURL, + ImageID: strconv.Itoa(image.ID), + UpdatedAt: strconv.FormatInt(image.UpdatedAt.Timestamp.Unix(), 10), } } func (b ImageURLBuilder) GetImageURL() string { - return b.BaseURL + "/image/" + b.ImageID + "/image" + return b.BaseURL + "/image/" + b.ImageID + "/image?" + b.UpdatedAt } func (b ImageURLBuilder) GetThumbnailURL() string { - return b.BaseURL + "/image/" + b.ImageID + "/thumbnail" + return b.BaseURL + "/image/" + b.ImageID + "/thumbnail?" + b.UpdatedAt } diff --git a/pkg/api/urlbuilders/movie.go b/pkg/api/urlbuilders/movie.go index 6f7694b83..c50536c96 100644 --- a/pkg/api/urlbuilders/movie.go +++ b/pkg/api/urlbuilders/movie.go @@ -1,23 +1,28 @@ package urlbuilders -import "strconv" +import ( + "github.com/stashapp/stash/pkg/models" + "strconv" +) type MovieURLBuilder struct { - BaseURL string - MovieID string + BaseURL string + MovieID string + UpdatedAt string } -func NewMovieURLBuilder(baseURL string, movieID int) MovieURLBuilder { +func NewMovieURLBuilder(baseURL string, movie *models.Movie) MovieURLBuilder { return MovieURLBuilder{ - BaseURL: baseURL, - MovieID: strconv.Itoa(movieID), + BaseURL: baseURL, + MovieID: strconv.Itoa(movie.ID), + UpdatedAt: strconv.FormatInt(movie.UpdatedAt.Timestamp.Unix(), 10), } } func (b MovieURLBuilder) GetMovieFrontImageURL() string { - return b.BaseURL + "/movie/" + b.MovieID + "/frontimage" + return b.BaseURL + "/movie/" + b.MovieID + "/frontimage?" + b.UpdatedAt } func (b MovieURLBuilder) GetMovieBackImageURL() string { - return b.BaseURL + "/movie/" + b.MovieID + "/backimage" + return b.BaseURL + "/movie/" + b.MovieID + "/backimage?" 
+ b.UpdatedAt } diff --git a/pkg/api/urlbuilders/performer.go b/pkg/api/urlbuilders/performer.go index b7c450617..e7e0b2626 100644 --- a/pkg/api/urlbuilders/performer.go +++ b/pkg/api/urlbuilders/performer.go @@ -1,19 +1,24 @@ package urlbuilders -import "strconv" +import ( + "github.com/stashapp/stash/pkg/models" + "strconv" +) type PerformerURLBuilder struct { BaseURL string PerformerID string + UpdatedAt string } -func NewPerformerURLBuilder(baseURL string, performerID int) PerformerURLBuilder { +func NewPerformerURLBuilder(baseURL string, performer *models.Performer) PerformerURLBuilder { return PerformerURLBuilder{ BaseURL: baseURL, - PerformerID: strconv.Itoa(performerID), + PerformerID: strconv.Itoa(performer.ID), + UpdatedAt: strconv.FormatInt(performer.UpdatedAt.Timestamp.Unix(), 10), } } func (b PerformerURLBuilder) GetPerformerImageURL() string { - return b.BaseURL + "/performer/" + b.PerformerID + "/image" + return b.BaseURL + "/performer/" + b.PerformerID + "/image?" + b.UpdatedAt } diff --git a/pkg/api/urlbuilders/studio.go b/pkg/api/urlbuilders/studio.go index eaa909dc4..7713aa5b9 100644 --- a/pkg/api/urlbuilders/studio.go +++ b/pkg/api/urlbuilders/studio.go @@ -1,19 +1,24 @@ package urlbuilders -import "strconv" +import ( + "github.com/stashapp/stash/pkg/models" + "strconv" +) type StudioURLBuilder struct { - BaseURL string - StudioID string + BaseURL string + StudioID string + UpdatedAt string } -func NewStudioURLBuilder(baseURL string, studioID int) StudioURLBuilder { +func NewStudioURLBuilder(baseURL string, studio *models.Studio) StudioURLBuilder { return StudioURLBuilder{ - BaseURL: baseURL, - StudioID: strconv.Itoa(studioID), + BaseURL: baseURL, + StudioID: strconv.Itoa(studio.ID), + UpdatedAt: strconv.FormatInt(studio.UpdatedAt.Timestamp.Unix(), 10), } } func (b StudioURLBuilder) GetStudioImageURL() string { - return b.BaseURL + "/studio/" + b.StudioID + "/image" + return b.BaseURL + "/studio/" + b.StudioID + "/image?" + b.UpdatedAt } diff --git a/pkg/api/urlbuilders/tag.go b/pkg/api/urlbuilders/tag.go index e3f9415c9..a0f5a27dc 100644 --- a/pkg/api/urlbuilders/tag.go +++ b/pkg/api/urlbuilders/tag.go @@ -1,19 +1,24 @@ package urlbuilders -import "strconv" +import ( + "github.com/stashapp/stash/pkg/models" + "strconv" +) type TagURLBuilder struct { - BaseURL string - TagID string + BaseURL string + TagID string + UpdatedAt string } -func NewTagURLBuilder(baseURL string, tagID int) TagURLBuilder { +func NewTagURLBuilder(baseURL string, tag *models.Tag) TagURLBuilder { return TagURLBuilder{ - BaseURL: baseURL, - TagID: strconv.Itoa(tagID), + BaseURL: baseURL, + TagID: strconv.Itoa(tag.ID), + UpdatedAt: strconv.FormatInt(tag.UpdatedAt.Timestamp.Unix(), 10), } } func (b TagURLBuilder) GetTagImageURL() string { - return b.BaseURL + "/tag/" + b.TagID + "/image" + return b.BaseURL + "/tag/" + b.TagID + "/image?" 
+ b.UpdatedAt } diff --git a/pkg/database/database.go b/pkg/database/database.go index 84fdcccf9..e4099b073 100644 --- a/pkg/database/database.go +++ b/pkg/database/database.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "os" + "sync" "time" "github.com/fvbommel/sortorder" @@ -20,8 +21,9 @@ import ( ) var DB *sqlx.DB +var WriteMu *sync.Mutex var dbPath string -var appSchemaVersion uint = 18 +var appSchemaVersion uint = 19 var databaseSchemaVersion uint const sqlite3Driver = "sqlite3ex" @@ -63,6 +65,7 @@ func Initialize(databasePath string) bool { const disableForeignKeys = false DB = open(databasePath, disableForeignKeys) + WriteMu = &sync.Mutex{} return false } @@ -77,6 +80,7 @@ func open(databasePath string, disableForeignKeys bool) *sqlx.DB { conn, err := sqlx.Open(sqlite3Driver, url) conn.SetMaxOpenConns(25) conn.SetMaxIdleConns(4) + conn.SetConnMaxLifetime(30 * time.Second) if err != nil { logger.Fatalf("db.Open(): %q\n", err) } diff --git a/pkg/database/migrations/19_performer_tags.up.sql b/pkg/database/migrations/19_performer_tags.up.sql new file mode 100644 index 000000000..fef24913a --- /dev/null +++ b/pkg/database/migrations/19_performer_tags.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE `performers_tags` ( + `performer_id` integer NOT NULL, + `tag_id` integer NOT NULL, + foreign key(`performer_id`) references `performers`(`id`) on delete CASCADE, + foreign key(`tag_id`) references `tags`(`id`) on delete CASCADE +); + +CREATE INDEX `index_performers_tags_on_tag_id` on `performers_tags` (`tag_id`); +CREATE INDEX `index_performers_tags_on_performer_id` on `performers_tags` (`performer_id`); diff --git a/pkg/image/image.go b/pkg/image/image.go index ec781afb7..d54271d66 100644 --- a/pkg/image/image.go +++ b/pkg/image/image.go @@ -59,6 +59,13 @@ func ZipFilename(zipFilename, filenameInZip string) string { return zipFilename + zipSeparator + filenameInZip } +// IsZipPath returns true if the path includes the zip separator byte, +// indicating it is within a zip file. 
+// TODO - this should be moved to utils +func IsZipPath(p string) bool { + return strings.Contains(p, zipSeparator) +} + type imageReadCloser struct { src io.ReadCloser zrc *zip.ReadCloser @@ -239,7 +246,7 @@ func Serve(w http.ResponseWriter, r *http.Request, path string) { func IsCover(img *models.Image) bool { _, fn := getFilePath(img.Path) - return fn == "cover.jpg" + return strings.HasSuffix(fn, "cover.jpg") } func GetTitle(s *models.Image) string { diff --git a/pkg/image/image_test.go b/pkg/image/image_test.go new file mode 100644 index 000000000..3188a63d5 --- /dev/null +++ b/pkg/image/image_test.go @@ -0,0 +1,34 @@ +package image + +import ( + "fmt" + "path/filepath" + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stretchr/testify/assert" +) + +func TestIsCover(t *testing.T) { + type test struct { + fn string + isCover bool + } + + tests := []test{ + {"cover.jpg", true}, + {"covernot.jpg", false}, + {"Cover.jpg", false}, + {fmt.Sprintf("subDir%scover.jpg", string(filepath.Separator)), true}, + {"endsWithcover.jpg", true}, + {"cover.png", false}, + } + + assert := assert.New(t) + for _, tc := range tests { + img := &models.Image{ + Path: tc.fn, + } + assert.Equal(tc.isCover, IsCover(img), "expected: %t for %s", tc.isCover, tc.fn) + } +} diff --git a/pkg/logger/logger.go b/pkg/logger/logger.go index a01eb1629..8e2de2bbf 100644 --- a/pkg/logger/logger.go +++ b/pkg/logger/logger.go @@ -29,6 +29,11 @@ var logBuffer []LogItem // Init initialises the logger based on a logging configuration func Init(logFile string, logOut bool, logLevel string) { var file *os.File + customFormatter := new(logrus.TextFormatter) + customFormatter.TimestampFormat = "2006-01-02 15:04:05" + customFormatter.ForceColors = true + customFormatter.FullTimestamp = true + logger.SetFormatter(customFormatter) if logFile != "" { var err error diff --git a/pkg/manager/config/config.go b/pkg/manager/config/config.go index 6ef58f6b9..a858d6722 100644 --- a/pkg/manager/config/config.go +++ b/pkg/manager/config/config.go @@ -86,6 +86,7 @@ const SessionStoreKey = "session_store_key" // scraping options const ScrapersPath = "scrapers_path" const ScraperUserAgent = "scraper_user_agent" +const ScraperCertCheck = "scraper_cert_check" const ScraperCDPPath = "scraper_cdp_path" // stash-box options @@ -145,6 +146,10 @@ func GetConfigPath() string { return filepath.Dir(configFileUsed) } +func GetConfigFilePath() string { + return viper.ConfigFileUsed() +} + func GetStashPaths() []*models.StashConfig { var ret []*models.StashConfig if err := viper.UnmarshalKey(Stash, &ret); err != nil || len(ret) == 0 { @@ -274,6 +279,17 @@ func GetScraperCDPPath() string { return viper.GetString(ScraperCDPPath) } +// GetScraperCertCheck returns true if the scraper should check for insecure +// certificates when fetching an image or a page. 
+func GetScraperCertCheck() bool { + ret := true + if viper.IsSet(ScraperCertCheck) { + ret = viper.GetBool(ScraperCertCheck) + } + + return ret +} + func GetStashBoxes() []*models.StashBox { var boxes []*models.StashBox viper.UnmarshalKey(StashBoxes, &boxes) diff --git a/pkg/manager/jsonschema/performer.go b/pkg/manager/jsonschema/performer.go index 52122dd0a..a145f9bce 100644 --- a/pkg/manager/jsonschema/performer.go +++ b/pkg/manager/jsonschema/performer.go @@ -2,9 +2,9 @@ package jsonschema import ( "fmt" - "github.com/json-iterator/go" "os" + jsoniter "github.com/json-iterator/go" "github.com/stashapp/stash/pkg/models" ) @@ -26,6 +26,7 @@ type Performer struct { Piercings string `json:"piercings,omitempty"` Aliases string `json:"aliases,omitempty"` Favorite bool `json:"favorite,omitempty"` + Tags []string `json:"tags,omitempty"` Image string `json:"image,omitempty"` CreatedAt models.JSONTime `json:"created_at,omitempty"` UpdatedAt models.JSONTime `json:"updated_at,omitempty"` diff --git a/pkg/manager/manager_tasks.go b/pkg/manager/manager_tasks.go index cf36f4c67..28e42022b 100644 --- a/pkg/manager/manager_tasks.go +++ b/pkg/manager/manager_tasks.go @@ -215,13 +215,13 @@ func (s *singleton) Scan(input models.ScanMetadataInput) { task := ScanTask{ TxnManager: s.TxnManager, FilePath: path, - UseFileMetadata: input.UseFileMetadata, - StripFileExtension: input.StripFileExtension, + UseFileMetadata: utils.IsTrue(input.UseFileMetadata), + StripFileExtension: utils.IsTrue(input.StripFileExtension), fileNamingAlgorithm: fileNamingAlgo, calculateMD5: calculateMD5, - GeneratePreview: input.ScanGeneratePreviews, - GenerateImagePreview: input.ScanGenerateImagePreviews, - GenerateSprite: input.ScanGenerateSprites, + GeneratePreview: utils.IsTrue(input.ScanGeneratePreviews), + GenerateImagePreview: utils.IsTrue(input.ScanGenerateImagePreviews), + GenerateSprite: utils.IsTrue(input.ScanGenerateSprites), } go task.Start(&wg) diff --git a/pkg/manager/scene.go b/pkg/manager/scene.go index 55495da81..52ff02e5f 100644 --- a/pkg/manager/scene.go +++ b/pkg/manager/scene.go @@ -194,7 +194,38 @@ func GetSceneFileContainer(scene *models.Scene) (ffmpeg.Container, error) { return container, nil } -func GetSceneStreamPaths(scene *models.Scene, directStreamURL string) ([]*models.SceneStreamEndpoint, error) { +func includeSceneStreamPath(scene *models.Scene, streamingResolution models.StreamingResolutionEnum, maxStreamingTranscodeSize models.StreamingResolutionEnum) bool { + // convert StreamingResolutionEnum to ResolutionEnum so we can get the min + // resolution + convertedRes := models.ResolutionEnum(streamingResolution) + + minResolution := int64(convertedRes.GetMinResolution()) + sceneResolution := scene.GetMinResolution() + + // don't include if scene resolution is smaller than the streamingResolution + if sceneResolution != 0 && sceneResolution < minResolution { + return false + } + + // if we always allow everything, then return true + if maxStreamingTranscodeSize == models.StreamingResolutionEnumOriginal { + return true + } + + // convert StreamingResolutionEnum to ResolutionEnum + maxStreamingResolution := models.ResolutionEnum(maxStreamingTranscodeSize) + return int64(maxStreamingResolution.GetMinResolution()) >= minResolution +} + +func makeStreamEndpoint(streamURL string, streamingResolution models.StreamingResolutionEnum, mimeType, label string) *models.SceneStreamEndpoint { + return &models.SceneStreamEndpoint{ + URL: fmt.Sprintf("%s?resolution=%s", streamURL, streamingResolution.String()), + 
MimeType: &mimeType, + Label: &label, + } +} + +func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreamingTranscodeSize models.StreamingResolutionEnum) ([]*models.SceneStreamEndpoint, error) { if scene == nil { return nil, fmt.Errorf("nil scene") } @@ -248,107 +279,51 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string) ([]*models // Note: These have the wrong mime type intentionally to allow jwplayer to selection between mp4/webm webmLabelFourK := "WEBM 4K (2160p)" // "FOUR_K" webmLabelFullHD := "WEBM Full HD (1080p)" // "FULL_HD" - webmLabelStardardHD := "WEBM HD (720p)" // "STANDARD_HD" + webmLabelStandardHD := "WEBM HD (720p)" // "STANDARD_HD" webmLabelStandard := "WEBM Standard (480p)" // "STANDARD" webmLabelLow := "WEBM Low (240p)" // "LOW" - if !scene.Height.Valid || scene.Height.Int64 >= 2160 { - new := models.SceneStreamEndpoint{ - URL: directStreamURL + ".webm?resolution=FOUR_K", - MimeType: &mimeMp4, - Label: &webmLabelFourK, - } - ret = append(ret, &new) - } - - if !scene.Height.Valid || scene.Height.Int64 >= 1080 { - new := models.SceneStreamEndpoint{ - URL: directStreamURL + ".webm?resolution=FULL_HD", - MimeType: &mimeMp4, - Label: &webmLabelFullHD, - } - ret = append(ret, &new) - } - - if !scene.Height.Valid || scene.Height.Int64 >= 720 { - new := models.SceneStreamEndpoint{ - URL: directStreamURL + ".webm?resolution=STANDARD_HD", - MimeType: &mimeMp4, - Label: &webmLabelStardardHD, - } - ret = append(ret, &new) - } - - if !scene.Height.Valid || scene.Height.Int64 >= 480 { - new := models.SceneStreamEndpoint{ - URL: directStreamURL + ".webm?resolution=STANDARD", - MimeType: &mimeMp4, - Label: &webmLabelStandard, - } - ret = append(ret, &new) - } - - if !scene.Height.Valid || scene.Height.Int64 >= 240 { - new := models.SceneStreamEndpoint{ - URL: directStreamURL + ".webm?resolution=LOW", - MimeType: &mimeMp4, - Label: &webmLabelLow, - } - ret = append(ret, &new) - } - // Setup up lower quality transcoding options (MP4) mp4LabelFourK := "MP4 4K (2160p)" // "FOUR_K" mp4LabelFullHD := "MP4 Full HD (1080p)" // "FULL_HD" - mp4LabelStardardHD := "MP4 HD (720p)" // "STANDARD_HD" + mp4LabelStandardHD := "MP4 HD (720p)" // "STANDARD_HD" mp4LabelStandard := "MP4 Standard (480p)" // "STANDARD" mp4LabelLow := "MP4 Low (240p)" // "LOW" - if !scene.Height.Valid || scene.Height.Int64 >= 2160 { - new := models.SceneStreamEndpoint{ - URL: directStreamURL + ".mp4?resolution=FOUR_K", - MimeType: &mimeMp4, - Label: &mp4LabelFourK, - } - ret = append(ret, &new) + var webmStreams []*models.SceneStreamEndpoint + var mp4Streams []*models.SceneStreamEndpoint + + webmURL := directStreamURL + ".webm" + mp4URL := directStreamURL + ".mp4" + + if includeSceneStreamPath(scene, models.StreamingResolutionEnumFourK, maxStreamingTranscodeSize) { + webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumFourK, mimeMp4, webmLabelFourK)) + mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumFourK, mimeMp4, mp4LabelFourK)) } - if !scene.Height.Valid || scene.Height.Int64 >= 1080 { - new := models.SceneStreamEndpoint{ - URL: directStreamURL + ".mp4?resolution=FULL_HD", - MimeType: &mimeMp4, - Label: &mp4LabelFullHD, - } - ret = append(ret, &new) + if includeSceneStreamPath(scene, models.StreamingResolutionEnumFullHd, maxStreamingTranscodeSize) { + webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumFullHd, mimeMp4, webmLabelFullHD)) + mp4Streams = 
append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumFullHd, mimeMp4, mp4LabelFullHD)) } - if !scene.Height.Valid || scene.Height.Int64 >= 720 { - new := models.SceneStreamEndpoint{ - URL: directStreamURL + ".mp4?resolution=STANDARD_HD", - MimeType: &mimeMp4, - Label: &mp4LabelStardardHD, - } - ret = append(ret, &new) + if includeSceneStreamPath(scene, models.StreamingResolutionEnumStandardHd, maxStreamingTranscodeSize) { + webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumStandardHd, mimeMp4, webmLabelStandardHD)) + mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumStandardHd, mimeMp4, mp4LabelStandardHD)) } - if !scene.Height.Valid || scene.Height.Int64 >= 480 { - new := models.SceneStreamEndpoint{ - URL: directStreamURL + ".mp4?resolution=STANDARD", - MimeType: &mimeMp4, - Label: &mp4LabelStandard, - } - ret = append(ret, &new) + if includeSceneStreamPath(scene, models.StreamingResolutionEnumStandard, maxStreamingTranscodeSize) { + webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumStandard, mimeMp4, webmLabelStandard)) + mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumStandard, mimeMp4, mp4LabelStandard)) } - if !scene.Height.Valid || scene.Height.Int64 >= 240 { - new := models.SceneStreamEndpoint{ - URL: directStreamURL + ".mp4?resolution=LOW", - MimeType: &mimeMp4, - Label: &mp4LabelLow, - } - ret = append(ret, &new) + if includeSceneStreamPath(scene, models.StreamingResolutionEnumLow, maxStreamingTranscodeSize) { + webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumLow, mimeMp4, webmLabelLow)) + mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumLow, mimeMp4, mp4LabelLow)) } + ret = append(ret, webmStreams...) + ret = append(ret, mp4Streams...) 
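+ // Illustrative effect of the gating above: a scene whose smaller dimension is
+ // 720 never gets the FOUR_K entries, because it falls below that resolution's
+ // minimum; with maxStreamingTranscodeSize set to LOW only the 240p WEBM/MP4
+ // entries pass the size cap; and StreamingResolutionEnumOriginal skips the
+ // size cap entirely, leaving the scene's own resolution as the only limit.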
+ defaultStreams := []*models.SceneStreamEndpoint{ { URL: directStreamURL + ".webm", diff --git a/pkg/manager/task_autotag.go b/pkg/manager/task_autotag.go index c9077b50d..cbd7cdc32 100644 --- a/pkg/manager/task_autotag.go +++ b/pkg/manager/task_autotag.go @@ -4,6 +4,7 @@ import ( "context" "database/sql" "fmt" + "path/filepath" "strings" "sync" @@ -38,13 +39,56 @@ func (t *AutoTagTask) getQueryRegex(name string) string { return ret } +func (t *AutoTagTask) getQueryFilter(regex string) *models.SceneFilterType { + organized := false + ret := &models.SceneFilterType{ + Path: &models.StringCriterionInput{ + Modifier: models.CriterionModifierMatchesRegex, + Value: "(?i)" + regex, + }, + Organized: &organized, + } + + sep := string(filepath.Separator) + + var or *models.SceneFilterType + for _, p := range t.paths { + newOr := &models.SceneFilterType{} + if or == nil { + ret.And = newOr + } else { + or.Or = newOr + } + + or = newOr + + if !strings.HasSuffix(p, sep) { + p = p + sep + } + + or.Path = &models.StringCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: p + "%", + } + } + + return ret +} + +func (t *AutoTagTask) getFindFilter() *models.FindFilterType { + perPage := 0 + return &models.FindFilterType{ + PerPage: &perPage, + } +} + func (t *AutoTagPerformerTask) autoTagPerformer() { regex := t.getQueryRegex(t.performer.Name.String) if err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error { qb := r.Scene() - scenes, err := qb.QueryForAutoTag(regex, t.paths) + scenes, _, err := qb.Query(t.getQueryFilter(regex), t.getFindFilter()) if err != nil { return fmt.Errorf("Error querying scenes with regex '%s': %s", regex, err.Error()) @@ -84,7 +128,7 @@ func (t *AutoTagStudioTask) autoTagStudio() { if err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error { qb := r.Scene() - scenes, err := qb.QueryForAutoTag(regex, t.paths) + scenes, _, err := qb.Query(t.getQueryFilter(regex), t.getFindFilter()) if err != nil { return fmt.Errorf("Error querying scenes with regex '%s': %s", regex, err.Error()) @@ -133,7 +177,7 @@ func (t *AutoTagTagTask) autoTagTag() { if err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error { qb := r.Scene() - scenes, err := qb.QueryForAutoTag(regex, t.paths) + scenes, _, err := qb.Query(t.getQueryFilter(regex), t.getFindFilter()) if err != nil { return fmt.Errorf("Error querying scenes with regex '%s': %s", regex, err.Error()) diff --git a/pkg/manager/task_export.go b/pkg/manager/task_export.go index dbcef2e46..b949b9389 100644 --- a/pkg/manager/task_export.go +++ b/pkg/manager/task_export.go @@ -725,6 +725,18 @@ func (t *ExportTask) exportPerformer(wg *sync.WaitGroup, jobChan <-chan *models. 
continue } + tags, err := repo.Tag().FindByPerformerID(p.ID) + if err != nil { + logger.Errorf("[performers] <%s> error getting performer tags: %s", p.Checksum, err.Error()) + continue + } + + newPerformerJSON.Tags = tag.GetNames(tags) + + if t.includeDependencies { + t.tags.IDs = utils.IntAppendUniques(t.tags.IDs, tag.GetIDs(tags)) + } + performerJSON, err := t.json.getPerformer(p.Checksum) if err != nil { logger.Debugf("[performers] error reading performer json: %s", err.Error()) diff --git a/pkg/manager/task_import.go b/pkg/manager/task_import.go index dc1f12ec2..d5f8b720b 100644 --- a/pkg/manager/task_import.go +++ b/pkg/manager/task_import.go @@ -209,6 +209,7 @@ func (t *ImportTask) ImportPerformers(ctx context.Context) { readerWriter := r.Performer() importer := &performer.Importer{ ReaderWriter: readerWriter, + TagWriter: r.Tag(), Input: *performerJSON, } diff --git a/pkg/manager/task_scan.go b/pkg/manager/task_scan.go index f89600149..d35432f4a 100644 --- a/pkg/manager/task_scan.go +++ b/pkg/manager/task_scan.go @@ -315,14 +315,22 @@ func (t *ScanTask) associateGallery(wg *sizedwaitgroup.SizedWaitGroup) { scene, _ := sqb.FindByPath(scenePath) // found related Scene if scene != nil { - logger.Infof("associate: Gallery %s is related to scene: %d", t.FilePath, scene.ID) - - if err := sqb.UpdateGalleries(scene.ID, []int{g.ID}); err != nil { - return err + sceneGalleries, _ := sqb.FindByGalleryID(g.ID) // check if gallery is already associated to the scene + isAssoc := false + for _, sg := range sceneGalleries { + if scene.ID == sg.ID { + isAssoc = true + break + } + } + if !isAssoc { + logger.Infof("associate: Gallery %s is related to scene: %d", t.FilePath, scene.ID) + if err := sqb.UpdateGalleries(scene.ID, []int{g.ID}); err != nil { + return err + } } } } - return nil }); err != nil { logger.Error(err.Error()) @@ -1044,6 +1052,12 @@ func walkFilesToScan(s *models.StashConfig, f filepath.WalkFunc) error { excludeVidRegex := generateRegexps(config.GetExcludes()) excludeImgRegex := generateRegexps(config.GetImageExcludes()) + // don't scan zip images directly + if image.IsZipPath(s.Path) { + logger.Warnf("Cannot rescan zip image %s. Rescan zip gallery instead.", s.Path) + return nil + } + generatedPath := config.GetGeneratedPath() return utils.SymWalk(s.Path, func(path string, info os.FileInfo, err error) error { diff --git a/pkg/models/extension_resolution.go b/pkg/models/extension_resolution.go new file mode 100644 index 000000000..864fd4421 --- /dev/null +++ b/pkg/models/extension_resolution.go @@ -0,0 +1,65 @@ +package models + +var resolutionMax = []int{ + 240, + 360, + 480, + 540, + 720, + 1080, + 1440, + 1920, + 2160, + 2880, + 3384, + 4320, + 0, +} + +// GetMaxResolution returns the maximum width or height that media must be +// to qualify as this resolution. A return value of 0 means that there is no +// maximum. +func (r *ResolutionEnum) GetMaxResolution() int { + if !r.IsValid() { + return 0 + } + + // sanity check - length of arrays must be the same + if len(resolutionMax) != len(AllResolutionEnum) { + panic("resolutionMax array length != AllResolutionEnum array length") + } + + for i, rr := range AllResolutionEnum { + if rr == *r { + return resolutionMax[i] + } + } + + return 0 +} + +// GetMinResolution returns the minimum width or height that media must be +// to qualify as this resolution. 
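+// The lower bound for AllResolutionEnum[i] is resolutionMax[i-1], so the two
+// slices partition dimensions into contiguous buckets: the first entry has no
+// lower bound and the trailing 0 entry has no upper bound.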
+func (r *ResolutionEnum) GetMinResolution() int { + if !r.IsValid() { + return 0 + } + + // sanity check - length of arrays must be the same + if len(resolutionMax) != len(AllResolutionEnum) { + panic("resolutionMax array length != AllResolutionEnum array length") + } + + // use the previous resolution max as this resolution min + for i, rr := range AllResolutionEnum { + if rr == *r { + if i > 0 { + return resolutionMax[i-1] + } + + return 0 + } + } + + return 0 +} diff --git a/pkg/models/mocks/GalleryReaderWriter.go b/pkg/models/mocks/GalleryReaderWriter.go index e3eb879d9..3585c3036 100644 --- a/pkg/models/mocks/GalleryReaderWriter.go +++ b/pkg/models/mocks/GalleryReaderWriter.go @@ -300,8 +300,8 @@ func (_m *GalleryReaderWriter) GetPerformerIDs(galleryID int) ([]int, error) { return r0, r1 } -// GetTagIDs provides a mock function with given fields: galleryID -func (_m *GalleryReaderWriter) GetTagIDs(galleryID int) ([]int, error) { +// GetSceneIDs provides a mock function with given fields: galleryID +func (_m *GalleryReaderWriter) GetSceneIDs(galleryID int) ([]int, error) { ret := _m.Called(galleryID) var r0 []int @@ -323,8 +323,8 @@ func (_m *GalleryReaderWriter) GetTagIDs(galleryID int) ([]int, error) { return r0, r1 } -// GetSceneIDs provides a mock function with given fields: galleryID -func (_m *GalleryReaderWriter) GetSceneIDs(galleryID int) ([]int, error) { +// GetTagIDs provides a mock function with given fields: galleryID +func (_m *GalleryReaderWriter) GetTagIDs(galleryID int) ([]int, error) { ret := _m.Called(galleryID) var r0 []int @@ -464,20 +464,6 @@ func (_m *GalleryReaderWriter) UpdatePerformers(galleryID int, performerIDs []in return r0 } -// UpdateTags provides a mock function with given fields: galleryID, tagIDs -func (_m *GalleryReaderWriter) UpdateTags(galleryID int, tagIDs []int) error { - ret := _m.Called(galleryID, tagIDs) - - var r0 error - if rf, ok := ret.Get(0).(func(int, []int) error); ok { - r0 = rf(galleryID, tagIDs) - } else { - r0 = ret.Error(0) - } - - return r0 -} - // UpdateScenes provides a mock function with given fields: galleryID, sceneIDs func (_m *GalleryReaderWriter) UpdateScenes(galleryID int, sceneIDs []int) error { ret := _m.Called(galleryID, sceneIDs) @@ -491,3 +477,17 @@ func (_m *GalleryReaderWriter) UpdateScenes(galleryID int, sceneIDs []int) error return r0 } + +// UpdateTags provides a mock function with given fields: galleryID, tagIDs +func (_m *GalleryReaderWriter) UpdateTags(galleryID int, tagIDs []int) error { + ret := _m.Called(galleryID, tagIDs) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []int) error); ok { + r0 = rf(galleryID, tagIDs) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/pkg/models/mocks/MovieReaderWriter.go b/pkg/models/mocks/MovieReaderWriter.go index be9ec5dd6..8cf71e4a5 100644 --- a/pkg/models/mocks/MovieReaderWriter.go +++ b/pkg/models/mocks/MovieReaderWriter.go @@ -35,29 +35,6 @@ func (_m *MovieReaderWriter) All() ([]*models.Movie, error) { return r0, r1 } -// AllSlim provides a mock function with given fields: -func (_m *MovieReaderWriter) AllSlim() ([]*models.Movie, error) { - ret := _m.Called() - - var r0 []*models.Movie - if rf, ok := ret.Get(0).(func() []*models.Movie); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]*models.Movie) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func() error); ok { - r1 = rf() - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - // Count provides a mock function with given fields: func (_m *MovieReaderWriter) 
Count() (int, error) { ret := _m.Called() diff --git a/pkg/models/mocks/PerformerReaderWriter.go b/pkg/models/mocks/PerformerReaderWriter.go index 24cdc5bbf..60575ab3b 100644 --- a/pkg/models/mocks/PerformerReaderWriter.go +++ b/pkg/models/mocks/PerformerReaderWriter.go @@ -35,29 +35,6 @@ func (_m *PerformerReaderWriter) All() ([]*models.Performer, error) { return r0, r1 } -// AllSlim provides a mock function with given fields: -func (_m *PerformerReaderWriter) AllSlim() ([]*models.Performer, error) { - ret := _m.Called() - - var r0 []*models.Performer - if rf, ok := ret.Get(0).(func() []*models.Performer); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]*models.Performer) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func() error); ok { - r1 = rf() - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - // Count provides a mock function with given fields: func (_m *PerformerReaderWriter) Count() (int, error) { ret := _m.Called() @@ -79,6 +56,27 @@ func (_m *PerformerReaderWriter) Count() (int, error) { return r0, r1 } +// CountByTagID provides a mock function with given fields: tagID +func (_m *PerformerReaderWriter) CountByTagID(tagID int) (int, error) { + ret := _m.Called(tagID) + + var r0 int + if rf, ok := ret.Get(0).(func(int) int); ok { + r0 = rf(tagID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(int) error); ok { + r1 = rf(tagID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Create provides a mock function with given fields: newPerformer func (_m *PerformerReaderWriter) Create(newPerformer models.Performer) (*models.Performer, error) { ret := _m.Called(newPerformer) @@ -337,6 +335,29 @@ func (_m *PerformerReaderWriter) GetStashIDs(performerID int) ([]*models.StashID return r0, r1 } +// GetTagIDs provides a mock function with given fields: sceneID +func (_m *PerformerReaderWriter) GetTagIDs(sceneID int) ([]int, error) { + ret := _m.Called(sceneID) + + var r0 []int + if rf, ok := ret.Get(0).(func(int) []int); ok { + r0 = rf(sceneID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]int) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(int) error); ok { + r1 = rf(sceneID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Query provides a mock function with given fields: performerFilter, findFilter func (_m *PerformerReaderWriter) Query(performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) { ret := _m.Called(performerFilter, findFilter) @@ -440,3 +461,17 @@ func (_m *PerformerReaderWriter) UpdateStashIDs(performerID int, stashIDs []mode return r0 } + +// UpdateTags provides a mock function with given fields: sceneID, tagIDs +func (_m *PerformerReaderWriter) UpdateTags(sceneID int, tagIDs []int) error { + ret := _m.Called(sceneID, tagIDs) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []int) error); ok { + r0 = rf(sceneID, tagIDs) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/pkg/models/mocks/SceneReaderWriter.go b/pkg/models/mocks/SceneReaderWriter.go index 386d93130..0e5295759 100644 --- a/pkg/models/mocks/SceneReaderWriter.go +++ b/pkg/models/mocks/SceneReaderWriter.go @@ -300,6 +300,29 @@ func (_m *SceneReaderWriter) FindByChecksum(checksum string) (*models.Scene, err return r0, r1 } +// FindByGalleryID provides a mock function with given fields: performerID +func (_m *SceneReaderWriter) FindByGalleryID(performerID int) ([]*models.Scene, error) { + ret := _m.Called(performerID) + + 
var r0 []*models.Scene + if rf, ok := ret.Get(0).(func(int) []*models.Scene); ok { + r0 = rf(performerID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(int) error); ok { + r1 = rf(performerID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByMovieID provides a mock function with given fields: movieID func (_m *SceneReaderWriter) FindByMovieID(movieID int) ([]*models.Scene, error) { ret := _m.Called(movieID) @@ -392,29 +415,6 @@ func (_m *SceneReaderWriter) FindByPerformerID(performerID int) ([]*models.Scene return r0, r1 } -// FindByGalleryID provides a mock function with given fields: galleryID -func (_m *SceneReaderWriter) FindByGalleryID(galleryID int) ([]*models.Scene, error) { - ret := _m.Called(galleryID) - - var r0 []*models.Scene - if rf, ok := ret.Get(0).(func(int) []*models.Scene); ok { - r0 = rf(galleryID) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]*models.Scene) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(int) error); ok { - r1 = rf(galleryID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - // FindMany provides a mock function with given fields: ids func (_m *SceneReaderWriter) FindMany(ids []int) ([]*models.Scene, error) { ret := _m.Called(ids) @@ -461,6 +461,29 @@ func (_m *SceneReaderWriter) GetCover(sceneID int) ([]byte, error) { return r0, r1 } +// GetGalleryIDs provides a mock function with given fields: sceneID +func (_m *SceneReaderWriter) GetGalleryIDs(sceneID int) ([]int, error) { + ret := _m.Called(sceneID) + + var r0 []int + if rf, ok := ret.Get(0).(func(int) []int); ok { + r0 = rf(sceneID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]int) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(int) error); ok { + r1 = rf(sceneID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetMovies provides a mock function with given fields: sceneID func (_m *SceneReaderWriter) GetMovies(sceneID int) ([]models.MoviesScenes, error) { ret := _m.Called(sceneID) @@ -507,8 +530,31 @@ func (_m *SceneReaderWriter) GetPerformerIDs(sceneID int) ([]int, error) { return r0, r1 } -// GetGalleryIDs provides a mock function with given fields: sceneID -func (_m *SceneReaderWriter) GetGalleryIDs(sceneID int) ([]int, error) { +// GetStashIDs provides a mock function with given fields: sceneID +func (_m *SceneReaderWriter) GetStashIDs(sceneID int) ([]*models.StashID, error) { + ret := _m.Called(sceneID) + + var r0 []*models.StashID + if rf, ok := ret.Get(0).(func(int) []*models.StashID); ok { + r0 = rf(sceneID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.StashID) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(int) error); ok { + r1 = rf(sceneID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetTagIDs provides a mock function with given fields: sceneID +func (_m *SceneReaderWriter) GetTagIDs(sceneID int) ([]int, error) { ret := _m.Called(sceneID) var r0 []int @@ -530,52 +576,6 @@ func (_m *SceneReaderWriter) GetGalleryIDs(sceneID int) ([]int, error) { return r0, r1 } -// GetStashIDs provides a mock function with given fields: performerID -func (_m *SceneReaderWriter) GetStashIDs(performerID int) ([]*models.StashID, error) { - ret := _m.Called(performerID) - - var r0 []*models.StashID - if rf, ok := ret.Get(0).(func(int) []*models.StashID); ok { - r0 = rf(performerID) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]*models.StashID) - } - } - - var r1 error - if rf, ok := 
ret.Get(1).(func(int) error); ok { - r1 = rf(performerID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetTagIDs provides a mock function with given fields: imageID -func (_m *SceneReaderWriter) GetTagIDs(imageID int) ([]int, error) { - ret := _m.Called(imageID) - - var r0 []int - if rf, ok := ret.Get(0).(func(int) []int); ok { - r0 = rf(imageID) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]int) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(int) error); ok { - r1 = rf(imageID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - // IncrementOCounter provides a mock function with given fields: id func (_m *SceneReaderWriter) IncrementOCounter(id int) (int, error) { ret := _m.Called(id) @@ -766,6 +766,20 @@ func (_m *SceneReaderWriter) UpdateFull(updatedScene models.Scene) (*models.Scen return r0, r1 } +// UpdateGalleries provides a mock function with given fields: sceneID, galleryIDs +func (_m *SceneReaderWriter) UpdateGalleries(sceneID int, galleryIDs []int) error { + ret := _m.Called(sceneID, galleryIDs) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []int) error); ok { + r0 = rf(sceneID, galleryIDs) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // UpdateMovies provides a mock function with given fields: sceneID, movies func (_m *SceneReaderWriter) UpdateMovies(sceneID int, movies []models.MoviesScenes) error { ret := _m.Called(sceneID, movies) @@ -794,20 +808,6 @@ func (_m *SceneReaderWriter) UpdatePerformers(sceneID int, performerIDs []int) e return r0 } -// UpdateGalleries provides a mock function with given fields: sceneID, galleryIDs -func (_m *SceneReaderWriter) UpdateGalleries(sceneID int, galleryIDs []int) error { - ret := _m.Called(sceneID, galleryIDs) - - var r0 error - if rf, ok := ret.Get(0).(func(int, []int) error); ok { - r0 = rf(sceneID, galleryIDs) - } else { - r0 = ret.Error(0) - } - - return r0 -} - // UpdateStashIDs provides a mock function with given fields: sceneID, stashIDs func (_m *SceneReaderWriter) UpdateStashIDs(sceneID int, stashIDs []models.StashID) error { ret := _m.Called(sceneID, stashIDs) diff --git a/pkg/models/mocks/StudioReaderWriter.go b/pkg/models/mocks/StudioReaderWriter.go index bd182c95e..fb9c02d7d 100644 --- a/pkg/models/mocks/StudioReaderWriter.go +++ b/pkg/models/mocks/StudioReaderWriter.go @@ -35,29 +35,6 @@ func (_m *StudioReaderWriter) All() ([]*models.Studio, error) { return r0, r1 } -// AllSlim provides a mock function with given fields: -func (_m *StudioReaderWriter) AllSlim() ([]*models.Studio, error) { - ret := _m.Called() - - var r0 []*models.Studio - if rf, ok := ret.Get(0).(func() []*models.Studio); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]*models.Studio) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func() error); ok { - r1 = rf() - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - // Count provides a mock function with given fields: func (_m *StudioReaderWriter) Count() (int, error) { ret := _m.Called() diff --git a/pkg/models/mocks/TagReaderWriter.go b/pkg/models/mocks/TagReaderWriter.go index 3d258ea98..65dcd8b89 100644 --- a/pkg/models/mocks/TagReaderWriter.go +++ b/pkg/models/mocks/TagReaderWriter.go @@ -35,29 +35,6 @@ func (_m *TagReaderWriter) All() ([]*models.Tag, error) { return r0, r1 } -// AllSlim provides a mock function with given fields: -func (_m *TagReaderWriter) AllSlim() ([]*models.Tag, error) { - ret := _m.Called() - - var r0 []*models.Tag - if rf, ok := ret.Get(0).(func() []*models.Tag); ok { - r0 = rf() - } 
else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]*models.Tag) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func() error); ok { - r1 = rf() - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - // Count provides a mock function with given fields: func (_m *TagReaderWriter) Count() (int, error) { ret := _m.Called() @@ -245,6 +222,29 @@ func (_m *TagReaderWriter) FindByNames(names []string, nocase bool) ([]*models.T return r0, r1 } +// FindByPerformerID provides a mock function with given fields: performerID +func (_m *TagReaderWriter) FindByPerformerID(performerID int) ([]*models.Tag, error) { + ret := _m.Called(performerID) + + var r0 []*models.Tag + if rf, ok := ret.Get(0).(func(int) []*models.Tag); ok { + r0 = rf(performerID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Tag) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(int) error); ok { + r1 = rf(performerID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindBySceneID provides a mock function with given fields: sceneID func (_m *TagReaderWriter) FindBySceneID(sceneID int) ([]*models.Tag, error) { ret := _m.Called(sceneID) diff --git a/pkg/models/model_scene.go b/pkg/models/model_scene.go index 3d1abbf76..40bcd43e9 100644 --- a/pkg/models/model_scene.go +++ b/pkg/models/model_scene.go @@ -84,6 +84,14 @@ func (s Scene) GetHash(hashAlgorithm HashAlgorithm) string { panic("unknown hash algorithm") } +func (s Scene) GetMinResolution() int64 { + if s.Width.Int64 < s.Height.Int64 { + return s.Width.Int64 + } + + return s.Height.Int64 +} + // SceneFileType represents the file metadata for a scene. type SceneFileType struct { Size *string `graphql:"size" json:"size"` diff --git a/pkg/models/model_scraped_item.go b/pkg/models/model_scraped_item.go index a3f6aff44..e9fa33118 100644 --- a/pkg/models/model_scraped_item.go +++ b/pkg/models/model_scraped_item.go @@ -24,43 +24,45 @@ type ScrapedItem struct { } type ScrapedPerformer struct { - Name *string `graphql:"name" json:"name"` - Gender *string `graphql:"gender" json:"gender"` - URL *string `graphql:"url" json:"url"` - Twitter *string `graphql:"twitter" json:"twitter"` - Instagram *string `graphql:"instagram" json:"instagram"` - Birthdate *string `graphql:"birthdate" json:"birthdate"` - Ethnicity *string `graphql:"ethnicity" json:"ethnicity"` - Country *string `graphql:"country" json:"country"` - EyeColor *string `graphql:"eye_color" json:"eye_color"` - Height *string `graphql:"height" json:"height"` - Measurements *string `graphql:"measurements" json:"measurements"` - FakeTits *string `graphql:"fake_tits" json:"fake_tits"` - CareerLength *string `graphql:"career_length" json:"career_length"` - Tattoos *string `graphql:"tattoos" json:"tattoos"` - Piercings *string `graphql:"piercings" json:"piercings"` - Aliases *string `graphql:"aliases" json:"aliases"` - Image *string `graphql:"image" json:"image"` + Name *string `graphql:"name" json:"name"` + Gender *string `graphql:"gender" json:"gender"` + URL *string `graphql:"url" json:"url"` + Twitter *string `graphql:"twitter" json:"twitter"` + Instagram *string `graphql:"instagram" json:"instagram"` + Birthdate *string `graphql:"birthdate" json:"birthdate"` + Ethnicity *string `graphql:"ethnicity" json:"ethnicity"` + Country *string `graphql:"country" json:"country"` + EyeColor *string `graphql:"eye_color" json:"eye_color"` + Height *string `graphql:"height" json:"height"` + Measurements *string `graphql:"measurements" json:"measurements"` + FakeTits *string `graphql:"fake_tits" 
json:"fake_tits"` + CareerLength *string `graphql:"career_length" json:"career_length"` + Tattoos *string `graphql:"tattoos" json:"tattoos"` + Piercings *string `graphql:"piercings" json:"piercings"` + Aliases *string `graphql:"aliases" json:"aliases"` + Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"` + Image *string `graphql:"image" json:"image"` } // this type has no Image field type ScrapedPerformerStash struct { - Name *string `graphql:"name" json:"name"` - Gender *string `graphql:"gender" json:"gender"` - URL *string `graphql:"url" json:"url"` - Twitter *string `graphql:"twitter" json:"twitter"` - Instagram *string `graphql:"instagram" json:"instagram"` - Birthdate *string `graphql:"birthdate" json:"birthdate"` - Ethnicity *string `graphql:"ethnicity" json:"ethnicity"` - Country *string `graphql:"country" json:"country"` - EyeColor *string `graphql:"eye_color" json:"eye_color"` - Height *string `graphql:"height" json:"height"` - Measurements *string `graphql:"measurements" json:"measurements"` - FakeTits *string `graphql:"fake_tits" json:"fake_tits"` - CareerLength *string `graphql:"career_length" json:"career_length"` - Tattoos *string `graphql:"tattoos" json:"tattoos"` - Piercings *string `graphql:"piercings" json:"piercings"` - Aliases *string `graphql:"aliases" json:"aliases"` + Name *string `graphql:"name" json:"name"` + Gender *string `graphql:"gender" json:"gender"` + URL *string `graphql:"url" json:"url"` + Twitter *string `graphql:"twitter" json:"twitter"` + Instagram *string `graphql:"instagram" json:"instagram"` + Birthdate *string `graphql:"birthdate" json:"birthdate"` + Ethnicity *string `graphql:"ethnicity" json:"ethnicity"` + Country *string `graphql:"country" json:"country"` + EyeColor *string `graphql:"eye_color" json:"eye_color"` + Height *string `graphql:"height" json:"height"` + Measurements *string `graphql:"measurements" json:"measurements"` + FakeTits *string `graphql:"fake_tits" json:"fake_tits"` + CareerLength *string `graphql:"career_length" json:"career_length"` + Tattoos *string `graphql:"tattoos" json:"tattoos"` + Piercings *string `graphql:"piercings" json:"piercings"` + Aliases *string `graphql:"aliases" json:"aliases"` + Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"` } type ScrapedScene struct { @@ -106,25 +108,26 @@ type ScrapedGalleryStash struct { type ScrapedScenePerformer struct { // Set if performer matched - ID *string `graphql:"id" json:"id"` - Name string `graphql:"name" json:"name"` - Gender *string `graphql:"gender" json:"gender"` - URL *string `graphql:"url" json:"url"` - Twitter *string `graphql:"twitter" json:"twitter"` - Instagram *string `graphql:"instagram" json:"instagram"` - Birthdate *string `graphql:"birthdate" json:"birthdate"` - Ethnicity *string `graphql:"ethnicity" json:"ethnicity"` - Country *string `graphql:"country" json:"country"` - EyeColor *string `graphql:"eye_color" json:"eye_color"` - Height *string `graphql:"height" json:"height"` - Measurements *string `graphql:"measurements" json:"measurements"` - FakeTits *string `graphql:"fake_tits" json:"fake_tits"` - CareerLength *string `graphql:"career_length" json:"career_length"` - Tattoos *string `graphql:"tattoos" json:"tattoos"` - Piercings *string `graphql:"piercings" json:"piercings"` - Aliases *string `graphql:"aliases" json:"aliases"` - RemoteSiteID *string `graphql:"remote_site_id" json:"remote_site_id"` - Images []string `graphql:"images" json:"images"` + ID *string `graphql:"id" json:"id"` + Name string `graphql:"name" json:"name"` + Gender *string 
`graphql:"gender" json:"gender"` + URL *string `graphql:"url" json:"url"` + Twitter *string `graphql:"twitter" json:"twitter"` + Instagram *string `graphql:"instagram" json:"instagram"` + Birthdate *string `graphql:"birthdate" json:"birthdate"` + Ethnicity *string `graphql:"ethnicity" json:"ethnicity"` + Country *string `graphql:"country" json:"country"` + EyeColor *string `graphql:"eye_color" json:"eye_color"` + Height *string `graphql:"height" json:"height"` + Measurements *string `graphql:"measurements" json:"measurements"` + FakeTits *string `graphql:"fake_tits" json:"fake_tits"` + CareerLength *string `graphql:"career_length" json:"career_length"` + Tattoos *string `graphql:"tattoos" json:"tattoos"` + Piercings *string `graphql:"piercings" json:"piercings"` + Aliases *string `graphql:"aliases" json:"aliases"` + Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"` + RemoteSiteID *string `graphql:"remote_site_id" json:"remote_site_id"` + Images []string `graphql:"images" json:"images"` } type ScrapedSceneStudio struct { diff --git a/pkg/models/movie.go b/pkg/models/movie.go index 62f3bf52d..dc6df5fd8 100644 --- a/pkg/models/movie.go +++ b/pkg/models/movie.go @@ -8,7 +8,6 @@ type MovieReader interface { FindByNames(names []string, nocase bool) ([]*Movie, error) All() ([]*Movie, error) Count() (int, error) - AllSlim() ([]*Movie, error) Query(movieFilter *MovieFilterType, findFilter *FindFilterType) ([]*Movie, int, error) GetFrontImage(movieID int) ([]byte, error) GetBackImage(movieID int) ([]byte, error) diff --git a/pkg/models/performer.go b/pkg/models/performer.go index 92fbe4399..2c550b720 100644 --- a/pkg/models/performer.go +++ b/pkg/models/performer.go @@ -8,12 +8,13 @@ type PerformerReader interface { FindByImageID(imageID int) ([]*Performer, error) FindByGalleryID(galleryID int) ([]*Performer, error) FindByNames(names []string, nocase bool) ([]*Performer, error) + CountByTagID(tagID int) (int, error) Count() (int, error) All() ([]*Performer, error) - AllSlim() ([]*Performer, error) Query(performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]*Performer, int, error) GetImage(performerID int) ([]byte, error) GetStashIDs(performerID int) ([]*StashID, error) + GetTagIDs(sceneID int) ([]int, error) } type PerformerWriter interface { @@ -24,6 +25,7 @@ type PerformerWriter interface { UpdateImage(performerID int, image []byte) error DestroyImage(performerID int) error UpdateStashIDs(performerID int, stashIDs []StashID) error + UpdateTags(sceneID int, tagIDs []int) error } type PerformerReaderWriter interface { diff --git a/pkg/models/scene.go b/pkg/models/scene.go index 6bd5e78f8..ef4485717 100644 --- a/pkg/models/scene.go +++ b/pkg/models/scene.go @@ -21,7 +21,6 @@ type SceneReader interface { CountMissingOSHash() (int, error) Wall(q *string) ([]*Scene, error) All() ([]*Scene, error) - QueryForAutoTag(regex string, pathPrefixes []string) ([]*Scene, error) Query(sceneFilter *SceneFilterType, findFilter *FindFilterType) ([]*Scene, int, error) GetCover(sceneID int) ([]byte, error) GetMovies(sceneID int) ([]MoviesScenes, error) diff --git a/pkg/models/studio.go b/pkg/models/studio.go index 6addf8cd1..358abf596 100644 --- a/pkg/models/studio.go +++ b/pkg/models/studio.go @@ -7,7 +7,6 @@ type StudioReader interface { FindByName(name string, nocase bool) (*Studio, error) Count() (int, error) All() ([]*Studio, error) - AllSlim() ([]*Studio, error) Query(studioFilter *StudioFilterType, findFilter *FindFilterType) ([]*Studio, int, error) GetImage(studioID int) ([]byte, error) 
HasImage(studioID int) (bool, error) diff --git a/pkg/models/tag.go b/pkg/models/tag.go index 7bdd8a197..5f03e33b5 100644 --- a/pkg/models/tag.go +++ b/pkg/models/tag.go @@ -4,6 +4,7 @@ type TagReader interface { Find(id int) (*Tag, error) FindMany(ids []int) ([]*Tag, error) FindBySceneID(sceneID int) ([]*Tag, error) + FindByPerformerID(performerID int) ([]*Tag, error) FindBySceneMarkerID(sceneMarkerID int) ([]*Tag, error) FindByImageID(imageID int) ([]*Tag, error) FindByGalleryID(galleryID int) ([]*Tag, error) @@ -11,7 +12,6 @@ type TagReader interface { FindByNames(names []string, nocase bool) ([]*Tag, error) Count() (int, error) All() ([]*Tag, error) - AllSlim() ([]*Tag, error) Query(tagFilter *TagFilterType, findFilter *FindFilterType) ([]*Tag, int, error) GetImage(tagID int) ([]byte, error) } diff --git a/pkg/performer/import.go b/pkg/performer/import.go index b07c073e2..2131b1e57 100644 --- a/pkg/performer/import.go +++ b/pkg/performer/import.go @@ -3,6 +3,7 @@ package performer import ( "database/sql" "fmt" + "strings" "github.com/stashapp/stash/pkg/manager/jsonschema" "github.com/stashapp/stash/pkg/models" @@ -10,16 +11,25 @@ import ( ) type Importer struct { - ReaderWriter models.PerformerReaderWriter - Input jsonschema.Performer + ReaderWriter models.PerformerReaderWriter + TagWriter models.TagReaderWriter + Input jsonschema.Performer + MissingRefBehaviour models.ImportMissingRefEnum + ID int performer models.Performer imageData []byte + + tags []*models.Tag } func (i *Importer) PreImport() error { i.performer = performerJSONToPerformer(i.Input) + if err := i.populateTags(); err != nil { + return err + } + var err error if len(i.Input.Image) > 0 { _, i.imageData, err = utils.ProcessBase64Image(i.Input.Image) @@ -31,7 +41,82 @@ func (i *Importer) PreImport() error { return nil } +func (i *Importer) populateTags() error { + if len(i.Input.Tags) > 0 { + + tags, err := importTags(i.TagWriter, i.Input.Tags, i.MissingRefBehaviour) + if err != nil { + return err + } + + i.tags = tags + } + + return nil +} + +func importTags(tagWriter models.TagReaderWriter, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { + tags, err := tagWriter.FindByNames(names, false) + if err != nil { + return nil, err + } + + var pluckedNames []string + for _, tag := range tags { + pluckedNames = append(pluckedNames, tag.Name) + } + + missingTags := utils.StrFilter(names, func(name string) bool { + return !utils.StrInclude(pluckedNames, name) + }) + + if len(missingTags) > 0 { + if missingRefBehaviour == models.ImportMissingRefEnumFail { + return nil, fmt.Errorf("tags [%s] not found", strings.Join(missingTags, ", ")) + } + + if missingRefBehaviour == models.ImportMissingRefEnumCreate { + createdTags, err := createTags(tagWriter, missingTags) + if err != nil { + return nil, fmt.Errorf("error creating tags: %s", err.Error()) + } + + tags = append(tags, createdTags...) 
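+ // the newly created tags are returned alongside the existing ones so that
+ // PostImport can associate them with the performer via UpdateTags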
+ } + + // ignore if MissingRefBehaviour set to Ignore + } + + return tags, nil +} + +func createTags(tagWriter models.TagWriter, names []string) ([]*models.Tag, error) { + var ret []*models.Tag + for _, name := range names { + newTag := *models.NewTag(name) + + created, err := tagWriter.Create(newTag) + if err != nil { + return nil, err + } + + ret = append(ret, created) + } + + return ret, nil +} + func (i *Importer) PostImport(id int) error { + if len(i.tags) > 0 { + var tagIDs []int + for _, t := range i.tags { + tagIDs = append(tagIDs, t.ID) + } + if err := i.ReaderWriter.UpdateTags(id, tagIDs); err != nil { + return fmt.Errorf("failed to associate tags: %s", err.Error()) + } + } + if len(i.imageData) > 0 { if err := i.ReaderWriter.UpdateImage(id, i.imageData); err != nil { return fmt.Errorf("error setting performer image: %s", err.Error()) diff --git a/pkg/performer/import_test.go b/pkg/performer/import_test.go index 43509702d..13598f047 100644 --- a/pkg/performer/import_test.go +++ b/pkg/performer/import_test.go @@ -3,6 +3,8 @@ package performer import ( "errors" + "github.com/stretchr/testify/mock" + "github.com/stashapp/stash/pkg/manager/jsonschema" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/mocks" @@ -16,9 +18,15 @@ const invalidImage = "aW1hZ2VCeXRlcw&&" const ( existingPerformerID = 100 + existingTagID = 105 + errTagsID = 106 existingPerformerName = "existingPerformerName" performerNameErr = "performerNameErr" + + existingTagName = "existingTagName" + existingTagErr = "existingTagErr" + missingTagName = "missingTagName" ) func TestImporterName(t *testing.T) { @@ -53,6 +61,91 @@ func TestImporterPreImport(t *testing.T) { assert.Equal(t, expectedPerformer, i.performer) } +func TestImporterPreImportWithTag(t *testing.T) { + tagReaderWriter := &mocks.TagReaderWriter{} + + i := Importer{ + TagWriter: tagReaderWriter, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + Input: jsonschema.Performer{ + Tags: []string{ + existingTagName, + }, + }, + } + + tagReaderWriter.On("FindByNames", []string{existingTagName}, false).Return([]*models.Tag{ + { + ID: existingTagID, + Name: existingTagName, + }, + }, nil).Once() + tagReaderWriter.On("FindByNames", []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once() + + err := i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingTagID, i.tags[0].ID) + + i.Input.Tags = []string{existingTagErr} + err = i.PreImport() + assert.NotNil(t, err) + + tagReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingTag(t *testing.T) { + tagReaderWriter := &mocks.TagReaderWriter{} + + i := Importer{ + TagWriter: tagReaderWriter, + Input: jsonschema.Performer{ + Tags: []string{ + missingTagName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + } + + tagReaderWriter.On("FindByNames", []string{missingTagName}, false).Return(nil, nil).Times(3) + tagReaderWriter.On("Create", mock.AnythingOfType("models.Tag")).Return(&models.Tag{ + ID: existingTagID, + }, nil) + + err := i.PreImport() + assert.NotNil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport() + assert.Nil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingTagID, i.tags[0].ID) + + tagReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { + tagReaderWriter := &mocks.TagReaderWriter{} + + i := Importer{ + TagWriter: 
tagReaderWriter, + Input: jsonschema.Performer{ + Tags: []string{ + missingTagName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumCreate, + } + + tagReaderWriter.On("FindByNames", []string{missingTagName}, false).Return(nil, nil).Once() + tagReaderWriter.On("Create", mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error")) + + err := i.PreImport() + assert.NotNil(t, err) +} + func TestImporterPostImport(t *testing.T) { readerWriter := &mocks.PerformerReaderWriter{} @@ -111,6 +204,32 @@ func TestImporterFindExistingID(t *testing.T) { readerWriter.AssertExpectations(t) } +func TestImporterPostImportUpdateTags(t *testing.T) { + readerWriter := &mocks.PerformerReaderWriter{} + + i := Importer{ + ReaderWriter: readerWriter, + tags: []*models.Tag{ + { + ID: existingTagID, + }, + }, + } + + updateErr := errors.New("UpdateTags error") + + readerWriter.On("UpdateTags", performerID, []int{existingTagID}).Return(nil).Once() + readerWriter.On("UpdateTags", errTagsID, mock.AnythingOfType("[]int")).Return(updateErr).Once() + + err := i.PostImport(performerID) + assert.Nil(t, err) + + err = i.PostImport(errTagsID) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + func TestCreate(t *testing.T) { readerWriter := &mocks.PerformerReaderWriter{} diff --git a/pkg/scraper/config.go b/pkg/scraper/config.go index c81fb2061..7d5d49ebf 100644 --- a/pkg/scraper/config.go +++ b/pkg/scraper/config.go @@ -158,10 +158,11 @@ type scraperDebugOptions struct { } type scraperCookies struct { - Name string `yaml:"Name"` - Value string `yaml:"Value"` - Domain string `yaml:"Domain"` - Path string `yaml:"Path"` + Name string `yaml:"Name"` + Value string `yaml:"Value"` + ValueRandom int `yaml:"ValueRandom"` + Domain string `yaml:"Domain"` + Path string `yaml:"Path"` } type cookieOptions struct { diff --git a/pkg/scraper/cookies.go b/pkg/scraper/cookies.go index 0e3ae3d70..fbcc05d50 100644 --- a/pkg/scraper/cookies.go +++ b/pkg/scraper/cookies.go @@ -13,6 +13,7 @@ import ( "github.com/chromedp/chromedp" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/utils" ) // set cookies for the native http client @@ -32,7 +33,7 @@ func setCookies(jar *cookiejar.Jar, scraperConfig config) { for _, cookie := range ckURL.Cookies { httpCookie = &http.Cookie{ Name: cookie.Name, - Value: cookie.Value, + Value: getCookieValue(cookie), Path: cookie.Path, Domain: cookie.Domain, } @@ -53,6 +54,13 @@ func setCookies(jar *cookiejar.Jar, scraperConfig config) { } } +func getCookieValue(cookie *scraperCookies) string { + if cookie.ValueRandom > 0 { + return utils.RandomSequence(cookie.ValueRandom) + } + return cookie.Value +} + // print all cookies from the jar of the native http client func printCookies(jar *cookiejar.Jar, scraperConfig config, msg string) { driverOptions := scraperConfig.DriverOptions @@ -92,7 +100,7 @@ func setCDPCookies(driverOptions scraperDriverOptions) chromedp.Tasks { for _, ckURL := range driverOptions.Cookies { for _, cookie := range ckURL.Cookies { - success, err := network.SetCookie(cookie.Name, cookie.Value). + success, err := network.SetCookie(cookie.Name, getCookieValue(cookie)). WithExpires(&expr). WithDomain(cookie.Domain). WithPath(cookie.Path). 
diff --git a/pkg/scraper/freeones.go b/pkg/scraper/freeones.go index e57a51a5b..8b72e9df3 100644 --- a/pkg/scraper/freeones.go +++ b/pkg/scraper/freeones.go @@ -31,33 +31,28 @@ xPathScrapers: selector: //div[@id="search-result"]//div[@data-test="teaser-subject"]/a/@href postProcess: - replace: - - regex: ^ - with: https://www.freeones.com - - regex: $ - with: /profile + - regex: ^ + with: https://www.freeones.com + - regex: /feed$ + with: /bio performerScraper: performer: - Name: + Name: selector: //h1 postProcess: - replace: - - regex: \sBio\s*$ - with: "" - URL: - selector: //a[span[text()="Profile"]]/@href - postProcess: - - replace: - - regex: ^ - with: https://www.freeones.com + - regex: \sBio\s*$ + with: "" + URL: //link[@rel="alternate" and @hreflang="x-default"]/@href Twitter: //a[contains(@href,'twitter.com/')]/@href Instagram: //a[contains(@href,'instagram.com/')]/@href Birthdate: selector: //span[contains(text(),'Born On')] postProcess: - replace: - - regex: Born On - with: + - regex: Born On + with: - parseDate: January 2, 2006 Ethnicity: selector: //a[@data-test="link_ethnicity"]/span/text() @@ -73,8 +68,8 @@ xPathScrapers: selector: //span[text()='Height']/following-sibling::span/a postProcess: - replace: - - regex: \D+[\s\S]+ - with: "" + - regex: \D+[\s\S]+ + with: "" - map: Unknown: "" Measurements: @@ -88,18 +83,18 @@ xPathScrapers: postProcess: - map: Unknown: "" - Fake: Yes - Natural: No + Fake: "Yes" + Natural: "No" CareerLength: selector: //div[contains(@class,'timeline-horizontal')]//p[@class='m-0'] concat: "-" Aliases: //p[@data-test='p_aliases']/text() - Tattoos: + Tattoos: selector: //span[text()='Tattoos']/following-sibling::span/span postProcess: - map: Unknown: "" - Piercings: + Piercings: selector: //span[text()='Piercings']/following-sibling::span/span postProcess: - map: @@ -108,7 +103,7 @@ xPathScrapers: selector: //div[contains(@class,'image-container')]//a/img/@src Gender: fixed: "Female" -# Last updated January 31, 2021 +# Last updated March 24, 2021 ` func getFreeonesScraper() config { diff --git a/pkg/scraper/image.go b/pkg/scraper/image.go index 1c6059835..08cb6725d 100644 --- a/pkg/scraper/image.go +++ b/pkg/scraper/image.go @@ -1,11 +1,14 @@ package scraper import ( + "crypto/tls" + "fmt" "io/ioutil" "net/http" "strings" "time" + stashConfig "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) @@ -83,6 +86,8 @@ func setMovieBackImage(m *models.ScrapedMovie, globalConfig GlobalConfig) error func getImage(url string, globalConfig GlobalConfig) (*string, error) { client := &http.Client{ + Transport: &http.Transport{ // ignore insecure certificates + TLSClientConfig: &tls.Config{InsecureSkipVerify: !stashConfig.GetScraperCertCheck()}}, Timeout: imageGetTimeout, } @@ -109,6 +114,10 @@ func getImage(url string, globalConfig GlobalConfig) (*string, error) { return nil, err } + if resp.StatusCode >= 400 { + return nil, fmt.Errorf("http error %d", resp.StatusCode) + } + defer resp.Body.Close() body, err := ioutil.ReadAll(resp.Body) diff --git a/pkg/scraper/json.go b/pkg/scraper/json.go index b40b04e77..b7c68e86e 100644 --- a/pkg/scraper/json.go +++ b/pkg/scraper/json.go @@ -52,7 +52,7 @@ func (s *jsonScraper) loadURL(url string) (string, error) { if err != nil { return "", err } - + logger.Infof("loadURL (%s)\n", url) doc, err := ioutil.ReadAll(r) if err != nil { return "", err @@ -71,7 +71,8 @@ func (s *jsonScraper) loadURL(url string) (string, error) { } func (s *jsonScraper) 
scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) { - doc, scraper, err := s.scrapeURL(url) + u := replaceURL(url, s.scraper) // allow a URL Replace for performer by URL queries + doc, scraper, err := s.scrapeURL(u) if err != nil { return nil, err } @@ -81,7 +82,8 @@ func (s *jsonScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer } func (s *jsonScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) { - doc, scraper, err := s.scrapeURL(url) + u := replaceURL(url, s.scraper) // allow a URL Replace for scene by URL queries + doc, scraper, err := s.scrapeURL(u) if err != nil { return nil, err } @@ -91,7 +93,8 @@ func (s *jsonScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) } func (s *jsonScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) { - doc, scraper, err := s.scrapeURL(url) + u := replaceURL(url, s.scraper) // allow a URL Replace for gallery by URL queries + doc, scraper, err := s.scrapeURL(u) if err != nil { return nil, err } @@ -101,7 +104,8 @@ func (s *jsonScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, er } func (s *jsonScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) { - doc, scraper, err := s.scrapeURL(url) + u := replaceURL(url, s.scraper) // allow a URL Replace for movie by URL queries + doc, scraper, err := s.scrapeURL(u) if err != nil { return nil, err } diff --git a/pkg/scraper/mapped.go b/pkg/scraper/mapped.go index 98f4896e4..6b25e4850 100644 --- a/pkg/scraper/mapped.go +++ b/pkg/scraper/mapped.go @@ -94,10 +94,10 @@ func (s mappedConfig) postProcess(q mappedQuery, attrConfig mappedScraperAttrCon type mappedSceneScraperConfig struct { mappedConfig - Tags mappedConfig `yaml:"Tags"` - Performers mappedConfig `yaml:"Performers"` - Studio mappedConfig `yaml:"Studio"` - Movies mappedConfig `yaml:"Movies"` + Tags mappedConfig `yaml:"Tags"` + Performers mappedPerformerScraperConfig `yaml:"Performers"` + Studio mappedConfig `yaml:"Studio"` + Movies mappedConfig `yaml:"Movies"` } type _mappedSceneScraperConfig mappedSceneScraperConfig @@ -211,10 +211,54 @@ func (s *mappedGalleryScraperConfig) UnmarshalYAML(unmarshal func(interface{}) e type mappedPerformerScraperConfig struct { mappedConfig + + Tags mappedConfig `yaml:"Tags"` } +type _mappedPerformerScraperConfig mappedPerformerScraperConfig + +const ( + mappedScraperConfigPerformerTags = "Tags" +) func (s *mappedPerformerScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { - return unmarshal(&s.mappedConfig) + // HACK - unmarshal to map first, then remove known scene sub-fields, then + // remarshal to yaml and pass that down to the base map + parentMap := make(map[string]interface{}) + if err := unmarshal(parentMap); err != nil { + return err + } + + // move the known sub-fields to a separate map + thisMap := make(map[string]interface{}) + + thisMap[mappedScraperConfigPerformerTags] = parentMap[mappedScraperConfigPerformerTags] + + delete(parentMap, mappedScraperConfigPerformerTags) + + // re-unmarshal the sub-fields + yml, err := yaml.Marshal(thisMap) + if err != nil { + return err + } + + // needs to be a different type to prevent infinite recursion + c := _mappedPerformerScraperConfig{} + if err := yaml.Unmarshal(yml, &c); err != nil { + return err + } + + *s = mappedPerformerScraperConfig(c) + + yml, err = yaml.Marshal(parentMap) + if err != nil { + return err + } + + if err := yaml.Unmarshal(yml, &s.mappedConfig); err != nil { + return err + } + + return nil } type mappedMovieScraperConfig 
struct { @@ -647,9 +691,23 @@ func (s mappedScraper) scrapePerformer(q mappedQuery) (*models.ScrapedPerformer, return nil, nil } + performerTagsMap := performerMap.Tags + results := performerMap.process(q, s.Common) if len(results) > 0 { results[0].apply(&ret) + + // now apply the tags + if performerTagsMap != nil { + logger.Debug(`Processing performer tags:`) + tagResults := performerTagsMap.process(q, s.Common) + + for _, p := range tagResults { + tag := &models.ScrapedSceneTag{} + p.apply(tag) + ret.Tags = append(ret.Tags, tag) + } + } } return &ret, nil @@ -687,19 +745,34 @@ func (s mappedScraper) scrapeScene(q mappedQuery) (*models.ScrapedScene, error) sceneStudioMap := sceneScraperConfig.Studio sceneMoviesMap := sceneScraperConfig.Movies + scenePerformerTagsMap := scenePerformersMap.Tags + logger.Debug(`Processing scene:`) results := sceneMap.process(q, s.Common) if len(results) > 0 { results[0].apply(&ret) + // process performer tags once + var performerTagResults mappedResults + if scenePerformerTagsMap != nil { + performerTagResults = scenePerformerTagsMap.process(q, s.Common) + } + // now apply the performers and tags - if scenePerformersMap != nil { + if scenePerformersMap.mappedConfig != nil { logger.Debug(`Processing scene performers:`) performerResults := scenePerformersMap.process(q, s.Common) for _, p := range performerResults { performer := &models.ScrapedScenePerformer{} p.apply(performer) + + for _, p := range performerTagResults { + tag := &models.ScrapedSceneTag{} + p.apply(tag) + ret.Tags = append(ret.Tags, tag) + } + ret.Performers = append(ret.Performers, performer) } } diff --git a/pkg/scraper/query_url.go b/pkg/scraper/query_url.go index 517df5ac2..462069d2f 100644 --- a/pkg/scraper/query_url.go +++ b/pkg/scraper/query_url.go @@ -17,6 +17,13 @@ func queryURLParametersFromScene(scene *models.Scene) queryURLParameters { ret["oshash"] = scene.OSHash.String ret["filename"] = filepath.Base(scene.Path) ret["title"] = scene.Title.String + ret["url"] = scene.URL.String + return ret +} + +func queryURLParameterFromURL(url string) queryURLParameters { + ret := make(queryURLParameters) + ret["url"] = url return ret } @@ -28,6 +35,7 @@ func queryURLParametersFromGallery(gallery *models.Gallery) queryURLParameters { ret["filename"] = filepath.Base(gallery.Path.String) } ret["title"] = gallery.Title.String + ret["url"] = gallery.URL.String return ret } @@ -49,3 +57,14 @@ func (p queryURLParameters) constructURL(url string) string { return ret } + +// replaceURL does a partial URL Replace ( only url parameter is used) +func replaceURL(url string, scraperConfig scraperTypeConfig) string { + u := url + queryURL := queryURLParameterFromURL(u) + if scraperConfig.QueryURLReplacements != nil { + queryURL.applyReplacements(scraperConfig.QueryURLReplacements) + u = queryURL.constructURL(scraperConfig.QueryURL) + } + return u +} diff --git a/pkg/scraper/scrapers.go b/pkg/scraper/scrapers.go index 9c40e2a77..6e2ee3fd2 100644 --- a/pkg/scraper/scrapers.go +++ b/pkg/scraper/scrapers.go @@ -220,9 +220,11 @@ func (c Cache) ScrapePerformerURL(url string) (*models.ScrapedPerformer, error) return nil, err } - // post-process - set the image if applicable - if err := setPerformerImage(ret, c.globalConfig); err != nil { - logger.Warnf("Could not set image using URL %s: %s", *ret.Image, err.Error()) + if ret != nil { + err = c.postScrapePerformer(ret) + if err != nil { + return nil, err + } } return ret, nil @@ -232,6 +234,49 @@ func (c Cache) ScrapePerformerURL(url string) 
(*models.ScrapedPerformer, error) return nil, nil } +func (c Cache) postScrapePerformer(ret *models.ScrapedPerformer) error { + if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + tqb := r.Tag() + + for _, t := range ret.Tags { + err := MatchScrapedSceneTag(tqb, t) + if err != nil { + return err + } + } + + return nil + }); err != nil { + return err + } + + // post-process - set the image if applicable + if err := setPerformerImage(ret, c.globalConfig); err != nil { + logger.Warnf("Could not set image using URL %s: %s", *ret.Image, err.Error()) + } + + return nil +} + +func (c Cache) postScrapeScenePerformer(ret *models.ScrapedScenePerformer) error { + if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { + tqb := r.Tag() + + for _, t := range ret.Tags { + err := MatchScrapedSceneTag(tqb, t) + if err != nil { + return err + } + } + + return nil + }); err != nil { + return err + } + + return nil +} + func (c Cache) postScrapeScene(ret *models.ScrapedScene) error { if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { pqb := r.Performer() @@ -240,8 +285,11 @@ func (c Cache) postScrapeScene(ret *models.ScrapedScene) error { sqb := r.Studio() for _, p := range ret.Performers { - err := MatchScrapedScenePerformer(pqb, p) - if err != nil { + if err := c.postScrapeScenePerformer(p); err != nil { + return err + } + + if err := MatchScrapedScenePerformer(pqb, p); err != nil { return err } } diff --git a/pkg/scraper/script.go b/pkg/scraper/script.go index 46c4164fe..32f768d45 100644 --- a/pkg/scraper/script.go +++ b/pkg/scraper/script.go @@ -30,6 +30,13 @@ func newScriptScraper(scraper scraperTypeConfig, config config, globalConfig Glo func (s *scriptScraper) runScraperScript(inString string, out interface{}) error { command := s.scraper.Script + if command[0] == "python" || command[0] == "python3" { + executable, err := findPythonExecutable() + if err == nil { + command[0] = executable + } + } + cmd := exec.Command(command[0], command[1:]...) cmd.Dir = filepath.Dir(s.config.path) @@ -184,3 +191,19 @@ func (s *scriptScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, erro return &ret, err } + +func findPythonExecutable() (string, error) { + _, err := exec.LookPath("python3") + + if err != nil { + _, err = exec.LookPath("python") + + if err != nil { + return "", err + } + + return "python", nil + } + + return "python3", nil +} diff --git a/pkg/scraper/stash.go b/pkg/scraper/stash.go index a53afcab5..d37b82847 100644 --- a/pkg/scraper/stash.go +++ b/pkg/scraper/stash.go @@ -100,6 +100,13 @@ func (s *stashScraper) scrapePerformerByFragment(scrapedPerformer models.Scraped return nil, err } + if q.FindPerformer != nil { + // the ids of the tags must be nilled + for _, t := range q.FindPerformer.Tags { + t.ID = nil + } + } + // need to copy back to a scraped performer ret := models.ScrapedPerformer{} err = copier.Copy(&ret, q.FindPerformer) diff --git a/pkg/scraper/stashbox/stash_box.go b/pkg/scraper/stashbox/stash_box.go index 77ae492f6..1dac41422 100644 --- a/pkg/scraper/stashbox/stash_box.go +++ b/pkg/scraper/stashbox/stash_box.go @@ -322,6 +322,7 @@ func performerFragmentToScrapedScenePerformer(p graphql.PerformerFragment) *mode Twitter: findURL(p.Urls, "TWITTER"), RemoteSiteID: &id, Images: images, + // TODO - tags not currently supported // TODO - Image - should be returned as a set of URLs. Will need a // graphql schema change to accommodate this. Leave off for now. 
} diff --git a/pkg/scraper/url.go b/pkg/scraper/url.go index 159dc1ccd..85e1590ee 100644 --- a/pkg/scraper/url.go +++ b/pkg/scraper/url.go @@ -3,6 +3,7 @@ package scraper import ( "bytes" "context" + "crypto/tls" "errors" "fmt" "io" @@ -22,6 +23,7 @@ import ( "golang.org/x/net/publicsuffix" "github.com/stashapp/stash/pkg/logger" + stashConfig "github.com/stashapp/stash/pkg/manager/config" ) // Timeout for the scrape http request. Includes transfer time. May want to make this @@ -49,6 +51,9 @@ func loadURL(url string, scraperConfig config, globalConfig GlobalConfig) (io.Re printCookies(jar, scraperConfig, "Jar cookies set from scraper") client := &http.Client{ + Transport: &http.Transport{ // ignore insecure certificates + TLSClientConfig: &tls.Config{InsecureSkipVerify: !stashConfig.GetScraperCertCheck()}, + }, Timeout: scrapeGetTimeout, // defaultCheckRedirect code with max changed from 10 to 20 CheckRedirect: func(req *http.Request, via []*http.Request) error { @@ -74,6 +79,10 @@ func loadURL(url string, scraperConfig config, globalConfig GlobalConfig) (io.Re if err != nil { return nil, err } + if resp.StatusCode >= 400 { + return nil, fmt.Errorf("http error %d", resp.StatusCode) + } + defer resp.Body.Close() body, err := ioutil.ReadAll(resp.Body) diff --git a/pkg/scraper/xpath.go b/pkg/scraper/xpath.go index f4f55bcdc..e612b5f4d 100644 --- a/pkg/scraper/xpath.go +++ b/pkg/scraper/xpath.go @@ -52,7 +52,8 @@ func (s *xpathScraper) scrapeURL(url string) (*html.Node, *mappedScraper, error) } func (s *xpathScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) { - doc, scraper, err := s.scrapeURL(url) + u := replaceURL(url, s.scraper) // allow a URL Replace for performer by URL queries + doc, scraper, err := s.scrapeURL(u) if err != nil { return nil, err } @@ -62,7 +63,8 @@ func (s *xpathScraper) scrapePerformerByURL(url string) (*models.ScrapedPerforme } func (s *xpathScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) { - doc, scraper, err := s.scrapeURL(url) + u := replaceURL(url, s.scraper) // allow a URL Replace for scene by URL queries + doc, scraper, err := s.scrapeURL(u) if err != nil { return nil, err } @@ -72,7 +74,8 @@ func (s *xpathScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error } func (s *xpathScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) { - doc, scraper, err := s.scrapeURL(url) + u := replaceURL(url, s.scraper) // allow a URL Replace for gallery by URL queries + doc, scraper, err := s.scrapeURL(u) if err != nil { return nil, err } @@ -82,7 +85,8 @@ func (s *xpathScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, e } func (s *xpathScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) { - doc, scraper, err := s.scrapeURL(url) + u := replaceURL(url, s.scraper) // allow a URL Replace for movie by URL queries + doc, scraper, err := s.scrapeURL(u) if err != nil { return nil, err } diff --git a/pkg/scraper/xpath_test.go b/pkg/scraper/xpath_test.go index f5950460c..275d59830 100644 --- a/pkg/scraper/xpath_test.go +++ b/pkg/scraper/xpath_test.go @@ -520,7 +520,7 @@ func makeSceneXPathConfig() mappedScraper { performerConfig := make(mappedConfig) performerConfig["Name"] = makeSimpleAttrConfig(`$performerElem/@data-mxptext`) performerConfig["URL"] = makeSimpleAttrConfig(`$performerElem/@href`) - config.Performers = performerConfig + config.Performers.mappedConfig = performerConfig studioConfig := make(mappedConfig) studioConfig["Name"] = makeSimpleAttrConfig(`$studioElem`) @@ 
-730,7 +730,7 @@ xPathScrapers: assert.Equal(t, "//title", sceneConfig.mappedConfig["Title"].Selector) assert.Equal(t, "//tags", sceneConfig.Tags["Name"].Selector) assert.Equal(t, "//movies", sceneConfig.Movies["Name"].Selector) - assert.Equal(t, "//performers", sceneConfig.Performers["Name"].Selector) + assert.Equal(t, "//performers", sceneConfig.Performers.mappedConfig["Name"].Selector) assert.Equal(t, "//studio", sceneConfig.Studio["Name"].Selector) postProcess := sceneConfig.mappedConfig["Title"].postProcessActions diff --git a/pkg/sqlite/filter.go b/pkg/sqlite/filter.go new file mode 100644 index 000000000..bae383825 --- /dev/null +++ b/pkg/sqlite/filter.go @@ -0,0 +1,403 @@ +package sqlite + +import ( + "errors" + "fmt" + "regexp" + "strings" + + "github.com/stashapp/stash/pkg/models" +) + +type sqlClause struct { + sql string + args []interface{} +} + +func makeClause(sql string, args ...interface{}) sqlClause { + return sqlClause{ + sql: sql, + args: args, + } +} + +type criterionHandler interface { + handle(f *filterBuilder) +} + +type criterionHandlerFunc func(f *filterBuilder) + +type join struct { + table string + as string + onClause string +} + +// equals returns true if the other join alias/table is equal to this one +func (j join) equals(o join) bool { + return j.alias() == o.alias() +} + +// alias returns the as string, or the table if as is empty +func (j join) alias() string { + if j.as == "" { + return j.table + } + + return j.as +} + +func (j join) toSQL() string { + asStr := "" + if j.as != "" && j.as != j.table { + asStr = " AS " + j.as + } + + return fmt.Sprintf("LEFT JOIN %s%s ON %s", j.table, asStr, j.onClause) +} + +type joins []join + +func (j *joins) add(newJoins ...join) { + // only add if not already joined + for _, newJoin := range newJoins { + for _, jj := range *j { + if jj.equals(newJoin) { + return + } + } + + *j = append(*j, newJoin) + } +} + +func (j *joins) toSQL() string { + var ret []string + for _, jj := range *j { + ret = append(ret, jj.toSQL()) + } + + return strings.Join(ret, " ") +} + +type filterBuilder struct { + subFilter *filterBuilder + subFilterOp string + + joins joins + whereClauses []sqlClause + havingClauses []sqlClause + + err error +} + +var errSubFilterAlreadySet error = errors.New(`sub-filter already set`) + +// sub-filter operator values +var ( + andOp = "AND" + orOp = "OR" + notOp = "AND NOT" +) + +// and sets the sub-filter that will be ANDed with this one. +// Sets the error state if sub-filter is already set. +func (f *filterBuilder) and(a *filterBuilder) { + if f.subFilter != nil { + f.setError(errSubFilterAlreadySet) + return + } + + f.subFilter = a + f.subFilterOp = andOp +} + +// or sets the sub-filter that will be ORed with this one. +// Sets the error state if a sub-filter is already set. +func (f *filterBuilder) or(o *filterBuilder) { + if f.subFilter != nil { + f.setError(errSubFilterAlreadySet) + return + } + + f.subFilter = o + f.subFilterOp = orOp +} + +// not sets the sub-filter that will be AND NOTed with this one. +// Sets the error state if a sub-filter is already set. +func (f *filterBuilder) not(n *filterBuilder) { + if f.subFilter != nil { + f.setError(errSubFilterAlreadySet) + return + } + + f.subFilter = n + f.subFilterOp = notOp +} + +// addJoin adds a join to the filter. The join is expressed in SQL as: +// LEFT JOIN <table> [AS <as>] ON <onClause> +// The AS is omitted if as is empty. +// This method does not add a join if its alias/table name is already +// present in another existing join.
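A minimal sketch of how the join and joins types above are expected to behave. It assumes the snippet sits alongside filter.go in package sqlite; the table names, alias and ON clauses are made up for illustration, and exampleJoins is a hypothetical helper rather than part of the patch.

package sqlite

// exampleJoins shows join rendering and de-duplication.
func exampleJoins() string {
    var js joins
    js.add(join{table: "performers_scenes", onClause: "performers_scenes.scene_id = scenes.id"})
    js.add(join{table: "performers", as: "performers_join", onClause: "performers_join.id = performers_scenes.performer_id"})

    // same alias/table name as the first join, so it is skipped
    js.add(join{table: "performers_scenes", onClause: "ignored"})

    // returns both LEFT JOIN clauses, space-separated:
    // "LEFT JOIN performers_scenes ON performers_scenes.scene_id = scenes.id" and
    // "LEFT JOIN performers AS performers_join ON performers_join.id = performers_scenes.performer_id"
    return js.toSQL()
}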
+func (f *filterBuilder) addJoin(table, as, onClause string) { + newJoin := join{ + table: table, + as: as, + onClause: onClause, + } + + f.joins.add(newJoin) +} + +// addWhere adds a where clause and arguments to the filter. Where clauses +// are ANDed together. Does not add anything if the provided string is empty. +func (f *filterBuilder) addWhere(sql string, args ...interface{}) { + if sql == "" { + return + } + f.whereClauses = append(f.whereClauses, makeClause(sql, args...)) +} + +// addHaving adds a having clause and arguments to the filter. Having clauses +// are ANDed together. Does not add anything if the provided string is empty. +func (f *filterBuilder) addHaving(sql string, args ...interface{}) { + if sql == "" { + return + } + f.havingClauses = append(f.havingClauses, makeClause(sql, args...)) +} + +func (f *filterBuilder) getSubFilterClause(clause, subFilterClause string) string { + ret := clause + + if subFilterClause != "" { + var op string + if len(ret) > 0 { + op = " " + f.subFilterOp + " " + } else { + if f.subFilterOp == notOp { + op = "NOT " + } + } + + ret += op + "(" + subFilterClause + ")" + } + + return ret +} + +// generateWhereClauses generates the SQL where clause for this filter. +// All where clauses within the filter are ANDed together. This is combined +// with the sub-filter, which will use the applicable operator (AND/OR/AND NOT). +func (f *filterBuilder) generateWhereClauses() (clause string, args []interface{}) { + clause, args = f.andClauses(f.whereClauses) + + if f.subFilter != nil { + c, a := f.subFilter.generateWhereClauses() + if c != "" { + clause = f.getSubFilterClause(clause, c) + if len(a) > 0 { + args = append(args, a...) + } + } + } + + return +} + +// generateHavingClauses generates the SQL having clause for this filter. +// All having clauses within the filter are ANDed together. This is combined +// with the sub-filter, which will use the applicable operator (AND/OR/AND NOT). +func (f *filterBuilder) generateHavingClauses() (string, []interface{}) { + clause, args := f.andClauses(f.havingClauses) + + if f.subFilter != nil { + c, a := f.subFilter.generateHavingClauses() + if c != "" { + clause = f.getSubFilterClause(clause, c) + if len(a) > 0 { + args = append(args, a...) + } + } + } + + return clause, args +} + +// getAllJoins returns all of the joins in this filter and any sub-filter(s). +// Redundant joins will not be duplicated in the return value. +func (f *filterBuilder) getAllJoins() joins { + var ret joins + ret.add(f.joins...) + if f.subFilter != nil { + subJoins := f.subFilter.getAllJoins() + if len(subJoins) > 0 { + ret.add(subJoins...) + } + } + + return ret +} + +// getError returns the error state on this filter, or on any sub-filter(s) if +// the error state is nil. +func (f *filterBuilder) getError() error { + if f.err != nil { + return f.err + } + + if f.subFilter != nil { + return f.subFilter.getError() + } + + return nil +} + +// handleCriterion calls the handle function on the provided criterionHandler, +// providing itself. +func (f *filterBuilder) handleCriterion(handler criterionHandler) { + f.handleCriterionFunc(func(h *filterBuilder) { + handler.handle(h) + }) +} + +// handleCriterionFunc calls the provided criterion handler function providing +// itself.
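To make the sub-filter composition above concrete, a minimal sketch of how where clauses and an ORed sub-filter combine. Again this assumes placement in package sqlite; the column names and the exampleGenerateWhere helper are hypothetical and not part of the patch.

package sqlite

// exampleGenerateWhere builds a filter with two ANDed clauses plus an ORed sub-filter.
func exampleGenerateWhere() (string, []interface{}) {
    f := &filterBuilder{}
    f.addWhere("scenes.rating >= ?", 4)
    f.addWhere("scenes.o_counter > ?", 0)

    sub := &filterBuilder{}
    sub.addWhere("scenes.organized = ?", 1)
    f.or(sub) // and()/not() combine with AND / AND NOT instead

    // clause: "(scenes.rating >= ? AND scenes.o_counter > ?) OR (scenes.organized = ?)"
    // args:   [4 0 1]
    return f.generateWhereClauses()
}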
+func (f *filterBuilder) handleCriterionFunc(handler criterionHandlerFunc) { + handler(f) +} + +func (f *filterBuilder) setError(e error) { + if f.err == nil { + f.err = e + } +} + +func (f *filterBuilder) andClauses(input []sqlClause) (string, []interface{}) { + var clauses []string + var args []interface{} + for _, w := range input { + clauses = append(clauses, w.sql) + args = append(args, w.args...) + } + + if len(clauses) > 0 { + c := strings.Join(clauses, " AND ") + if len(clauses) > 1 { + c = "(" + c + ")" + } + return c, args + } + + return "", nil +} + +func stringCriterionHandler(c *models.StringCriterionInput, column string) criterionHandlerFunc { + return func(f *filterBuilder) { + if c != nil { + if modifier := c.Modifier; c.Modifier.IsValid() { + switch modifier { + case models.CriterionModifierIncludes: + clause, thisArgs := getSearchBinding([]string{column}, c.Value, false) + f.addWhere(clause, thisArgs...) + case models.CriterionModifierExcludes: + clause, thisArgs := getSearchBinding([]string{column}, c.Value, true) + f.addWhere(clause, thisArgs...) + case models.CriterionModifierEquals: + f.addWhere(column+" LIKE ?", c.Value) + case models.CriterionModifierNotEquals: + f.addWhere(column+" NOT LIKE ?", c.Value) + case models.CriterionModifierMatchesRegex: + if _, err := regexp.Compile(c.Value); err != nil { + f.setError(err) + return + } + f.addWhere(fmt.Sprintf("(%s IS NOT NULL AND %[1]s regexp ?)", column), c.Value) + case models.CriterionModifierNotMatchesRegex: + if _, err := regexp.Compile(c.Value); err != nil { + f.setError(err) + return + } + f.addWhere(fmt.Sprintf("(%s IS NULL OR %[1]s NOT regexp ?)", column), c.Value) + default: + clause, count := getSimpleCriterionClause(modifier, "?") + + if count == 1 { + f.addWhere(column+" "+clause, c.Value) + } else { + f.addWhere(column + " " + clause) + } + } + } + } + } +} + +func intCriterionHandler(c *models.IntCriterionInput, column string) criterionHandlerFunc { + return func(f *filterBuilder) { + if c != nil { + clause, count := getIntCriterionWhereClause(column, *c) + + if count == 1 { + f.addWhere(clause, c.Value) + } else { + f.addWhere(clause) + } + } + } +} + +func boolCriterionHandler(c *bool, column string) criterionHandlerFunc { + return func(f *filterBuilder) { + if c != nil { + var v string + if *c { + v = "1" + } else { + v = "0" + } + + f.addWhere(column + " = " + v) + } + } +} + +func stringLiteralCriterionHandler(v *string, column string) criterionHandlerFunc { + return func(f *filterBuilder) { + if v != nil { + f.addWhere(column+" = ?", v) + } + } +} + +type multiCriterionHandlerBuilder struct { + primaryTable string + foreignTable string + joinTable string + primaryFK string + foreignFK string + + // function that will be called to perform any necessary joins + addJoinsFunc func(f *filterBuilder) +} + +func (m *multiCriterionHandlerBuilder) handler(criterion *models.MultiCriterionInput) criterionHandlerFunc { + return func(f *filterBuilder) { + if criterion != nil && len(criterion.Value) > 0 { + var args []interface{} + for _, tagID := range criterion.Value { + args = append(args, tagID) + } + + if m.addJoinsFunc != nil { + m.addJoinsFunc(f) + } + + whereClause, havingClause := getMultiCriterionClause(m.primaryTable, m.foreignTable, m.joinTable, m.primaryFK, m.foreignFK, criterion) + f.addWhere(whereClause, args...) 
+ f.addHaving(havingClause) + } + } +} diff --git a/pkg/sqlite/filter_internal_test.go b/pkg/sqlite/filter_internal_test.go new file mode 100644 index 000000000..302aff0db --- /dev/null +++ b/pkg/sqlite/filter_internal_test.go @@ -0,0 +1,614 @@ +package sqlite + +import ( + "errors" + "fmt" + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stretchr/testify/assert" +) + +func TestFilterBuilderAnd(t *testing.T) { + assert := assert.New(t) + + f := &filterBuilder{} + other := &filterBuilder{} + newBuilder := &filterBuilder{} + + // and should set the subFilter + f.and(other) + assert.Equal(other, f.subFilter) + assert.Nil(f.getError()) + + // and should set error if and is set + f.and(newBuilder) + assert.Equal(other, f.subFilter) + assert.Equal(errSubFilterAlreadySet, f.getError()) + + // and should set error if or is set + // and should not set subFilter if or is set + f = &filterBuilder{} + f.or(other) + f.and(newBuilder) + assert.Equal(other, f.subFilter) + assert.Equal(errSubFilterAlreadySet, f.getError()) + + // and should set error if not is set + // and should not set subFilter if not is set + f = &filterBuilder{} + f.not(other) + f.and(newBuilder) + assert.Equal(other, f.subFilter) + assert.Equal(errSubFilterAlreadySet, f.getError()) +} + +func TestFilterBuilderOr(t *testing.T) { + assert := assert.New(t) + + f := &filterBuilder{} + other := &filterBuilder{} + newBuilder := &filterBuilder{} + + // or should set the orFilter + f.or(other) + assert.Equal(other, f.subFilter) + assert.Nil(f.getError()) + + // or should set error if or is set + f.or(newBuilder) + assert.Equal(newBuilder, f.subFilter) + assert.Equal(errSubFilterAlreadySet, f.getError()) + + // or should set error if and is set + // or should not set subFilter if and is set + f = &filterBuilder{} + f.and(other) + f.or(newBuilder) + assert.Equal(other, f.subFilter) + assert.Equal(errSubFilterAlreadySet, f.getError()) + + // or should set error if not is set + // or should not set subFilter if not is set + f = &filterBuilder{} + f.not(other) + f.or(newBuilder) + assert.Equal(other, f.subFilter) + assert.Equal(errSubFilterAlreadySet, f.getError()) +} + +func TestFilterBuilderNot(t *testing.T) { + assert := assert.New(t) + + f := &filterBuilder{} + other := &filterBuilder{} + newBuilder := &filterBuilder{} + + // not should set the subFilter + f.not(other) + // ensure and filter is set + assert.Equal(other, f.subFilter) + assert.Nil(f.getError()) + + // not should set error if not is set + f.not(newBuilder) + assert.Equal(newBuilder, f.subFilter) + assert.Equal(errSubFilterAlreadySet, f.getError()) + + // not should set error if and is set + // not should not set subFilter if and is set + f = &filterBuilder{} + f.and(other) + f.not(newBuilder) + assert.Equal(other, f.subFilter) + assert.Equal(errSubFilterAlreadySet, f.getError()) + + // not should set error if or is set + // not should not set subFilter if or is set + f = &filterBuilder{} + f.or(other) + f.not(newBuilder) + assert.Equal(other, f.subFilter) + assert.Equal(errSubFilterAlreadySet, f.getError()) +} + +func TestAddJoin(t *testing.T) { + assert := assert.New(t) + + f := &filterBuilder{} + + const ( + table1Name = "table1Name" + table2Name = "table2Name" + + as1Name = "as1" + as2Name = "as2" + + onClause = "onClause1" + ) + + f.addJoin(table1Name, as1Name, onClause) + + // ensure join is added + assert.Len(f.joins, 1) + assert.Equal(fmt.Sprintf("LEFT JOIN %s AS %s ON %s", table1Name, as1Name, onClause), f.joins[0].toSQL()) + + // ensure join with 
same as is not added + f.addJoin(table2Name, as1Name, onClause) + assert.Len(f.joins, 1) + + // ensure same table with different alias can be added + f.addJoin(table1Name, as2Name, onClause) + assert.Len(f.joins, 2) + assert.Equal(fmt.Sprintf("LEFT JOIN %s AS %s ON %s", table1Name, as2Name, onClause), f.joins[1].toSQL()) + + // ensure table without alias can be added if tableName != existing alias/tableName + f.addJoin(table1Name, "", onClause) + assert.Len(f.joins, 3) + assert.Equal(fmt.Sprintf("LEFT JOIN %s ON %s", table1Name, onClause), f.joins[2].toSQL()) + + // ensure table with alias == table name of a join without alias is not added + f.addJoin(table2Name, table1Name, onClause) + assert.Len(f.joins, 3) + + // ensure table without alias cannot be added if tableName == existing alias + f.addJoin(as2Name, "", onClause) + assert.Len(f.joins, 3) + + // ensure AS is not used if same as table name + f.addJoin(table2Name, table2Name, onClause) + assert.Len(f.joins, 4) + assert.Equal(fmt.Sprintf("LEFT JOIN %s ON %s", table2Name, onClause), f.joins[3].toSQL()) +} + +func TestAddWhere(t *testing.T) { + assert := assert.New(t) + + f := &filterBuilder{} + + // ensure empty sql adds nothing + f.addWhere("") + assert.Len(f.whereClauses, 0) + + const whereClause = "a = b" + var args = []interface{}{"1", "2"} + + // ensure addWhere sets where clause and args + f.addWhere(whereClause, args...) + assert.Len(f.whereClauses, 1) + assert.Equal(whereClause, f.whereClauses[0].sql) + assert.Equal(args, f.whereClauses[0].args) + + // ensure addWhere without args sets where clause + f.addWhere(whereClause) + assert.Len(f.whereClauses, 2) + assert.Equal(whereClause, f.whereClauses[1].sql) + assert.Len(f.whereClauses[1].args, 0) +} + +func TestAddHaving(t *testing.T) { + assert := assert.New(t) + + f := &filterBuilder{} + + // ensure empty sql adds nothing + f.addHaving("") + assert.Len(f.havingClauses, 0) + + const havingClause = "a = b" + var args = []interface{}{"1", "2"} + + // ensure addWhere sets where clause and args + f.addHaving(havingClause, args...) 
+ assert.Len(f.havingClauses, 1) + assert.Equal(havingClause, f.havingClauses[0].sql) + assert.Equal(args, f.havingClauses[0].args) + + // ensure addWhere without args sets where clause + f.addHaving(havingClause) + assert.Len(f.havingClauses, 2) + assert.Equal(havingClause, f.havingClauses[1].sql) + assert.Len(f.havingClauses[1].args, 0) +} + +func TestGenerateWhereClauses(t *testing.T) { + assert := assert.New(t) + + f := &filterBuilder{} + + const clause1 = "a = 1" + const clause2 = "b = 2" + const clause3 = "c = 3" + + const arg1 = "1" + const arg2 = "2" + const arg3 = "3" + + // ensure single where clause is generated correctly + f.addWhere(clause1) + r, rArgs := f.generateWhereClauses() + assert.Equal(clause1, r) + assert.Len(rArgs, 0) + + // ensure multiple where clauses are surrounded with parenthesis and + // ANDed together + f.addWhere(clause2, arg1, arg2) + r, rArgs = f.generateWhereClauses() + assert.Equal(fmt.Sprintf("(%s AND %s)", clause1, clause2), r) + assert.Len(rArgs, 2) + + // ensure empty subfilter is not added to generated where clause + sf := &filterBuilder{} + f.and(sf) + + r, rArgs = f.generateWhereClauses() + assert.Equal(fmt.Sprintf("(%s AND %s)", clause1, clause2), r) + assert.Len(rArgs, 2) + + // ensure sub-filter is generated correctly + sf.addWhere(clause3, arg3) + r, rArgs = f.generateWhereClauses() + assert.Equal(fmt.Sprintf("(%s AND %s) AND (%s)", clause1, clause2, clause3), r) + assert.Len(rArgs, 3) + + // ensure OR sub-filter is generated correctly + f = &filterBuilder{} + f.addWhere(clause1) + f.addWhere(clause2, arg1, arg2) + f.or(sf) + + r, rArgs = f.generateWhereClauses() + assert.Equal(fmt.Sprintf("(%s AND %s) OR (%s)", clause1, clause2, clause3), r) + assert.Len(rArgs, 3) + + // ensure NOT sub-filter is generated correctly + f = &filterBuilder{} + f.addWhere(clause1) + f.addWhere(clause2, arg1, arg2) + f.not(sf) + + r, rArgs = f.generateWhereClauses() + assert.Equal(fmt.Sprintf("(%s AND %s) AND NOT (%s)", clause1, clause2, clause3), r) + assert.Len(rArgs, 3) + + // ensure empty filter with ANDed sub-filter does not include AND + f = &filterBuilder{} + f.and(sf) + + r, rArgs = f.generateWhereClauses() + assert.Equal(fmt.Sprintf("(%s)", clause3), r) + assert.Len(rArgs, 1) + + // ensure empty filter with ORed sub-filter does not include OR + f = &filterBuilder{} + f.or(sf) + + r, rArgs = f.generateWhereClauses() + assert.Equal(fmt.Sprintf("(%s)", clause3), r) + assert.Len(rArgs, 1) + + // ensure empty filter with NOTed sub-filter does not include AND + f = &filterBuilder{} + f.not(sf) + + r, rArgs = f.generateWhereClauses() + assert.Equal(fmt.Sprintf("NOT (%s)", clause3), r) + assert.Len(rArgs, 1) + + // (clause1) AND ((clause2) OR (clause3)) + f = &filterBuilder{} + f.addWhere(clause1) + sf2 := &filterBuilder{} + sf2.addWhere(clause2, arg1, arg2) + f.and(sf2) + sf2.or(sf) + r, rArgs = f.generateWhereClauses() + assert.Equal(fmt.Sprintf("%s AND (%s OR (%s))", clause1, clause2, clause3), r) + assert.Len(rArgs, 3) +} + +func TestGenerateHavingClauses(t *testing.T) { + assert := assert.New(t) + + f := &filterBuilder{} + + const clause1 = "a = 1" + const clause2 = "b = 2" + const clause3 = "c = 3" + + const arg1 = "1" + const arg2 = "2" + const arg3 = "3" + + // ensure single Having clause is generated correctly + f.addHaving(clause1) + r, rArgs := f.generateHavingClauses() + assert.Equal(clause1, r) + assert.Len(rArgs, 0) + + // ensure multiple Having clauses are surrounded with parenthesis and + // ANDed together + f.addHaving(clause2, arg1, arg2) + r, 
rArgs = f.generateHavingClauses() + assert.Equal("("+clause1+" AND "+clause2+")", r) + assert.Len(rArgs, 2) + + // ensure empty subfilter is not added to generated Having clause + sf := &filterBuilder{} + f.and(sf) + + r, rArgs = f.generateHavingClauses() + assert.Equal("("+clause1+" AND "+clause2+")", r) + assert.Len(rArgs, 2) + + // ensure sub-filter is generated correctly + sf.addHaving(clause3, arg3) + r, rArgs = f.generateHavingClauses() + assert.Equal("("+clause1+" AND "+clause2+") AND ("+clause3+")", r) + assert.Len(rArgs, 3) + + // ensure OR sub-filter is generated correctly + f = &filterBuilder{} + f.addHaving(clause1) + f.addHaving(clause2, arg1, arg2) + f.or(sf) + + r, rArgs = f.generateHavingClauses() + assert.Equal("("+clause1+" AND "+clause2+") OR ("+clause3+")", r) + assert.Len(rArgs, 3) + + // ensure NOT sub-filter is generated correctly + f = &filterBuilder{} + f.addHaving(clause1) + f.addHaving(clause2, arg1, arg2) + f.not(sf) + + r, rArgs = f.generateHavingClauses() + assert.Equal("("+clause1+" AND "+clause2+") AND NOT ("+clause3+")", r) + assert.Len(rArgs, 3) +} + +func TestGetAllJoins(t *testing.T) { + assert := assert.New(t) + f := &filterBuilder{} + + const ( + table1Name = "table1Name" + table2Name = "table2Name" + + as1Name = "as1" + as2Name = "as2" + + onClause = "onClause1" + ) + + f.addJoin(table1Name, as1Name, onClause) + + // ensure join is returned + joins := f.getAllJoins() + assert.Len(joins, 1) + assert.Equal(fmt.Sprintf("LEFT JOIN %s AS %s ON %s", table1Name, as1Name, onClause), joins[0].toSQL()) + + // ensure joins in sub-filter are returned + subFilter := &filterBuilder{} + f.and(subFilter) + subFilter.addJoin(table2Name, as2Name, onClause) + + joins = f.getAllJoins() + assert.Len(joins, 2) + assert.Equal(fmt.Sprintf("LEFT JOIN %s AS %s ON %s", table2Name, as2Name, onClause), joins[1].toSQL()) + + // ensure redundant joins are not returned + subFilter.addJoin(as1Name, "", onClause) + joins = f.getAllJoins() + assert.Len(joins, 2) +} + +func TestGetError(t *testing.T) { + assert := assert.New(t) + f := &filterBuilder{} + subFilter := &filterBuilder{} + + f.and(subFilter) + + expectedErr := errors.New("test error") + expectedErr2 := errors.New("test error2") + f.err = expectedErr + subFilter.err = expectedErr2 + + // ensure getError returns the top-level error state + assert.Equal(expectedErr, f.getError()) + + // ensure getError returns sub-filter error state if top-level error + // is nil + f.err = nil + assert.Equal(expectedErr2, f.getError()) + + // ensure getError returns nil if all error states are nil + subFilter.err = nil + assert.Nil(f.getError()) +} + +func TestStringCriterionHandlerIncludes(t *testing.T) { + assert := assert.New(t) + + const column = "column" + const value1 = "two words" + const quotedValue = `"two words"` + + f := &filterBuilder{} + f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{ + Modifier: models.CriterionModifierIncludes, + Value: value1, + }, column)) + + assert.Len(f.whereClauses, 1) + assert.Equal(fmt.Sprintf("(%[1]s LIKE ? 
OR %[1]s LIKE ?)", column), f.whereClauses[0].sql) + assert.Len(f.whereClauses[0].args, 2) + assert.Equal("%two%", f.whereClauses[0].args[0]) + assert.Equal("%words%", f.whereClauses[0].args[1]) + + f = &filterBuilder{} + f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{ + Modifier: models.CriterionModifierIncludes, + Value: quotedValue, + }, column)) + + assert.Len(f.whereClauses, 1) + assert.Equal(fmt.Sprintf("(%[1]s LIKE ?)", column), f.whereClauses[0].sql) + assert.Len(f.whereClauses[0].args, 1) + assert.Equal("%two words%", f.whereClauses[0].args[0]) +} + +func TestStringCriterionHandlerExcludes(t *testing.T) { + assert := assert.New(t) + + const column = "column" + const value1 = "two words" + const quotedValue = `"two words"` + + f := &filterBuilder{} + f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{ + Modifier: models.CriterionModifierExcludes, + Value: value1, + }, column)) + + assert.Len(f.whereClauses, 1) + assert.Equal(fmt.Sprintf("(%[1]s NOT LIKE ? AND %[1]s NOT LIKE ?)", column), f.whereClauses[0].sql) + assert.Len(f.whereClauses[0].args, 2) + assert.Equal("%two%", f.whereClauses[0].args[0]) + assert.Equal("%words%", f.whereClauses[0].args[1]) + + f = &filterBuilder{} + f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{ + Modifier: models.CriterionModifierExcludes, + Value: quotedValue, + }, column)) + + assert.Len(f.whereClauses, 1) + assert.Equal(fmt.Sprintf("(%[1]s NOT LIKE ?)", column), f.whereClauses[0].sql) + assert.Len(f.whereClauses[0].args, 1) + assert.Equal("%two words%", f.whereClauses[0].args[0]) +} + +func TestStringCriterionHandlerEquals(t *testing.T) { + assert := assert.New(t) + + const column = "column" + const value1 = "two words" + + f := &filterBuilder{} + f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: value1, + }, column)) + + assert.Len(f.whereClauses, 1) + assert.Equal(fmt.Sprintf("%[1]s LIKE ?", column), f.whereClauses[0].sql) + assert.Len(f.whereClauses[0].args, 1) + assert.Equal(value1, f.whereClauses[0].args[0]) +} + +func TestStringCriterionHandlerNotEquals(t *testing.T) { + assert := assert.New(t) + + const column = "column" + const value1 = "two words" + + f := &filterBuilder{} + f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{ + Modifier: models.CriterionModifierNotEquals, + Value: value1, + }, column)) + + assert.Len(f.whereClauses, 1) + assert.Equal(fmt.Sprintf("%[1]s NOT LIKE ?", column), f.whereClauses[0].sql) + assert.Len(f.whereClauses[0].args, 1) + assert.Equal(value1, f.whereClauses[0].args[0]) +} + +func TestStringCriterionHandlerMatchesRegex(t *testing.T) { + assert := assert.New(t) + + const column = "column" + const validValue = "two words" + const invalidValue = "*two words" + + f := &filterBuilder{} + f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{ + Modifier: models.CriterionModifierMatchesRegex, + Value: validValue, + }, column)) + + assert.Len(f.whereClauses, 1) + assert.Equal(fmt.Sprintf("(%s IS NOT NULL AND %[1]s regexp ?)", column), f.whereClauses[0].sql) + assert.Len(f.whereClauses[0].args, 1) + assert.Equal(validValue, f.whereClauses[0].args[0]) + + // ensure invalid regex sets error state + f = &filterBuilder{} + f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{ + Modifier: models.CriterionModifierMatchesRegex, + Value: invalidValue, + }, column)) + + assert.NotNil(f.getError()) +} + +func 
TestStringCriterionHandlerNotMatchesRegex(t *testing.T) { + assert := assert.New(t) + + const column = "column" + const validValue = "two words" + const invalidValue = "*two words" + + f := &filterBuilder{} + f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{ + Modifier: models.CriterionModifierNotMatchesRegex, + Value: validValue, + }, column)) + + assert.Len(f.whereClauses, 1) + assert.Equal(fmt.Sprintf("(%s IS NULL OR %[1]s NOT regexp ?)", column), f.whereClauses[0].sql) + assert.Len(f.whereClauses[0].args, 1) + assert.Equal(validValue, f.whereClauses[0].args[0]) + + // ensure invalid regex sets error state + f = &filterBuilder{} + f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{ + Modifier: models.CriterionModifierNotMatchesRegex, + Value: invalidValue, + }, column)) + + assert.NotNil(f.getError()) +} + +func TestStringCriterionHandlerIsNull(t *testing.T) { + assert := assert.New(t) + + const column = "column" + + f := &filterBuilder{} + f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, column)) + + assert.Len(f.whereClauses, 1) + assert.Equal(fmt.Sprintf("%[1]s IS NULL", column), f.whereClauses[0].sql) + assert.Len(f.whereClauses[0].args, 0) +} + +func TestStringCriterionHandlerNotNull(t *testing.T) { + assert := assert.New(t) + + const column = "column" + + f := &filterBuilder{} + f.handleCriterionFunc(stringCriterionHandler(&models.StringCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, column)) + + assert.Len(f.whereClauses, 1) + assert.Equal(fmt.Sprintf("%[1]s IS NOT NULL", column), f.whereClauses[0].sql) + assert.Len(f.whereClauses[0].args, 0) +} diff --git a/pkg/sqlite/gallery.go b/pkg/sqlite/gallery.go index a9b0f7d6c..2f2e8fef7 100644 --- a/pkg/sqlite/gallery.go +++ b/pkg/sqlite/gallery.go @@ -233,7 +233,7 @@ func (qb *galleryQueryBuilder) Query(galleryFilter *models.GalleryFilterType, fi } query.body += " LEFT JOIN tags on tags_join.tag_id = tags.id" - whereClause, havingClause := getMultiCriterionClause("galleries", "tags", "tags_join", "gallery_id", "tag_id", tagsFilter) + whereClause, havingClause := getMultiCriterionClause("galleries", "tags", "galleries_tags", "gallery_id", "tag_id", tagsFilter) query.addWhere(whereClause) query.addHaving(havingClause) } @@ -244,7 +244,7 @@ func (qb *galleryQueryBuilder) Query(galleryFilter *models.GalleryFilterType, fi } query.body += " LEFT JOIN performers ON performers_join.performer_id = performers.id" - whereClause, havingClause := getMultiCriterionClause("galleries", "performers", "performers_join", "gallery_id", "performer_id", performersFilter) + whereClause, havingClause := getMultiCriterionClause("galleries", "performers", "performers_galleries", "gallery_id", "performer_id", performersFilter) query.addWhere(whereClause) query.addHaving(havingClause) } @@ -259,6 +259,8 @@ func (qb *galleryQueryBuilder) Query(galleryFilter *models.GalleryFilterType, fi query.addHaving(havingClause) } + handleGalleryPerformerTagsCriterion(&query, galleryFilter.PerformerTags) + query.sortAndPagination = qb.getGallerySort(findFilter) + getPagination(findFilter) idsResult, countResult, err := query.executeFind() if err != nil { @@ -344,6 +346,31 @@ func (qb *galleryQueryBuilder) handleAverageResolutionFilter(query *queryBuilder } } +func handleGalleryPerformerTagsCriterion(query *queryBuilder, performerTagsFilter *models.MultiCriterionInput) { + if performerTagsFilter != nil && len(performerTagsFilter.Value) > 0 
{ + for _, tagID := range performerTagsFilter.Value { + query.addArg(tagID) + } + + query.body += " LEFT JOIN performers_tags AS performer_tags_join on performers_join.performer_id = performer_tags_join.performer_id" + + if performerTagsFilter.Modifier == models.CriterionModifierIncludes { + // includes any of the provided ids + query.addWhere("performer_tags_join.tag_id IN " + getInBinding(len(performerTagsFilter.Value))) + } else if performerTagsFilter.Modifier == models.CriterionModifierIncludesAll { + // includes all of the provided ids + query.addWhere("performer_tags_join.tag_id IN " + getInBinding(len(performerTagsFilter.Value))) + query.addHaving(fmt.Sprintf("count(distinct performer_tags_join.tag_id) IS %d", len(performerTagsFilter.Value))) + } else if performerTagsFilter.Modifier == models.CriterionModifierExcludes { + query.addWhere(fmt.Sprintf(`not exists + (select performers_galleries.performer_id from performers_galleries + left join performers_tags on performers_tags.performer_id = performers_galleries.performer_id where + performers_galleries.gallery_id = galleries.id AND + performers_tags.tag_id in %s)`, getInBinding(len(performerTagsFilter.Value)))) + } + } +} + func (qb *galleryQueryBuilder) getGallerySort(findFilter *models.FindFilterType) string { var sort string var direction string diff --git a/pkg/sqlite/gallery_test.go b/pkg/sqlite/gallery_test.go index c06ff19a2..f34328ecd 100644 --- a/pkg/sqlite/gallery_test.go +++ b/pkg/sqlite/gallery_test.go @@ -3,6 +3,7 @@ package sqlite_test import ( + "strconv" "testing" "github.com/stretchr/testify/assert" @@ -272,6 +273,307 @@ func TestGalleryQueryIsMissingScene(t *testing.T) { }) } +func queryGallery(t *testing.T, sqb models.GalleryReader, galleryFilter *models.GalleryFilterType, findFilter *models.FindFilterType) []*models.Gallery { + galleries, _, err := sqb.Query(galleryFilter, findFilter) + if err != nil { + t.Errorf("Error querying gallery: %s", err.Error()) + } + + return galleries +} + +func TestGalleryQueryIsMissingStudio(t *testing.T) { + withTxn(func(r models.Repository) error { + sqb := r.Gallery() + isMissing := "studio" + galleryFilter := models.GalleryFilterType{ + IsMissing: &isMissing, + } + + q := getGalleryStringValue(galleryIdxWithStudio, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + galleries := queryGallery(t, sqb, &galleryFilter, &findFilter) + + assert.Len(t, galleries, 0) + + findFilter.Q = nil + galleries = queryGallery(t, sqb, &galleryFilter, &findFilter) + + // ensure non of the ids equal the one with studio + for _, gallery := range galleries { + assert.NotEqual(t, galleryIDs[galleryIdxWithStudio], gallery.ID) + } + + return nil + }) +} + +func TestGalleryQueryIsMissingPerformers(t *testing.T) { + withTxn(func(r models.Repository) error { + sqb := r.Gallery() + isMissing := "performers" + galleryFilter := models.GalleryFilterType{ + IsMissing: &isMissing, + } + + q := getGalleryStringValue(galleryIdxWithPerformer, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + galleries := queryGallery(t, sqb, &galleryFilter, &findFilter) + + assert.Len(t, galleries, 0) + + findFilter.Q = nil + galleries = queryGallery(t, sqb, &galleryFilter, &findFilter) + + assert.True(t, len(galleries) > 0) + + // ensure non of the ids equal the one with movies + for _, gallery := range galleries { + assert.NotEqual(t, galleryIDs[galleryIdxWithPerformer], gallery.ID) + } + + return nil + }) +} + +func TestGalleryQueryIsMissingTags(t *testing.T) { + withTxn(func(r models.Repository) 
error { + sqb := r.Gallery() + isMissing := "tags" + galleryFilter := models.GalleryFilterType{ + IsMissing: &isMissing, + } + + q := getGalleryStringValue(galleryIdxWithTwoTags, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + galleries := queryGallery(t, sqb, &galleryFilter, &findFilter) + + assert.Len(t, galleries, 0) + + findFilter.Q = nil + galleries = queryGallery(t, sqb, &galleryFilter, &findFilter) + + assert.True(t, len(galleries) > 0) + + return nil + }) +} + +func TestGalleryQueryPerformers(t *testing.T) { + withTxn(func(r models.Repository) error { + sqb := r.Gallery() + performerCriterion := models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(performerIDs[performerIdxWithGallery]), + strconv.Itoa(performerIDs[performerIdx1WithGallery]), + }, + Modifier: models.CriterionModifierIncludes, + } + + galleryFilter := models.GalleryFilterType{ + Performers: &performerCriterion, + } + + galleries := queryGallery(t, sqb, &galleryFilter, nil) + + assert.Len(t, galleries, 2) + + // ensure ids are correct + for _, gallery := range galleries { + assert.True(t, gallery.ID == galleryIDs[galleryIdxWithPerformer] || gallery.ID == galleryIDs[galleryIdxWithTwoPerformers]) + } + + performerCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(performerIDs[performerIdx1WithGallery]), + strconv.Itoa(performerIDs[performerIdx2WithGallery]), + }, + Modifier: models.CriterionModifierIncludesAll, + } + + galleries = queryGallery(t, sqb, &galleryFilter, nil) + + assert.Len(t, galleries, 1) + assert.Equal(t, galleryIDs[galleryIdxWithTwoPerformers], galleries[0].ID) + + performerCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(performerIDs[performerIdx1WithGallery]), + }, + Modifier: models.CriterionModifierExcludes, + } + + q := getGalleryStringValue(galleryIdxWithTwoPerformers, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + galleries = queryGallery(t, sqb, &galleryFilter, &findFilter) + assert.Len(t, galleries, 0) + + return nil + }) +} + +func TestGalleryQueryTags(t *testing.T) { + withTxn(func(r models.Repository) error { + sqb := r.Gallery() + tagCriterion := models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithGallery]), + strconv.Itoa(tagIDs[tagIdx1WithGallery]), + }, + Modifier: models.CriterionModifierIncludes, + } + + galleryFilter := models.GalleryFilterType{ + Tags: &tagCriterion, + } + + galleries := queryGallery(t, sqb, &galleryFilter, nil) + assert.Len(t, galleries, 2) + + // ensure ids are correct + for _, gallery := range galleries { + assert.True(t, gallery.ID == galleryIDs[galleryIdxWithTag] || gallery.ID == galleryIDs[galleryIdxWithTwoTags]) + } + + tagCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithGallery]), + strconv.Itoa(tagIDs[tagIdx2WithGallery]), + }, + Modifier: models.CriterionModifierIncludesAll, + } + + galleries = queryGallery(t, sqb, &galleryFilter, nil) + + assert.Len(t, galleries, 1) + assert.Equal(t, galleryIDs[galleryIdxWithTwoTags], galleries[0].ID) + + tagCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithGallery]), + }, + Modifier: models.CriterionModifierExcludes, + } + + q := getGalleryStringValue(galleryIdxWithTwoTags, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + galleries = queryGallery(t, sqb, &galleryFilter, &findFilter) + assert.Len(t, galleries, 0) + + return nil + }) +} + +func TestGalleryQueryStudio(t *testing.T) { + withTxn(func(r 
models.Repository) error { + sqb := r.Gallery() + studioCriterion := models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithGallery]), + }, + Modifier: models.CriterionModifierIncludes, + } + + galleryFilter := models.GalleryFilterType{ + Studios: &studioCriterion, + } + + galleries := queryGallery(t, sqb, &galleryFilter, nil) + + assert.Len(t, galleries, 1) + + // ensure id is correct + assert.Equal(t, galleryIDs[galleryIdxWithStudio], galleries[0].ID) + + studioCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithGallery]), + }, + Modifier: models.CriterionModifierExcludes, + } + + q := getGalleryStringValue(galleryIdxWithStudio, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + galleries = queryGallery(t, sqb, &galleryFilter, &findFilter) + assert.Len(t, galleries, 0) + + return nil + }) +} + +func TestGalleryQueryPerformerTags(t *testing.T) { + withTxn(func(r models.Repository) error { + sqb := r.Gallery() + tagCriterion := models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithPerformer]), + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + }, + Modifier: models.CriterionModifierIncludes, + } + + galleryFilter := models.GalleryFilterType{ + PerformerTags: &tagCriterion, + } + + galleries := queryGallery(t, sqb, &galleryFilter, nil) + assert.Len(t, galleries, 2) + + // ensure ids are correct + for _, gallery := range galleries { + assert.True(t, gallery.ID == galleryIDs[galleryIdxWithPerformerTag] || gallery.ID == galleryIDs[galleryIdxWithPerformerTwoTags]) + } + + tagCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + strconv.Itoa(tagIDs[tagIdx2WithPerformer]), + }, + Modifier: models.CriterionModifierIncludesAll, + } + + galleries = queryGallery(t, sqb, &galleryFilter, nil) + + assert.Len(t, galleries, 1) + assert.Equal(t, galleryIDs[galleryIdxWithPerformerTwoTags], galleries[0].ID) + + tagCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + }, + Modifier: models.CriterionModifierExcludes, + } + + q := getGalleryStringValue(galleryIdxWithPerformerTwoTags, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + galleries = queryGallery(t, sqb, &galleryFilter, &findFilter) + assert.Len(t, galleries, 0) + + return nil + }) +} + // TODO Count // TODO All // TODO Query diff --git a/pkg/sqlite/image.go b/pkg/sqlite/image.go index 48123786b..adb23004e 100644 --- a/pkg/sqlite/image.go +++ b/pkg/sqlite/image.go @@ -297,7 +297,7 @@ func (qb *imageQueryBuilder) Query(imageFilter *models.ImageFilterType, findFilt case "SIX_K": query.addWhere("(MIN(images.height, images.width) >= 3384 AND MIN(images.height, images.width) < 4320)") case "EIGHT_K": - query.addWhere("(MIN(images.height, images.width) >= 4320") + query.addWhere("MIN(images.height, images.width) >= 4320") } } } @@ -360,6 +360,8 @@ func (qb *imageQueryBuilder) Query(imageFilter *models.ImageFilterType, findFilt query.addHaving(havingClause) } + handleImagePerformerTagsCriterion(&query, imageFilter.PerformerTags) + query.sortAndPagination = qb.getImageSort(findFilter) + getPagination(findFilter) idsResult, countResult, err := query.executeFind() if err != nil { @@ -379,6 +381,31 @@ func (qb *imageQueryBuilder) Query(imageFilter *models.ImageFilterType, findFilt return images, countResult, nil } +func handleImagePerformerTagsCriterion(query *queryBuilder, performerTagsFilter *models.MultiCriterionInput) { + 
if performerTagsFilter != nil && len(performerTagsFilter.Value) > 0 { + for _, tagID := range performerTagsFilter.Value { + query.addArg(tagID) + } + + query.body += " LEFT JOIN performers_tags AS performer_tags_join on performers_join.performer_id = performer_tags_join.performer_id" + + if performerTagsFilter.Modifier == models.CriterionModifierIncludes { + // includes any of the provided ids + query.addWhere("performer_tags_join.tag_id IN " + getInBinding(len(performerTagsFilter.Value))) + } else if performerTagsFilter.Modifier == models.CriterionModifierIncludesAll { + // includes all of the provided ids + query.addWhere("performer_tags_join.tag_id IN " + getInBinding(len(performerTagsFilter.Value))) + query.addHaving(fmt.Sprintf("count(distinct performer_tags_join.tag_id) IS %d", len(performerTagsFilter.Value))) + } else if performerTagsFilter.Modifier == models.CriterionModifierExcludes { + query.addWhere(fmt.Sprintf(`not exists + (select performers_images.performer_id from performers_images + left join performers_tags on performers_tags.performer_id = performers_images.performer_id where + performers_images.image_id = images.id AND + performers_tags.tag_id in %s)`, getInBinding(len(performerTagsFilter.Value)))) + } + } +} + func (qb *imageQueryBuilder) getImageSort(findFilter *models.FindFilterType) string { if findFilter == nil { return " ORDER BY images.path ASC " diff --git a/pkg/sqlite/image_test.go b/pkg/sqlite/image_test.go index 40f4c2a9f..36077739b 100644 --- a/pkg/sqlite/image_test.go +++ b/pkg/sqlite/image_test.go @@ -619,6 +619,70 @@ func TestImageQueryStudio(t *testing.T) { }) } +func queryImages(t *testing.T, sqb models.ImageReader, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) []*models.Image { + images, _, err := sqb.Query(imageFilter, findFilter) + if err != nil { + t.Errorf("Error querying images: %s", err.Error()) + } + + return images +} + +func TestImageQueryPerformerTags(t *testing.T) { + withTxn(func(r models.Repository) error { + sqb := r.Image() + tagCriterion := models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithPerformer]), + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + }, + Modifier: models.CriterionModifierIncludes, + } + + imageFilter := models.ImageFilterType{ + PerformerTags: &tagCriterion, + } + + images := queryImages(t, sqb, &imageFilter, nil) + assert.Len(t, images, 2) + + // ensure ids are correct + for _, image := range images { + assert.True(t, image.ID == imageIDs[imageIdxWithPerformerTag] || image.ID == imageIDs[imageIdxWithPerformerTwoTags]) + } + + tagCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + strconv.Itoa(tagIDs[tagIdx2WithPerformer]), + }, + Modifier: models.CriterionModifierIncludesAll, + } + + images = queryImages(t, sqb, &imageFilter, nil) + + assert.Len(t, images, 1) + assert.Equal(t, imageIDs[imageIdxWithPerformerTwoTags], images[0].ID) + + tagCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + }, + Modifier: models.CriterionModifierExcludes, + } + + q := getImageStringValue(imageIdxWithPerformerTwoTags, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + images = queryImages(t, sqb, &imageFilter, &findFilter) + assert.Len(t, images, 0) + + return nil + }) +} + func TestImageQuerySorting(t *testing.T) { withTxn(func(r models.Repository) error { sort := titleField diff --git a/pkg/sqlite/movies.go b/pkg/sqlite/movies.go index 40565aeac..567c8c1b7 
100644 --- a/pkg/sqlite/movies.go +++ b/pkg/sqlite/movies.go @@ -114,10 +114,6 @@ func (qb *movieQueryBuilder) All() ([]*models.Movie, error) { return qb.queryMovies(selectAll("movies")+qb.getMovieSort(nil), nil) } -func (qb *movieQueryBuilder) AllSlim() ([]*models.Movie, error) { - return qb.queryMovies("SELECT movies.id, movies.name FROM movies "+qb.getMovieSort(nil), nil) -} - func (qb *movieQueryBuilder) Query(movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) ([]*models.Movie, int, error) { if findFilter == nil { findFilter = &models.FindFilterType{} diff --git a/pkg/sqlite/performer.go b/pkg/sqlite/performer.go index 078b46e4a..cc85da887 100644 --- a/pkg/sqlite/performer.go +++ b/pkg/sqlite/performer.go @@ -11,6 +11,13 @@ import ( const performerTable = "performers" const performerIDColumn = "performer_id" +const performersTagsTable = "performers_tags" + +var countPerformersForTagQuery = ` +SELECT tag_id AS id FROM performers_tags +WHERE performers_tags.tag_id = ? +GROUP BY performers_tags.performer_id +` type performerQueryBuilder struct { repository @@ -153,6 +160,11 @@ func (qb *performerQueryBuilder) FindByNames(names []string, nocase bool) ([]*mo return qb.queryPerformers(query, args) } +func (qb *performerQueryBuilder) CountByTagID(tagID int) (int, error) { + args := []interface{}{tagID} + return qb.runCountQuery(qb.buildCountQuery(countPerformersForTagQuery), args) +} + func (qb *performerQueryBuilder) Count() (int, error) { return qb.runCountQuery(qb.buildCountQuery("SELECT performers.id FROM performers"), nil) } @@ -161,10 +173,6 @@ func (qb *performerQueryBuilder) All() ([]*models.Performer, error) { return qb.queryPerformers(selectAll("performers")+qb.getPerformerSort(nil), nil) } -func (qb *performerQueryBuilder) AllSlim() ([]*models.Performer, error) { - return qb.queryPerformers("SELECT performers.id, performers.name, performers.gender FROM performers "+qb.getPerformerSort(nil), nil) -} - func (qb *performerQueryBuilder) Query(performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) { if performerFilter == nil { performerFilter = &models.PerformerFilterType{} @@ -250,6 +258,18 @@ func (qb *performerQueryBuilder) Query(performerFilter *models.PerformerFilterTy // TODO - need better handling of aliases query.handleStringCriterionInput(performerFilter.Aliases, tableName+".aliases") + if tagsFilter := performerFilter.Tags; tagsFilter != nil && len(tagsFilter.Value) > 0 { + for _, tagID := range tagsFilter.Value { + query.addArg(tagID) + } + + query.body += ` left join performers_tags as tags_join on tags_join.performer_id = performers.id + LEFT JOIN tags on tags_join.tag_id = tags.id` + whereClause, havingClause := getMultiCriterionClause("performers", "tags", "performers_tags", "performer_id", "tag_id", tagsFilter) + query.addWhere(whereClause) + query.addHaving(havingClause) + } + query.sortAndPagination = qb.getPerformerSort(findFilter) + getPagination(findFilter) idsResult, countResult, err := query.executeFind() if err != nil { @@ -361,6 +381,26 @@ func (qb *performerQueryBuilder) queryPerformers(query string, args []interface{ return []*models.Performer(ret), nil } +func (qb *performerQueryBuilder) tagsRepository() *joinRepository { + return &joinRepository{ + repository: repository{ + tx: qb.tx, + tableName: performersTagsTable, + idColumn: performerIDColumn, + }, + fkColumn: tagIDColumn, + } +} + +func (qb *performerQueryBuilder) GetTagIDs(id int) ([]int, error) { + return 
qb.tagsRepository().getIDs(id) +} + +func (qb *performerQueryBuilder) UpdateTags(id int, tagIDs []int) error { + // Delete the existing joins and then create new ones + return qb.tagsRepository().replace(id, tagIDs) +} + func (qb *performerQueryBuilder) imageRepository() *imageRepository { return &imageRepository{ repository: repository{ diff --git a/pkg/sqlite/performer_test.go b/pkg/sqlite/performer_test.go index 530430403..c5c7dc6cd 100644 --- a/pkg/sqlite/performer_test.go +++ b/pkg/sqlite/performer_test.go @@ -5,6 +5,7 @@ package sqlite_test import ( "database/sql" "fmt" + "strconv" "strings" "testing" "time" @@ -44,6 +45,14 @@ func TestPerformerFindBySceneID(t *testing.T) { } func TestPerformerFindByNames(t *testing.T) { + getNames := func(p []*models.Performer) []string { + var ret []string + for _, pp := range p { + ret = append(ret, pp.Name.String) + } + return ret + } + withTxn(func(r models.Repository) error { var names []string @@ -72,19 +81,20 @@ func TestPerformerFindByNames(t *testing.T) { if err != nil { t.Errorf("Error finding performers: %s", err.Error()) } - assert.Len(t, performers, 2) // performerIdxWithScene and performerIdx1WithScene - assert.Equal(t, performerNames[performerIdxWithScene], performers[0].Name.String) - assert.Equal(t, performerNames[performerIdx1WithScene], performers[1].Name.String) + retNames := getNames(performers) + assert.Equal(t, names, retNames) performers, err = pqb.FindByNames(names, true) // find performers by names ( 2 names nocase) if err != nil { t.Errorf("Error finding performers: %s", err.Error()) } - assert.Len(t, performers, 4) // performerIdxWithScene and performerIdxWithDupName , performerIdx1WithScene and performerIdx1WithDupName - assert.Equal(t, performerNames[performerIdxWithScene], performers[0].Name.String) - assert.Equal(t, performerNames[performerIdx1WithScene], performers[1].Name.String) - assert.Equal(t, performerNames[performerIdx1WithDupName], performers[2].Name.String) - assert.Equal(t, performerNames[performerIdxWithDupName], performers[3].Name.String) + retNames = getNames(performers) + assert.Equal(t, []string{ + performerNames[performerIdxWithScene], + performerNames[performerIdx1WithScene], + performerNames[performerIdx1WithDupName], + performerNames[performerIdxWithDupName], + }, retNames) return nil }) @@ -218,6 +228,109 @@ func verifyPerformerAge(t *testing.T, ageCriterion models.IntCriterionInput) { }) } +func TestPerformerQueryCareerLength(t *testing.T) { + const value = "2005" + careerLengthCriterion := models.StringCriterionInput{ + Value: value, + Modifier: models.CriterionModifierEquals, + } + + verifyPerformerCareerLength(t, careerLengthCriterion) + + careerLengthCriterion.Modifier = models.CriterionModifierNotEquals + verifyPerformerCareerLength(t, careerLengthCriterion) + + careerLengthCriterion.Modifier = models.CriterionModifierMatchesRegex + verifyPerformerCareerLength(t, careerLengthCriterion) + + careerLengthCriterion.Modifier = models.CriterionModifierNotMatchesRegex + verifyPerformerCareerLength(t, careerLengthCriterion) +} + +func verifyPerformerCareerLength(t *testing.T, criterion models.StringCriterionInput) { + withTxn(func(r models.Repository) error { + qb := r.Performer() + performerFilter := models.PerformerFilterType{ + CareerLength: &criterion, + } + + performers, _, err := qb.Query(&performerFilter, nil) + if err != nil { + t.Errorf("Error querying performer: %s", err.Error()) + } + + for _, performer := range performers { + cl := performer.CareerLength + verifyNullString(t, cl, 
criterion) + } + + return nil + }) +} + +func queryPerformers(t *testing.T, qb models.PerformerReader, performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) []*models.Performer { + performers, _, err := qb.Query(performerFilter, findFilter) + if err != nil { + t.Errorf("Error querying performers: %s", err.Error()) + } + + return performers +} + +func TestPerformerQueryTags(t *testing.T) { + withTxn(func(r models.Repository) error { + sqb := r.Performer() + tagCriterion := models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithPerformer]), + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + }, + Modifier: models.CriterionModifierIncludes, + } + + performerFilter := models.PerformerFilterType{ + Tags: &tagCriterion, + } + + // ensure ids are correct + performers := queryPerformers(t, sqb, &performerFilter, nil) + assert.Len(t, performers, 2) + for _, performer := range performers { + assert.True(t, performer.ID == performerIDs[performerIdxWithTag] || performer.ID == performerIDs[performerIdxWithTwoTags]) + } + + tagCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + strconv.Itoa(tagIDs[tagIdx2WithPerformer]), + }, + Modifier: models.CriterionModifierIncludesAll, + } + + performers = queryPerformers(t, sqb, &performerFilter, nil) + + assert.Len(t, performers, 1) + assert.Equal(t, sceneIDs[performerIdxWithTwoTags], performers[0].ID) + + tagCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + }, + Modifier: models.CriterionModifierExcludes, + } + + q := getSceneStringValue(performerIdxWithTwoTags, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + performers = queryPerformers(t, sqb, &performerFilter, &findFilter) + assert.Len(t, performers, 0) + + return nil + }) +} + func TestPerformerStashIDs(t *testing.T) { if err := withTxn(func(r models.Repository) error { qb := r.Performer() diff --git a/pkg/sqlite/query.go b/pkg/sqlite/query.go index 378fe0efe..82a17cf4f 100644 --- a/pkg/sqlite/query.go +++ b/pkg/sqlite/query.go @@ -1,6 +1,7 @@ package sqlite import ( + "fmt" "regexp" "github.com/stashapp/stash/pkg/models" @@ -11,6 +12,7 @@ type queryBuilder struct { body string + joins joins whereClauses []string havingClauses []string args []interface{} @@ -25,7 +27,10 @@ func (qb queryBuilder) executeFind() ([]int, int, error) { return nil, 0, qb.err } - return qb.repository.executeFindQuery(qb.body, qb.args, qb.sortAndPagination, qb.whereClauses, qb.havingClauses) + body := qb.body + body += qb.joins.toSQL() + + return qb.repository.executeFindQuery(body, qb.args, qb.sortAndPagination, qb.whereClauses, qb.havingClauses) } func (qb *queryBuilder) addWhere(clauses ...string) { @@ -48,6 +53,48 @@ func (qb *queryBuilder) addArg(args ...interface{}) { qb.args = append(qb.args, args...) } +func (qb *queryBuilder) join(table, as, onClause string) { + newJoin := join{ + table: table, + as: as, + onClause: onClause, + } + + qb.joins.add(newJoin) +} + +func (qb *queryBuilder) addJoins(joins ...join) { + qb.joins.add(joins...) +} + +func (qb *queryBuilder) addFilter(f *filterBuilder) { + err := f.getError() + if err != nil { + qb.err = err + return + } + + clause, args := f.generateWhereClauses() + if len(clause) > 0 { + qb.addWhere(clause) + } + + if len(args) > 0 { + qb.addArg(args...) + } + + clause, args = f.generateHavingClauses() + if len(clause) > 0 { + qb.addHaving(clause) + } + + if len(args) > 0 { + qb.addArg(args...) 
+ } + + qb.addJoins(f.getAllJoins()...) +} + func (qb *queryBuilder) handleIntCriterionInput(c *models.IntCriterionInput, column string) { if c != nil { clause, count := getIntCriterionWhereClause(column, *c) @@ -81,15 +128,19 @@ func (qb *queryBuilder) handleStringCriterionInput(c *models.StringCriterionInpu qb.err = err return } - qb.addWhere(column + " regexp ?") + qb.addWhere(fmt.Sprintf("(%s IS NOT NULL AND %[1]s regexp ?)", column)) qb.addArg(c.Value) case models.CriterionModifierNotMatchesRegex: if _, err := regexp.Compile(c.Value); err != nil { qb.err = err return } - qb.addWhere(column + " NOT regexp ?") + qb.addWhere(fmt.Sprintf("(%s IS NULL OR %[1]s NOT regexp ?)", column)) qb.addArg(c.Value) + case models.CriterionModifierIsNull: + qb.addWhere("(" + column + " IS NULL OR TRIM(" + column + ") = '')") + case models.CriterionModifierNotNull: + qb.addWhere("(" + column + " IS NOT NULL AND TRIM(" + column + ") != '')") default: clause, count := getSimpleCriterionClause(modifier, "?") qb.addWhere(column + " " + clause) diff --git a/pkg/sqlite/repository.go b/pkg/sqlite/repository.go index 99d2eeb01..681e68376 100644 --- a/pkg/sqlite/repository.go +++ b/pkg/sqlite/repository.go @@ -273,6 +273,18 @@ func (r *repository) newQuery() queryBuilder { } } +func (r *repository) join(j joiner, as string, parentIDCol string) { + t := r.tableName + if as != "" { + t = as + } + j.addJoin(r.tableName, as, fmt.Sprintf("%s.%s = %s", t, r.idColumn, parentIDCol)) +} + +type joiner interface { + addJoin(table, as, onClause string) +} + type joinRepository struct { repository fkColumn string diff --git a/pkg/sqlite/scene.go b/pkg/sqlite/scene.go index 83c983725..8dd954322 100644 --- a/pkg/sqlite/scene.go +++ b/pkg/sqlite/scene.go @@ -3,8 +3,7 @@ package sqlite import ( "database/sql" "fmt" - "path/filepath" - "strings" + "strconv" "github.com/jmoiron/sqlx" "github.com/stashapp/stash/pkg/models" @@ -290,51 +289,71 @@ func (qb *sceneQueryBuilder) All() ([]*models.Scene, error) { return qb.queryScenes(selectAll(sceneTable)+qb.getSceneSort(nil), nil) } -// QueryForAutoTag queries for scenes whose paths match the provided regex and -// are optionally within the provided path. Excludes organized scenes. -// TODO - this should be replaced with Query once it can perform multiple -// filters on the same field. -func (qb *sceneQueryBuilder) QueryForAutoTag(regex string, pathPrefixes []string) ([]*models.Scene, error) { - var args []interface{} - body := selectDistinctIDs("scenes") + ` WHERE - scenes.path regexp ? 
AND - scenes.organized = 0` +func illegalFilterCombination(type1, type2 string) error { + return fmt.Errorf("cannot have %s and %s in the same filter", type1, type2) +} - args = append(args, "(?i)"+regex) +func (qb *sceneQueryBuilder) validateFilter(sceneFilter *models.SceneFilterType) error { + const and = "AND" + const or = "OR" + const not = "NOT" - var pathClauses []string - for _, p := range pathPrefixes { - pathClauses = append(pathClauses, "scenes.path like ?") - - sep := string(filepath.Separator) - if !strings.HasSuffix(p, sep) { - p = p + sep + if sceneFilter.And != nil { + if sceneFilter.Or != nil { + return illegalFilterCombination(and, or) } - args = append(args, p+"%") - } - - if len(pathClauses) > 0 { - body += " AND (" + strings.Join(pathClauses, " OR ") + ")" - } - - idsResult, err := qb.runIdsQuery(body, args) - - if err != nil { - return nil, err - } - - var scenes []*models.Scene - for _, id := range idsResult { - scene, err := qb.Find(id) - - if err != nil { - return nil, err + if sceneFilter.Not != nil { + return illegalFilterCombination(and, not) } - scenes = append(scenes, scene) + return qb.validateFilter(sceneFilter.And) } - return scenes, nil + if sceneFilter.Or != nil { + if sceneFilter.Not != nil { + return illegalFilterCombination(or, not) + } + + return qb.validateFilter(sceneFilter.Or) + } + + if sceneFilter.Not != nil { + return qb.validateFilter(sceneFilter.Not) + } + + return nil +} + +func (qb *sceneQueryBuilder) makeFilter(sceneFilter *models.SceneFilterType) *filterBuilder { + query := &filterBuilder{} + + if sceneFilter.And != nil { + query.and(qb.makeFilter(sceneFilter.And)) + } + if sceneFilter.Or != nil { + query.or(qb.makeFilter(sceneFilter.Or)) + } + if sceneFilter.Not != nil { + query.not(qb.makeFilter(sceneFilter.Not)) + } + + query.handleCriterionFunc(stringCriterionHandler(sceneFilter.Path, "scenes.path")) + query.handleCriterionFunc(intCriterionHandler(sceneFilter.Rating, "scenes.rating")) + query.handleCriterionFunc(intCriterionHandler(sceneFilter.OCounter, "scenes.o_counter")) + query.handleCriterionFunc(boolCriterionHandler(sceneFilter.Organized, "scenes.organized")) + query.handleCriterionFunc(durationCriterionHandler(sceneFilter.Duration, "scenes.duration")) + query.handleCriterionFunc(resolutionCriterionHandler(sceneFilter.Resolution, "scenes.height", "scenes.width")) + query.handleCriterionFunc(hasMarkersCriterionHandler(sceneFilter.HasMarkers)) + query.handleCriterionFunc(sceneIsMissingCriterionHandler(qb, sceneFilter.IsMissing)) + + query.handleCriterionFunc(sceneTagsCriterionHandler(qb, sceneFilter.Tags)) + query.handleCriterionFunc(scenePerformersCriterionHandler(qb, sceneFilter.Performers)) + query.handleCriterionFunc(sceneStudioCriterionHandler(qb, sceneFilter.Studios)) + query.handleCriterionFunc(sceneMoviesCriterionHandler(qb, sceneFilter.Movies)) + query.handleCriterionFunc(sceneStashIDsHandler(qb, sceneFilter.StashID)) + query.handleCriterionFunc(scenePerformerTagsCriterionHandler(qb, sceneFilter.PerformerTags)) + + return query } func (qb *sceneQueryBuilder) Query(sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, int, error) { @@ -348,152 +367,21 @@ func (qb *sceneQueryBuilder) Query(sceneFilter *models.SceneFilterType, findFilt query := qb.newQuery() query.body = selectDistinctIDs(sceneTable) - query.body += ` - left join scene_markers on scene_markers.scene_id = scenes.id - left join performers_scenes as performers_join on performers_join.scene_id = scenes.id - left join 
movies_scenes as movies_join on movies_join.scene_id = scenes.id - left join studios as studio on studio.id = scenes.studio_id - left join scenes_galleries as galleries_join on galleries_join.scene_id = scenes.id - left join scenes_tags as tags_join on tags_join.scene_id = scenes.id - left join scene_stash_ids on scene_stash_ids.scene_id = scenes.id - ` if q := findFilter.Q; q != nil && *q != "" { + query.join("scene_markers", "", "scene_markers.scene_id = scenes.id") searchColumns := []string{"scenes.title", "scenes.details", "scenes.path", "scenes.oshash", "scenes.checksum", "scene_markers.title"} clause, thisArgs := getSearchBinding(searchColumns, *q, false) query.addWhere(clause) query.addArg(thisArgs...) } - query.handleStringCriterionInput(sceneFilter.Path, "scenes.path") - query.handleIntCriterionInput(sceneFilter.Rating, "scenes.rating") - query.handleIntCriterionInput(sceneFilter.OCounter, "scenes.o_counter") - - if Organized := sceneFilter.Organized; Organized != nil { - var organized string - if *Organized == true { - organized = "1" - } else { - organized = "0" - } - query.addWhere("scenes.organized = " + organized) + if err := qb.validateFilter(sceneFilter); err != nil { + return nil, 0, err } + filter := qb.makeFilter(sceneFilter) - if durationFilter := sceneFilter.Duration; durationFilter != nil { - clause, thisArgs := getDurationWhereClause(*durationFilter) - query.addWhere(clause) - query.addArg(thisArgs...) - } - - if resolutionFilter := sceneFilter.Resolution; resolutionFilter != nil { - if resolution := resolutionFilter.String(); resolutionFilter.IsValid() { - switch resolution { - case "VERY_LOW": - query.addWhere("MIN(scenes.height, scenes.width) < 240") - case "LOW": - query.addWhere("(MIN(scenes.height, scenes.width) >= 240 AND MIN(scenes.height, scenes.width) < 360)") - case "R360P": - query.addWhere("(MIN(scenes.height, scenes.width) >= 360 AND MIN(scenes.height, scenes.width) < 480)") - case "STANDARD": - query.addWhere("(MIN(scenes.height, scenes.width) >= 480 AND MIN(scenes.height, scenes.width) < 540)") - case "WEB_HD": - query.addWhere("(MIN(scenes.height, scenes.width) >= 540 AND MIN(scenes.height, scenes.width) < 720)") - case "STANDARD_HD": - query.addWhere("(MIN(scenes.height, scenes.width) >= 720 AND MIN(scenes.height, scenes.width) < 1080)") - case "FULL_HD": - query.addWhere("(MIN(scenes.height, scenes.width) >= 1080 AND MIN(scenes.height, scenes.width) < 1440)") - case "QUAD_HD": - query.addWhere("(MIN(scenes.height, scenes.width) >= 1440 AND MIN(scenes.height, scenes.width) < 1920)") - case "VR_HD": - query.addWhere("(MIN(scenes.height, scenes.width) >= 1920 AND MIN(scenes.height, scenes.width) < 2160)") - case "FOUR_K": - query.addWhere("(MIN(scenes.height, scenes.width) >= 2160 AND MIN(scenes.height, scenes.width) < 2880)") - case "FIVE_K": - query.addWhere("(MIN(scenes.height, scenes.width) >= 2880 AND MIN(scenes.height, scenes.width) < 3384)") - case "SIX_K": - query.addWhere("(MIN(scenes.height, scenes.width) >= 3384 AND MIN(scenes.height, scenes.width) < 4320)") - case "EIGHT_K": - query.addWhere("(MIN(scenes.height, scenes.width) >= 4320") - } - } - } - - if hasMarkersFilter := sceneFilter.HasMarkers; hasMarkersFilter != nil { - if strings.Compare(*hasMarkersFilter, "true") == 0 { - query.addHaving("count(scene_markers.scene_id) > 0") - } else { - query.addWhere("scene_markers.id IS NULL") - } - } - - if isMissingFilter := sceneFilter.IsMissing; isMissingFilter != nil && *isMissingFilter != "" { - switch *isMissingFilter { - case "galleries": 
- query.addWhere("galleries_join.scene_id IS NULL") - case "studio": - query.addWhere("scenes.studio_id IS NULL") - case "movie": - query.addWhere("movies_join.scene_id IS NULL") - case "performers": - query.addWhere("performers_join.scene_id IS NULL") - case "date": - query.addWhere("scenes.date IS \"\" OR scenes.date IS \"0001-01-01\"") - case "tags": - query.addWhere("tags_join.scene_id IS NULL") - case "stash_id": - query.addWhere("scene_stash_ids.scene_id IS NULL") - default: - query.addWhere("(scenes." + *isMissingFilter + " IS NULL OR TRIM(scenes." + *isMissingFilter + ") = '')") - } - } - - if tagsFilter := sceneFilter.Tags; tagsFilter != nil && len(tagsFilter.Value) > 0 { - for _, tagID := range tagsFilter.Value { - query.addArg(tagID) - } - - query.body += " LEFT JOIN tags on tags_join.tag_id = tags.id" - whereClause, havingClause := getMultiCriterionClause("scenes", "tags", "scenes_tags", "scene_id", "tag_id", tagsFilter) - query.addWhere(whereClause) - query.addHaving(havingClause) - } - - if performersFilter := sceneFilter.Performers; performersFilter != nil && len(performersFilter.Value) > 0 { - for _, performerID := range performersFilter.Value { - query.addArg(performerID) - } - - query.body += " LEFT JOIN performers ON performers_join.performer_id = performers.id" - whereClause, havingClause := getMultiCriterionClause("scenes", "performers", "performers_scenes", "scene_id", "performer_id", performersFilter) - query.addWhere(whereClause) - query.addHaving(havingClause) - } - - if studiosFilter := sceneFilter.Studios; studiosFilter != nil && len(studiosFilter.Value) > 0 { - for _, studioID := range studiosFilter.Value { - query.addArg(studioID) - } - - whereClause, havingClause := getMultiCriterionClause("scenes", "studio", "", "", "studio_id", studiosFilter) - query.addWhere(whereClause) - query.addHaving(havingClause) - } - - if moviesFilter := sceneFilter.Movies; moviesFilter != nil && len(moviesFilter.Value) > 0 { - for _, movieID := range moviesFilter.Value { - query.addArg(movieID) - } - - query.body += " LEFT JOIN movies ON movies_join.movie_id = movies.id" - whereClause, havingClause := getMultiCriterionClause("scenes", "movies", "movies_scenes", "scene_id", "movie_id", moviesFilter) - query.addWhere(whereClause) - query.addHaving(havingClause) - } - - if stashIDFilter := sceneFilter.StashID; stashIDFilter != nil { - query.addWhere("scene_stash_ids.stash_id = ?") - query.addArg(stashIDFilter) - } + query.addFilter(filter) query.sortAndPagination = qb.getSceneSort(findFilter) + getPagination(findFilter) @@ -522,7 +410,16 @@ func appendClause(clauses []string, clause string) []string { return clauses } -func getDurationWhereClause(durationFilter models.IntCriterionInput) (string, []interface{}) { +func durationCriterionHandler(durationFilter *models.IntCriterionInput, column string) criterionHandlerFunc { + return func(f *filterBuilder) { + if durationFilter != nil { + clause, thisArgs := getDurationWhereClause(*durationFilter, column) + f.addWhere(clause, thisArgs...) + } + } +} + +func getDurationWhereClause(durationFilter models.IntCriterionInput, column string) (string, []interface{}) { // special case for duration. We accept duration as seconds as int but the // field is floating point. 
Change the equals filter to return a range // between x and x + 1 @@ -532,16 +429,16 @@ func getDurationWhereClause(durationFilter models.IntCriterionInput) (string, [] value := durationFilter.Value if durationFilter.Modifier == models.CriterionModifierEquals { - clause = "scenes.duration >= ? AND scenes.duration < ?" + clause = fmt.Sprintf("%[1]s >= ? AND %[1]s < ?", column) args = append(args, value) args = append(args, value+1) } else if durationFilter.Modifier == models.CriterionModifierNotEquals { - clause = "(scenes.duration < ? OR scenes.duration >= ?)" + clause = fmt.Sprintf("(%[1]s < ? OR %[1]s >= ?)", column) args = append(args, value) args = append(args, value+1) } else { var count int - clause, count = getIntCriterionWhereClause("scenes.duration", durationFilter) + clause, count = getIntCriterionWhereClause(column, durationFilter) if count == 1 { args = append(args, value) } @@ -550,6 +447,179 @@ func getDurationWhereClause(durationFilter models.IntCriterionInput) (string, [] return clause, args } +func resolutionCriterionHandler(resolution *models.ResolutionEnum, heightColumn string, widthColumn string) criterionHandlerFunc { + return func(f *filterBuilder) { + if resolution != nil && resolution.IsValid() { + min := resolution.GetMinResolution() + max := resolution.GetMaxResolution() + + widthHeight := fmt.Sprintf("MIN(%s, %s)", widthColumn, heightColumn) + + if min > 0 { + f.addWhere(widthHeight + " >= " + strconv.Itoa(min)) + } + + if max > 0 { + f.addWhere(widthHeight + " < " + strconv.Itoa(max)) + } + } + } +} + +func hasMarkersCriterionHandler(hasMarkers *string) criterionHandlerFunc { + return func(f *filterBuilder) { + if hasMarkers != nil { + f.addJoin("scene_markers", "", "scene_markers.scene_id = scenes.id") + if *hasMarkers == "true" { + f.addHaving("count(scene_markers.scene_id) > 0") + } else { + f.addWhere("scene_markers.id IS NULL") + } + } + } +} + +func sceneIsMissingCriterionHandler(qb *sceneQueryBuilder, isMissing *string) criterionHandlerFunc { + return func(f *filterBuilder) { + if isMissing != nil && *isMissing != "" { + switch *isMissing { + case "galleries": + qb.galleriesRepository().join(f, "galleries_join", "scenes.id") + f.addWhere("galleries_join.scene_id IS NULL") + case "studio": + f.addWhere("scenes.studio_id IS NULL") + case "movie": + qb.moviesRepository().join(f, "movies_join", "scenes.id") + f.addWhere("movies_join.scene_id IS NULL") + case "performers": + qb.performersRepository().join(f, "performers_join", "scenes.id") + f.addWhere("performers_join.scene_id IS NULL") + case "date": + f.addWhere("scenes.date IS \"\" OR scenes.date IS \"0001-01-01\"") + case "tags": + qb.tagsRepository().join(f, "tags_join", "scenes.id") + f.addWhere("tags_join.scene_id IS NULL") + case "stash_id": + qb.stashIDRepository().join(f, "scene_stash_ids", "scenes.id") + f.addWhere("scene_stash_ids.scene_id IS NULL") + default: + f.addWhere("(scenes." + *isMissing + " IS NULL OR TRIM(scenes." 
+ *isMissing + ") = '')") + } + } + } +} + +func (qb *sceneQueryBuilder) getMultiCriterionHandlerBuilder(foreignTable, joinTable, foreignFK string, addJoinsFunc func(f *filterBuilder)) multiCriterionHandlerBuilder { + return multiCriterionHandlerBuilder{ + primaryTable: sceneTable, + foreignTable: foreignTable, + joinTable: joinTable, + primaryFK: sceneIDColumn, + foreignFK: foreignFK, + addJoinsFunc: addJoinsFunc, + } +} +func sceneTagsCriterionHandler(qb *sceneQueryBuilder, tags *models.MultiCriterionInput) criterionHandlerFunc { + addJoinsFunc := func(f *filterBuilder) { + qb.tagsRepository().join(f, "tags_join", "scenes.id") + f.addJoin("tags", "", "tags_join.tag_id = tags.id") + } + h := qb.getMultiCriterionHandlerBuilder(tagTable, scenesTagsTable, tagIDColumn, addJoinsFunc) + + return h.handler(tags) +} + +func scenePerformersCriterionHandler(qb *sceneQueryBuilder, performers *models.MultiCriterionInput) criterionHandlerFunc { + addJoinsFunc := func(f *filterBuilder) { + qb.performersRepository().join(f, "performers_join", "scenes.id") + f.addJoin("performers", "", "performers_join.performer_id = performers.id") + } + h := qb.getMultiCriterionHandlerBuilder(performerTable, performersScenesTable, performerIDColumn, addJoinsFunc) + + return h.handler(performers) +} + +func sceneStudioCriterionHandler(qb *sceneQueryBuilder, studios *models.MultiCriterionInput) criterionHandlerFunc { + addJoinsFunc := func(f *filterBuilder) { + f.addJoin("studios", "studio", "studio.id = scenes.studio_id") + } + h := qb.getMultiCriterionHandlerBuilder("studio", "", studioIDColumn, addJoinsFunc) + + return h.handler(studios) +} + +func sceneMoviesCriterionHandler(qb *sceneQueryBuilder, movies *models.MultiCriterionInput) criterionHandlerFunc { + addJoinsFunc := func(f *filterBuilder) { + qb.moviesRepository().join(f, "movies_join", "scenes.id") + f.addJoin("movies", "", "movies_join.movie_id = movies.id") + } + h := qb.getMultiCriterionHandlerBuilder(movieTable, moviesScenesTable, "movie_id", addJoinsFunc) + return h.handler(movies) +} + +func sceneStashIDsHandler(qb *sceneQueryBuilder, stashID *string) criterionHandlerFunc { + return func(f *filterBuilder) { + if stashID != nil && *stashID != "" { + qb.stashIDRepository().join(f, "scene_stash_ids", "scenes.id") + stringLiteralCriterionHandler(stashID, "scene_stash_ids.stash_id")(f) + } + } +} + +func scenePerformerTagsCriterionHandler(qb *sceneQueryBuilder, performerTagsFilter *models.MultiCriterionInput) criterionHandlerFunc { + return func(f *filterBuilder) { + if performerTagsFilter != nil && len(performerTagsFilter.Value) > 0 { + qb.performersRepository().join(f, "performers_join", "scenes.id") + f.addJoin("performers_tags", "performer_tags_join", "performers_join.performer_id = performer_tags_join.performer_id") + + var args []interface{} + for _, tagID := range performerTagsFilter.Value { + args = append(args, tagID) + } + + if performerTagsFilter.Modifier == models.CriterionModifierIncludes { + // includes any of the provided ids + f.addWhere("performer_tags_join.tag_id IN "+getInBinding(len(performerTagsFilter.Value)), args...) + } else if performerTagsFilter.Modifier == models.CriterionModifierIncludesAll { + // includes all of the provided ids + f.addWhere("performer_tags_join.tag_id IN "+getInBinding(len(performerTagsFilter.Value)), args...) 
+ f.addHaving(fmt.Sprintf("count(distinct performer_tags_join.tag_id) IS %d", len(performerTagsFilter.Value))) + } else if performerTagsFilter.Modifier == models.CriterionModifierExcludes { + f.addWhere(fmt.Sprintf(`not exists + (select performers_scenes.performer_id from performers_scenes + left join performers_tags on performers_tags.performer_id = performers_scenes.performer_id where + performers_scenes.scene_id = scenes.id AND + performers_tags.tag_id in %s)`, getInBinding(len(performerTagsFilter.Value))), args...) + } + } + } +} + +func handleScenePerformerTagsCriterion(query *queryBuilder, performerTagsFilter *models.MultiCriterionInput) { + if performerTagsFilter != nil && len(performerTagsFilter.Value) > 0 { + for _, tagID := range performerTagsFilter.Value { + query.addArg(tagID) + } + + query.body += " LEFT JOIN performers_tags AS performer_tags_join on performers_join.performer_id = performer_tags_join.performer_id" + + if performerTagsFilter.Modifier == models.CriterionModifierIncludes { + // includes any of the provided ids + query.addWhere("performer_tags_join.tag_id IN " + getInBinding(len(performerTagsFilter.Value))) + } else if performerTagsFilter.Modifier == models.CriterionModifierIncludesAll { + // includes all of the provided ids + query.addWhere("performer_tags_join.tag_id IN " + getInBinding(len(performerTagsFilter.Value))) + query.addHaving(fmt.Sprintf("count(distinct performer_tags_join.tag_id) IS %d", len(performerTagsFilter.Value))) + } else if performerTagsFilter.Modifier == models.CriterionModifierExcludes { + query.addWhere(fmt.Sprintf(`not exists + (select performers_scenes.performer_id from performers_scenes + left join performers_tags on performers_tags.performer_id = performers_scenes.performer_id where + performers_scenes.scene_id = scenes.id AND + performers_tags.tag_id in %s)`, getInBinding(len(performerTagsFilter.Value)))) + } + } +} + func (qb *sceneQueryBuilder) getSceneSort(findFilter *models.FindFilterType) string { if findFilter == nil { return " ORDER BY scenes.path, scenes.date ASC " diff --git a/pkg/sqlite/scene_test.go b/pkg/sqlite/scene_test.go index a4aef4089..ac34403fb 100644 --- a/pkg/sqlite/scene_test.go +++ b/pkg/sqlite/scene_test.go @@ -139,6 +139,7 @@ func TestSceneQueryQ(t *testing.T) { } func queryScene(t *testing.T, sqb models.SceneReader, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) []*models.Scene { + t.Helper() scenes, _, err := sqb.Query(sceneFilter, findFilter) if err != nil { t.Errorf("Error querying scene: %s", err.Error()) @@ -186,6 +187,143 @@ func TestSceneQueryPath(t *testing.T) { verifyScenesPath(t, pathCriterion) } +func TestSceneQueryPathOr(t *testing.T) { + const scene1Idx = 1 + const scene2Idx = 2 + + scene1Path := getSceneStringValue(scene1Idx, "Path") + scene2Path := getSceneStringValue(scene2Idx, "Path") + + sceneFilter := models.SceneFilterType{ + Path: &models.StringCriterionInput{ + Value: scene1Path, + Modifier: models.CriterionModifierEquals, + }, + Or: &models.SceneFilterType{ + Path: &models.StringCriterionInput{ + Value: scene2Path, + Modifier: models.CriterionModifierEquals, + }, + }, + } + + withTxn(func(r models.Repository) error { + sqb := r.Scene() + + scenes := queryScene(t, sqb, &sceneFilter, nil) + + assert.Len(t, scenes, 2) + assert.Equal(t, scene1Path, scenes[0].Path) + assert.Equal(t, scene2Path, scenes[1].Path) + + return nil + }) +} + +func TestSceneQueryPathAndRating(t *testing.T) { + const sceneIdx = 1 + scenePath := getSceneStringValue(sceneIdx, "Path") + 
sceneRating := getRating(sceneIdx) + + sceneFilter := models.SceneFilterType{ + Path: &models.StringCriterionInput{ + Value: scenePath, + Modifier: models.CriterionModifierEquals, + }, + And: &models.SceneFilterType{ + Rating: &models.IntCriterionInput{ + Value: int(sceneRating.Int64), + Modifier: models.CriterionModifierEquals, + }, + }, + } + + withTxn(func(r models.Repository) error { + sqb := r.Scene() + + scenes := queryScene(t, sqb, &sceneFilter, nil) + + assert.Len(t, scenes, 1) + assert.Equal(t, scenePath, scenes[0].Path) + assert.Equal(t, sceneRating.Int64, scenes[0].Rating.Int64) + + return nil + }) +} + +func TestSceneQueryPathNotRating(t *testing.T) { + const sceneIdx = 1 + + sceneRating := getRating(sceneIdx) + + pathCriterion := models.StringCriterionInput{ + Value: "scene_.*1_Path", + Modifier: models.CriterionModifierMatchesRegex, + } + + ratingCriterion := models.IntCriterionInput{ + Value: int(sceneRating.Int64), + Modifier: models.CriterionModifierEquals, + } + + sceneFilter := models.SceneFilterType{ + Path: &pathCriterion, + Not: &models.SceneFilterType{ + Rating: &ratingCriterion, + }, + } + + withTxn(func(r models.Repository) error { + sqb := r.Scene() + + scenes := queryScene(t, sqb, &sceneFilter, nil) + + for _, scene := range scenes { + verifyString(t, scene.Path, pathCriterion) + ratingCriterion.Modifier = models.CriterionModifierNotEquals + verifyInt64(t, scene.Rating, ratingCriterion) + } + + return nil + }) +} + +func TestSceneIllegalQuery(t *testing.T) { + assert := assert.New(t) + + const sceneIdx = 1 + subFilter := models.SceneFilterType{ + Path: &models.StringCriterionInput{ + Value: getSceneStringValue(sceneIdx, "Path"), + Modifier: models.CriterionModifierEquals, + }, + } + + sceneFilter := &models.SceneFilterType{ + And: &subFilter, + Or: &subFilter, + } + + withTxn(func(r models.Repository) error { + sqb := r.Scene() + + _, _, err := sqb.Query(sceneFilter, nil) + assert.NotNil(err) + + sceneFilter.Or = nil + sceneFilter.Not = &subFilter + _, _, err = sqb.Query(sceneFilter, nil) + assert.NotNil(err) + + sceneFilter.And = nil + sceneFilter.Or = &subFilter + _, _, err = sqb.Query(sceneFilter, nil) + assert.NotNil(err) + + return nil + }) +} + func verifyScenesPath(t *testing.T, pathCriterion models.StringCriterionInput) { withTxn(func(r models.Repository) error { sqb := r.Scene() @@ -218,6 +356,17 @@ func verifyNullString(t *testing.T, value sql.NullString, criterion models.Strin if criterion.Modifier == models.CriterionModifierNotEquals { assert.NotEqual(criterion.Value, value.String) } + if criterion.Modifier == models.CriterionModifierMatchesRegex { + assert.True(value.Valid) + assert.Regexp(regexp.MustCompile(criterion.Value), value) + } + if criterion.Modifier == models.CriterionModifierNotMatchesRegex { + if !value.Valid { + // correct + return + } + assert.NotRegexp(regexp.MustCompile(criterion.Value), value) + } } func verifyString(t *testing.T, value string, criterion models.StringCriterionInput) { @@ -802,6 +951,61 @@ func TestSceneQueryTags(t *testing.T) { }) } +func TestSceneQueryPerformerTags(t *testing.T) { + withTxn(func(r models.Repository) error { + sqb := r.Scene() + tagCriterion := models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithPerformer]), + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + }, + Modifier: models.CriterionModifierIncludes, + } + + sceneFilter := models.SceneFilterType{ + PerformerTags: &tagCriterion, + } + + scenes := queryScene(t, sqb, &sceneFilter, nil) + assert.Len(t, scenes, 2) + + // 
ensure ids are correct + for _, scene := range scenes { + assert.True(t, scene.ID == sceneIDs[sceneIdxWithPerformerTag] || scene.ID == sceneIDs[sceneIdxWithPerformerTwoTags]) + } + + tagCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + strconv.Itoa(tagIDs[tagIdx2WithPerformer]), + }, + Modifier: models.CriterionModifierIncludesAll, + } + + scenes = queryScene(t, sqb, &sceneFilter, nil) + + assert.Len(t, scenes, 1) + assert.Equal(t, sceneIDs[sceneIdxWithPerformerTwoTags], scenes[0].ID) + + tagCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithPerformer]), + }, + Modifier: models.CriterionModifierExcludes, + } + + q := getSceneStringValue(sceneIdxWithPerformerTwoTags, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + scenes = queryScene(t, sqb, &sceneFilter, &findFilter) + assert.Len(t, scenes, 0) + + return nil + }) +} + func TestSceneQueryStudio(t *testing.T) { withTxn(func(r models.Repository) error { sqb := r.Scene() diff --git a/pkg/sqlite/setup_test.go b/pkg/sqlite/setup_test.go index 81019ce9d..323bb113c 100644 --- a/pkg/sqlite/setup_test.go +++ b/pkg/sqlite/setup_test.go @@ -20,98 +20,240 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -const totalScenes = 12 -const totalImages = 6 // TODO - add one for zip file -const performersNameCase = 6 -const performersNameNoCase = 2 -const moviesNameCase = 2 -const moviesNameNoCase = 1 -const totalGalleries = 3 -const tagsNameNoCase = 2 -const tagsNameCase = 9 -const studiosNameCase = 5 -const studiosNameNoCase = 1 +const ( + sceneIdxWithMovie = iota + sceneIdxWithGallery + sceneIdxWithPerformer + sceneIdxWithTwoPerformers + sceneIdxWithTag + sceneIdxWithTwoTags + sceneIdxWithStudio + sceneIdxWithMarker + sceneIdxWithPerformerTag + sceneIdxWithPerformerTwoTags + // new indexes above + lastSceneIdx -var sceneIDs []int -var imageIDs []int -var performerIDs []int -var movieIDs []int -var galleryIDs []int -var tagIDs []int -var studioIDs []int -var markerIDs []int + totalScenes = lastSceneIdx + 3 +) -var tagNames []string -var studioNames []string -var movieNames []string -var performerNames []string +const ( + imageIdxWithGallery = iota + imageIdxWithPerformer + imageIdxWithTwoPerformers + imageIdxWithTag + imageIdxWithTwoTags + imageIdxWithStudio + imageIdxInZip // TODO - not implemented + imageIdxWithPerformerTag + imageIdxWithPerformerTwoTags + // new indexes above + totalImages +) -const sceneIdxWithMovie = 0 -const sceneIdxWithGallery = 1 -const sceneIdxWithPerformer = 2 -const sceneIdxWithTwoPerformers = 3 -const sceneIdxWithTag = 4 -const sceneIdxWithTwoTags = 5 -const sceneIdxWithStudio = 6 -const sceneIdxWithMarker = 7 +const ( + performerIdxWithScene = iota + performerIdx1WithScene + performerIdx2WithScene + performerIdxWithImage + performerIdx1WithImage + performerIdx2WithImage + performerIdxWithTag + performerIdxWithTwoTags + performerIdxWithGallery + performerIdx1WithGallery + performerIdx2WithGallery + // new indexes above + // performers with dup names start from the end + performerIdx1WithDupName + performerIdxWithDupName -const imageIdxWithGallery = 0 -const imageIdxWithPerformer = 1 -const imageIdxWithTwoPerformers = 2 -const imageIdxWithTag = 3 -const imageIdxWithTwoTags = 4 -const imageIdxWithStudio = 5 -const imageIdxInZip = 6 + performersNameCase = performerIdx1WithDupName + performersNameNoCase = 2 +) -const performerIdxWithScene = 0 -const performerIdx1WithScene = 1 -const performerIdx2WithScene = 2 
-const performerIdxWithImage = 3 -const performerIdx1WithImage = 4 -const performerIdx2WithImage = 5 +const ( + movieIdxWithScene = iota + movieIdxWithStudio + // movies with dup names start from the end + movieIdxWithDupName -// performers with dup names start from the end -const performerIdx1WithDupName = 6 -const performerIdxWithDupName = 7 + moviesNameCase = movieIdxWithDupName + moviesNameNoCase = 1 +) -const movieIdxWithScene = 0 -const movieIdxWithStudio = 1 +const ( + galleryIdxWithScene = iota + galleryIdxWithImage + galleryIdxWithPerformer + galleryIdxWithTwoPerformers + galleryIdxWithTag + galleryIdxWithTwoTags + galleryIdxWithStudio + galleryIdxWithPerformerTag + galleryIdxWithPerformerTwoTags + // new indexes above + lastGalleryIdx -// movies with dup names start from the end -const movieIdxWithDupName = 2 + totalGalleries = lastGalleryIdx + 1 +) -const galleryIdxWithScene = 0 -const galleryIdxWithImage = 1 +const ( + tagIdxWithScene = iota + tagIdx1WithScene + tagIdx2WithScene + tagIdxWithPrimaryMarker + tagIdxWithMarker + tagIdxWithCoverImage + tagIdxWithImage + tagIdx1WithImage + tagIdx2WithImage + tagIdxWithPerformer + tagIdx1WithPerformer + tagIdx2WithPerformer + tagIdxWithGallery + tagIdx1WithGallery + tagIdx2WithGallery + // new indexes above + // tags with dup names start from the end + tagIdx1WithDupName + tagIdxWithDupName -const tagIdxWithScene = 0 -const tagIdx1WithScene = 1 -const tagIdx2WithScene = 2 -const tagIdxWithPrimaryMarker = 3 -const tagIdxWithMarker = 4 -const tagIdxWithCoverImage = 5 -const tagIdxWithImage = 6 -const tagIdx1WithImage = 7 -const tagIdx2WithImage = 8 + tagsNameNoCase = 2 + tagsNameCase = tagIdx1WithDupName +) -// tags with dup names start from the end -const tagIdx1WithDupName = 9 -const tagIdxWithDupName = 10 +const ( + studioIdxWithScene = iota + studioIdxWithMovie + studioIdxWithChildStudio + studioIdxWithParentStudio + studioIdxWithImage + studioIdxWithGallery + // new indexes above + // studios with dup names start from the end + studioIdxWithDupName -const studioIdxWithScene = 0 -const studioIdxWithMovie = 1 -const studioIdxWithChildStudio = 2 -const studioIdxWithParentStudio = 3 -const studioIdxWithImage = 4 + studiosNameCase = studioIdxWithDupName + studiosNameNoCase = 1 +) -// studios with dup names start from the end -const studioIdxWithDupName = 5 +const ( + markerIdxWithScene = iota +) -const markerIdxWithScene = 0 +const ( + pathField = "Path" + checksumField = "Checksum" + titleField = "Title" + zipPath = "zipPath.zip" +) -const pathField = "Path" -const checksumField = "Checksum" -const titleField = "Title" -const zipPath = "zipPath.zip" +var ( + sceneIDs []int + imageIDs []int + performerIDs []int + movieIDs []int + galleryIDs []int + tagIDs []int + studioIDs []int + markerIDs []int + + tagNames []string + studioNames []string + movieNames []string + performerNames []string +) + +type idAssociation struct { + first int + second int +} + +var ( + sceneTagLinks = [][2]int{ + {sceneIdxWithTag, tagIdxWithScene}, + {sceneIdxWithTwoTags, tagIdx1WithScene}, + {sceneIdxWithTwoTags, tagIdx2WithScene}, + } + + scenePerformerLinks = [][2]int{ + {sceneIdxWithPerformer, performerIdxWithScene}, + {sceneIdxWithTwoPerformers, performerIdx1WithScene}, + {sceneIdxWithTwoPerformers, performerIdx2WithScene}, + {sceneIdxWithPerformerTag, performerIdxWithTag}, + {sceneIdxWithPerformerTwoTags, performerIdxWithTwoTags}, + } + + sceneGalleryLinks = [][2]int{ + {sceneIdxWithGallery, galleryIdxWithScene}, + } + + sceneMovieLinks = [][2]int{ + 
{sceneIdxWithMovie, movieIdxWithScene}, + } + + sceneStudioLinks = [][2]int{ + {sceneIdxWithStudio, studioIdxWithScene}, + } +) + +var ( + imageGalleryLinks = [][2]int{ + {imageIdxWithGallery, galleryIdxWithImage}, + } + imageStudioLinks = [][2]int{ + {imageIdxWithStudio, studioIdxWithImage}, + } + imageTagLinks = [][2]int{ + {imageIdxWithTag, tagIdxWithImage}, + {imageIdxWithTwoTags, tagIdx1WithImage}, + {imageIdxWithTwoTags, tagIdx2WithImage}, + } + imagePerformerLinks = [][2]int{ + {imageIdxWithPerformer, performerIdxWithImage}, + {imageIdxWithTwoPerformers, performerIdx1WithImage}, + {imageIdxWithTwoPerformers, performerIdx2WithImage}, + {imageIdxWithPerformerTag, performerIdxWithTag}, + {imageIdxWithPerformerTwoTags, performerIdxWithTwoTags}, + } +) + +var ( + galleryPerformerLinks = [][2]int{ + {galleryIdxWithPerformer, performerIdxWithGallery}, + {galleryIdxWithTwoPerformers, performerIdx1WithGallery}, + {galleryIdxWithTwoPerformers, performerIdx2WithGallery}, + {galleryIdxWithPerformerTag, performerIdxWithTag}, + {galleryIdxWithPerformerTwoTags, performerIdxWithTwoTags}, + } + + galleryTagLinks = [][2]int{ + {galleryIdxWithTag, tagIdxWithGallery}, + {galleryIdxWithTwoTags, tagIdx1WithGallery}, + {galleryIdxWithTwoTags, tagIdx2WithGallery}, + } +) + +var ( + movieStudioLinks = [][2]int{ + {movieIdxWithStudio, studioIdxWithMovie}, + } +) + +var ( + studioParentLinks = [][2]int{ + {studioIdxWithChildStudio, studioIdxWithParentStudio}, + } +) + +var ( + performerTagLinks = [][2]int{ + {performerIdxWithTag, tagIdxWithPerformer}, + {performerIdxWithTwoTags, tagIdx1WithPerformer}, + {performerIdxWithTwoTags, tagIdx2WithPerformer}, + } +) func TestMain(m *testing.M) { ret := runTests(m) @@ -193,12 +335,16 @@ func populateDB() error { return fmt.Errorf("error creating studios: %s", err.Error()) } - if err := linkSceneGallery(r.Scene(), sceneIdxWithGallery, galleryIdxWithScene); err != nil { - return fmt.Errorf("error linking scene to gallery: %s", err.Error()) + if err := linkPerformerTags(r.Performer()); err != nil { + return fmt.Errorf("error linking performer tags: %s", err.Error()) } - if err := linkSceneMovie(r.Scene(), sceneIdxWithMovie, movieIdxWithScene); err != nil { - return fmt.Errorf("error scene to movie: %s", err.Error()) + if err := linkSceneGalleries(r.Scene()); err != nil { + return fmt.Errorf("error linking scenes to galleries: %s", err.Error()) + } + + if err := linkSceneMovies(r.Scene()); err != nil { + return fmt.Errorf("error linking scenes to movies: %s", err.Error()) } if err := linkScenePerformers(r.Scene()); err != nil { @@ -209,11 +355,11 @@ func populateDB() error { return fmt.Errorf("error linking scene tags: %s", err.Error()) } - if err := linkSceneStudio(r.Scene(), sceneIdxWithStudio, studioIdxWithScene); err != nil { - return fmt.Errorf("error linking scene studio: %s", err.Error()) + if err := linkSceneStudios(r.Scene()); err != nil { + return fmt.Errorf("error linking scene studios: %s", err.Error()) } - if err := linkImageGallery(r.Gallery(), imageIdxWithGallery, galleryIdxWithImage); err != nil { + if err := linkImageGalleries(r.Gallery()); err != nil { return fmt.Errorf("error linking gallery images: %s", err.Error()) } @@ -225,16 +371,28 @@ func populateDB() error { return fmt.Errorf("error linking image tags: %s", err.Error()) } - if err := linkImageStudio(r.Image(), imageIdxWithStudio, studioIdxWithImage); err != nil { + if err := linkImageStudios(r.Image()); err != nil { return fmt.Errorf("error linking image studio: %s", err.Error()) } - if err := 
linkMovieStudio(r.Movie(), movieIdxWithStudio, studioIdxWithMovie); err != nil { - return fmt.Errorf("error linking movie studio: %s", err.Error()) + if err := linkMovieStudios(r.Movie()); err != nil { + return fmt.Errorf("error linking movie studios: %s", err.Error()) } - if err := linkStudioParent(r.Studio(), studioIdxWithChildStudio, studioIdxWithParentStudio); err != nil { - return fmt.Errorf("error linking studio parent: %s", err.Error()) + if err := linkStudiosParent(r.Studio()); err != nil { + return fmt.Errorf("error linking studios parent: %s", err.Error()) + } + + if err := linkGalleryPerformers(r.Gallery()); err != nil { + return fmt.Errorf("error linking gallery performers: %s", err.Error()) + } + + if err := linkGalleryTags(r.Gallery()); err != nil { + return fmt.Errorf("error linking gallery tags: %s", err.Error()) + } + + if err := linkGalleryStudio(r.Gallery(), galleryIdxWithStudio, studioIdxWithGallery); err != nil { + return fmt.Errorf("error linking gallery studio: %s", err.Error()) } if err := createMarker(r.SceneMarker(), sceneIdxWithMarker, tagIdxWithPrimaryMarker, []int{tagIdxWithMarker}); err != nil { @@ -429,6 +587,15 @@ func getPerformerBirthdate(index int) string { return birthdate.Format("2006-01-02") } +func getPerformerCareerLength(index int) *string { + if index%5 == 0 { + return nil + } + + ret := fmt.Sprintf("20%2d", index) + return &ret +} + //createPerformers creates n performers with plain Name and o performers with camel cased NaMe included func createPerformers(pqb models.PerformerReaderWriter, n int, o int) error { const namePlain = "Name" @@ -455,6 +622,11 @@ func createPerformers(pqb models.PerformerReaderWriter, n int, o int) error { }, } + careerLength := getPerformerCareerLength(i) + if careerLength != nil { + performer.CareerLength = models.NullString(*careerLength) + } + created, err := pqb.Create(performer) if err != nil { @@ -488,6 +660,30 @@ func getTagMarkerCount(id int) int { return 0 } +func getTagImageCount(id int) int { + if id == tagIDs[tagIdx1WithImage] || id == tagIDs[tagIdx2WithImage] || id == tagIDs[tagIdxWithImage] { + return 1 + } + + return 0 +} + +func getTagGalleryCount(id int) int { + if id == tagIDs[tagIdx1WithGallery] || id == tagIDs[tagIdx2WithGallery] || id == tagIDs[tagIdxWithGallery] { + return 1 + } + + return 0 +} + +func getTagPerformerCount(id int) int { + if id == tagIDs[tagIdx1WithPerformer] || id == tagIDs[tagIdx2WithPerformer] || id == tagIDs[tagIdxWithPerformer] { + return 1 + } + + return 0 +} + //createTags creates n tags with plain Name and o tags with camel cased NaMe included func createTags(tqb models.TagReaderWriter, n int, o int) error { const namePlain = "Name" @@ -600,159 +796,188 @@ func createMarker(mqb models.SceneMarkerReaderWriter, sceneIdx, primaryTagIdx in return nil } -func linkSceneMovie(qb models.SceneReaderWriter, sceneIndex, movieIndex int) error { - sceneID := sceneIDs[sceneIndex] - movies, err := qb.GetMovies(sceneID) - if err != nil { - return err +func doLinks(links [][2]int, fn func(idx1, idx2 int) error) error { + for _, l := range links { + if err := fn(l[0], l[1]); err != nil { + return err + } } - movies = append(movies, models.MoviesScenes{ - MovieID: movieIDs[movieIndex], - SceneID: sceneID, + return nil +} + +func linkPerformerTags(qb models.PerformerReaderWriter) error { + return doLinks(performerTagLinks, func(performerIndex, tagIndex int) error { + performerID := performerIDs[performerIndex] + tagID := tagIDs[tagIndex] + tagIDs, err := qb.GetTagIDs(performerID) + if err != 
nil { + return err + } + + tagIDs = utils.IntAppendUnique(tagIDs, tagID) + + return qb.UpdateTags(performerID, tagIDs) + }) +} + +func linkSceneMovies(qb models.SceneReaderWriter) error { + return doLinks(sceneMovieLinks, func(sceneIndex, movieIndex int) error { + sceneID := sceneIDs[sceneIndex] + movies, err := qb.GetMovies(sceneID) + if err != nil { + return err + } + + movies = append(movies, models.MoviesScenes{ + MovieID: movieIDs[movieIndex], + SceneID: sceneID, + }) + return qb.UpdateMovies(sceneID, movies) }) - return qb.UpdateMovies(sceneID, movies) } func linkScenePerformers(qb models.SceneReaderWriter) error { - if err := linkScenePerformer(qb, sceneIdxWithPerformer, performerIdxWithScene); err != nil { + return doLinks(scenePerformerLinks, func(sceneIndex, performerIndex int) error { + _, err := scene.AddPerformer(qb, sceneIDs[sceneIndex], performerIDs[performerIndex]) return err - } - if err := linkScenePerformer(qb, sceneIdxWithTwoPerformers, performerIdx1WithScene); err != nil { - return err - } - if err := linkScenePerformer(qb, sceneIdxWithTwoPerformers, performerIdx2WithScene); err != nil { - return err - } - - return nil + }) } -func linkScenePerformer(qb models.SceneReaderWriter, sceneIndex, performerIndex int) error { - _, err := scene.AddPerformer(qb, sceneIDs[sceneIndex], performerIDs[performerIndex]) - return err -} - -func linkSceneGallery(qb models.SceneReaderWriter, sceneIndex, galleryIndex int) error { - _, err := scene.AddGallery(qb, sceneIDs[sceneIndex], galleryIDs[galleryIndex]) - return err +func linkSceneGalleries(qb models.SceneReaderWriter) error { + return doLinks(sceneGalleryLinks, func(sceneIndex, galleryIndex int) error { + _, err := scene.AddGallery(qb, sceneIDs[sceneIndex], galleryIDs[galleryIndex]) + return err + }) } func linkSceneTags(qb models.SceneReaderWriter) error { - if err := linkSceneTag(qb, sceneIdxWithTag, tagIdxWithScene); err != nil { + return doLinks(sceneTagLinks, func(sceneIndex, tagIndex int) error { + _, err := scene.AddTag(qb, sceneIDs[sceneIndex], tagIDs[tagIndex]) return err - } - if err := linkSceneTag(qb, sceneIdxWithTwoTags, tagIdx1WithScene); err != nil { - return err - } - if err := linkSceneTag(qb, sceneIdxWithTwoTags, tagIdx2WithScene); err != nil { - return err - } - - return nil + }) } -func linkSceneTag(qb models.SceneReaderWriter, sceneIndex, tagIndex int) error { - _, err := scene.AddTag(qb, sceneIDs[sceneIndex], tagIDs[tagIndex]) - return err +func linkSceneStudios(sqb models.SceneWriter) error { + return doLinks(sceneStudioLinks, func(sceneIndex, studioIndex int) error { + scene := models.ScenePartial{ + ID: sceneIDs[sceneIndex], + StudioID: &sql.NullInt64{Int64: int64(studioIDs[studioIndex]), Valid: true}, + } + _, err := sqb.Update(scene) + + return err + }) } -func linkSceneStudio(sqb models.SceneWriter, sceneIndex, studioIndex int) error { - scene := models.ScenePartial{ - ID: sceneIDs[sceneIndex], - StudioID: &sql.NullInt64{Int64: int64(studioIDs[studioIndex]), Valid: true}, - } - _, err := sqb.Update(scene) - - return err -} - -func linkImageGallery(gqb models.GalleryReaderWriter, imageIndex, galleryIndex int) error { - return gallery.AddImage(gqb, galleryIDs[galleryIndex], imageIDs[imageIndex]) +func linkImageGalleries(gqb models.GalleryReaderWriter) error { + return doLinks(imageGalleryLinks, func(imageIndex, galleryIndex int) error { + return gallery.AddImage(gqb, galleryIDs[galleryIndex], imageIDs[imageIndex]) + }) } func linkImageTags(iqb models.ImageReaderWriter) error { - if err := linkImageTag(iqb, 
imageIdxWithTag, tagIdxWithImage); err != nil { - return err - } - if err := linkImageTag(iqb, imageIdxWithTwoTags, tagIdx1WithImage); err != nil { - return err - } - if err := linkImageTag(iqb, imageIdxWithTwoTags, tagIdx2WithImage); err != nil { - return err - } + return doLinks(imageTagLinks, func(imageIndex, tagIndex int) error { + imageID := imageIDs[imageIndex] + tags, err := iqb.GetTagIDs(imageID) + if err != nil { + return err + } - return nil + tags = append(tags, tagIDs[tagIndex]) + + return iqb.UpdateTags(imageID, tags) + }) } -func linkImageTag(iqb models.ImageReaderWriter, imageIndex, tagIndex int) error { - imageID := imageIDs[imageIndex] - tags, err := iqb.GetTagIDs(imageID) - if err != nil { +func linkImageStudios(qb models.ImageWriter) error { + return doLinks(imageStudioLinks, func(imageIndex, studioIndex int) error { + image := models.ImagePartial{ + ID: imageIDs[imageIndex], + StudioID: &sql.NullInt64{Int64: int64(studioIDs[studioIndex]), Valid: true}, + } + _, err := qb.Update(image) + return err - } - - tags = append(tags, tagIDs[tagIndex]) - - return iqb.UpdateTags(imageID, tags) -} - -func linkImageStudio(qb models.ImageWriter, imageIndex, studioIndex int) error { - image := models.ImagePartial{ - ID: imageIDs[imageIndex], - StudioID: &sql.NullInt64{Int64: int64(studioIDs[studioIndex]), Valid: true}, - } - _, err := qb.Update(image) - - return err + }) } func linkImagePerformers(qb models.ImageReaderWriter) error { - if err := linkImagePerformer(qb, imageIdxWithPerformer, performerIdxWithImage); err != nil { - return err - } - if err := linkImagePerformer(qb, imageIdxWithTwoPerformers, performerIdx1WithImage); err != nil { - return err - } - if err := linkImagePerformer(qb, imageIdxWithTwoPerformers, performerIdx2WithImage); err != nil { - return err - } + return doLinks(imagePerformerLinks, func(imageIndex, performerIndex int) error { + imageID := imageIDs[imageIndex] + performers, err := qb.GetPerformerIDs(imageID) + if err != nil { + return err + } - return nil + performers = append(performers, performerIDs[performerIndex]) + + return qb.UpdatePerformers(imageID, performers) +} -func linkImagePerformer(iqb models.ImageReaderWriter, imageIndex, performerIndex int) error { - imageID := imageIDs[imageIndex] - performers, err := iqb.GetPerformerIDs(imageID) - if err != nil { +func linkGalleryPerformers(qb models.GalleryReaderWriter) error { + return doLinks(galleryPerformerLinks, func(galleryIndex, performerIndex int) error { + galleryID := galleryIDs[galleryIndex] + performers, err := qb.GetPerformerIDs(galleryID) + if err != nil { + return err + } + + performers = append(performers, performerIDs[performerIndex]) + + return qb.UpdatePerformers(galleryID, performers) + }) +} + +func linkGalleryTags(iqb models.GalleryReaderWriter) error { + return doLinks(galleryTagLinks, func(galleryIndex, tagIndex int) error { + galleryID := galleryIDs[galleryIndex] + tags, err := iqb.GetTagIDs(galleryID) + if err != nil { + return err + } + + tags = append(tags, tagIDs[tagIndex]) + + return iqb.UpdateTags(galleryID, tags) + }) +} + +func linkMovieStudios(mqb models.MovieWriter) error { + return doLinks(movieStudioLinks, func(movieIndex, studioIndex int) error { + movie := models.MoviePartial{ + ID: movieIDs[movieIndex], + StudioID: &sql.NullInt64{Int64: int64(studioIDs[studioIndex]), Valid: true}, + } + _, err := mqb.Update(movie) + + return err - } - - performers = append(performers, performerIDs[performerIndex]) - - return iqb.UpdatePerformers(imageID, performers) + }) } -func
linkMovieStudio(mqb models.MovieWriter, movieIndex, studioIndex int) error { - movie := models.MoviePartial{ - ID: movieIDs[movieIndex], - StudioID: &sql.NullInt64{Int64: int64(studioIDs[studioIndex]), Valid: true}, - } - _, err := mqb.Update(movie) +func linkStudiosParent(qb models.StudioWriter) error { + return doLinks(studioParentLinks, func(parentIndex, childIndex int) error { + studio := models.StudioPartial{ + ID: studioIDs[childIndex], + ParentID: &sql.NullInt64{Int64: int64(studioIDs[parentIndex]), Valid: true}, + } + _, err := qb.Update(studio) - return err -} - -func linkStudioParent(qb models.StudioWriter, parentIndex, childIndex int) error { - studio := models.StudioPartial{ - ID: studioIDs[childIndex], - ParentID: &sql.NullInt64{Int64: int64(studioIDs[parentIndex]), Valid: true}, - } - _, err := qb.Update(studio) - - return err + return err + }) } func addTagImage(qb models.TagWriter, tagIndex int) error { return qb.UpdateImage(tagIDs[tagIndex], models.DefaultTagImage) } + +func linkGalleryStudio(qb models.GalleryWriter, galleryIndex, studioIndex int) error { + gallery := models.GalleryPartial{ + ID: galleryIDs[galleryIndex], + StudioID: &sql.NullInt64{Int64: int64(studioIDs[studioIndex]), Valid: true}, + } + _, err := qb.UpdatePartial(gallery) + + return err +} diff --git a/pkg/sqlite/sql.go b/pkg/sqlite/sql.go index 7736559d2..71b46fe80 100644 --- a/pkg/sqlite/sql.go +++ b/pkg/sqlite/sql.go @@ -103,7 +103,7 @@ func getSearchBinding(columns []string, q string, not bool) (string, []interface notStr := "" binaryType := " OR " if not { - notStr = " NOT " + notStr = " NOT" binaryType = " AND " } diff --git a/pkg/sqlite/studio.go b/pkg/sqlite/studio.go index 84c04eed5..fa08017a1 100644 --- a/pkg/sqlite/studio.go +++ b/pkg/sqlite/studio.go @@ -121,10 +121,6 @@ func (qb *studioQueryBuilder) All() ([]*models.Studio, error) { return qb.queryStudios(selectAll("studios")+qb.getStudioSort(nil), nil) } -func (qb *studioQueryBuilder) AllSlim() ([]*models.Studio, error) { - return qb.queryStudios("SELECT studios.id, studios.name, studios.parent_id FROM studios "+qb.getStudioSort(nil), nil) -} - func (qb *studioQueryBuilder) Query(studioFilter *models.StudioFilterType, findFilter *models.FindFilterType) ([]*models.Studio, int, error) { if studioFilter == nil { studioFilter = &models.StudioFilterType{} diff --git a/pkg/sqlite/tag.go b/pkg/sqlite/tag.go index 688b6e4b4..acb0105d1 100644 --- a/pkg/sqlite/tag.go +++ b/pkg/sqlite/tag.go @@ -108,11 +108,23 @@ func (qb *tagQueryBuilder) FindBySceneID(sceneID int) ([]*models.Tag, error) { WHERE scenes_join.scene_id = ? GROUP BY tags.id ` - query += qb.getTagSort(nil) + query += qb.getDefaultTagSort() args := []interface{}{sceneID} return qb.queryTags(query, args) } +func (qb *tagQueryBuilder) FindByPerformerID(performerID int) ([]*models.Tag, error) { + query := ` + SELECT tags.* FROM tags + LEFT JOIN performers_tags as performers_join on performers_join.tag_id = tags.id + WHERE performers_join.performer_id = ? + GROUP BY tags.id + ` + query += qb.getDefaultTagSort() + args := []interface{}{performerID} + return qb.queryTags(query, args) +} + func (qb *tagQueryBuilder) FindByImageID(imageID int) ([]*models.Tag, error) { query := ` SELECT tags.* FROM tags @@ -120,7 +132,7 @@ func (qb *tagQueryBuilder) FindByImageID(imageID int) ([]*models.Tag, error) { WHERE images_join.image_id = ? 
GROUP BY tags.id ` - query += qb.getTagSort(nil) + query += qb.getDefaultTagSort() args := []interface{}{imageID} return qb.queryTags(query, args) } @@ -132,7 +144,7 @@ func (qb *tagQueryBuilder) FindByGalleryID(galleryID int) ([]*models.Tag, error) WHERE galleries_join.gallery_id = ? GROUP BY tags.id ` - query += qb.getTagSort(nil) + query += qb.getDefaultTagSort() args := []interface{}{galleryID} return qb.queryTags(query, args) } @@ -144,7 +156,7 @@ func (qb *tagQueryBuilder) FindBySceneMarkerID(sceneMarkerID int) ([]*models.Tag WHERE scene_markers_join.scene_marker_id = ? GROUP BY tags.id ` - query += qb.getTagSort(nil) + query += qb.getDefaultTagSort() args := []interface{}{sceneMarkerID} return qb.queryTags(query, args) } @@ -177,11 +189,68 @@ func (qb *tagQueryBuilder) Count() (int, error) { } func (qb *tagQueryBuilder) All() ([]*models.Tag, error) { - return qb.queryTags(selectAll("tags")+qb.getTagSort(nil), nil) + return qb.queryTags(selectAll("tags")+qb.getDefaultTagSort(), nil) } -func (qb *tagQueryBuilder) AllSlim() ([]*models.Tag, error) { - return qb.queryTags("SELECT tags.id, tags.name FROM tags "+qb.getTagSort(nil), nil) +func (qb *tagQueryBuilder) validateFilter(tagFilter *models.TagFilterType) error { + const and = "AND" + const or = "OR" + const not = "NOT" + + if tagFilter.And != nil { + if tagFilter.Or != nil { + return illegalFilterCombination(and, or) + } + if tagFilter.Not != nil { + return illegalFilterCombination(and, not) + } + + return qb.validateFilter(tagFilter.And) + } + + if tagFilter.Or != nil { + if tagFilter.Not != nil { + return illegalFilterCombination(or, not) + } + + return qb.validateFilter(tagFilter.Or) + } + + if tagFilter.Not != nil { + return qb.validateFilter(tagFilter.Not) + } + + return nil +} + +func (qb *tagQueryBuilder) makeFilter(tagFilter *models.TagFilterType) *filterBuilder { + query := &filterBuilder{} + + if tagFilter.And != nil { + query.and(qb.makeFilter(tagFilter.And)) + } + if tagFilter.Or != nil { + query.or(qb.makeFilter(tagFilter.Or)) + } + if tagFilter.Not != nil { + query.not(qb.makeFilter(tagFilter.Not)) + } + + // if markerCount := tagFilter.MarkerCount; markerCount != nil { + // clause, count := getIntCriterionWhereClause("count(distinct scene_markers.id)", *markerCount) + // query.addHaving(clause) + // if count == 1 { + // query.addArg(markerCount.Value) + // } + // } + + query.handleCriterionFunc(tagIsMissingCriterionHandler(qb, tagFilter.IsMissing)) + query.handleCriterionFunc(tagSceneCountCriterionHandler(qb, tagFilter.SceneCount)) + query.handleCriterionFunc(tagImageCountCriterionHandler(qb, tagFilter.ImageCount)) + query.handleCriterionFunc(tagGalleryCountCriterionHandler(qb, tagFilter.GalleryCount)) + query.handleCriterionFunc(tagPerformerCountCriterionHandler(qb, tagFilter.PerformerCount)) + + return query } func (qb *tagQueryBuilder) Query(tagFilter *models.TagFilterType, findFilter *models.FindFilterType) ([]*models.Tag, int, error) { @@ -209,11 +278,6 @@ func (qb *tagQueryBuilder) Query(tagFilter *models.TagFilterType, findFilter *mo // appears to confuse sqlite and causes serious performance issues. // Disabling querying/sorting on marker count for now. 
- query.body += ` - left join tags_image on tags_image.tag_id = tags.id - left join scenes_tags on scenes_tags.tag_id = tags.id - left join scenes on scenes_tags.scene_id = scenes.id` - if q := findFilter.Q; q != nil && *q != "" { searchColumns := []string{"tags.name"} clause, thisArgs := getSearchBinding(searchColumns, *q, false) @@ -221,32 +285,14 @@ func (qb *tagQueryBuilder) Query(tagFilter *models.TagFilterType, findFilter *mo query.addArg(thisArgs...) } - if isMissingFilter := tagFilter.IsMissing; isMissingFilter != nil && *isMissingFilter != "" { - switch *isMissingFilter { - case "image": - query.addWhere("tags_image.tag_id IS NULL") - default: - query.addWhere("tags." + *isMissingFilter + " IS NULL") - } + if err := qb.validateFilter(tagFilter); err != nil { + return nil, 0, err } + filter := qb.makeFilter(tagFilter) - if sceneCount := tagFilter.SceneCount; sceneCount != nil { - clause, count := getIntCriterionWhereClause("count(distinct scenes_tags.scene_id)", *sceneCount) - query.addHaving(clause) - if count == 1 { - query.addArg(sceneCount.Value) - } - } + query.addFilter(filter) - // if markerCount := tagFilter.MarkerCount; markerCount != nil { - // clause, count := getIntCriterionWhereClause("count(distinct scene_markers.id)", *markerCount) - // query.addHaving(clause) - // if count == 1 { - // query.addArg(markerCount.Value) - // } - // } - - query.sortAndPagination = qb.getTagSort(findFilter) + getPagination(findFilter) + query.sortAndPagination = qb.getTagSort(&query, findFilter) + getPagination(findFilter) idsResult, countResult, err := query.executeFind() if err != nil { return nil, 0, err @@ -264,7 +310,89 @@ func (qb *tagQueryBuilder) Query(tagFilter *models.TagFilterType, findFilter *mo return tags, countResult, nil } -func (qb *tagQueryBuilder) getTagSort(findFilter *models.FindFilterType) string { +func tagIsMissingCriterionHandler(qb *tagQueryBuilder, isMissing *string) criterionHandlerFunc { + return func(f *filterBuilder) { + if isMissing != nil && *isMissing != "" { + switch *isMissing { + case "image": + qb.imageRepository().join(f, "", "tags.id") + f.addWhere("tags_image.tag_id IS NULL") + default: + f.addWhere("(tags." + *isMissing + " IS NULL OR TRIM(tags." + *isMissing + ") = '')") + } + } + } +} + +func tagSceneCountCriterionHandler(qb *tagQueryBuilder, sceneCount *models.IntCriterionInput) criterionHandlerFunc { + return func(f *filterBuilder) { + if sceneCount != nil { + f.addJoin("scenes_tags", "", "scenes_tags.tag_id = tags.id") + clause, count := getIntCriterionWhereClause("count(distinct scenes_tags.scene_id)", *sceneCount) + + args := []interface{}{} + if count == 1 { + args = append(args, sceneCount.Value) + } + + f.addHaving(clause, args...) + } + } +} + +func tagImageCountCriterionHandler(qb *tagQueryBuilder, imageCount *models.IntCriterionInput) criterionHandlerFunc { + return func(f *filterBuilder) { + if imageCount != nil { + f.addJoin("images_tags", "", "images_tags.tag_id = tags.id") + clause, count := getIntCriterionWhereClause("count(distinct images_tags.image_id)", *imageCount) + + args := []interface{}{} + if count == 1 { + args = append(args, imageCount.Value) + } + + f.addHaving(clause, args...) 
+ } + } +} + +func tagGalleryCountCriterionHandler(qb *tagQueryBuilder, galleryCount *models.IntCriterionInput) criterionHandlerFunc { + return func(f *filterBuilder) { + if galleryCount != nil { + f.addJoin("galleries_tags", "", "galleries_tags.tag_id = tags.id") + clause, count := getIntCriterionWhereClause("count(distinct galleries_tags.gallery_id)", *galleryCount) + + args := []interface{}{} + if count == 1 { + args = append(args, galleryCount.Value) + } + + f.addHaving(clause, args...) + } + } +} + +func tagPerformerCountCriterionHandler(qb *tagQueryBuilder, performerCount *models.IntCriterionInput) criterionHandlerFunc { + return func(f *filterBuilder) { + if performerCount != nil { + f.addJoin("performers_tags", "", "performers_tags.tag_id = tags.id") + clause, count := getIntCriterionWhereClause("count(distinct performers_tags.performer_id)", *performerCount) + + args := []interface{}{} + if count == 1 { + args = append(args, performerCount.Value) + } + + f.addHaving(clause, args...) + } + } +} + +func (qb *tagQueryBuilder) getDefaultTagSort() string { + return getSort("name", "ASC", "tags") +} + +func (qb *tagQueryBuilder) getTagSort(query *queryBuilder, findFilter *models.FindFilterType) string { var sort string var direction string if findFilter == nil { @@ -274,6 +402,24 @@ func (qb *tagQueryBuilder) getTagSort(findFilter *models.FindFilterType) string sort = findFilter.GetSort("name") direction = findFilter.GetDirection() } + + if findFilter.Sort != nil { + switch *findFilter.Sort { + case "scenes_count": + query.join("scenes_tags", "", "scenes_tags.tag_id = tags.id") + return " ORDER BY COUNT(distinct scenes_tags.scene_id) " + direction + case "images_count": + query.join("images_tags", "", "images_tags.tag_id = tags.id") + return " ORDER BY COUNT(distinct images_tags.image_id) " + direction + case "galleries_count": + query.join("galleries_tags", "", "galleries_tags.tag_id = tags.id") + return " ORDER BY COUNT(distinct galleries_tags.gallery_id) " + direction + case "performers_count": + query.join("performers_tags", "", "performers_tags.tag_id = tags.id") + return " ORDER BY COUNT(distinct performers_tags.performer_id) " + direction + } + } + return getSort(sort, direction, "tags") } diff --git a/pkg/sqlite/tag_test.go b/pkg/sqlite/tag_test.go index 532a23201..272006776 100644 --- a/pkg/sqlite/tag_test.go +++ b/pkg/sqlite/tag_test.go @@ -238,6 +238,132 @@ func verifyTagMarkerCount(t *testing.T, markerCountCriterion models.IntCriterion }) } +func TestTagQueryImageCount(t *testing.T) { + countCriterion := models.IntCriterionInput{ + Value: 1, + Modifier: models.CriterionModifierEquals, + } + + verifyTagImageCount(t, countCriterion) + + countCriterion.Modifier = models.CriterionModifierNotEquals + verifyTagImageCount(t, countCriterion) + + countCriterion.Modifier = models.CriterionModifierLessThan + verifyTagImageCount(t, countCriterion) + + countCriterion.Value = 0 + countCriterion.Modifier = models.CriterionModifierGreaterThan + verifyTagImageCount(t, countCriterion) +} + +func verifyTagImageCount(t *testing.T, imageCountCriterion models.IntCriterionInput) { + withTxn(func(r models.Repository) error { + qb := r.Tag() + tagFilter := models.TagFilterType{ + ImageCount: &imageCountCriterion, + } + + tags, _, err := qb.Query(&tagFilter, nil) + if err != nil { + t.Errorf("Error querying tag: %s", err.Error()) + } + + for _, tag := range tags { + verifyInt64(t, sql.NullInt64{ + Int64: int64(getTagImageCount(tag.ID)), + Valid: true, + }, imageCountCriterion) + } + + return nil + }) 
+} + +func TestTagQueryGalleryCount(t *testing.T) { + countCriterion := models.IntCriterionInput{ + Value: 1, + Modifier: models.CriterionModifierEquals, + } + + verifyTagGalleryCount(t, countCriterion) + + countCriterion.Modifier = models.CriterionModifierNotEquals + verifyTagGalleryCount(t, countCriterion) + + countCriterion.Modifier = models.CriterionModifierLessThan + verifyTagGalleryCount(t, countCriterion) + + countCriterion.Value = 0 + countCriterion.Modifier = models.CriterionModifierGreaterThan + verifyTagGalleryCount(t, countCriterion) +} + +func verifyTagGalleryCount(t *testing.T, imageCountCriterion models.IntCriterionInput) { + withTxn(func(r models.Repository) error { + qb := r.Tag() + tagFilter := models.TagFilterType{ + GalleryCount: &imageCountCriterion, + } + + tags, _, err := qb.Query(&tagFilter, nil) + if err != nil { + t.Errorf("Error querying tag: %s", err.Error()) + } + + for _, tag := range tags { + verifyInt64(t, sql.NullInt64{ + Int64: int64(getTagGalleryCount(tag.ID)), + Valid: true, + }, imageCountCriterion) + } + + return nil + }) +} + +func TestTagQueryPerformerCount(t *testing.T) { + countCriterion := models.IntCriterionInput{ + Value: 1, + Modifier: models.CriterionModifierEquals, + } + + verifyTagPerformerCount(t, countCriterion) + + countCriterion.Modifier = models.CriterionModifierNotEquals + verifyTagPerformerCount(t, countCriterion) + + countCriterion.Modifier = models.CriterionModifierLessThan + verifyTagPerformerCount(t, countCriterion) + + countCriterion.Value = 0 + countCriterion.Modifier = models.CriterionModifierGreaterThan + verifyTagPerformerCount(t, countCriterion) +} + +func verifyTagPerformerCount(t *testing.T, imageCountCriterion models.IntCriterionInput) { + withTxn(func(r models.Repository) error { + qb := r.Tag() + tagFilter := models.TagFilterType{ + PerformerCount: &imageCountCriterion, + } + + tags, _, err := qb.Query(&tagFilter, nil) + if err != nil { + t.Errorf("Error querying tag: %s", err.Error()) + } + + for _, tag := range tags { + verifyInt64(t, sql.NullInt64{ + Int64: int64(getTagPerformerCount(tag.ID)), + Valid: true, + }, imageCountCriterion) + } + + return nil + }) +} + func TestTagUpdateTagImage(t *testing.T) { if err := withTxn(func(r models.Repository) error { qb := r.Tag() diff --git a/pkg/sqlite/transaction.go b/pkg/sqlite/transaction.go index 8ec0fd5d2..50016db45 100644 --- a/pkg/sqlite/transaction.go +++ b/pkg/sqlite/transaction.go @@ -176,17 +176,15 @@ func (t *ReadTransaction) Tag() models.TagReader { } type TransactionManager struct { - // only allow one write transaction at a time - c chan struct{} } func NewTransactionManager() *TransactionManager { - return &TransactionManager{ - c: make(chan struct{}, 1), - } + return &TransactionManager{} } func (t *TransactionManager) WithTxn(ctx context.Context, fn func(r models.Repository) error) error { + database.WriteMu.Lock() + defer database.WriteMu.Unlock() return models.WithTxn(&transaction{Ctx: ctx}, fn) } diff --git a/pkg/utils/boolean.go b/pkg/utils/boolean.go index f0abd02c3..a5f23733b 100644 --- a/pkg/utils/boolean.go +++ b/pkg/utils/boolean.go @@ -7,3 +7,8 @@ func Btoi(b bool) int { } return 0 } + +// IsTrue returns true if the bool pointer is not nil and true. 
+func IsTrue(b *bool) bool { + return b != nil && *b +} diff --git a/pkg/utils/file.go b/pkg/utils/file.go index 9e0368db7..563c3fa54 100644 --- a/pkg/utils/file.go +++ b/pkg/utils/file.go @@ -276,7 +276,7 @@ func IsPathInDir(dir, pathToCheck string) bool { rel, err := filepath.Rel(dir, pathToCheck) if err == nil { - if !strings.HasPrefix(rel, ".."+string(filepath.Separator)) { + if !strings.HasPrefix(rel, "..") { return true } } diff --git a/pkg/utils/file_test.go b/pkg/utils/file_test.go new file mode 100644 index 000000000..611ea3964 --- /dev/null +++ b/pkg/utils/file_test.go @@ -0,0 +1,43 @@ +package utils + +import ( + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestIsPathInDir(t *testing.T) { + type test struct { + dir string + pathToCheck string + expected bool + } + + const parentDirName = "parentDir" + const subDirName = "subDir" + const filename = "filename" + subDir := filepath.Join(parentDirName, subDirName) + fileInSubDir := filepath.Join(subDir, filename) + fileInParentDir := filepath.Join(parentDirName, filename) + subSubSubDir := filepath.Join(parentDirName, subDirName, subDirName, subDirName) + + tests := []test{ + {dir: parentDirName, pathToCheck: subDir, expected: true}, + {dir: subDir, pathToCheck: subDir, expected: true}, + {dir: subDir, pathToCheck: parentDirName, expected: false}, + {dir: subDir, pathToCheck: fileInSubDir, expected: true}, + {dir: parentDirName, pathToCheck: fileInSubDir, expected: true}, + {dir: subDir, pathToCheck: fileInParentDir, expected: false}, + {dir: parentDirName, pathToCheck: fileInParentDir, expected: true}, + {dir: parentDirName, pathToCheck: filename, expected: false}, + {dir: parentDirName, pathToCheck: subSubSubDir, expected: true}, + {dir: subSubSubDir, pathToCheck: parentDirName, expected: false}, + } + + assert := assert.New(t) + for i, tc := range tests { + result := IsPathInDir(tc.dir, tc.pathToCheck) + assert.Equal(tc.expected, result, "[%d] expected: %t for dir: %s; pathToCheck: %s", i, tc.expected, tc.dir, tc.pathToCheck) + } +} diff --git a/pkg/utils/image.go b/pkg/utils/image.go index 7c550a0be..ad4f56941 100644 --- a/pkg/utils/image.go +++ b/pkg/utils/image.go @@ -2,13 +2,78 @@ package utils import ( "crypto/md5" + "crypto/tls" "encoding/base64" "fmt" + "io/ioutil" "net/http" "regexp" "strings" + "time" ) +// Timeout to get the image. Includes transfer time. May want to make this +// configurable at some point. +const imageGetTimeout = time.Second * 60 + +const base64RE = `^data:.+\/(.+);base64,(.*)$` + +// ProcessImageInput transforms an image string either from a base64 encoded +// string, or from a URL, and returns the image as a byte slice +func ProcessImageInput(imageInput string) ([]byte, error) { + regex := regexp.MustCompile(base64RE) + if regex.MatchString(imageInput) { + _, d, err := ProcessBase64Image(imageInput) + return d, err + } + + // assume input is a URL. Read it. 
+ return ReadImageFromURL(imageInput) +} + +// ReadImageFromURL returns image data from a URL +func ReadImageFromURL(url string) ([]byte, error) { + client := &http.Client{ + Transport: &http.Transport{ // ignore insecure certificates + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + }, + + Timeout: imageGetTimeout, + } + + req, err := http.NewRequest("GET", url, nil) + if err != nil { + return nil, err + } + + // assume is a URL for now + + // set the host of the URL as the referer + if req.URL.Scheme != "" { + req.Header.Set("Referer", req.URL.Scheme+"://"+req.Host+"/") + } + req.Header.Set("User-Agent", GetUserAgent()) + + resp, err := client.Do(req) + + if err != nil { + return nil, err + } + + if resp.StatusCode >= 400 { + return nil, fmt.Errorf("http error %d", resp.StatusCode) + } + + defer resp.Body.Close() + + body, err := ioutil.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + return body, nil +} + // ProcessBase64Image transforms a base64 encoded string from a form post and returns the MD5 hash of the data and the // image itself as a byte slice. func ProcessBase64Image(imageString string) (string, []byte, error) { @@ -16,7 +81,7 @@ func ProcessBase64Image(imageString string) (string, []byte, error) { return "", nil, fmt.Errorf("empty image string") } - regex := regexp.MustCompile(`^data:.+\/(.+);base64,(.*)$`) + regex := regexp.MustCompile(base64RE) matches := regex.FindStringSubmatch(imageString) var encodedString string if len(matches) > 2 { @@ -66,6 +131,7 @@ func ServeImage(image []byte, w http.ResponseWriter, r *http.Request) error { w.Header().Set("Content-Type", contentType) w.Header().Add("Etag", etag) + w.Header().Set("Cache-Control", "public, max-age=604800, immutable") _, err := w.Write(image) return err } diff --git a/pkg/utils/strings.go b/pkg/utils/strings.go new file mode 100644 index 000000000..f0b8d93d4 --- /dev/null +++ b/pkg/utils/strings.go @@ -0,0 +1,17 @@ +package utils + +import ( + "math/rand" + "time" +) + +var characters = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890") + +func RandomSequence(n int) string { + b := make([]rune, n) + rand.Seed(time.Now().UnixNano()) + for i := range b { + b[i] = characters[rand.Intn(len(characters))] + } + return string(b) +} diff --git a/pkg/utils/user_agent.go b/pkg/utils/user_agent.go new file mode 100644 index 000000000..533b6b6ab --- /dev/null +++ b/pkg/utils/user_agent.go @@ -0,0 +1,36 @@ +package utils + +import "runtime" + +// valid UA from https://user-agents.net +const Safari = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0.3 Safari/605.1.15/iY0wnXbs-59" +const FirefoxWindows = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:88.0) Gecko/20100101 Firefox/88.0" +const FirefoxLinux = "Mozilla/5.0 (X11; Linux x86_64; rv:88.0) Gecko/20100101 Firefox/88.0" +const FirefoxLinuxArm = "Mozilla/5.0 (X11; Linux armv7l; rv:86.0) Gecko/20100101 Firefox/86.0" +const FirefoxLinuxArm64 = "Mozilla/5.0 (X11; Linux aarch64; rv:86.0) Gecko/20100101 Firefox/86.0" + +// GetUserAgent returns a valid User Agent string that matches the running os/arch +func GetUserAgent() string { + arch := runtime.GOARCH + os := runtime.GOOS + + switch os { + case "darwin": + return Safari + case "windows": + return FirefoxWindows + case "linux": + switch arch { + case "arm": + return FirefoxLinuxArm + case "arm64": + return FirefoxLinuxArm64 + case "amd64": + return FirefoxLinux + default: + return FirefoxLinux + } + default: + return FirefoxLinux + 
} +} diff --git a/scripts/check-gofmt.sh b/scripts/check-gofmt.sh index 7ea5015c8..24fa914a5 100644 --- a/scripts/check-gofmt.sh +++ b/scripts/check-gofmt.sh @@ -28,8 +28,8 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -gofiles=$(git diff --name-only --diff-filter=ACM develop -- '*.go' ':!vendor') -[ -z "$gofiles" ] && exit 0 +gofiles=./pkg/ +[ "$OS" = "Windows_NT" ] && gofiles=.\\pkg\\ unformatted=$(gofmt -l $gofiles) [ -z "$unformatted" ] && exit 0 diff --git a/scripts/cross-compile.sh b/scripts/cross-compile.sh index e6774d94c..5e6d302d4 100755 --- a/scripts/cross-compile.sh +++ b/scripts/cross-compile.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # "stashapp/compiler:develop" "stashapp/compiler:4" COMPILER_CONTAINER="stashapp/compiler:4" diff --git a/ui/v2.5/.env b/ui/v2.5/.env index e56c6e3aa..66df2e28e 100644 --- a/ui/v2.5/.env +++ b/ui/v2.5/.env @@ -1,2 +1,3 @@ BROWSER=none PORT=3000 +ESLINT_NO_DEV_ERRORS=true diff --git a/ui/v2.5/package.json b/ui/v2.5/package.json index 8f4ad36a3..883804306 100644 --- a/ui/v2.5/package.json +++ b/ui/v2.5/package.json @@ -35,6 +35,7 @@ "@fortawesome/free-solid-svg-icons": "^5.15.2", "@fortawesome/react-fontawesome": "^0.1.14", "@types/react-select": "^3.1.2", + "@types/yup": "^0.29.11", "apollo-upload-client": "^14.1.3", "axios": "0.21.1", "base64-blob": "^1.4.1", @@ -46,6 +47,7 @@ "graphql": "^15.4.0", "graphql-tag": "^2.11.0", "i18n-iso-countries": "^6.4.0", + "intersection-observer": "^0.12.0", "jimp": "^0.16.1", "localforage": "1.9.0", "lodash": "^4.17.20", @@ -66,7 +68,8 @@ "sass": "^1.32.5", "string.prototype.replaceall": "^1.0.4", "subscriptions-transport-ws": "^0.9.18", - "universal-cookie": "^4.0.4" + "universal-cookie": "^4.0.4", + "yup": "^0.32.9" }, "devDependencies": { "@graphql-codegen/add": "^2.0.2", @@ -98,7 +101,7 @@ "extract-react-intl-messages": "^4.1.1", "postcss-safe-parser": "^5.0.2", "prettier": "2.2.1", - "react-scripts": "^4.0.1", + "react-scripts": "^4.0.3", "stylelint": "^13.9.0", "stylelint-config-prettier": "^8.0.2", "stylelint-order": "^4.1.0", diff --git a/ui/v2.5/src/components/Changelog/Changelog.tsx b/ui/v2.5/src/components/Changelog/Changelog.tsx index 1d8d321e8..7d0434a3c 100644 --- a/ui/v2.5/src/components/Changelog/Changelog.tsx +++ b/ui/v2.5/src/components/Changelog/Changelog.tsx @@ -8,6 +8,7 @@ import V021 from "./versions/v021.md"; import V030 from "./versions/v030.md"; import V040 from "./versions/v040.md"; import V050 from "./versions/v050.md"; +import V060 from "./versions/v060.md"; import { MarkdownPage } from "../Shared/MarkdownPage"; const Changelog: React.FC = () => { @@ -37,11 +38,19 @@ const Changelog: React.FC = () => { <>

Changelog:

+
+
+
diff --git a/ui/v2.5/src/components/Changelog/versions/v060.md
new file mode 100644
index 000000000..d7f4278d6
--- /dev/null
+++ b/ui/v2.5/src/components/Changelog/versions/v060.md
@@ -0,0 +1,31 @@
+### ✨ New Features
+* Added Performer tags.
+
+### 🎨 Improvements
+* Improve performer scraper search modal.
+* Add galleries tab to Tag details page.
+* Allow scene/performer/studio image upload via URL.
+* Add button to hide unmatched scenes in Tagger view.
+* Hide create option in dropdowns when searching in filters.
+* Add scrape gallery from fragment to the UI.
+* Improve performer details and edit UI pages.
+* Resolve Python executable to `python3` or `python` for Python script scrapers.
+* Add `url` field to `URLReplace`, and make `queryURLReplace` available when scraping by URL.
+* Make logging format consistent across platforms and include full timestamp.
+* Remember gallery images view mode.
+* Add option to skip checking of insecure SSL certificates when scraping.
+* Auto-play video previews on mobile devices.
+* Replace hover menu with dropdown menu for O-Counter.
+* Support random strings for scraper cookie values.
+* Add Rescan button to the scene, image and gallery details overflow menus.
+
+### 🐛 Bug fixes
+* Fix SQL error when filtering nullable string fields with regex.
+* Fix incorrect folders being excluded during scanning.
+* Filter out streaming resolution options that are over the maximum streaming resolution.
+* Fix `cover.jpg` not being detected as the cover image when in a sub-directory.
+* Fix scan re-associating galleries to the same scene.
+* Fix SQL error when filtering galleries excluding performers or tags.
+* Fix version checking for armv7 and arm64.
+* Change "Is NULL" filter to include empty string values.
+* Prevent scene card previews from playing in full-screen on iOS devices.
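The new tag count criterion handlers in the tag query builder above all follow the same pattern: join the relevant `*_tags` table, `GROUP BY tags.id`, and compare an aggregate in a `HAVING` clause built by `getIntCriterionWhereClause`, while the reworked "is missing" handler (and the changelog's "Is NULL" item) treats blank strings the same as `NULL` via `TRIM(...) = ''`. The snippet below is a minimal, standalone sketch of the SQL shape this produces, not the stash `filterBuilder` API itself; the `criterion` struct, the function names and the `description` column are illustrative assumptions only.

```go
package main

import "fmt"

// criterion mirrors the shape of an integer criterion: a value plus a
// comparison modifier such as EQUALS or GREATER_THAN.
type criterion struct {
	value    int
	modifier string
}

// sceneCountHaving returns a HAVING clause comparing the per-tag scene count
// against the criterion, together with the arguments to bind.
func sceneCountHaving(c criterion) (string, []interface{}) {
	const col = "COUNT(DISTINCT scenes_tags.scene_id)"
	switch c.modifier {
	case "EQUALS":
		return col + " = ?", []interface{}{c.value}
	case "NOT_EQUALS":
		return col + " != ?", []interface{}{c.value}
	case "GREATER_THAN":
		return col + " > ?", []interface{}{c.value}
	case "LESS_THAN":
		return col + " < ?", []interface{}{c.value}
	default:
		return "", nil
	}
}

// isMissing builds a predicate that treats NULL and blank strings alike,
// mirroring the `IS NULL OR TRIM(...) = ''` behaviour of the updated
// "is missing" handler. The column name passed in is purely illustrative.
func isMissing(column string) string {
	return fmt.Sprintf("(tags.%s IS NULL OR TRIM(tags.%s) = '')", column, column)
}

func main() {
	having, args := sceneCountHaving(criterion{value: 0, modifier: "GREATER_THAN"})

	query := "SELECT tags.id FROM tags" +
		" LEFT JOIN scenes_tags ON scenes_tags.tag_id = tags.id" +
		" WHERE " + isMissing("description") +
		" GROUP BY tags.id" +
		" HAVING " + having

	// Prints the assembled query and its bound arguments.
	fmt.Println(query, args)
}
```

For a `GREATER_THAN 0` criterion this prints a query ending in `HAVING COUNT(DISTINCT scenes_tags.scene_id) > ?` with `[0]` as the bound argument, which is the same shape that `tagSceneCountCriterionHandler` contributes to the final tag query.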
diff --git a/ui/v2.5/src/components/Galleries/Galleries.tsx b/ui/v2.5/src/components/Galleries/Galleries.tsx index 8fa2c8bde..b4aef94a5 100644 --- a/ui/v2.5/src/components/Galleries/Galleries.tsx +++ b/ui/v2.5/src/components/Galleries/Galleries.tsx @@ -1,5 +1,6 @@ import React from "react"; import { Route, Switch } from "react-router-dom"; +import { PersistanceLevel } from "src/hooks/ListHook"; import { Gallery } from "./GalleryDetails/Gallery"; import { GalleryList } from "./GalleryList"; @@ -8,7 +9,9 @@ const Galleries = () => ( } + render={(props) => ( + + )} /> diff --git a/ui/v2.5/src/components/Galleries/GalleryCard.tsx b/ui/v2.5/src/components/Galleries/GalleryCard.tsx index ce43c8eb3..7bfb14398 100644 --- a/ui/v2.5/src/components/Galleries/GalleryCard.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryCard.tsx @@ -47,7 +47,7 @@ export const GalleryCard: React.FC = (props) => { if (props.gallery.tags.length <= 0) return; const popoverContent = props.gallery.tags.map((tag) => ( - + )); return ( diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx index b4353fdde..f947bd658 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx @@ -1,7 +1,11 @@ import { Tab, Nav, Dropdown } from "react-bootstrap"; import React, { useEffect, useState } from "react"; import { useParams, useHistory, Link } from "react-router-dom"; -import { useFindGallery, useGalleryUpdate } from "src/core/StashService"; +import { + mutateMetadataScan, + useFindGallery, + useGalleryUpdate, +} from "src/core/StashService"; import { ErrorMessage, LoadingIndicator, Icon } from "src/components/Shared"; import { TextUtils } from "src/utils"; import * as Mousetrap from "mousetrap"; @@ -60,6 +64,18 @@ export const Gallery: React.FC = () => { } }; + async function onRescan() { + if (!gallery || !gallery.path) { + return; + } + + await mutateMetadataScan({ + paths: [gallery.path], + }); + + Toast.success({ content: "Rescanning image" }); + } + const [isDeleteAlertOpen, setIsDeleteAlertOpen] = useState(false); function onDeleteDialogClosed(deleted: boolean) { @@ -92,6 +108,15 @@ export const Gallery: React.FC = () => { + {gallery?.path ? ( + onRescan()} + > + Rescan + + ) : undefined} ; @@ -17,7 +18,7 @@ export const GalleryAddPanel: React.FC = ({ gallery }) => { function filterHook(filter: ListFilterModel) { const galleryValue = { id: gallery.id!, - label: gallery.title ?? gallery.path ?? "", + label: gallery.title ?? TextUtils.fileNameFromPath(gallery.path ?? 
""), }; // if galleries is already present, then we modify it, otherwise add let galleryCriterion = filter.criteria.find((c) => { @@ -77,10 +78,6 @@ export const GalleryAddPanel: React.FC = ({ gallery }) => { ]; return ( - + ); }; diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx index 43203395b..3b9bf1b47 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx @@ -25,7 +25,7 @@ export const GalleryDetailPanel: React.FC = (props) => { function renderTags() { if (!props.gallery.tags || props.gallery.tags.length === 0) return; const tags = props.gallery.tags.map((tag) => ( - + )); return ( <> diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx index 5f515fb0b..5769f55d2 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx @@ -1,13 +1,22 @@ import React, { useEffect, useState } from "react"; import { useHistory } from "react-router-dom"; -import { Button, Form, Col, Row } from "react-bootstrap"; +import { + Button, + Dropdown, + DropdownButton, + Form, + Col, + Row, +} from "react-bootstrap"; import Mousetrap from "mousetrap"; import * as GQL from "src/core/generated-graphql"; import { + queryScrapeGallery, queryScrapeGalleryURL, useGalleryCreate, useGalleryUpdate, useListGalleryScrapers, + mutateReloadScrapers, } from "src/core/StashService"; import { PerformerSelect, @@ -64,6 +73,7 @@ export const GalleryEditPanel: React.FC< ); const Scrapers = useListGalleryScrapers(); + const [queryableScrapers, setQueryableScrapers] = useState([]); const [ scrapedGallery, @@ -118,6 +128,16 @@ export const GalleryEditPanel: React.FC< } }); + useEffect(() => { + const newQueryableScrapers = ( + Scrapers?.data?.listGalleryScrapers ?? [] + ).filter((s) => + s.gallery?.supported_scrapes.includes(GQL.ScrapeType.Fragment) + ); + + setQueryableScrapers(newQueryableScrapers); + }, [Scrapers]); + function getGalleryInput() { return { id: isNew ? undefined : gallery?.id ?? 
"", @@ -162,6 +182,39 @@ export const GalleryEditPanel: React.FC< setIsLoading(false); } + async function onScrapeClicked(scraper: GQL.Scraper) { + setIsLoading(true); + try { + const galleryInput = getGalleryInput() as GQL.GalleryUpdateInput; + const result = await queryScrapeGallery(scraper.id, galleryInput); + if (!result.data || !result.data.scrapeGallery) { + Toast.success({ + content: "No galleries found", + }); + return; + } + setScrapedGallery(result.data.scrapeGallery); + } catch (e) { + Toast.error(e); + } finally { + setIsLoading(false); + } + } + + async function onReloadScrapers() { + setIsLoading(true); + try { + await mutateReloadScrapers(); + + // reload the performer scrapers + await Scrapers.refetch(); + } catch (e) { + Toast.error(e); + } finally { + setIsLoading(false); + } + } + function onScrapeDialogClosed(data?: GQL.ScrapedGalleryDataFragment) { if (data) { updateGalleryFromScrapedGallery(data); @@ -187,6 +240,32 @@ export const GalleryEditPanel: React.FC< ); } + function renderScraperMenu() { + if (isNew) { + return; + } + + return ( + + {queryableScrapers.map((s) => ( + onScrapeClicked(s)}> + {s.name} + + ))} + onReloadScrapers()}> + + + + Reload scrapers + + + ); + } + function urlScrapable(scrapedUrl: string): boolean { return (Scrapers?.data?.listGalleryScrapers ?? []).some((s) => (s?.gallery?.urls ?? []).some((u) => scrapedUrl.includes(u)) @@ -290,6 +369,9 @@ export const GalleryEditPanel: React.FC< Delete +
+ {renderScraperMenu()} +
diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryImagesPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryImagesPanel.tsx index d536eba83..ec06b478d 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryImagesPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryImagesPanel.tsx @@ -4,8 +4,9 @@ import { GalleriesCriterion } from "src/models/list-filter/criteria/galleries"; import { ListFilterModel } from "src/models/list-filter/filter"; import { ImageList } from "src/components/Images/ImageList"; import { mutateRemoveGalleryImages } from "src/core/StashService"; -import { showWhenSelected } from "src/hooks/ListHook"; +import { showWhenSelected, PersistanceLevel } from "src/hooks/ListHook"; import { useToast } from "src/hooks"; +import { TextUtils } from "src/utils"; interface IGalleryDetailsProps { gallery: GQL.GalleryDataFragment; @@ -19,7 +20,7 @@ export const GalleryImagesPanel: React.FC = ({ function filterHook(filter: ListFilterModel) { const galleryValue = { id: gallery.id!, - label: gallery.title ?? gallery.path ?? "", + label: gallery.title ?? TextUtils.fileNameFromPath(gallery.path ?? ""), }; // if galleries is already present, then we modify it, otherwise add let galleryCriterion = filter.criteria.find((c) => { @@ -82,7 +83,8 @@ export const GalleryImagesPanel: React.FC = ({ ); }; diff --git a/ui/v2.5/src/components/Galleries/GalleryList.tsx b/ui/v2.5/src/components/Galleries/GalleryList.tsx index 283d64e10..8e3096a16 100644 --- a/ui/v2.5/src/components/Galleries/GalleryList.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryList.tsx @@ -8,7 +8,8 @@ import { GallerySlimDataFragment, } from "src/core/generated-graphql"; import { useGalleriesList } from "src/hooks"; -import { showWhenSelected } from "src/hooks/ListHook"; +import { TextUtils } from "src/utils"; +import { showWhenSelected, PersistanceLevel } from "src/hooks/ListHook"; import { ListFilterModel } from "src/models/list-filter/filter"; import { DisplayMode } from "src/models/list-filter/types"; import { queryFindGalleries } from "src/core/StashService"; @@ -20,7 +21,7 @@ import { ExportDialog } from "../Shared/ExportDialog"; interface IGalleryList { filterHook?: (filter: ListFilterModel) => ListFilterModel; - persistState?: boolean; + persistState?: PersistanceLevel; } export const GalleryList: React.FC = ({ @@ -202,7 +203,9 @@ export const GalleryList: React.FC = ({
diff --git a/ui/v2.5/src/components/Galleries/GalleryWallCard.tsx b/ui/v2.5/src/components/Galleries/GalleryWallCard.tsx index 376f1062c..b724ea09b 100644 --- a/ui/v2.5/src/components/Galleries/GalleryWallCard.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryWallCard.tsx @@ -24,7 +24,7 @@ const GalleryWallCard: React.FC = ({ gallery }) => { ? "landscape" : "portrait"; const cover = gallery?.cover?.paths.thumbnail ?? ""; - const title = gallery.title ?? gallery.path; + const title = gallery.title ?? TextUtils.fileNameFromPath(gallery.path ?? ""); const performerNames = gallery.performers.map((p) => p.name); const performers = performerNames.length >= 2 diff --git a/ui/v2.5/src/components/Images/ImageCard.tsx b/ui/v2.5/src/components/Images/ImageCard.tsx index 4c992f598..53e153a76 100644 --- a/ui/v2.5/src/components/Images/ImageCard.tsx +++ b/ui/v2.5/src/components/Images/ImageCard.tsx @@ -42,7 +42,7 @@ export const ImageCard: React.FC = ( if (props.image.tags.length <= 0) return; const popoverContent = props.image.tags.map((tag) => ( - + )); return ( diff --git a/ui/v2.5/src/components/Images/ImageDetails/Image.tsx b/ui/v2.5/src/components/Images/ImageDetails/Image.tsx index 413c17b76..cfaec445a 100644 --- a/ui/v2.5/src/components/Images/ImageDetails/Image.tsx +++ b/ui/v2.5/src/components/Images/ImageDetails/Image.tsx @@ -7,6 +7,7 @@ import { useImageDecrementO, useImageResetO, useImageUpdate, + mutateMetadataScan, } from "src/core/StashService"; import { ErrorMessage, LoadingIndicator, Icon } from "src/components/Shared"; import { useToast } from "src/hooks"; @@ -43,6 +44,18 @@ export const Image: React.FC = () => { const [isDeleteAlertOpen, setIsDeleteAlertOpen] = useState(false); + async function onRescan() { + if (!image) { + return; + } + + await mutateMetadataScan({ + paths: [image.path], + }); + + Toast.success({ content: "Rescanning image" }); + } + const onOrganizedClick = async () => { try { setOrganizedLoading(true); @@ -121,6 +134,13 @@ export const Image: React.FC = () => { + onRescan()} + > + Rescan + = (props) => { function renderTags() { if (props.image.tags.length === 0) return; const tags = props.image.tags.map((tag) => ( - + )); return ( <> diff --git a/ui/v2.5/src/components/Images/ImageList.tsx b/ui/v2.5/src/components/Images/ImageList.tsx index 48cdb9824..103e1799f 100644 --- a/ui/v2.5/src/components/Images/ImageList.tsx +++ b/ui/v2.5/src/components/Images/ImageList.tsx @@ -12,7 +12,11 @@ import { useImagesList, useLightbox } from "src/hooks"; import { TextUtils } from "src/utils"; import { ListFilterModel } from "src/models/list-filter/filter"; import { DisplayMode } from "src/models/list-filter/types"; -import { IListHookOperation, showWhenSelected } from "src/hooks/ListHook"; +import { + IListHookOperation, + showWhenSelected, + PersistanceLevel, +} from "src/hooks/ListHook"; import { ImageCard } from "./ImageCard"; import { EditImagesDialog } from "./EditImagesDialog"; import { DeleteImagesDialog } from "./DeleteImagesDialog"; @@ -79,13 +83,15 @@ const ImageWall: React.FC = ({ interface IImageList { filterHook?: (filter: ListFilterModel) => ListFilterModel; - persistState?: boolean; + persistState?: PersistanceLevel; + persistanceKey?: string; extraOperations?: IListHookOperation[]; } export const ImageList: React.FC = ({ filterHook, persistState, + persistanceKey, extraOperations, }) => { const history = useHistory(); @@ -131,6 +137,7 @@ export const ImageList: React.FC = ({ filterHook, addKeybinds, persistState, + persistanceKey, }); async function viewRandom( 
diff --git a/ui/v2.5/src/components/Images/Images.tsx b/ui/v2.5/src/components/Images/Images.tsx index 576b6f674..dfbd6224c 100644 --- a/ui/v2.5/src/components/Images/Images.tsx +++ b/ui/v2.5/src/components/Images/Images.tsx @@ -1,5 +1,6 @@ import React from "react"; import { Route, Switch } from "react-router-dom"; +import { PersistanceLevel } from "src/hooks/ListHook"; import { Image } from "./ImageDetails/Image"; import { ImageList } from "./ImageList"; @@ -8,7 +9,9 @@ const Images = () => ( } + render={(props) => ( + + )} /> diff --git a/ui/v2.5/src/components/List/AddFilter.tsx b/ui/v2.5/src/components/List/AddFilter.tsx index 7dc673969..03a513d22 100644 --- a/ui/v2.5/src/components/List/AddFilter.tsx +++ b/ui/v2.5/src/components/List/AddFilter.tsx @@ -154,6 +154,7 @@ export const AddFilter: React.FC = ( criterion.type !== "parent_studios" && criterion.type !== "tags" && criterion.type !== "sceneTags" && + criterion.type !== "performerTags" && criterion.type !== "movies" ) return; diff --git a/ui/v2.5/src/components/MainNavbar.tsx b/ui/v2.5/src/components/MainNavbar.tsx index 330c9d674..d91bf1f61 100644 --- a/ui/v2.5/src/components/MainNavbar.tsx +++ b/ui/v2.5/src/components/MainNavbar.tsx @@ -95,7 +95,7 @@ const allMenuItems: IMenuItem[] = [ name: "galleries", message: messages.galleries, href: "/galleries", - icon: "image", + icon: "images", }, { name: "performers", diff --git a/ui/v2.5/src/components/Movies/MovieDetails/Movie.tsx b/ui/v2.5/src/components/Movies/MovieDetails/Movie.tsx index 9f57a74bf..bd95459af 100644 --- a/ui/v2.5/src/components/Movies/MovieDetails/Movie.tsx +++ b/ui/v2.5/src/components/Movies/MovieDetails/Movie.tsx @@ -571,8 +571,10 @@ export const Movie: React.FC = () => { onToggleEdit={onToggleEdit} onSave={onSave} onImageChange={onFrontImageChange} + onImageChangeURL={onFrontImageLoad} onClearImage={onClearFrontImage} onBackImageChange={onBackImageChange} + onBackImageChangeURL={onBackImageLoad} onClearBackImage={onClearBackImage} onDelete={onDelete} /> diff --git a/ui/v2.5/src/components/Movies/MovieList.tsx b/ui/v2.5/src/components/Movies/MovieList.tsx index 2e7a52a55..040cbd400 100644 --- a/ui/v2.5/src/components/Movies/MovieList.tsx +++ b/ui/v2.5/src/components/Movies/MovieList.tsx @@ -9,7 +9,11 @@ import { import { ListFilterModel } from "src/models/list-filter/filter"; import { DisplayMode } from "src/models/list-filter/types"; import { queryFindMovies, useMoviesDestroy } from "src/core/StashService"; -import { showWhenSelected, useMoviesList } from "src/hooks/ListHook"; +import { + showWhenSelected, + useMoviesList, + PersistanceLevel, +} from "src/hooks/ListHook"; import { ExportDialog, DeleteEntityDialog } from "src/components/Shared"; import { MovieCard } from "./MovieCard"; @@ -65,7 +69,7 @@ export const MovieList: React.FC = () => { addKeybinds, otherOperations, selectable: true, - persistState: true, + persistState: PersistanceLevel.ALL, renderDeleteDialog, }); diff --git a/ui/v2.5/src/components/Performers/EditPerformersDialog.tsx b/ui/v2.5/src/components/Performers/EditPerformersDialog.tsx new file mode 100644 index 000000000..e0f6d03a3 --- /dev/null +++ b/ui/v2.5/src/components/Performers/EditPerformersDialog.tsx @@ -0,0 +1,225 @@ +import React, { useEffect, useState } from "react"; +import { Form } from "react-bootstrap"; +import _ from "lodash"; +import { useBulkPerformerUpdate } from "src/core/StashService"; +import * as GQL from "src/core/generated-graphql"; +import { Modal } from "src/components/Shared"; +import { useToast } from 
"src/hooks"; +import MultiSet from "../Shared/MultiSet"; + +interface IListOperationProps { + selected: GQL.SlimPerformerDataFragment[]; + onClose: (applied: boolean) => void; +} + +export const EditPerformersDialog: React.FC = ( + props: IListOperationProps +) => { + const Toast = useToast(); + + const [tagMode, setTagMode] = React.useState( + GQL.BulkUpdateIdMode.Add + ); + const [tagIds, setTagIds] = useState(); + const [favorite, setFavorite] = useState(); + + const [updatePerformers] = useBulkPerformerUpdate(getPerformerInput()); + + // Network state + const [isUpdating, setIsUpdating] = useState(false); + + const checkboxRef = React.createRef(); + + function makeBulkUpdateIds( + ids: string[], + mode: GQL.BulkUpdateIdMode + ): GQL.BulkUpdateIds { + return { + mode, + ids, + }; + } + + function getPerformerInput(): GQL.BulkPerformerUpdateInput { + // need to determine what we are actually setting on each performer + const aggregateTagIds = getTagIds(props.selected); + + const performerInput: GQL.BulkPerformerUpdateInput = { + ids: props.selected.map((performer) => { + return performer.id; + }), + }; + + // if tagIds non-empty, then we are setting them + if ( + tagMode === GQL.BulkUpdateIdMode.Set && + (!tagIds || tagIds.length === 0) + ) { + // and all performers have the same ids, + if (aggregateTagIds.length > 0) { + // then unset the tagIds, otherwise ignore + performerInput.tag_ids = makeBulkUpdateIds(tagIds || [], tagMode); + } + } else { + // if tagIds non-empty, then we are setting them + performerInput.tag_ids = makeBulkUpdateIds(tagIds || [], tagMode); + } + + if (favorite !== undefined) { + performerInput.favorite = favorite; + } + + return performerInput; + } + + async function onSave() { + setIsUpdating(true); + try { + await updatePerformers(); + Toast.success({ content: "Updated performers" }); + props.onClose(true); + } catch (e) { + Toast.error(e); + } + setIsUpdating(false); + } + + function getTagIds(state: GQL.SlimPerformerDataFragment[]) { + let ret: string[] = []; + let first = true; + + state.forEach((performer: GQL.SlimPerformerDataFragment) => { + if (first) { + ret = performer.tags ? performer.tags.map((t) => t.id).sort() : []; + first = false; + } else { + const tIds = performer.tags + ? performer.tags.map((t) => t.id).sort() + : []; + + if (!_.isEqual(ret, tIds)) { + ret = []; + } + } + }); + + return ret; + } + + useEffect(() => { + const state = props.selected; + let updateTagIds: string[] = []; + let updateFavorite: boolean | undefined; + let first = true; + + state.forEach((performer: GQL.SlimPerformerDataFragment) => { + const performerTagIDs = (performer.tags ?? 
[]).map((p) => p.id).sort(); + + if (first) { + updateTagIds = performerTagIDs; + first = false; + updateFavorite = performer.favorite; + } else { + if (!_.isEqual(performerTagIDs, updateTagIds)) { + updateTagIds = []; + } + if (performer.favorite !== updateFavorite) { + updateFavorite = undefined; + } + } + }); + + if (tagMode === GQL.BulkUpdateIdMode.Set) { + setTagIds(updateTagIds); + } + setFavorite(updateFavorite); + }, [props.selected, tagMode]); + + useEffect(() => { + if (checkboxRef.current) { + checkboxRef.current.indeterminate = favorite === undefined; + } + }, [favorite, checkboxRef]); + + function renderMultiSelect( + type: "performers" | "tags", + ids: string[] | undefined + ) { + let mode = GQL.BulkUpdateIdMode.Add; + switch (type) { + case "tags": + mode = tagMode; + break; + } + + return ( + { + const itemIDs = items.map((i) => i.id); + switch (type) { + case "tags": + setTagIds(itemIDs); + break; + } + }} + onSetMode={(newMode) => { + switch (type) { + case "tags": + setTagMode(newMode); + break; + } + }} + ids={ids ?? []} + mode={mode} + /> + ); + } + + function cycleFavorite() { + if (favorite) { + setFavorite(undefined); + } else if (favorite === undefined) { + setFavorite(false); + } else { + setFavorite(true); + } + } + + function render() { + return ( + props.onClose(false), + text: "Cancel", + variant: "secondary", + }} + isRunning={isUpdating} + > +
+ + Tags + {renderMultiSelect("tags", tagIds)} + + + + cycleFavorite()} + /> + + +
+ ); + } + + return render(); +}; diff --git a/ui/v2.5/src/components/Performers/PerformerCard.tsx b/ui/v2.5/src/components/Performers/PerformerCard.tsx index 4cb317f32..cff36a880 100644 --- a/ui/v2.5/src/components/Performers/PerformerCard.tsx +++ b/ui/v2.5/src/components/Performers/PerformerCard.tsx @@ -1,9 +1,17 @@ import React from "react"; import { Link } from "react-router-dom"; -import { FormattedNumber, FormattedPlural, FormattedMessage } from "react-intl"; +import { FormattedMessage } from "react-intl"; import * as GQL from "src/core/generated-graphql"; import { NavUtils, TextUtils } from "src/utils"; -import { BasicCard, CountryFlag, TruncatedText } from "src/components/Shared"; +import { + BasicCard, + CountryFlag, + HoverPopover, + Icon, + TagLink, + TruncatedText, +} from "src/components/Shared"; +import { Button, ButtonGroup } from "react-bootstrap"; interface IPerformerCardProps { performer: GQL.PerformerDataFragment; @@ -34,6 +42,50 @@ export const PerformerCard: React.FC = ({ ); } + function maybeRenderScenesPopoverButton() { + if (!performer.scene_count) return; + + return ( + + + + ); + } + + function maybeRenderTagPopoverButton() { + if (performer.tags.length <= 0) return; + + const popoverContent = performer.tags.map((tag) => ( + + )); + + return ( + + + + ); + } + + function maybeRenderPopoverButtonGroup() { + if (performer.scene_count || performer.tags.length > 0) { + return ( + <> +
+ + {maybeRenderScenesPopoverButton()} + {maybeRenderTagPopoverButton()} + + + ); + } + } + return ( = ({ -
- Stars in  - -   - - - - . -
+ {maybeRenderPopoverButtonGroup()} } selected={selected} diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx index 9e4d587a3..ccc56531b 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx @@ -22,6 +22,7 @@ import { PerformerOperationsPanel } from "./PerformerOperationsPanel"; import { PerformerScenesPanel } from "./PerformerScenesPanel"; import { PerformerGalleriesPanel } from "./PerformerGalleriesPanel"; import { PerformerImagesPanel } from "./PerformerImagesPanel"; +import { PerformerEditPanel } from "./PerformerEditPanel"; interface IPerformerParams { id?: string; @@ -126,11 +127,7 @@ export const Performer: React.FC = () => { unmountOnExit > - + @@ -142,9 +139,8 @@ export const Performer: React.FC = () => { - { return ; } if (activeImage) { - return Performer; + return Performer; } } if (isNew) return ( -
-
{renderPerformerImage()}
-
+
+
+ {renderPerformerImage()} +
+

Create Performer

- ; - isNew?: boolean; - isEditing?: boolean; - isVisible: boolean; - onDelete?: () => void; - onImageChange?: (image?: string | null) => void; - onImageEncoding?: (loading?: boolean) => void; } export const PerformerDetailsPanel: React.FC = ({ performer, - isNew, - isEditing, - isVisible, - onDelete, - onImageChange, - onImageEncoding, }) => { - const Toast = useToast(); - const history = useHistory(); - - // Editing state - const [ - isDisplayingScraperDialog, - setIsDisplayingScraperDialog, - ] = useState(); - const [ - scrapePerformerDetails, - setScrapePerformerDetails, - ] = useState(); - const [isDeleteAlertOpen, setIsDeleteAlertOpen] = useState(false); - - // Editing performer state - const [image, setImage] = useState(); - const [name, setName] = useState(performer?.name ?? ""); - const [aliases, setAliases] = useState(performer.aliases ?? ""); - const [birthdate, setBirthdate] = useState(performer.birthdate ?? ""); - const [ethnicity, setEthnicity] = useState(performer.ethnicity ?? ""); - const [country, setCountry] = useState(performer.country ?? ""); - const [eyeColor, setEyeColor] = useState(performer.eye_color ?? ""); - const [height, setHeight] = useState(performer.height ?? ""); - const [measurements, setMeasurements] = useState( - performer.measurements ?? "" - ); - const [fakeTits, setFakeTits] = useState(performer.fake_tits ?? ""); - const [careerLength, setCareerLength] = useState( - performer.career_length ?? "" - ); - const [tattoos, setTattoos] = useState(performer.tattoos ?? ""); - const [piercings, setPiercings] = useState(performer.piercings ?? ""); - const [url, setUrl] = useState(performer.url ?? ""); - const [twitter, setTwitter] = useState(performer.twitter ?? ""); - const [instagram, setInstagram] = useState(performer.instagram ?? ""); - const [gender, setGender] = useState( - genderToString(performer.gender ?? undefined) - ); - const [stashIDs, setStashIDs] = useState( - performer.stash_ids ?? [] - ); - const favorite = performer.favorite ?? false; - // Network state - const [isLoading, setIsLoading] = useState(false); - const intl = useIntl(); - const [updatePerformer] = usePerformerUpdate(); - const [createPerformer] = usePerformerCreate(); - - const Scrapers = useListPerformerScrapers(); - const [queryableScrapers, setQueryableScrapers] = useState([]); - - const [scrapedPerformer, setScrapedPerformer] = useState< - GQL.ScrapedPerformer | undefined - >(); - - const imageEncoding = ImageUtils.usePasteImage(onImageLoad, isEditing); - - function translateScrapedGender(scrapedGender?: string) { - if (!scrapedGender) { + function renderTagsField() { + if (!performer.tags?.length) { return; } - let retEnum: GQL.GenderEnum | undefined; - - // try to translate from enum values first - const upperGender = scrapedGender?.toUpperCase(); - const asEnum = genderToString(upperGender as GQL.GenderEnum); - if (asEnum) { - retEnum = stringToGender(asEnum); - } else { - // try to match against gender strings - const caseInsensitive = true; - retEnum = stringToGender(scrapedGender, caseInsensitive); - } - - return genderToString(retEnum); - } - - function updatePerformerEditStateFromScraper( - state: Partial - ) { - if (state.name) { - setName(state.name); - } - - if (state.aliases) { - setAliases(state.aliases ?? undefined); - } - if (state.birthdate) { - setBirthdate(state.birthdate ?? undefined); - } - if (state.ethnicity) { - setEthnicity(state.ethnicity ?? undefined); - } - if (state.country) { - setCountry(state.country ?? 
undefined); - } - if (state.eye_color) { - setEyeColor(state.eye_color ?? undefined); - } - if (state.height) { - setHeight(state.height ?? undefined); - } - if (state.measurements) { - setMeasurements(state.measurements ?? undefined); - } - if (state.fake_tits) { - setFakeTits(state.fake_tits ?? undefined); - } - if (state.career_length) { - setCareerLength(state.career_length ?? undefined); - } - if (state.tattoos) { - setTattoos(state.tattoos ?? undefined); - } - if (state.piercings) { - setPiercings(state.piercings ?? undefined); - } - if (state.url) { - setUrl(state.url ?? undefined); - } - if (state.twitter) { - setTwitter(state.twitter ?? undefined); - } - if (state.instagram) { - setInstagram(state.instagram ?? undefined); - } - if (state.gender) { - // gender is a string in the scraper data - setGender(translateScrapedGender(state.gender ?? undefined)); - } - - // image is a base64 string - // #404: don't overwrite image if it has been modified by the user - // overwrite if not new since it came from a dialog - // otherwise follow existing behaviour - if ( - (!isNew || image === undefined) && - (state as GQL.ScrapedPerformerDataFragment).image !== undefined - ) { - const imageStr = (state as GQL.ScrapedPerformerDataFragment).image; - setImage(imageStr ?? undefined); - } - } - - function onImageLoad(imageData: string) { - setImage(imageData); - } - - async function onSave( - performerInput: - | Partial - | Partial - ) { - setIsLoading(true); - try { - if (!isNew) { - await updatePerformer({ - variables: { - input: { - ...performerInput, - stash_ids: performerInput?.stash_ids?.map((s) => ({ - endpoint: s.endpoint, - stash_id: s.stash_id, - })), - } as GQL.PerformerUpdateInput, - }, - }); - if (performerInput.image) { - // Refetch image to bust browser cache - await fetch(`/performer/${performer.id}/image`, { cache: "reload" }); - } - } else { - const result = await createPerformer({ - variables: performerInput as GQL.PerformerCreateInput, - }); - if (result.data?.performerCreate) { - history.push(`/performers/${result.data.performerCreate.id}`); - } - } - } catch (e) { - Toast.error(e); - } - setIsLoading(false); - } - - // set up hotkeys - useEffect(() => { - if (isEditing && isVisible) { - Mousetrap.bind("s s", () => { - onSave?.(getPerformerInput()); - }); - - if (!isNew) { - Mousetrap.bind("d d", () => { - setIsDeleteAlertOpen(true); - }); - } - - return () => { - Mousetrap.unbind("s s"); - - if (!isNew) { - Mousetrap.unbind("d d"); - } - }; - } - }); - - useEffect(() => { - if (onImageChange) { - onImageChange(image); - } - return () => onImageChange?.(); - }, [image, onImageChange]); - - useEffect(() => onImageEncoding?.(imageEncoding), [ - onImageEncoding, - imageEncoding, - ]); - - useEffect(() => { - const newQueryableScrapers = ( - Scrapers?.data?.listPerformerScrapers ?? 
[] - ).filter((s) => - s.performer?.supported_scrapes.includes(GQL.ScrapeType.Name) - ); - - setQueryableScrapers(newQueryableScrapers); - }, [Scrapers]); - - if (isLoading) return ; - - function getPerformerInput() { - const performerInput: Partial< - GQL.PerformerCreateInput | GQL.PerformerUpdateInput - > = { - name, - aliases, - favorite, - birthdate, - ethnicity, - country, - eye_color: eyeColor, - height, - measurements, - fake_tits: fakeTits, - career_length: careerLength, - tattoos, - piercings, - url, - twitter, - instagram, - image, - gender: stringToGender(gender), - stash_ids: stashIDs.map((s) => ({ - stash_id: s.stash_id, - endpoint: s.endpoint, - })), - }; - - if (!isNew) { - (performerInput as GQL.PerformerUpdateInput).id = performer.id!; - } - return performerInput; - } - - function onImageChangeHandler(event: React.FormEvent) { - ImageUtils.onImageChange(event, onImageLoad); - } - - function onDisplayScrapeDialog(scraper: GQL.Scraper) { - setIsDisplayingScraperDialog(scraper); - } - - async function onReloadScrapers() { - setIsLoading(true); - try { - await mutateReloadScrapers(); - - // reload the performer scrapers - await Scrapers.refetch(); - } catch (e) { - Toast.error(e); - } finally { - setIsLoading(false); - } - } - - function getQueryScraperPerformerInput() { - if (!scrapePerformerDetails) return {}; - - // image is not supported - const { __typename, image: _image, ...ret } = scrapePerformerDetails; - return ret; - } - - async function onScrapePerformer() { - setIsDisplayingScraperDialog(undefined); - try { - if (!scrapePerformerDetails || !isDisplayingScraperDialog) return; - setIsLoading(true); - const result = await queryScrapePerformer( - isDisplayingScraperDialog.id, - getQueryScraperPerformerInput() - ); - if (!result?.data?.scrapePerformer) return; - - // if this is a new performer, just dump the data - if (isNew) { - updatePerformerEditStateFromScraper(result.data.scrapePerformer); - } else { - setScrapedPerformer(result.data.scrapePerformer); - } - } catch (e) { - Toast.error(e); - } finally { - setIsLoading(false); - } - } - - async function onScrapePerformerURL() { - if (!url) return; - setIsLoading(true); - try { - const result = await queryScrapePerformerURL(url); - if (!result.data || !result.data.scrapePerformerURL) { - return; - } - - // if this is a new performer, just dump the data - if (isNew) { - updatePerformerEditStateFromScraper(result.data.scrapePerformerURL); - } else { - setScrapedPerformer(result.data.scrapePerformerURL); - } - } catch (e) { - Toast.error(e); - } finally { - setIsLoading(false); - } - } - - function renderEthnicity() { - return TableUtils.renderInputGroup({ - title: "Ethnicity", - value: ethnicity, - isEditing: !!isEditing, - placeholder: "Ethnicity", - onChange: setEthnicity, - }); - } - - function renderScraperMenu() { - if (!performer || !isEditing) { - return; - } - - const popover = ( - - - <> - {queryableScrapers - ? queryableScrapers.map((s) => ( -
- -
- )) - : ""} -
- -
- -
-
- ); - return ( - - - +
+
Tags
+
+
    + {(performer.tags ?? []).map((tag) => ( + + ))} +
+
+
); } - function renderScraperDialog() { - return ( - setIsDisplayingScraperDialog(undefined)} - header="Scrape" - accept={{ onClick: onScrapePerformer, text: "Scrape" }} - > -
- setScrapePerformerDetails(query)} - /> -
-
- ); - } - - function urlScrapable(scrapedUrl: string) { - return ( - !!scrapedUrl && - (Scrapers?.data?.listPerformerScrapers ?? []).some((s) => - (s?.performer?.urls ?? []).some((u) => scrapedUrl.includes(u)) - ) - ); - } - - function maybeRenderScrapeButton() { - if (!url || !isEditing || !urlScrapable(url)) { - return undefined; - } - return ( - - ); - } - - function maybeRenderScrapeDialog() { - if (!scrapedPerformer) { - return; - } - - const currentPerformer: Partial = { - name, - aliases, - birthdate, - ethnicity, - country, - eye_color: eyeColor, - height, - measurements, - fake_tits: fakeTits, - career_length: careerLength, - tattoos, - piercings, - url, - twitter, - instagram, - gender: stringToGender(gender), - image_path: image ?? performer.image_path, - }; - - return ( - { - onScrapeDialogClosed(p); - }} - /> - ); - } - - function onScrapeDialogClosed(p?: GQL.ScrapedPerformerDataFragment) { - if (p) { - updatePerformerEditStateFromScraper(p); - } - setScrapedPerformer(undefined); - } - - function renderURLField() { - return ( -
- - - - ); - } - - function maybeRenderButtons() { - if (isEditing) { - return ( -
- - {!isNew ? ( - - ) : ( - "" - )} - {renderScraperMenu()} - - {isEditing ? ( - - ) : ( - "" - )} -
- ); - } - } - - function renderDeleteAlert() { - return ( - setIsDeleteAlertOpen(false) }} - > -

Are you sure you want to delete {name}?

-
- ); - } - - function maybeRenderName() { - if (isEditing) { - return TableUtils.renderInputGroup({ - title: "Name", - value: name, - isEditing: !!isEditing, - placeholder: "Name", - onChange: setName, - }); - } - } - - function maybeRenderAliases() { - if (isEditing) { - return TableUtils.renderInputGroup({ - title: "Aliases", - value: aliases, - isEditing: !!isEditing, - placeholder: "Aliases", - onChange: setAliases, - }); - } - } - - function renderGender() { - return TableUtils.renderHtmlSelect({ - title: "Gender", - value: gender, - isEditing: !!isEditing, - onChange: (value: string) => setGender(value), - selectOptions: [""].concat(getGenderStrings()), - }); - } - - const removeStashID = (stashID: GQL.StashIdInput) => { - setStashIDs( - stashIDs.filter( - (s) => - !(s.endpoint === stashID.endpoint && s.stash_id === stashID.stash_id) - ) - ); - }; - function renderStashIDs() { if (!performer.stash_ids?.length) { return; } return ( - - - - + + ); } - const formatHeight = () => { - if (isEditing) { - return height; - } + const formatHeight = (height?: string | null) => { if (!height) { return ""; } @@ -717,92 +83,46 @@ export const PerformerDetailsPanel: React.FC = ({ return ( <> - {renderDeleteAlert()} - {renderScraperDialog()} - {maybeRenderScrapeDialog()} - -
- {gallery.title ?? gallery.path} ({gallery.image_count}{" "} + {gallery.title ?? + TextUtils.fileNameFromPath(gallery.path ?? "")}{" "} + ({gallery.image_count}{" "} {gallery.image_count === 1 ? "image" : "images"})
- URL - {maybeRenderScrapeButton()} - - {EditableTextUtils.renderInputGroup({ - title: "URL", - value: url, - url: TextUtils.sanitiseURL(url), - isEditing: !!isEditing, - onChange: setUrl, - })} -
StashIDs +
+
StashIDs
+
-
- - {maybeRenderName()} - {maybeRenderAliases()} - {renderGender()} - {TableUtils.renderInputGroup({ - title: "Birthdate", - value: isEditing - ? birthdate - : TextUtils.formatDate(intl, birthdate), - isEditing: !!isEditing, - onChange: setBirthdate, - })} - {renderEthnicity()} - {TableUtils.renderInputGroup({ - title: "Eye Color", - value: eyeColor, - isEditing: !!isEditing, - onChange: setEyeColor, - })} - {TableUtils.renderInputGroup({ - title: "Country", - value: country, - isEditing: !!isEditing, - onChange: setCountry, - })} - {TableUtils.renderInputGroup({ - title: `Height ${isEditing ? "(cm)" : ""}`, - value: formatHeight(), - isEditing: !!isEditing, - onChange: setHeight, - })} - {TableUtils.renderInputGroup({ - title: "Measurements", - value: measurements, - isEditing: !!isEditing, - onChange: setMeasurements, - })} - {TableUtils.renderInputGroup({ - title: "Fake Tits", - value: fakeTits, - isEditing: !!isEditing, - onChange: setFakeTits, - })} - {TableUtils.renderInputGroup({ - title: "Career Length", - value: careerLength, - isEditing: !!isEditing, - onChange: setCareerLength, - })} - {TableUtils.renderInputGroup({ - title: "Tattoos", - value: tattoos, - isEditing: !!isEditing, - onChange: setTattoos, - })} - {TableUtils.renderInputGroup({ - title: "Piercings", - value: piercings, - isEditing: !!isEditing, - onChange: setPiercings, - })} - {renderURLField()} - {TableUtils.renderInputGroup({ - title: "Twitter", - value: twitter, - url: TextUtils.sanitiseURL(twitter, TextUtils.twitterURL), - isEditing: !!isEditing, - onChange: setTwitter, - })} - {TableUtils.renderInputGroup({ - title: "Instagram", - value: instagram, - url: TextUtils.sanitiseURL(instagram, TextUtils.instagramURL), - isEditing: !!isEditing, - onChange: setInstagram, - })} - {renderStashIDs()} - -
- - {maybeRenderButtons()} + + + + + + + + + + + + + + + {renderTagsField()} + {renderStashIDs()} ); }; diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerEditPanel.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerEditPanel.tsx new file mode 100644 index 000000000..e4ebe7c08 --- /dev/null +++ b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerEditPanel.tsx @@ -0,0 +1,872 @@ +import React, { useEffect, useState } from "react"; +import { + Button, + Popover, + OverlayTrigger, + Form, + Col, + InputGroup, + Row, + Badge, +} from "react-bootstrap"; +import Mousetrap from "mousetrap"; +import * as GQL from "src/core/generated-graphql"; +import * as yup from "yup"; +import { + getGenderStrings, + useListPerformerScrapers, + genderToString, + stringToGender, + queryScrapePerformer, + mutateReloadScrapers, + usePerformerUpdate, + usePerformerCreate, + useTagCreate, + queryScrapePerformerURL, +} from "src/core/StashService"; +import { + Icon, + ImageInput, + LoadingIndicator, + CollapseButton, + Modal, + TagSelect, +} from "src/components/Shared"; +import { ImageUtils } from "src/utils"; +import { useToast } from "src/hooks"; +import { Prompt, useHistory } from "react-router-dom"; +import { useFormik } from "formik"; +import { PerformerScrapeDialog } from "./PerformerScrapeDialog"; +import PerformerScrapeModal from "./PerformerScrapeModal"; + +interface IPerformerDetails { + performer: Partial; + isNew?: boolean; + isVisible: boolean; + onDelete?: () => void; + onImageChange?: (image?: string | null) => void; + onImageEncoding?: (loading?: boolean) => void; +} + +export const PerformerEditPanel: React.FC = ({ + performer, + isNew, + isVisible, + onDelete, + onImageChange, + onImageEncoding, +}) => { + const Toast = useToast(); + const history = useHistory(); + + // Editing state + const [scraper, setScraper] = useState(); + const [newTags, setNewTags] = useState(); + + const [isDeleteAlertOpen, setIsDeleteAlertOpen] = useState(false); + + // Network state + const [isLoading, setIsLoading] = useState(false); + + const [updatePerformer] = usePerformerUpdate(); + const [createPerformer] = usePerformerCreate(); + + const Scrapers = useListPerformerScrapers(); + const [queryableScrapers, setQueryableScrapers] = useState([]); + + const [scrapedPerformer, setScrapedPerformer] = useState< + GQL.ScrapedPerformer | undefined + >(); + + const imageEncoding = ImageUtils.usePasteImage(onImageLoad, true); + + const [createTag] = useTagCreate({ name: "" }); + + const genderOptions = [""].concat(getGenderStrings()); + + const labelXS = 3; + const labelXL = 2; + const fieldXS = 9; + const fieldXL = 7; + + const schema = yup.object({ + name: yup.string().required(), + aliases: yup.string().optional(), + gender: yup.string().optional().oneOf(genderOptions), + birthdate: yup.string().optional(), + ethnicity: yup.string().optional(), + eye_color: yup.string().optional(), + country: yup.string().optional(), + height: yup.string().optional(), + measurements: yup.string().optional(), + fake_tits: yup.string().optional(), + career_length: yup.string().optional(), + tattoos: yup.string().optional(), + piercings: yup.string().optional(), + url: yup.string().optional(), + twitter: yup.string().optional(), + instagram: yup.string().optional(), + tag_ids: yup.array(yup.string().required()).optional(), + stash_ids: yup.mixed().optional(), + image: yup.string().optional().nullable(), + }); + + const initialValues = { + name: performer.name ?? "", + aliases: performer.aliases ?? 
"", + gender: genderToString(performer.gender ?? undefined), + birthdate: performer.birthdate ?? "", + ethnicity: performer.ethnicity ?? "", + eye_color: performer.eye_color ?? "", + country: performer.country ?? "", + height: performer.height ?? "", + measurements: performer.measurements ?? "", + fake_tits: performer.fake_tits ?? "", + career_length: performer.career_length ?? "", + tattoos: performer.tattoos ?? "", + piercings: performer.piercings ?? "", + url: performer.url ?? "", + twitter: performer.twitter ?? "", + instagram: performer.instagram ?? "", + tag_ids: (performer.tags ?? []).map((t) => t.id), + stash_ids: performer.stash_ids ?? undefined, + image: undefined, + }; + + type InputValues = typeof initialValues; + + const formik = useFormik({ + initialValues, + validationSchema: schema, + onSubmit: (values) => onSave(getPerformerInput(values)), + }); + + function translateScrapedGender(scrapedGender?: string) { + if (!scrapedGender) { + return; + } + + let retEnum: GQL.GenderEnum | undefined; + + // try to translate from enum values first + const upperGender = scrapedGender?.toUpperCase(); + const asEnum = genderToString(upperGender as GQL.GenderEnum); + if (asEnum) { + retEnum = stringToGender(asEnum); + } else { + // try to match against gender strings + const caseInsensitive = true; + retEnum = stringToGender(scrapedGender, caseInsensitive); + } + + return genderToString(retEnum); + } + + function renderNewTags() { + if (!newTags || newTags.length === 0) { + return; + } + + const ret = ( + <> + {newTags.map((t) => ( + createNewTag(t)} + > + {t.name} + + + ))} + + ); + + const minCollapseLength = 10; + + if (newTags.length >= minCollapseLength) { + return ( + + {ret} + + ); + } + + return ret; + } + + async function createNewTag(toCreate: GQL.ScrapedSceneTag) { + let tagInput: GQL.TagCreateInput = { name: "" }; + try { + tagInput = Object.assign(tagInput, toCreate); + const result = await createTag({ + variables: tagInput, + }); + + // add the new tag to the new tags value + const newTagIds = formik.values.tag_ids.concat([ + result.data!.tagCreate!.id, + ]); + formik.setFieldValue("tag_ids", newTagIds); + + // remove the tag from the list + const newTagsClone = newTags!.concat(); + const pIndex = newTagsClone.indexOf(toCreate); + newTagsClone.splice(pIndex, 1); + + setNewTags(newTagsClone); + + Toast.success({ + content: ( + + Created tag: {toCreate.name} + + ), + }); + } catch (e) { + Toast.error(e); + } + } + + function updatePerformerEditStateFromScraper( + state: Partial + ) { + if (state.name) { + formik.setFieldValue("name", state.name); + } + + if (state.aliases) { + formik.setFieldValue("aliases", state.aliases); + } + if (state.birthdate) { + formik.setFieldValue("birthdate", state.birthdate); + } + if (state.ethnicity) { + formik.setFieldValue("ethnicity", state.ethnicity); + } + if (state.country) { + formik.setFieldValue("country", state.country); + } + if (state.eye_color) { + formik.setFieldValue("eye_color", state.eye_color); + } + if (state.height) { + formik.setFieldValue("height", state.height); + } + if (state.measurements) { + formik.setFieldValue("measurements", state.measurements); + } + if (state.fake_tits) { + formik.setFieldValue("fake_tits", state.fake_tits); + } + if (state.career_length) { + formik.setFieldValue("career_length", state.career_length); + } + if (state.tattoos) { + formik.setFieldValue("tattoos", state.tattoos); + } + if (state.piercings) { + formik.setFieldValue("piercings", state.piercings); + } + if (state.url) { + 
formik.setFieldValue("url", state.url); + } + if (state.twitter) { + formik.setFieldValue("twitter", state.twitter); + } + if (state.instagram) { + formik.setFieldValue("instagram", state.instagram); + } + if (state.gender) { + // gender is a string in the scraper data + formik.setFieldValue( + "gender", + translateScrapedGender(state.gender ?? undefined) + ); + } + if (state.tags) { + // map tags to their ids and filter out those not found + const newTagIds = state.tags.map((t) => t.stored_id).filter((t) => t); + formik.setFieldValue("tag_ids", newTagIds as string[]); + + setNewTags(state.tags.filter((t) => !t.stored_id)); + } + + // image is a base64 string + // #404: don't overwrite image if it has been modified by the user + // overwrite if not new since it came from a dialog + // otherwise follow existing behaviour + if ( + (!isNew || formik.values.image === undefined) && + (state as GQL.ScrapedPerformerDataFragment).image !== undefined + ) { + const imageStr = (state as GQL.ScrapedPerformerDataFragment).image; + formik.setFieldValue("image", imageStr ?? undefined); + } + } + + function onImageLoad(imageData: string) { + formik.setFieldValue("image", imageData); + } + + async function onSave( + performerInput: + | Partial + | Partial + ) { + setIsLoading(true); + try { + if (!isNew) { + await updatePerformer({ + variables: { + input: { + ...performerInput, + stash_ids: performerInput?.stash_ids?.map((s) => ({ + endpoint: s.endpoint, + stash_id: s.stash_id, + })), + } as GQL.PerformerUpdateInput, + }, + }); + history.push(`/performers/${performer.id}`); + } else { + const result = await createPerformer({ + variables: performerInput as GQL.PerformerCreateInput, + }); + if (result.data?.performerCreate) { + history.push(`/performers/${result.data.performerCreate.id}`); + } + } + } catch (e) { + Toast.error(e); + } + setIsLoading(false); + } + + // set up hotkeys + useEffect(() => { + if (isVisible) { + Mousetrap.bind("s s", () => { + onSave?.(getPerformerInput(formik.values)); + }); + + if (!isNew) { + Mousetrap.bind("d d", () => { + setIsDeleteAlertOpen(true); + }); + } + + return () => { + Mousetrap.unbind("s s"); + + if (!isNew) { + Mousetrap.unbind("d d"); + } + }; + } + }); + + useEffect(() => { + if (onImageChange) { + onImageChange(formik.values.image); + } + return () => onImageChange?.(); + }, [formik.values.image, onImageChange]); + + useEffect(() => onImageEncoding?.(imageEncoding), [ + onImageEncoding, + imageEncoding, + ]); + + useEffect(() => { + const newQueryableScrapers = ( + Scrapers?.data?.listPerformerScrapers ?? 
[] + ).filter((s) => + s.performer?.supported_scrapes.includes(GQL.ScrapeType.Name) + ); + + setQueryableScrapers(newQueryableScrapers); + }, [Scrapers]); + + if (isLoading) return ; + + function getPerformerInput(values: InputValues) { + const performerInput: Partial< + GQL.PerformerCreateInput | GQL.PerformerUpdateInput + > = { + ...values, + gender: stringToGender(values.gender), + }; + + if (!isNew) { + (performerInput as GQL.PerformerUpdateInput).id = performer.id!; + } + return performerInput; + } + + function onImageChangeHandler(event: React.FormEvent) { + ImageUtils.onImageChange(event, onImageLoad); + } + + function onImageChangeURL(url: string) { + formik.setFieldValue("image", url); + } + + async function onReloadScrapers() { + setIsLoading(true); + try { + await mutateReloadScrapers(); + + // reload the performer scrapers + await Scrapers.refetch(); + } catch (e) { + Toast.error(e); + } finally { + setIsLoading(false); + } + } + + async function onScrapePerformer( + selectedPerformer: GQL.ScrapedPerformerDataFragment + ) { + setScraper(undefined); + try { + if (!scraper) return; + setIsLoading(true); + + const { + __typename, + image: _image, + tags: _tags, + ...ret + } = selectedPerformer; + + const result = await queryScrapePerformer(scraper.id, ret); + if (!result?.data?.scrapePerformer) return; + + // if this is a new performer, just dump the data + if (isNew) { + updatePerformerEditStateFromScraper(result.data.scrapePerformer); + } else { + setScrapedPerformer(result.data.scrapePerformer); + } + } catch (e) { + Toast.error(e); + } finally { + setIsLoading(false); + } + } + + async function onScrapePerformerURL() { + const { url } = formik.values; + if (!url) return; + setIsLoading(true); + try { + const result = await queryScrapePerformerURL(url); + if (!result.data || !result.data.scrapePerformerURL) { + return; + } + + // if this is a new performer, just dump the data + if (isNew) { + updatePerformerEditStateFromScraper(result.data.scrapePerformerURL); + } else { + setScrapedPerformer(result.data.scrapePerformerURL); + } + } catch (e) { + Toast.error(e); + } finally { + setIsLoading(false); + } + } + + function renderScraperMenu() { + if (!performer) { + return; + } + + const popover = ( + + + <> + {queryableScrapers + ? queryableScrapers.map((s) => ( +
+ +
+ )) + : ""} +
+ +
+ +
+
+ ); + + return ( + + + + ); + } + + function urlScrapable(scrapedUrl?: string) { + return ( + !!scrapedUrl && + (Scrapers?.data?.listPerformerScrapers ?? []).some((s) => + (s?.performer?.urls ?? []).some((u) => scrapedUrl.includes(u)) + ) + ); + } + + function maybeRenderScrapeDialog() { + if (!scrapedPerformer) { + return; + } + + const currentPerformer: Partial = { + ...formik.values, + gender: stringToGender(formik.values.gender), + image: formik.values.image ?? performer.image_path, + }; + + return ( + { + onScrapeDialogClosed(p); + }} + /> + ); + } + + function onScrapeDialogClosed(p?: GQL.ScrapedPerformerDataFragment) { + if (p) { + updatePerformerEditStateFromScraper(p); + } + setScrapedPerformer(undefined); + } + + function maybeRenderScrapeButton() { + return ( + + ); + } + + function renderButtons() { + return ( + + + + {!isNew ? ( + + ) : ( + "" + )} + {renderScraperMenu()} + + + + + ); + } + + const renderScrapeModal = () => + scraper !== undefined && ( + setScraper(undefined)} + onSelectPerformer={onScrapePerformer} + name={formik.values.name || ""} + /> + ); + + function renderDeleteAlert() { + return ( + setIsDeleteAlertOpen(false) }} + > +

+ <p>Are you sure you want to delete {performer.name}?</p>
+ </Modal>
+ ); + } + + function renderTagsField() { + return ( + + + Tags + + + + formik.setFieldValue( + "tag_ids", + items.map((item) => item.id) + ) + } + ids={formik.values.tag_ids} + /> + {renderNewTags()} + + + ); + } + + const removeStashID = (stashID: GQL.StashIdInput) => { + formik.setFieldValue( + "stash_ids", + (formik.values.stash_ids ?? []).filter( + (s) => + !(s.endpoint === stashID.endpoint && s.stash_id === stashID.stash_id) + ) + ); + }; + + function renderStashIDs() { + if (!formik.values.stash_ids?.length) { + return; + } + + return ( + + + StashIDs + + +
+ {formik.values.stash_ids.map((stashID) => {
+ const base = stashID.endpoint.match(/https?:\/\/.*?\//)?.[0];
+ const link = base ? (
+
+ {stashID.stash_id}
+
+ ) : (
+ stashID.stash_id
+ );
+ return (
+
+
+ {link}
+
+ );
+ })}
+
+
+ ); + } + + function renderTextField(field: string, title: string) { + return ( + + + {title} + + + + + + ); + } + + return ( + <> + {renderDeleteAlert()} + {renderScrapeModal()} + {maybeRenderScrapeDialog()} + + + +
+ + + Name + + + + + {formik.errors.name} + + + + + + + Alias + + + + + + + + + Gender + + + + {genderOptions.map((opt) => ( + + ))} + + + + + {renderTextField("birthdate", "Birthdate")} + {renderTextField("country", "Country")} + {renderTextField("ethnicity", "Ethnicity")} + {renderTextField("eye_color", "Eye Color")} + {renderTextField("height", "Height (cm)")} + {renderTextField("measurements", "Measurements")} + {renderTextField("fake_tits", "Fake Tits")} + + + + Tattoos + + + + + + + + + Piercings + + + + + + + {renderTextField("career_length", "Career Length")} + + + + URL + + + + + {maybeRenderScrapeButton()} + + + + + {renderTextField("twitter", "Twitter")} + {renderTextField("instagram", "Instagram")} + + {renderTagsField()} + {renderStashIDs()} + + {renderButtons()} +
+ + ); +}; diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerScrapeDialog.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerScrapeDialog.tsx index d7ce4bd49..4e6305cbb 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerScrapeDialog.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerScrapeDialog.tsx @@ -6,13 +6,18 @@ import { ScrapedInputGroupRow, ScrapedImageRow, ScrapeDialogRow, + ScrapedTextAreaRow, } from "src/components/Shared/ScrapeDialog"; import { getGenderStrings, genderToString, stringToGender, + useTagCreate, } from "src/core/StashService"; import { Form } from "react-bootstrap"; +import { TagSelect } from "src/components/Shared"; +import { useToast } from "src/hooks"; +import _ from "lodash"; function renderScrapedGender( result: ScrapeResult, @@ -62,8 +67,54 @@ function renderScrapedGenderRow( ); } +function renderScrapedTags( + result: ScrapeResult, + isNew?: boolean, + onChange?: (value: string[]) => void +) { + const resultValue = isNew ? result.newValue : result.originalValue; + const value = resultValue ?? []; + + return ( + { + if (onChange) { + onChange(items.map((i) => i.id)); + } + }} + ids={value} + /> + ); +} + +function renderScrapedTagsRow( + result: ScrapeResult, + onChange: (value: ScrapeResult) => void, + newTags: GQL.ScrapedSceneTag[], + onCreateNew?: (value: GQL.ScrapedSceneTag) => void +) { + return ( + renderScrapedTags(result)} + renderNewField={() => + renderScrapedTags(result, true, (value) => + onChange(result.cloneWithValue(value)) + ) + } + newValues={newTags} + onChange={onChange} + onCreateNew={onCreateNew} + /> + ); +} + interface IPerformerScrapeDialogProps { - performer: Partial; + performer: Partial; scraped: GQL.ScrapedPerformer; onClose: (scrapedPerformer?: GQL.ScrapedPerformer) => void; @@ -151,8 +202,64 @@ export const PerformerScrapeDialog: React.FC = ( ) ); + const [createTag] = useTagCreate({ name: "" }); + const Toast = useToast(); + + interface IHasStoredID { + stored_id?: string | null; + } + + function mapStoredIdObjects( + scrapedObjects?: IHasStoredID[] + ): string[] | undefined { + if (!scrapedObjects) { + return undefined; + } + const ret = scrapedObjects + .map((p) => p.stored_id) + .filter((p) => { + return p !== undefined && p !== null; + }) as string[]; + + if (ret.length === 0) { + return undefined; + } + + // sort by id numerically + ret.sort((a, b) => { + return parseInt(a, 10) - parseInt(b, 10); + }); + + return ret; + } + + function sortIdList(idList?: string[] | null) { + if (!idList) { + return; + } + + const ret = _.clone(idList); + // sort by id numerically + ret.sort((a, b) => { + return parseInt(a, 10) - parseInt(b, 10); + }); + + return ret; + } + + const [tags, setTags] = useState>( + new ScrapeResult( + sortIdList(props.performer.tag_ids ?? undefined), + mapStoredIdObjects(props.scraped.tags ?? undefined) + ) + ); + + const [newTags, setNewTags] = useState( + props.scraped.tags?.filter((t) => !t.stored_id) ?? 
[] + ); + const [image, setImage] = useState>( - new ScrapeResult(props.performer.image_path, props.scraped.image) + new ScrapeResult(props.performer.image, props.scraped.image) ); const allFields = [ @@ -173,6 +280,7 @@ export const PerformerScrapeDialog: React.FC = ( instagram, gender, image, + tags, ]; // don't show the dialog if nothing was scraped if (allFields.every((r) => !r.scraped)) { @@ -180,6 +288,41 @@ export const PerformerScrapeDialog: React.FC = ( return <>; } + async function createNewTag(toCreate: GQL.ScrapedSceneTag) { + let tagInput: GQL.TagCreateInput = { name: "" }; + try { + tagInput = Object.assign(tagInput, toCreate); + const result = await createTag({ + variables: tagInput, + }); + + // add the new tag to the new tags value + const tagClone = tags.cloneWithValue(tags.newValue); + if (!tagClone.newValue) { + tagClone.newValue = []; + } + tagClone.newValue.push(result.data!.tagCreate!.id); + setTags(tagClone); + + // remove the tag from the list + const newTagsClone = newTags.concat(); + const pIndex = newTagsClone.indexOf(toCreate); + newTagsClone.splice(pIndex, 1); + + setNewTags(newTagsClone); + + Toast.success({ + content: ( + + Created tag: {toCreate.name} + + ), + }); + } catch (e) { + Toast.error(e); + } + } + function makeNewScrapedItem(): GQL.ScrapedPerformer { return { name: name.getNewValue(), @@ -198,6 +341,12 @@ export const PerformerScrapeDialog: React.FC = ( twitter: twitter.getNewValue(), instagram: instagram.getNewValue(), gender: gender.getNewValue(), + tags: tags.getNewValue()?.map((m) => { + return { + stored_id: m, + name: "", + }; + }), image: image.getNewValue(), }; } @@ -210,7 +359,7 @@ export const PerformerScrapeDialog: React.FC = ( result={name} onChange={(value) => setName(value)} /> - setAliases(value)} @@ -256,12 +405,12 @@ export const PerformerScrapeDialog: React.FC = ( result={careerLength} onChange={(value) => setCareerLength(value)} /> - setTattoos(value)} /> - setPiercings(value)} @@ -281,6 +430,12 @@ export const PerformerScrapeDialog: React.FC = ( result={instagram} onChange={(value) => setInstagram(value)} /> + {renderScrapedTagsRow( + tags, + (value) => setTags(value), + newTags, + createNewTag + )} void; + onSelectPerformer: (performer: GQL.ScrapedPerformerDataFragment) => void; + name?: string; +} +const PerformerScrapeModal: React.FC = ({ + scraper, + name, + onHide, + onSelectPerformer, +}) => { + const inputRef = useRef(null); + const [query, setQuery] = useState(name ?? ""); + const { data, loading } = useScrapePerformerList(scraper.id, query); + + const performers = data?.scrapePerformerList ?? []; + + const onInputChange = debounce((input: string) => { + setQuery(input); + }, 500); + + useEffect(() => inputRef.current?.focus(), []); + + return ( + +
+ onInputChange(e.currentTarget.value)}
+ defaultValue={name ?? ""}
+ placeholder="Performer name..."
+ className="text-input mb-4"
+ ref={inputRef}
+ />
+ {loading ? (
+
+
+
+ ) : (
+
+ {performers.map((p) => (
+
+
+
+ ))}
+
+ )}
+
+
+ ); +}; + +export default PerformerScrapeModal; diff --git a/ui/v2.5/src/components/Performers/PerformerList.tsx b/ui/v2.5/src/components/Performers/PerformerList.tsx index b7300a718..8aa7c038c 100644 --- a/ui/v2.5/src/components/Performers/PerformerList.tsx +++ b/ui/v2.5/src/components/Performers/PerformerList.tsx @@ -11,14 +11,23 @@ import { usePerformersDestroy, } from "src/core/StashService"; import { usePerformersList } from "src/hooks"; -import { showWhenSelected } from "src/hooks/ListHook"; +import { showWhenSelected, PersistanceLevel } from "src/hooks/ListHook"; import { ListFilterModel } from "src/models/list-filter/filter"; import { DisplayMode } from "src/models/list-filter/types"; import { ExportDialog, DeleteEntityDialog } from "src/components/Shared"; import { PerformerCard } from "./PerformerCard"; import { PerformerListTable } from "./PerformerListTable"; +import { EditPerformersDialog } from "./EditPerformersDialog"; -export const PerformerList: React.FC = () => { +interface IPerformerList { + filterHook?: (filter: ListFilterModel) => ListFilterModel; + persistState?: PersistanceLevel; +} + +export const PerformerList: React.FC = ({ + filterHook, + persistState, +}) => { const history = useHistory(); const [isExportDialogOpen, setIsExportDialogOpen] = useState(false); const [isExportAll, setIsExportAll] = useState(false); @@ -82,6 +91,17 @@ export const PerformerList: React.FC = () => { } } + function renderEditPerformersDialog( + selectedPerformers: SlimPerformerDataFragment[], + onClose: (applied: boolean) => void + ) { + return ( + <> + + + ); + } + const renderDeleteDialog = ( selectedPerformers: SlimPerformerDataFragment[], onClose: (confirmed: boolean) => void @@ -98,9 +118,11 @@ export const PerformerList: React.FC = () => { const listData = usePerformersList({ otherOperations, renderContent, + renderEditDialog: renderEditPerformersDialog, + filterHook, addKeybinds, selectable: true, - persistState: true, + persistState, renderDeleteDialog, }); diff --git a/ui/v2.5/src/components/Performers/Performers.tsx b/ui/v2.5/src/components/Performers/Performers.tsx index cac8dfdc8..53aa517c8 100644 --- a/ui/v2.5/src/components/Performers/Performers.tsx +++ b/ui/v2.5/src/components/Performers/Performers.tsx @@ -1,11 +1,18 @@ import React from "react"; import { Route, Switch } from "react-router-dom"; +import { PersistanceLevel } from "src/hooks/ListHook"; import { Performer } from "./PerformerDetails/Performer"; import { PerformerList } from "./PerformerList"; const Performers = () => ( - + ( + + )} + /> ); diff --git a/ui/v2.5/src/components/Performers/styles.scss b/ui/v2.5/src/components/Performers/styles.scss index 6cf3e3254..47f58d437 100644 --- a/ui/v2.5/src/components/Performers/styles.scss +++ b/ui/v2.5/src/components/Performers/styles.scss @@ -1,8 +1,6 @@ -#performer-details { - .scrape-url-button { - color: $text-color; - float: right; - margin-right: 0.5rem; +#performer-edit { + .scrape-url-button:disabled { + opacity: 0.5; } } @@ -92,3 +90,16 @@ #performer-scraper-popover { z-index: 1; } + +.PerformerScrapeModal { + &-list { + list-style-type: none; + max-height: 50vh; + overflow-x: auto; + padding-left: 1rem; + + .btn { + font-size: 1.2rem; + } + } +} diff --git a/ui/v2.5/src/components/Scenes/SceneCard.tsx b/ui/v2.5/src/components/Scenes/SceneCard.tsx index c9ea31e82..6602b6e11 100644 --- a/ui/v2.5/src/components/Scenes/SceneCard.tsx +++ b/ui/v2.5/src/components/Scenes/SceneCard.tsx @@ -29,17 +29,14 @@ export const ScenePreview: React.FC = ({ const videoEl = 
useRef(null); useEffect(() => { - const observer = new IntersectionObserver( - (entries) => { - entries.forEach((entry) => { - if (entry.intersectionRatio > 0) - // Catch is necessary due to DOMException if user hovers before clicking on page - videoEl.current?.play().catch(() => {}); - else videoEl.current?.pause(); - }); - }, - { root: document.documentElement } - ); + const observer = new IntersectionObserver((entries) => { + entries.forEach((entry) => { + if (entry.intersectionRatio > 0) + // Catch is necessary due to DOMException if user hovers before clicking on page + videoEl.current?.play().catch(() => {}); + else videoEl.current?.pause(); + }); + }); if (videoEl.current) observer.observe(videoEl.current); }); @@ -53,6 +50,8 @@ export const ScenePreview: React.FC = ({
- } - enterDelay={1000} - placement="bottom" - onOpen={props.onMenuOpened} - onClose={props.onMenuClosed} - > - {renderButton()} - - ); - } - return renderButton(); + const maybeRenderDropdown = () => { + if (props.value) { + return ( + + + + Decrement + + + + Reset + + + ); + } + }; + + return ( + + {renderButton()} + {maybeRenderDropdown()} + + ); }; diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/Scene.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/Scene.tsx index 133f3f5d7..3c8ed2fec 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/Scene.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/Scene.tsx @@ -4,6 +4,7 @@ import React, { useEffect, useState } from "react"; import { useParams, useLocation, useHistory, Link } from "react-router-dom"; import * as GQL from "src/core/generated-graphql"; import { + mutateMetadataScan, useFindScene, useSceneIncrementO, useSceneDecrementO, @@ -51,6 +52,7 @@ export const Scene: React.FC = () => { error: streamableError, loading: streamableLoading, } = useSceneStreams(id); + const [oLoading, setOLoading] = useState(false); const [incrementO] = useSceneIncrementO(scene?.id ?? "0"); const [decrementO] = useSceneDecrementO(scene?.id ?? "0"); @@ -130,6 +132,18 @@ export const Scene: React.FC = () => { setTimestamp(marker.seconds); } + async function onRescan() { + if (!scene) { + return; + } + + await mutateMetadataScan({ + paths: [scene.path], + }); + + Toast.success({ content: "Rescanning scene" }); + } + async function onGenerateScreenshot(at?: number) { if (!scene) { return; @@ -184,6 +198,13 @@ export const Scene: React.FC = () => { + onRescan()} + > + Rescan + = ({ /> - -
StashIDs
    @@ -725,7 +723,11 @@ export const SceneEditPanel: React.FC = ({ alt="Scene cover" /> )} - +
diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneMovieTable.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneMovieTable.tsx index 40c1e763f..5b4e62aef 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneMovieTable.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneMovieTable.tsx @@ -3,8 +3,6 @@ import * as GQL from "src/core/generated-graphql"; import { useAllMoviesForFilter } from "src/core/StashService"; import { Form, Row, Col } from "react-bootstrap"; -type ValidTypes = GQL.SlimMovieDataFragment; - export type MovieSceneIndexMap = Map; export interface IProps { @@ -17,8 +15,8 @@ export const SceneMovieTable: React.FunctionComponent = ( ) => { const { data } = useAllMoviesForFilter(); - const items = !!data && !!data.allMoviesSlim ? data.allMoviesSlim : []; - let itemsFilter: ValidTypes[] = []; + const items = !!data && !!data.allMovies ? data.allMovies : []; + let itemsFilter: GQL.SlimMovieDataFragment[] = []; if (!!props.movieSceneIndexes && !!items) { props.movieSceneIndexes.forEach((_index, movieId) => { diff --git a/ui/v2.5/src/components/Scenes/SceneList.tsx b/ui/v2.5/src/components/Scenes/SceneList.tsx index 4f2df0914..e0667e26a 100644 --- a/ui/v2.5/src/components/Scenes/SceneList.tsx +++ b/ui/v2.5/src/components/Scenes/SceneList.tsx @@ -10,7 +10,7 @@ import { queryFindScenes } from "src/core/StashService"; import { useScenesList } from "src/hooks"; import { ListFilterModel } from "src/models/list-filter/filter"; import { DisplayMode } from "src/models/list-filter/types"; -import { showWhenSelected } from "src/hooks/ListHook"; +import { showWhenSelected, PersistanceLevel } from "src/hooks/ListHook"; import Tagger from "src/components/Tagger"; import { WallPanel } from "../Wall/WallPanel"; import { SceneCard } from "./SceneCard"; @@ -22,7 +22,7 @@ import { ExportDialog } from "../Shared/ExportDialog"; interface ISceneList { filterHook?: (filter: ListFilterModel) => ListFilterModel; - persistState?: boolean; + persistState?: PersistanceLevel.ALL; } export const SceneList: React.FC = ({ diff --git a/ui/v2.5/src/components/Scenes/SceneMarkerList.tsx b/ui/v2.5/src/components/Scenes/SceneMarkerList.tsx index 4e43d547c..1a70af20f 100644 --- a/ui/v2.5/src/components/Scenes/SceneMarkerList.tsx +++ b/ui/v2.5/src/components/Scenes/SceneMarkerList.tsx @@ -6,6 +6,7 @@ import { FindSceneMarkersQueryResult } from "src/core/generated-graphql"; import { queryFindSceneMarkers } from "src/core/StashService"; import { NavUtils } from "src/utils"; import { useSceneMarkersList } from "src/hooks"; +import { PersistanceLevel } from "src/hooks/ListHook"; import { ListFilterModel } from "src/models/list-filter/filter"; import { DisplayMode } from "src/models/list-filter/types"; import { WallPanel } from "../Wall/WallPanel"; @@ -41,7 +42,7 @@ export const SceneMarkerList: React.FC = ({ filterHook }) => { renderContent, filterHook, addKeybinds, - persistState: true, + persistState: PersistanceLevel.ALL, }); async function playRandom( diff --git a/ui/v2.5/src/components/Scenes/Scenes.tsx b/ui/v2.5/src/components/Scenes/Scenes.tsx index 95bbb46f2..0f301f7f5 100644 --- a/ui/v2.5/src/components/Scenes/Scenes.tsx +++ b/ui/v2.5/src/components/Scenes/Scenes.tsx @@ -1,5 +1,6 @@ import React from "react"; import { Route, Switch } from "react-router-dom"; +import { PersistanceLevel } from "src/hooks/ListHook"; import { Scene } from "./SceneDetails/Scene"; import { SceneList } from "./SceneList"; import { SceneMarkerList } from "./SceneMarkerList"; @@ -9,7 +10,9 @@ const Scenes = () => 
( } + render={(props) => ( + + )} /> diff --git a/ui/v2.5/src/components/Scenes/styles.scss b/ui/v2.5/src/components/Scenes/styles.scss index 73538c968..e04288211 100644 --- a/ui/v2.5/src/components/Scenes/styles.scss +++ b/ui/v2.5/src/components/Scenes/styles.scss @@ -209,15 +209,36 @@ textarea.scene-description { } } - &:hover { - .scene-specs-overlay, - .rating-banner, - .scene-studio-overlay { - opacity: 0; - transition: opacity 0.5s; - } + @media (pointer: fine) { + &:hover { + .scene-specs-overlay, + .rating-banner, + .scene-studio-overlay { + opacity: 0; + transition: opacity 0.5s; + } - .scene-studio-overlay:hover { + .scene-studio-overlay:hover { + opacity: 0.75; + transition: opacity 0.5s; + } + + .scene-card-check { + opacity: 0.75; + transition: opacity 0.5s; + } + + .scene-card-preview-video { + top: 0; + transition-delay: 0.2s; + } + } + } + + /* replicate hover for non-hoverable interfaces */ + @media (hover: none), (pointer: coarse), (pointer: none) { + /* don't hide overlays */ + .scene-studio-overlay { opacity: 0.75; transition: opacity 0.5s; } @@ -545,3 +566,10 @@ input[type="range"].blue-slider { color: #664c3f; } } + +.o-counter .dropdown-toggle { + background-color: rgba(0, 0, 0, 0); + border: none; + padding-left: 0; + padding-right: 0.25rem; +} diff --git a/ui/v2.5/src/components/Settings/SettingsConfigurationPanel.tsx b/ui/v2.5/src/components/Settings/SettingsConfigurationPanel.tsx index 92f1067b6..6070765f3 100644 --- a/ui/v2.5/src/components/Settings/SettingsConfigurationPanel.tsx +++ b/ui/v2.5/src/components/Settings/SettingsConfigurationPanel.tsx @@ -125,6 +125,7 @@ export const SettingsConfigurationPanel: React.FC = () => { const [scraperCDPPath, setScraperCDPPath] = useState( undefined ); + const [scraperCertCheck, setScraperCertCheck] = useState(true); const [stashBoxes, setStashBoxes] = useState([]); const { data, error, loading } = useConfiguration(); @@ -164,6 +165,7 @@ export const SettingsConfigurationPanel: React.FC = () => { imageExcludes, scraperUserAgent, scraperCDPPath, + scraperCertCheck, stashBoxes: stashBoxes.map( (b) => ({ @@ -212,6 +214,7 @@ export const SettingsConfigurationPanel: React.FC = () => { setImageExcludes(conf.general.imageExcludes); setScraperUserAgent(conf.general.scraperUserAgent ?? undefined); setScraperCDPPath(conf.general.scraperCDPPath ?? undefined); + setScraperCertCheck(conf.general.scraperCertCheck); setStashBoxes( conf.general.stashBoxes.map((box, i) => ({ name: box?.name ?? undefined, @@ -717,6 +720,20 @@ export const SettingsConfigurationPanel: React.FC = () => { http://localhost:9222/json/version) to a Chrome instance. + + + setScraperCertCheck(!scraperCertCheck)} + /> + + Some sites use insecure ssl certificates. When unticked the scraper + skips the insecure certificates check and allows scraping of those + sites. If you get a certificate error when scraping untick this. + +
diff --git a/ui/v2.5/src/components/Shared/DetailsEditNavbar.tsx b/ui/v2.5/src/components/Shared/DetailsEditNavbar.tsx index f2b3ed0fc..cfcbd135e 100644 --- a/ui/v2.5/src/components/Shared/DetailsEditNavbar.tsx +++ b/ui/v2.5/src/components/Shared/DetailsEditNavbar.tsx @@ -12,6 +12,8 @@ interface IProps { onAutoTag?: () => void; onImageChange: (event: React.FormEvent) => void; onBackImageChange?: (event: React.FormEvent) => void; + onImageChangeURL?: (url: string) => void; + onBackImageChangeURL?: (url: string) => void; onClearImage?: () => void; onClearBackImage?: () => void; acceptSVG?: boolean; @@ -65,6 +67,7 @@ export const DetailsEditNavbar: React.FC = (props: IProps) => { isEditing={props.isEditing} text="Back image..." onImageChange={props.onBackImageChange} + onImageURL={props.onBackImageChangeURL} /> ); } @@ -116,6 +119,7 @@ export const DetailsEditNavbar: React.FC = (props: IProps) => { isEditing={props.isEditing} text={props.onBackImageChange ? "Front image..." : undefined} onImageChange={props.onImageChange} + onImageURL={props.onImageChangeURL} acceptSVG={props.acceptSVG ?? false} /> {props.isEditing && props.onClearImage ? ( diff --git a/ui/v2.5/src/components/Shared/ImageInput.tsx b/ui/v2.5/src/components/Shared/ImageInput.tsx index 9bb4b46a8..c243a3a6b 100644 --- a/ui/v2.5/src/components/Shared/ImageInput.tsx +++ b/ui/v2.5/src/components/Shared/ImageInput.tsx @@ -1,10 +1,20 @@ -import React from "react"; -import { Button, Form } from "react-bootstrap"; +import React, { useState } from "react"; +import { + Button, + Col, + Form, + OverlayTrigger, + Popover, + Row, +} from "react-bootstrap"; +import { Modal } from "."; +import Icon from "./Icon"; interface IImageInput { isEditing: boolean; text?: string; onImageChange: (event: React.ChangeEvent) => void; + onImageURL?: (url: string) => void; acceptSVG?: boolean; } @@ -12,18 +22,107 @@ export const ImageInput: React.FC = ({ isEditing, text, onImageChange, + onImageURL, acceptSVG = false, }) => { + const [isShowDialog, setIsShowDialog] = useState(false); + const [url, setURL] = useState(""); + if (!isEditing) return
; + if (!onImageURL) { + // just return the file input + return ( + + + + + ); + } + + function onConfirmURL() { + if (!onImageURL) { + return; + } + + setIsShowDialog(false); + onImageURL(url); + } + + function renderDialog() { + return ( + setIsShowDialog(false)} + header="Image URL" + accept={{ onClick: onConfirmURL, text: "Confirm" }} + > +
+ + + URL + + + ) => + setURL(event.currentTarget.value) + } + value={url} + placeholder="URL" + /> + + +
+
+ ); + } + + const popover = ( + + + <> +
+ + + + +
+
+ +
+ +
+
+ ); + return ( - - - - + <> + {renderDialog()} + + + + ); }; diff --git a/ui/v2.5/src/components/Shared/Select.tsx b/ui/v2.5/src/components/Shared/Select.tsx index ae1088b7f..8ed94b245 100644 --- a/ui/v2.5/src/components/Shared/Select.tsx +++ b/ui/v2.5/src/components/Shared/Select.tsx @@ -10,7 +10,6 @@ import { useAllStudiosForFilter, useAllPerformersForFilter, useMarkerStrings, - useScrapePerformerList, useTagCreate, useStudioCreate, usePerformerCreate, @@ -32,6 +31,7 @@ interface ITypeProps { | "parent_studios" | "tags" | "sceneTags" + | "performerTags" | "movies"; } interface IFilterProps { @@ -43,6 +43,8 @@ interface IFilterProps { isMulti?: boolean; isClearable?: boolean; isDisabled?: boolean; + creatable?: boolean; + menuPortalTarget?: HTMLElement | null; } interface ISelectProps { className?: string; @@ -60,6 +62,7 @@ interface ISelectProps { placeholder?: string; showDropdown?: boolean; groupHeader?: string; + menuPortalTarget?: HTMLElement | null; closeMenuOnSelect?: boolean; noOptionsMessage?: string | null; } @@ -109,6 +112,7 @@ const SelectComponent = ({ placeholder, showDropdown = true, groupHeader, + menuPortalTarget, closeMenuOnSelect = true, noOptionsMessage = type !== "tags" ? "None" : null, }: ISelectProps & ITypeProps) => { @@ -158,6 +162,7 @@ const SelectComponent = ({ isLoading, styles, closeMenuOnSelect, + menuPortalTarget, components: { IndicatorSeparator: () => null, ...((!showDropdown || isDisabled) && { DropdownIndicator: () => null }), @@ -336,48 +341,6 @@ export const SceneSelect: React.FC = (props) => { ); }; -interface IScrapePerformerSuggestProps { - scraperId: string; - onSelectPerformer: (performer: GQL.ScrapedPerformerDataFragment) => void; - placeholder?: string; -} -export const ScrapePerformerSuggest: React.FC = ( - props -) => { - const [query, setQuery] = useState(""); - const { data, loading } = useScrapePerformerList(props.scraperId, query); - - const performers = data?.scrapePerformerList ?? []; - const items = performers.map((item) => ({ - label: item.name ?? "", - value: item.name ?? "", - })); - - const onInputChange = debounce((input: string) => { - setQuery(input); - }, 500); - - const onChange = (option: ValueType) => { - const performer = performers.find((p) => p.name === option?.value); - if (performer) props.onSelectPerformer(performer); - }; - - return ( - - ); -}; - interface IMarkerSuggestProps { initialMarkerTitle?: string; onChange: (title: string) => void; @@ -414,7 +377,7 @@ export const PerformerSelect: React.FC = (props) => { const { data, loading } = useAllPerformersForFilter(); const [createPerformer] = usePerformerCreate(); - const performers = data?.allPerformersSlim ?? []; + const performers = data?.allPerformers ?? []; const onCreate = async (name: string) => { const result = await createPerformer({ @@ -430,7 +393,7 @@ export const PerformerSelect: React.FC = (props) => { !exclude.includes(studio.id) ); @@ -464,7 +427,7 @@ export const StudioSelect: React.FC< isLoading={loading} items={studios} placeholder={props.noSelectionString ?? "Select studio..."} - creatable + creatable={props.creatable ?? true} onCreate={onCreate} /> ); @@ -472,7 +435,7 @@ export const StudioSelect: React.FC< export const MovieSelect: React.FC = (props) => { const { data, loading } = useAllMoviesForFilter(); - const items = data?.allMoviesSlim ?? []; + const items = data?.allMovies ?? []; return ( = (props) => { const [createTag] = useTagCreate({ name: "" }); const placeholder = props.noSelectionString ?? 
"Select tags..."; - const tags = data?.allTagsSlim ?? []; + const tags = data?.allTags ?? []; const onCreate = async (name: string) => { const result = await createTag({ @@ -505,7 +468,7 @@ export const TagSelect: React.FC = (props) => { {...props} isMulti={props.isMulti ?? false} items={tags} - creatable + creatable={props.creatable ?? true} type="tags" placeholder={placeholder} isLoading={loading} @@ -517,11 +480,11 @@ export const TagSelect: React.FC = (props) => { export const FilterSelect: React.FC = (props) => props.type === "performers" ? ( - + ) : props.type === "studios" || props.type === "parent_studios" ? ( - + ) : props.type === "movies" ? ( - + ) : ( - + ); diff --git a/ui/v2.5/src/components/Shared/TagLink.tsx b/ui/v2.5/src/components/Shared/TagLink.tsx index cb35cdbc5..1e9a58f2f 100644 --- a/ui/v2.5/src/components/Shared/TagLink.tsx +++ b/ui/v2.5/src/components/Shared/TagLink.tsx @@ -14,6 +14,7 @@ import { NavUtils, TextUtils } from "src/utils"; interface IProps { tag?: Partial; + tagType?: "performer" | "scene" | "gallery" | "image"; performer?: Partial; marker?: Partial; movie?: Partial; @@ -26,7 +27,21 @@ export const TagLink: React.FC = (props: IProps) => { let link: string = "#"; let title: string = ""; if (props.tag) { - link = NavUtils.makeTagScenesUrl(props.tag); + switch (props.tagType) { + case "scene": + case undefined: + link = NavUtils.makeTagScenesUrl(props.tag); + break; + case "performer": + link = NavUtils.makeTagPerformersUrl(props.tag); + break; + case "gallery": + link = NavUtils.makeTagGalleriesUrl(props.tag); + break; + case "image": + link = NavUtils.makeTagImagesUrl(props.tag); + break; + } title = props.tag.name || ""; } else if (props.performer) { link = NavUtils.makePerformerScenesUrl(props.performer); diff --git a/ui/v2.5/src/components/Shared/index.ts b/ui/v2.5/src/components/Shared/index.ts index f74648125..3a711091a 100644 --- a/ui/v2.5/src/components/Shared/index.ts +++ b/ui/v2.5/src/components/Shared/index.ts @@ -1,14 +1,4 @@ -export { - GallerySelect, - ScrapePerformerSuggest, - MarkerTitleSuggest, - FilterSelect, - PerformerSelect, - StudioSelect, - TagSelect, - SceneSelect, -} from "./Select"; - +export * from "./Select"; export { default as Icon } from "./Icon"; export { default as Modal } from "./Modal"; export { CollapseButton } from "./CollapseButton"; diff --git a/ui/v2.5/src/components/Studios/StudioDetails/Studio.tsx b/ui/v2.5/src/components/Studios/StudioDetails/Studio.tsx index 9a79551c5..048f199b3 100644 --- a/ui/v2.5/src/components/Studios/StudioDetails/Studio.tsx +++ b/ui/v2.5/src/components/Studios/StudioDetails/Studio.tsx @@ -134,10 +134,6 @@ export const Studio: React.FC = () => { input: getStudioInput() as GQL.StudioUpdateInput, }, }); - if (result.data?.studioUpdate?.image_path) - await fetch(result.data?.studioUpdate?.image_path, { - cache: "reload", - }); if (result.data?.studioUpdate) { updateStudioData(result.data.studioUpdate); setIsEditing(false); @@ -319,6 +315,7 @@ export const Studio: React.FC = () => { onToggleEdit={onToggleEdit} onSave={onSave} onImageChange={onImageChangeHandler} + onImageChangeURL={onImageLoad} onClearImage={() => { onClearImage(); }} diff --git a/ui/v2.5/src/components/Studios/StudioList.tsx b/ui/v2.5/src/components/Studios/StudioList.tsx index 6156046d2..4d6aef8f9 100644 --- a/ui/v2.5/src/components/Studios/StudioList.tsx +++ b/ui/v2.5/src/components/Studios/StudioList.tsx @@ -7,7 +7,7 @@ import { SlimStudioDataFragment, } from "src/core/generated-graphql"; import { useStudiosList } from 
"src/hooks"; -import { showWhenSelected } from "src/hooks/ListHook"; +import { showWhenSelected, PersistanceLevel } from "src/hooks/ListHook"; import { ListFilterModel } from "src/models/list-filter/filter"; import { DisplayMode } from "src/models/list-filter/types"; import { queryFindStudios, useStudiosDestroy } from "src/core/StashService"; @@ -131,7 +131,7 @@ export const StudioList: React.FC = ({ addKeybinds, otherOperations, selectable: true, - persistState: !fromParent, + persistState: !fromParent ? PersistanceLevel.ALL : PersistanceLevel.NONE, renderDeleteDialog, }); diff --git a/ui/v2.5/src/components/Tagger/StashSearchResult.tsx b/ui/v2.5/src/components/Tagger/StashSearchResult.tsx index 62aa9a277..22dfdfb5e 100755 --- a/ui/v2.5/src/components/Tagger/StashSearchResult.tsx +++ b/ui/v2.5/src/components/Tagger/StashSearchResult.tsx @@ -263,7 +263,7 @@ const StashSearchResult: React.FC = ({ const newTagIDs = tagOperation === "merge" ? updatedTags : []; const tags = scene.tags ?? []; if (tags.length > 0) { - const tagDict: Record = (allTags?.allTagsSlim ?? []) + const tagDict: Record = (allTags?.allTags ?? []) .filter((t) => t.name) .reduce( (dict, t) => ({ ...dict, [t.name.toLowerCase()]: t.id }), diff --git a/ui/v2.5/src/components/Tagger/Tagger.tsx b/ui/v2.5/src/components/Tagger/Tagger.tsx index 99d277318..281996f1d 100755 --- a/ui/v2.5/src/components/Tagger/Tagger.tsx +++ b/ui/v2.5/src/components/Tagger/Tagger.tsx @@ -68,7 +68,7 @@ const parseDate = (input: string): string => { if (mmddyy) { output = output.replace( mmddyy[0], - ` 20${mmddyy[1]}-${mmddyy[2]}-${mmddyy[3]} ` + ` ${mmddyy[1]}-${mmddyy[2]}-${mmddyy[3]} ` ); } const ddMMyy = output.match(ddMMyyRegex); @@ -133,7 +133,7 @@ function prepareQueryString( s = paths[paths.length - 1]; } blacklist.forEach((b) => { - s = s.replace(new RegExp(b, "i"), ""); + s = s.replace(new RegExp(b, "gi"), " "); }); s = parseDate(s); return s.replace(/\./g, " "); @@ -175,6 +175,7 @@ const TaggerList: React.FC = ({ const [fingerprints, setFingerprints] = useState< Record >({}); + const [hideUnmatched, setHideUnmatched] = useState(false); const fingerprintQueue = config.fingerprintQueue[selectedEndpoint.endpoint] ?? []; @@ -285,15 +286,23 @@ const TaggerList: React.FC = ({ ); const getFingerprintCount = () => { - const count = scenes.filter( + return scenes.filter( (s) => s.stash_ids.length === 0 && ((s.checksum && fingerprints[s.checksum]) || (s.oshash && fingerprints[s.oshash])) ).length; + }; + + const getFingerprintCountMessage = () => { + const count = getFingerprintCount(); return `${count > 0 ? count : "No"} new fingerprint matches found`; }; + const toggleHideUnmatchedScenes = () => { + setHideUnmatched(!hideUnmatched); + }; + const renderScenes = () => scenes.map((scene) => { const { paths, file, ext } = parsePath(scene.path); @@ -460,7 +469,7 @@ const TaggerList: React.FC = ({ ); } - return ( + return hideUnmatched && !fingerprintMatch ? null : (
@@ -498,6 +507,13 @@ const TaggerList: React.FC = ({
{fingerprintError} +
+
+ {(getFingerprintCount() > 0 || hideUnmatched) && (
+
+ )}
+
{fingerprintQueue.length > 0 && (
diff --git a/ui/v2.5/src/components/Tagger/queries.ts b/ui/v2.5/src/components/Tagger/queries.ts index ae46d14e2..66a91e106 100644 --- a/ui/v2.5/src/components/Tagger/queries.ts +++ b/ui/v2.5/src/components/Tagger/queries.ts @@ -65,21 +65,21 @@ export const useCreatePerformer = () => { >({ query: GQL.AllPerformersForFilterDocument, }); - const allPerformersSlim = sortBy( + const allPerformers = sortBy( [ - ...(currentQuery?.allPerformersSlim ?? []), + ...(currentQuery?.allPerformers ?? []), newPerformer.data.performerCreate, ], ["name"] ); - if (allPerformersSlim.length > 1) { + if (allPerformers.length > 1) { store.writeQuery< GQL.AllPerformersForFilterQuery, GQL.AllPerformersForFilterQueryVariables >({ query: GQL.AllPerformersForFilterDocument, data: { - allPerformersSlim, + allPerformers, }, }); } @@ -169,18 +169,18 @@ export const useCreateStudio = () => { >({ query: GQL.AllStudiosForFilterDocument, }); - const allStudiosSlim = sortBy( - [...(currentQuery?.allStudiosSlim ?? []), result.data.studioCreate], + const allStudios = sortBy( + [...(currentQuery?.allStudios ?? []), result.data.studioCreate], ["name"] ); - if (allStudiosSlim.length > 1) { + if (allStudios.length > 1) { store.writeQuery< GQL.AllStudiosForFilterQuery, GQL.AllStudiosForFilterQueryVariables >({ query: GQL.AllStudiosForFilterDocument, data: { - allStudiosSlim, + allStudios, }, }); } @@ -225,8 +225,8 @@ export const useCreateTag = () => { >({ query: GQL.AllTagsForFilterDocument, }); - const allTagsSlim = sortBy( - [...(currentQuery?.allTagsSlim ?? []), result.data.tagCreate], + const allTags = sortBy( + [...(currentQuery?.allTags ?? []), result.data.tagCreate], ["name"] ); @@ -236,7 +236,7 @@ export const useCreateTag = () => { >({ query: GQL.AllTagsForFilterDocument, data: { - allTagsSlim, + allTags, }, }); }, diff --git a/ui/v2.5/src/components/Tags/TagCard.tsx b/ui/v2.5/src/components/Tags/TagCard.tsx index c6943c7f7..5f5db0358 100644 --- a/ui/v2.5/src/components/Tags/TagCard.tsx +++ b/ui/v2.5/src/components/Tags/TagCard.tsx @@ -47,6 +47,19 @@ export const TagCard: React.FC = ({ ); } + function maybeRenderPerformersPopoverButton() { + if (!tag.performer_count) return; + + return ( + + + + ); + } + function maybeRenderPopoverButtonGroup() { if (tag) { return ( @@ -55,6 +68,7 @@ export const TagCard: React.FC = ({ {maybeRenderScenesPopoverButton()} {maybeRenderSceneMarkersPopoverButton()} + {maybeRenderPerformersPopoverButton()} ); diff --git a/ui/v2.5/src/components/Tags/TagDetails/Tag.tsx b/ui/v2.5/src/components/Tags/TagDetails/Tag.tsx index 0583ac60b..c1ec1d63f 100644 --- a/ui/v2.5/src/components/Tags/TagDetails/Tag.tsx +++ b/ui/v2.5/src/components/Tags/TagDetails/Tag.tsx @@ -22,6 +22,8 @@ import { useToast } from "src/hooks"; import { TagScenesPanel } from "./TagScenesPanel"; import { TagMarkersPanel } from "./TagMarkersPanel"; import { TagImagesPanel } from "./TagImagesPanel"; +import { TagPerformersPanel } from "./TagPerformersPanel"; +import { TagGalleriesPanel } from "./TagGalleriesPanel"; interface ITabParams { id?: string; @@ -51,7 +53,13 @@ export const Tag: React.FC = () => { const [createTag] = useTagCreate(getTagInput() as GQL.TagUpdateInput); const [deleteTag] = useTagDestroy(getTagInput() as GQL.TagUpdateInput); - const activeTabKey = tab === "markers" || tab === "images" ? tab : "scenes"; + const activeTabKey = + tab === "markers" || + tab === "images" || + tab === "performers" || + tab === "galleries" + ? 
tab + : "scenes"; const setActiveTabKey = (newTab: string | null) => { if (tab !== newTab) { const tabParam = newTab === "scenes" ? "" : `/${newTab}`; @@ -133,8 +141,6 @@ export const Tag: React.FC = () => { }, }); if (result.data?.tagUpdate) { - if (result.data.tagUpdate.image_path) - await fetch(result.data.tagUpdate.image_path, { cache: "reload" }); updateTagData(result.data.tagUpdate); setIsEditing(false); } @@ -257,9 +263,15 @@ export const Tag: React.FC = () => { + + + + + +
)} diff --git a/ui/v2.5/src/components/Tags/TagDetails/TagGalleriesPanel.tsx b/ui/v2.5/src/components/Tags/TagDetails/TagGalleriesPanel.tsx new file mode 100644 index 000000000..97ae35107 --- /dev/null +++ b/ui/v2.5/src/components/Tags/TagDetails/TagGalleriesPanel.tsx @@ -0,0 +1,12 @@ +import React from "react"; +import * as GQL from "src/core/generated-graphql"; +import { tagFilterHook } from "src/core/tags"; +import { GalleryList } from "src/components/Galleries/GalleryList"; + +interface ITagGalleriesPanel { + tag: GQL.TagDataFragment; +} + +export const TagGalleriesPanel: React.FC = ({ tag }) => { + return ; +}; diff --git a/ui/v2.5/src/components/Tags/TagDetails/TagPerformersPanel.tsx b/ui/v2.5/src/components/Tags/TagDetails/TagPerformersPanel.tsx new file mode 100644 index 000000000..4cbb4e6d6 --- /dev/null +++ b/ui/v2.5/src/components/Tags/TagDetails/TagPerformersPanel.tsx @@ -0,0 +1,12 @@ +import React from "react"; +import * as GQL from "src/core/generated-graphql"; +import { tagFilterHook } from "src/core/tags"; +import { PerformerList } from "src/components/Performers/PerformerList"; + +interface ITagPerformersPanel { + tag: GQL.TagDataFragment; +} + +export const TagPerformersPanel: React.FC = ({ tag }) => { + return ; +}; diff --git a/ui/v2.5/src/components/Tags/TagList.tsx b/ui/v2.5/src/components/Tags/TagList.tsx index ed2a918ec..53645afd7 100644 --- a/ui/v2.5/src/components/Tags/TagList.tsx +++ b/ui/v2.5/src/components/Tags/TagList.tsx @@ -4,7 +4,11 @@ import Mousetrap from "mousetrap"; import { FindTagsQueryResult } from "src/core/generated-graphql"; import { ListFilterModel } from "src/models/list-filter/filter"; import { DisplayMode } from "src/models/list-filter/types"; -import { showWhenSelected, useTagsList } from "src/hooks/ListHook"; +import { + showWhenSelected, + useTagsList, + PersistanceLevel, +} from "src/hooks/ListHook"; import { Button } from "react-bootstrap"; import { Link, useHistory } from "react-router-dom"; import * as GQL from "src/core/generated-graphql"; @@ -144,7 +148,7 @@ export const TagList: React.FC = ({ filterHook }) => { selectable: true, zoomable: true, defaultZoomIndex: 0, - persistState: true, + persistState: PersistanceLevel.ALL, renderDeleteDialog, }); diff --git a/ui/v2.5/src/components/Wall/WallItem.tsx b/ui/v2.5/src/components/Wall/WallItem.tsx index 4d2b36c3d..24ccdb92d 100644 --- a/ui/v2.5/src/components/Wall/WallItem.tsx +++ b/ui/v2.5/src/components/Wall/WallItem.tsx @@ -57,6 +57,8 @@ const Preview: React.FC<{ ); const video = (