Merge to master for 0.5

WithoutPants 2021-02-22 18:04:33 +11:00 committed by GitHub
commit 2ec595dedf
366 changed files with 25473 additions and 21282 deletions


@ -12,6 +12,24 @@ env:
global:
- GO111MODULE=on
before_install:
- set -e
# Configure environment so changes are picked up when the Docker daemon is restarted after upgrading
- echo '{"experimental":true}' | sudo tee /etc/docker/daemon.json
- export DOCKER_CLI_EXPERIMENTAL=enabled
# Upgrade to Docker CE 19.03 for BuildKit support
- curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
- sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
- sudo apt-get update
- sudo apt-get -y -o Dpkg::Options::="--force-confnew" install docker-ce
# install binfmt docker container; this container uses qemu to run arm programs transparently, allowing docker to build arm v6/v7/v8 containers.
- docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64
# Show info to simplify debugging and create a builder that can build the platforms we need
- docker info
- docker buildx create --name builder --use
- docker buildx inspect --bootstrap
- docker buildx ls
install:
- echo -e "machine github.com\n login $CI_USER_TOKEN" > ~/.netrc
- nvm install 12
- travis_retry make pre-ui
@ -25,6 +43,8 @@ script:
after_success:
- docker pull stashapp/compiler:4
- sh ./scripts/cross-compile.sh
- git describe --tags --exclude latest_develop | tee CHECKSUMS_SHA1
- sha1sum dist/stash-* | sed 's/dist\///g' | tee -a CHECKSUMS_SHA1
- 'if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then sh ./scripts/upload-pull-request.sh; fi'
before_deploy:
# push the latest tag when on the develop branch
@ -47,6 +67,7 @@ deploy:
- dist/stash-linux-arm64v8
- dist/stash-linux-arm32v7
- dist/stash-pi
- CHECKSUMS_SHA1
skip_cleanup: true
overwrite: true
name: "${STASH_VERSION}: Latest development build"
@ -58,7 +79,7 @@ deploy:
# docker image build for develop release
- provider: script
skip_cleanup: true
script: bash ./docker/ci/x86_64/docker_push.sh development-x86_64
script: bash ./docker/ci/x86_64/docker_push.sh development
on:
repo: stashapp/stash
branch: develop
@ -73,6 +94,7 @@ deploy:
- dist/stash-linux-arm64v8
- dist/stash-linux-arm32v7
- dist/stash-pi
- CHECKSUMS_SHA1
# make the release a draft so the maintainers can confirm before releasing
draft: true
skip_cleanup: true


@ -1,25 +1,17 @@
# must be built from /dist directory
FROM ubuntu:20.04 as prep
LABEL MAINTAINER="https://discord.gg/Uz29ny"
RUN apt-get update && \
apt-get -y install curl xz-utils && \
apt-get autoclean -y && \
rm -rf /var/lib/apt/lists/*
FROM --platform=$BUILDPLATFORM ubuntu:20.04 AS prep
ARG TARGETPLATFORM
WORKDIR /
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN curl --http1.1 -o /ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz && \
tar xf /ffmpeg.tar.xz && \
rm ffmpeg.tar.xz && \
mv /ffmpeg*/ /ffmpeg/
COPY stash-* /
RUN if [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then BIN=stash-pi; \
elif [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then BIN=stash-linux-arm32v7; \
elif [ "$TARGETPLATFORM" = "linux/arm64" ]; then BIN=stash-linux-arm64v8; \
elif [ "$TARGETPLATFORM" = "linux/amd64" ]; then BIN=stash-linux; \
fi; \
mv $BIN /stash
FROM ubuntu:20.04 as app
RUN apt-get update && apt-get -y install ca-certificates
COPY --from=prep /ffmpeg/ffmpeg /ffmpeg/ffprobe /usr/bin/
COPY /stash-linux /usr/bin/stash
RUN apt update && apt install -y python3 python-is-python3 python3-requests ffmpeg && rm -rf /var/lib/apt/lists/*
COPY --from=prep /stash /usr/bin/
EXPOSE 9999
CMD ["stash"]


@ -1,9 +1,8 @@
#!/bin/bash
DOCKER_TAG=$1
echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
# must build the image from dist directory
docker build -t stashapp/stash:$DOCKER_TAG -f ./docker/ci/x86_64/Dockerfile ./dist
docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 --push --output type=image,name=stashapp/stash:$DOCKER_TAG,push=true -f docker/ci/x86_64/Dockerfile dist/
echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
docker push stashapp/stash:$DOCKER_TAG

go.mod

@ -4,10 +4,10 @@ require (
github.com/99designs/gqlgen v0.12.2
github.com/Yamashou/gqlgenc v0.0.0-20200902035953-4dbef3551953
github.com/antchfx/htmlquery v1.2.3
github.com/bmatcuk/doublestar/v2 v2.0.1
github.com/chromedp/cdproto v0.0.0-20200608134039-8a80cdaf865c
github.com/chromedp/chromedp v0.5.3
github.com/disintegration/imaging v1.6.0
github.com/fvbommel/sortorder v1.0.2
github.com/go-chi/chi v4.0.2+incompatible
github.com/gobuffalo/packr/v2 v2.0.2
github.com/golang-migrate/migrate/v4 v4.3.1
@ -20,6 +20,7 @@ require (
github.com/json-iterator/go v1.1.9
github.com/mattn/go-sqlite3 v1.13.0
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007
github.com/remeh/sizedwaitgroup v1.0.0
github.com/rs/cors v1.6.0
github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f
github.com/sirupsen/logrus v1.4.2

go.sum

@ -62,8 +62,6 @@ github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+Ce
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCSz6Q9T7+igc/hlvDOUdtWKryOrtFyIVABv/p7k=
github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84=
github.com/bmatcuk/doublestar/v2 v2.0.1 h1:EFT91DmIMRcrUEcYUW7AqSAwKvNzP5+CoDmNVBbcQOU=
github.com/bmatcuk/doublestar/v2 v2.0.1/go.mod h1:QMmcs3H2AUQICWhfzLXz+IYln8lRQmTZRptLie8RgRw=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
@ -128,6 +126,8 @@ github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsouza/fake-gcs-server v1.7.0/go.mod h1:5XIRs4YvwNbNoz+1JF8j6KLAyDh7RHGAyAK3EP2EsNk=
github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM45eo=
github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
github.com/go-chi/chi v3.3.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ=
@ -614,6 +614,8 @@ github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/remeh/sizedwaitgroup v1.0.0 h1:VNGGFwNo/R5+MJBf6yrsr110p0m4/OX4S3DCy7Kyl5E=
github.com/remeh/sizedwaitgroup v1.0.0/go.mod h1:3j2R4OIe/SeS6YDhICBy22RWjJC5eNCJ1V+9+NVNYlo=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
github.com/rogpeppe/go-internal v1.0.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.1.0 h1:g0fH8RicVgNl+zVZDCDfbdWxAWoAEJyI7I3TZYXFiig=


@ -9,6 +9,7 @@ fragment ConfigGeneralData on ConfigGeneralResult {
cachePath
calculateMD5
videoFileNamingAlgorithm
parallelTasks
previewSegments
previewSegmentDuration
previewExcludeStart
@ -39,6 +40,7 @@ fragment ConfigGeneralData on ConfigGeneralResult {
}
fragment ConfigInterfaceData on ConfigInterfaceResult {
menuItems
soundOnPreview
wallShowTitle
wallPlayback


@ -7,6 +7,7 @@ fragment GallerySlimData on Gallery {
url
details
rating
organized
image_count
cover {
...SlimImageData
@ -20,7 +21,7 @@ fragment GallerySlimData on Gallery {
performers {
...PerformerData
}
scene {
scenes {
id
title
path


@ -7,6 +7,7 @@ fragment GalleryData on Gallery {
url
details
rating
organized
images {
...SlimImageData
}
@ -23,9 +24,7 @@ fragment GalleryData on Gallery {
performers {
...PerformerData
}
scene {
id
title
path
scenes {
...SceneData
}
}


@ -3,6 +3,7 @@ fragment SlimImageData on Image {
checksum
title
rating
organized
o_counter
path


@ -3,6 +3,7 @@ fragment ImageData on Image {
checksum
title
rating
organized
o_counter
path


@ -8,6 +8,7 @@ fragment SlimSceneData on Scene {
date
rating
o_counter
organized
path
file {
@ -36,7 +37,7 @@ fragment SlimSceneData on Scene {
seconds
}
gallery {
galleries {
id
path
title


@ -8,6 +8,7 @@ fragment SceneData on Scene {
date
rating
o_counter
organized
path
file {
@ -34,8 +35,8 @@ fragment SceneData on Scene {
...SceneMarkerData
}
gallery {
...GalleryData
galleries {
...GallerySlimData
}
studio {


@ -1,79 +1,23 @@
mutation GalleryCreate(
$title: String!,
$details: String,
$url: String,
$date: String,
$rating: Int,
$scene_id: ID,
$studio_id: ID,
$performer_ids: [ID!] = [],
$tag_ids: [ID!] = []) {
$input: GalleryCreateInput!) {
galleryCreate(input: {
title: $title,
details: $details,
url: $url,
date: $date,
rating: $rating,
scene_id: $scene_id,
studio_id: $studio_id,
tag_ids: $tag_ids,
performer_ids: $performer_ids
}) {
galleryCreate(input: $input) {
...GalleryData
}
}
mutation GalleryUpdate(
$id: ID!,
$title: String,
$details: String,
$url: String,
$date: String,
$rating: Int,
$scene_id: ID,
$studio_id: ID,
$performer_ids: [ID!] = [],
$tag_ids: [ID!] = []) {
$input: GalleryUpdateInput!) {
galleryUpdate(input: {
id: $id,
title: $title,
details: $details,
url: $url,
date: $date,
rating: $rating,
scene_id: $scene_id,
studio_id: $studio_id,
tag_ids: $tag_ids,
performer_ids: $performer_ids
}) {
galleryUpdate(input: $input) {
...GalleryData
}
}
mutation BulkGalleryUpdate(
$ids: [ID!] = [],
$url: String,
$date: String,
$details: String,
$rating: Int,
$scene_id: ID,
$studio_id: ID,
$tag_ids: BulkUpdateIds,
$performer_ids: BulkUpdateIds) {
$input: BulkGalleryUpdateInput!) {
bulkGalleryUpdate(input: {
ids: $ids,
details: $details,
url: $url,
date: $date,
rating: $rating,
scene_id: $scene_id,
studio_id: $studio_id,
tag_ids: $tag_ids,
performer_ids: $performer_ids
}) {
bulkGalleryUpdate(input: $input) {
...GalleryData
}
}
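The gallery mutations above (and the image, scene, movie, performer, studio and tag mutations that follow) switch from enumerating every field as its own GraphQL variable to passing a single $input object. A minimal sketch of the variables a client might send for GalleryUpdate under the new shape (field names are taken from GalleryUpdateInput, the values are illustrative); only keys actually present under "input" become part of the changeset, which is what the server-side map inspection in changeset_translator.go further down relies on:

// Illustrative variables payload for the refactored GalleryUpdate mutation.
// Keys omitted from "input" (e.g. "details") are simply absent from the
// changeset, which is different from sending them explicitly as null.
func galleryUpdateVariables() map[string]interface{} {
	return map[string]interface{}{
		"input": map[string]interface{}{
			"id":     "42",
			"title":  "Beach trip",
			"rating": 4,
		},
	}
}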


@ -1,43 +1,15 @@
mutation ImageUpdate(
$id: ID!,
$title: String,
$rating: Int,
$studio_id: ID,
$gallery_ids: [ID!] = [],
$performer_ids: [ID!] = [],
$tag_ids: [ID!] = []) {
$input: ImageUpdateInput!) {
imageUpdate(input: {
id: $id,
title: $title,
rating: $rating,
studio_id: $studio_id,
gallery_ids: $gallery_ids,
performer_ids: $performer_ids,
tag_ids: $tag_ids
}) {
imageUpdate(input: $input) {
...SlimImageData
}
}
mutation BulkImageUpdate(
$ids: [ID!] = [],
$title: String,
$rating: Int,
$studio_id: ID,
$gallery_ids: BulkUpdateIds,
$performer_ids: BulkUpdateIds,
$tag_ids: BulkUpdateIds) {
$input: BulkImageUpdateInput!) {
bulkImageUpdate(input: {
ids: $ids,
title: $title,
rating: $rating,
studio_id: $studio_id,
gallery_ids: $gallery_ids,
performer_ids: $performer_ids,
tag_ids: $tag_ids
}) {
bulkImageUpdate(input: $input) {
...SlimImageData
}
}


@ -26,8 +26,8 @@ mutation MetadataAutoTag($input: AutoTagMetadataInput!) {
metadataAutoTag(input: $input)
}
mutation MetadataClean {
metadataClean
mutation MetadataClean($input: CleanMetadataInput!) {
metadataClean(input: $input)
}
mutation MigrateHashNaming {
@ -36,4 +36,8 @@ mutation MigrateHashNaming {
mutation StopJob {
stopJob
}
}
mutation BackupDatabase($input: BackupDatabaseInput!) {
backupDatabase(input: $input)
}


@ -16,25 +16,16 @@ mutation MovieCreate(
}
}
mutation MovieUpdate(
$id: ID!
$name: String,
$aliases: String,
$duration: Int,
$date: String,
$rating: Int,
$studio_id: ID,
$director: String,
$synopsis: String,
$url: String,
$front_image: String,
$back_image: String) {
movieUpdate(input: { id: $id, name: $name, aliases: $aliases, duration: $duration, date: $date, rating: $rating, studio_id: $studio_id, director: $director, synopsis: $synopsis, url: $url, front_image: $front_image, back_image: $back_image }) {
mutation MovieUpdate($input: MovieUpdateInput!) {
movieUpdate(input: $input) {
...MovieData
}
}
mutation MovieDestroy($id: ID!) {
movieDestroy(input: { id: $id })
}
}
mutation MoviesDestroy($ids: [ID!]!) {
moviesDestroy(ids: $ids)
}


@ -45,49 +45,9 @@ mutation PerformerCreate(
}
mutation PerformerUpdate(
$id: ID!,
$name: String,
$url: String,
$gender: GenderEnum,
$birthdate: String,
$ethnicity: String,
$country: String,
$eye_color: String,
$height: String,
$measurements: String,
$fake_tits: String,
$career_length: String,
$tattoos: String,
$piercings: String,
$aliases: String,
$twitter: String,
$instagram: String,
$favorite: Boolean,
$stash_ids: [StashIDInput!],
$image: String) {
$input: PerformerUpdateInput!) {
performerUpdate(input: {
id: $id,
name: $name,
url: $url,
gender: $gender,
birthdate: $birthdate,
ethnicity: $ethnicity,
country: $country,
eye_color: $eye_color,
height: $height,
measurements: $measurements,
fake_tits: $fake_tits,
career_length: $career_length,
tattoos: $tattoos,
piercings: $piercings,
aliases: $aliases,
twitter: $twitter,
instagram: $instagram,
favorite: $favorite,
stash_ids: $stash_ids,
image: $image
}) {
performerUpdate(input: $input) {
...PerformerData
}
}
@ -95,3 +55,7 @@ mutation PerformerUpdate(
mutation PerformerDestroy($id: ID!) {
performerDestroy(input: { id: $id })
}
mutation PerformersDestroy($ids: [ID!]!) {
performersDestroy(ids: $ids)
}


@ -1,62 +1,16 @@
mutation SceneUpdate(
$id: ID!,
$title: String,
$details: String,
$url: String,
$date: String,
$rating: Int,
$studio_id: ID,
$gallery_id: ID,
$performer_ids: [ID!] = [],
$movies: [SceneMovieInput!] = [],
$tag_ids: [ID!] = [],
$stash_ids: [StashIDInput!],
$cover_image: String) {
$input: SceneUpdateInput!) {
sceneUpdate(input: {
id: $id,
title: $title,
details: $details,
url: $url,
date: $date,
rating: $rating,
studio_id: $studio_id,
gallery_id: $gallery_id,
performer_ids: $performer_ids,
movies: $movies,
tag_ids: $tag_ids,
stash_ids: $stash_ids,
cover_image: $cover_image
}) {
...SceneData
sceneUpdate(input: $input) {
...SceneData
}
}
mutation BulkSceneUpdate(
$ids: [ID!] = [],
$title: String,
$details: String,
$url: String,
$date: String,
$rating: Int,
$studio_id: ID,
$gallery_id: ID,
$performer_ids: BulkUpdateIds,
$tag_ids: BulkUpdateIds) {
$input: BulkSceneUpdateInput!) {
bulkSceneUpdate(input: {
ids: $ids,
title: $title,
details: $details,
url: $url,
date: $date,
rating: $rating,
studio_id: $studio_id,
gallery_id: $gallery_id,
performer_ids: $performer_ids,
tag_ids: $tag_ids
}) {
...SceneData
bulkSceneUpdate(input: $input) {
...SceneData
}
}


@ -11,14 +11,9 @@ mutation StudioCreate(
}
mutation StudioUpdate(
$id: ID!
$name: String,
$url: String,
$image: String,
$stash_ids: [StashIDInput!],
$parent_id: ID) {
$input: StudioUpdateInput!) {
studioUpdate(input: { id: $id, name: $name, url: $url, image: $image, stash_ids: $stash_ids, parent_id: $parent_id }) {
studioUpdate(input: $input) {
...StudioData
}
}
@ -26,3 +21,7 @@ mutation StudioUpdate(
mutation StudioDestroy($id: ID!) {
studioDestroy(input: { id: $id })
}
mutation StudiosDestroy($ids: [ID!]!) {
studiosDestroy(ids: $ids)
}


@ -8,8 +8,12 @@ mutation TagDestroy($id: ID!) {
tagDestroy(input: { id: $id })
}
mutation TagUpdate($id: ID!, $name: String!, $image: String) {
tagUpdate(input: { id: $id, name: $name, image: $image }) {
mutation TagsDestroy($ids: [ID!]!) {
tagsDestroy(ids: $ids)
}
mutation TagUpdate($input: TagUpdateInput!) {
tagUpdate(input: $input) {
...TagData
}
}
}


@ -36,14 +36,6 @@ query AllTagsForFilter {
}
}
query ValidGalleriesForScene($scene_id: ID!) {
validGalleriesForScene(scene_id: $scene_id) {
id
path
title
}
}
query Stats {
stats {
scene_count,


@ -45,7 +45,7 @@ query ParseSceneFilenames($filter: FindFilterType!, $config: SceneParserInput!)
date
rating
studio_id
gallery_id
gallery_ids
movies {
movie_id
}


@ -50,8 +50,6 @@ type Query {
"""Get marker strings"""
markerStrings(q: String, sort: String): [MarkerStringsResultType]!
"""Get the list of valid galleries for a given scene ID"""
validGalleriesForScene(scene_id: ID): [Gallery!]!
"""Get stats"""
stats: StatsResultType!
"""Organize scene markers by tag for a given scene ID"""
@ -175,18 +173,22 @@ type Mutation {
performerCreate(input: PerformerCreateInput!): Performer
performerUpdate(input: PerformerUpdateInput!): Performer
performerDestroy(input: PerformerDestroyInput!): Boolean!
performersDestroy(ids: [ID!]!): Boolean!
studioCreate(input: StudioCreateInput!): Studio
studioUpdate(input: StudioUpdateInput!): Studio
studioDestroy(input: StudioDestroyInput!): Boolean!
studiosDestroy(ids: [ID!]!): Boolean!
movieCreate(input: MovieCreateInput!): Movie
movieUpdate(input: MovieUpdateInput!): Movie
movieDestroy(input: MovieDestroyInput!): Boolean!
moviesDestroy(ids: [ID!]!): Boolean!
tagCreate(input: TagCreateInput!): Tag
tagUpdate(input: TagUpdateInput!): Tag
tagDestroy(input: TagDestroyInput!): Boolean!
tagsDestroy(ids: [ID!]!): Boolean!
"""Change general configuration options"""
configureGeneral(input: ConfigGeneralInput!): ConfigGeneralResult!
@ -209,7 +211,7 @@ type Mutation {
"""Start auto-tagging. Returns the job ID"""
metadataAutoTag(input: AutoTagMetadataInput!): String!
"""Clean metadata. Returns the job ID"""
metadataClean: String!
metadataClean(input: CleanMetadataInput!): String!
"""Migrate generated files for the current hash naming"""
migrateHashNaming: String!
@ -222,8 +224,11 @@ type Mutation {
stopJob: Boolean!
""" Submit fingerprints to stash-box instance """
"""Submit fingerprints to stash-box instance"""
submitStashBoxFingerprints(input: StashBoxFingerprintSubmissionInput!): Boolean!
"""Backup the database. Optionally returns a link to download the database file"""
backupDatabase(input: BackupDatabaseInput!): String
}
type Subscription {


@ -35,6 +35,8 @@ input ConfigGeneralInput {
calculateMD5: Boolean!
"""Hash algorithm to use for generated file naming"""
videoFileNamingAlgorithm: HashAlgorithm!
"""Number of parallel tasks to start during scan/generate"""
parallelTasks: Int
"""Number of segments in a preview file"""
previewSegments: Int
"""Preview segment duration, in seconds"""
@ -96,6 +98,8 @@ type ConfigGeneralResult {
calculateMD5: Boolean!
"""Hash algorithm to use for generated file naming"""
videoFileNamingAlgorithm: HashAlgorithm!
"""Number of parallel tasks to start during scan/generate"""
parallelTasks: Int!
"""Number of segments in a preview file"""
previewSegments: Int!
"""Preview segment duration, in seconds"""
@ -145,6 +149,8 @@ type ConfigGeneralResult {
}
input ConfigInterfaceInput {
"""Ordered list of items that should be shown in the menu"""
menuItems: [String!]
"""Enable sound on mouseover previews"""
soundOnPreview: Boolean
"""Show title and tags in wall view"""
@ -160,10 +166,13 @@ input ConfigInterfaceInput {
"""Custom CSS"""
css: String
cssEnabled: Boolean
"""Interface language"""
language: String
}
type ConfigInterfaceResult {
"""Ordered list of items that should be shown in the menu"""
menuItems: [String!]
"""Enable sound on mouseover previews"""
soundOnPreview: Boolean
"""Show title and tags in wall view"""
@ -174,7 +183,7 @@ type ConfigInterfaceResult {
maximumLoopDuration: Int
"""If true, video will autostart on load in the scene player"""
autostartVideo: Boolean
"""If true, studio overlays will be shown as text instead of logo images"""
"""If true, studio overlays will be shown as text instead of logo images"""
showStudioAsText: Boolean
"""Custom CSS"""
css: String


@ -6,17 +6,26 @@ enum SortDirectionEnum {
input FindFilterType {
q: String
page: Int
"""use per_page = 0 to indicate all results. Defaults to 25."""
per_page: Int
sort: String
direction: SortDirectionEnum
}
enum ResolutionEnum {
"144p", VERY_LOW
"240p", LOW
"360p", R360P
"480p", STANDARD
"540p", WEB_HD
"720p", STANDARD_HD
"1080p", FULL_HD
"1440p", QUAD_HD
"1920p", VR_HD
"4k", FOUR_K
"5k", FIVE_K
"6k", SIX_K
"8k", EIGHT_K
}
input PerformerFilterType {
@ -70,6 +79,8 @@ input SceneFilterType {
path: StringCriterionInput
"""Filter by rating"""
rating: IntCriterionInput
"""Filter by organized"""
organized: Boolean
"""Filter by o-counter"""
o_counter: IntCriterionInput
"""Filter by resolution"""
@ -117,6 +128,8 @@ input GalleryFilterType {
is_zip: Boolean
"""Filter by rating"""
rating: IntCriterionInput
"""Filter by organized"""
organized: Boolean
"""Filter by average image resolution"""
average_resolution: ResolutionEnum
"""Filter to only include scenes with this studio"""
@ -145,6 +158,8 @@ input ImageFilterType {
path: StringCriterionInput
"""Filter by rating"""
rating: IntCriterionInput
"""Filter by organized"""
organized: Boolean
"""Filter by o-counter"""
o_counter: IntCriterionInput
"""Filter by resolution"""
@ -178,6 +193,10 @@ enum CriterionModifier {
INCLUDES_ALL,
INCLUDES,
EXCLUDES,
"""MATCHES REGEX"""
MATCHES_REGEX,
"""NOT MATCHES REGEX"""
NOT_MATCHES_REGEX,
}
input StringCriterionInput {


@ -8,7 +8,8 @@ type Gallery {
date: String
details: String
rating: Int
scene: Scene
organized: Boolean!
scenes: [Scene!]!
studio: Studio
image_count: Int!
tags: [Tag!]!
@ -31,7 +32,8 @@ input GalleryCreateInput {
date: String
details: String
rating: Int
scene_id: ID
organized: Boolean
scene_ids: [ID!]
studio_id: ID
tag_ids: [ID!]
performer_ids: [ID!]
@ -45,7 +47,8 @@ input GalleryUpdateInput {
date: String
details: String
rating: Int
scene_id: ID
organized: Boolean
scene_ids: [ID!]
studio_id: ID
tag_ids: [ID!]
performer_ids: [ID!]
@ -58,7 +61,8 @@ input BulkGalleryUpdateInput {
date: String
details: String
rating: Int
scene_id: ID
organized: Boolean
scene_ids: BulkUpdateIds
studio_id: ID
tag_ids: BulkUpdateIds
performer_ids: BulkUpdateIds


@ -4,6 +4,7 @@ type Image {
title: String
rating: Int
o_counter: Int
organized: Boolean!
path: String!
file: ImageFileType! # Resolver
@ -31,6 +32,7 @@ input ImageUpdateInput {
id: ID!
title: String
rating: Int
organized: Boolean
studio_id: ID
performer_ids: [ID!]
@ -43,6 +45,7 @@ input BulkImageUpdateInput {
ids: [ID!]
title: String
rating: Int
organized: Boolean
studio_id: ID
performer_ids: BulkUpdateIds


@ -32,10 +32,26 @@ input GeneratePreviewOptionsInput {
input ScanMetadataInput {
paths: [String!]
"""Set name, date, details from metadata (if present)"""
useFileMetadata: Boolean!
"""Strip file extension from title"""
stripFileExtension: Boolean!
"""Generate previews during scan"""
scanGeneratePreviews: Boolean!
"""Generate image previews during scan"""
scanGenerateImagePreviews: Boolean!
"""Generate sprites during scan"""
scanGenerateSprites: Boolean!
}
input CleanMetadataInput {
"""Do a dry run. Don't delete any files"""
dryRun: Boolean!
}
input AutoTagMetadataInput {
"""Paths to tag, null for all files"""
paths: [String!]
"""IDs of performers to tag files with, or "*" for all"""
performers: [String!]
"""IDs of studios to tag files with, or "*" for all"""
@ -83,3 +99,7 @@ input ImportObjectsInput {
duplicateBehaviour: ImportDuplicateEnum!
missingRefBehaviour: ImportMissingRefEnum!
}
input BackupDatabaseInput {
download: Boolean
}
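metadataClean now requires a CleanMetadataInput, so a dry run can be requested before anything is deleted, and backupDatabase optionally returns a download link. A rough sketch of driving the clean mutation from Go over the HTTP GraphQL endpoint (the /graphql path, port and response handling are assumptions, not part of this diff):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Assumed endpoint; adjust host/port to the running stash instance.
	const endpoint = "http://localhost:9999/graphql"

	body, _ := json.Marshal(map[string]interface{}{
		"query": "mutation MetadataClean($input: CleanMetadataInput!) { metadataClean(input: $input) }",
		"variables": map[string]interface{}{
			"input": map[string]interface{}{"dryRun": true},
		},
	})

	resp, err := http.Post(endpoint, "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// The mutation returns the started job ID inside the JSON response body.
	fmt.Println("status:", resp.Status)
}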


@ -32,6 +32,7 @@ type Scene {
url: String
date: String
rating: Int
organized: Boolean!
o_counter: Int
path: String!
@ -39,7 +40,7 @@ type Scene {
paths: ScenePathsType! # Resolver
scene_markers: [SceneMarker!]!
gallery: Gallery
galleries: [Gallery!]!
studio: Studio
movies: [SceneMovie!]!
tags: [Tag!]!
@ -60,8 +61,9 @@ input SceneUpdateInput {
url: String
date: String
rating: Int
organized: Boolean
studio_id: ID
gallery_id: ID
gallery_ids: [ID!]
performer_ids: [ID!]
movies: [SceneMovieInput!]
tag_ids: [ID!]
@ -89,8 +91,9 @@ input BulkSceneUpdateInput {
url: String
date: String
rating: Int
organized: Boolean
studio_id: ID
gallery_id: ID
gallery_ids: BulkUpdateIds
performer_ids: BulkUpdateIds
tag_ids: BulkUpdateIds
}
@ -131,7 +134,7 @@ type SceneParserResult {
date: String
rating: Int
studio_id: ID
gallery_id: ID
gallery_ids: [ID!]
performer_ids: [ID!]
movies: [SceneMovieID!]
tag_ids: [ID!]


@ -1,8 +1,8 @@
type StatsResultType {
scene_count: Int!
scenes_size: Int!
scenes_size: Float!
image_count: Int!
images_size: Int!
images_size: Float!
gallery_count: Int!
performer_count: Int!
studio_count: Int!
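scenes_size and images_size move from Int to Float, most likely because GraphQL's Int is a signed 32-bit value: byte totals overflow it once a library passes roughly 2 GiB, while Float (a double) comfortably covers multi-terabyte libraries. A quick illustration of the limit (the 5 TiB figure is made up):

package main

import "fmt"

func main() {
	// GraphQL Int is a signed 32-bit value; byte totals above ~2 GiB
	// cannot be represented, but a float64 (GraphQL Float) can hold them.
	const librarySize int64 = 5 << 40      // 5 TiB, an illustrative scenes_size
	fmt.Println(librarySize > (1<<31 - 1)) // true: overflows a 32-bit Int
	fmt.Println(float64(librarySize))      // fine as a Float
}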


@ -0,0 +1,141 @@
package api
import (
"context"
"database/sql"
"strconv"
"github.com/99designs/gqlgen/graphql"
"github.com/stashapp/stash/pkg/models"
)
const updateInputField = "input"
func getArgumentMap(ctx context.Context) map[string]interface{} {
rctx := graphql.GetResolverContext(ctx)
reqCtx := graphql.GetRequestContext(ctx)
return rctx.Field.ArgumentMap(reqCtx.Variables)
}
func getUpdateInputMap(ctx context.Context) map[string]interface{} {
args := getArgumentMap(ctx)
input, _ := args[updateInputField]
var ret map[string]interface{}
if input != nil {
ret, _ = input.(map[string]interface{})
}
if ret == nil {
ret = make(map[string]interface{})
}
return ret
}
func getUpdateInputMaps(ctx context.Context) []map[string]interface{} {
args := getArgumentMap(ctx)
input, _ := args[updateInputField]
var ret []map[string]interface{}
if input != nil {
// convert []interface{} into []map[string]interface{}
iSlice, _ := input.([]interface{})
for _, i := range iSlice {
m, _ := i.(map[string]interface{})
if m != nil {
ret = append(ret, m)
}
}
}
return ret
}
type changesetTranslator struct {
inputMap map[string]interface{}
}
func (t changesetTranslator) hasField(field string) bool {
if t.inputMap == nil {
return false
}
_, found := t.inputMap[field]
return found
}
func (t changesetTranslator) nullString(value *string, field string) *sql.NullString {
if !t.hasField(field) {
return nil
}
ret := &sql.NullString{}
if value != nil {
ret.String = *value
ret.Valid = true
}
return ret
}
func (t changesetTranslator) sqliteDate(value *string, field string) *models.SQLiteDate {
if !t.hasField(field) {
return nil
}
ret := &models.SQLiteDate{}
if value != nil {
ret.String = *value
ret.Valid = true
}
return ret
}
func (t changesetTranslator) nullInt64(value *int, field string) *sql.NullInt64 {
if !t.hasField(field) {
return nil
}
ret := &sql.NullInt64{}
if value != nil {
ret.Int64 = int64(*value)
ret.Valid = true
}
return ret
}
func (t changesetTranslator) nullInt64FromString(value *string, field string) *sql.NullInt64 {
if !t.hasField(field) {
return nil
}
ret := &sql.NullInt64{}
if value != nil {
ret.Int64, _ = strconv.ParseInt(*value, 10, 64)
ret.Valid = true
}
return ret
}
func (t changesetTranslator) nullBool(value *bool, field string) *sql.NullBool {
if !t.hasField(field) {
return nil
}
ret := &sql.NullBool{}
if value != nil {
ret.Bool = *value
ret.Valid = true
}
return ret
}
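changeset_translator.go reads the raw "input" argument map so that mutation resolvers can tell the difference between a field that was omitted from the request (leave the column untouched; the translator returns nil) and one sent explicitly as null (clear the column; the translator returns an invalid sql.Null* value). A hypothetical sketch of how an update resolver might use it; the ScenePartial/SceneUpdateInput names and fields are illustrative, not quoted from this commit:

// Hypothetical usage sketch; the partial-update struct and its fields are
// assumptions used only to show the hasField-driven conversion.
func scenePartialFromInput(ctx context.Context, input models.SceneUpdateInput) models.ScenePartial {
	translator := changesetTranslator{
		inputMap: getUpdateInputMap(ctx), // raw "input" argument map from the request
	}

	return models.ScenePartial{
		Title:   translator.nullString(input.Title, "title"),
		Details: translator.nullString(input.Details, "details"),
		Date:    translator.sqliteDate(input.Date, "date"),
		Rating:  translator.nullInt64(input.Rating, "rating"),
	}
}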


@ -18,6 +18,7 @@ const apiReleases string = "https://api.github.com/repos/stashapp/stash/releases
const apiTags string = "https://api.github.com/repos/stashapp/stash/tags"
const apiAcceptHeader string = "application/vnd.github.v3+json"
const developmentTag string = "latest_develop"
const defaultSHLength int = 7 // default length of SHA short hash returned by <git rev-parse --short HEAD>
// ErrNoVersion indicates that no version information has been embedded in the
// stash binary
@ -191,14 +192,20 @@ func GetLatestVersion(shortHash bool) (latestVersion string, latestRelease strin
}
func getReleaseHash(release githubReleasesResponse, shortHash bool, usePreRelease bool) string {
shaLength := len(release.Target_commitish)
// the /latest API call doesn't return the hash in target_commitish
// also add sanity check in case Target_commitish is not 40 characters
if !usePreRelease || len(release.Target_commitish) != 40 {
if !usePreRelease || shaLength != 40 {
return getShaFromTags(shortHash, release.Tag_name)
}
if shortHash {
return release.Target_commitish[0:7] //shorthash is first 7 digits of git commit hash
last := defaultSHLength // default length of git short hash
_, gitShort, _ := GetVersion() // retrieve it to check actual length
if len(gitShort) > last && len(gitShort) < shaLength { // sometimes short hash is longer
last = len(gitShort)
}
return release.Target_commitish[0:last]
}
return release.Target_commitish
@ -229,14 +236,20 @@ func getShaFromTags(shortHash bool, name string) string {
logger.Errorf("Github Tags Api %v", err)
return ""
}
_, gitShort, _ := GetVersion() // retrieve short hash to check actual length
for _, tag := range tags {
if tag.Name == name {
if len(tag.Commit.Sha) != 40 {
shaLength := len(tag.Commit.Sha)
if shaLength != 40 {
return ""
}
if shortHash {
return tag.Commit.Sha[0:7] //shorthash is first 7 digits of git commit hash
last := defaultSHLength // default length of git short hash
if len(gitShort) > last && len(gitShort) < shaLength { // sometimes short hash is longer
last = len(gitShort)
}
return tag.Commit.Sha[0:last]
}
return tag.Commit.Sha
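Both getReleaseHash and getShaFromTags now apply the same trimming rule: default to the 7-character short hash, but if the short hash embedded in the running binary is longer (git lengthens abbreviations as a repository grows) and still shorter than the full 40-character SHA, match its length instead. The rule in isolation, as a standalone sketch:

// shortSHA trims a full commit hash the way the version check above does:
// 7 characters by default, or the length of the locally reported short hash
// when that is longer but still shorter than the full SHA.
func shortSHA(full, localShort string) string {
	last := defaultSHLength
	if len(localShort) > last && len(localShort) < len(full) {
		last = len(localShort)
	}
	return full[:last]
}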


@ -60,7 +60,7 @@ func doMigrateHandler(w http.ResponseWriter, r *http.Request) {
}
// perform database backup
if err = database.Backup(backupPath); err != nil {
if err = database.Backup(database.DB, backupPath); err != nil {
http.Error(w, fmt.Sprintf("error backing up database: %s", err), 500)
return
}


@ -5,12 +5,13 @@ import (
"sort"
"strconv"
"github.com/99designs/gqlgen/graphql"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
)
type Resolver struct{}
type Resolver struct {
txnManager models.TransactionManager
}
func (r *Resolver) Gallery() models.GalleryResolver {
return &galleryResolver{r}
@ -79,69 +80,84 @@ type scrapedSceneMovieResolver struct{ *Resolver }
type scrapedScenePerformerResolver struct{ *Resolver }
type scrapedSceneStudioResolver struct{ *Resolver }
func (r *queryResolver) MarkerWall(ctx context.Context, q *string) ([]*models.SceneMarker, error) {
qb := models.NewSceneMarkerQueryBuilder()
return qb.Wall(q)
func (r *Resolver) withTxn(ctx context.Context, fn func(r models.Repository) error) error {
return r.txnManager.WithTxn(ctx, fn)
}
func (r *queryResolver) SceneWall(ctx context.Context, q *string) ([]*models.Scene, error) {
qb := models.NewSceneQueryBuilder()
return qb.Wall(q)
func (r *Resolver) withReadTxn(ctx context.Context, fn func(r models.ReaderRepository) error) error {
return r.txnManager.WithReadTxn(ctx, fn)
}
func (r *queryResolver) MarkerStrings(ctx context.Context, q *string, sort *string) ([]*models.MarkerStringsResultType, error) {
qb := models.NewSceneMarkerQueryBuilder()
return qb.GetMarkerStrings(q, sort)
}
func (r *queryResolver) ValidGalleriesForScene(ctx context.Context, scene_id *string) ([]*models.Gallery, error) {
if scene_id == nil {
panic("nil scene id") // TODO make scene_id mandatory
func (r *queryResolver) MarkerWall(ctx context.Context, q *string) (ret []*models.SceneMarker, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.SceneMarker().Wall(q)
return err
}); err != nil {
return nil, err
}
sceneID, _ := strconv.Atoi(*scene_id)
sqb := models.NewSceneQueryBuilder()
scene, err := sqb.Find(sceneID)
if err != nil {
return ret, nil
}
func (r *queryResolver) SceneWall(ctx context.Context, q *string) (ret []*models.Scene, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Scene().Wall(q)
return err
}); err != nil {
return nil, err
}
qb := models.NewGalleryQueryBuilder()
validGalleries, err := qb.ValidGalleriesForScenePath(scene.Path)
sceneGallery, _ := qb.FindBySceneID(sceneID, nil)
if sceneGallery != nil {
validGalleries = append(validGalleries, sceneGallery)
return ret, nil
}
func (r *queryResolver) MarkerStrings(ctx context.Context, q *string, sort *string) (ret []*models.MarkerStringsResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.SceneMarker().GetMarkerStrings(q, sort)
return err
}); err != nil {
return nil, err
}
return validGalleries, nil
return ret, nil
}
func (r *queryResolver) Stats(ctx context.Context) (*models.StatsResultType, error) {
scenesQB := models.NewSceneQueryBuilder()
scenesCount, _ := scenesQB.Count()
scenesSize, _ := scenesQB.Size()
imageQB := models.NewImageQueryBuilder()
imageCount, _ := imageQB.Count()
imageSize, _ := imageQB.Size()
galleryQB := models.NewGalleryQueryBuilder()
galleryCount, _ := galleryQB.Count()
performersQB := models.NewPerformerQueryBuilder()
performersCount, _ := performersQB.Count()
studiosQB := models.NewStudioQueryBuilder()
studiosCount, _ := studiosQB.Count()
moviesQB := models.NewMovieQueryBuilder()
moviesCount, _ := moviesQB.Count()
tagsQB := models.NewTagQueryBuilder()
tagsCount, _ := tagsQB.Count()
return &models.StatsResultType{
SceneCount: scenesCount,
ScenesSize: int(scenesSize),
ImageCount: imageCount,
ImagesSize: int(imageSize),
GalleryCount: galleryCount,
PerformerCount: performersCount,
StudioCount: studiosCount,
MovieCount: moviesCount,
TagCount: tagsCount,
}, nil
var ret models.StatsResultType
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
scenesQB := repo.Scene()
imageQB := repo.Image()
galleryQB := repo.Gallery()
studiosQB := repo.Studio()
performersQB := repo.Performer()
moviesQB := repo.Movie()
tagsQB := repo.Tag()
scenesCount, _ := scenesQB.Count()
scenesSize, _ := scenesQB.Size()
imageCount, _ := imageQB.Count()
imageSize, _ := imageQB.Size()
galleryCount, _ := galleryQB.Count()
performersCount, _ := performersQB.Count()
studiosCount, _ := studiosQB.Count()
moviesCount, _ := moviesQB.Count()
tagsCount, _ := tagsQB.Count()
ret = models.StatsResultType{
SceneCount: scenesCount,
ScenesSize: scenesSize,
ImageCount: imageCount,
ImagesSize: imageSize,
GalleryCount: galleryCount,
PerformerCount: performersCount,
StudioCount: studiosCount,
MovieCount: moviesCount,
TagCount: tagsCount,
}
return nil
}); err != nil {
return nil, err
}
return &ret, nil
}
func (r *queryResolver) Version(ctx context.Context) (*models.Version, error) {
@ -171,31 +187,41 @@ func (r *queryResolver) Latestversion(ctx context.Context) (*models.ShortVersion
// Get scene marker tags which show up under the video.
func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([]*models.SceneMarkerTag, error) {
sceneID, _ := strconv.Atoi(scene_id)
sqb := models.NewSceneMarkerQueryBuilder()
sceneMarkers, err := sqb.FindBySceneID(sceneID, nil)
sceneID, err := strconv.Atoi(scene_id)
if err != nil {
return nil, err
}
tags := make(map[int]*models.SceneMarkerTag)
var keys []int
tqb := models.NewTagQueryBuilder()
for _, sceneMarker := range sceneMarkers {
markerPrimaryTag, err := tqb.Find(sceneMarker.PrimaryTagID, nil)
tags := make(map[int]*models.SceneMarkerTag)
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
sceneMarkers, err := repo.SceneMarker().FindBySceneID(sceneID)
if err != nil {
return nil, err
return err
}
_, hasKey := tags[markerPrimaryTag.ID]
var sceneMarkerTag *models.SceneMarkerTag
if !hasKey {
sceneMarkerTag = &models.SceneMarkerTag{Tag: markerPrimaryTag}
tags[markerPrimaryTag.ID] = sceneMarkerTag
keys = append(keys, markerPrimaryTag.ID)
} else {
sceneMarkerTag = tags[markerPrimaryTag.ID]
tqb := repo.Tag()
for _, sceneMarker := range sceneMarkers {
markerPrimaryTag, err := tqb.Find(sceneMarker.PrimaryTagID)
if err != nil {
return err
}
_, hasKey := tags[markerPrimaryTag.ID]
var sceneMarkerTag *models.SceneMarkerTag
if !hasKey {
sceneMarkerTag = &models.SceneMarkerTag{Tag: markerPrimaryTag}
tags[markerPrimaryTag.ID] = sceneMarkerTag
keys = append(keys, markerPrimaryTag.ID)
} else {
sceneMarkerTag = tags[markerPrimaryTag.ID]
}
tags[markerPrimaryTag.ID].SceneMarkers = append(tags[markerPrimaryTag.ID].SceneMarkers, sceneMarker)
}
tags[markerPrimaryTag.ID].SceneMarkers = append(tags[markerPrimaryTag.ID].SceneMarkers, sceneMarker)
return nil
}); err != nil {
return nil, err
}
// Sort so that primary tags that show up earlier in the video are first.
@ -212,13 +238,3 @@ func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([
return result, nil
}
// wasFieldIncluded returns true if the given field was included in the request.
// Slices are unmarshalled to empty slices even if the field was omitted. This
// method determines if it was omitted altogether.
func wasFieldIncluded(ctx context.Context, field string) bool {
rctx := graphql.GetRequestContext(ctx)
_, ret := rctx.Variables[field]
return ret
}
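Resolver now holds a models.TransactionManager and funnels every query through withTxn/withReadTxn instead of constructing query builders directly. The wiring that injects the manager sits outside this hunk; a rough sketch of what it might look like (the sqlite.NewTransactionManager constructor and the generated models.NewExecutableSchema/models.Config names are assumptions, not part of this diff):

// Hypothetical wiring sketch, assuming the gqlgen-generated schema lives in
// pkg/models and a SQLite-backed transaction manager constructor exists.
// handler here is github.com/99designs/gqlgen/graphql/handler.
func newGraphQLHandler() http.Handler {
	txnManager := sqlite.NewTransactionManager()
	return handler.NewDefaultServer(models.NewExecutableSchema(models.Config{
		Resolvers: &Resolver{txnManager: txnManager},
	}))
}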


@ -22,30 +22,39 @@ func (r *galleryResolver) Title(ctx context.Context, obj *models.Gallery) (*stri
return nil, nil
}
func (r *galleryResolver) Images(ctx context.Context, obj *models.Gallery) ([]*models.Image, error) {
qb := models.NewImageQueryBuilder()
return qb.FindByGalleryID(obj.ID)
}
func (r *galleryResolver) Cover(ctx context.Context, obj *models.Gallery) (*models.Image, error) {
qb := models.NewImageQueryBuilder()
imgs, err := qb.FindByGalleryID(obj.ID)
if err != nil {
func (r *galleryResolver) Images(ctx context.Context, obj *models.Gallery) (ret []*models.Image, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
ret, err = repo.Image().FindByGalleryID(obj.ID)
return err
}); err != nil {
return nil, err
}
var ret *models.Image
if len(imgs) > 0 {
ret = imgs[0]
}
return ret, nil
}
for _, img := range imgs {
if image.IsCover(img) {
ret = img
break
func (r *galleryResolver) Cover(ctx context.Context, obj *models.Gallery) (ret *models.Image, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
imgs, err := repo.Image().FindByGalleryID(obj.ID)
if err != nil {
return err
}
if len(imgs) > 0 {
ret = imgs[0]
}
for _, img := range imgs {
if image.IsCover(img) {
ret = img
break
}
}
return nil
}); err != nil {
return nil, err
}
return ret, nil
@ -81,35 +90,66 @@ func (r *galleryResolver) Rating(ctx context.Context, obj *models.Gallery) (*int
return nil, nil
}
func (r *galleryResolver) Scene(ctx context.Context, obj *models.Gallery) (*models.Scene, error) {
if !obj.SceneID.Valid {
return nil, nil
func (r *galleryResolver) Scenes(ctx context.Context, obj *models.Gallery) (ret []*models.Scene, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
ret, err = repo.Scene().FindByGalleryID(obj.ID)
return err
}); err != nil {
return nil, err
}
qb := models.NewSceneQueryBuilder()
return qb.Find(int(obj.SceneID.Int64))
return ret, nil
}
func (r *galleryResolver) Studio(ctx context.Context, obj *models.Gallery) (*models.Studio, error) {
func (r *galleryResolver) Studio(ctx context.Context, obj *models.Gallery) (ret *models.Studio, err error) {
if !obj.StudioID.Valid {
return nil, nil
}
qb := models.NewStudioQueryBuilder()
return qb.Find(int(obj.StudioID.Int64), nil)
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
ret, err = repo.Studio().Find(int(obj.StudioID.Int64))
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *galleryResolver) Tags(ctx context.Context, obj *models.Gallery) ([]*models.Tag, error) {
qb := models.NewTagQueryBuilder()
return qb.FindByGalleryID(obj.ID, nil)
func (r *galleryResolver) Tags(ctx context.Context, obj *models.Gallery) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
ret, err = repo.Tag().FindByGalleryID(obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *galleryResolver) Performers(ctx context.Context, obj *models.Gallery) ([]*models.Performer, error) {
qb := models.NewPerformerQueryBuilder()
return qb.FindByGalleryID(obj.ID, nil)
func (r *galleryResolver) Performers(ctx context.Context, obj *models.Gallery) (ret []*models.Performer, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
ret, err = repo.Performer().FindByGalleryID(obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *galleryResolver) ImageCount(ctx context.Context, obj *models.Gallery) (int, error) {
qb := models.NewImageQueryBuilder()
return qb.CountByGalleryID(obj.ID)
func (r *galleryResolver) ImageCount(ctx context.Context, obj *models.Gallery) (ret int, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
ret, err = repo.Image().CountByGalleryID(obj.ID)
return err
}); err != nil {
return 0, err
}
return ret, nil
}


@ -43,26 +43,51 @@ func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*models.I
}, nil
}
func (r *imageResolver) Galleries(ctx context.Context, obj *models.Image) ([]*models.Gallery, error) {
qb := models.NewGalleryQueryBuilder()
return qb.FindByImageID(obj.ID, nil)
func (r *imageResolver) Galleries(ctx context.Context, obj *models.Image) (ret []*models.Gallery, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
ret, err = repo.Gallery().FindByImageID(obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *imageResolver) Studio(ctx context.Context, obj *models.Image) (*models.Studio, error) {
func (r *imageResolver) Studio(ctx context.Context, obj *models.Image) (ret *models.Studio, err error) {
if !obj.StudioID.Valid {
return nil, nil
}
qb := models.NewStudioQueryBuilder()
return qb.Find(int(obj.StudioID.Int64), nil)
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Studio().Find(int(obj.StudioID.Int64))
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *imageResolver) Tags(ctx context.Context, obj *models.Image) ([]*models.Tag, error) {
qb := models.NewTagQueryBuilder()
return qb.FindByImageID(obj.ID, nil)
func (r *imageResolver) Tags(ctx context.Context, obj *models.Image) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Tag().FindByImageID(obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *imageResolver) Performers(ctx context.Context, obj *models.Image) ([]*models.Performer, error) {
qb := models.NewPerformerQueryBuilder()
return qb.FindByImageID(obj.ID, nil)
func (r *imageResolver) Performers(ctx context.Context, obj *models.Image) (ret []*models.Performer, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Performer().FindByImageID(obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}


@ -53,10 +53,16 @@ func (r *movieResolver) Rating(ctx context.Context, obj *models.Movie) (*int, er
return nil, nil
}
func (r *movieResolver) Studio(ctx context.Context, obj *models.Movie) (*models.Studio, error) {
qb := models.NewStudioQueryBuilder()
func (r *movieResolver) Studio(ctx context.Context, obj *models.Movie) (ret *models.Studio, err error) {
if obj.StudioID.Valid {
return qb.Find(int(obj.StudioID.Int64), nil)
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Studio().Find(int(obj.StudioID.Int64))
return err
}); err != nil {
return nil, err
}
return ret, nil
}
return nil, nil
@ -88,8 +94,14 @@ func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (*
return &backimagePath, nil
}
func (r *movieResolver) SceneCount(ctx context.Context, obj *models.Movie) (*int, error) {
qb := models.NewSceneQueryBuilder()
res, err := qb.CountByMovieID(obj.ID)
func (r *movieResolver) SceneCount(ctx context.Context, obj *models.Movie) (ret *int, err error) {
var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
res, err = repo.Scene().CountByMovieID(obj.ID)
return err
}); err != nil {
return nil, err
}
return &res, err
}


@ -138,18 +138,36 @@ func (r *performerResolver) ImagePath(ctx context.Context, obj *models.Performer
return &imagePath, nil
}
func (r *performerResolver) SceneCount(ctx context.Context, obj *models.Performer) (*int, error) {
qb := models.NewSceneQueryBuilder()
res, err := qb.CountByPerformerID(obj.ID)
return &res, err
func (r *performerResolver) SceneCount(ctx context.Context, obj *models.Performer) (ret *int, err error) {
var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
res, err = repo.Scene().CountByPerformerID(obj.ID)
return err
}); err != nil {
return nil, err
}
return &res, nil
}
func (r *performerResolver) Scenes(ctx context.Context, obj *models.Performer) ([]*models.Scene, error) {
qb := models.NewSceneQueryBuilder()
return qb.FindByPerformerID(obj.ID)
func (r *performerResolver) Scenes(ctx context.Context, obj *models.Performer) (ret []*models.Scene, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Scene().FindByPerformerID(obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *performerResolver) StashIds(ctx context.Context, obj *models.Performer) ([]*models.StashID, error) {
qb := models.NewJoinsQueryBuilder()
return qb.GetPerformerStashIDs(obj.ID)
func (r *performerResolver) StashIds(ctx context.Context, obj *models.Performer) (ret []*models.StashID, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Performer().GetStashIDs(obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}


@ -94,64 +94,109 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
}, nil
}
func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) ([]*models.SceneMarker, error) {
qb := models.NewSceneMarkerQueryBuilder()
return qb.FindBySceneID(obj.ID, nil)
}
func (r *sceneResolver) Gallery(ctx context.Context, obj *models.Scene) (*models.Gallery, error) {
qb := models.NewGalleryQueryBuilder()
return qb.FindBySceneID(obj.ID, nil)
}
func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (*models.Studio, error) {
qb := models.NewStudioQueryBuilder()
return qb.FindBySceneID(obj.ID)
}
func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) ([]*models.SceneMovie, error) {
joinQB := models.NewJoinsQueryBuilder()
qb := models.NewMovieQueryBuilder()
sceneMovies, err := joinQB.GetSceneMovies(obj.ID, nil)
if err != nil {
func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) (ret []*models.SceneMarker, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.SceneMarker().FindBySceneID(obj.ID)
return err
}); err != nil {
return nil, err
}
var ret []*models.SceneMovie
for _, sm := range sceneMovies {
movie, err := qb.Find(sm.MovieID, nil)
return ret, nil
}
func (r *sceneResolver) Galleries(ctx context.Context, obj *models.Scene) (ret []*models.Gallery, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Gallery().FindBySceneID(obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (ret *models.Studio, err error) {
if !obj.StudioID.Valid {
return nil, nil
}
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Studio().Find(int(obj.StudioID.Int64))
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*models.SceneMovie, err error) {
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Scene()
mqb := repo.Movie()
sceneMovies, err := qb.GetMovies(obj.ID)
if err != nil {
return nil, err
return err
}
sceneIdx := sm.SceneIndex
sceneMovie := &models.SceneMovie{
Movie: movie,
for _, sm := range sceneMovies {
movie, err := mqb.Find(sm.MovieID)
if err != nil {
return err
}
sceneIdx := sm.SceneIndex
sceneMovie := &models.SceneMovie{
Movie: movie,
}
if sceneIdx.Valid {
var idx int
idx = int(sceneIdx.Int64)
sceneMovie.SceneIndex = &idx
}
ret = append(ret, sceneMovie)
}
if sceneIdx.Valid {
var idx int
idx = int(sceneIdx.Int64)
sceneMovie.SceneIndex = &idx
}
ret = append(ret, sceneMovie)
return nil
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *sceneResolver) Tags(ctx context.Context, obj *models.Scene) ([]*models.Tag, error) {
qb := models.NewTagQueryBuilder()
return qb.FindBySceneID(obj.ID, nil)
func (r *sceneResolver) Tags(ctx context.Context, obj *models.Scene) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Tag().FindBySceneID(obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *sceneResolver) Performers(ctx context.Context, obj *models.Scene) ([]*models.Performer, error) {
qb := models.NewPerformerQueryBuilder()
return qb.FindBySceneID(obj.ID, nil)
func (r *sceneResolver) Performers(ctx context.Context, obj *models.Scene) (ret []*models.Performer, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Performer().FindBySceneID(obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *sceneResolver) StashIds(ctx context.Context, obj *models.Scene) ([]*models.StashID, error) {
qb := models.NewJoinsQueryBuilder()
return qb.GetSceneStashIDs(obj.ID)
func (r *sceneResolver) StashIds(ctx context.Context, obj *models.Scene) (ret []*models.StashID, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Scene().GetStashIDs(obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}


@ -2,29 +2,47 @@ package api
import (
"context"
"github.com/stashapp/stash/pkg/api/urlbuilders"
"github.com/stashapp/stash/pkg/models"
)
func (r *sceneMarkerResolver) Scene(ctx context.Context, obj *models.SceneMarker) (*models.Scene, error) {
func (r *sceneMarkerResolver) Scene(ctx context.Context, obj *models.SceneMarker) (ret *models.Scene, err error) {
if !obj.SceneID.Valid {
panic("Invalid scene id")
}
qb := models.NewSceneQueryBuilder()
sceneID := int(obj.SceneID.Int64)
scene, err := qb.Find(sceneID)
return scene, err
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
sceneID := int(obj.SceneID.Int64)
ret, err = repo.Scene().Find(sceneID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *sceneMarkerResolver) PrimaryTag(ctx context.Context, obj *models.SceneMarker) (*models.Tag, error) {
qb := models.NewTagQueryBuilder()
tag, err := qb.Find(obj.PrimaryTagID, nil)
return tag, err
func (r *sceneMarkerResolver) PrimaryTag(ctx context.Context, obj *models.SceneMarker) (ret *models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Tag().Find(obj.PrimaryTagID)
return err
}); err != nil {
return nil, err
}
return ret, err
}
func (r *sceneMarkerResolver) Tags(ctx context.Context, obj *models.SceneMarker) ([]*models.Tag, error) {
qb := models.NewTagQueryBuilder()
return qb.FindBySceneMarkerID(obj.ID, nil)
func (r *sceneMarkerResolver) Tags(ctx context.Context, obj *models.SceneMarker) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Tag().FindBySceneMarkerID(obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, err
}
func (r *sceneMarkerResolver) Stream(ctx context.Context, obj *models.SceneMarker) (string, error) {


@ -24,30 +24,69 @@ func (r *studioResolver) URL(ctx context.Context, obj *models.Studio) (*string,
func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*string, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
imagePath := urlbuilders.NewStudioURLBuilder(baseURL, obj.ID).GetStudioImageURL()
var hasImage bool
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
hasImage, err = repo.Studio().HasImage(obj.ID)
return err
}); err != nil {
return nil, err
}
// indicate that image is missing by setting default query param to true
if !hasImage {
imagePath = imagePath + "?default=true"
}
return &imagePath, nil
}
func (r *studioResolver) SceneCount(ctx context.Context, obj *models.Studio) (*int, error) {
qb := models.NewSceneQueryBuilder()
res, err := qb.CountByStudioID(obj.ID)
func (r *studioResolver) SceneCount(ctx context.Context, obj *models.Studio) (ret *int, err error) {
var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
res, err = repo.Scene().CountByStudioID(obj.ID)
return err
}); err != nil {
return nil, err
}
return &res, err
}
func (r *studioResolver) ParentStudio(ctx context.Context, obj *models.Studio) (*models.Studio, error) {
func (r *studioResolver) ParentStudio(ctx context.Context, obj *models.Studio) (ret *models.Studio, err error) {
if !obj.ParentID.Valid {
return nil, nil
}
qb := models.NewStudioQueryBuilder()
return qb.Find(int(obj.ParentID.Int64), nil)
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Studio().Find(int(obj.ParentID.Int64))
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *studioResolver) ChildStudios(ctx context.Context, obj *models.Studio) ([]*models.Studio, error) {
qb := models.NewStudioQueryBuilder()
return qb.FindChildren(obj.ID, nil)
func (r *studioResolver) ChildStudios(ctx context.Context, obj *models.Studio) (ret []*models.Studio, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Studio().FindChildren(obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *studioResolver) StashIds(ctx context.Context, obj *models.Studio) ([]*models.StashID, error) {
qb := models.NewJoinsQueryBuilder()
return qb.GetStudioStashIDs(obj.ID)
func (r *studioResolver) StashIds(ctx context.Context, obj *models.Studio) (ret []*models.StashID, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Studio().GetStashIDs(obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}


@ -7,21 +7,27 @@ import (
"github.com/stashapp/stash/pkg/models"
)
func (r *tagResolver) SceneCount(ctx context.Context, obj *models.Tag) (*int, error) {
qb := models.NewSceneQueryBuilder()
if obj == nil {
return nil, nil
func (r *tagResolver) SceneCount(ctx context.Context, obj *models.Tag) (ret *int, err error) {
var count int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
count, err = repo.Scene().CountByTagID(obj.ID)
return err
}); err != nil {
return nil, err
}
count, err := qb.CountByTagID(obj.ID)
return &count, err
}
func (r *tagResolver) SceneMarkerCount(ctx context.Context, obj *models.Tag) (*int, error) {
qb := models.NewSceneMarkerQueryBuilder()
if obj == nil {
return nil, nil
func (r *tagResolver) SceneMarkerCount(ctx context.Context, obj *models.Tag) (ret *int, err error) {
var count int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
count, err = repo.SceneMarker().CountByTagID(obj.ID)
return err
}); err != nil {
return nil, err
}
count, err := qb.CountByTagID(obj.ID)
return &count, err
}


@ -52,7 +52,7 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
if input.VideoFileNamingAlgorithm != config.GetVideoFileNamingAlgorithm() {
// validate changing VideoFileNamingAlgorithm
if err := manager.ValidateVideoFileNamingAlgorithm(input.VideoFileNamingAlgorithm); err != nil {
if err := manager.ValidateVideoFileNamingAlgorithm(r.txnManager, input.VideoFileNamingAlgorithm); err != nil {
return makeConfigGeneralResult(), err
}
@ -61,6 +61,9 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
config.Set(config.CalculateMD5, input.CalculateMd5)
if input.ParallelTasks != nil {
config.Set(config.ParallelTasks, *input.ParallelTasks)
}
if input.PreviewSegments != nil {
config.Set(config.PreviewSegments, *input.PreviewSegments)
}
@ -168,6 +171,10 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
}
func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.ConfigInterfaceInput) (*models.ConfigInterfaceResult, error) {
if input.MenuItems != nil {
config.Set(config.MenuItems, input.MenuItems)
}
if input.SoundOnPreview != nil {
config.Set(config.SoundOnPreview, *input.SoundOnPreview)
}


@ -4,11 +4,10 @@ import (
"context"
"database/sql"
"errors"
"fmt"
"strconv"
"time"
"github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
@ -61,108 +60,115 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input models.Galle
newGallery.StudioID = sql.NullInt64{Valid: false}
}
if input.SceneID != nil {
sceneID, _ := strconv.ParseInt(*input.SceneID, 10, 64)
newGallery.SceneID = sql.NullInt64{Int64: sceneID, Valid: true}
} else {
// scene must be nullable
newGallery.SceneID = sql.NullInt64{Valid: false}
}
// Start the transaction and save the gallery
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewGalleryQueryBuilder()
jqb := models.NewJoinsQueryBuilder()
gallery, err := qb.Create(newGallery, tx)
if err != nil {
_ = tx.Rollback()
return nil, err
}
// Save the performers
var performerJoins []models.PerformersGalleries
for _, pid := range input.PerformerIds {
performerID, _ := strconv.Atoi(pid)
performerJoin := models.PerformersGalleries{
PerformerID: performerID,
GalleryID: gallery.ID,
// Start the transaction and save the gallery
var gallery *models.Gallery
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Gallery()
var err error
gallery, err = qb.Create(newGallery)
if err != nil {
return err
}
performerJoins = append(performerJoins, performerJoin)
}
if err := jqb.UpdatePerformersGalleries(gallery.ID, performerJoins, tx); err != nil {
return nil, err
}
// Save the tags
var tagJoins []models.GalleriesTags
for _, tid := range input.TagIds {
tagID, _ := strconv.Atoi(tid)
tagJoin := models.GalleriesTags{
GalleryID: gallery.ID,
TagID: tagID,
// Save the performers
if err := r.updateGalleryPerformers(qb, gallery.ID, input.PerformerIds); err != nil {
return err
}
tagJoins = append(tagJoins, tagJoin)
}
if err := jqb.UpdateGalleriesTags(gallery.ID, tagJoins, tx); err != nil {
return nil, err
}
// Commit
if err := tx.Commit(); err != nil {
// Save the tags
if err := r.updateGalleryTags(qb, gallery.ID, input.TagIds); err != nil {
return err
}
// Save the scenes
if err := r.updateGalleryScenes(qb, gallery.ID, input.SceneIds); err != nil {
return err
}
return nil
}); err != nil {
return nil, err
}
return gallery, nil
}
func (r *mutationResolver) GalleryUpdate(ctx context.Context, input models.GalleryUpdateInput) (*models.Gallery, error) {
// Start the transaction and save the gallery
tx := database.DB.MustBeginTx(ctx, nil)
ret, err := r.galleryUpdate(input, tx)
func (r *mutationResolver) updateGalleryPerformers(qb models.GalleryReaderWriter, galleryID int, performerIDs []string) error {
ids, err := utils.StringSliceToIntSlice(performerIDs)
if err != nil {
_ = tx.Rollback()
return nil, err
return err
}
// Commit
if err := tx.Commit(); err != nil {
return nil, err
}
return ret, nil
return qb.UpdatePerformers(galleryID, ids)
}
func (r *mutationResolver) GalleriesUpdate(ctx context.Context, input []*models.GalleryUpdateInput) ([]*models.Gallery, error) {
func (r *mutationResolver) updateGalleryTags(qb models.GalleryReaderWriter, galleryID int, tagIDs []string) error {
ids, err := utils.StringSliceToIntSlice(tagIDs)
if err != nil {
return err
}
return qb.UpdateTags(galleryID, ids)
}
func (r *mutationResolver) updateGalleryScenes(qb models.GalleryReaderWriter, galleryID int, sceneIDs []string) error {
ids, err := utils.StringSliceToIntSlice(sceneIDs)
if err != nil {
return err
}
return qb.UpdateScenes(galleryID, ids)
}
func (r *mutationResolver) GalleryUpdate(ctx context.Context, input models.GalleryUpdateInput) (ret *models.Gallery, err error) {
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
// Start the transaction and save the gallery
tx := database.DB.MustBeginTx(ctx, nil)
var ret []*models.Gallery
for _, gallery := range input {
thisGallery, err := r.galleryUpdate(*gallery, tx)
ret = append(ret, thisGallery)
if err != nil {
_ = tx.Rollback()
return nil, err
}
}
// Commit
if err := tx.Commit(); err != nil {
if err := r.withTxn(ctx, func(repo models.Repository) error {
ret, err = r.galleryUpdate(input, translator, repo)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *mutationResolver) galleryUpdate(input models.GalleryUpdateInput, tx *sqlx.Tx) (*models.Gallery, error) {
qb := models.NewGalleryQueryBuilder()
func (r *mutationResolver) GalleriesUpdate(ctx context.Context, input []*models.GalleryUpdateInput) (ret []*models.Gallery, err error) {
inputMaps := getUpdateInputMaps(ctx)
// Start the transaction and save the gallery
if err := r.withTxn(ctx, func(repo models.Repository) error {
for i, gallery := range input {
translator := changesetTranslator{
inputMap: inputMaps[i],
}
thisGallery, err := r.galleryUpdate(*gallery, translator, repo)
if err != nil {
return err
}
ret = append(ret, thisGallery)
}
return nil
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *mutationResolver) galleryUpdate(input models.GalleryUpdateInput, translator changesetTranslator, repo models.Repository) (*models.Gallery, error) {
qb := repo.Gallery()
// Populate gallery from the input
galleryID, _ := strconv.Atoi(input.ID)
originalGallery, err := qb.Find(galleryID, nil)
galleryID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, err
}
originalGallery, err := qb.Find(galleryID)
if err != nil {
return nil, err
}
@ -176,6 +182,7 @@ func (r *mutationResolver) galleryUpdate(input models.GalleryUpdateInput, tx *sq
ID: galleryID,
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
}
if input.Title != nil {
// ensure title is not empty
if *input.Title == "" {
@ -190,65 +197,40 @@ func (r *mutationResolver) galleryUpdate(input models.GalleryUpdateInput, tx *sq
updatedGallery.Title = &sql.NullString{String: *input.Title, Valid: true}
}
if input.Details != nil {
updatedGallery.Details = &sql.NullString{String: *input.Details, Valid: true}
}
if input.URL != nil {
updatedGallery.URL = &sql.NullString{String: *input.URL, Valid: true}
}
if input.Date != nil {
updatedGallery.Date = &models.SQLiteDate{String: *input.Date, Valid: true}
}
if input.Rating != nil {
updatedGallery.Rating = &sql.NullInt64{Int64: int64(*input.Rating), Valid: true}
} else {
// rating must be nullable
updatedGallery.Rating = &sql.NullInt64{Valid: false}
}
if input.StudioID != nil {
studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64)
updatedGallery.StudioID = &sql.NullInt64{Int64: studioID, Valid: true}
} else {
// studio must be nullable
updatedGallery.StudioID = &sql.NullInt64{Valid: false}
}
updatedGallery.Details = translator.nullString(input.Details, "details")
updatedGallery.URL = translator.nullString(input.URL, "url")
updatedGallery.Date = translator.sqliteDate(input.Date, "date")
updatedGallery.Rating = translator.nullInt64(input.Rating, "rating")
updatedGallery.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
updatedGallery.Organized = input.Organized
// gallery scene is set from the scene only
jqb := models.NewJoinsQueryBuilder()
gallery, err := qb.UpdatePartial(updatedGallery, tx)
gallery, err := qb.UpdatePartial(updatedGallery)
if err != nil {
return nil, err
}
// Save the performers
var performerJoins []models.PerformersGalleries
for _, pid := range input.PerformerIds {
performerID, _ := strconv.Atoi(pid)
performerJoin := models.PerformersGalleries{
PerformerID: performerID,
GalleryID: galleryID,
if translator.hasField("performer_ids") {
if err := r.updateGalleryPerformers(qb, galleryID, input.PerformerIds); err != nil {
return nil, err
}
performerJoins = append(performerJoins, performerJoin)
}
if err := jqb.UpdatePerformersGalleries(galleryID, performerJoins, tx); err != nil {
return nil, err
}
// Save the tags
var tagJoins []models.GalleriesTags
for _, tid := range input.TagIds {
tagID, _ := strconv.Atoi(tid)
tagJoin := models.GalleriesTags{
GalleryID: galleryID,
TagID: tagID,
if translator.hasField("tag_ids") {
if err := r.updateGalleryTags(qb, galleryID, input.TagIds); err != nil {
return nil, err
}
tagJoins = append(tagJoins, tagJoin)
}
if err := jqb.UpdateGalleriesTags(galleryID, tagJoins, tx); err != nil {
return nil, err
// Save the scenes
if translator.hasField("scene_ids") {
if err := r.updateGalleryScenes(qb, galleryID, input.SceneIds); err != nil {
return nil, err
}
}
return gallery, nil
@ -258,227 +240,181 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input models.B
// Populate gallery from the input
updatedTime := time.Now()
// Start the transaction and save the galleries
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewGalleryQueryBuilder()
jqb := models.NewJoinsQueryBuilder()
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
updatedGallery := models.GalleryPartial{
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
}
if input.Details != nil {
updatedGallery.Details = &sql.NullString{String: *input.Details, Valid: true}
}
if input.URL != nil {
updatedGallery.URL = &sql.NullString{String: *input.URL, Valid: true}
}
if input.Date != nil {
updatedGallery.Date = &models.SQLiteDate{String: *input.Date, Valid: true}
}
if input.Rating != nil {
// a rating of 0 means unset the rating
if *input.Rating == 0 {
updatedGallery.Rating = &sql.NullInt64{Int64: 0, Valid: false}
} else {
updatedGallery.Rating = &sql.NullInt64{Int64: int64(*input.Rating), Valid: true}
}
}
if input.StudioID != nil {
// empty string means unset the studio
if *input.StudioID == "" {
updatedGallery.StudioID = &sql.NullInt64{Int64: 0, Valid: false}
} else {
studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64)
updatedGallery.StudioID = &sql.NullInt64{Int64: studioID, Valid: true}
}
}
if input.SceneID != nil {
// empty string means unset the scene
if *input.SceneID == "" {
updatedGallery.SceneID = &sql.NullInt64{Int64: 0, Valid: false}
} else {
sceneID, _ := strconv.ParseInt(*input.SceneID, 10, 64)
updatedGallery.SceneID = &sql.NullInt64{Int64: sceneID, Valid: true}
}
}
updatedGallery.Details = translator.nullString(input.Details, "details")
updatedGallery.URL = translator.nullString(input.URL, "url")
updatedGallery.Date = translator.sqliteDate(input.Date, "date")
updatedGallery.Rating = translator.nullInt64(input.Rating, "rating")
updatedGallery.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
updatedGallery.Organized = input.Organized
ret := []*models.Gallery{}
for _, galleryIDStr := range input.Ids {
galleryID, _ := strconv.Atoi(galleryIDStr)
updatedGallery.ID = galleryID
// Start the transaction and save the galleries
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Gallery()
gallery, err := qb.UpdatePartial(updatedGallery, tx)
if err != nil {
_ = tx.Rollback()
return nil, err
}
for _, galleryIDStr := range input.Ids {
galleryID, _ := strconv.Atoi(galleryIDStr)
updatedGallery.ID = galleryID
ret = append(ret, gallery)
// Save the performers
if wasFieldIncluded(ctx, "performer_ids") {
performerIDs, err := adjustGalleryPerformerIDs(tx, galleryID, *input.PerformerIds)
gallery, err := qb.UpdatePartial(updatedGallery)
if err != nil {
_ = tx.Rollback()
return nil, err
return err
}
var performerJoins []models.PerformersGalleries
for _, performerID := range performerIDs {
performerJoin := models.PerformersGalleries{
PerformerID: performerID,
GalleryID: galleryID,
ret = append(ret, gallery)
// Save the performers
if translator.hasField("performer_ids") {
performerIDs, err := adjustGalleryPerformerIDs(qb, galleryID, *input.PerformerIds)
if err != nil {
return err
}
if err := qb.UpdatePerformers(galleryID, performerIDs); err != nil {
return err
}
performerJoins = append(performerJoins, performerJoin)
}
if err := jqb.UpdatePerformersGalleries(galleryID, performerJoins, tx); err != nil {
_ = tx.Rollback()
return nil, err
// Save the tags
if translator.hasField("tag_ids") {
tagIDs, err := adjustGalleryTagIDs(qb, galleryID, *input.TagIds)
if err != nil {
return err
}
if err := qb.UpdateTags(galleryID, tagIDs); err != nil {
return err
}
}
// Save the scenes
if translator.hasField("scene_ids") {
sceneIDs, err := adjustGallerySceneIDs(qb, galleryID, *input.SceneIds)
if err != nil {
return err
}
if err := qb.UpdateScenes(galleryID, sceneIDs); err != nil {
return err
}
}
}
// Save the tags
if wasFieldIncluded(ctx, "tag_ids") {
tagIDs, err := adjustGalleryTagIDs(tx, galleryID, *input.TagIds)
if err != nil {
_ = tx.Rollback()
return nil, err
}
var tagJoins []models.GalleriesTags
for _, tagID := range tagIDs {
tagJoin := models.GalleriesTags{
GalleryID: galleryID,
TagID: tagID,
}
tagJoins = append(tagJoins, tagJoin)
}
if err := jqb.UpdateGalleriesTags(galleryID, tagJoins, tx); err != nil {
_ = tx.Rollback()
return nil, err
}
}
}
// Commit
if err := tx.Commit(); err != nil {
return nil
}); err != nil {
return nil, err
}
return ret, nil
}
func adjustGalleryPerformerIDs(tx *sqlx.Tx, galleryID int, ids models.BulkUpdateIds) ([]int, error) {
var ret []int
jqb := models.NewJoinsQueryBuilder()
if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
// adding to the joins
performerJoins, err := jqb.GetGalleryPerformers(galleryID, tx)
if err != nil {
return nil, err
}
for _, join := range performerJoins {
ret = append(ret, join.PerformerID)
}
func adjustGalleryPerformerIDs(qb models.GalleryReader, galleryID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetPerformerIDs(galleryID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func adjustGalleryTagIDs(tx *sqlx.Tx, galleryID int, ids models.BulkUpdateIds) ([]int, error) {
var ret []int
func adjustGalleryTagIDs(qb models.GalleryReader, galleryID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetTagIDs(galleryID)
if err != nil {
return nil, err
}
jqb := models.NewJoinsQueryBuilder()
if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
// adding to the joins
tagJoins, err := jqb.GetGalleryTags(galleryID, tx)
return adjustIDs(ret, ids), nil
}
if err != nil {
return nil, err
}
for _, join := range tagJoins {
ret = append(ret, join.TagID)
}
func adjustGallerySceneIDs(qb models.GalleryReader, galleryID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetSceneIDs(galleryID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.GalleryDestroyInput) (bool, error) {
qb := models.NewGalleryQueryBuilder()
iqb := models.NewImageQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)
galleryIDs, err := utils.StringSliceToIntSlice(input.Ids)
if err != nil {
return false, err
}
var galleries []*models.Gallery
var imgsToPostProcess []*models.Image
var imgsToDelete []*models.Image
for _, id := range input.Ids {
galleryID, _ := strconv.Atoi(id)
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Gallery()
iqb := repo.Image()
for _, id := range galleryIDs {
gallery, err := qb.Find(id)
if err != nil {
return err
}
if gallery == nil {
return fmt.Errorf("gallery with id %d not found", id)
}
gallery, err := qb.Find(galleryID, tx)
if gallery != nil {
galleries = append(galleries, gallery)
}
err = qb.Destroy(galleryID, tx)
if err != nil {
tx.Rollback()
return false, err
}
// if this is a zip-based gallery, delete the images as well
if gallery.Zip {
imgs, err := iqb.FindByGalleryID(galleryID)
if err != nil {
tx.Rollback()
return false, err
}
for _, img := range imgs {
err = iqb.Destroy(img.ID, tx)
// if this is a zip-based gallery, delete the images as well, before the gallery itself
if gallery.Zip {
imgs, err := iqb.FindByGalleryID(id)
if err != nil {
tx.Rollback()
return false, err
return err
}
imgsToPostProcess = append(imgsToPostProcess, img)
}
} else if input.DeleteFile != nil && *input.DeleteFile {
// Delete image if it is only attached to this gallery
imgs, err := iqb.FindByGalleryID(galleryID)
if err != nil {
tx.Rollback()
return false, err
}
for _, img := range imgs {
imgGalleries, err := qb.FindByImageID(img.ID, tx)
if err != nil {
tx.Rollback()
return false, err
}
if len(imgGalleries) == 0 {
err = iqb.Destroy(img.ID, tx)
if err != nil {
tx.Rollback()
return false, err
for _, img := range imgs {
if err := iqb.Destroy(img.ID); err != nil {
return err
}
imgsToDelete = append(imgsToDelete, img)
imgsToPostProcess = append(imgsToPostProcess, img)
}
} else if input.DeleteFile != nil && *input.DeleteFile {
// Delete image if it is only attached to this gallery
imgs, err := iqb.FindByGalleryID(id)
if err != nil {
return err
}
for _, img := range imgs {
imgGalleries, err := qb.FindByImageID(img.ID)
if err != nil {
return err
}
if len(imgGalleries) == 0 {
if err := iqb.Destroy(img.ID); err != nil {
return err
}
imgsToDelete = append(imgsToDelete, img)
imgsToPostProcess = append(imgsToPostProcess, img)
}
}
}
if err := qb.Destroy(id); err != nil {
return err
}
}
}
if err := tx.Commit(); err != nil {
return nil
}); err != nil {
return false, err
}
@ -506,34 +442,39 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
}
func (r *mutationResolver) AddGalleryImages(ctx context.Context, input models.GalleryAddInput) (bool, error) {
galleryID, _ := strconv.Atoi(input.GalleryID)
qb := models.NewGalleryQueryBuilder()
gallery, err := qb.Find(galleryID, nil)
galleryID, err := strconv.Atoi(input.GalleryID)
if err != nil {
return false, err
}
if gallery == nil {
return false, errors.New("gallery not found")
imageIDs, err := utils.StringSliceToIntSlice(input.ImageIds)
if err != nil {
return false, err
}
if gallery.Zip {
return false, errors.New("cannot modify zip gallery images")
}
jqb := models.NewJoinsQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)
for _, id := range input.ImageIds {
imageID, _ := strconv.Atoi(id)
_, err := jqb.AddImageGallery(imageID, galleryID, tx)
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Gallery()
gallery, err := qb.Find(galleryID)
if err != nil {
tx.Rollback()
return false, err
return err
}
}
if err := tx.Commit(); err != nil {
if gallery == nil {
return errors.New("gallery not found")
}
if gallery.Zip {
return errors.New("cannot modify zip gallery images")
}
newIDs, err := qb.GetImageIDs(galleryID)
if err != nil {
return err
}
newIDs = utils.IntAppendUniques(newIDs, imageIDs)
return qb.UpdateImages(galleryID, newIDs)
}); err != nil {
return false, err
}
@ -541,34 +482,39 @@ func (r *mutationResolver) AddGalleryImages(ctx context.Context, input models.Ga
}
func (r *mutationResolver) RemoveGalleryImages(ctx context.Context, input models.GalleryRemoveInput) (bool, error) {
galleryID, _ := strconv.Atoi(input.GalleryID)
qb := models.NewGalleryQueryBuilder()
gallery, err := qb.Find(galleryID, nil)
galleryID, err := strconv.Atoi(input.GalleryID)
if err != nil {
return false, err
}
if gallery == nil {
return false, errors.New("gallery not found")
imageIDs, err := utils.StringSliceToIntSlice(input.ImageIds)
if err != nil {
return false, err
}
if gallery.Zip {
return false, errors.New("cannot modify zip gallery images")
}
jqb := models.NewJoinsQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)
for _, id := range input.ImageIds {
imageID, _ := strconv.Atoi(id)
_, err := jqb.RemoveImageGallery(imageID, galleryID, tx)
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Gallery()
gallery, err := qb.Find(galleryID)
if err != nil {
tx.Rollback()
return false, err
return err
}
}
if err := tx.Commit(); err != nil {
if gallery == nil {
return errors.New("gallery not found")
}
if gallery.Zip {
return errors.New("cannot modify zip gallery images")
}
newIDs, err := qb.GetImageIDs(galleryID)
if err != nil {
return err
}
newIDs = utils.IntExclude(newIDs, imageIDs)
return qb.UpdateImages(galleryID, newIDs)
}); err != nil {
return false, err
}
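GalleryDestroy, AddGalleryImages and RemoveGalleryImages above lean on utils.StringSliceToIntSlice, utils.IntAppendUniques and utils.IntExclude, none of which appear in this hunk. The sketch below shows the behaviour their call sites imply; the lowercase names mark it as a local illustration rather than the pkg/utils implementation.

package example

import "strconv"

// stringSliceToIntSlice converts GraphQL string IDs to ints, failing fast on
// the first bad value so nothing is written inside the transaction.
func stringSliceToIntSlice(ss []string) ([]int, error) {
	ret := make([]int, 0, len(ss))
	for _, s := range ss {
		i, err := strconv.Atoi(s)
		if err != nil {
			return nil, err
		}
		ret = append(ret, i)
	}
	return ret, nil
}

// intAppendUniques appends each value in toAdd that is not already in vs,
// which is how AddGalleryImages avoids duplicate image joins.
func intAppendUniques(vs []int, toAdd []int) []int {
	for _, v := range toAdd {
		if !intInclude(vs, v) {
			vs = append(vs, v)
		}
	}
	return vs
}

// intExclude returns vs without any value present in toRemove, which is how
// RemoveGalleryImages drops image joins.
func intExclude(vs []int, toRemove []int) []int {
	var ret []int
	for _, v := range vs {
		if !intInclude(toRemove, v) {
			ret = append(ret, v)
		}
	}
	return ret
}

func intInclude(vs []int, v int) bool {
	for _, vv := range vs {
		if vv == v {
			return true
		}
	}
	return false
}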

View file

@ -2,323 +2,257 @@ package api
import (
"context"
"database/sql"
"fmt"
"strconv"
"time"
"github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
func (r *mutationResolver) ImageUpdate(ctx context.Context, input models.ImageUpdateInput) (*models.Image, error) {
// Start the transaction and save the image
tx := database.DB.MustBeginTx(ctx, nil)
ret, err := r.imageUpdate(input, tx)
if err != nil {
_ = tx.Rollback()
return nil, err
func (r *mutationResolver) ImageUpdate(ctx context.Context, input models.ImageUpdateInput) (ret *models.Image, err error) {
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
// Commit
if err := tx.Commit(); err != nil {
// Start the transaction and save the image
if err := r.withTxn(ctx, func(repo models.Repository) error {
ret, err = r.imageUpdate(input, translator, repo)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*models.ImageUpdateInput) ([]*models.Image, error) {
func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*models.ImageUpdateInput) (ret []*models.Image, err error) {
inputMaps := getUpdateInputMaps(ctx)
// Start the transaction and save the image
tx := database.DB.MustBeginTx(ctx, nil)
if err := r.withTxn(ctx, func(repo models.Repository) error {
for i, image := range input {
translator := changesetTranslator{
inputMap: inputMaps[i],
}
var ret []*models.Image
thisImage, err := r.imageUpdate(*image, translator, repo)
if err != nil {
return err
}
for _, image := range input {
thisImage, err := r.imageUpdate(*image, tx)
ret = append(ret, thisImage)
if err != nil {
_ = tx.Rollback()
return nil, err
ret = append(ret, thisImage)
}
}
// Commit
if err := tx.Commit(); err != nil {
return nil
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *mutationResolver) imageUpdate(input models.ImageUpdateInput, tx *sqlx.Tx) (*models.Image, error) {
func (r *mutationResolver) imageUpdate(input models.ImageUpdateInput, translator changesetTranslator, repo models.Repository) (*models.Image, error) {
// Populate image from the input
imageID, _ := strconv.Atoi(input.ID)
imageID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, err
}
updatedTime := time.Now()
updatedImage := models.ImagePartial{
ID: imageID,
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
}
if input.Title != nil {
updatedImage.Title = &sql.NullString{String: *input.Title, Valid: true}
}
if input.Rating != nil {
updatedImage.Rating = &sql.NullInt64{Int64: int64(*input.Rating), Valid: true}
} else {
// rating must be nullable
updatedImage.Rating = &sql.NullInt64{Valid: false}
}
updatedImage.Title = translator.nullString(input.Title, "title")
updatedImage.Rating = translator.nullInt64(input.Rating, "rating")
updatedImage.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
updatedImage.Organized = input.Organized
if input.StudioID != nil {
studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64)
updatedImage.StudioID = &sql.NullInt64{Int64: studioID, Valid: true}
} else {
// studio must be nullable
updatedImage.StudioID = &sql.NullInt64{Valid: false}
}
qb := models.NewImageQueryBuilder()
jqb := models.NewJoinsQueryBuilder()
image, err := qb.Update(updatedImage, tx)
qb := repo.Image()
image, err := qb.Update(updatedImage)
if err != nil {
return nil, err
}
// don't set the galleries directly. Use add/remove gallery images interface instead
if translator.hasField("gallery_ids") {
if err := r.updateImageGalleries(qb, imageID, input.GalleryIds); err != nil {
return nil, err
}
}
// Save the performers
var performerJoins []models.PerformersImages
for _, pid := range input.PerformerIds {
performerID, _ := strconv.Atoi(pid)
performerJoin := models.PerformersImages{
PerformerID: performerID,
ImageID: imageID,
if translator.hasField("performer_ids") {
if err := r.updateImagePerformers(qb, imageID, input.PerformerIds); err != nil {
return nil, err
}
performerJoins = append(performerJoins, performerJoin)
}
if err := jqb.UpdatePerformersImages(imageID, performerJoins, tx); err != nil {
return nil, err
}
// Save the tags
var tagJoins []models.ImagesTags
for _, tid := range input.TagIds {
tagID, _ := strconv.Atoi(tid)
tagJoin := models.ImagesTags{
ImageID: imageID,
TagID: tagID,
if translator.hasField("tag_ids") {
if err := r.updateImageTags(qb, imageID, input.TagIds); err != nil {
return nil, err
}
tagJoins = append(tagJoins, tagJoin)
}
if err := jqb.UpdateImagesTags(imageID, tagJoins, tx); err != nil {
return nil, err
}
return image, nil
}
func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input models.BulkImageUpdateInput) ([]*models.Image, error) {
func (r *mutationResolver) updateImageGalleries(qb models.ImageReaderWriter, imageID int, galleryIDs []string) error {
ids, err := utils.StringSliceToIntSlice(galleryIDs)
if err != nil {
return err
}
return qb.UpdateGalleries(imageID, ids)
}
func (r *mutationResolver) updateImagePerformers(qb models.ImageReaderWriter, imageID int, performerIDs []string) error {
ids, err := utils.StringSliceToIntSlice(performerIDs)
if err != nil {
return err
}
return qb.UpdatePerformers(imageID, ids)
}
func (r *mutationResolver) updateImageTags(qb models.ImageReaderWriter, imageID int, tagsIDs []string) error {
ids, err := utils.StringSliceToIntSlice(tagsIDs)
if err != nil {
return err
}
return qb.UpdateTags(imageID, ids)
}
func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input models.BulkImageUpdateInput) (ret []*models.Image, err error) {
imageIDs, err := utils.StringSliceToIntSlice(input.Ids)
if err != nil {
return nil, err
}
// Populate image from the input
updatedTime := time.Now()
// Start the transaction and save the images
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewImageQueryBuilder()
jqb := models.NewJoinsQueryBuilder()
updatedImage := models.ImagePartial{
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
}
if input.Title != nil {
updatedImage.Title = &sql.NullString{String: *input.Title, Valid: true}
}
if input.Rating != nil {
// a rating of 0 means unset the rating
if *input.Rating == 0 {
updatedImage.Rating = &sql.NullInt64{Int64: 0, Valid: false}
} else {
updatedImage.Rating = &sql.NullInt64{Int64: int64(*input.Rating), Valid: true}
}
}
if input.StudioID != nil {
// empty string means unset the studio
if *input.StudioID == "" {
updatedImage.StudioID = &sql.NullInt64{Int64: 0, Valid: false}
} else {
studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64)
updatedImage.StudioID = &sql.NullInt64{Int64: studioID, Valid: true}
}
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
ret := []*models.Image{}
updatedImage.Title = translator.nullString(input.Title, "title")
updatedImage.Rating = translator.nullInt64(input.Rating, "rating")
updatedImage.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
updatedImage.Organized = input.Organized
for _, imageIDStr := range input.Ids {
imageID, _ := strconv.Atoi(imageIDStr)
updatedImage.ID = imageID
// Start the transaction and save the images
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Image()
image, err := qb.Update(updatedImage, tx)
if err != nil {
_ = tx.Rollback()
return nil, err
}
for _, imageID := range imageIDs {
updatedImage.ID = imageID
ret = append(ret, image)
// Save the galleries
if wasFieldIncluded(ctx, "gallery_ids") {
galleryIDs, err := adjustImageGalleryIDs(tx, imageID, *input.GalleryIds)
image, err := qb.Update(updatedImage)
if err != nil {
_ = tx.Rollback()
return nil, err
return err
}
var galleryJoins []models.GalleriesImages
for _, gid := range galleryIDs {
galleryJoin := models.GalleriesImages{
GalleryID: gid,
ImageID: imageID,
ret = append(ret, image)
// Save the galleries
if translator.hasField("gallery_ids") {
galleryIDs, err := adjustImageGalleryIDs(qb, imageID, *input.GalleryIds)
if err != nil {
return err
}
if err := qb.UpdateGalleries(imageID, galleryIDs); err != nil {
return err
}
galleryJoins = append(galleryJoins, galleryJoin)
}
if err := jqb.UpdateGalleriesImages(imageID, galleryJoins, tx); err != nil {
return nil, err
// Save the performers
if translator.hasField("performer_ids") {
performerIDs, err := adjustImagePerformerIDs(qb, imageID, *input.PerformerIds)
if err != nil {
return err
}
if err := qb.UpdatePerformers(imageID, performerIDs); err != nil {
return err
}
}
// Save the tags
if translator.hasField("tag_ids") {
tagIDs, err := adjustImageTagIDs(qb, imageID, *input.TagIds)
if err != nil {
return err
}
if err := qb.UpdateTags(imageID, tagIDs); err != nil {
return err
}
}
}
// Save the performers
if wasFieldIncluded(ctx, "performer_ids") {
performerIDs, err := adjustImagePerformerIDs(tx, imageID, *input.PerformerIds)
if err != nil {
_ = tx.Rollback()
return nil, err
}
var performerJoins []models.PerformersImages
for _, performerID := range performerIDs {
performerJoin := models.PerformersImages{
PerformerID: performerID,
ImageID: imageID,
}
performerJoins = append(performerJoins, performerJoin)
}
if err := jqb.UpdatePerformersImages(imageID, performerJoins, tx); err != nil {
_ = tx.Rollback()
return nil, err
}
}
// Save the tags
if wasFieldIncluded(ctx, "tag_ids") {
tagIDs, err := adjustImageTagIDs(tx, imageID, *input.TagIds)
if err != nil {
_ = tx.Rollback()
return nil, err
}
var tagJoins []models.ImagesTags
for _, tagID := range tagIDs {
tagJoin := models.ImagesTags{
ImageID: imageID,
TagID: tagID,
}
tagJoins = append(tagJoins, tagJoin)
}
if err := jqb.UpdateImagesTags(imageID, tagJoins, tx); err != nil {
_ = tx.Rollback()
return nil, err
}
}
}
// Commit
if err := tx.Commit(); err != nil {
return nil
}); err != nil {
return nil, err
}
return ret, nil
}
func adjustImageGalleryIDs(tx *sqlx.Tx, imageID int, ids models.BulkUpdateIds) ([]int, error) {
var ret []int
jqb := models.NewJoinsQueryBuilder()
if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
// adding to the joins
galleryJoins, err := jqb.GetImageGalleries(imageID, tx)
if err != nil {
return nil, err
}
for _, join := range galleryJoins {
ret = append(ret, join.GalleryID)
}
}
return adjustIDs(ret, ids), nil
}
func adjustImagePerformerIDs(tx *sqlx.Tx, imageID int, ids models.BulkUpdateIds) ([]int, error) {
var ret []int
jqb := models.NewJoinsQueryBuilder()
if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
// adding to the joins
performerJoins, err := jqb.GetImagePerformers(imageID, tx)
if err != nil {
return nil, err
}
for _, join := range performerJoins {
ret = append(ret, join.PerformerID)
}
}
return adjustIDs(ret, ids), nil
}
func adjustImageTagIDs(tx *sqlx.Tx, imageID int, ids models.BulkUpdateIds) ([]int, error) {
var ret []int
jqb := models.NewJoinsQueryBuilder()
if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
// adding to the joins
tagJoins, err := jqb.GetImageTags(imageID, tx)
if err != nil {
return nil, err
}
for _, join := range tagJoins {
ret = append(ret, join.TagID)
}
}
return adjustIDs(ret, ids), nil
}
func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageDestroyInput) (bool, error) {
qb := models.NewImageQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)
imageID, _ := strconv.Atoi(input.ID)
image, err := qb.Find(imageID)
err = qb.Destroy(imageID, tx)
func adjustImageGalleryIDs(qb models.ImageReader, imageID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetGalleryIDs(imageID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func adjustImagePerformerIDs(qb models.ImageReader, imageID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetPerformerIDs(imageID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func adjustImageTagIDs(qb models.ImageReader, imageID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetTagIDs(imageID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageDestroyInput) (ret bool, err error) {
imageID, err := strconv.Atoi(input.ID)
if err != nil {
tx.Rollback()
return false, err
}
if err := tx.Commit(); err != nil {
var image *models.Image
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Image()
image, err = qb.Find(imageID)
if err != nil {
return err
}
if image == nil {
return fmt.Errorf("image with id %d not found", imageID)
}
return qb.Destroy(imageID)
}); err != nil {
return false, err
}
@ -337,27 +271,35 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
return true, nil
}
func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.ImagesDestroyInput) (bool, error) {
qb := models.NewImageQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)
var images []*models.Image
for _, id := range input.Ids {
imageID, _ := strconv.Atoi(id)
image, err := qb.Find(imageID)
if image != nil {
images = append(images, image)
}
err = qb.Destroy(imageID, tx)
if err != nil {
tx.Rollback()
return false, err
}
func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.ImagesDestroyInput) (ret bool, err error) {
imageIDs, err := utils.StringSliceToIntSlice(input.Ids)
if err != nil {
return false, err
}
if err := tx.Commit(); err != nil {
var images []*models.Image
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Image()
for _, imageID := range imageIDs {
image, err := qb.Find(imageID)
if err != nil {
return err
}
if image == nil {
return fmt.Errorf("image with id %d not found", imageID)
}
images = append(images, image)
if err := qb.Destroy(imageID); err != nil {
return err
}
}
return nil
}); err != nil {
return false, err
}
@ -378,62 +320,56 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
return true, nil
}
func (r *mutationResolver) ImageIncrementO(ctx context.Context, id string) (int, error) {
imageID, _ := strconv.Atoi(id)
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewImageQueryBuilder()
newVal, err := qb.IncrementOCounter(imageID, tx)
func (r *mutationResolver) ImageIncrementO(ctx context.Context, id string) (ret int, err error) {
imageID, err := strconv.Atoi(id)
if err != nil {
_ = tx.Rollback()
return 0, err
}
// Commit
if err := tx.Commit(); err != nil {
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Image()
ret, err = qb.IncrementOCounter(imageID)
return err
}); err != nil {
return 0, err
}
return newVal, nil
return ret, nil
}
func (r *mutationResolver) ImageDecrementO(ctx context.Context, id string) (int, error) {
imageID, _ := strconv.Atoi(id)
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewImageQueryBuilder()
newVal, err := qb.DecrementOCounter(imageID, tx)
func (r *mutationResolver) ImageDecrementO(ctx context.Context, id string) (ret int, err error) {
imageID, err := strconv.Atoi(id)
if err != nil {
_ = tx.Rollback()
return 0, err
}
// Commit
if err := tx.Commit(); err != nil {
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Image()
ret, err = qb.DecrementOCounter(imageID)
return err
}); err != nil {
return 0, err
}
return newVal, nil
return ret, nil
}
func (r *mutationResolver) ImageResetO(ctx context.Context, id string) (int, error) {
imageID, _ := strconv.Atoi(id)
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewImageQueryBuilder()
newVal, err := qb.ResetOCounter(imageID, tx)
func (r *mutationResolver) ImageResetO(ctx context.Context, id string) (ret int, err error) {
imageID, err := strconv.Atoi(id)
if err != nil {
_ = tx.Rollback()
return 0, err
}
// Commit
if err := tx.Commit(); err != nil {
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Image()
ret, err = qb.ResetOCounter(imageID)
return err
}); err != nil {
return 0, err
}
return newVal, nil
return ret, nil
}
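BulkImageUpdate above, like BulkGalleryUpdate earlier, reads the object's current IDs and passes them through adjustIDs together with the requested mode; adjustIDs itself is outside this hunk. The sketch below shows the assumed merge behaviour, with simplified types (the real models.BulkUpdateIds carries string IDs and a generated enum).

package example

type BulkUpdateIDMode string

const (
	BulkUpdateIDModeSet    BulkUpdateIDMode = "SET"
	BulkUpdateIDModeAdd    BulkUpdateIDMode = "ADD"
	BulkUpdateIDModeRemove BulkUpdateIDMode = "REMOVE"
)

type BulkUpdateIds struct {
	Ids  []int
	Mode BulkUpdateIDMode
}

// adjustIDs applies the bulk-update mode to the IDs already attached to the
// object: SET replaces them, ADD appends missing ones, REMOVE drops matches.
func adjustIDs(existing []int, ids BulkUpdateIds) []int {
	switch ids.Mode {
	case BulkUpdateIDModeSet:
		return ids.Ids
	case BulkUpdateIDModeAdd:
		for _, id := range ids.Ids {
			found := false
			for _, e := range existing {
				if e == id {
					found = true
					break
				}
			}
			if !found {
				existing = append(existing, id)
			}
		}
		return existing
	case BulkUpdateIDModeRemove:
		var ret []int
		for _, e := range existing {
			remove := false
			for _, id := range ids.Ids {
				if e == id {
					remove = true
					break
				}
			}
			if !remove {
				ret = append(ret, e)
			}
		}
		return ret
	}
	return existing
}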

View file

@ -2,11 +2,16 @@ package api
import (
"context"
"io/ioutil"
"path/filepath"
"time"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
func (r *mutationResolver) MetadataScan(ctx context.Context, input models.ScanMetadataInput) (string, error) {
@ -20,8 +25,12 @@ func (r *mutationResolver) MetadataImport(ctx context.Context) (string, error) {
}
func (r *mutationResolver) ImportObjects(ctx context.Context, input models.ImportObjectsInput) (string, error) {
t := manager.CreateImportTask(config.GetVideoFileNamingAlgorithm(), input)
_, err := manager.GetInstance().RunSingleTask(t)
t, err := manager.CreateImportTask(config.GetVideoFileNamingAlgorithm(), input)
if err != nil {
return "", err
}
_, err = manager.GetInstance().RunSingleTask(t)
if err != nil {
return "", err
}
@ -61,12 +70,12 @@ func (r *mutationResolver) MetadataGenerate(ctx context.Context, input models.Ge
}
func (r *mutationResolver) MetadataAutoTag(ctx context.Context, input models.AutoTagMetadataInput) (string, error) {
manager.GetInstance().AutoTag(input.Performers, input.Studios, input.Tags)
manager.GetInstance().AutoTag(input)
return "todo", nil
}
func (r *mutationResolver) MetadataClean(ctx context.Context) (string, error) {
manager.GetInstance().Clean()
func (r *mutationResolver) MetadataClean(ctx context.Context, input models.CleanMetadataInput) (string, error) {
manager.GetInstance().Clean(input)
return "todo", nil
}
@ -89,3 +98,42 @@ func (r *mutationResolver) JobStatus(ctx context.Context) (*models.MetadataUpdat
func (r *mutationResolver) StopJob(ctx context.Context) (bool, error) {
return manager.GetInstance().Status.Stop(), nil
}
func (r *mutationResolver) BackupDatabase(ctx context.Context, input models.BackupDatabaseInput) (*string, error) {
// if download is true, then backup to temporary file and return a link
download := input.Download != nil && *input.Download
mgr := manager.GetInstance()
var backupPath string
if download {
utils.EnsureDir(mgr.Paths.Generated.Downloads)
f, err := ioutil.TempFile(mgr.Paths.Generated.Downloads, "backup*.sqlite")
if err != nil {
return nil, err
}
backupPath = f.Name()
f.Close()
} else {
backupPath = database.DatabaseBackupPath()
}
err := database.Backup(database.DB, backupPath)
if err != nil {
return nil, err
}
if download {
downloadHash := mgr.DownloadStore.RegisterFile(backupPath, "", false)
logger.Debugf("Generated backup file %s with hash %s", backupPath, downloadHash)
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
fn := filepath.Base(database.DatabaseBackupPath())
ret := baseURL + "/downloads/" + downloadHash + "/" + fn
return &ret, nil
} else {
logger.Infof("Successfully backed up database to: %s", backupPath)
}
return nil, nil
}
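BackupDatabase above delegates the actual copy to database.Backup, which is not included in this hunk. One plausible shape for that helper, using SQLite's VACUUM INTO (available since SQLite 3.27), is sketched below; treat it as an assumption about the approach, not the project's implementation.

package example

import (
	"database/sql"
	"fmt"
)

// backupSQLite writes a consistent snapshot of the open database to
// backupPath. The path is interpolated directly here for brevity; escape
// single quotes if it can contain untrusted input.
func backupSQLite(db *sql.DB, backupPath string) error {
	_, err := db.Exec(fmt.Sprintf("VACUUM INTO '%s'", backupPath))
	return err
}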

View file

@ -6,7 +6,6 @@ import (
"strconv"
"time"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
@ -85,24 +84,23 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr
}
// Start the transaction and save the movie
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewMovieQueryBuilder()
movie, err := qb.Create(newMovie, tx)
if err != nil {
_ = tx.Rollback()
return nil, err
}
// update image table
if len(frontimageData) > 0 {
if err := qb.UpdateMovieImages(movie.ID, frontimageData, backimageData, tx); err != nil {
_ = tx.Rollback()
return nil, err
var movie *models.Movie
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Movie()
movie, err = qb.Create(newMovie)
if err != nil {
return err
}
}
// Commit
if err := tx.Commit(); err != nil {
// update image table
if len(frontimageData) > 0 {
if err := qb.UpdateImages(movie.ID, frontimageData, backimageData); err != nil {
return err
}
}
return nil
}); err != nil {
return nil, err
}
@ -111,22 +109,29 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr
func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUpdateInput) (*models.Movie, error) {
// Populate movie from the input
movieID, _ := strconv.Atoi(input.ID)
movieID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, err
}
updatedMovie := models.MoviePartial{
ID: movieID,
UpdatedAt: &models.SQLiteTimestamp{Timestamp: time.Now()},
}
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
var frontimageData []byte
var err error
frontImageIncluded := wasFieldIncluded(ctx, "front_image")
frontImageIncluded := translator.hasField("front_image")
if input.FrontImage != nil {
_, frontimageData, err = utils.ProcessBase64Image(*input.FrontImage)
if err != nil {
return nil, err
}
}
backImageIncluded := wasFieldIncluded(ctx, "back_image")
backImageIncluded := translator.hasField("back_image")
var backimageData []byte
if input.BackImage != nil {
_, backimageData, err = utils.ProcessBase64Image(*input.BackImage)
@ -142,94 +147,59 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
updatedMovie.Checksum = &checksum
}
if input.Aliases != nil {
updatedMovie.Aliases = &sql.NullString{String: *input.Aliases, Valid: true}
}
if input.Duration != nil {
duration := int64(*input.Duration)
updatedMovie.Duration = &sql.NullInt64{Int64: duration, Valid: true}
}
if input.Date != nil {
updatedMovie.Date = &models.SQLiteDate{String: *input.Date, Valid: true}
}
if input.Rating != nil {
rating := int64(*input.Rating)
updatedMovie.Rating = &sql.NullInt64{Int64: rating, Valid: true}
} else {
// rating must be nullable
updatedMovie.Rating = &sql.NullInt64{Valid: false}
}
if input.StudioID != nil {
studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64)
updatedMovie.StudioID = &sql.NullInt64{Int64: studioID, Valid: true}
} else {
// studio must be nullable
updatedMovie.StudioID = &sql.NullInt64{Valid: false}
}
if input.Director != nil {
updatedMovie.Director = &sql.NullString{String: *input.Director, Valid: true}
}
if input.Synopsis != nil {
updatedMovie.Synopsis = &sql.NullString{String: *input.Synopsis, Valid: true}
}
if input.URL != nil {
updatedMovie.URL = &sql.NullString{String: *input.URL, Valid: true}
}
updatedMovie.Aliases = translator.nullString(input.Aliases, "aliases")
updatedMovie.Duration = translator.nullInt64(input.Duration, "duration")
updatedMovie.Date = translator.sqliteDate(input.Date, "date")
updatedMovie.Rating = translator.nullInt64(input.Rating, "rating")
updatedMovie.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
updatedMovie.Director = translator.nullString(input.Director, "director")
updatedMovie.Synopsis = translator.nullString(input.Synopsis, "synopsis")
updatedMovie.URL = translator.nullString(input.URL, "url")
// Start the transaction and save the movie
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewMovieQueryBuilder()
movie, err := qb.Update(updatedMovie, tx)
if err != nil {
_ = tx.Rollback()
return nil, err
}
// update image table
if frontImageIncluded || backImageIncluded {
if !frontImageIncluded {
frontimageData, err = qb.GetFrontImage(updatedMovie.ID, tx)
if err != nil {
tx.Rollback()
return nil, err
}
var movie *models.Movie
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Movie()
movie, err = qb.Update(updatedMovie)
if err != nil {
return err
}
if !backImageIncluded {
backimageData, err = qb.GetBackImage(updatedMovie.ID, tx)
if err != nil {
tx.Rollback()
return nil, err
// update image table
if frontImageIncluded || backImageIncluded {
if !frontImageIncluded {
frontimageData, err = qb.GetFrontImage(updatedMovie.ID)
if err != nil {
return err
}
}
if !backImageIncluded {
backimageData, err = qb.GetBackImage(updatedMovie.ID)
if err != nil {
return err
}
}
if len(frontimageData) == 0 && len(backimageData) == 0 {
// both images are being nulled. Destroy them.
if err := qb.DestroyImages(movie.ID); err != nil {
return err
}
} else {
// HACK - if front image is null and back image is not null, then set the front image
// to the default image since we can't have a null front image and a non-null back image
if frontimageData == nil && backimageData != nil {
_, frontimageData, _ = utils.ProcessBase64Image(models.DefaultMovieImage)
}
if err := qb.UpdateImages(movie.ID, frontimageData, backimageData); err != nil {
return err
}
}
}
if len(frontimageData) == 0 && len(backimageData) == 0 {
// both images are being nulled. Destroy them.
if err := qb.DestroyMovieImages(movie.ID, tx); err != nil {
tx.Rollback()
return nil, err
}
} else {
// HACK - if front image is null and back image is not null, then set the front image
// to the default image since we can't have a null front image and a non-null back image
if frontimageData == nil && backimageData != nil {
_, frontimageData, _ = utils.ProcessBase64Image(models.DefaultMovieImage)
}
if err := qb.UpdateMovieImages(movie.ID, frontimageData, backimageData, tx); err != nil {
_ = tx.Rollback()
return nil, err
}
}
}
// Commit
if err := tx.Commit(); err != nil {
return nil
}); err != nil {
return nil, err
}
@ -237,13 +207,35 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
}
func (r *mutationResolver) MovieDestroy(ctx context.Context, input models.MovieDestroyInput) (bool, error) {
qb := models.NewMovieQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)
if err := qb.Destroy(input.ID, tx); err != nil {
_ = tx.Rollback()
id, err := strconv.Atoi(input.ID)
if err != nil {
return false, err
}
if err := tx.Commit(); err != nil {
if err := r.withTxn(ctx, func(repo models.Repository) error {
return repo.Movie().Destroy(id)
}); err != nil {
return false, err
}
return true, nil
}
func (r *mutationResolver) MoviesDestroy(ctx context.Context, movieIDs []string) (bool, error) {
ids, err := utils.StringSliceToIntSlice(movieIDs)
if err != nil {
return false, err
}
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Movie()
for _, id := range ids {
if err := qb.Destroy(id); err != nil {
return err
}
}
return nil
}); err != nil {
return false, err
}
return true, nil

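MovieUpdate above, like the gallery, image, performer and scene updates, replaces the per-field nil checks with changesetTranslator calls. The translator is defined elsewhere in this changeset; the sketch below captures the behaviour its call sites imply — a column is only written when the client actually sent the field, and an explicit null clears it. Method names match the call sites, but the exact semantics are an assumption (sqliteDate, nullBool and nullInt64 follow the same pattern).

package example

import (
	"database/sql"
	"strconv"
)

// changesetTranslator wraps the raw GraphQL input map for one update so that
// omitted fields can be told apart from fields explicitly set to null.
type changesetTranslator struct {
	inputMap map[string]interface{}
}

// hasField reports whether the client included the named field at all.
func (t changesetTranslator) hasField(field string) bool {
	_, found := t.inputMap[field]
	return found
}

// nullString returns nil (leave the column untouched) when the field was
// omitted, an invalid NullString (write NULL) when it was sent as null, and
// a valid NullString otherwise.
func (t changesetTranslator) nullString(value *string, field string) *sql.NullString {
	if !t.hasField(field) {
		return nil
	}
	ret := &sql.NullString{}
	if value != nil {
		ret.String = *value
		ret.Valid = true
	}
	return ret
}

// nullInt64FromString does the same for string-typed IDs such as studio_id.
func (t changesetTranslator) nullInt64FromString(value *string, field string) *sql.NullInt64 {
	if !t.hasField(field) {
		return nil
	}
	ret := &sql.NullInt64{}
	if value != nil {
		ret.Int64, _ = strconv.ParseInt(*value, 10, 64)
		ret.Valid = true
	}
	return ret
}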
View file

@ -6,7 +6,6 @@ import (
"strconv"
"time"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
@ -86,41 +85,32 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
}
// Start the transaction and save the performer
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewPerformerQueryBuilder()
jqb := models.NewJoinsQueryBuilder()
var performer *models.Performer
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Performer()
performer, err := qb.Create(newPerformer, tx)
if err != nil {
_ = tx.Rollback()
return nil, err
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdatePerformerImage(performer.ID, imageData, tx); err != nil {
_ = tx.Rollback()
return nil, err
performer, err = qb.Create(newPerformer)
if err != nil {
return err
}
}
// Save the stash_ids
if input.StashIds != nil {
var stashIDJoins []models.StashID
for _, stashID := range input.StashIds {
newJoin := models.StashID{
StashID: stashID.StashID,
Endpoint: stashID.Endpoint,
// update image table
if len(imageData) > 0 {
if err := qb.UpdateImage(performer.ID, imageData); err != nil {
return err
}
stashIDJoins = append(stashIDJoins, newJoin)
}
if err := jqb.UpdatePerformerStashIDs(performer.ID, stashIDJoins, tx); err != nil {
return nil, err
}
}
// Commit
if err := tx.Commit(); err != nil {
// Save the stash_ids
if input.StashIds != nil {
stashIDJoins := models.StashIDsFromInput(input.StashIds)
if err := qb.UpdateStashIDs(performer.ID, stashIDJoins); err != nil {
return err
}
}
return nil
}); err != nil {
return nil, err
}
@ -130,119 +120,91 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.PerformerUpdateInput) (*models.Performer, error) {
// Populate performer from the input
performerID, _ := strconv.Atoi(input.ID)
updatedPerformer := models.Performer{
updatedPerformer := models.PerformerPartial{
ID: performerID,
UpdatedAt: models.SQLiteTimestamp{Timestamp: time.Now()},
UpdatedAt: &models.SQLiteTimestamp{Timestamp: time.Now()},
}
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
var imageData []byte
var err error
imageIncluded := wasFieldIncluded(ctx, "image")
imageIncluded := translator.hasField("image")
if input.Image != nil {
_, imageData, err = utils.ProcessBase64Image(*input.Image)
if err != nil {
return nil, err
}
}
if input.Name != nil {
// generate checksum from performer name rather than image
checksum := utils.MD5FromString(*input.Name)
updatedPerformer.Name = sql.NullString{String: *input.Name, Valid: true}
updatedPerformer.Checksum = checksum
updatedPerformer.Name = &sql.NullString{String: *input.Name, Valid: true}
updatedPerformer.Checksum = &checksum
}
if input.URL != nil {
updatedPerformer.URL = sql.NullString{String: *input.URL, Valid: true}
}
if input.Gender != nil {
updatedPerformer.Gender = sql.NullString{String: input.Gender.String(), Valid: true}
}
if input.Birthdate != nil {
updatedPerformer.Birthdate = models.SQLiteDate{String: *input.Birthdate, Valid: true}
}
if input.Ethnicity != nil {
updatedPerformer.Ethnicity = sql.NullString{String: *input.Ethnicity, Valid: true}
}
if input.Country != nil {
updatedPerformer.Country = sql.NullString{String: *input.Country, Valid: true}
}
if input.EyeColor != nil {
updatedPerformer.EyeColor = sql.NullString{String: *input.EyeColor, Valid: true}
}
if input.Height != nil {
updatedPerformer.Height = sql.NullString{String: *input.Height, Valid: true}
}
if input.Measurements != nil {
updatedPerformer.Measurements = sql.NullString{String: *input.Measurements, Valid: true}
}
if input.FakeTits != nil {
updatedPerformer.FakeTits = sql.NullString{String: *input.FakeTits, Valid: true}
}
if input.CareerLength != nil {
updatedPerformer.CareerLength = sql.NullString{String: *input.CareerLength, Valid: true}
}
if input.Tattoos != nil {
updatedPerformer.Tattoos = sql.NullString{String: *input.Tattoos, Valid: true}
}
if input.Piercings != nil {
updatedPerformer.Piercings = sql.NullString{String: *input.Piercings, Valid: true}
}
if input.Aliases != nil {
updatedPerformer.Aliases = sql.NullString{String: *input.Aliases, Valid: true}
}
if input.Twitter != nil {
updatedPerformer.Twitter = sql.NullString{String: *input.Twitter, Valid: true}
}
if input.Instagram != nil {
updatedPerformer.Instagram = sql.NullString{String: *input.Instagram, Valid: true}
}
if input.Favorite != nil {
updatedPerformer.Favorite = sql.NullBool{Bool: *input.Favorite, Valid: true}
} else {
updatedPerformer.Favorite = sql.NullBool{Bool: false, Valid: true}
updatedPerformer.URL = translator.nullString(input.URL, "url")
if translator.hasField("gender") {
if input.Gender != nil {
updatedPerformer.Gender = &sql.NullString{String: input.Gender.String(), Valid: true}
} else {
updatedPerformer.Gender = &sql.NullString{String: "", Valid: false}
}
}
updatedPerformer.Birthdate = translator.sqliteDate(input.Birthdate, "birthdate")
updatedPerformer.Country = translator.nullString(input.Country, "country")
updatedPerformer.EyeColor = translator.nullString(input.EyeColor, "eye_color")
updatedPerformer.Measurements = translator.nullString(input.Measurements, "measurements")
updatedPerformer.Height = translator.nullString(input.Height, "height")
updatedPerformer.Ethnicity = translator.nullString(input.Ethnicity, "ethnicity")
updatedPerformer.FakeTits = translator.nullString(input.FakeTits, "fake_tits")
updatedPerformer.CareerLength = translator.nullString(input.CareerLength, "career_length")
updatedPerformer.Tattoos = translator.nullString(input.Tattoos, "tattoos")
updatedPerformer.Piercings = translator.nullString(input.Piercings, "piercings")
updatedPerformer.Aliases = translator.nullString(input.Aliases, "aliases")
updatedPerformer.Twitter = translator.nullString(input.Twitter, "twitter")
updatedPerformer.Instagram = translator.nullString(input.Instagram, "instagram")
updatedPerformer.Favorite = translator.nullBool(input.Favorite, "favorite")
// Start the transaction and save the performer
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewPerformerQueryBuilder()
jqb := models.NewJoinsQueryBuilder()
var performer *models.Performer
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Performer()
performer, err := qb.Update(updatedPerformer, tx)
if err != nil {
tx.Rollback()
return nil, err
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdatePerformerImage(performer.ID, imageData, tx); err != nil {
tx.Rollback()
return nil, err
var err error
performer, err = qb.Update(updatedPerformer)
if err != nil {
return err
}
} else if imageIncluded {
// must be unsetting
if err := qb.DestroyPerformerImage(performer.ID, tx); err != nil {
tx.Rollback()
return nil, err
}
}
// Save the stash_ids
if input.StashIds != nil {
var stashIDJoins []models.StashID
for _, stashID := range input.StashIds {
newJoin := models.StashID{
StashID: stashID.StashID,
Endpoint: stashID.Endpoint,
// update image table
if len(imageData) > 0 {
if err := qb.UpdateImage(performer.ID, imageData); err != nil {
return err
}
} else if imageIncluded {
// must be unsetting
if err := qb.DestroyImage(performer.ID); err != nil {
return err
}
stashIDJoins = append(stashIDJoins, newJoin)
}
if err := jqb.UpdatePerformerStashIDs(performerID, stashIDJoins, tx); err != nil {
return nil, err
}
}
// Commit
if err := tx.Commit(); err != nil {
// Save the stash_ids
if translator.hasField("stash_ids") {
stashIDJoins := models.StashIDsFromInput(input.StashIds)
if err := qb.UpdateStashIDs(performerID, stashIDJoins); err != nil {
return err
}
}
return nil
}); err != nil {
return nil, err
}
@ -250,13 +212,35 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
}
func (r *mutationResolver) PerformerDestroy(ctx context.Context, input models.PerformerDestroyInput) (bool, error) {
qb := models.NewPerformerQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)
if err := qb.Destroy(input.ID, tx); err != nil {
_ = tx.Rollback()
id, err := strconv.Atoi(input.ID)
if err != nil {
return false, err
}
if err := tx.Commit(); err != nil {
if err := r.withTxn(ctx, func(repo models.Repository) error {
return repo.Performer().Destroy(id)
}); err != nil {
return false, err
}
return true, nil
}
func (r *mutationResolver) PerformersDestroy(ctx context.Context, performerIDs []string) (bool, error) {
ids, err := utils.StringSliceToIntSlice(performerIDs)
if err != nil {
return false, err
}
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Performer()
for _, id := range ids {
if err := qb.Destroy(id); err != nil {
return err
}
}
return nil
}); err != nil {
return false, err
}
return true, nil

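PerformerCreate and PerformerUpdate above swap the manual stash-ID join loop for models.StashIDsFromInput, which is not shown here. The sketch below illustrates the conversion it appears to perform; the input and output types are simplified stand-ins for the generated GraphQL input type and the stored models.StashID.

package example

type StashIDInput struct {
	StashID  string
	Endpoint string
}

type StashID struct {
	StashID  string
	Endpoint string
}

// stashIDsFromInput flattens the GraphQL input objects into the join rows
// stored against the performer.
func stashIDsFromInput(input []*StashIDInput) []StashID {
	var ret []StashID
	for _, id := range input {
		ret = append(ret, StashID{StashID: id.StashID, Endpoint: id.Endpoint})
	}
	return ret
}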
View file

@ -3,64 +3,64 @@ package api
import (
"context"
"database/sql"
"fmt"
"strconv"
"time"
"github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
func (r *mutationResolver) SceneUpdate(ctx context.Context, input models.SceneUpdateInput) (*models.Scene, error) {
// Start the transaction and save the scene
tx := database.DB.MustBeginTx(ctx, nil)
ret, err := r.sceneUpdate(input, tx)
if err != nil {
_ = tx.Rollback()
return nil, err
func (r *mutationResolver) SceneUpdate(ctx context.Context, input models.SceneUpdateInput) (ret *models.Scene, err error) {
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
// Commit
if err := tx.Commit(); err != nil {
// Start the transaction and save the scene
if err := r.withTxn(ctx, func(repo models.Repository) error {
ret, err = r.sceneUpdate(input, translator, repo)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *mutationResolver) ScenesUpdate(ctx context.Context, input []*models.SceneUpdateInput) ([]*models.Scene, error) {
func (r *mutationResolver) ScenesUpdate(ctx context.Context, input []*models.SceneUpdateInput) (ret []*models.Scene, err error) {
inputMaps := getUpdateInputMaps(ctx)
// Start the transaction and save the scene
tx := database.DB.MustBeginTx(ctx, nil)
if err := r.withTxn(ctx, func(repo models.Repository) error {
for i, scene := range input {
translator := changesetTranslator{
inputMap: inputMaps[i],
}
var ret []*models.Scene
thisScene, err := r.sceneUpdate(*scene, translator, repo)
ret = append(ret, thisScene)
for _, scene := range input {
thisScene, err := r.sceneUpdate(*scene, tx)
ret = append(ret, thisScene)
if err != nil {
_ = tx.Rollback()
return nil, err
if err != nil {
return err
}
}
}
// Commit
if err := tx.Commit(); err != nil {
return nil
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, tx *sqlx.Tx) (*models.Scene, error) {
func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, translator changesetTranslator, repo models.Repository) (*models.Scene, error) {
// Populate scene from the input
sceneID, _ := strconv.Atoi(input.ID)
sceneID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, err
}
var coverImageData []byte
@@ -69,18 +69,14 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, tx *sqlx.T
ID: sceneID,
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
}
if input.Title != nil {
updatedScene.Title = &sql.NullString{String: *input.Title, Valid: true}
}
if input.Details != nil {
updatedScene.Details = &sql.NullString{String: *input.Details, Valid: true}
}
if input.URL != nil {
updatedScene.URL = &sql.NullString{String: *input.URL, Valid: true}
}
if input.Date != nil {
updatedScene.Date = &models.SQLiteDate{String: *input.Date, Valid: true}
}
updatedScene.Title = translator.nullString(input.Title, "title")
updatedScene.Details = translator.nullString(input.Details, "details")
updatedScene.URL = translator.nullString(input.URL, "url")
updatedScene.Date = translator.sqliteDate(input.Date, "date")
updatedScene.Rating = translator.nullInt64(input.Rating, "rating")
updatedScene.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
updatedScene.Organized = input.Organized
if input.CoverImage != nil && *input.CoverImage != "" {
var err error
@@ -92,81 +88,85 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, tx *sqlx.T
// update the cover after updating the scene
}
if input.Rating != nil {
updatedScene.Rating = &sql.NullInt64{Int64: int64(*input.Rating), Valid: true}
} else {
// rating must be nullable
updatedScene.Rating = &sql.NullInt64{Valid: false}
}
if input.StudioID != nil {
studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64)
updatedScene.StudioID = &sql.NullInt64{Int64: studioID, Valid: true}
} else {
// studio must be nullable
updatedScene.StudioID = &sql.NullInt64{Valid: false}
}
qb := models.NewSceneQueryBuilder()
jqb := models.NewJoinsQueryBuilder()
scene, err := qb.Update(updatedScene, tx)
qb := repo.Scene()
scene, err := qb.Update(updatedScene)
if err != nil {
return nil, err
}
// update cover table
if len(coverImageData) > 0 {
if err := qb.UpdateSceneCover(sceneID, coverImageData, tx); err != nil {
return nil, err
}
}
// Clear the existing gallery value
gqb := models.NewGalleryQueryBuilder()
err = gqb.ClearGalleryId(sceneID, tx)
if err != nil {
return nil, err
}
if input.GalleryID != nil {
// Save the gallery
galleryID, _ := strconv.Atoi(*input.GalleryID)
updatedGallery := models.Gallery{
ID: galleryID,
SceneID: sql.NullInt64{Int64: int64(sceneID), Valid: true},
UpdatedAt: models.SQLiteTimestamp{Timestamp: updatedTime},
}
gqb := models.NewGalleryQueryBuilder()
_, err := gqb.Update(updatedGallery, tx)
if err != nil {
if err := qb.UpdateCover(sceneID, coverImageData); err != nil {
return nil, err
}
}
// Save the performers
var performerJoins []models.PerformersScenes
for _, pid := range input.PerformerIds {
performerID, _ := strconv.Atoi(pid)
performerJoin := models.PerformersScenes{
PerformerID: performerID,
SceneID: sceneID,
if translator.hasField("performer_ids") {
if err := r.updateScenePerformers(qb, sceneID, input.PerformerIds); err != nil {
return nil, err
}
performerJoins = append(performerJoins, performerJoin)
}
if err := jqb.UpdatePerformersScenes(sceneID, performerJoins, tx); err != nil {
return nil, err
}
// Save the movies
if translator.hasField("movies") {
if err := r.updateSceneMovies(qb, sceneID, input.Movies); err != nil {
return nil, err
}
}
// Save the tags
if translator.hasField("tag_ids") {
if err := r.updateSceneTags(qb, sceneID, input.TagIds); err != nil {
return nil, err
}
}
// Save the galleries
if translator.hasField("gallery_ids") {
if err := r.updateSceneGalleries(qb, sceneID, input.GalleryIds); err != nil {
return nil, err
}
}
// Save the stash_ids
if translator.hasField("stash_ids") {
stashIDJoins := models.StashIDsFromInput(input.StashIds)
if err := qb.UpdateStashIDs(sceneID, stashIDJoins); err != nil {
return nil, err
}
}
// only update the cover image if provided and everything else was successful
if coverImageData != nil {
err = manager.SetSceneScreenshot(scene.GetHash(config.GetVideoFileNamingAlgorithm()), coverImageData)
if err != nil {
return nil, err
}
}
return scene, nil
}
func (r *mutationResolver) updateScenePerformers(qb models.SceneReaderWriter, sceneID int, performerIDs []string) error {
ids, err := utils.StringSliceToIntSlice(performerIDs)
if err != nil {
return err
}
return qb.UpdatePerformers(sceneID, ids)
}
func (r *mutationResolver) updateSceneMovies(qb models.SceneReaderWriter, sceneID int, movies []*models.SceneMovieInput) error {
var movieJoins []models.MoviesScenes
for _, movie := range input.Movies {
movieID, _ := strconv.Atoi(movie.MovieID)
for _, movie := range movies {
movieID, err := strconv.Atoi(movie.MovieID)
if err != nil {
return err
}
movieJoin := models.MoviesScenes{
MovieID: movieID,
SceneID: sceneID,
}
if movie.SceneIndex != nil {
@@ -178,169 +178,106 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, tx *sqlx.T
movieJoins = append(movieJoins, movieJoin)
}
if err := jqb.UpdateMoviesScenes(sceneID, movieJoins, tx); err != nil {
return nil, err
}
// Save the tags
var tagJoins []models.ScenesTags
for _, tid := range input.TagIds {
tagID, _ := strconv.Atoi(tid)
tagJoin := models.ScenesTags{
SceneID: sceneID,
TagID: tagID,
}
tagJoins = append(tagJoins, tagJoin)
}
if err := jqb.UpdateScenesTags(sceneID, tagJoins, tx); err != nil {
return nil, err
}
return qb.UpdateMovies(sceneID, movieJoins)
}
// only update the cover image if provided and everything else was successful
if coverImageData != nil {
err = manager.SetSceneScreenshot(scene.GetHash(config.GetVideoFileNamingAlgorithm()), coverImageData)
if err != nil {
return nil, err
}
func (r *mutationResolver) updateSceneTags(qb models.SceneReaderWriter, sceneID int, tagsIDs []string) error {
ids, err := utils.StringSliceToIntSlice(tagsIDs)
if err != nil {
return err
}
return qb.UpdateTags(sceneID, ids)
}
// Save the stash_ids
if input.StashIds != nil {
var stashIDJoins []models.StashID
for _, stashID := range input.StashIds {
newJoin := models.StashID{
StashID: stashID.StashID,
Endpoint: stashID.Endpoint,
}
stashIDJoins = append(stashIDJoins, newJoin)
}
if err := jqb.UpdateSceneStashIDs(sceneID, stashIDJoins, tx); err != nil {
return nil, err
}
func (r *mutationResolver) updateSceneGalleries(qb models.SceneReaderWriter, sceneID int, galleryIDs []string) error {
ids, err := utils.StringSliceToIntSlice(galleryIDs)
if err != nil {
return err
}
return scene, nil
return qb.UpdateGalleries(sceneID, ids)
}
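Field handling in sceneUpdate now funnels through changesetTranslator rather than a chain of nil checks, so an omitted field, an explicit null, and a real value can be told apart from the raw GraphQL input map. The translator's implementation is outside this excerpt; a minimal sketch of the two helpers leaned on most heavily here, hasField and nullString, might look like the following (the map handling is an assumption, not the actual code):

// Sketch of the translator used above; built from the raw input map captured
// by getUpdateInputMap(ctx). Requires database/sql for the nullable types.
type changesetTranslatorSketch struct {
	inputMap map[string]interface{}
}

// hasField reports whether the client sent the named field at all.
func (t changesetTranslatorSketch) hasField(field string) bool {
	if t.inputMap == nil {
		return false
	}
	_, found := t.inputMap[field]
	return found
}

// nullString returns nil when the field was omitted (leave the column alone),
// an invalid NullString when it was sent as null (clear the column), and a
// valid NullString when a value was provided.
func (t changesetTranslatorSketch) nullString(value *string, field string) *sql.NullString {
	if !t.hasField(field) {
		return nil
	}
	ret := &sql.NullString{}
	if value != nil {
		ret.String = *value
		ret.Valid = true
	}
	return ret
}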
func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.BulkSceneUpdateInput) ([]*models.Scene, error) {
sceneIDs, err := utils.StringSliceToIntSlice(input.Ids)
if err != nil {
return nil, err
}
// Populate scene from the input
updatedTime := time.Now()
// Start the transaction and save the scene marker
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewSceneQueryBuilder()
jqb := models.NewJoinsQueryBuilder()
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
updatedScene := models.ScenePartial{
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
}
if input.Title != nil {
updatedScene.Title = &sql.NullString{String: *input.Title, Valid: true}
}
if input.Details != nil {
updatedScene.Details = &sql.NullString{String: *input.Details, Valid: true}
}
if input.URL != nil {
updatedScene.URL = &sql.NullString{String: *input.URL, Valid: true}
}
if input.Date != nil {
updatedScene.Date = &models.SQLiteDate{String: *input.Date, Valid: true}
}
if input.Rating != nil {
// a rating of 0 means unset the rating
if *input.Rating == 0 {
updatedScene.Rating = &sql.NullInt64{Int64: 0, Valid: false}
} else {
updatedScene.Rating = &sql.NullInt64{Int64: int64(*input.Rating), Valid: true}
}
}
if input.StudioID != nil {
// empty string means unset the studio
if *input.StudioID == "" {
updatedScene.StudioID = &sql.NullInt64{Int64: 0, Valid: false}
} else {
studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64)
updatedScene.StudioID = &sql.NullInt64{Int64: studioID, Valid: true}
}
}
updatedScene.Title = translator.nullString(input.Title, "title")
updatedScene.Details = translator.nullString(input.Details, "details")
updatedScene.URL = translator.nullString(input.URL, "url")
updatedScene.Date = translator.sqliteDate(input.Date, "date")
updatedScene.Rating = translator.nullInt64(input.Rating, "rating")
updatedScene.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
updatedScene.Organized = input.Organized
ret := []*models.Scene{}
for _, sceneIDStr := range input.Ids {
sceneID, _ := strconv.Atoi(sceneIDStr)
updatedScene.ID = sceneID
// Start the transaction and save the scene marker
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Scene()
scene, err := qb.Update(updatedScene, tx)
if err != nil {
_ = tx.Rollback()
return nil, err
}
for _, sceneID := range sceneIDs {
updatedScene.ID = sceneID
ret = append(ret, scene)
if input.GalleryID != nil {
// Save the gallery
galleryID, _ := strconv.Atoi(*input.GalleryID)
updatedGallery := models.Gallery{
ID: galleryID,
SceneID: sql.NullInt64{Int64: int64(sceneID), Valid: true},
UpdatedAt: models.SQLiteTimestamp{Timestamp: updatedTime},
}
gqb := models.NewGalleryQueryBuilder()
_, err := gqb.Update(updatedGallery, tx)
scene, err := qb.Update(updatedScene)
if err != nil {
_ = tx.Rollback()
return nil, err
}
}
// Save the performers
if wasFieldIncluded(ctx, "performer_ids") {
performerIDs, err := adjustScenePerformerIDs(tx, sceneID, *input.PerformerIds)
if err != nil {
_ = tx.Rollback()
return nil, err
return err
}
var performerJoins []models.PerformersScenes
for _, performerID := range performerIDs {
performerJoin := models.PerformersScenes{
PerformerID: performerID,
SceneID: sceneID,
ret = append(ret, scene)
// Save the performers
if translator.hasField("performer_ids") {
performerIDs, err := adjustScenePerformerIDs(qb, sceneID, *input.PerformerIds)
if err != nil {
return err
}
performerJoins = append(performerJoins, performerJoin)
}
if err := jqb.UpdatePerformersScenes(sceneID, performerJoins, tx); err != nil {
_ = tx.Rollback()
return nil, err
}
}
// Save the tags
if wasFieldIncluded(ctx, "tag_ids") {
tagIDs, err := adjustSceneTagIDs(tx, sceneID, *input.TagIds)
if err != nil {
_ = tx.Rollback()
return nil, err
}
var tagJoins []models.ScenesTags
for _, tagID := range tagIDs {
tagJoin := models.ScenesTags{
SceneID: sceneID,
TagID: tagID,
if err := qb.UpdatePerformers(sceneID, performerIDs); err != nil {
return err
}
tagJoins = append(tagJoins, tagJoin)
}
if err := jqb.UpdateScenesTags(sceneID, tagJoins, tx); err != nil {
_ = tx.Rollback()
return nil, err
// Save the tags
if translator.hasField("tag_ids") {
tagIDs, err := adjustSceneTagIDs(qb, sceneID, *input.TagIds)
if err != nil {
return err
}
if err := qb.UpdateTags(sceneID, tagIDs); err != nil {
return err
}
}
// Save the galleries
if translator.hasField("gallery_ids") {
galleryIDs, err := adjustSceneGalleryIDs(qb, sceneID, *input.GalleryIds)
if err != nil {
return err
}
if err := qb.UpdateGalleries(sceneID, galleryIDs); err != nil {
return err
}
}
}
}
// Commit
if err := tx.Commit(); err != nil {
return nil
}); err != nil {
return nil, err
}
@@ -348,6 +285,17 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.Bul
}
func adjustIDs(existingIDs []int, updateIDs models.BulkUpdateIds) []int {
// if we are setting the ids, just return the ids
if updateIDs.Mode == models.BulkUpdateIDModeSet {
existingIDs = []int{}
for _, idStr := range updateIDs.Ids {
id, _ := strconv.Atoi(idStr)
existingIDs = append(existingIDs, id)
}
return existingIDs
}
for _, idStr := range updateIDs.Ids {
id, _ := strconv.Atoi(idStr)
@@ -373,63 +321,62 @@ func adjustIDs(existingIDs []int, updateIDs models.BulkUpdateIds) []int {
return existingIDs
}
func adjustScenePerformerIDs(tx *sqlx.Tx, sceneID int, ids models.BulkUpdateIds) ([]int, error) {
var ret []int
jqb := models.NewJoinsQueryBuilder()
if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
// adding to the joins
performerJoins, err := jqb.GetScenePerformers(sceneID, tx)
if err != nil {
return nil, err
}
for _, join := range performerJoins {
ret = append(ret, join.PerformerID)
}
func adjustScenePerformerIDs(qb models.SceneReader, sceneID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetPerformerIDs(sceneID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func adjustSceneTagIDs(tx *sqlx.Tx, sceneID int, ids models.BulkUpdateIds) ([]int, error) {
var ret []int
func adjustSceneTagIDs(qb models.SceneReader, sceneID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetTagIDs(sceneID)
if err != nil {
return nil, err
}
jqb := models.NewJoinsQueryBuilder()
if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
// adding to the joins
tagJoins, err := jqb.GetSceneTags(sceneID, tx)
return adjustIDs(ret, ids), nil
}
if err != nil {
return nil, err
}
for _, join := range tagJoins {
ret = append(ret, join.TagID)
}
func adjustSceneGalleryIDs(qb models.SceneReader, sceneID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetGalleryIDs(sceneID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
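The adjustScene*IDs helpers above load the current ids with the reader and then defer to adjustIDs, which interprets the bulk-update mode. Only the SET branch is visible in this hunk; the ADD and REMOVE branches are elided, so the sketch below fills them in with the conventional semantics implied by the mode names rather than the exact code:

// Sketch of the three bulk-update modes. The SET branch mirrors the visible
// code; the ADD and REMOVE branches are assumed: append ids that are missing,
// drop ids that are listed. utils.IntExclude is used elsewhere in this file.
func adjustIDsSketch(existingIDs []int, updateIDs models.BulkUpdateIds) ([]int, error) {
	ids, err := utils.StringSliceToIntSlice(updateIDs.Ids)
	if err != nil {
		return nil, err
	}

	contains := func(list []int, id int) bool {
		for _, v := range list {
			if v == id {
				return true
			}
		}
		return false
	}

	switch updateIDs.Mode {
	case models.BulkUpdateIDModeSet:
		// replace the existing list outright
		return ids, nil
	case models.BulkUpdateIDModeAdd:
		// append any id not already present
		for _, id := range ids {
			if !contains(existingIDs, id) {
				existingIDs = append(existingIDs, id)
			}
		}
	case models.BulkUpdateIDModeRemove:
		// filter the listed ids out of the existing list
		existingIDs = utils.IntExclude(existingIDs, ids)
	}
	return existingIDs, nil
}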
func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneDestroyInput) (bool, error) {
qb := models.NewSceneQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)
sceneID, _ := strconv.Atoi(input.ID)
scene, err := qb.Find(sceneID)
err = manager.DestroyScene(sceneID, tx)
sceneID, err := strconv.Atoi(input.ID)
if err != nil {
tx.Rollback()
return false, err
}
if err := tx.Commit(); err != nil {
var scene *models.Scene
var postCommitFunc func()
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Scene()
var err error
scene, err = qb.Find(sceneID)
if err != nil {
return err
}
if scene == nil {
return fmt.Errorf("scene with id %d not found", sceneID)
}
postCommitFunc, err = manager.DestroyScene(scene, repo)
return err
}); err != nil {
return false, err
}
// perform the post-commit actions
postCommitFunc()
// if delete generated is true, then delete the generated files
// for the scene
if input.DeleteGenerated != nil && *input.DeleteGenerated {
@@ -446,27 +393,33 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
}
func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.ScenesDestroyInput) (bool, error) {
qb := models.NewSceneQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)
var scenes []*models.Scene
for _, id := range input.Ids {
sceneID, _ := strconv.Atoi(id)
var postCommitFuncs []func()
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Scene()
scene, err := qb.Find(sceneID)
if scene != nil {
scenes = append(scenes, scene)
}
err = manager.DestroyScene(sceneID, tx)
for _, id := range input.Ids {
sceneID, _ := strconv.Atoi(id)
if err != nil {
tx.Rollback()
return false, err
scene, err := qb.Find(sceneID)
if scene != nil {
scenes = append(scenes, scene)
}
f, err := manager.DestroyScene(scene, repo)
if err != nil {
return err
}
postCommitFuncs = append(postCommitFuncs, f)
}
return nil
}); err != nil {
return false, err
}
if err := tx.Commit(); err != nil {
return false, err
for _, f := range postCommitFuncs {
f()
}
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
@@ -488,8 +441,16 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
}
func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input models.SceneMarkerCreateInput) (*models.SceneMarker, error) {
primaryTagID, _ := strconv.Atoi(input.PrimaryTagID)
sceneID, _ := strconv.Atoi(input.SceneID)
primaryTagID, err := strconv.Atoi(input.PrimaryTagID)
if err != nil {
return nil, err
}
sceneID, err := strconv.Atoi(input.SceneID)
if err != nil {
return nil, err
}
currentTime := time.Now()
newSceneMarker := models.SceneMarker{
Title: input.Title,
@@ -500,14 +461,31 @@ func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input models.S
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
}
return changeMarker(ctx, create, newSceneMarker, input.TagIds)
tagIDs, err := utils.StringSliceToIntSlice(input.TagIds)
if err != nil {
return nil, err
}
return r.changeMarker(ctx, create, newSceneMarker, tagIDs)
}
func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input models.SceneMarkerUpdateInput) (*models.SceneMarker, error) {
// Populate scene marker from the input
sceneMarkerID, _ := strconv.Atoi(input.ID)
sceneID, _ := strconv.Atoi(input.SceneID)
primaryTagID, _ := strconv.Atoi(input.PrimaryTagID)
sceneMarkerID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, err
}
primaryTagID, err := strconv.Atoi(input.PrimaryTagID)
if err != nil {
return nil, err
}
sceneID, err := strconv.Atoi(input.SceneID)
if err != nil {
return nil, err
}
updatedSceneMarker := models.SceneMarker{
ID: sceneMarkerID,
Title: input.Title,
@@ -517,168 +495,151 @@ func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input models.S
UpdatedAt: models.SQLiteTimestamp{Timestamp: time.Now()},
}
return changeMarker(ctx, update, updatedSceneMarker, input.TagIds)
tagIDs, err := utils.StringSliceToIntSlice(input.TagIds)
if err != nil {
return nil, err
}
return r.changeMarker(ctx, update, updatedSceneMarker, tagIDs)
}
func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (bool, error) {
qb := models.NewSceneMarkerQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)
markerID, _ := strconv.Atoi(id)
marker, err := qb.Find(markerID)
markerID, err := strconv.Atoi(id)
if err != nil {
return false, err
}
if err := qb.Destroy(id, tx); err != nil {
_ = tx.Rollback()
return false, err
}
if err := tx.Commit(); err != nil {
var postCommitFunc func()
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.SceneMarker()
sqb := repo.Scene()
marker, err := qb.Find(markerID)
if err != nil {
return err
}
if marker == nil {
return fmt.Errorf("scene marker with id %d not found", markerID)
}
scene, err := sqb.Find(int(marker.SceneID.Int64))
if err != nil {
return err
}
postCommitFunc, err = manager.DestroySceneMarker(scene, marker, qb)
return err
}); err != nil {
return false, err
}
// delete the preview for the marker
sqb := models.NewSceneQueryBuilder()
scene, _ := sqb.Find(int(marker.SceneID.Int64))
if scene != nil {
seconds := int(marker.Seconds)
manager.DeleteSceneMarkerFiles(scene, seconds, config.GetVideoFileNamingAlgorithm())
}
postCommitFunc()
return true, nil
}
func changeMarker(ctx context.Context, changeType int, changedMarker models.SceneMarker, tagIds []string) (*models.SceneMarker, error) {
// Start the transaction and save the scene marker
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewSceneMarkerQueryBuilder()
jqb := models.NewJoinsQueryBuilder()
func (r *mutationResolver) changeMarker(ctx context.Context, changeType int, changedMarker models.SceneMarker, tagIDs []int) (*models.SceneMarker, error) {
var existingMarker *models.SceneMarker
var sceneMarker *models.SceneMarker
var err error
switch changeType {
case create:
sceneMarker, err = qb.Create(changedMarker, tx)
case update:
// check to see if timestamp was changed
existingMarker, err = qb.Find(changedMarker.ID)
if err == nil {
sceneMarker, err = qb.Update(changedMarker, tx)
}
}
if err != nil {
_ = tx.Rollback()
return nil, err
}
var scene *models.Scene
// Save the marker tags
var markerTagJoins []models.SceneMarkersTags
for _, tid := range tagIds {
tagID, _ := strconv.Atoi(tid)
if tagID == changedMarker.PrimaryTagID {
continue // If this tag is the primary tag, then let's not add it.
}
markerTag := models.SceneMarkersTags{
SceneMarkerID: sceneMarker.ID,
TagID: tagID,
}
markerTagJoins = append(markerTagJoins, markerTag)
}
switch changeType {
case create:
if err := jqb.CreateSceneMarkersTags(markerTagJoins, tx); err != nil {
_ = tx.Rollback()
return nil, err
}
case update:
if err := jqb.UpdateSceneMarkersTags(changedMarker.ID, markerTagJoins, tx); err != nil {
_ = tx.Rollback()
return nil, err
}
}
// Start the transaction and save the scene marker
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.SceneMarker()
sqb := repo.Scene()
// Commit
if err := tx.Commit(); err != nil {
var err error
switch changeType {
case create:
sceneMarker, err = qb.Create(changedMarker)
case update:
// check to see if timestamp was changed
existingMarker, err = qb.Find(changedMarker.ID)
if err != nil {
return err
}
sceneMarker, err = qb.Update(changedMarker)
if err != nil {
return err
}
scene, err = sqb.Find(int(existingMarker.SceneID.Int64))
}
if err != nil {
return err
}
// Save the marker tags
// If this tag is the primary tag, then let's not add it.
tagIDs = utils.IntExclude(tagIDs, []int{changedMarker.PrimaryTagID})
return qb.UpdateTags(sceneMarker.ID, tagIDs)
}); err != nil {
return nil, err
}
// remove the marker preview if the timestamp was changed
if existingMarker != nil && existingMarker.Seconds != changedMarker.Seconds {
sqb := models.NewSceneQueryBuilder()
scene, _ := sqb.Find(int(existingMarker.SceneID.Int64))
if scene != nil {
seconds := int(existingMarker.Seconds)
manager.DeleteSceneMarkerFiles(scene, seconds, config.GetVideoFileNamingAlgorithm())
}
if scene != nil && existingMarker != nil && existingMarker.Seconds != changedMarker.Seconds {
seconds := int(existingMarker.Seconds)
manager.DeleteSceneMarkerFiles(scene, seconds, config.GetVideoFileNamingAlgorithm())
}
return sceneMarker, nil
}
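SceneDestroy, ScenesDestroy and SceneMarkerDestroy all share one shape in this changeset: the manager's destroy functions now return a cleanup closure instead of deleting files themselves, and the resolver only runs that closure after the transaction has committed, so a rolled-back destroy never removes generated files. A condensed sketch of the pattern (the helper name is illustrative):

// destroyWithCleanup sketches the two-phase destroy used above: database work
// happens inside the transaction, and the returned closure, which deletes
// generated files, is only invoked once the commit has succeeded.
func (r *mutationResolver) destroyWithCleanup(ctx context.Context, destroy func(repo models.Repository) (func(), error)) error {
	var postCommit func()

	if err := r.withTxn(ctx, func(repo models.Repository) error {
		var err error
		postCommit, err = destroy(repo)
		return err
	}); err != nil {
		// nothing committed, so no files are touched
		return err
	}

	// safe to delete generated files now that the rows are gone
	postCommit()
	return nil
}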
func (r *mutationResolver) SceneIncrementO(ctx context.Context, id string) (int, error) {
sceneID, _ := strconv.Atoi(id)
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewSceneQueryBuilder()
newVal, err := qb.IncrementOCounter(sceneID, tx)
func (r *mutationResolver) SceneIncrementO(ctx context.Context, id string) (ret int, err error) {
sceneID, err := strconv.Atoi(id)
if err != nil {
_ = tx.Rollback()
return 0, err
}
// Commit
if err := tx.Commit(); err != nil {
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Scene()
ret, err = qb.IncrementOCounter(sceneID)
return err
}); err != nil {
return 0, err
}
return newVal, nil
return ret, nil
}
func (r *mutationResolver) SceneDecrementO(ctx context.Context, id string) (int, error) {
sceneID, _ := strconv.Atoi(id)
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewSceneQueryBuilder()
newVal, err := qb.DecrementOCounter(sceneID, tx)
func (r *mutationResolver) SceneDecrementO(ctx context.Context, id string) (ret int, err error) {
sceneID, err := strconv.Atoi(id)
if err != nil {
_ = tx.Rollback()
return 0, err
}
// Commit
if err := tx.Commit(); err != nil {
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Scene()
ret, err = qb.DecrementOCounter(sceneID)
return err
}); err != nil {
return 0, err
}
return newVal, nil
return ret, nil
}
func (r *mutationResolver) SceneResetO(ctx context.Context, id string) (int, error) {
sceneID, _ := strconv.Atoi(id)
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewSceneQueryBuilder()
newVal, err := qb.ResetOCounter(sceneID, tx)
func (r *mutationResolver) SceneResetO(ctx context.Context, id string) (ret int, err error) {
sceneID, err := strconv.Atoi(id)
if err != nil {
_ = tx.Rollback()
return 0, err
}
// Commit
if err := tx.Commit(); err != nil {
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Scene()
ret, err = qb.ResetOCounter(sceneID)
return err
}); err != nil {
return 0, err
}
return newVal, nil
return ret, nil
}
func (r *mutationResolver) SceneGenerateScreenshot(ctx context.Context, id string, at *float64) (string, error) {

View file

@@ -16,7 +16,7 @@ func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input
return false, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex)
}
client := stashbox.NewClient(*boxes[input.StashBoxIndex])
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager)
return client.SubmitStashBoxFingerprints(input.SceneIds, boxes[input.StashBoxIndex].Endpoint)
}

View file

@@ -6,7 +6,6 @@ import (
"strconv"
"time"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
@@ -44,41 +43,33 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
}
// Start the transaction and save the studio
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewStudioQueryBuilder()
jqb := models.NewJoinsQueryBuilder()
var studio *models.Studio
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Studio()
studio, err := qb.Create(newStudio, tx)
if err != nil {
_ = tx.Rollback()
return nil, err
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdateStudioImage(studio.ID, imageData, tx); err != nil {
_ = tx.Rollback()
return nil, err
var err error
studio, err = qb.Create(newStudio)
if err != nil {
return err
}
}
// Save the stash_ids
if input.StashIds != nil {
var stashIDJoins []models.StashID
for _, stashID := range input.StashIds {
newJoin := models.StashID{
StashID: stashID.StashID,
Endpoint: stashID.Endpoint,
// update image table
if len(imageData) > 0 {
if err := qb.UpdateImage(studio.ID, imageData); err != nil {
return err
}
stashIDJoins = append(stashIDJoins, newJoin)
}
if err := jqb.UpdateStudioStashIDs(studio.ID, stashIDJoins, tx); err != nil {
return nil, err
}
}
// Commit
if err := tx.Commit(); err != nil {
// Save the stash_ids
if input.StashIds != nil {
stashIDJoins := models.StashIDsFromInput(input.StashIds)
if err := qb.UpdateStashIDs(studio.ID, stashIDJoins); err != nil {
return err
}
}
return nil
}); err != nil {
return nil, err
}
@@ -87,7 +78,14 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.StudioUpdateInput) (*models.Studio, error) {
// Populate studio from the input
studioID, _ := strconv.Atoi(input.ID)
studioID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, err
}
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
updatedStudio := models.StudioPartial{
ID: studioID,
@@ -95,7 +93,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
}
var imageData []byte
imageIncluded := wasFieldIncluded(ctx, "image")
imageIncluded := translator.hasField("image")
if input.Image != nil {
var err error
_, imageData, err = utils.ProcessBase64Image(*input.Image)
@@ -109,65 +107,47 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
updatedStudio.Name = &sql.NullString{String: *input.Name, Valid: true}
updatedStudio.Checksum = &checksum
}
if input.URL != nil {
updatedStudio.URL = &sql.NullString{String: *input.URL, Valid: true}
}
if input.ParentID != nil {
parentID, _ := strconv.ParseInt(*input.ParentID, 10, 64)
updatedStudio.ParentID = &sql.NullInt64{Int64: parentID, Valid: true}
} else {
// parent studio must be nullable
updatedStudio.ParentID = &sql.NullInt64{Valid: false}
}
updatedStudio.URL = translator.nullString(input.URL, "url")
updatedStudio.ParentID = translator.nullInt64FromString(input.ParentID, "parent_id")
// Start the transaction and save the studio
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewStudioQueryBuilder()
jqb := models.NewJoinsQueryBuilder()
var studio *models.Studio
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Studio()
if err := manager.ValidateModifyStudio(updatedStudio, tx); err != nil {
tx.Rollback()
return nil, err
}
studio, err := qb.Update(updatedStudio, tx)
if err != nil {
tx.Rollback()
return nil, err
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdateStudioImage(studio.ID, imageData, tx); err != nil {
tx.Rollback()
return nil, err
if err := manager.ValidateModifyStudio(updatedStudio, qb); err != nil {
return err
}
} else if imageIncluded {
// must be unsetting
if err := qb.DestroyStudioImage(studio.ID, tx); err != nil {
tx.Rollback()
return nil, err
}
}
// Save the stash_ids
if input.StashIds != nil {
var stashIDJoins []models.StashID
for _, stashID := range input.StashIds {
newJoin := models.StashID{
StashID: stashID.StashID,
Endpoint: stashID.Endpoint,
var err error
studio, err = qb.Update(updatedStudio)
if err != nil {
return err
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdateImage(studio.ID, imageData); err != nil {
return err
}
} else if imageIncluded {
// must be unsetting
if err := qb.DestroyImage(studio.ID); err != nil {
return err
}
stashIDJoins = append(stashIDJoins, newJoin)
}
if err := jqb.UpdateStudioStashIDs(studioID, stashIDJoins, tx); err != nil {
return nil, err
}
}
// Commit
if err := tx.Commit(); err != nil {
// Save the stash_ids
if translator.hasField("stash_ids") {
stashIDJoins := models.StashIDsFromInput(input.StashIds)
if err := qb.UpdateStashIDs(studioID, stashIDJoins); err != nil {
return err
}
}
return nil
}); err != nil {
return nil, err
}
@@ -175,13 +155,35 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
}
func (r *mutationResolver) StudioDestroy(ctx context.Context, input models.StudioDestroyInput) (bool, error) {
qb := models.NewStudioQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)
if err := qb.Destroy(input.ID, tx); err != nil {
_ = tx.Rollback()
id, err := strconv.Atoi(input.ID)
if err != nil {
return false, err
}
if err := tx.Commit(); err != nil {
if err := r.withTxn(ctx, func(repo models.Repository) error {
return repo.Studio().Destroy(id)
}); err != nil {
return false, err
}
return true, nil
}
func (r *mutationResolver) StudiosDestroy(ctx context.Context, studioIDs []string) (bool, error) {
ids, err := utils.StringSliceToIntSlice(studioIDs)
if err != nil {
return false, err
}
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Studio()
for _, id := range ids {
if err := qb.Destroy(id); err != nil {
return err
}
}
return nil
}); err != nil {
return false, err
}
return true, nil
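StudioUpdate distinguishes three image states: a non-empty upload replaces the stored image, a field that was included but carries no data is treated as an explicit unset, and an omitted field leaves the image alone. The same branch appears in TagUpdate below. A compact sketch, with the reader/writer interface name assumed by analogy with SceneReaderWriter:

// Sketch of the include/unset/leave-alone image handling used by the update
// mutations; imageData and imageIncluded are derived from the GraphQL input
// (imageIncluded comes from translator.hasField("image")).
func updateStudioImageSketch(qb models.StudioReaderWriter, studioID int, imageData []byte, imageIncluded bool) error {
	switch {
	case len(imageData) > 0:
		// new image provided: overwrite the stored one
		return qb.UpdateImage(studioID, imageData)
	case imageIncluded:
		// field sent but empty or null: explicit unset
		return qb.DestroyImage(studioID)
	default:
		// field omitted: leave the existing image untouched
		return nil
	}
}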

View file

@@ -6,7 +6,6 @@ import (
"strconv"
"time"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
@@ -33,31 +32,29 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate
}
// Start the transaction and save the tag
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewTagQueryBuilder()
var tag *models.Tag
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Tag()
// ensure name is unique
if err := manager.EnsureTagNameUnique(newTag, tx); err != nil {
tx.Rollback()
return nil, err
}
tag, err := qb.Create(newTag, tx)
if err != nil {
tx.Rollback()
return nil, err
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdateTagImage(tag.ID, imageData, tx); err != nil {
_ = tx.Rollback()
return nil, err
// ensure name is unique
if err := manager.EnsureTagNameUnique(newTag, qb); err != nil {
return err
}
}
// Commit
if err := tx.Commit(); err != nil {
tag, err = qb.Create(newTag)
if err != nil {
return err
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdateImage(tag.ID, imageData); err != nil {
return err
}
}
return nil
}); err != nil {
return nil, err
}
@@ -66,7 +63,11 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate
func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdateInput) (*models.Tag, error) {
// Populate tag from the input
tagID, _ := strconv.Atoi(input.ID)
tagID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, err
}
updatedTag := models.Tag{
ID: tagID,
Name: input.Name,
@@ -74,9 +75,12 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
}
var imageData []byte
var err error
imageIncluded := wasFieldIncluded(ctx, "image")
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
imageIncluded := translator.hasField("image")
if input.Image != nil {
_, imageData, err = utils.ProcessBase64Image(*input.Image)
@@ -86,50 +90,45 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
}
// Start the transaction and save the tag
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewTagQueryBuilder()
var tag *models.Tag
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Tag()
// ensure name is unique
existing, err := qb.Find(tagID, tx)
if err != nil {
tx.Rollback()
return nil, err
}
if existing == nil {
tx.Rollback()
return nil, fmt.Errorf("Tag with ID %d not found", tagID)
}
if existing.Name != updatedTag.Name {
if err := manager.EnsureTagNameUnique(updatedTag, tx); err != nil {
tx.Rollback()
return nil, err
// ensure name is unique
existing, err := qb.Find(tagID)
if err != nil {
return err
}
}
tag, err := qb.Update(updatedTag, tx)
if err != nil {
_ = tx.Rollback()
return nil, err
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdateTagImage(tag.ID, imageData, tx); err != nil {
tx.Rollback()
return nil, err
if existing == nil {
return fmt.Errorf("Tag with ID %d not found", tagID)
}
} else if imageIncluded {
// must be unsetting
if err := qb.DestroyTagImage(tag.ID, tx); err != nil {
tx.Rollback()
return nil, err
}
}
// Commit
if err := tx.Commit(); err != nil {
if existing.Name != updatedTag.Name {
if err := manager.EnsureTagNameUnique(updatedTag, qb); err != nil {
return err
}
}
tag, err = qb.Update(updatedTag)
if err != nil {
return err
}
// update image table
if len(imageData) > 0 {
if err := qb.UpdateImage(tag.ID, imageData); err != nil {
return err
}
} else if imageIncluded {
// must be unsetting
if err := qb.DestroyImage(tag.ID); err != nil {
return err
}
}
return nil
}); err != nil {
return nil, err
}
@@ -137,13 +136,35 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
}
func (r *mutationResolver) TagDestroy(ctx context.Context, input models.TagDestroyInput) (bool, error) {
qb := models.NewTagQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)
if err := qb.Destroy(input.ID, tx); err != nil {
_ = tx.Rollback()
tagID, err := strconv.Atoi(input.ID)
if err != nil {
return false, err
}
if err := tx.Commit(); err != nil {
if err := r.withTxn(ctx, func(repo models.Repository) error {
return repo.Tag().Destroy(tagID)
}); err != nil {
return false, err
}
return true, nil
}
func (r *mutationResolver) TagsDestroy(ctx context.Context, tagIDs []string) (bool, error) {
ids, err := utils.StringSliceToIntSlice(tagIDs)
if err != nil {
return false, err
}
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Tag()
for _, id := range ids {
if err := qb.Destroy(id); err != nil {
return err
}
}
return nil
}); err != nil {
return false, err
}
return true, nil
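Both TagCreate and TagUpdate call manager.EnsureTagNameUnique with the tag reader instead of a raw transaction handle. The helper's body is not part of this hunk; judging by the FindByName(name, true) expectations set up in the test below, it presumably performs a case-insensitive lookup and rejects the change when another tag already owns the name. Roughly, and with the reader interface name assumed:

// Rough sketch of the uniqueness check; the real EnsureTagNameUnique lives in
// the manager package and its exact signature and error text may differ.
func ensureTagNameUniqueSketch(tag models.Tag, qb models.TagReader) error {
	// case-insensitive lookup, matching the FindByName(name, true) calls
	// mocked in the test below
	existing, err := qb.FindByName(tag.Name, true)
	if err != nil {
		return err
	}
	if existing != nil && existing.ID != tag.ID {
		return fmt.Errorf("tag with name '%s' already exists", tag.Name)
	}
	return nil
}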

View file

@@ -0,0 +1,70 @@
package api
import (
"context"
"errors"
"testing"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
// TODO - move this into a common area
func newResolver() *Resolver {
return &Resolver{
txnManager: mocks.NewTransactionManager(),
}
}
const tagName = "tagName"
const errTagName = "errTagName"
const existingTagID = 1
const existingTagName = "existingTagName"
const newTagID = 2
func TestTagCreate(t *testing.T) {
r := newResolver()
tagRW := r.txnManager.(*mocks.TransactionManager).Tag().(*mocks.TagReaderWriter)
tagRW.On("FindByName", existingTagName, true).Return(&models.Tag{
ID: existingTagID,
Name: existingTagName,
}, nil).Once()
tagRW.On("FindByName", errTagName, true).Return(nil, nil).Once()
expectedErr := errors.New("TagCreate error")
tagRW.On("Create", mock.AnythingOfType("models.Tag")).Return(nil, expectedErr)
_, err := r.Mutation().TagCreate(context.TODO(), models.TagCreateInput{
Name: existingTagName,
})
assert.NotNil(t, err)
_, err = r.Mutation().TagCreate(context.TODO(), models.TagCreateInput{
Name: errTagName,
})
assert.Equal(t, expectedErr, err)
tagRW.AssertExpectations(t)
r = newResolver()
tagRW = r.txnManager.(*mocks.TransactionManager).Tag().(*mocks.TagReaderWriter)
tagRW.On("FindByName", tagName, true).Return(nil, nil).Once()
tagRW.On("Create", mock.AnythingOfType("models.Tag")).Return(&models.Tag{
ID: newTagID,
Name: tagName,
}, nil)
tag, err := r.Mutation().TagCreate(context.TODO(), models.TagCreateInput{
Name: tagName,
})
assert.Nil(t, err)
assert.NotNil(t, tag)
}
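The same mock wiring extends naturally to the other tag mutations. As an illustration, a destroy test could stub the writer directly; the Destroy expectation below assumes the generated TagReaderWriter mock exposes the same Destroy(id) method the resolver calls on repo.Tag():

// Sketch of a companion test; the Destroy mock method is assumed to exist
// because TagDestroy calls repo.Tag().Destroy(id).
func TestTagDestroy(t *testing.T) {
	r := newResolver()
	tagRW := r.txnManager.(*mocks.TransactionManager).Tag().(*mocks.TagReaderWriter)

	// existingTagID is 1, so pass its string form to the resolver
	tagRW.On("Destroy", existingTagID).Return(nil).Once()

	ret, err := r.Mutation().TagDestroy(context.TODO(), models.TagDestroyInput{
		ID: "1",
	})

	assert.Nil(t, err)
	assert.True(t, ret)

	tagRW.AssertExpectations(t)
}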

View file

@@ -49,6 +49,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
CachePath: config.GetCachePath(),
CalculateMd5: config.IsCalculateMD5(),
VideoFileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
ParallelTasks: config.GetParallelTasks(),
PreviewSegments: config.GetPreviewSegments(),
PreviewSegmentDuration: config.GetPreviewSegmentDuration(),
PreviewExcludeStart: config.GetPreviewExcludeStart(),
@@ -76,6 +77,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
}
func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
menuItems := config.GetMenuItems()
soundOnPreview := config.GetSoundOnPreview()
wallShowTitle := config.GetWallShowTitle()
wallPlayback := config.GetWallPlayback()
@@ -87,6 +89,7 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
language := config.GetLanguage()
return &models.ConfigInterfaceResult{
MenuItems: menuItems,
SoundOnPreview: &soundOnPreview,
WallShowTitle: &wallShowTitle,
WallPlayback: &wallPlayback,

View file

@@ -7,17 +7,37 @@ import (
"github.com/stashapp/stash/pkg/models"
)
func (r *queryResolver) FindGallery(ctx context.Context, id string) (*models.Gallery, error) {
qb := models.NewGalleryQueryBuilder()
idInt, _ := strconv.Atoi(id)
return qb.Find(idInt, nil)
func (r *queryResolver) FindGallery(ctx context.Context, id string) (ret *models.Gallery, err error) {
idInt, err := strconv.Atoi(id)
if err != nil {
return nil, err
}
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Gallery().Find(idInt)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *queryResolver) FindGalleries(ctx context.Context, galleryFilter *models.GalleryFilterType, filter *models.FindFilterType) (*models.FindGalleriesResultType, error) {
qb := models.NewGalleryQueryBuilder()
galleries, total := qb.Query(galleryFilter, filter)
return &models.FindGalleriesResultType{
Count: total,
Galleries: galleries,
}, nil
func (r *queryResolver) FindGalleries(ctx context.Context, galleryFilter *models.GalleryFilterType, filter *models.FindFilterType) (ret *models.FindGalleriesResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
galleries, total, err := repo.Gallery().Query(galleryFilter, filter)
if err != nil {
return err
}
ret = &models.FindGalleriesResultType{
Count: total,
Galleries: galleries,
}
return nil
}); err != nil {
return nil, err
}
return ret, nil
}

View file

@@ -8,23 +8,48 @@ import (
)
func (r *queryResolver) FindImage(ctx context.Context, id *string, checksum *string) (*models.Image, error) {
qb := models.NewImageQueryBuilder()
var image *models.Image
var err error
if id != nil {
idInt, _ := strconv.Atoi(*id)
image, err = qb.Find(idInt)
} else if checksum != nil {
image, err = qb.FindByChecksum(*checksum)
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
qb := repo.Image()
var err error
if id != nil {
idInt, err := strconv.Atoi(*id)
if err != nil {
return err
}
image, err = qb.Find(idInt)
} else if checksum != nil {
image, err = qb.FindByChecksum(*checksum)
}
return err
}); err != nil {
return nil, err
}
return image, err
return image, nil
}
func (r *queryResolver) FindImages(ctx context.Context, imageFilter *models.ImageFilterType, imageIds []int, filter *models.FindFilterType) (*models.FindImagesResultType, error) {
qb := models.NewImageQueryBuilder()
images, total := qb.Query(imageFilter, filter)
return &models.FindImagesResultType{
Count: total,
Images: images,
}, nil
func (r *queryResolver) FindImages(ctx context.Context, imageFilter *models.ImageFilterType, imageIds []int, filter *models.FindFilterType) (ret *models.FindImagesResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
qb := repo.Image()
images, total, err := qb.Query(imageFilter, filter)
if err != nil {
return err
}
ret = &models.FindImagesResultType{
Count: total,
Images: images,
}
return nil
}); err != nil {
return nil, err
}
return ret, nil
}

View file

@@ -7,27 +7,60 @@ import (
"github.com/stashapp/stash/pkg/models"
)
func (r *queryResolver) FindMovie(ctx context.Context, id string) (*models.Movie, error) {
qb := models.NewMovieQueryBuilder()
idInt, _ := strconv.Atoi(id)
return qb.Find(idInt, nil)
func (r *queryResolver) FindMovie(ctx context.Context, id string) (ret *models.Movie, err error) {
idInt, err := strconv.Atoi(id)
if err != nil {
return nil, err
}
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Movie().Find(idInt)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.MovieFilterType, filter *models.FindFilterType) (*models.FindMoviesResultType, error) {
qb := models.NewMovieQueryBuilder()
movies, total := qb.Query(movieFilter, filter)
return &models.FindMoviesResultType{
Count: total,
Movies: movies,
}, nil
func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.MovieFilterType, filter *models.FindFilterType) (ret *models.FindMoviesResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
movies, total, err := repo.Movie().Query(movieFilter, filter)
if err != nil {
return err
}
ret = &models.FindMoviesResultType{
Count: total,
Movies: movies,
}
return nil
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *queryResolver) AllMovies(ctx context.Context) ([]*models.Movie, error) {
qb := models.NewMovieQueryBuilder()
return qb.All()
func (r *queryResolver) AllMovies(ctx context.Context) (ret []*models.Movie, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Movie().All()
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *queryResolver) AllMoviesSlim(ctx context.Context) ([]*models.Movie, error) {
qb := models.NewMovieQueryBuilder()
return qb.AllSlim()
func (r *queryResolver) AllMoviesSlim(ctx context.Context) (ret []*models.Movie, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Movie().AllSlim()
return err
}); err != nil {
return nil, err
}
return ret, nil
}

View file

@@ -2,31 +2,64 @@ package api
import (
"context"
"github.com/stashapp/stash/pkg/models"
"strconv"
"github.com/stashapp/stash/pkg/models"
)
func (r *queryResolver) FindPerformer(ctx context.Context, id string) (*models.Performer, error) {
qb := models.NewPerformerQueryBuilder()
idInt, _ := strconv.Atoi(id)
return qb.Find(idInt)
func (r *queryResolver) FindPerformer(ctx context.Context, id string) (ret *models.Performer, err error) {
idInt, err := strconv.Atoi(id)
if err != nil {
return nil, err
}
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Performer().Find(idInt)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *models.PerformerFilterType, filter *models.FindFilterType) (*models.FindPerformersResultType, error) {
qb := models.NewPerformerQueryBuilder()
performers, total := qb.Query(performerFilter, filter)
return &models.FindPerformersResultType{
Count: total,
Performers: performers,
}, nil
func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *models.PerformerFilterType, filter *models.FindFilterType) (ret *models.FindPerformersResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
performers, total, err := repo.Performer().Query(performerFilter, filter)
if err != nil {
return err
}
ret = &models.FindPerformersResultType{
Count: total,
Performers: performers,
}
return nil
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *queryResolver) AllPerformers(ctx context.Context) ([]*models.Performer, error) {
qb := models.NewPerformerQueryBuilder()
return qb.All()
func (r *queryResolver) AllPerformers(ctx context.Context) (ret []*models.Performer, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Performer().All()
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *queryResolver) AllPerformersSlim(ctx context.Context) ([]*models.Performer, error) {
qb := models.NewPerformerQueryBuilder()
return qb.AllSlim()
func (r *queryResolver) AllPerformersSlim(ctx context.Context) (ret []*models.Performer, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Performer().AllSlim()
return err
}); err != nil {
return nil, err
}
return ret, nil
}

View file

@@ -9,70 +9,132 @@ import (
)
func (r *queryResolver) FindScene(ctx context.Context, id *string, checksum *string) (*models.Scene, error) {
qb := models.NewSceneQueryBuilder()
var scene *models.Scene
var err error
if id != nil {
idInt, _ := strconv.Atoi(*id)
scene, err = qb.Find(idInt)
} else if checksum != nil {
scene, err = qb.FindByChecksum(*checksum)
}
return scene, err
}
func (r *queryResolver) FindSceneByHash(ctx context.Context, input models.SceneHashInput) (*models.Scene, error) {
qb := models.NewSceneQueryBuilder()
var scene *models.Scene
var err error
if input.Checksum != nil {
scene, err = qb.FindByChecksum(*input.Checksum)
if err != nil {
return nil, err
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
qb := repo.Scene()
var err error
if id != nil {
idInt, err := strconv.Atoi(*id)
if err != nil {
return err
}
scene, err = qb.Find(idInt)
} else if checksum != nil {
scene, err = qb.FindByChecksum(*checksum)
}
}
if scene == nil && input.Oshash != nil {
scene, err = qb.FindByOSHash(*input.Oshash)
if err != nil {
return nil, err
}
}
return scene, err
}
func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.SceneFilterType, sceneIds []int, filter *models.FindFilterType) (*models.FindScenesResultType, error) {
qb := models.NewSceneQueryBuilder()
scenes, total := qb.Query(sceneFilter, filter)
return &models.FindScenesResultType{
Count: total,
Scenes: scenes,
}, nil
}
func (r *queryResolver) FindScenesByPathRegex(ctx context.Context, filter *models.FindFilterType) (*models.FindScenesResultType, error) {
qb := models.NewSceneQueryBuilder()
scenes, total := qb.QueryByPathRegex(filter)
return &models.FindScenesResultType{
Count: total,
Scenes: scenes,
}, nil
}
func (r *queryResolver) ParseSceneFilenames(ctx context.Context, filter *models.FindFilterType, config models.SceneParserInput) (*models.SceneParserResultType, error) {
parser := manager.NewSceneFilenameParser(filter, config)
result, count, err := parser.Parse()
if err != nil {
return err
}); err != nil {
return nil, err
}
return &models.SceneParserResultType{
Count: count,
Results: result,
}, nil
return scene, nil
}
func (r *queryResolver) FindSceneByHash(ctx context.Context, input models.SceneHashInput) (*models.Scene, error) {
var scene *models.Scene
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
qb := repo.Scene()
var err error
if input.Checksum != nil {
scene, err = qb.FindByChecksum(*input.Checksum)
if err != nil {
return err
}
}
if scene == nil && input.Oshash != nil {
scene, err = qb.FindByOSHash(*input.Oshash)
if err != nil {
return err
}
}
return nil
}); err != nil {
return nil, err
}
return scene, nil
}
func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.SceneFilterType, sceneIds []int, filter *models.FindFilterType) (ret *models.FindScenesResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
scenes, total, err := repo.Scene().Query(sceneFilter, filter)
if err != nil {
return err
}
ret = &models.FindScenesResultType{
Count: total,
Scenes: scenes,
}
return nil
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *queryResolver) FindScenesByPathRegex(ctx context.Context, filter *models.FindFilterType) (ret *models.FindScenesResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
sceneFilter := &models.SceneFilterType{}
if filter != nil && filter.Q != nil {
sceneFilter.Path = &models.StringCriterionInput{
Modifier: models.CriterionModifierMatchesRegex,
Value: "(?i)" + *filter.Q,
}
}
// make a copy of the filter if provided, nilling out Q
var queryFilter *models.FindFilterType
if filter != nil {
f := *filter
queryFilter = &f
queryFilter.Q = nil
}
scenes, total, err := repo.Scene().Query(sceneFilter, queryFilter)
if err != nil {
return err
}
ret = &models.FindScenesResultType{
Count: total,
Scenes: scenes,
}
return nil
}); err != nil {
return nil, err
}
return ret, nil
}
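FindScenesByPathRegex no longer has a dedicated query-builder method; the free-text query is rewritten into a case-insensitive path criterion and the ordinary Query path is reused, with Q removed from the paging filter so it is not applied twice. A small illustration of that rewrite, using a made-up query string:

// Illustration of the rewrite performed above: Q becomes a path regex
// criterion and is cleared from the filter that controls paging and sorting.
q := "beach"
findFilter := &models.FindFilterType{Q: &q}

sceneFilter := &models.SceneFilterType{
	Path: &models.StringCriterionInput{
		Modifier: models.CriterionModifierMatchesRegex,
		Value:    "(?i)" + *findFilter.Q,
	},
}

queryFilter := *findFilter
queryFilter.Q = nil

// scenes, total, err := repo.Scene().Query(sceneFilter, &queryFilter)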
func (r *queryResolver) ParseSceneFilenames(ctx context.Context, filter *models.FindFilterType, config models.SceneParserInput) (ret *models.SceneParserResultType, err error) {
parser := manager.NewSceneFilenameParser(filter, config)
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
result, count, err := parser.Parse(repo)
if err != nil {
return err
}
ret = &models.SceneParserResultType{
Count: count,
Results: result,
}
return nil
}); err != nil {
return nil, err
}
return ret, nil
}

View file

@@ -2,14 +2,25 @@ package api
import (
"context"
"github.com/stashapp/stash/pkg/models"
)
func (r *queryResolver) FindSceneMarkers(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, filter *models.FindFilterType) (*models.FindSceneMarkersResultType, error) {
qb := models.NewSceneMarkerQueryBuilder()
sceneMarkers, total := qb.Query(sceneMarkerFilter, filter)
return &models.FindSceneMarkersResultType{
Count: total,
SceneMarkers: sceneMarkers,
}, nil
func (r *queryResolver) FindSceneMarkers(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, filter *models.FindFilterType) (ret *models.FindSceneMarkersResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
sceneMarkers, total, err := repo.SceneMarker().Query(sceneMarkerFilter, filter)
if err != nil {
return err
}
ret = &models.FindSceneMarkersResultType{
Count: total,
SceneMarkers: sceneMarkers,
}
return nil
}); err != nil {
return nil, err
}
return ret, nil
}

View file

@@ -2,31 +2,66 @@ package api
import (
"context"
"github.com/stashapp/stash/pkg/models"
"strconv"
"github.com/stashapp/stash/pkg/models"
)
func (r *queryResolver) FindStudio(ctx context.Context, id string) (*models.Studio, error) {
qb := models.NewStudioQueryBuilder()
idInt, _ := strconv.Atoi(id)
return qb.Find(idInt, nil)
func (r *queryResolver) FindStudio(ctx context.Context, id string) (ret *models.Studio, err error) {
idInt, err := strconv.Atoi(id)
if err != nil {
return nil, err
}
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
ret, err = repo.Studio().Find(idInt)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *queryResolver) FindStudios(ctx context.Context, studioFilter *models.StudioFilterType, filter *models.FindFilterType) (*models.FindStudiosResultType, error) {
qb := models.NewStudioQueryBuilder()
studios, total := qb.Query(studioFilter, filter)
return &models.FindStudiosResultType{
Count: total,
Studios: studios,
}, nil
func (r *queryResolver) FindStudios(ctx context.Context, studioFilter *models.StudioFilterType, filter *models.FindFilterType) (ret *models.FindStudiosResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
studios, total, err := repo.Studio().Query(studioFilter, filter)
if err != nil {
return err
}
ret = &models.FindStudiosResultType{
Count: total,
Studios: studios,
}
return nil
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *queryResolver) AllStudios(ctx context.Context) ([]*models.Studio, error) {
qb := models.NewStudioQueryBuilder()
return qb.All()
func (r *queryResolver) AllStudios(ctx context.Context) (ret []*models.Studio, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Studio().All()
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *queryResolver) AllStudiosSlim(ctx context.Context) ([]*models.Studio, error) {
qb := models.NewStudioQueryBuilder()
return qb.AllSlim()
func (r *queryResolver) AllStudiosSlim(ctx context.Context) (ret []*models.Studio, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Studio().AllSlim()
return err
}); err != nil {
return nil, err
}
return ret, nil
}

View file

@@ -7,27 +7,60 @@ import (
"github.com/stashapp/stash/pkg/models"
)
func (r *queryResolver) FindTag(ctx context.Context, id string) (*models.Tag, error) {
qb := models.NewTagQueryBuilder()
idInt, _ := strconv.Atoi(id)
return qb.Find(idInt, nil)
func (r *queryResolver) FindTag(ctx context.Context, id string) (ret *models.Tag, err error) {
idInt, err := strconv.Atoi(id)
if err != nil {
return nil, err
}
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Tag().Find(idInt)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *queryResolver) FindTags(ctx context.Context, tagFilter *models.TagFilterType, filter *models.FindFilterType) (*models.FindTagsResultType, error) {
qb := models.NewTagQueryBuilder()
tags, total := qb.Query(tagFilter, filter)
return &models.FindTagsResultType{
Count: total,
Tags: tags,
}, nil
func (r *queryResolver) FindTags(ctx context.Context, tagFilter *models.TagFilterType, filter *models.FindFilterType) (ret *models.FindTagsResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
tags, total, err := repo.Tag().Query(tagFilter, filter)
if err != nil {
return err
}
ret = &models.FindTagsResultType{
Count: total,
Tags: tags,
}
return nil
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *queryResolver) AllTags(ctx context.Context) ([]*models.Tag, error) {
qb := models.NewTagQueryBuilder()
return qb.All()
func (r *queryResolver) AllTags(ctx context.Context) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Tag().All()
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *queryResolver) AllTagsSlim(ctx context.Context) ([]*models.Tag, error) {
qb := models.NewTagQueryBuilder()
return qb.AllSlim()
func (r *queryResolver) AllTagsSlim(ctx context.Context) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Tag().AllSlim()
return err
}); err != nil {
return nil, err
}
return ret, nil
}

View file

@@ -12,11 +12,13 @@ import (
func (r *queryResolver) SceneStreams(ctx context.Context, id *string) ([]*models.SceneStreamEndpoint, error) {
// find the scene
qb := models.NewSceneQueryBuilder()
idInt, _ := strconv.Atoi(*id)
scene, err := qb.Find(idInt)
if err != nil {
var scene *models.Scene
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
idInt, _ := strconv.Atoi(*id)
var err error
scene, err = repo.Scene().Find(idInt)
return err
}); err != nil {
return nil, err
}

View file

@@ -95,7 +95,7 @@ func (r *queryResolver) QueryStashBoxScene(ctx context.Context, input models.Sta
return nil, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex)
}
client := stashbox.NewClient(*boxes[input.StashBoxIndex])
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager)
if len(input.SceneIds) > 0 {
return client.FindStashBoxScenesByFingerprints(input.SceneIds)

View file

@@ -12,7 +12,9 @@ import (
"github.com/stashapp/stash/pkg/utils"
)
type imageRoutes struct{}
type imageRoutes struct {
txnManager models.TransactionManager
}
func (rs imageRoutes) Routes() chi.Router {
r := chi.NewRouter()
@@ -57,12 +59,16 @@ func ImageCtx(next http.Handler) http.Handler {
imageID, _ := strconv.Atoi(imageIdentifierQueryParam)
var image *models.Image
qb := models.NewImageQueryBuilder()
if imageID == 0 {
image, _ = qb.FindByChecksum(imageIdentifierQueryParam)
} else {
image, _ = qb.Find(imageID)
}
manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
qb := repo.Image()
if imageID == 0 {
image, _ = qb.FindByChecksum(imageIdentifierQueryParam)
} else {
image, _ = qb.Find(imageID)
}
return nil
})
if image == nil {
http.Error(w, http.StatusText(404), 404)

View file

@@ -6,11 +6,14 @@ import (
"strconv"
"github.com/go-chi/chi"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
type movieRoutes struct{}
type movieRoutes struct {
txnManager models.TransactionManager
}
func (rs movieRoutes) Routes() chi.Router {
r := chi.NewRouter()
@ -26,11 +29,16 @@ func (rs movieRoutes) Routes() chi.Router {
func (rs movieRoutes) FrontImage(w http.ResponseWriter, r *http.Request) {
movie := r.Context().Value(movieKey).(*models.Movie)
qb := models.NewMovieQueryBuilder()
image, _ := qb.GetFrontImage(movie.ID, nil)
defaultParam := r.URL.Query().Get("default")
if len(image) == 0 || defaultParam == "true" {
var image []byte
if defaultParam != "true" {
rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
image, _ = repo.Movie().GetFrontImage(movie.ID)
return nil
})
}
if len(image) == 0 {
_, image, _ = utils.ProcessBase64Image(models.DefaultMovieImage)
}
@ -39,11 +47,16 @@ func (rs movieRoutes) FrontImage(w http.ResponseWriter, r *http.Request) {
func (rs movieRoutes) BackImage(w http.ResponseWriter, r *http.Request) {
movie := r.Context().Value(movieKey).(*models.Movie)
qb := models.NewMovieQueryBuilder()
image, _ := qb.GetBackImage(movie.ID, nil)
defaultParam := r.URL.Query().Get("default")
if len(image) == 0 || defaultParam == "true" {
var image []byte
if defaultParam != "true" {
rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
image, _ = repo.Movie().GetBackImage(movie.ID)
return nil
})
}
if len(image) == 0 {
_, image, _ = utils.ProcessBase64Image(models.DefaultMovieImage)
}
@ -58,9 +71,12 @@ func MovieCtx(next http.Handler) http.Handler {
return
}
qb := models.NewMovieQueryBuilder()
movie, err := qb.Find(movieID, nil)
if err != nil {
var movie *models.Movie
if err := manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
var err error
movie, err = repo.Movie().Find(movieID)
return err
}); err != nil {
http.Error(w, http.StatusText(404), 404)
return
}

View file

@ -6,11 +6,14 @@ import (
"strconv"
"github.com/go-chi/chi"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
type performerRoutes struct{}
type performerRoutes struct {
txnManager models.TransactionManager
}
func (rs performerRoutes) Routes() chi.Router {
r := chi.NewRouter()
@ -25,10 +28,16 @@ func (rs performerRoutes) Routes() chi.Router {
func (rs performerRoutes) Image(w http.ResponseWriter, r *http.Request) {
performer := r.Context().Value(performerKey).(*models.Performer)
qb := models.NewPerformerQueryBuilder()
image, _ := qb.GetPerformerImage(performer.ID, nil)
defaultParam := r.URL.Query().Get("default")
var image []byte
if defaultParam != "true" {
rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
image, _ = repo.Performer().GetImage(performer.ID)
return nil
})
}
if len(image) == 0 || defaultParam == "true" {
image, _ = getRandomPerformerImageUsingName(performer.Name.String, performer.Gender.String)
}
@ -44,9 +53,12 @@ func PerformerCtx(next http.Handler) http.Handler {
return
}
qb := models.NewPerformerQueryBuilder()
performer, err := qb.Find(performerID)
if err != nil {
var performer *models.Performer
if err := manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
var err error
performer, err = repo.Performer().Find(performerID)
return err
}); err != nil {
http.Error(w, http.StatusText(404), 404)
return
}

View file

@ -15,7 +15,9 @@ import (
"github.com/stashapp/stash/pkg/utils"
)
type sceneRoutes struct{}
type sceneRoutes struct {
txnManager models.TransactionManager
}
func (rs sceneRoutes) Routes() chi.Router {
r := chi.NewRouter()
@ -53,7 +55,7 @@ func getSceneFileContainer(scene *models.Scene) ffmpeg.Container {
container = ffmpeg.Container(scene.Format.String)
} else { // container isn't in the DB
// shouldn't happen, fallback to ffprobe
tmpVideoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path)
tmpVideoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path, false)
if err != nil {
logger.Errorf("[transcode] error reading video file: %s", err.Error())
return ffmpeg.Container("")
@ -100,7 +102,7 @@ func (rs sceneRoutes) StreamMp4(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) StreamHLS(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
videoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path)
videoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path, false)
if err != nil {
logger.Errorf("[stream] error reading video file: %s", err.Error())
return
@ -136,7 +138,7 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, vi
// needs to be transcoded
videoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path)
videoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path, false)
if err != nil {
logger.Errorf("[stream] error reading video file: %s", err.Error())
return
@ -183,8 +185,11 @@ func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) {
if screenshotExists {
http.ServeFile(w, r, filepath)
} else {
qb := models.NewSceneQueryBuilder()
cover, _ := qb.GetSceneCover(scene.ID, nil)
var cover []byte
rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
cover, _ = repo.Scene().GetCover(scene.ID)
return nil
})
utils.ServeImage(cover, w, r)
}
}
@ -201,39 +206,48 @@ func (rs sceneRoutes) Webp(w http.ResponseWriter, r *http.Request) {
http.ServeFile(w, r, filepath)
}
func getChapterVttTitle(marker *models.SceneMarker) string {
func (rs sceneRoutes) getChapterVttTitle(ctx context.Context, marker *models.SceneMarker) string {
if marker.Title != "" {
return marker.Title
}
qb := models.NewTagQueryBuilder()
primaryTag, err := qb.Find(marker.PrimaryTagID, nil)
if err != nil {
// should not happen
var ret string
if err := rs.txnManager.WithReadTxn(ctx, func(repo models.ReaderRepository) error {
qb := repo.Tag()
primaryTag, err := qb.Find(marker.PrimaryTagID)
if err != nil {
return err
}
ret = primaryTag.Name
tags, err := qb.FindBySceneMarkerID(marker.ID)
if err != nil {
return err
}
for _, t := range tags {
ret += ", " + t.Name
}
return nil
}); err != nil {
panic(err)
}
ret := primaryTag.Name
tags, err := qb.FindBySceneMarkerID(marker.ID, nil)
if err != nil {
// should not happen
panic(err)
}
for _, t := range tags {
ret += ", " + t.Name
}
return ret
}
func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
qb := models.NewSceneMarkerQueryBuilder()
sceneMarkers, err := qb.FindBySceneID(scene.ID, nil)
if err != nil {
panic("invalid scene markers for chapter vtt")
var sceneMarkers []*models.SceneMarker
if err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
var err error
sceneMarkers, err = repo.SceneMarker().FindBySceneID(scene.ID)
return err
}); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
vttLines := []string{"WEBVTT", ""}
@ -241,7 +255,7 @@ func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
vttLines = append(vttLines, strconv.Itoa(i+1))
time := utils.GetVTTTime(marker.Seconds)
vttLines = append(vttLines, time+" --> "+time)
vttLines = append(vttLines, getChapterVttTitle(marker))
vttLines = append(vttLines, rs.getChapterVttTitle(r.Context(), marker))
vttLines = append(vttLines, "")
}
vtt := strings.Join(vttLines, "\n")
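For reference, each marker becomes a cue whose start and end timestamps are identical and whose text is either the marker title or the primary tag name followed by the other tag names, so the generated chapter file looks roughly like this (illustrative timestamps and tag names; the exact time format comes from utils.GetVTTTime):

WEBVTT

1
00:01:30.000 --> 00:01:30.000
Primary Tag, Other Tag

2
00:05:12.000 --> 00:05:12.000
Custom marker title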
@ -267,11 +281,14 @@ func (rs sceneRoutes) VttSprite(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) SceneMarkerStream(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId"))
qb := models.NewSceneMarkerQueryBuilder()
sceneMarker, err := qb.Find(sceneMarkerID)
if err != nil {
logger.Warn("Error when getting scene marker for stream")
http.Error(w, http.StatusText(404), 404)
var sceneMarker *models.SceneMarker
if err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
var err error
sceneMarker, err = repo.SceneMarker().Find(sceneMarkerID)
return err
}); err != nil {
logger.Warnf("Error when getting scene marker for stream: %s", err.Error())
http.Error(w, http.StatusText(500), 500)
return
}
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPath(scene.GetHash(config.GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds))
@ -281,11 +298,14 @@ func (rs sceneRoutes) SceneMarkerStream(w http.ResponseWriter, r *http.Request)
func (rs sceneRoutes) SceneMarkerPreview(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId"))
qb := models.NewSceneMarkerQueryBuilder()
sceneMarker, err := qb.Find(sceneMarkerID)
if err != nil {
logger.Warn("Error when getting scene marker for stream")
http.Error(w, http.StatusText(404), 404)
var sceneMarker *models.SceneMarker
if err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
var err error
sceneMarker, err = repo.SceneMarker().Find(sceneMarkerID)
return err
}); err != nil {
logger.Warnf("Error when getting scene marker for stream: %s", err.Error())
http.Error(w, http.StatusText(500), 500)
return
}
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.GetHash(config.GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds))
@ -310,17 +330,21 @@ func SceneCtx(next http.Handler) http.Handler {
sceneID, _ := strconv.Atoi(sceneIdentifierQueryParam)
var scene *models.Scene
qb := models.NewSceneQueryBuilder()
if sceneID == 0 {
// determine checksum/os by the length of the query param
if len(sceneIdentifierQueryParam) == 32 {
scene, _ = qb.FindByChecksum(sceneIdentifierQueryParam)
manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
qb := repo.Scene()
if sceneID == 0 {
// determine checksum/oshash by the length of the query param
if len(sceneIdentifierQueryParam) == 32 {
scene, _ = qb.FindByChecksum(sceneIdentifierQueryParam)
} else {
scene, _ = qb.FindByOSHash(sceneIdentifierQueryParam)
}
} else {
scene, _ = qb.FindByOSHash(sceneIdentifierQueryParam)
scene, _ = qb.Find(sceneID)
}
} else {
scene, _ = qb.Find(sceneID)
}
return nil
})
if scene == nil {
http.Error(w, http.StatusText(404), 404)

View file

@ -6,11 +6,14 @@ import (
"strconv"
"github.com/go-chi/chi"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
type studioRoutes struct{}
type studioRoutes struct {
txnManager models.TransactionManager
}
func (rs studioRoutes) Routes() chi.Router {
r := chi.NewRouter()
@ -25,11 +28,17 @@ func (rs studioRoutes) Routes() chi.Router {
func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) {
studio := r.Context().Value(studioKey).(*models.Studio)
qb := models.NewStudioQueryBuilder()
image, _ := qb.GetStudioImage(studio.ID, nil)
defaultParam := r.URL.Query().Get("default")
if len(image) == 0 || defaultParam == "true" {
var image []byte
if defaultParam != "true" {
rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
image, _ = repo.Studio().GetImage(studio.ID)
return nil
})
}
if len(image) == 0 {
_, image, _ = utils.ProcessBase64Image(models.DefaultStudioImage)
}
@ -44,9 +53,12 @@ func StudioCtx(next http.Handler) http.Handler {
return
}
qb := models.NewStudioQueryBuilder()
studio, err := qb.Find(studioID, nil)
if err != nil {
var studio *models.Studio
if err := manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
var err error
studio, err = repo.Studio().Find(studioID)
return err
}); err != nil {
http.Error(w, http.StatusText(404), 404)
return
}

View file

@ -6,11 +6,14 @@ import (
"strconv"
"github.com/go-chi/chi"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
type tagRoutes struct{}
type tagRoutes struct {
txnManager models.TransactionManager
}
func (rs tagRoutes) Routes() chi.Router {
r := chi.NewRouter()
@ -25,12 +28,17 @@ func (rs tagRoutes) Routes() chi.Router {
func (rs tagRoutes) Image(w http.ResponseWriter, r *http.Request) {
tag := r.Context().Value(tagKey).(*models.Tag)
qb := models.NewTagQueryBuilder()
image, _ := qb.GetTagImage(tag.ID, nil)
// use default image if not present
defaultParam := r.URL.Query().Get("default")
if len(image) == 0 || defaultParam == "true" {
var image []byte
if defaultParam != "true" {
rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
image, _ = repo.Tag().GetImage(tag.ID)
return nil
})
}
if len(image) == 0 {
image = models.DefaultTagImage
}
@ -45,9 +53,12 @@ func TagCtx(next http.Handler) http.Handler {
return
}
qb := models.NewTagQueryBuilder()
tag, err := qb.Find(tagID, nil)
if err != nil {
var tag *models.Tag
if err := manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
var err error
tag, err = repo.Tag().Find(tagID)
return err
}); err != nil {
http.Error(w, http.StatusText(404), 404)
return
}

View file

@ -134,7 +134,13 @@ func Start() {
return true
},
})
gqlHandler := handler.GraphQL(models.NewExecutableSchema(models.Config{Resolvers: &Resolver{}}), recoverFunc, websocketUpgrader)
maxUploadSize := handler.UploadMaxSize(config.GetMaxUploadSize())
txnManager := manager.GetInstance().TxnManager
resolver := &Resolver{
txnManager: txnManager,
}
gqlHandler := handler.GraphQL(models.NewExecutableSchema(models.Config{Resolvers: resolver}), recoverFunc, websocketUpgrader, maxUploadSize)
r.Handle("/graphql", gqlHandler)
r.Handle("/playground", handler.Playground("GraphQL playground", "/graphql"))
@ -145,12 +151,24 @@ func Start() {
r.Get(loginEndPoint, getLoginHandler)
r.Mount("/performer", performerRoutes{}.Routes())
r.Mount("/scene", sceneRoutes{}.Routes())
r.Mount("/image", imageRoutes{}.Routes())
r.Mount("/studio", studioRoutes{}.Routes())
r.Mount("/movie", movieRoutes{}.Routes())
r.Mount("/tag", tagRoutes{}.Routes())
r.Mount("/performer", performerRoutes{
txnManager: txnManager,
}.Routes())
r.Mount("/scene", sceneRoutes{
txnManager: txnManager,
}.Routes())
r.Mount("/image", imageRoutes{
txnManager: txnManager,
}.Routes())
r.Mount("/studio", studioRoutes{
txnManager: txnManager,
}.Routes())
r.Mount("/movie", movieRoutes{
txnManager: txnManager,
}.Routes())
r.Mount("/tag", tagRoutes{
txnManager: txnManager,
}.Routes())
r.Mount("/downloads", downloadsRoutes{}.Routes())
r.HandleFunc("/css", func(w http.ResponseWriter, r *http.Request) {

View file

@ -7,19 +7,21 @@ import (
"os"
"time"
"github.com/fvbommel/sortorder"
"github.com/gobuffalo/packr/v2"
"github.com/golang-migrate/migrate/v4"
sqlite3mig "github.com/golang-migrate/migrate/v4/database/sqlite3"
"github.com/golang-migrate/migrate/v4/source"
"github.com/jmoiron/sqlx"
sqlite3 "github.com/mattn/go-sqlite3"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/utils"
)
var DB *sqlx.DB
var dbPath string
var appSchemaVersion uint = 15
var appSchemaVersion uint = 18
var databaseSchemaVersion uint
const sqlite3Driver = "sqlite3ex"
@ -67,9 +69,9 @@ func Initialize(databasePath string) bool {
func open(databasePath string, disableForeignKeys bool) *sqlx.DB {
// https://github.com/mattn/go-sqlite3
url := "file:" + databasePath
url := "file:" + databasePath + "?_journal=WAL"
if !disableForeignKeys {
url += "?_fk=true"
url += "&_fk=true"
}
conn, err := sqlx.Open(sqlite3Driver, url)
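With this change the DSN handed to sqlx.Open looks like file:/path/to/stash-go.sqlite?_journal=WAL&_fk=true (path illustrative): WAL journaling is always requested, and the foreign-key flag is appended with & since the query string now always exists.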
@ -94,20 +96,35 @@ func Reset(databasePath string) error {
return errors.New("Error removing database: " + err.Error())
}
// remove the -shm and -wal files (if they exist)
walFiles := []string{databasePath + "-shm", databasePath + "-wal"}
for _, wf := range walFiles {
if exists, _ := utils.FileExists(wf); exists {
err = os.Remove(wf)
if err != nil {
return errors.New("Error removing database: " + err.Error())
}
}
}
Initialize(databasePath)
return nil
}
// Backup the database
func Backup(backupPath string) error {
db, err := sqlx.Connect(sqlite3Driver, "file:"+dbPath+"?_fk=true")
if err != nil {
return fmt.Errorf("Open database %s failed:%s", dbPath, err)
// Backup the database. If db is nil, a new connection to the configured
// database file is opened (and closed) for the duration of the backup;
// otherwise the supplied connection is used.
func Backup(db *sqlx.DB, backupPath string) error {
if db == nil {
var err error
db, err = sqlx.Connect(sqlite3Driver, "file:"+dbPath+"?_fk=true")
if err != nil {
return fmt.Errorf("Open database %s failed:%s", dbPath, err)
}
defer db.Close()
}
defer db.Close()
logger.Infof("Backing up database into: %s", backupPath)
_, err = db.Exec(`VACUUM INTO "` + backupPath + `"`)
_, err := db.Exec(`VACUUM INTO "` + backupPath + `"`)
if err != nil {
return fmt.Errorf("Vacuum failed: %s", err)
}
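A short usage sketch (illustrative backup path; assumes the enclosing package is imported as database and DB is the package-level connection declared above):

// Reuse the already-open connection, e.g. from a scheduled backup task.
if err := database.Backup(database.DB, "stash-go.sqlite.backup"); err != nil {
	logger.Errorf("backup failed: %s", err.Error())
}

// Passing nil keeps the old behaviour: Backup opens, uses and closes its own
// connection to the configured database file.
_ = database.Backup(nil, "stash-go.sqlite.backup")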
@ -225,6 +242,19 @@ func registerCustomDriver() {
}
}
// COLLATE NATURAL_CS - Case sensitive natural sort
err := conn.RegisterCollation("NATURAL_CS", func(s string, s2 string) int {
if sortorder.NaturalLess(s, s2) {
return -1
} else {
return 1
}
})
if err != nil {
return fmt.Errorf("Error registering natural sort collation: %s", err.Error())
}
return nil
},
},
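With the collation registered, natural ordering can be requested directly in SQL, e.g. (illustrative query fragment) ORDER BY scenes.title COLLATE NATURAL_CS ASC, so that "scene 2" sorts before "scene 10" instead of after it.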

View file

@ -0,0 +1,3 @@
ALTER TABLE `scenes` ADD COLUMN `organized` boolean not null default '0';
ALTER TABLE `images` ADD COLUMN `organized` boolean not null default '0';
ALTER TABLE `galleries` ADD COLUMN `organized` boolean not null default '0';

View file

@ -0,0 +1 @@
UPDATE `scenes` SET `size` = NULL;

View file

@ -0,0 +1,138 @@
-- recreate the tables referencing galleries to correct their references
ALTER TABLE `galleries` rename to `_galleries_old`;
ALTER TABLE `galleries_images` rename to `_galleries_images_old`;
ALTER TABLE `galleries_tags` rename to `_galleries_tags_old`;
ALTER TABLE `performers_galleries` rename to `_performers_galleries_old`;
CREATE TABLE `galleries` (
`id` integer not null primary key autoincrement,
`path` varchar(510),
`checksum` varchar(255) not null,
`zip` boolean not null default '0',
`title` varchar(255),
`url` varchar(255),
`date` date,
`details` text,
`studio_id` integer,
`rating` tinyint,
`file_mod_time` datetime,
`organized` boolean not null default '0',
`created_at` datetime not null,
`updated_at` datetime not null,
foreign key(`studio_id`) references `studios`(`id`) on delete SET NULL
);
DROP INDEX IF EXISTS `index_galleries_on_scene_id`;
DROP INDEX IF EXISTS `galleries_path_unique`;
DROP INDEX IF EXISTS `galleries_checksum_unique`;
DROP INDEX IF EXISTS `index_galleries_on_studio_id`;
CREATE UNIQUE INDEX `galleries_path_unique` on `galleries` (`path`);
CREATE UNIQUE INDEX `galleries_checksum_unique` on `galleries` (`checksum`);
CREATE INDEX `index_galleries_on_studio_id` on `galleries` (`studio_id`);
CREATE TABLE `scenes_galleries` (
`scene_id` integer,
`gallery_id` integer,
foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE,
foreign key(`gallery_id`) references `galleries`(`id`) on delete CASCADE
);
CREATE INDEX `index_scenes_galleries_on_scene_id` on `scenes_galleries` (`scene_id`);
CREATE INDEX `index_scenes_galleries_on_gallery_id` on `scenes_galleries` (`gallery_id`);
CREATE TABLE `galleries_images` (
`gallery_id` integer,
`image_id` integer,
foreign key(`gallery_id`) references `galleries`(`id`) on delete CASCADE,
foreign key(`image_id`) references `images`(`id`) on delete CASCADE
);
DROP INDEX IF EXISTS `index_galleries_images_on_image_id`;
DROP INDEX IF EXISTS `index_galleries_images_on_gallery_id`;
CREATE INDEX `index_galleries_images_on_image_id` on `galleries_images` (`image_id`);
CREATE INDEX `index_galleries_images_on_gallery_id` on `galleries_images` (`gallery_id`);
CREATE TABLE `performers_galleries` (
`performer_id` integer,
`gallery_id` integer,
foreign key(`performer_id`) references `performers`(`id`) on delete CASCADE,
foreign key(`gallery_id`) references `galleries`(`id`) on delete CASCADE
);
DROP INDEX IF EXISTS `index_performers_galleries_on_gallery_id`;
DROP INDEX IF EXISTS `index_performers_galleries_on_performer_id`;
CREATE INDEX `index_performers_galleries_on_gallery_id` on `performers_galleries` (`gallery_id`);
CREATE INDEX `index_performers_galleries_on_performer_id` on `performers_galleries` (`performer_id`);
CREATE TABLE `galleries_tags` (
`gallery_id` integer,
`tag_id` integer,
foreign key(`gallery_id`) references `galleries`(`id`) on delete CASCADE,
foreign key(`tag_id`) references `tags`(`id`) on delete CASCADE
);
DROP INDEX IF EXISTS `index_galleries_tags_on_tag_id`;
DROP INDEX IF EXISTS `index_galleries_tags_on_gallery_id`;
CREATE INDEX `index_galleries_tags_on_tag_id` on `galleries_tags` (`tag_id`);
CREATE INDEX `index_galleries_tags_on_gallery_id` on `galleries_tags` (`gallery_id`);
-- populate from the old tables
INSERT INTO `galleries`
(
`id`,
`path`,
`checksum`,
`zip`,
`title`,
`url`,
`date`,
`details`,
`studio_id`,
`rating`,
`file_mod_time`,
`organized`,
`created_at`,
`updated_at`
)
SELECT
`id`,
`path`,
`checksum`,
`zip`,
`title`,
`url`,
`date`,
`details`,
`studio_id`,
`rating`,
`file_mod_time`,
`organized`,
`created_at`,
`updated_at`
FROM `_galleries_old`;
INSERT INTO `scenes_galleries`
(
`scene_id`,
`gallery_id`
)
SELECT
`scene_id`,
`id`
FROM `_galleries_old`
WHERE scene_id IS NOT NULL;
-- these tables are a direct copy
INSERT INTO `galleries_images` SELECT * from `_galleries_images_old`;
INSERT INTO `galleries_tags` SELECT * from `_galleries_tags_old`;
INSERT INTO `performers_galleries` SELECT * from `_performers_galleries_old`;
-- drop old tables
DROP TABLE `_galleries_old`;
DROP TABLE `_galleries_images_old`;
DROP TABLE `_galleries_tags_old`;
DROP TABLE `_performers_galleries_old`;

View file

@ -222,7 +222,7 @@ type VideoFile struct {
}
// Execute exec command and bind result to struct.
func NewVideoFile(ffprobePath string, videoPath string) (*VideoFile, error) {
func NewVideoFile(ffprobePath string, videoPath string, stripExt bool) (*VideoFile, error) {
args := []string{"-v", "quiet", "-print_format", "json", "-show_format", "-show_streams", "-show_error", videoPath}
//// Extremely slow on windows for some reason
//if runtime.GOOS != "windows" {
@ -239,10 +239,10 @@ func NewVideoFile(ffprobePath string, videoPath string) (*VideoFile, error) {
return nil, fmt.Errorf("Error unmarshalling video data for <%s>: %s", videoPath, err.Error())
}
return parse(videoPath, probeJSON)
return parse(videoPath, probeJSON, stripExt)
}
func parse(filePath string, probeJSON *FFProbeJSON) (*VideoFile, error) {
func parse(filePath string, probeJSON *FFProbeJSON, stripExt bool) (*VideoFile, error) {
if probeJSON == nil {
return nil, fmt.Errorf("failed to get ffprobe json for <%s>", filePath)
}
@ -262,7 +262,7 @@ func parse(filePath string, probeJSON *FFProbeJSON) (*VideoFile, error) {
if result.Title == "" {
// default title to filename
result.SetTitleFromPath()
result.SetTitleFromPath(stripExt)
}
result.Comment = probeJSON.Format.Tags.Comment
@ -339,6 +339,11 @@ func (v *VideoFile) getStreamIndex(fileType string, probeJSON FFProbeJSON) int {
return -1
}
func (v *VideoFile) SetTitleFromPath() {
func (v *VideoFile) SetTitleFromPath(stripExtension bool) {
v.Title = filepath.Base(v.Path)
if stripExtension {
ext := filepath.Ext(v.Title)
v.Title = strings.TrimSuffix(v.Title, ext)
}
}
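As an illustration, for a path like /videos/My Clip.mp4 the default title becomes "My Clip.mp4" when stripExtension is false and "My Clip" when it is true, letting callers choose whether the extension ends up in the default title.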

View file

@ -40,6 +40,8 @@ func ToBasicJSON(gallery *models.Gallery) (*jsonschema.Gallery, error) {
newGalleryJSON.Rating = int(gallery.Rating.Int64)
}
newGalleryJSON.Organized = gallery.Organized
if gallery.Details.Valid {
newGalleryJSON.Details = gallery.Details.String
}
@ -72,3 +74,14 @@ func GetIDs(galleries []*models.Gallery) []int {
return results
}
func GetChecksums(galleries []*models.Gallery) []string {
var results []string
for _, gallery := range galleries {
if gallery.Checksum != "" {
results = append(results, gallery.Checksum)
}
}
return results
}

View file

@ -6,7 +6,6 @@ import (
"github.com/stashapp/stash/pkg/manager/jsonschema"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stashapp/stash/pkg/models/modelstest"
"github.com/stretchr/testify/assert"
"testing"
@ -25,14 +24,15 @@ const (
)
const (
path = "path"
zip = true
url = "url"
checksum = "checksum"
title = "title"
date = "2001-01-01"
rating = 5
details = "details"
path = "path"
zip = true
url = "url"
checksum = "checksum"
title = "title"
date = "2001-01-01"
rating = 5
organized = true
details = "details"
)
const (
@ -50,17 +50,18 @@ var updateTime time.Time = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC)
func createFullGallery(id int) models.Gallery {
return models.Gallery{
ID: id,
Path: modelstest.NullString(path),
Path: models.NullString(path),
Zip: zip,
Title: modelstest.NullString(title),
Title: models.NullString(title),
Checksum: checksum,
Date: models.SQLiteDate{
String: date,
Valid: true,
},
Details: modelstest.NullString(details),
Rating: modelstest.NullInt64(rating),
URL: modelstest.NullString(url),
Details: models.NullString(details),
Rating: models.NullInt64(rating),
Organized: organized,
URL: models.NullString(url),
CreatedAt: models.SQLiteTimestamp{
Timestamp: createTime,
},
@ -84,14 +85,15 @@ func createEmptyGallery(id int) models.Gallery {
func createFullJSONGallery() *jsonschema.Gallery {
return &jsonschema.Gallery{
Title: title,
Path: path,
Zip: zip,
Checksum: checksum,
Date: date,
Details: details,
Rating: rating,
URL: url,
Title: title,
Path: path,
Zip: zip,
Checksum: checksum,
Date: date,
Details: details,
Rating: rating,
Organized: organized,
URL: url,
CreatedAt: models.JSONTime{
Time: createTime,
},
@ -143,7 +145,7 @@ func TestToJSON(t *testing.T) {
func createStudioGallery(studioID int) models.Gallery {
return models.Gallery{
StudioID: modelstest.NullInt64(int64(studioID)),
StudioID: models.NullInt64(int64(studioID)),
}
}
@ -177,7 +179,7 @@ func TestGetStudioName(t *testing.T) {
studioErr := errors.New("error getting image")
mockStudioReader.On("Find", studioID).Return(&models.Studio{
Name: modelstest.NullString(studioName),
Name: models.NullString(studioName),
}, nil).Once()
mockStudioReader.On("Find", missingStudioID).Return(nil, nil).Once()
mockStudioReader.On("Find", errStudioID).Return(nil, studioErr).Once()

View file

@ -1,30 +0,0 @@
package gallery
import (
"github.com/stashapp/stash/pkg/api/urlbuilders"
"github.com/stashapp/stash/pkg/models"
)
func GetFiles(g *models.Gallery, baseURL string) []*models.GalleryFilesType {
var galleryFiles []*models.GalleryFilesType
qb := models.NewImageQueryBuilder()
images, err := qb.FindByGalleryID(g.ID)
if err != nil {
return nil
}
for i, img := range images {
builder := urlbuilders.NewImageURLBuilder(baseURL, img.ID)
imageURL := builder.GetImageURL()
galleryFile := models.GalleryFilesType{
Index: i,
Name: &img.Title.String,
Path: &imageURL,
}
galleryFiles = append(galleryFiles, &galleryFile)
}
return galleryFiles
}

View file

@ -15,7 +15,6 @@ type Importer struct {
StudioWriter models.StudioReaderWriter
PerformerWriter models.PerformerReaderWriter
TagWriter models.TagReaderWriter
JoinWriter models.JoinReaderWriter
Input jsonschema.Gallery
MissingRefBehaviour models.ImportMissingRefEnum
@ -68,6 +67,7 @@ func (i *Importer) galleryJSONToGallery(galleryJSON jsonschema.Gallery) models.G
newGallery.Rating = sql.NullInt64{Int64: int64(galleryJSON.Rating), Valid: true}
}
newGallery.Organized = galleryJSON.Organized
newGallery.CreatedAt = models.SQLiteTimestamp{Timestamp: galleryJSON.CreatedAt.GetTime()}
newGallery.UpdatedAt = models.SQLiteTimestamp{Timestamp: galleryJSON.UpdatedAt.GetTime()}
@ -236,29 +236,22 @@ func (i *Importer) createTags(names []string) ([]*models.Tag, error) {
func (i *Importer) PostImport(id int) error {
if len(i.performers) > 0 {
var performerJoins []models.PerformersGalleries
var performerIDs []int
for _, performer := range i.performers {
join := models.PerformersGalleries{
PerformerID: performer.ID,
GalleryID: id,
}
performerJoins = append(performerJoins, join)
performerIDs = append(performerIDs, performer.ID)
}
if err := i.JoinWriter.UpdatePerformersGalleries(id, performerJoins); err != nil {
if err := i.ReaderWriter.UpdatePerformers(id, performerIDs); err != nil {
return fmt.Errorf("failed to associate performers: %s", err.Error())
}
}
if len(i.tags) > 0 {
var tagJoins []models.GalleriesTags
for _, tag := range i.tags {
join := models.GalleriesTags{
GalleryID: id,
TagID: tag.ID,
}
tagJoins = append(tagJoins, join)
var tagIDs []int
for _, t := range i.tags {
tagIDs = append(tagIDs, t.ID)
}
if err := i.JoinWriter.UpdateGalleriesTags(id, tagJoins); err != nil {
if err := i.ReaderWriter.UpdateTags(id, tagIDs); err != nil {
return fmt.Errorf("failed to associate tags: %s", err.Error())
}
}

View file

@ -8,7 +8,6 @@ import (
"github.com/stashapp/stash/pkg/manager/jsonschema"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stashapp/stash/pkg/models/modelstest"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
@ -56,13 +55,14 @@ func TestImporterName(t *testing.T) {
func TestImporterPreImport(t *testing.T) {
i := Importer{
Input: jsonschema.Gallery{
Path: path,
Checksum: checksum,
Title: title,
Date: date,
Details: details,
Rating: rating,
URL: url,
Path: path,
Checksum: checksum,
Title: title,
Date: date,
Details: details,
Rating: rating,
Organized: organized,
URL: url,
CreatedAt: models.JSONTime{
Time: createdAt,
},
@ -76,16 +76,17 @@ func TestImporterPreImport(t *testing.T) {
assert.Nil(t, err)
expectedGallery := models.Gallery{
Path: modelstest.NullString(path),
Path: models.NullString(path),
Checksum: checksum,
Title: modelstest.NullString(title),
Title: models.NullString(title),
Date: models.SQLiteDate{
String: date,
Valid: true,
},
Details: modelstest.NullString(details),
Rating: modelstest.NullInt64(rating),
URL: modelstest.NullString(url),
Details: models.NullString(details),
Rating: models.NullInt64(rating),
Organized: organized,
URL: models.NullString(url),
CreatedAt: models.SQLiteTimestamp{
Timestamp: createdAt,
},
@ -192,7 +193,7 @@ func TestImporterPreImportWithPerformer(t *testing.T) {
performerReaderWriter.On("FindByNames", []string{existingPerformerName}, false).Return([]*models.Performer{
{
ID: existingPerformerID,
Name: modelstest.NullString(existingPerformerName),
Name: models.NullString(existingPerformerName),
},
}, nil).Once()
performerReaderWriter.On("FindByNames", []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once()
@ -352,10 +353,10 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
}
func TestImporterPostImportUpdatePerformers(t *testing.T) {
joinReaderWriter := &mocks.JoinReaderWriter{}
galleryReaderWriter := &mocks.GalleryReaderWriter{}
i := Importer{
JoinWriter: joinReaderWriter,
ReaderWriter: galleryReaderWriter,
performers: []*models.Performer{
{
ID: existingPerformerID,
@ -363,15 +364,10 @@ func TestImporterPostImportUpdatePerformers(t *testing.T) {
},
}
updateErr := errors.New("UpdatePerformersGalleries error")
updateErr := errors.New("UpdatePerformers error")
joinReaderWriter.On("UpdatePerformersGalleries", galleryID, []models.PerformersGalleries{
{
PerformerID: existingPerformerID,
GalleryID: galleryID,
},
}).Return(nil).Once()
joinReaderWriter.On("UpdatePerformersGalleries", errPerformersID, mock.AnythingOfType("[]models.PerformersGalleries")).Return(updateErr).Once()
galleryReaderWriter.On("UpdatePerformers", galleryID, []int{existingPerformerID}).Return(nil).Once()
galleryReaderWriter.On("UpdatePerformers", errPerformersID, mock.AnythingOfType("[]int")).Return(updateErr).Once()
err := i.PostImport(galleryID)
assert.Nil(t, err)
@ -379,14 +375,14 @@ func TestImporterPostImportUpdatePerformers(t *testing.T) {
err = i.PostImport(errPerformersID)
assert.NotNil(t, err)
joinReaderWriter.AssertExpectations(t)
galleryReaderWriter.AssertExpectations(t)
}
func TestImporterPostImportUpdateTags(t *testing.T) {
joinReaderWriter := &mocks.JoinReaderWriter{}
galleryReaderWriter := &mocks.GalleryReaderWriter{}
i := Importer{
JoinWriter: joinReaderWriter,
ReaderWriter: galleryReaderWriter,
tags: []*models.Tag{
{
ID: existingTagID,
@ -394,15 +390,10 @@ func TestImporterPostImportUpdateTags(t *testing.T) {
},
}
updateErr := errors.New("UpdateGalleriesTags error")
updateErr := errors.New("UpdateTags error")
joinReaderWriter.On("UpdateGalleriesTags", galleryID, []models.GalleriesTags{
{
TagID: existingTagID,
GalleryID: galleryID,
},
}).Return(nil).Once()
joinReaderWriter.On("UpdateGalleriesTags", errTagsID, mock.AnythingOfType("[]models.GalleriesTags")).Return(updateErr).Once()
galleryReaderWriter.On("UpdateTags", galleryID, []int{existingTagID}).Return(nil).Once()
galleryReaderWriter.On("UpdateTags", errTagsID, mock.AnythingOfType("[]int")).Return(updateErr).Once()
err := i.PostImport(galleryID)
assert.Nil(t, err)
@ -410,7 +401,7 @@ func TestImporterPostImportUpdateTags(t *testing.T) {
err = i.PostImport(errTagsID)
assert.NotNil(t, err)
joinReaderWriter.AssertExpectations(t)
galleryReaderWriter.AssertExpectations(t)
}
func TestImporterFindExistingID(t *testing.T) {
@ -452,11 +443,11 @@ func TestCreate(t *testing.T) {
readerWriter := &mocks.GalleryReaderWriter{}
gallery := models.Gallery{
Title: modelstest.NullString(title),
Title: models.NullString(title),
}
galleryErr := models.Gallery{
Title: modelstest.NullString(galleryNameErr),
Title: models.NullString(galleryNameErr),
}
i := Importer{
@ -486,7 +477,7 @@ func TestUpdate(t *testing.T) {
readerWriter := &mocks.GalleryReaderWriter{}
gallery := models.Gallery{
Title: modelstest.NullString(title),
Title: models.NullString(title),
}
i := Importer{

pkg/gallery/update.go (new file, 23 lines)
View file

@ -0,0 +1,23 @@
package gallery
import (
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
func UpdateFileModTime(qb models.GalleryWriter, id int, modTime models.NullSQLiteTimestamp) (*models.Gallery, error) {
return qb.UpdatePartial(models.GalleryPartial{
ID: id,
FileModTime: &modTime,
})
}
func AddImage(qb models.GalleryReaderWriter, galleryID int, imageID int) error {
imageIDs, err := qb.GetImageIDs(galleryID)
if err != nil {
return err
}
imageIDs = utils.IntAppendUnique(imageIDs, imageID)
return qb.UpdateImages(galleryID, imageIDs)
}
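A brief sketch of how a caller might use AddImage inside a write transaction (hypothetical variables; repo.Gallery() is assumed to satisfy models.GalleryReaderWriter):

// Associate a newly scanned image with its zip gallery; IntAppendUnique keeps
// existing associations and avoids inserting a duplicate image ID.
if err := gallery.AddImage(repo.Gallery(), galleryID, imageID); err != nil {
	return err
}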

View file

@ -23,6 +23,7 @@ func ToBasicJSON(image *models.Image) *jsonschema.Image {
newImageJSON.Rating = int(image.Rating.Int64)
}
newImageJSON.Organized = image.Organized
newImageJSON.OCounter = image.OCounter
newImageJSON.File = getImageFileJSON(image)

View file

@ -6,7 +6,6 @@ import (
"github.com/stashapp/stash/pkg/manager/jsonschema"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stashapp/stash/pkg/models/modelstest"
"github.com/stretchr/testify/assert"
"testing"
@ -39,13 +38,14 @@ const (
)
const (
checksum = "checksum"
title = "title"
rating = 5
ocounter = 2
size = 123
width = 100
height = 100
checksum = "checksum"
title = "title"
rating = 5
organized = true
ocounter = 2
size = 123
width = 100
height = 100
)
const (
@ -63,14 +63,15 @@ var updateTime time.Time = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC)
func createFullImage(id int) models.Image {
return models.Image{
ID: id,
Title: modelstest.NullString(title),
Checksum: checksum,
Height: modelstest.NullInt64(height),
OCounter: ocounter,
Rating: modelstest.NullInt64(rating),
Size: modelstest.NullInt64(int64(size)),
Width: modelstest.NullInt64(width),
ID: id,
Title: models.NullString(title),
Checksum: checksum,
Height: models.NullInt64(height),
OCounter: ocounter,
Rating: models.NullInt64(rating),
Size: models.NullInt64(int64(size)),
Organized: organized,
Width: models.NullInt64(width),
CreatedAt: models.SQLiteTimestamp{
Timestamp: createTime,
},
@ -94,10 +95,11 @@ func createEmptyImage(id int) models.Image {
func createFullJSONImage() *jsonschema.Image {
return &jsonschema.Image{
Title: title,
Checksum: checksum,
OCounter: ocounter,
Rating: rating,
Title: title,
Checksum: checksum,
OCounter: ocounter,
Rating: rating,
Organized: organized,
File: &jsonschema.ImageFile{
Height: height,
Size: size,
@ -147,7 +149,7 @@ func TestToJSON(t *testing.T) {
func createStudioImage(studioID int) models.Image {
return models.Image{
StudioID: modelstest.NullInt64(int64(studioID)),
StudioID: models.NullInt64(int64(studioID)),
}
}
@ -181,7 +183,7 @@ func TestGetStudioName(t *testing.T) {
studioErr := errors.New("error getting image")
mockStudioReader.On("Find", studioID).Return(&models.Studio{
Name: modelstest.NullString(studioName),
Name: models.NullString(studioName),
}, nil).Once()
mockStudioReader.On("Find", missingStudioID).Return(nil, nil).Once()
mockStudioReader.On("Find", errStudioID).Return(nil, studioErr).Once()

View file

@ -16,7 +16,6 @@ type Importer struct {
GalleryWriter models.GalleryReaderWriter
PerformerWriter models.PerformerReaderWriter
TagWriter models.TagReaderWriter
JoinWriter models.JoinReaderWriter
Input jsonschema.Image
Path string
MissingRefBehaviour models.ImportMissingRefEnum
@ -63,6 +62,7 @@ func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image {
newImage.Rating = sql.NullInt64{Int64: int64(imageJSON.Rating), Valid: true}
}
newImage.Organized = imageJSON.Organized
newImage.OCounter = imageJSON.OCounter
newImage.CreatedAt = models.SQLiteTimestamp{Timestamp: imageJSON.CreatedAt.GetTime()}
newImage.UpdatedAt = models.SQLiteTimestamp{Timestamp: imageJSON.UpdatedAt.GetTime()}
@ -226,43 +226,33 @@ func (i *Importer) populateTags() error {
func (i *Importer) PostImport(id int) error {
if len(i.galleries) > 0 {
var galleryJoins []models.GalleriesImages
for _, gallery := range i.galleries {
join := models.GalleriesImages{
GalleryID: gallery.ID,
ImageID: id,
}
galleryJoins = append(galleryJoins, join)
var galleryIDs []int
for _, g := range i.galleries {
galleryIDs = append(galleryIDs, g.ID)
}
if err := i.JoinWriter.UpdateGalleriesImages(id, galleryJoins); err != nil {
if err := i.ReaderWriter.UpdateGalleries(id, galleryIDs); err != nil {
return fmt.Errorf("failed to associate galleries: %s", err.Error())
}
}
if len(i.performers) > 0 {
var performerJoins []models.PerformersImages
var performerIDs []int
for _, performer := range i.performers {
join := models.PerformersImages{
PerformerID: performer.ID,
ImageID: id,
}
performerJoins = append(performerJoins, join)
performerIDs = append(performerIDs, performer.ID)
}
if err := i.JoinWriter.UpdatePerformersImages(id, performerJoins); err != nil {
if err := i.ReaderWriter.UpdatePerformers(id, performerIDs); err != nil {
return fmt.Errorf("failed to associate performers: %s", err.Error())
}
}
if len(i.tags) > 0 {
var tagJoins []models.ImagesTags
for _, tag := range i.tags {
join := models.ImagesTags{
ImageID: id,
TagID: tag.ID,
}
tagJoins = append(tagJoins, join)
var tagIDs []int
for _, t := range i.tags {
tagIDs = append(tagIDs, t.ID)
}
if err := i.JoinWriter.UpdateImagesTags(id, tagJoins); err != nil {
if err := i.ReaderWriter.UpdateTags(id, tagIDs); err != nil {
return fmt.Errorf("failed to associate tags: %s", err.Error())
}
}

View file

@ -7,7 +7,6 @@ import (
"github.com/stashapp/stash/pkg/manager/jsonschema"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stashapp/stash/pkg/models/modelstest"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
@ -227,7 +226,7 @@ func TestImporterPreImportWithPerformer(t *testing.T) {
performerReaderWriter.On("FindByNames", []string{existingPerformerName}, false).Return([]*models.Performer{
{
ID: existingPerformerID,
Name: modelstest.NullString(existingPerformerName),
Name: models.NullString(existingPerformerName),
},
}, nil).Once()
performerReaderWriter.On("FindByNames", []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once()
@ -387,10 +386,10 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
}
func TestImporterPostImportUpdateGallery(t *testing.T) {
joinReaderWriter := &mocks.JoinReaderWriter{}
readerWriter := &mocks.ImageReaderWriter{}
i := Importer{
JoinWriter: joinReaderWriter,
ReaderWriter: readerWriter,
galleries: []*models.Gallery{
{
ID: existingGalleryID,
@ -398,15 +397,10 @@ func TestImporterPostImportUpdateGallery(t *testing.T) {
},
}
updateErr := errors.New("UpdateGalleriesImages error")
updateErr := errors.New("UpdateGalleries error")
joinReaderWriter.On("UpdateGalleriesImages", imageID, []models.GalleriesImages{
{
GalleryID: existingGalleryID,
ImageID: imageID,
},
}).Return(nil).Once()
joinReaderWriter.On("UpdateGalleriesImages", errGalleriesID, mock.AnythingOfType("[]models.GalleriesImages")).Return(updateErr).Once()
readerWriter.On("UpdateGalleries", imageID, []int{existingGalleryID}).Return(nil).Once()
readerWriter.On("UpdateGalleries", errGalleriesID, mock.AnythingOfType("[]int")).Return(updateErr).Once()
err := i.PostImport(imageID)
assert.Nil(t, err)
@ -414,14 +408,14 @@ func TestImporterPostImportUpdateGallery(t *testing.T) {
err = i.PostImport(errGalleriesID)
assert.NotNil(t, err)
joinReaderWriter.AssertExpectations(t)
readerWriter.AssertExpectations(t)
}
func TestImporterPostImportUpdatePerformers(t *testing.T) {
joinReaderWriter := &mocks.JoinReaderWriter{}
readerWriter := &mocks.ImageReaderWriter{}
i := Importer{
JoinWriter: joinReaderWriter,
ReaderWriter: readerWriter,
performers: []*models.Performer{
{
ID: existingPerformerID,
@ -429,15 +423,10 @@ func TestImporterPostImportUpdatePerformers(t *testing.T) {
},
}
updateErr := errors.New("UpdatePerformersImages error")
updateErr := errors.New("UpdatePerformers error")
joinReaderWriter.On("UpdatePerformersImages", imageID, []models.PerformersImages{
{
PerformerID: existingPerformerID,
ImageID: imageID,
},
}).Return(nil).Once()
joinReaderWriter.On("UpdatePerformersImages", errPerformersID, mock.AnythingOfType("[]models.PerformersImages")).Return(updateErr).Once()
readerWriter.On("UpdatePerformers", imageID, []int{existingPerformerID}).Return(nil).Once()
readerWriter.On("UpdatePerformers", errPerformersID, mock.AnythingOfType("[]int")).Return(updateErr).Once()
err := i.PostImport(imageID)
assert.Nil(t, err)
@ -445,14 +434,14 @@ func TestImporterPostImportUpdatePerformers(t *testing.T) {
err = i.PostImport(errPerformersID)
assert.NotNil(t, err)
joinReaderWriter.AssertExpectations(t)
readerWriter.AssertExpectations(t)
}
func TestImporterPostImportUpdateTags(t *testing.T) {
joinReaderWriter := &mocks.JoinReaderWriter{}
readerWriter := &mocks.ImageReaderWriter{}
i := Importer{
JoinWriter: joinReaderWriter,
ReaderWriter: readerWriter,
tags: []*models.Tag{
{
ID: existingTagID,
@ -460,15 +449,10 @@ func TestImporterPostImportUpdateTags(t *testing.T) {
},
}
updateErr := errors.New("UpdateImagesTags error")
updateErr := errors.New("UpdateTags error")
joinReaderWriter.On("UpdateImagesTags", imageID, []models.ImagesTags{
{
TagID: existingTagID,
ImageID: imageID,
},
}).Return(nil).Once()
joinReaderWriter.On("UpdateImagesTags", errTagsID, mock.AnythingOfType("[]models.ImagesTags")).Return(updateErr).Once()
readerWriter.On("UpdateTags", imageID, []int{existingTagID}).Return(nil).Once()
readerWriter.On("UpdateTags", errTagsID, mock.AnythingOfType("[]int")).Return(updateErr).Once()
err := i.PostImport(imageID)
assert.Nil(t, err)
@ -476,7 +460,7 @@ func TestImporterPostImportUpdateTags(t *testing.T) {
err = i.PostImport(errTagsID)
assert.NotNil(t, err)
joinReaderWriter.AssertExpectations(t)
readerWriter.AssertExpectations(t)
}
func TestImporterFindExistingID(t *testing.T) {
@ -518,11 +502,11 @@ func TestCreate(t *testing.T) {
readerWriter := &mocks.ImageReaderWriter{}
image := models.Image{
Title: modelstest.NullString(title),
Title: models.NullString(title),
}
imageErr := models.Image{
Title: modelstest.NullString(imageNameErr),
Title: models.NullString(imageNameErr),
}
i := Importer{
@ -553,11 +537,11 @@ func TestUpdate(t *testing.T) {
readerWriter := &mocks.ImageReaderWriter{}
image := models.Image{
Title: modelstest.NullString(title),
Title: models.NullString(title),
}
imageErr := models.Image{
Title: modelstest.NullString(imageNameErr),
Title: models.NullString(imageNameErr),
}
i := Importer{

pkg/image/update.go (new file, 10 lines)
View file

@ -0,0 +1,10 @@
package image
import "github.com/stashapp/stash/pkg/models"
func UpdateFileModTime(qb models.ImageWriter, id int, modTime models.NullSQLiteTimestamp) (*models.Image, error) {
return qb.Update(models.ImagePartial{
ID: id,
FileModTime: &modTime,
})
}

View file

@ -1,6 +1,7 @@
package manager
import (
"context"
"errors"
"github.com/spf13/viper"
@ -9,13 +10,16 @@ import (
"github.com/stashapp/stash/pkg/models"
)
func setInitialMD5Config() {
func setInitialMD5Config(txnManager models.TransactionManager) {
// if there are no scene files in the database, then default the
// VideoFileNamingAlgorithm config setting to oshash and calculateMD5 to
// false, otherwise set them to true for backwards compatibility purposes
sqb := models.NewSceneQueryBuilder()
count, err := sqb.Count()
if err != nil {
var count int
if err := txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
var err error
count, err = r.Scene().Count()
return err
}); err != nil {
logger.Errorf("Error while counting scenes: %s", err.Error())
return
}
@ -43,28 +47,30 @@ func setInitialMD5Config() {
//
// Likewise, if VideoFileNamingAlgorithm is set to oshash, then this function
// will ensure that all oshash values are set on all scenes.
func ValidateVideoFileNamingAlgorithm(newValue models.HashAlgorithm) error {
func ValidateVideoFileNamingAlgorithm(txnManager models.TransactionManager, newValue models.HashAlgorithm) error {
// if algorithm is being set to MD5, then all checksums must be present
qb := models.NewSceneQueryBuilder()
if newValue == models.HashAlgorithmMd5 {
missingMD5, err := qb.CountMissingChecksum()
if err != nil {
return err
return txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
qb := r.Scene()
if newValue == models.HashAlgorithmMd5 {
missingMD5, err := qb.CountMissingChecksum()
if err != nil {
return err
}
if missingMD5 > 0 {
return errors.New("some checksums are missing on scenes. Run Scan with calculateMD5 set to true")
}
} else if newValue == models.HashAlgorithmOshash {
missingOSHash, err := qb.CountMissingOSHash()
if err != nil {
return err
}
if missingOSHash > 0 {
return errors.New("some oshash values are missing on scenes. Run Scan to populate")
}
}
if missingMD5 > 0 {
return errors.New("some checksums are missing on scenes. Run Scan with calculateMD5 set to true")
}
} else if newValue == models.HashAlgorithmOshash {
missingOSHash, err := qb.CountMissingOSHash()
if err != nil {
return err
}
if missingOSHash > 0 {
return errors.New("some oshash values are missing on scenes. Run Scan to populate")
}
}
return nil
return nil
})
}

View file

@ -2,10 +2,12 @@ package config
import (
"golang.org/x/crypto/bcrypt"
"runtime"
"errors"
"io/ioutil"
"path/filepath"
"regexp"
"github.com/spf13/viper"
@ -56,6 +58,9 @@ const PreviewPreset = "preview_preset"
const MaxTranscodeSize = "max_transcode_size"
const MaxStreamingTranscodeSize = "max_streaming_transcode_size"
const ParallelTasks = "parallel_tasks"
const parallelTasksDefault = 1
const PreviewSegmentDuration = "preview_segment_duration"
const previewSegmentDurationDefault = 0.75
@ -97,6 +102,10 @@ const Language = "language"
const CustomServedFolders = "custom_served_folders"
// Interface options
const MenuItems = "menu_items"
var defaultMenuItems = []string{"scenes", "images", "movies", "markers", "galleries", "performers", "studios", "tags"}
const SoundOnPreview = "sound_on_preview"
const WallShowTitle = "wall_show_title"
const MaximumLoopDuration = "maximum_loop_duration"
@ -111,6 +120,9 @@ const LogOut = "logOut"
const LogLevel = "logLevel"
const LogAccess = "logAccess"
// File upload options
const MaxUploadSize = "max_upload_size"
func Set(key string, value interface{}) {
viper.Set(key, value)
}
@ -297,6 +309,20 @@ func GetPreviewSegmentDuration() float64 {
return viper.GetFloat64(PreviewSegmentDuration)
}
// GetParallelTasks returns the number of parallel tasks that should be started
// by the scan or generate tasks.
func GetParallelTasks() int {
return viper.GetInt(ParallelTasks)
}
func GetParallelTasksWithAutoDetection() int {
parallelTasks := viper.GetInt(ParallelTasks)
if parallelTasks <= 0 {
parallelTasks = (runtime.NumCPU() / 4) + 1
}
return parallelTasks
}
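As a worked example of the auto-detection: on an 8-core machine the fallback is 8/4 + 1 = 3 parallel tasks, and on a dual-core machine 2/4 + 1 = 1, while any explicitly configured positive value is used as-is.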
// GetPreviewSegments returns the amount of segments in a scene preview file.
func GetPreviewSegments() int {
return viper.GetInt(PreviewSegments)
@ -405,11 +431,18 @@ func ValidateCredentials(username string, password string) bool {
func ValidateStashBoxes(boxes []*models.StashBoxInput) error {
isMulti := len(boxes) > 1
re, err := regexp.Compile("^http.*graphql$")
if err != nil {
return errors.New("Failure to generate regular expression")
}
for _, box := range boxes {
if box.APIKey == "" {
return errors.New("Stash-box API Key cannot be blank")
} else if box.Endpoint == "" {
return errors.New("Stash-box Endpoint cannot be blank")
} else if !re.Match([]byte(box.Endpoint)) {
return errors.New("Stash-box Endpoint is invalid")
} else if isMulti && box.Name == "" {
return errors.New("Stash-box Name cannot be blank")
}
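For example, an endpoint such as https://example.org/graphql satisfies the ^http.*graphql$ pattern, while https://example.org/api (wrong suffix) or example.org/graphql (missing the http prefix) is rejected as invalid.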
@ -431,6 +464,13 @@ func GetCustomServedFolders() URLMap {
}
// Interface options
func GetMenuItems() []string {
if viper.IsSet(MenuItems) {
return viper.GetStringSlice(MenuItems)
}
return defaultMenuItems
}
func GetSoundOnPreview() bool {
viper.SetDefault(SoundOnPreview, true)
return viper.GetBool(SoundOnPreview)
@ -542,6 +582,15 @@ func GetLogAccess() bool {
return ret
}
// Max allowed graphql upload size in megabytes
func GetMaxUploadSize() int64 {
ret := int64(1024)
if viper.IsSet(MaxUploadSize) {
ret = viper.GetInt64(MaxUploadSize)
}
return ret << 20
}
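As a worked example: with nothing configured, the default of 1024 is shifted left by 20 bits, i.e. 1024 MiB (1 GiB); setting max_upload_size to 100 would allow 100 << 20 = 104857600 bytes.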
func IsValid() bool {
setPaths := viper.IsSet(Stash) && viper.IsSet(Cache) && viper.IsSet(Generated) && viper.IsSet(Metadata)
@ -550,6 +599,7 @@ func IsValid() bool {
}
func setDefaultValues() {
viper.SetDefault(ParallelTasks, parallelTasksDefault)
viper.SetDefault(PreviewSegmentDuration, previewSegmentDurationDefault)
viper.SetDefault(PreviewSegments, previewSegmentsDefault)
viper.SetDefault(PreviewExcludeStart, previewExcludeStartDefault)

View file

@ -433,12 +433,6 @@ type SceneFilenameParser struct {
studioCache map[string]*models.Studio
movieCache map[string]*models.Movie
tagCache map[string]*models.Tag
performerQuery performerQueryer
sceneQuery sceneQueryer
tagQuery tagQueryer
studioQuery studioQueryer
movieQuery movieQueryer
}
func NewSceneFilenameParser(filter *models.FindFilterType, config models.SceneParserInput) *SceneFilenameParser {
@ -455,21 +449,6 @@ func NewSceneFilenameParser(filter *models.FindFilterType, config models.ScenePa
p.initWhiteSpaceRegex()
performerQuery := models.NewPerformerQueryBuilder()
p.performerQuery = &performerQuery
sceneQuery := models.NewSceneQueryBuilder()
p.sceneQuery = &sceneQuery
tagQuery := models.NewTagQueryBuilder()
p.tagQuery = &tagQuery
studioQuery := models.NewStudioQueryBuilder()
p.studioQuery = &studioQuery
movieQuery := models.NewMovieQueryBuilder()
p.movieQuery = &movieQuery
return p
}
@ -489,7 +468,7 @@ func (p *SceneFilenameParser) initWhiteSpaceRegex() {
}
}
func (p *SceneFilenameParser) Parse() ([]*models.SceneParserResult, int, error) {
func (p *SceneFilenameParser) Parse(repo models.ReaderRepository) ([]*models.SceneParserResult, int, error) {
// perform the query to find the scenes
mapper, err := newParseMapper(p.Pattern, p.ParserInput.IgnoreWords)
@ -497,16 +476,26 @@ func (p *SceneFilenameParser) Parse() ([]*models.SceneParserResult, int, error)
return nil, 0, err
}
p.Filter.Q = &mapper.regexString
sceneFilter := &models.SceneFilterType{
Path: &models.StringCriterionInput{
Modifier: models.CriterionModifierMatchesRegex,
Value: "(?i)" + mapper.regexString,
},
}
scenes, total := p.sceneQuery.QueryByPathRegex(p.Filter)
p.Filter.Q = nil
ret := p.parseScenes(scenes, mapper)
scenes, total, err := repo.Scene().Query(sceneFilter, p.Filter)
if err != nil {
return nil, 0, err
}
ret := p.parseScenes(repo, scenes, mapper)
return ret, total, nil
}
func (p *SceneFilenameParser) parseScenes(scenes []*models.Scene, mapper *parseMapper) []*models.SceneParserResult {
func (p *SceneFilenameParser) parseScenes(repo models.ReaderRepository, scenes []*models.Scene, mapper *parseMapper) []*models.SceneParserResult {
var ret []*models.SceneParserResult
for _, scene := range scenes {
sceneHolder := mapper.parse(scene)
@ -515,7 +504,7 @@ func (p *SceneFilenameParser) parseScenes(scenes []*models.Scene, mapper *parseM
r := &models.SceneParserResult{
Scene: scene,
}
p.setParserResult(*sceneHolder, r)
p.setParserResult(repo, *sceneHolder, r)
if r != nil {
ret = append(ret, r)
@ -536,7 +525,7 @@ func (p SceneFilenameParser) replaceWhitespaceCharacters(value string) string {
return value
}
func (p *SceneFilenameParser) queryPerformer(performerName string) *models.Performer {
func (p *SceneFilenameParser) queryPerformer(qb models.PerformerReader, performerName string) *models.Performer {
// massage the performer name
performerName = delimiterRE.ReplaceAllString(performerName, " ")
@ -546,7 +535,7 @@ func (p *SceneFilenameParser) queryPerformer(performerName string) *models.Perfo
}
// perform an exact match and grab the first
performers, _ := p.performerQuery.FindByNames([]string{performerName}, nil, true)
performers, _ := qb.FindByNames([]string{performerName}, true)
var ret *models.Performer
if len(performers) > 0 {
@ -559,7 +548,7 @@ func (p *SceneFilenameParser) queryPerformer(performerName string) *models.Perfo
return ret
}
func (p *SceneFilenameParser) queryStudio(studioName string) *models.Studio {
func (p *SceneFilenameParser) queryStudio(qb models.StudioReader, studioName string) *models.Studio {
// massage the studio name
studioName = delimiterRE.ReplaceAllString(studioName, " ")
@ -568,7 +557,7 @@ func (p *SceneFilenameParser) queryStudio(studioName string) *models.Studio {
return ret
}
ret, _ := p.studioQuery.FindByName(studioName, nil, true)
ret, _ := qb.FindByName(studioName, true)
// add result to cache
p.studioCache[studioName] = ret
@ -576,7 +565,7 @@ func (p *SceneFilenameParser) queryStudio(studioName string) *models.Studio {
return ret
}
func (p *SceneFilenameParser) queryMovie(movieName string) *models.Movie {
func (p *SceneFilenameParser) queryMovie(qb models.MovieReader, movieName string) *models.Movie {
// massage the movie name
movieName = delimiterRE.ReplaceAllString(movieName, " ")
@ -585,7 +574,7 @@ func (p *SceneFilenameParser) queryMovie(movieName string) *models.Movie {
return ret
}
ret, _ := p.movieQuery.FindByName(movieName, nil, true)
ret, _ := qb.FindByName(movieName, true)
// add result to cache
p.movieCache[movieName] = ret
@ -593,7 +582,7 @@ func (p *SceneFilenameParser) queryMovie(movieName string) *models.Movie {
return ret
}
func (p *SceneFilenameParser) queryTag(tagName string) *models.Tag {
func (p *SceneFilenameParser) queryTag(qb models.TagReader, tagName string) *models.Tag {
// massage the tag name
tagName = delimiterRE.ReplaceAllString(tagName, " ")
@ -603,7 +592,7 @@ func (p *SceneFilenameParser) queryTag(tagName string) *models.Tag {
}
// match tag name exactly
ret, _ := p.tagQuery.FindByName(tagName, nil, true)
ret, _ := qb.FindByName(tagName, true)
// add result to cache
p.tagCache[tagName] = ret
@ -611,12 +600,12 @@ func (p *SceneFilenameParser) queryTag(tagName string) *models.Tag {
return ret
}
func (p *SceneFilenameParser) setPerformers(h sceneHolder, result *models.SceneParserResult) {
func (p *SceneFilenameParser) setPerformers(qb models.PerformerReader, h sceneHolder, result *models.SceneParserResult) {
// query for each performer
performersSet := make(map[int]bool)
for _, performerName := range h.performers {
if performerName != "" {
performer := p.queryPerformer(performerName)
performer := p.queryPerformer(qb, performerName)
if performer != nil {
if _, found := performersSet[performer.ID]; !found {
result.PerformerIds = append(result.PerformerIds, strconv.Itoa(performer.ID))
@ -627,12 +616,12 @@ func (p *SceneFilenameParser) setPerformers(h sceneHolder, result *models.SceneP
}
}
func (p *SceneFilenameParser) setTags(h sceneHolder, result *models.SceneParserResult) {
func (p *SceneFilenameParser) setTags(qb models.TagReader, h sceneHolder, result *models.SceneParserResult) {
// query for each tag
tagsSet := make(map[int]bool)
for _, tagName := range h.tags {
if tagName != "" {
tag := p.queryTag(tagName)
tag := p.queryTag(qb, tagName)
if tag != nil {
if _, found := tagsSet[tag.ID]; !found {
result.TagIds = append(result.TagIds, strconv.Itoa(tag.ID))
@ -643,23 +632,23 @@ func (p *SceneFilenameParser) setTags(h sceneHolder, result *models.SceneParserR
}
}
func (p *SceneFilenameParser) setStudio(h sceneHolder, result *models.SceneParserResult) {
func (p *SceneFilenameParser) setStudio(qb models.StudioReader, h sceneHolder, result *models.SceneParserResult) {
// query for the studio
if h.studio != "" {
studio := p.queryStudio(h.studio)
studio := p.queryStudio(qb, h.studio)
if studio != nil {
studioId := strconv.Itoa(studio.ID)
result.StudioID = &studioId
studioID := strconv.Itoa(studio.ID)
result.StudioID = &studioID
}
}
}
func (p *SceneFilenameParser) setMovies(h sceneHolder, result *models.SceneParserResult) {
func (p *SceneFilenameParser) setMovies(qb models.MovieReader, h sceneHolder, result *models.SceneParserResult) {
// query for each movie
moviesSet := make(map[int]bool)
for _, movieName := range h.movies {
if movieName != "" {
movie := p.queryMovie(movieName)
movie := p.queryMovie(qb, movieName)
if movie != nil {
if _, found := moviesSet[movie.ID]; !found {
result.Movies = append(result.Movies, &models.SceneMovieID{
@ -672,7 +661,7 @@ func (p *SceneFilenameParser) setMovies(h sceneHolder, result *models.SceneParse
}
}
func (p *SceneFilenameParser) setParserResult(h sceneHolder, result *models.SceneParserResult) {
func (p *SceneFilenameParser) setParserResult(repo models.ReaderRepository, h sceneHolder, result *models.SceneParserResult) {
if h.result.Title.Valid {
title := h.result.Title.String
title = p.replaceWhitespaceCharacters(title)
@ -694,15 +683,15 @@ func (p *SceneFilenameParser) setParserResult(h sceneHolder, result *models.Scen
}
if len(h.performers) > 0 {
p.setPerformers(h, result)
p.setPerformers(repo.Performer(), h, result)
}
if len(h.tags) > 0 {
p.setTags(h, result)
p.setTags(repo.Tag(), h, result)
}
p.setStudio(h, result)
p.setStudio(repo.Studio(), h, result)
if len(h.movies) > 0 {
p.setMovies(h, result)
p.setMovies(repo.Movie(), h, result)
}
}
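The parser helpers above now take reader interfaces (models.PerformerReader, models.StudioReader, models.MovieReader, models.TagReader) instead of building their own query builders, and setParserResult threads a models.ReaderRepository through to them. A minimal sketch of how this is meant to be driven from inside a read transaction, assuming a parser p, a sceneHolder h and a transaction manager are already in scope (the surrounding wiring here is illustrative, not a quote of the caller):

// Sketch: populate a parser result using repository readers supplied by a read transaction.
var result models.SceneParserResult
if err := txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
	// setParserResult passes r.Performer(), r.Tag(), r.Studio() and r.Movie()
	// down to the query* helpers, which cache lookups per name.
	p.setParserResult(r, h, &result)
	return nil
}); err != nil {
	logger.Error(err.Error())
}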

View file

@ -14,6 +14,7 @@ import (
type PreviewGenerator struct {
Info *GeneratorInfo
VideoChecksum string
VideoFilename string
ImageFilename string
OutputDirectory string
@ -26,7 +27,7 @@ type PreviewGenerator struct {
Overwrite bool
}
func NewPreviewGenerator(videoFile ffmpeg.VideoFile, videoFilename string, imageFilename string, outputDirectory string, generateVideo bool, generateImage bool, previewPreset string) (*PreviewGenerator, error) {
func NewPreviewGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, videoFilename string, imageFilename string, outputDirectory string, generateVideo bool, generateImage bool, previewPreset string) (*PreviewGenerator, error) {
exists, err := utils.FileExists(videoFile.Path)
if !exists {
return nil, err
@ -39,6 +40,7 @@ func NewPreviewGenerator(videoFile ffmpeg.VideoFile, videoFilename string, image
return &PreviewGenerator{
Info: generator,
VideoChecksum: videoChecksum,
VideoFilename: videoFilename,
ImageFilename: imageFilename,
OutputDirectory: outputDirectory,
@ -87,7 +89,7 @@ func (g *PreviewGenerator) generateConcatFile() error {
w := bufio.NewWriter(f)
for i := 0; i < g.Info.ChunkCount; i++ {
num := fmt.Sprintf("%.3d", i)
filename := "preview" + num + ".mp4"
filename := "preview_" + g.VideoChecksum + "_" + num + ".mp4"
_, _ = w.WriteString(fmt.Sprintf("file '%s'\n", filename))
}
return w.Flush()
@ -105,7 +107,7 @@ func (g *PreviewGenerator) generateVideo(encoder *ffmpeg.Encoder, fallback bool)
for i := 0; i < g.Info.ChunkCount; i++ {
time := offset + (float64(i) * stepSize)
num := fmt.Sprintf("%.3d", i)
filename := "preview" + num + ".mp4"
filename := "preview_" + g.VideoChecksum + "_" + num + ".mp4"
chunkOutputPath := instance.Paths.Generated.GetTmpPath(filename)
options := ffmpeg.ScenePreviewChunkOptions{
@ -148,5 +150,5 @@ func (g *PreviewGenerator) generateImage(encoder *ffmpeg.Encoder) error {
}
func (g *PreviewGenerator) getConcatFilePath() string {
return instance.Paths.Generated.GetTmpPath("files.txt")
return instance.Paths.Generated.GetTmpPath(fmt.Sprintf("files_%s.txt", g.VideoChecksum))
}
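The video checksum is now folded into every temporary chunk name and into the concat file name, so previews for different scenes can be generated side by side without overwriting each other's chunks in the shared tmp directory. A small sketch of the naming scheme (chunkFilename is an illustrative helper, not part of the generator):

// Builds the per-scene chunk name used above, e.g. "preview_<checksum>_007.mp4".
func chunkFilename(videoChecksum string, index int) string {
	return fmt.Sprintf("preview_%s_%.3d.mp4", videoChecksum, index)
}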

View file

@ -8,10 +8,12 @@ import (
"math"
"os"
"path/filepath"
"sort"
"strings"
"github.com/bmatcuk/doublestar/v2"
"github.com/disintegration/imaging"
"github.com/fvbommel/sortorder"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/utils"
@ -20,6 +22,7 @@ import (
type SpriteGenerator struct {
Info *GeneratorInfo
VideoChecksum string
ImageOutputPath string
VTTOutputPath string
Rows int
@ -28,7 +31,7 @@ type SpriteGenerator struct {
Overwrite bool
}
func NewSpriteGenerator(videoFile ffmpeg.VideoFile, imageOutputPath string, vttOutputPath string, rows int, cols int) (*SpriteGenerator, error) {
func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageOutputPath string, vttOutputPath string, rows int, cols int) (*SpriteGenerator, error) {
exists, err := utils.FileExists(videoFile.Path)
if !exists {
return nil, err
@ -44,6 +47,7 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, imageOutputPath string, vttO
return &SpriteGenerator{
Info: generator,
VideoChecksum: videoChecksum,
ImageOutputPath: imageOutputPath,
VTTOutputPath: vttOutputPath,
Rows: rows,
@ -74,7 +78,7 @@ func (g *SpriteGenerator) generateSpriteImage(encoder *ffmpeg.Encoder) error {
for i := 0; i < g.Info.ChunkCount; i++ {
time := float64(i) * stepSize
num := fmt.Sprintf("%.3d", i)
filename := "thumbnail" + num + ".jpg"
filename := "thumbnail_" + g.VideoChecksum + "_" + num + ".jpg"
options := ffmpeg.ScreenshotOptions{
OutputPath: instance.Paths.Generated.GetTmpPath(filename),
@ -85,9 +89,12 @@ func (g *SpriteGenerator) generateSpriteImage(encoder *ffmpeg.Encoder) error {
}
// Combine all of the thumbnails into a sprite image
globPath := filepath.Join(instance.Paths.Generated.Tmp, "thumbnail*.jpg")
imagePaths, _ := doublestar.Glob(globPath)
utils.NaturalSort(imagePaths)
pattern := fmt.Sprintf("thumbnail_%s_.+\\.jpg$", g.VideoChecksum)
imagePaths, err := utils.MatchEntries(instance.Paths.Generated.Tmp, pattern)
if err != nil {
return err
}
sort.Sort(sortorder.Natural(imagePaths))
var images []image.Image
for _, imagePath := range imagePaths {
img, err := imaging.Open(imagePath)
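The thumbnail chunks are now collected with a checksum-scoped pattern and ordered with a natural sort from github.com/fvbommel/sortorder rather than a plain glob. The real chunk names are zero-padded, so this mostly documents the API being pulled in; a standalone sketch with made-up names:

package main

import (
	"fmt"
	"sort"

	"github.com/fvbommel/sortorder"
)

func main() {
	paths := []string{"thumbnail_abc_10.jpg", "thumbnail_abc_2.jpg", "thumbnail_abc_1.jpg"}
	// Natural ordering compares the numeric runs by value, not byte by byte.
	sort.Sort(sortorder.Natural(paths))
	fmt.Println(paths) // [thumbnail_abc_1.jpg thumbnail_abc_2.jpg thumbnail_abc_10.jpg]
}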

View file

@ -5,43 +5,11 @@ import (
"os"
"strings"
"github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
// DestroyImage deletes an image and its associated relationships from the
// database.
func DestroyImage(imageID int, tx *sqlx.Tx) error {
qb := models.NewImageQueryBuilder()
jqb := models.NewJoinsQueryBuilder()
_, err := qb.Find(imageID)
if err != nil {
return err
}
if err := jqb.DestroyImagesTags(imageID, tx); err != nil {
return err
}
if err := jqb.DestroyPerformersImages(imageID, tx); err != nil {
return err
}
if err := jqb.DestroyImageGalleries(imageID, tx); err != nil {
return err
}
if err := qb.Destroy(imageID, tx); err != nil {
return err
}
return nil
}
// DeleteGeneratedImageFiles deletes generated files for the provided image.
func DeleteGeneratedImageFiles(image *models.Image) {
thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(image.Checksum, models.DefaultGthumbWidth)

View file

@ -17,6 +17,7 @@ type Gallery struct {
Date string `json:"date,omitempty"`
Details string `json:"details,omitempty"`
Rating int `json:"rating,omitempty"`
Organized bool `json:"organized,omitempty"`
Studio string `json:"studio,omitempty"`
Performers []string `json:"performers,omitempty"`
Tags []string `json:"tags,omitempty"`

View file

@ -20,6 +20,7 @@ type Image struct {
Checksum string `json:"checksum,omitempty"`
Studio string `json:"studio,omitempty"`
Rating int `json:"rating,omitempty"`
Organized bool `json:"organized,omitempty"`
OCounter int `json:"o_counter,omitempty"`
Galleries []string `json:"galleries,omitempty"`
Performers []string `json:"performers,omitempty"`

View file

@ -43,9 +43,10 @@ type Scene struct {
URL string `json:"url,omitempty"`
Date string `json:"date,omitempty"`
Rating int `json:"rating,omitempty"`
Organized bool `json:"organized,omitempty"`
OCounter int `json:"o_counter,omitempty"`
Details string `json:"details,omitempty"`
Gallery string `json:"gallery,omitempty"`
Galleries []string `json:"galleries,omitempty"`
Performers []string `json:"performers,omitempty"`
Movies []SceneMovie `json:"movies,omitempty"`
Tags []string `json:"tags,omitempty"`
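The new Organized field uses omitempty on a bool, so scenes, images and galleries that are not organized simply drop the key from the exported JSON. A tiny self-contained illustration of that encoding behaviour (struct trimmed to the one field):

package main

import (
	"encoding/json"
	"fmt"
)

type scene struct {
	Organized bool `json:"organized,omitempty"`
}

func main() {
	a, _ := json.Marshal(scene{Organized: false})
	b, _ := json.Marshal(scene{Organized: true})
	fmt.Println(string(a)) // {} (false is omitted)
	fmt.Println(string(b)) // {"organized":true}
}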

View file

@ -10,8 +10,10 @@ import (
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/manager/paths"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin"
"github.com/stashapp/stash/pkg/scraper"
"github.com/stashapp/stash/pkg/sqlite"
"github.com/stashapp/stash/pkg/utils"
)
@ -26,6 +28,8 @@ type singleton struct {
ScraperCache *scraper.Cache
DownloadStore *DownloadStore
TxnManager models.TransactionManager
}
var instance *singleton
@ -53,17 +57,21 @@ func Initialize() *singleton {
Status: TaskStatus{Status: Idle, Progress: -1},
Paths: paths.NewPaths(),
PluginCache: initPluginCache(),
ScraperCache: initScraperCache(),
PluginCache: initPluginCache(),
DownloadStore: NewDownloadStore(),
TxnManager: sqlite.NewTransactionManager(),
}
instance.ScraperCache = instance.initScraperCache()
instance.RefreshConfig()
// clear the downloads and tmp directories
utils.EmptyDir(instance.Paths.Generated.Downloads)
utils.EmptyDir(instance.Paths.Generated.Tmp)
// #1021 - only clear these directories if a generated path is configured
if config.GetGeneratedPath() != "" {
utils.EmptyDir(instance.Paths.Generated.Downloads)
utils.EmptyDir(instance.Paths.Generated.Tmp)
}
initFFMPEG()
})
@ -180,13 +188,13 @@ func initPluginCache() *plugin.Cache {
}
// initScraperCache initializes a new scraper cache and returns it.
func initScraperCache() *scraper.Cache {
func (s *singleton) initScraperCache() *scraper.Cache {
scraperConfig := scraper.GlobalConfig{
Path: config.GetScrapersPath(),
UserAgent: config.GetScraperUserAgent(),
CDPPath: config.GetScraperCDPPath(),
}
ret, err := scraper.NewCache(scraperConfig)
ret, err := scraper.NewCache(scraperConfig, s.TxnManager)
if err != nil {
logger.Errorf("Error reading scraper configs: %s", err.Error())
@ -210,5 +218,5 @@ func (s *singleton) RefreshConfig() {
// RefreshScraperCache refreshes the scraper cache. Call this when scraper
// configuration changes.
func (s *singleton) RefreshScraperCache() {
s.ScraperCache = initScraperCache()
s.ScraperCache = s.initScraperCache()
}
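Making initScraperCache a method lets the scraper cache be built with the singleton's transaction manager instead of global query builders. A rough sketch of the dependency flow, following the signatures shown above (error handling elided, values illustrative):

// The singleton owns the transaction manager and passes it to anything that
// needs database access, such as the scraper cache.
inst := &singleton{
	TxnManager: sqlite.NewTransactionManager(),
}
inst.ScraperCache = inst.initScraperCache() // hands inst.TxnManager to scraper.NewCache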

View file

@ -1,12 +1,16 @@
package manager
import (
"context"
"errors"
"fmt"
"os"
"strconv"
"sync"
"time"
"github.com/remeh/sizedwaitgroup"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models"
@ -116,7 +120,7 @@ func (s *singleton) neededScan(paths []*models.StashConfig) (total *int, newFile
for _, sp := range paths {
err := walkFilesToScan(sp, func(path string, info os.FileInfo, err error) error {
t++
task := ScanTask{FilePath: path}
task := ScanTask{FilePath: path, TxnManager: s.TxnManager}
if !task.doesPathExist() {
n++
}
@ -176,7 +180,11 @@ func (s *singleton) Scan(input models.ScanMetadataInput) {
logger.Infof("Starting scan of %d files. %d New files found", *total, *newFiles)
}
var wg sync.WaitGroup
start := time.Now()
parallelTasks := config.GetParallelTasksWithAutoDetection()
logger.Infof("Scan started with %d parallel tasks", parallelTasks)
wg := sizedwaitgroup.New(parallelTasks)
s.Status.Progress = 0
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
calculateMD5 := config.IsCalculateMD5()
@ -201,10 +209,21 @@ func (s *singleton) Scan(input models.ScanMetadataInput) {
galleries = append(galleries, path)
}
wg.Add(1)
task := ScanTask{FilePath: path, UseFileMetadata: input.UseFileMetadata, fileNamingAlgorithm: fileNamingAlgo, calculateMD5: calculateMD5}
instance.Paths.Generated.EnsureTmpDir()
wg.Add()
task := ScanTask{
TxnManager: s.TxnManager,
FilePath: path,
UseFileMetadata: input.UseFileMetadata,
StripFileExtension: input.StripFileExtension,
fileNamingAlgorithm: fileNamingAlgo,
calculateMD5: calculateMD5,
GeneratePreview: input.ScanGeneratePreviews,
GenerateImagePreview: input.ScanGenerateImagePreviews,
GenerateSprite: input.ScanGenerateSprites,
}
go task.Start(&wg)
wg.Wait()
return nil
})
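Scanning now runs through a sizedwaitgroup, which caps the number of scan tasks in flight: Add() blocks until a worker slot is free and Done(), called inside each task, releases it. A minimal standalone sketch of the pattern, independent of ScanTask:

package main

import (
	"fmt"

	"github.com/remeh/sizedwaitgroup"
)

func main() {
	swg := sizedwaitgroup.New(4) // at most 4 goroutines at a time
	for i := 0; i < 20; i++ {
		swg.Add() // blocks once 4 workers are already running
		go func(n int) {
			defer swg.Done()
			fmt.Println("processing item", n)
		}(i)
	}
	swg.Wait() // wait for the remaining workers to finish
}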
@ -224,10 +243,19 @@ func (s *singleton) Scan(input models.ScanMetadataInput) {
return
}
logger.Info("Finished scan")
wg.Wait()
instance.Paths.Generated.EmptyTmpDir()
elapsed := time.Since(start)
logger.Info(fmt.Sprintf("Scan finished (%s)", elapsed))
for _, path := range galleries {
wg.Add(1)
task := ScanTask{FilePath: path, UseFileMetadata: false}
wg.Add()
task := ScanTask{
TxnManager: s.TxnManager,
FilePath: path,
UseFileMetadata: false,
}
go task.associateGallery(&wg)
wg.Wait()
}
@ -248,6 +276,7 @@ func (s *singleton) Import() {
var wg sync.WaitGroup
wg.Add(1)
task := ImportTask{
txnManager: s.TxnManager,
BaseDir: config.GetMetadataPath(),
Reset: true,
DuplicateBehaviour: models.ImportDuplicateEnumFail,
@ -271,7 +300,11 @@ func (s *singleton) Export() {
var wg sync.WaitGroup
wg.Add(1)
task := ExportTask{full: true, fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm()}
task := ExportTask{
txnManager: s.TxnManager,
full: true,
fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
}
go task.Start(&wg)
wg.Wait()
}()
@ -331,45 +364,58 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
s.Status.SetStatus(Generate)
s.Status.indefiniteProgress()
qb := models.NewSceneQueryBuilder()
mqb := models.NewSceneMarkerQueryBuilder()
//this.job.total = await ObjectionUtils.getCount(Scene);
instance.Paths.Generated.EnsureTmpDir()
sceneIDs := utils.StringSliceToIntSlice(input.SceneIDs)
markerIDs := utils.StringSliceToIntSlice(input.MarkerIDs)
sceneIDs, err := utils.StringSliceToIntSlice(input.SceneIDs)
if err != nil {
logger.Error(err.Error())
}
markerIDs, err := utils.StringSliceToIntSlice(input.MarkerIDs)
if err != nil {
logger.Error(err.Error())
}
go func() {
defer s.returnToIdleState()
var scenes []*models.Scene
var err error
var markers []*models.SceneMarker
if len(sceneIDs) > 0 {
scenes, err = qb.FindMany(sceneIDs)
} else {
scenes, err = qb.All()
}
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
qb := r.Scene()
if len(sceneIDs) > 0 {
scenes, err = qb.FindMany(sceneIDs)
} else {
scenes, err = qb.All()
}
if err != nil {
logger.Errorf("failed to get scenes for generate")
if err != nil {
return err
}
if len(markerIDs) > 0 {
markers, err = r.SceneMarker().FindMany(markerIDs)
if err != nil {
return err
}
}
return nil
}); err != nil {
logger.Error(err.Error())
return
}
delta := utils.Btoi(input.Sprites) + utils.Btoi(input.Previews) + utils.Btoi(input.Markers) + utils.Btoi(input.Transcodes)
var wg sync.WaitGroup
parallelTasks := config.GetParallelTasksWithAutoDetection()
logger.Infof("Generate started with %d parallel tasks", parallelTasks)
wg := sizedwaitgroup.New(parallelTasks)
s.Status.Progress = 0
lenScenes := len(scenes)
total := lenScenes
var markers []*models.SceneMarker
if len(markerIDs) > 0 {
markers, err = mqb.FindMany(markerIDs)
total += len(markers)
}
total := lenScenes + len(markers)
if s.Status.stopping {
logger.Info("Stopping due to user request")
@ -397,6 +443,10 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
}
setGeneratePreviewOptionsInput(generatePreviewOptions)
// Start measuring how long the generate has taken. (consider moving this up)
start := time.Now()
instance.Paths.Generated.EnsureTmpDir()
for i, scene := range scenes {
s.Status.setProgress(i, total)
if s.Status.stopping {
@ -409,15 +459,13 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
continue
}
wg.Add(delta)
// Clear the tmp directory for each scene
if input.Sprites || input.Previews || input.Markers {
instance.Paths.Generated.EmptyTmpDir()
}
if input.Sprites {
task := GenerateSpriteTask{Scene: *scene, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
task := GenerateSpriteTask{
Scene: *scene,
Overwrite: overwrite,
fileNamingAlgorithm: fileNamingAlgo,
}
wg.Add()
go task.Start(&wg)
}
@ -429,22 +477,34 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
Overwrite: overwrite,
fileNamingAlgorithm: fileNamingAlgo,
}
wg.Add()
go task.Start(&wg)
}
if input.Markers {
task := GenerateMarkersTask{Scene: scene, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
wg.Add()
task := GenerateMarkersTask{
TxnManager: s.TxnManager,
Scene: scene,
Overwrite: overwrite,
fileNamingAlgorithm: fileNamingAlgo,
}
go task.Start(&wg)
}
if input.Transcodes {
task := GenerateTranscodeTask{Scene: *scene, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
wg.Add()
task := GenerateTranscodeTask{
Scene: *scene,
Overwrite: overwrite,
fileNamingAlgorithm: fileNamingAlgo,
}
go task.Start(&wg)
}
wg.Wait()
}
wg.Wait()
for i, marker := range markers {
s.Status.setProgress(lenScenes+i, total)
if s.Status.stopping {
@ -457,13 +517,21 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
continue
}
wg.Add(1)
task := GenerateMarkersTask{Marker: marker, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
wg.Add()
task := GenerateMarkersTask{
TxnManager: s.TxnManager,
Marker: marker,
Overwrite: overwrite,
fileNamingAlgorithm: fileNamingAlgo,
}
go task.Start(&wg)
wg.Wait()
}
logger.Infof("Generate finished")
wg.Wait()
instance.Paths.Generated.EmptyTmpDir()
elapsed := time.Since(start)
logger.Info(fmt.Sprintf("Generate finished (%s)", elapsed))
}()
}
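All of the reads above go through TxnManager.WithReadTxn, which opens a read transaction, hands the callback a models.ReaderRepository, and surfaces any error the callback returns. A hedged sketch of the call shape used throughout this commit (the scene query mirrors the hunks above):

var scenes []*models.Scene
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
	var err error
	scenes, err = r.Scene().All()
	return err // a non-nil error aborts the transaction and is returned below
}); err != nil {
	logger.Error(err.Error())
	return
}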
@ -483,7 +551,6 @@ func (s *singleton) generateScreenshot(sceneId string, at *float64) {
s.Status.SetStatus(Generate)
s.Status.indefiniteProgress()
qb := models.NewSceneQueryBuilder()
instance.Paths.Generated.EnsureTmpDir()
go func() {
@ -495,13 +562,18 @@ func (s *singleton) generateScreenshot(sceneId string, at *float64) {
return
}
scene, err := qb.Find(sceneIdInt)
if err != nil || scene == nil {
logger.Errorf("failed to get scene for generate")
var scene *models.Scene
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
var err error
scene, err = r.Scene().Find(sceneIdInt)
return err
}); err != nil || scene == nil {
logger.Errorf("failed to get scene for generate: %s", err.Error())
return
}
task := GenerateScreenshotTask{
txnManager: s.TxnManager,
Scene: *scene,
ScreenshotAt: at,
fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
@ -517,7 +589,7 @@ func (s *singleton) generateScreenshot(sceneId string, at *float64) {
}()
}
func (s *singleton) AutoTag(performerIds []string, studioIds []string, tagIds []string) {
func (s *singleton) AutoTag(input models.AutoTagMetadataInput) {
if s.Status.Status != Idle {
return
}
@ -527,76 +599,98 @@ func (s *singleton) AutoTag(performerIds []string, studioIds []string, tagIds []
go func() {
defer s.returnToIdleState()
performerIds := input.Performers
studioIds := input.Studios
tagIds := input.Tags
// calculate work load
performerCount := len(performerIds)
studioCount := len(studioIds)
tagCount := len(tagIds)
performerQuery := models.NewPerformerQueryBuilder()
studioQuery := models.NewTagQueryBuilder()
tagQuery := models.NewTagQueryBuilder()
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
performerQuery := r.Performer()
studioQuery := r.Studio()
tagQuery := r.Tag()
const wildcard = "*"
var err error
if performerCount == 1 && performerIds[0] == wildcard {
performerCount, err = performerQuery.Count()
if err != nil {
logger.Errorf("Error getting performer count: %s", err.Error())
const wildcard = "*"
var err error
if performerCount == 1 && performerIds[0] == wildcard {
performerCount, err = performerQuery.Count()
if err != nil {
return fmt.Errorf("Error getting performer count: %s", err.Error())
}
}
}
if studioCount == 1 && studioIds[0] == wildcard {
studioCount, err = studioQuery.Count()
if err != nil {
logger.Errorf("Error getting studio count: %s", err.Error())
if studioCount == 1 && studioIds[0] == wildcard {
studioCount, err = studioQuery.Count()
if err != nil {
return fmt.Errorf("Error getting studio count: %s", err.Error())
}
}
}
if tagCount == 1 && tagIds[0] == wildcard {
tagCount, err = tagQuery.Count()
if err != nil {
logger.Errorf("Error getting tag count: %s", err.Error())
if tagCount == 1 && tagIds[0] == wildcard {
tagCount, err = tagQuery.Count()
if err != nil {
return fmt.Errorf("Error getting tag count: %s", err.Error())
}
}
return nil
}); err != nil {
logger.Error(err.Error())
return
}
total := performerCount + studioCount + tagCount
s.Status.setProgress(0, total)
s.autoTagPerformers(performerIds)
s.autoTagStudios(studioIds)
s.autoTagTags(tagIds)
s.autoTagPerformers(input.Paths, performerIds)
s.autoTagStudios(input.Paths, studioIds)
s.autoTagTags(input.Paths, tagIds)
}()
}
func (s *singleton) autoTagPerformers(performerIds []string) {
performerQuery := models.NewPerformerQueryBuilder()
func (s *singleton) autoTagPerformers(paths []string, performerIds []string) {
var wg sync.WaitGroup
for _, performerId := range performerIds {
var performers []*models.Performer
if performerId == "*" {
var err error
performers, err = performerQuery.All()
if err != nil {
logger.Errorf("Error querying performers: %s", err.Error())
continue
}
} else {
performerIdInt, err := strconv.Atoi(performerId)
if err != nil {
logger.Errorf("Error parsing performer id %s: %s", performerId, err.Error())
continue
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
performerQuery := r.Performer()
if performerId == "*" {
var err error
performers, err = performerQuery.All()
if err != nil {
return fmt.Errorf("Error querying performers: %s", err.Error())
}
} else {
performerIdInt, err := strconv.Atoi(performerId)
if err != nil {
return fmt.Errorf("Error parsing performer id %s: %s", performerId, err.Error())
}
performer, err := performerQuery.Find(performerIdInt)
if err != nil {
return fmt.Errorf("Error finding performer id %s: %s", performerId, err.Error())
}
performers = append(performers, performer)
}
performer, err := performerQuery.Find(performerIdInt)
if err != nil {
logger.Errorf("Error finding performer id %s: %s", performerId, err.Error())
continue
}
performers = append(performers, performer)
return nil
}); err != nil {
logger.Error(err.Error())
continue
}
for _, performer := range performers {
wg.Add(1)
task := AutoTagPerformerTask{performer: performer}
task := AutoTagPerformerTask{
AutoTagTask: AutoTagTask{
txnManager: s.TxnManager,
paths: paths,
},
performer: performer,
}
go task.Start(&wg)
wg.Wait()
@ -605,37 +699,47 @@ func (s *singleton) autoTagPerformers(performerIds []string) {
}
}
func (s *singleton) autoTagStudios(studioIds []string) {
studioQuery := models.NewStudioQueryBuilder()
func (s *singleton) autoTagStudios(paths []string, studioIds []string) {
var wg sync.WaitGroup
for _, studioId := range studioIds {
var studios []*models.Studio
if studioId == "*" {
var err error
studios, err = studioQuery.All()
if err != nil {
logger.Errorf("Error querying studios: %s", err.Error())
continue
}
} else {
studioIdInt, err := strconv.Atoi(studioId)
if err != nil {
logger.Errorf("Error parsing studio id %s: %s", studioId, err.Error())
continue
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
studioQuery := r.Studio()
if studioId == "*" {
var err error
studios, err = studioQuery.All()
if err != nil {
return fmt.Errorf("Error querying studios: %s", err.Error())
}
} else {
studioIdInt, err := strconv.Atoi(studioId)
if err != nil {
return fmt.Errorf("Error parsing studio id %s: %s", studioId, err.Error())
}
studio, err := studioQuery.Find(studioIdInt)
if err != nil {
return fmt.Errorf("Error finding studio id %s: %s", studioId, err.Error())
}
studios = append(studios, studio)
}
studio, err := studioQuery.Find(studioIdInt, nil)
if err != nil {
logger.Errorf("Error finding studio id %s: %s", studioId, err.Error())
continue
}
studios = append(studios, studio)
return nil
}); err != nil {
logger.Error(err.Error())
continue
}
for _, studio := range studios {
wg.Add(1)
task := AutoTagStudioTask{studio: studio}
task := AutoTagStudioTask{
AutoTagTask: AutoTagTask{
txnManager: s.TxnManager,
paths: paths,
},
studio: studio,
}
go task.Start(&wg)
wg.Wait()
@ -644,37 +748,46 @@ func (s *singleton) autoTagStudios(studioIds []string) {
}
}
func (s *singleton) autoTagTags(tagIds []string) {
tagQuery := models.NewTagQueryBuilder()
func (s *singleton) autoTagTags(paths []string, tagIds []string) {
var wg sync.WaitGroup
for _, tagId := range tagIds {
var tags []*models.Tag
if tagId == "*" {
var err error
tags, err = tagQuery.All()
if err != nil {
logger.Errorf("Error querying tags: %s", err.Error())
continue
}
} else {
tagIdInt, err := strconv.Atoi(tagId)
if err != nil {
logger.Errorf("Error parsing tag id %s: %s", tagId, err.Error())
continue
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
tagQuery := r.Tag()
if tagId == "*" {
var err error
tags, err = tagQuery.All()
if err != nil {
return fmt.Errorf("Error querying tags: %s", err.Error())
}
} else {
tagIdInt, err := strconv.Atoi(tagId)
if err != nil {
return fmt.Errorf("Error parsing tag id %s: %s", tagId, err.Error())
}
tag, err := tagQuery.Find(tagIdInt)
if err != nil {
return fmt.Errorf("Error finding tag id %s: %s", tagId, err.Error())
}
tags = append(tags, tag)
}
tag, err := tagQuery.Find(tagIdInt, nil)
if err != nil {
logger.Errorf("Error finding tag id %s: %s", tagId, err.Error())
continue
}
tags = append(tags, tag)
return nil
}); err != nil {
logger.Error(err.Error())
continue
}
for _, tag := range tags {
wg.Add(1)
task := AutoTagTagTask{tag: tag}
task := AutoTagTagTask{
AutoTagTask: AutoTagTask{
txnManager: s.TxnManager,
paths: paths,
},
tag: tag,
}
go task.Start(&wg)
wg.Wait()
@ -683,35 +796,50 @@ func (s *singleton) autoTagTags(tagIds []string) {
}
}
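Each auto-tag variant now embeds a shared AutoTagTask that carries the transaction manager and the optional path filter, so the performer, studio and tag tasks only add their own entity field. A short construction sketch following the shape above (whether an empty path list means "no restriction" is an assumption here):

task := AutoTagPerformerTask{
	AutoTagTask: AutoTagTask{
		txnManager: s.TxnManager,
		paths:      input.Paths, // assumed: empty means no path restriction
	},
	performer: performer,
}
go task.Start(&wg)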
func (s *singleton) Clean() {
func (s *singleton) Clean(input models.CleanMetadataInput) {
if s.Status.Status != Idle {
return
}
s.Status.SetStatus(Clean)
s.Status.indefiniteProgress()
qb := models.NewSceneQueryBuilder()
iqb := models.NewImageQueryBuilder()
gqb := models.NewGalleryQueryBuilder()
go func() {
defer s.returnToIdleState()
logger.Infof("Starting cleaning of tracked files")
scenes, err := qb.All()
if err != nil {
logger.Errorf("failed to fetch list of scenes for cleaning")
return
}
var scenes []*models.Scene
var images []*models.Image
var galleries []*models.Gallery
images, err := iqb.All()
if err != nil {
logger.Errorf("failed to fetch list of images for cleaning")
return
}
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
qb := r.Scene()
iqb := r.Image()
gqb := r.Gallery()
galleries, err := gqb.All()
if err != nil {
logger.Errorf("failed to fetch list of galleries for cleaning")
logger.Infof("Starting cleaning of tracked files")
if input.DryRun {
logger.Infof("Running in Dry Mode")
}
var err error
scenes, err = qb.All()
if err != nil {
return errors.New("failed to fetch list of scenes for cleaning")
}
images, err = iqb.All()
if err != nil {
return errors.New("failed to fetch list of images for cleaning")
}
galleries, err = gqb.All()
if err != nil {
return errors.New("failed to fetch list of galleries for cleaning")
}
return nil
}); err != nil {
logger.Error(err.Error())
return
}
@ -738,8 +866,12 @@ func (s *singleton) Clean() {
wg.Add(1)
task := CleanTask{Scene: scene, fileNamingAlgorithm: fileNamingAlgo}
go task.Start(&wg)
task := CleanTask{
TxnManager: s.TxnManager,
Scene: scene,
fileNamingAlgorithm: fileNamingAlgo,
}
go task.Start(&wg, input.DryRun)
wg.Wait()
}
@ -757,8 +889,11 @@ func (s *singleton) Clean() {
wg.Add(1)
task := CleanTask{Image: img}
go task.Start(&wg)
task := CleanTask{
TxnManager: s.TxnManager,
Image: img,
}
go task.Start(&wg, input.DryRun)
wg.Wait()
}
@ -776,8 +911,11 @@ func (s *singleton) Clean() {
wg.Add(1)
task := CleanTask{Gallery: gallery}
go task.Start(&wg)
task := CleanTask{
TxnManager: s.TxnManager,
Gallery: gallery,
}
go task.Start(&wg, input.DryRun)
wg.Wait()
}
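Clean tasks now receive the dry-run flag, letting the run report what it would remove without deleting anything. The guard itself lives inside CleanTask.Start and is not shown in this hunk; a hypothetical sketch of the idea (cleanFile is not a real helper in the codebase):

// Hypothetical dry-run guard, not the actual CleanTask implementation.
func cleanFile(path string, dryRun bool) error {
	logger.Infof("Deleting file %s", path)
	if dryRun {
		return nil // dry run: log the action but change nothing
	}
	return os.Remove(path)
}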
@ -792,17 +930,19 @@ func (s *singleton) MigrateHash() {
s.Status.SetStatus(Migrate)
s.Status.indefiniteProgress()
qb := models.NewSceneQueryBuilder()
go func() {
defer s.returnToIdleState()
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
logger.Infof("Migrating generated files for %s naming hash", fileNamingAlgo.String())
scenes, err := qb.All()
if err != nil {
logger.Errorf("failed to fetch list of scenes for migration")
var scenes []*models.Scene
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
var err error
scenes, err = r.Scene().All()
return err
}); err != nil {
logger.Errorf("failed to fetch list of scenes for migration: %s", err.Error())
return
}
@ -907,6 +1047,7 @@ func (s *singleton) neededGenerate(scenes []*models.Scene, input models.Generate
if input.Markers {
task := GenerateMarkersTask{
TxnManager: s.TxnManager,
Scene: scene,
Overwrite: overwrite,
fileNamingAlgorithm: fileNamingAlgo,

View file

@ -0,0 +1,59 @@
package manager
import (
"os"
"path/filepath"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/utils"
)
func MigrateHash(oldHash string, newHash string) {
oldPath := filepath.Join(instance.Paths.Generated.Markers, oldHash)
newPath := filepath.Join(instance.Paths.Generated.Markers, newHash)
migrate(oldPath, newPath)
scenePaths := GetInstance().Paths.Scene
oldPath = scenePaths.GetThumbnailScreenshotPath(oldHash)
newPath = scenePaths.GetThumbnailScreenshotPath(newHash)
migrate(oldPath, newPath)
oldPath = scenePaths.GetScreenshotPath(oldHash)
newPath = scenePaths.GetScreenshotPath(newHash)
migrate(oldPath, newPath)
oldPath = scenePaths.GetStreamPreviewPath(oldHash)
newPath = scenePaths.GetStreamPreviewPath(newHash)
migrate(oldPath, newPath)
oldPath = scenePaths.GetStreamPreviewImagePath(oldHash)
newPath = scenePaths.GetStreamPreviewImagePath(newHash)
migrate(oldPath, newPath)
oldPath = scenePaths.GetTranscodePath(oldHash)
newPath = scenePaths.GetTranscodePath(newHash)
migrate(oldPath, newPath)
oldPath = scenePaths.GetSpriteVttFilePath(oldHash)
newPath = scenePaths.GetSpriteVttFilePath(newHash)
migrate(oldPath, newPath)
oldPath = scenePaths.GetSpriteImageFilePath(oldHash)
newPath = scenePaths.GetSpriteImageFilePath(newHash)
migrate(oldPath, newPath)
}
func migrate(oldName, newName string) {
oldExists, err := utils.FileExists(oldName)
if err != nil && !os.IsNotExist(err) {
logger.Errorf("Error checking existence of %s: %s", oldName, err.Error())
return
}
if oldExists {
logger.Infof("renaming %s to %s", oldName, newName)
if err := os.Rename(oldName, newName); err != nil {
logger.Errorf("error renaming %s to %s: %s", oldName, newName, err.Error())
}
}
}
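MigrateHash renames every generated artifact for a scene (markers folder, screenshots, preview video and image, transcode, sprite VTT and image) from the old hash-based name to the new one, skipping anything that does not exist. A short usage sketch, assuming the manager singleton is initialised and both checksums are known (values made up):

// After switching the file naming algorithm, move a scene's generated files
// from its old checksum-based names to the new ones.
oldHash := "9b74c9897bac770ffc029102a200c5de" // illustrative MD5
newHash := "a1b2c3d4e5f60718"                 // illustrative oshash
manager.MigrateHash(oldHash, newHash)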

View file

@ -29,7 +29,7 @@ func newGeneratedPaths() *generatedPaths {
gp.Vtt = filepath.Join(config.GetGeneratedPath(), "vtt")
gp.Markers = filepath.Join(config.GetGeneratedPath(), "markers")
gp.Transcodes = filepath.Join(config.GetGeneratedPath(), "transcodes")
gp.Downloads = filepath.Join(config.GetGeneratedPath(), "downloads")
gp.Downloads = filepath.Join(config.GetGeneratedPath(), "download_stage")
gp.Tmp = filepath.Join(config.GetGeneratedPath(), "tmp")
return &gp
}

Some files were not shown because too many files have changed in this diff.