Merge branch 'develop' of https://github.com/stashapp/stash into docs-patchable-components

This commit is contained in:
DogmaDragon 2026-01-22 13:01:43 +02:00
commit 7c97a140f9
281 changed files with 11008 additions and 4894 deletions

View file

@ -76,6 +76,10 @@ func main() {
defer pprof.StopCPUProfile()
}
// initialise desktop.IsDesktop here so that it doesn't get affected by
// ffmpeg hardware checks later on
desktop.InitIsDesktop()
mgr, err := manager.Initialize(cfg, l)
if err != nil {
exitError(fmt.Errorf("manager initialization error: %w", err))

15
go.mod
View file

@ -7,10 +7,10 @@ require (
github.com/WithoutPants/sortorder v0.0.0-20230616003020-921c9ef69552
github.com/Yamashou/gqlgenc v0.32.1
github.com/anacrolix/dms v1.2.2
github.com/antchfx/htmlquery v1.3.0
github.com/antchfx/htmlquery v1.3.5
github.com/asticode/go-astisub v0.25.1
github.com/chromedp/cdproto v0.0.0-20231007061347-18b01cd81617
github.com/chromedp/chromedp v0.9.2
github.com/chromedp/cdproto v0.0.0-20250803210736-d308e07a266d
github.com/chromedp/chromedp v0.14.2
github.com/corona10/goimagehash v1.1.0
github.com/disintegration/imaging v1.6.2
github.com/dop251/goja v0.0.0-20231027120936-b396bb4c349d
@ -69,20 +69,21 @@ require (
require (
github.com/agnivade/levenshtein v1.2.1 // indirect
github.com/antchfx/xpath v1.2.3 // indirect
github.com/antchfx/xpath v1.3.5 // indirect
github.com/asticode/go-astikit v0.20.0 // indirect
github.com/asticode/go-astits v1.8.0 // indirect
github.com/chromedp/sysutil v1.0.0 // indirect
github.com/chromedp/sysutil v1.1.0 // indirect
github.com/coder/websocket v1.8.12 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.7 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dlclark/regexp2 v1.7.0 // indirect
github.com/fsnotify/fsnotify v1.9.0 // indirect
github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2 // indirect
github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
github.com/gobwas/httphead v0.1.0 // indirect
github.com/gobwas/pool v0.2.1 // indirect
github.com/gobwas/ws v1.3.0 // indirect
github.com/gobwas/ws v1.4.0 // indirect
github.com/goccy/go-yaml v1.18.0 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/google/pprof v0.0.0-20230207041349-798e818bf904 // indirect
@ -90,10 +91,8 @@ require (
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/knadh/koanf/maps v0.1.2 // indirect
github.com/magiconair/properties v1.8.7 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect

78
go.sum
View file

@ -85,10 +85,10 @@ github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/
github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
github.com/antchfx/htmlquery v1.3.0 h1:5I5yNFOVI+egyia5F2s/5Do2nFWxJz41Tr3DyfKD25E=
github.com/antchfx/htmlquery v1.3.0/go.mod h1:zKPDVTMhfOmcwxheXUsx4rKJy8KEY/PU6eXr/2SebQ8=
github.com/antchfx/xpath v1.2.3 h1:CCZWOzv5bAqjVv0offZ2LVgVYFbeldKQVuLNbViZdes=
github.com/antchfx/xpath v1.2.3/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
github.com/antchfx/htmlquery v1.3.5 h1:aYthDDClnG2a2xePf6tys/UyyM/kRcsFRm+ifhFKoU0=
github.com/antchfx/htmlquery v1.3.5/go.mod h1:5oyIPIa3ovYGtLqMPNjBF2Uf25NPCKsMjCnQ8lvjaoA=
github.com/antchfx/xpath v1.3.5 h1:PqbXLC3TkfeZyakF5eeh3NTWEbYl4VHNVeufANzDbKQ=
github.com/antchfx/xpath v1.3.5/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
@ -116,13 +116,12 @@ github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chromedp/cdproto v0.0.0-20230802225258-3cf4e6d46a89/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs=
github.com/chromedp/cdproto v0.0.0-20231007061347-18b01cd81617 h1:/5dwcyi5WOawM1Iz6MjrYqB90TRIdZv3O0fVHEJb86w=
github.com/chromedp/cdproto v0.0.0-20231007061347-18b01cd81617/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs=
github.com/chromedp/chromedp v0.9.2 h1:dKtNz4kApb06KuSXoTQIyUC2TrA0fhGDwNZf3bcgfKw=
github.com/chromedp/chromedp v0.9.2/go.mod h1:LkSXJKONWTCHAfQasKFUZI+mxqS4tZqhmtGzzhLsnLs=
github.com/chromedp/sysutil v1.0.0 h1:+ZxhTpfpZlmchB58ih/LBHX52ky7w2VhQVKQMucy3Ic=
github.com/chromedp/sysutil v1.0.0/go.mod h1:kgWmDdq8fTzXYcKIBqIYvRRTnYb9aNS9moAV0xufSww=
github.com/chromedp/cdproto v0.0.0-20250803210736-d308e07a266d h1:ZtA1sedVbEW7EW80Iz2GR3Ye6PwbJAJXjv7D74xG6HU=
github.com/chromedp/cdproto v0.0.0-20250803210736-d308e07a266d/go.mod h1:NItd7aLkcfOA/dcMXvl8p1u+lQqioRMq/SqDp71Pb/k=
github.com/chromedp/chromedp v0.14.2 h1:r3b/WtwM50RsBZHMUm9fsNhhzRStTHrKdr2zmwbZSzM=
github.com/chromedp/chromedp v0.14.2/go.mod h1:rHzAv60xDE7VNy/MYtTUrYreSc0ujt2O1/C3bzctYBo=
github.com/chromedp/sysutil v1.1.0 h1:PUFNv5EcprjqXZD9nJb9b/c9ibAbxiYo4exNWZyipwM=
github.com/chromedp/sysutil v1.1.0/go.mod h1:WiThHUdltqCNKGc4gaU50XgYjwjYIhKWoHGPTUfWTJ8=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/logex v1.2.0/go.mod h1:9+9sk7u7pGNWYMkh0hdiL++6OeibzJccyQU4p4MedaY=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
@ -206,6 +205,8 @@ github.com/go-chi/httplog v0.3.1/go.mod h1:UoiQQ/MTZH5V6JbNB2FzF0DynTh5okpXxlhsy
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2 h1:iizUGZ9pEquQS5jTGkh4AqeeHCMbfbjeb0zMt0aEFzs=
github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2/go.mod h1:TiCD2a1pcmjd7YnhGH0f/zKNcCD06B029pHhzV23c2M=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
@ -224,9 +225,8 @@ github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU
github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM=
github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og=
github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=
github.com/gobwas/ws v1.2.1/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY=
github.com/gobwas/ws v1.3.0 h1:sbeU3Y4Qzlb+MOzIe6mQGf7QR4Hkv6ZD0qhGkBFL2O0=
github.com/gobwas/ws v1.3.0/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY=
github.com/gobwas/ws v1.4.0 h1:CTaoG1tojrh4ucGPcoJFiAQUAsEWekEWvLy7GsVNqGs=
github.com/gobwas/ws v1.4.0/go.mod h1:G3gNqMNtPppf5XUz7O4shetPpcZ1VJ7zt18dlUeakrc=
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
@ -286,6 +286,7 @@ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@ -379,8 +380,6 @@ github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o=
github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
@ -432,8 +431,6 @@ github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc8
github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
@ -664,6 +661,10 @@ golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5y
golang.org/x/crypto v0.0.0-20211215165025-cf75a172585e/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
@ -707,6 +708,10 @@ golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -757,7 +762,12 @@ golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qx
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@ -789,6 +799,11 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -869,14 +884,25 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -889,7 +915,12 @@ golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@ -956,6 +987,9 @@ golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo=
golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=
golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

View file

@ -165,6 +165,12 @@ type Query {
input: ScrapeSingleStudioInput!
): [ScrapedStudio!]!
"Scrape for a single tag"
scrapeSingleTag(
source: ScraperSourceInput!
input: ScrapeSingleTagInput!
): [ScrapedTag!]!
"Scrape for a single performer"
scrapeSinglePerformer(
source: ScraperSourceInput!
@ -367,6 +373,7 @@ type Mutation {
performerDestroy(input: PerformerDestroyInput!): Boolean!
performersDestroy(ids: [ID!]!): Boolean!
bulkPerformerUpdate(input: BulkPerformerUpdateInput!): [Performer!]
performerMerge(input: PerformerMergeInput!): Performer!
studioCreate(input: StudioCreateInput!): Studio
studioUpdate(input: StudioUpdateInput!): Studio

View file

@ -319,6 +319,7 @@ input ConfigDisableDropdownCreateInput {
tag: Boolean
studio: Boolean
movie: Boolean
gallery: Boolean
}
enum ImageLightboxDisplayMode {
@ -419,6 +420,7 @@ type ConfigDisableDropdownCreate {
tag: Boolean!
studio: Boolean!
movie: Boolean!
gallery: Boolean!
}
type ConfigInterfaceResult {

View file

@ -84,13 +84,23 @@ input PHashDuplicationCriterionInput {
input StashIDCriterionInput {
"""
If present, this value is treated as a predicate.
That is, it will filter based on stash_ids with the matching endpoint
That is, it will filter based on stash_id with the matching endpoint
"""
endpoint: String
stash_id: String
modifier: CriterionModifier!
}
# Criterion for filtering objects by multiple stash IDs under one endpoint.
# Supersedes StashIDCriterionInput (which matches a single stash_id) — see
# the stash_ids_endpoint fields that deprecate stash_id_endpoint.
input StashIDsCriterionInput {
  """
  If present, this value is treated as a predicate.
  That is, it will filter based on stash_ids with the matching endpoint
  """
  endpoint: String
  stash_ids: [String]
  modifier: CriterionModifier!
}
input CustomFieldCriterionInput {
field: String!
value: [Any!]
@ -156,6 +166,9 @@ input PerformerFilterType {
o_counter: IntCriterionInput
"Filter by StashID"
stash_id_endpoint: StashIDCriterionInput
@deprecated(reason: "use stash_ids_endpoint instead")
"Filter by StashIDs"
stash_ids_endpoint: StashIDsCriterionInput
# rating expressed as 1-100
rating100: IntCriterionInput
"Filter by url"
@ -292,6 +305,9 @@ input SceneFilterType {
performer_count: IntCriterionInput
"Filter by StashID"
stash_id_endpoint: StashIDCriterionInput
@deprecated(reason: "use stash_ids_endpoint instead")
"Filter by StashIDs"
stash_ids_endpoint: StashIDsCriterionInput
"Filter by url"
url: StringCriterionInput
"Filter by interactive"
@ -432,6 +448,9 @@ input StudioFilterType {
parents: MultiCriterionInput
"Filter by StashID"
stash_id_endpoint: StashIDCriterionInput
@deprecated(reason: "use stash_ids_endpoint instead")
"Filter by StashIDs"
stash_ids_endpoint: StashIDsCriterionInput
"Filter to only include studios with these tags"
tags: HierarchicalMultiCriterionInput
"Filter to only include studios missing this property"
@ -606,6 +625,13 @@ input TagFilterType {
"Filter by autotag ignore value"
ignore_auto_tag: Boolean
"Filter by StashID"
stash_id_endpoint: StashIDCriterionInput
@deprecated(reason: "use stash_ids_endpoint instead")
"Filter by StashID"
stash_ids_endpoint: StashIDsCriterionInput
"Filter by related scenes that meet this criteria"
scenes_filter: SceneFilterType
"Filter by related images that meet this criteria"

View file

@ -344,4 +344,6 @@ input CustomFieldsInput {
full: Map
"If populated, only the keys in this map will be updated"
partial: Map
"Remove any keys in this list"
remove: [String!]
}

View file

@ -185,3 +185,10 @@ type FindPerformersResultType {
count: Int!
performers: [Performer!]!
}
# Input for the performerMerge mutation: merges the source performers
# into the destination performer.
input PerformerMergeInput {
  # IDs of the performers to merge from
  source: [ID!]!
  # ID of the performer to merge into
  destination: ID!
  # values defined here will override values in the destination
  values: PerformerUpdateInput
}

View file

@ -198,6 +198,13 @@ input ScrapeSingleStudioInput {
query: String
}
# Input for the scrapeSingleTag query.
input ScrapeSingleTagInput {
  """
  Query can be either a name or a Stash ID
  """
  query: String
}
input ScrapeSinglePerformerInput {
"Instructs to query by string"
query: String

View file

@ -170,6 +170,21 @@ query FindStudio($id: ID, $name: String) {
}
}
# Looks up a single tag by id or by name.
query FindTag($id: ID, $name: String) {
  findTag(id: $id, name: $name) {
    ...TagFragment
  }
}
# Searches tags using the given query input, returning the total match
# count alongside the tags themselves.
query QueryTags($input: TagQueryInput!) {
  queryTags(input: $input) {
    count
    tags {
      ...TagFragment
    }
  }
}
# Submits the given fingerprint submission input.
mutation SubmitFingerprint($input: FingerprintSubmission!) {
  submitFingerprint(input: $input)
}

View file

@ -0,0 +1,12 @@
package api
import "github.com/stashapp/stash/pkg/models"
// handleUpdateCustomFields normalises a CustomFieldsInput for persistence,
// converting any json.Number values in the Full and Partial maps to native
// int/float values. The caller's input struct is not modified.
func handleUpdateCustomFields(input models.CustomFieldsInput) models.CustomFieldsInput {
	// work on a copy of the struct so the original is untouched
	out := input

	// convert json.Numbers to int/float
	out.Full = convertMapJSONNumbers(out.Full)
	out.Partial = convertMapJSONNumbers(out.Partial)

	return out
}

35
internal/api/input.go Normal file
View file

@ -0,0 +1,35 @@
package api
import (
"fmt"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
// TODO - apply handleIDs to other resolvers that accept ID lists
// handleIDList validates a list of string IDs (rejecting duplicates) and
// converts each entry to an int. field names the input field and is used
// purely for error context in the returned error.
func handleIDList(idList []string, field string) ([]int, error) {
	// reject duplicate IDs up front
	if dupErr := validateIDList(idList); dupErr != nil {
		return nil, fmt.Errorf("validating %s: %w", field, dupErr)
	}

	converted, convErr := stringslice.StringSliceToIntSlice(idList)
	if convErr != nil {
		return nil, fmt.Errorf("converting %s: %w", field, convErr)
	}

	return converted, nil
}
// validateIDList returns an error if any id occurs more than once in ids.
// A nil or empty list is valid.
func validateIDList(ids []string) error {
	// track every id we have already encountered
	unique := make(map[string]struct{}, len(ids))

	for _, v := range ids {
		if _, dup := unique[v]; dup {
			return fmt.Errorf("duplicate id found: %s", v)
		}
		unique[v] = struct{}{}
	}

	return nil
}

View file

@ -521,6 +521,7 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input ConfigI
r.setConfigBool(config.DisableDropdownCreateStudio, ddc.Studio)
r.setConfigBool(config.DisableDropdownCreateTag, ddc.Tag)
r.setConfigBool(config.DisableDropdownCreateMovie, ddc.Movie)
r.setConfigBool(config.DisableDropdownCreateGallery, ddc.Gallery)
}
r.setConfigString(config.HandyKey, input.HandyKey)

View file

@ -49,6 +49,7 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat
newGallery.Details = translator.string(input.Details)
newGallery.Photographer = translator.string(input.Photographer)
newGallery.Rating = input.Rating100
newGallery.Organized = translator.bool(input.Organized)
var err error

View file

@ -2,13 +2,16 @@ package api
import (
"context"
"errors"
"fmt"
"slices"
"strconv"
"strings"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/performer"
"github.com/stashapp/stash/pkg/plugin/hook"
"github.com/stashapp/stash/pkg/sliceutil"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
"github.com/stashapp/stash/pkg/utils"
)
@ -136,7 +139,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
return r.getPerformer(ctx, newPerformer.ID)
}
func (r *mutationResolver) validateNoLegacyURLs(translator changesetTranslator) error {
func validateNoLegacyURLs(translator changesetTranslator) error {
// ensure url/twitter/instagram are not included in the input
if translator.hasField("url") {
return fmt.Errorf("url field must not be included if urls is included")
@ -151,7 +154,7 @@ func (r *mutationResolver) validateNoLegacyURLs(translator changesetTranslator)
return nil
}
func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int, legacyURL, legacyTwitter, legacyInstagram models.OptionalString, updatedPerformer *models.PerformerPartial) error {
func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int, legacyURLs legacyPerformerURLs, updatedPerformer *models.PerformerPartial) error {
qb := r.repository.Performer
// we need to be careful with URL/Twitter/Instagram
@ -170,23 +173,23 @@ func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int
existingURLs := p.URLs.List()
// performer partial URLs should be empty
if legacyURL.Set {
if legacyURLs.URL.Set {
replaced := false
for i, url := range existingURLs {
if !performer.IsTwitterURL(url) && !performer.IsInstagramURL(url) {
existingURLs[i] = legacyURL.Value
existingURLs[i] = legacyURLs.URL.Value
replaced = true
break
}
}
if !replaced {
existingURLs = append(existingURLs, legacyURL.Value)
existingURLs = append(existingURLs, legacyURLs.URL.Value)
}
}
if legacyTwitter.Set {
value := utils.URLFromHandle(legacyTwitter.Value, twitterURL)
if legacyURLs.Twitter.Set {
value := utils.URLFromHandle(legacyURLs.Twitter.Value, twitterURL)
found := false
// find and replace the first twitter URL
for i, url := range existingURLs {
@ -201,9 +204,9 @@ func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int
existingURLs = append(existingURLs, value)
}
}
if legacyInstagram.Set {
if legacyURLs.Instagram.Set {
found := false
value := utils.URLFromHandle(legacyInstagram.Value, instagramURL)
value := utils.URLFromHandle(legacyURLs.Instagram.Value, instagramURL)
// find and replace the first instagram URL
for i, url := range existingURLs {
if performer.IsInstagramURL(url) {
@ -226,16 +229,25 @@ func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int
return nil
}
func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.PerformerUpdateInput) (*models.Performer, error) {
performerID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, fmt.Errorf("converting id: %w", err)
}
// legacyPerformerURLs bundles the deprecated single-value URL fields
// (url, twitter, instagram) from performer update inputs so they can be
// passed around and applied as one unit.
type legacyPerformerURLs struct {
	URL       models.OptionalString
	Twitter   models.OptionalString
	Instagram models.OptionalString
}
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
// AnySet reports whether at least one of the legacy URL fields was set.
func (u *legacyPerformerURLs) AnySet() bool {
	for _, f := range []models.OptionalString{u.URL, u.Twitter, u.Instagram} {
		if f.Set {
			return true
		}
	}
	return false
}
// legacyPerformerURLsFromInput extracts the deprecated url/twitter/instagram
// values from a performer update input, using the changeset translator to
// determine which of the fields were present in the request.
func legacyPerformerURLsFromInput(input models.PerformerUpdateInput, translator changesetTranslator) legacyPerformerURLs {
	return legacyPerformerURLs{
		URL:       translator.optionalString(input.URL, "url"),
		Twitter:   translator.optionalString(input.Twitter, "twitter"),
		Instagram: translator.optionalString(input.Instagram, "instagram"),
	}
}
func performerPartialFromInput(input models.PerformerUpdateInput, translator changesetTranslator) (*models.PerformerPartial, error) {
// Populate performer from the input
updatedPerformer := models.NewPerformerPartial()
@ -260,19 +272,17 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
updatedPerformer.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag")
updatedPerformer.StashIDs = translator.updateStashIDs(input.StashIds, "stash_ids")
var err error
if translator.hasField("urls") {
// ensure url/twitter/instagram are not included in the input
if err := r.validateNoLegacyURLs(translator); err != nil {
if err := validateNoLegacyURLs(translator); err != nil {
return nil, err
}
updatedPerformer.URLs = translator.updateStrings(input.Urls, "urls")
}
legacyURL := translator.optionalString(input.URL, "url")
legacyTwitter := translator.optionalString(input.Twitter, "twitter")
legacyInstagram := translator.optionalString(input.Instagram, "instagram")
updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate")
if err != nil {
return nil, fmt.Errorf("converting birthdate: %w", err)
@ -297,10 +307,27 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
return nil, fmt.Errorf("converting tag ids: %w", err)
}
updatedPerformer.CustomFields = input.CustomFields
// convert json.Numbers to int/float
updatedPerformer.CustomFields.Full = convertMapJSONNumbers(updatedPerformer.CustomFields.Full)
updatedPerformer.CustomFields.Partial = convertMapJSONNumbers(updatedPerformer.CustomFields.Partial)
updatedPerformer.CustomFields = handleUpdateCustomFields(input.CustomFields)
return &updatedPerformer, nil
}
func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.PerformerUpdateInput) (*models.Performer, error) {
performerID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, fmt.Errorf("converting id: %w", err)
}
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
updatedPerformer, err := performerPartialFromInput(input, translator)
if err != nil {
return nil, err
}
legacyURLs := legacyPerformerURLsFromInput(input, translator)
var imageData []byte
imageIncluded := translator.hasField("image")
@ -315,17 +342,17 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Performer
if legacyURL.Set || legacyTwitter.Set || legacyInstagram.Set {
if err := r.handleLegacyURLs(ctx, performerID, legacyURL, legacyTwitter, legacyInstagram, &updatedPerformer); err != nil {
if legacyURLs.AnySet() {
if err := r.handleLegacyURLs(ctx, performerID, legacyURLs, updatedPerformer); err != nil {
return err
}
}
if err := performer.ValidateUpdate(ctx, performerID, updatedPerformer, qb); err != nil {
if err := performer.ValidateUpdate(ctx, performerID, *updatedPerformer, qb); err != nil {
return err
}
_, err = qb.UpdatePartial(ctx, performerID, updatedPerformer)
_, err = qb.UpdatePartial(ctx, performerID, *updatedPerformer)
if err != nil {
return err
}
@ -382,16 +409,18 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
if translator.hasField("urls") {
// ensure url/twitter/instagram are not included in the input
if err := r.validateNoLegacyURLs(translator); err != nil {
if err := validateNoLegacyURLs(translator); err != nil {
return nil, err
}
updatedPerformer.URLs = translator.updateStringsBulk(input.Urls, "urls")
}
legacyURL := translator.optionalString(input.URL, "url")
legacyTwitter := translator.optionalString(input.Twitter, "twitter")
legacyInstagram := translator.optionalString(input.Instagram, "instagram")
legacyURLs := legacyPerformerURLs{
URL: translator.optionalString(input.URL, "url"),
Twitter: translator.optionalString(input.Twitter, "twitter"),
Instagram: translator.optionalString(input.Instagram, "instagram"),
}
updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate")
if err != nil {
@ -417,6 +446,10 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
return nil, fmt.Errorf("converting tag ids: %w", err)
}
if input.CustomFields != nil {
updatedPerformer.CustomFields = handleUpdateCustomFields(*input.CustomFields)
}
ret := []*models.Performer{}
// Start the transaction and save the performers
@ -424,8 +457,8 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
qb := r.repository.Performer
for _, performerID := range performerIDs {
if legacyURL.Set || legacyTwitter.Set || legacyInstagram.Set {
if err := r.handleLegacyURLs(ctx, performerID, legacyURL, legacyTwitter, legacyInstagram, &updatedPerformer); err != nil {
if legacyURLs.AnySet() {
if err := r.handleLegacyURLs(ctx, performerID, legacyURLs, &updatedPerformer); err != nil {
return err
}
}
@ -505,3 +538,87 @@ func (r *mutationResolver) PerformersDestroy(ctx context.Context, performerIDs [
return true, nil
}
// PerformerMerge merges the performers given in input.Source into the
// performer given in input.Destination. Optional field values (input.Values)
// and a cover image may be applied to the destination as part of the merge.
//
// Returns the updated destination performer, or an error if any id is
// invalid, the destination appears in the source list, legacy URL fields are
// supplied, or the merge itself fails.
func (r *mutationResolver) PerformerMerge(ctx context.Context, input PerformerMergeInput) (*models.Performer, error) {
	srcIDs, err := stringslice.StringSliceToIntSlice(input.Source)
	if err != nil {
		return nil, fmt.Errorf("converting source ids: %w", err)
	}

	// ensure source ids are unique
	srcIDs = sliceutil.AppendUniques(nil, srcIDs)

	destID, err := strconv.Atoi(input.Destination)
	if err != nil {
		return nil, fmt.Errorf("converting destination id: %w", err)
	}

	// ensure destination is not in source list
	if slices.Contains(srcIDs, destID) {
		return nil, errors.New("destination performer cannot be in source list")
	}

	var values *models.PerformerPartial
	var imageData []byte

	if input.Values != nil {
		translator := changesetTranslator{
			inputMap: getNamedUpdateInputMap(ctx, "input.values"),
		}

		values, err = performerPartialFromInput(*input.Values, translator)
		if err != nil {
			return nil, err
		}

		// legacy url/twitter/instagram fields cannot be merged - callers must
		// use the urls list instead
		// error strings are lowercase per Go convention (staticcheck ST1005)
		legacyURLs := legacyPerformerURLsFromInput(*input.Values, translator)
		if legacyURLs.AnySet() {
			return nil, errors.New("merging legacy performer URLs is not supported")
		}

		if input.Values.Image != nil {
			var err error
			imageData, err = utils.ProcessImageInput(ctx, *input.Values.Image)
			if err != nil {
				return nil, fmt.Errorf("processing cover image: %w", err)
			}
		}
	} else {
		// no values provided - apply an empty partial update
		v := models.NewPerformerPartial()
		values = &v
	}

	var dest *models.Performer

	if err := r.withTxn(ctx, func(ctx context.Context) error {
		qb := r.repository.Performer

		dest, err = qb.Find(ctx, destID)
		if err != nil {
			return fmt.Errorf("finding destination performer ID %d: %w", destID, err)
		}
		// guard against Find returning (nil, nil) for a missing id, which
		// would otherwise surface as a null performer with no error
		if dest == nil {
			return fmt.Errorf("performer with id %d not found", destID)
		}

		// ensure source performers exist
		if _, err := qb.FindMany(ctx, srcIDs); err != nil {
			return fmt.Errorf("finding source performers: %w", err)
		}

		if _, err := qb.UpdatePartial(ctx, destID, *values); err != nil {
			return fmt.Errorf("updating performer: %w", err)
		}

		if err := qb.Merge(ctx, srcIDs, destID); err != nil {
			return fmt.Errorf("merging performers: %w", err)
		}

		if len(imageData) > 0 {
			if err := qb.UpdateImage(ctx, destID, imageData); err != nil {
				return err
			}
		}

		return nil
	}); err != nil {
		return nil, err
	}

	return dest, nil
}

View file

@ -297,6 +297,7 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
}
var coverImageData []byte
coverImageIncluded := translator.hasField("cover_image")
if input.CoverImage != nil {
var err error
coverImageData, err = utils.ProcessImageInput(ctx, *input.CoverImage)
@ -310,21 +311,21 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
return nil, err
}
if err := r.sceneUpdateCoverImage(ctx, scene, coverImageData); err != nil {
return nil, err
if coverImageIncluded {
if err := r.sceneUpdateCoverImage(ctx, scene, coverImageData); err != nil {
return nil, err
}
}
return scene, nil
}
func (r *mutationResolver) sceneUpdateCoverImage(ctx context.Context, s *models.Scene, coverImageData []byte) error {
if len(coverImageData) > 0 {
qb := r.repository.Scene
qb := r.repository.Scene
// update cover table
if err := qb.UpdateCover(ctx, s.ID, coverImageData); err != nil {
return err
}
// update cover table - empty data will clear the cover
if err := qb.UpdateCover(ctx, s.ID, coverImageData); err != nil {
return err
}
return nil

View file

@ -134,7 +134,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
if translator.hasField("urls") {
// ensure url not included in the input
if err := r.validateNoLegacyURLs(translator); err != nil {
if err := validateNoLegacyURLs(translator); err != nil {
return nil, err
}
@ -211,7 +211,7 @@ func (r *mutationResolver) BulkStudioUpdate(ctx context.Context, input BulkStudi
if translator.hasField("urls") {
// ensure url/twitter/instagram are not included in the input
if err := r.validateNoLegacyURLs(translator); err != nil {
if err := validateNoLegacyURLs(translator); err != nil {
return nil, err
}

View file

@ -6,7 +6,6 @@ import (
"strconv"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
func (r *queryResolver) FindFolder(ctx context.Context, id *string, path *string) (*models.Folder, error) {
@ -49,7 +48,7 @@ func (r *queryResolver) FindFolders(
) (ret *FindFoldersResultType, err error) {
var folderIDs []models.FolderID
if len(ids) > 0 {
folderIDsInt, err := stringslice.StringSliceToIntSlice(ids)
folderIDsInt, err := handleIDList(ids, "ids")
if err != nil {
return nil, err
}

View file

@ -5,7 +5,6 @@ import (
"strconv"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
func (r *queryResolver) FindGallery(ctx context.Context, id string) (ret *models.Gallery, err error) {
@ -25,7 +24,7 @@ func (r *queryResolver) FindGallery(ctx context.Context, id string) (ret *models
}
func (r *queryResolver) FindGalleries(ctx context.Context, galleryFilter *models.GalleryFilterType, filter *models.FindFilterType, ids []string) (ret *FindGalleriesResultType, err error) {
idInts, err := stringslice.StringSliceToIntSlice(ids)
idInts, err := handleIDList(ids, "ids")
if err != nil {
return nil, err
}

View file

@ -5,7 +5,6 @@ import (
"strconv"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
func (r *queryResolver) FindGroup(ctx context.Context, id string) (ret *models.Group, err error) {
@ -25,7 +24,7 @@ func (r *queryResolver) FindGroup(ctx context.Context, id string) (ret *models.G
}
func (r *queryResolver) FindGroups(ctx context.Context, groupFilter *models.GroupFilterType, filter *models.FindFilterType, ids []string) (ret *FindGroupsResultType, err error) {
idInts, err := stringslice.StringSliceToIntSlice(ids)
idInts, err := handleIDList(ids, "ids")
if err != nil {
return nil, err
}

View file

@ -7,7 +7,6 @@ import (
"github.com/99designs/gqlgen/graphql"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
func (r *queryResolver) FindImage(ctx context.Context, id *string, checksum *string) (*models.Image, error) {
@ -55,7 +54,7 @@ func (r *queryResolver) FindImages(
filter *models.FindFilterType,
) (ret *FindImagesResultType, err error) {
if len(ids) > 0 {
imageIds, err = stringslice.StringSliceToIntSlice(ids)
imageIds, err = handleIDList(ids, "ids")
if err != nil {
return nil, err
}

View file

@ -5,7 +5,6 @@ import (
"strconv"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
func (r *queryResolver) FindMovie(ctx context.Context, id string) (ret *models.Group, err error) {
@ -25,7 +24,7 @@ func (r *queryResolver) FindMovie(ctx context.Context, id string) (ret *models.G
}
func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.GroupFilterType, filter *models.FindFilterType, ids []string) (ret *FindMoviesResultType, err error) {
idInts, err := stringslice.StringSliceToIntSlice(ids)
idInts, err := handleIDList(ids, "ids")
if err != nil {
return nil, err
}

View file

@ -5,7 +5,6 @@ import (
"strconv"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
func (r *queryResolver) FindPerformer(ctx context.Context, id string) (ret *models.Performer, err error) {
@ -26,7 +25,7 @@ func (r *queryResolver) FindPerformer(ctx context.Context, id string) (ret *mode
func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *models.PerformerFilterType, filter *models.FindFilterType, performerIDs []int, ids []string) (ret *FindPerformersResultType, err error) {
if len(ids) > 0 {
performerIDs, err = stringslice.StringSliceToIntSlice(ids)
performerIDs, err = handleIDList(ids, "ids")
if err != nil {
return nil, err
}

View file

@ -9,7 +9,6 @@ import (
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
func (r *queryResolver) FindScene(ctx context.Context, id *string, checksum *string) (*models.Scene, error) {
@ -83,7 +82,7 @@ func (r *queryResolver) FindScenes(
filter *models.FindFilterType,
) (ret *FindScenesResultType, err error) {
if len(ids) > 0 {
sceneIDs, err = stringslice.StringSliceToIntSlice(ids)
sceneIDs, err = handleIDList(ids, "ids")
if err != nil {
return nil, err
}

View file

@ -4,11 +4,10 @@ import (
"context"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
func (r *queryResolver) FindSceneMarkers(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, filter *models.FindFilterType, ids []string) (ret *FindSceneMarkersResultType, err error) {
idInts, err := stringslice.StringSliceToIntSlice(ids)
idInts, err := handleIDList(ids, "ids")
if err != nil {
return nil, err
}

View file

@ -5,7 +5,6 @@ import (
"strconv"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
func (r *queryResolver) FindStudio(ctx context.Context, id string) (ret *models.Studio, err error) {
@ -26,7 +25,7 @@ func (r *queryResolver) FindStudio(ctx context.Context, id string) (ret *models.
}
func (r *queryResolver) FindStudios(ctx context.Context, studioFilter *models.StudioFilterType, filter *models.FindFilterType, ids []string) (ret *FindStudiosResultType, err error) {
idInts, err := stringslice.StringSliceToIntSlice(ids)
idInts, err := handleIDList(ids, "ids")
if err != nil {
return nil, err
}

View file

@ -5,7 +5,6 @@ import (
"strconv"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
func (r *queryResolver) FindTag(ctx context.Context, id string) (ret *models.Tag, err error) {
@ -25,7 +24,7 @@ func (r *queryResolver) FindTag(ctx context.Context, id string) (ret *models.Tag
}
func (r *queryResolver) FindTags(ctx context.Context, tagFilter *models.TagFilterType, filter *models.FindFilterType, ids []string) (ret *FindTagsResultType, err error) {
idInts, err := stringslice.StringSliceToIntSlice(ids)
idInts, err := handleIDList(ids, "ids")
if err != nil {
return nil, err
}

View file

@ -350,7 +350,46 @@ func (r *queryResolver) ScrapeSingleStudio(ctx context.Context, source scraper.S
return nil, nil
}
return nil, errors.New("stash_box_index must be set")
return nil, errors.New("stash_box_endpoint must be set")
}
// ScrapeSingleTag queries a stash-box source for tags matching input.Query.
// One of source.StashBoxIndex / source.StashBoxEndpoint must identify a
// configured stash-box instance; otherwise an error is returned.
func (r *queryResolver) ScrapeSingleTag(ctx context.Context, source scraper.Source, input ScrapeSingleTagInput) ([]*models.ScrapedTag, error) {
	if source.StashBoxIndex != nil || source.StashBoxEndpoint != nil {
		b, err := resolveStashBox(source.StashBoxIndex, source.StashBoxEndpoint)
		if err != nil {
			return nil, err
		}

		client := r.newStashBoxClient(*b)

		var ret []*models.ScrapedTag
		// NOTE(review): input.Query is dereferenced without a nil check -
		// presumably the GraphQL schema guarantees it is set for this path;
		// confirm, otherwise this panics on a missing query.
		out, err := client.QueryTag(ctx, *input.Query)
		if err != nil {
			return nil, err
		} else if out != nil {
			ret = append(ret, out...)
		}

		if len(ret) > 0 {
			// match scraped tags against existing local tags so stored ids
			// are populated on the results
			if err := r.withReadTxn(ctx, func(ctx context.Context) error {
				for _, tag := range ret {
					if err := match.ScrapedTag(ctx, r.repository.Tag, tag, b.Endpoint); err != nil {
						return err
					}
				}
				return nil
			}); err != nil {
				return nil, err
			}

			return ret, nil
		}

		// no matches found - not an error
		return nil, nil
	}

	return nil, errors.New("stash_box_endpoint must be set")
}
func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source scraper.Source, input ScrapeSinglePerformerInput) ([]*models.ScrapedPerformer, error) {

View file

@ -12,6 +12,7 @@ import (
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/internal/static"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/file/video"
"github.com/stashapp/stash/pkg/fsutil"
@ -243,6 +244,12 @@ func (rs sceneRoutes) streamSegment(w http.ResponseWriter, r *http.Request, stre
}
func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) {
// if default flag is set, return the default image
if r.URL.Query().Get("default") == "true" {
utils.ServeImage(w, r, static.ReadAll(static.DefaultSceneImage))
return
}
scene := r.Context().Value(sceneKey).(*models.Scene)
ss := manager.SceneServer{

View file

@ -11,6 +11,7 @@ import (
"net/http"
"os"
"path"
"path/filepath"
"runtime/debug"
"strconv"
"strings"
@ -255,6 +256,9 @@ func Initialize() (*Server, error) {
staticUI = statigz.FileServer(ui.UIBox.(fs.ReadDirFS))
}
// handle favicon override
r.HandleFunc("/favicon.ico", handleFavicon(staticUI))
// Serve the web app
r.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path)
@ -295,6 +299,31 @@ func Initialize() (*Server, error) {
return server, nil
}
// handleFavicon returns the handler for /favicon.ico. If a favicon.ico file
// exists in the configuration directory at startup it is served in place of
// the favicon embedded in the UI bundle.
// NOTE(review): the existence check runs once when the server initialises,
// so adding or removing a custom favicon requires a restart to take effect.
func handleFavicon(staticUI *statigz.Server) func(w http.ResponseWriter, r *http.Request) {
	mgr := manager.GetInstance()
	cfg := mgr.Config

	// check if favicon.ico exists in the config directory
	// if so, use that
	// otherwise, use the embedded one
	iconPath := filepath.Join(cfg.GetConfigPath(), "favicon.ico")
	exists, _ := fsutil.FileExists(iconPath)
	if exists {
		logger.Debugf("Using custom favicon at %s", iconPath)
	}

	return func(w http.ResponseWriter, r *http.Request) {
		// always revalidate so browsers pick up a changed favicon promptly
		w.Header().Set("Cache-Control", "no-cache")
		if exists {
			http.ServeFile(w, r, iconPath)
		} else {
			staticUI.ServeHTTP(w, r)
		}
	}
}
// Start starts the server. It listens on the configured address and port.
// It calls ListenAndServeTLS if TLS is configured, otherwise it calls ListenAndServe.
// Calls to Start are blocked until the server is shutdown.

View file

@ -17,6 +17,16 @@ import (
"golang.org/x/term"
)
// isDesktop caches whether stash is running in a desktop environment.
// It is written once by InitIsDesktop and read via IsDesktop.
var isDesktop bool

// InitIsDesktop sets the value of isDesktop.
// Changed IsDesktop to be evaluated once at startup because if it is
// checked while there are open terminal sessions (such as the ffmpeg hardware
// encoding checks), it may return false.
func InitIsDesktop() {
	isDesktop = isDesktopCheck()
}
type FaviconProvider interface {
GetFavicon() []byte
GetFaviconPng() []byte
@ -59,22 +69,33 @@ func SendNotification(title string, text string) {
}
// IsDesktop reports whether stash is running in a desktop environment.
// The value is computed once by InitIsDesktop at startup.
func IsDesktop() bool {
	return isDesktop
}
// isDesktop tries to determine if the application is running in a desktop environment
// where desktop features like system tray and notifications should be enabled.
func isDesktopCheck() bool {
if isDoubleClickLaunched() {
logger.Debug("Detected double-click launch")
return true
}
// Check if running under root
if os.Getuid() == 0 {
logger.Debug("Running as root, disabling desktop features")
return false
}
// Check if stdin is a terminal
if term.IsTerminal(int(os.Stdin.Fd())) {
logger.Debug("Running in terminal, disabling desktop features")
return false
}
if isService() {
logger.Debug("Running as a service, disabling desktop features")
return false
}
if IsServerDockerized() {
logger.Debug("Running in docker, disabling desktop features")
return false
}

View file

@ -3,6 +3,7 @@
package desktop
import (
"fmt"
"runtime"
"strings"
@ -58,12 +59,12 @@ func startSystray(exit chan int, faviconProvider FaviconProvider) {
func systrayInitialize(exit chan<- int, faviconProvider FaviconProvider) {
favicon := faviconProvider.GetFavicon()
systray.SetTemplateIcon(favicon, favicon)
systray.SetTooltip("🟢 Stash is Running.")
c := config.GetInstance()
systray.SetTooltip(fmt.Sprintf("🟢 Stash is Running on port %d.", c.GetPort()))
openStashButton := systray.AddMenuItem("Open Stash", "Open a browser window to Stash")
var menuItems []string
systray.AddSeparator()
c := config.GetInstance()
if !c.IsNewSystem() {
menuItems = c.GetMenuItems()
for _, item := range menuItems {

333
internal/dlna/activity.go Normal file
View file

@ -0,0 +1,333 @@
package dlna
import (
"context"
"fmt"
"sync"
"time"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/txn"
)
const (
// DefaultSessionTimeout is the time after which a session is considered complete
// if no new requests are received.
// This is set high (5 minutes) because DLNA clients buffer aggressively and may not
// send any HTTP requests for extended periods while the user is still watching.
DefaultSessionTimeout = 5 * time.Minute
// monitorInterval is how often we check for expired sessions.
monitorInterval = 10 * time.Second
)
// ActivityConfig provides configuration options for DLNA activity tracking.
type ActivityConfig interface {
// GetDLNAActivityTrackingEnabled returns true if activity tracking should be enabled.
// If not implemented, defaults to true.
GetDLNAActivityTrackingEnabled() bool
// GetMinimumPlayPercent returns the minimum percentage of a video that must be
// watched before incrementing the play count. Uses UI setting if available.
GetMinimumPlayPercent() int
}
// SceneActivityWriter provides methods for saving scene activity.
type SceneActivityWriter interface {
SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error)
AddViews(ctx context.Context, sceneID int, dates []time.Time) ([]time.Time, error)
}
// streamSession represents an active DLNA streaming session.
type streamSession struct {
SceneID int
ClientIP string
StartTime time.Time
LastActivity time.Time
VideoDuration float64
PlayCountAdded bool
}
// sessionKey generates a unique key for a session based on client IP and scene ID.
func sessionKey(clientIP string, sceneID int) string {
return fmt.Sprintf("%s:%d", clientIP, sceneID)
}
// percentWatched calculates the estimated percentage of video watched.
// Uses a time-based approach since DLNA clients buffer aggressively and byte
// positions don't correlate with actual playback position.
//
// The key insight: you cannot have watched more of the video than time has elapsed.
// If the video is 30 minutes and only 1 minute has passed, maximum watched is ~3.3%.
func (s *streamSession) percentWatched() float64 {
if s.VideoDuration <= 0 {
return 0
}
// Calculate elapsed time from session start to last activity
elapsed := s.LastActivity.Sub(s.StartTime).Seconds()
if elapsed <= 0 {
return 0
}
// Maximum possible percent is based on elapsed time
// You can't watch more of the video than time has passed
timeBasedPercent := (elapsed / s.VideoDuration) * 100
// Cap at 100%
if timeBasedPercent > 100 {
return 100
}
return timeBasedPercent
}
// estimatedResumeTime calculates the estimated resume time based on elapsed time.
// Since DLNA clients buffer aggressively, byte positions don't correlate with playback.
// Instead, we estimate based on how long the session has been active.
// Returns the time in seconds, or 0 if the video is nearly complete (>=98%).
func (s *streamSession) estimatedResumeTime() float64 {
if s.VideoDuration <= 0 {
return 0
}
// Calculate elapsed time from session start
elapsed := s.LastActivity.Sub(s.StartTime).Seconds()
if elapsed <= 0 {
return 0
}
// If elapsed time exceeds 98% of video duration, reset resume time (matches frontend behavior)
if elapsed >= s.VideoDuration*0.98 {
return 0
}
// Resume time is approximately where the user was watching
// Capped by video duration
if elapsed > s.VideoDuration {
elapsed = s.VideoDuration
}
return elapsed
}
// ActivityTracker tracks DLNA streaming activity and saves it to the database.
type ActivityTracker struct {
	txnManager txn.Manager // may be nil (tests); DB saves are then skipped
	sceneWriter SceneActivityWriter
	config ActivityConfig // may be nil; defaults used (tracking on, min percent 0)
	sessionTimeout time.Duration
	sessions map[string]*streamSession // keyed by sessionKey; guarded by mutex
	mutex sync.RWMutex
	ctx context.Context // lifetime of the monitor goroutine; cancelled by Stop
	cancelFunc context.CancelFunc
	wg sync.WaitGroup // lets Stop wait for the monitor goroutine to exit
}

// NewActivityTracker creates a new ActivityTracker and starts its background
// session-monitor goroutine. Call Stop to shut the tracker down and flush any
// in-flight sessions.
func NewActivityTracker(
	txnManager txn.Manager,
	sceneWriter SceneActivityWriter,
	config ActivityConfig,
) *ActivityTracker {
	ctx, cancel := context.WithCancel(context.Background())

	tracker := &ActivityTracker{
		txnManager: txnManager,
		sceneWriter: sceneWriter,
		config: config,
		sessionTimeout: DefaultSessionTimeout,
		sessions: make(map[string]*streamSession),
		ctx: ctx,
		cancelFunc: cancel,
	}

	// Start the session monitor goroutine
	tracker.wg.Add(1)
	go tracker.monitorSessions()

	return tracker
}
// Stop stops the activity tracker and processes any remaining sessions.
// The ordering is deliberate: cancel the monitor goroutine, wait for it to
// exit, then drain the map — ensuring no session is processed twice.
func (t *ActivityTracker) Stop() {
	t.cancelFunc()
	t.wg.Wait()

	// Process any remaining sessions
	// Snapshot and clear under the lock, but process outside it because
	// processCompletedSession may perform DB work.
	t.mutex.Lock()
	sessions := make([]*streamSession, 0, len(t.sessions))
	for _, session := range t.sessions {
		sessions = append(sessions, session)
	}
	t.sessions = make(map[string]*streamSession)
	t.mutex.Unlock()

	for _, session := range sessions {
		t.processCompletedSession(session)
	}
}
// RecordRequest records a streaming request for activity tracking.
// Each request updates the session's LastActivity time, which is used for
// time-based tracking of watch progress. No-op when tracking is disabled.
func (t *ActivityTracker) RecordRequest(sceneID int, clientIP string, videoDuration float64) {
	if !t.isEnabled() {
		return
	}

	key := sessionKey(clientIP, sceneID)
	now := time.Now()

	t.mutex.Lock()
	defer t.mutex.Unlock()

	session, exists := t.sessions[key]
	if !exists {
		// first request from this client for this scene - start a new session;
		// videoDuration is captured here and not refreshed by later requests
		session = &streamSession{
			SceneID: sceneID,
			ClientIP: clientIP,
			StartTime: now,
			VideoDuration: videoDuration,
		}
		t.sessions[key] = session
		logger.Debugf("[DLNA Activity] New session started: scene=%d, client=%s", sceneID, clientIP)
	}

	// bump the activity timestamp for both new and existing sessions
	session.LastActivity = now
}
// monitorSessions periodically checks for expired sessions and processes them.
// Runs on its own goroutine (started by NewActivityTracker) until t.ctx is
// cancelled by Stop; t.wg tracks its lifetime.
func (t *ActivityTracker) monitorSessions() {
	defer t.wg.Done()

	ticker := time.NewTicker(monitorInterval)
	defer ticker.Stop()

	for {
		select {
		case <-t.ctx.Done():
			return
		case <-ticker.C:
			t.processExpiredSessions()
		}
	}
}
// processExpiredSessions finds and processes sessions that have timed out.
// Expired sessions are removed from the map (and possibly adjusted) under the
// lock, then processed outside it since processing may perform DB work.
func (t *ActivityTracker) processExpiredSessions() {
	now := time.Now()
	var expiredSessions []*streamSession

	t.mutex.Lock()
	for key, session := range t.sessions {
		timeSinceStart := now.Sub(session.StartTime)
		timeSinceActivity := now.Sub(session.LastActivity)

		// Must have no HTTP activity for the full timeout period
		if timeSinceActivity <= t.sessionTimeout {
			continue
		}

		// DLNA clients buffer aggressively - they fetch most/all of the video quickly,
		// then play from cache with NO further HTTP requests.
		//
		// Two scenarios:
		// 1. User watched the whole video: timeSinceStart >= videoDuration
		//    -> Set LastActivity to when timeout began (they finished watching)
		// 2. User stopped early: timeSinceStart < videoDuration
		//    -> Keep LastActivity as-is (best estimate of when they stopped)
		videoDuration := time.Duration(session.VideoDuration) * time.Second
		if timeSinceStart >= videoDuration && videoDuration > 0 {
			// User likely watched the whole video, then it timed out
			// Estimate they watched until the timeout period started
			session.LastActivity = now.Add(-t.sessionTimeout)
		}
		// else: User stopped early - LastActivity is already our best estimate

		expiredSessions = append(expiredSessions, session)
		delete(t.sessions, key)
	}
	t.mutex.Unlock()

	// deleted from the map above, so each session is now exclusively owned here
	for _, session := range expiredSessions {
		t.processCompletedSession(session)
	}
}
// processCompletedSession saves activity data for a completed streaming session.
// Called after the session has been removed from the sessions map, so the
// session is exclusively owned by this call.
func (t *ActivityTracker) processCompletedSession(session *streamSession) {
	percentWatched := session.percentWatched()
	resumeTime := session.estimatedResumeTime()

	logger.Debugf("[DLNA Activity] Session completed: scene=%d, client=%s, videoDuration=%.1fs, percent=%.1f%%, resume=%.1fs",
		session.SceneID, session.ClientIP, session.VideoDuration, percentWatched, resumeTime)

	// Only save if there was meaningful activity (at least 1% watched)
	if percentWatched < 1 {
		logger.Debugf("[DLNA Activity] Session too short, skipping save")
		return
	}

	// Skip DB operations if txnManager is nil (for testing)
	if t.txnManager == nil {
		logger.Debugf("[DLNA Activity] No transaction manager, skipping DB save")
		return
	}

	// Determine what needs to be saved
	shouldSaveResume := resumeTime > 0
	// NOTE(review): sessions are removed from the map before reaching here, so
	// PlayCountAdded appears to always be false at this check; it only guards
	// against double-counting within this single call. Confirm whether it can
	// ever be set earlier in a session's lifetime.
	shouldAddView := !session.PlayCountAdded && percentWatched >= float64(t.getMinimumPlayPercent())

	// Nothing to save
	if !shouldSaveResume && !shouldAddView {
		return
	}

	// Save everything in a single transaction
	ctx := context.Background()
	if err := txn.WithTxn(ctx, t.txnManager, func(ctx context.Context) error {
		// Save resume time only. DLNA clients buffer aggressively and don't report
		// playback position, so we can't accurately track play duration - saving
		// guesses would corrupt analytics. Resume time is still useful as a
		// "continue watching" hint even if imprecise.
		if shouldSaveResume {
			if _, err := t.sceneWriter.SaveActivity(ctx, session.SceneID, &resumeTime, nil); err != nil {
				return fmt.Errorf("save resume time: %w", err)
			}
		}

		// Increment play count (also updates last_played_at via view date)
		if shouldAddView {
			if _, err := t.sceneWriter.AddViews(ctx, session.SceneID, []time.Time{time.Now()}); err != nil {
				return fmt.Errorf("add view: %w", err)
			}
			session.PlayCountAdded = true
			logger.Debugf("[DLNA Activity] Incremented play count for scene %d (%.1f%% watched)",
				session.SceneID, percentWatched)
		}

		return nil
	}); err != nil {
		// best-effort: log and drop rather than propagating - the session is
		// already gone from the map, so there is no caller to retry
		logger.Warnf("[DLNA Activity] Failed to save activity for scene %d: %v", session.SceneID, err)
	}
}
// isEnabled reports whether activity tracking is switched on.
// With no config supplied, tracking defaults to enabled.
func (t *ActivityTracker) isEnabled() bool {
	if t.config != nil {
		return t.config.GetDLNAActivityTrackingEnabled()
	}
	return true
}

// getMinimumPlayPercent returns the minimum watched percentage required to
// increment a scene's play count. With no config supplied it is 0, meaning
// any play counts (matching the frontend default).
func (t *ActivityTracker) getMinimumPlayPercent() int {
	if t.config != nil {
		return t.config.GetMinimumPlayPercent()
	}
	return 0
}

View file

@ -0,0 +1,420 @@
package dlna
import (
"context"
"sync"
"testing"
"time"
"github.com/stretchr/testify/assert"
)
// mockSceneWriter is a mock implementation of SceneActivityWriter
type mockSceneWriter struct {
mu sync.Mutex
saveActivityCalls []saveActivityCall
addViewsCalls []addViewsCall
}
type saveActivityCall struct {
sceneID int
resumeTime *float64
playDuration *float64
}
type addViewsCall struct {
sceneID int
dates []time.Time
}
func (m *mockSceneWriter) SaveActivity(_ context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) {
m.mu.Lock()
m.saveActivityCalls = append(m.saveActivityCalls, saveActivityCall{
sceneID: sceneID,
resumeTime: resumeTime,
playDuration: playDuration,
})
m.mu.Unlock()
return true, nil
}
func (m *mockSceneWriter) AddViews(_ context.Context, sceneID int, dates []time.Time) ([]time.Time, error) {
m.mu.Lock()
m.addViewsCalls = append(m.addViewsCalls, addViewsCall{
sceneID: sceneID,
dates: dates,
})
m.mu.Unlock()
return dates, nil
}
// mockConfig is a mock implementation of ActivityConfig
type mockConfig struct {
enabled bool
minPlayPercent int
}
func (c *mockConfig) GetDLNAActivityTrackingEnabled() bool {
return c.enabled
}
func (c *mockConfig) GetMinimumPlayPercent() int {
return c.minPlayPercent
}
func TestStreamSession_PercentWatched(t *testing.T) {
now := time.Now()
tests := []struct {
name string
startTime time.Time
lastActivity time.Time
videoDuration float64
expected float64
}{
{
name: "no video duration",
startTime: now.Add(-60 * time.Second),
lastActivity: now,
videoDuration: 0,
expected: 0,
},
{
name: "half watched",
startTime: now.Add(-60 * time.Second),
lastActivity: now,
videoDuration: 120.0, // 2 minutes, watched for 1 minute = 50%
expected: 50.0,
},
{
name: "fully watched",
startTime: now.Add(-120 * time.Second),
lastActivity: now,
videoDuration: 120.0, // 2 minutes, watched for 2 minutes = 100%
expected: 100.0,
},
{
name: "quarter watched",
startTime: now.Add(-30 * time.Second),
lastActivity: now,
videoDuration: 120.0, // 2 minutes, watched for 30 seconds = 25%
expected: 25.0,
},
{
name: "elapsed exceeds duration - capped at 100%",
startTime: now.Add(-180 * time.Second),
lastActivity: now,
videoDuration: 120.0, // 2 minutes, but 3 minutes elapsed = capped at 100%
expected: 100.0,
},
{
name: "no elapsed time",
startTime: now,
lastActivity: now,
videoDuration: 120.0,
expected: 0,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
session := &streamSession{
StartTime: tt.startTime,
LastActivity: tt.lastActivity,
VideoDuration: tt.videoDuration,
}
result := session.percentWatched()
assert.InDelta(t, tt.expected, result, 0.01)
})
}
}
func TestStreamSession_EstimatedResumeTime(t *testing.T) {
now := time.Now()
tests := []struct {
name string
startTime time.Time
lastActivity time.Time
videoDuration float64
expected float64
}{
{
name: "no elapsed time",
startTime: now,
lastActivity: now,
videoDuration: 120.0,
expected: 0,
},
{
name: "half way through",
startTime: now.Add(-60 * time.Second),
lastActivity: now,
videoDuration: 120.0, // 2 minutes, watched for 1 minute = resume at 60s
expected: 60.0,
},
{
name: "quarter way through",
startTime: now.Add(-30 * time.Second),
lastActivity: now,
videoDuration: 120.0, // 2 minutes, watched for 30 seconds = resume at 30s
expected: 30.0,
},
{
name: "98% complete - should reset to 0",
startTime: now.Add(-118 * time.Second),
lastActivity: now,
videoDuration: 120.0, // 98.3% elapsed, should reset
expected: 0,
},
{
name: "100% complete - should reset to 0",
startTime: now.Add(-120 * time.Second),
lastActivity: now,
videoDuration: 120.0,
expected: 0,
},
{
name: "elapsed exceeds duration - capped and reset to 0",
startTime: now.Add(-180 * time.Second),
lastActivity: now,
videoDuration: 120.0, // 150% elapsed, capped at 100%, reset to 0
expected: 0,
},
{
name: "no video duration",
startTime: now.Add(-60 * time.Second),
lastActivity: now,
videoDuration: 0,
expected: 0,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
session := &streamSession{
StartTime: tt.startTime,
LastActivity: tt.lastActivity,
VideoDuration: tt.videoDuration,
}
result := session.estimatedResumeTime()
assert.InDelta(t, tt.expected, result, 1.0) // Allow 1 second tolerance
})
}
}
func TestSessionKey(t *testing.T) {
key := sessionKey("192.168.1.100", 42)
assert.Equal(t, "192.168.1.100:42", key)
}
// TestActivityTracker_RecordRequest verifies that RecordRequest creates a new
// session on the first request for a client/scene pair and only refreshes
// LastActivity on subsequent requests.
func TestActivityTracker_RecordRequest(t *testing.T) {
	cfg := &mockConfig{enabled: true, minPlayPercent: 50}

	// Build the tracker directly so the background goroutine is not started.
	tracker := &ActivityTracker{
		txnManager:     nil, // no DB access needed for this test
		sceneWriter:    nil,
		config:         cfg,
		sessionTimeout: DefaultSessionTimeout,
		sessions:       make(map[string]*streamSession),
	}

	// helper to fetch a session under the read lock
	getSession := func(key string) *streamSession {
		tracker.mutex.RLock()
		defer tracker.mutex.RUnlock()
		return tracker.sessions[key]
	}

	// First request creates the session.
	tracker.RecordRequest(42, "192.168.1.100", 120.0)

	s := getSession("192.168.1.100:42")
	assert.NotNil(t, s)
	assert.Equal(t, 42, s.SceneID)
	assert.Equal(t, "192.168.1.100", s.ClientIP)
	assert.Equal(t, 120.0, s.VideoDuration)
	assert.False(t, s.StartTime.IsZero())
	assert.False(t, s.LastActivity.IsZero())

	// Second request should only bump LastActivity forward.
	previous := s.LastActivity
	time.Sleep(10 * time.Millisecond)
	tracker.RecordRequest(42, "192.168.1.100", 120.0)

	s = getSession("192.168.1.100:42")
	assert.True(t, s.LastActivity.After(previous))
}
// TestActivityTracker_DisabledTracking verifies that RecordRequest is a no-op
// and creates no session when activity tracking is disabled in configuration.
func TestActivityTracker_DisabledTracking(t *testing.T) {
	cfg := &mockConfig{enabled: false, minPlayPercent: 50}

	// Build the tracker directly so the background goroutine is not started.
	tracker := &ActivityTracker{
		txnManager:     nil,
		sceneWriter:    nil,
		config:         cfg,
		sessionTimeout: DefaultSessionTimeout,
		sessions:       make(map[string]*streamSession),
	}

	// The request must be ignored while tracking is disabled.
	tracker.RecordRequest(42, "192.168.1.100", 120.0)

	tracker.mutex.RLock()
	defer tracker.mutex.RUnlock()
	assert.Equal(t, 0, len(tracker.sessions))
}
// TestActivityTracker_SessionExpiration verifies that processExpiredSessions
// removes a session whose last activity exceeded the timeout, even when the
// database write is skipped because no transaction manager is configured.
func TestActivityTracker_SessionExpiration(t *testing.T) {
	writer := &mockSceneWriter{}
	cfg := &mockConfig{enabled: true, minPlayPercent: 10}

	// txnManager is nil: processCompletedSession is exercised elsewhere, this
	// test only covers the session bookkeeping.
	tracker := &ActivityTracker{
		txnManager:     nil,
		sceneWriter:    writer,
		config:         cfg,
		sessionTimeout: 100 * time.Millisecond,
		sessions:       make(map[string]*streamSession),
	}

	// Seed a session for a short (1 second) video so the test can observe
	// expiration quickly; its last activity is older than the 100ms timeout.
	now := time.Now()
	tracker.sessions["192.168.1.100:42"] = &streamSession{
		SceneID:       42,
		ClientIP:      "192.168.1.100",
		StartTime:     now.Add(-5 * time.Second),          // started 5s ago
		LastActivity:  now.Add(-200 * time.Millisecond),   // idle > 100ms timeout
		VideoDuration: 1.0,                                // timeSinceStart > videoDuration
	}
	assert.Len(t, tracker.sessions, 1)

	// The expired session must be removed even though DB calls are skipped.
	tracker.processExpiredSessions()
	assert.Len(t, tracker.sessions, 0)
}
// TestActivityTracker_SessionExpiration_StoppedEarly verifies that a session
// expires once the activity timeout elapses even when the video is far from
// finished. This guards against a regression where sessions lingered until
// the full video duration had passed.
func TestActivityTracker_SessionExpiration_StoppedEarly(t *testing.T) {
	cfg := &mockConfig{enabled: true, minPlayPercent: 10}

	tracker := &ActivityTracker{
		txnManager:     nil,
		sceneWriter:    nil,
		config:         cfg,
		sessionTimeout: 100 * time.Millisecond,
		sessions:       make(map[string]*streamSession),
	}

	// Simulate a user who started a 30-minute video but stopped after 5s.
	now := time.Now()
	tracker.sessions["192.168.1.100:42"] = &streamSession{
		SceneID:       42,
		ClientIP:      "192.168.1.100",
		StartTime:     now.Add(-5 * time.Second),        // started 5s ago
		LastActivity:  now.Add(-200 * time.Millisecond), // idle > 100ms timeout
		VideoDuration: 1800.0,                           // far longer than elapsed time
	}
	assert.Len(t, tracker.sessions, 1)

	// timeSinceActivity > timeout, so the session must expire regardless of
	// how little of the 30-minute video was watched.
	tracker.processExpiredSessions()
	assert.Len(t, tracker.sessions, 0, "Session should expire when user stops early, not wait for video duration")
}
// TestActivityTracker_MinimumPlayPercentThreshold verifies that the tracker
// reports its configured minimum-play-percent, and that a partially watched
// session falls below a high threshold.
func TestActivityTracker_MinimumPlayPercentThreshold(t *testing.T) {
	cfg := &mockConfig{enabled: true, minPlayPercent: 75} // deliberately high

	tracker := &ActivityTracker{
		txnManager:     nil,
		sceneWriter:    nil,
		config:         cfg,
		sessionTimeout: 50 * time.Millisecond,
		sessions:       make(map[string]*streamSession),
	}

	// The tracker must surface the configured threshold.
	assert.Equal(t, 75, tracker.getMinimumPlayPercent())

	// 36 seconds of a 120 second video is 30% watched.
	now := time.Now()
	s := &streamSession{
		SceneID:       42,
		StartTime:     now.Add(-36 * time.Second),
		LastActivity:  now,
		VideoDuration: 120.0,
	}

	watched := s.percentWatched()
	assert.InDelta(t, 30.0, watched, 0.1)
	// 30% watched falls short of the 75% threshold.
	assert.False(t, watched >= float64(tracker.getMinimumPlayPercent()))
}
// TestActivityTracker_MultipleSessions verifies that sessions are keyed by
// both client IP and scene ID, so distinct clients and distinct scenes each
// get their own session.
func TestActivityTracker_MultipleSessions(t *testing.T) {
	cfg := &mockConfig{enabled: true, minPlayPercent: 50}

	// Build the tracker directly so the background goroutine is not started.
	tracker := &ActivityTracker{
		txnManager:     nil,
		sceneWriter:    nil,
		config:         cfg,
		sessionTimeout: DefaultSessionTimeout,
		sessions:       make(map[string]*streamSession),
	}

	// Two clients on the same scene, then one of them on a second scene,
	// should produce three independent sessions.
	tracker.RecordRequest(42, "192.168.1.100", 120.0)
	tracker.RecordRequest(42, "192.168.1.101", 120.0)
	tracker.RecordRequest(43, "192.168.1.100", 180.0)

	tracker.mutex.RLock()
	defer tracker.mutex.RUnlock()
	assert.Len(t, tracker.sessions, 3)
}
// TestActivityTracker_ShortSessionIgnored verifies that a very short session
// (below both the 1% watched and 5 second elapsed minimums) is considered
// skippable.
func TestActivityTracker_ShortSessionIgnored(t *testing.T) {
	// One second of a 2 minute video: roughly 0.83% watched.
	now := time.Now()
	s := &streamSession{
		SceneID:       42,
		ClientIP:      "192.168.1.100",
		StartTime:     now.Add(-1 * time.Second),
		LastActivity:  now,
		VideoDuration: 120.0,
	}

	watched := s.percentWatched()
	elapsed := s.LastActivity.Sub(s.StartTime).Seconds()

	// Sanity-check both measurements (1s / 120s = 0.83%).
	assert.InDelta(t, 0.83, watched, 0.1)
	assert.InDelta(t, 1.0, elapsed, 0.5)

	// Below both minimum thresholds (1% watched and 5 seconds elapsed),
	// the session should be discarded.
	assert.True(t, watched < 1 && elapsed < 5, "Short session should be skipped")
}

View file

@ -278,6 +278,7 @@ type Server struct {
repository Repository
sceneServer sceneServer
ipWhitelistManager *ipWhitelistManager
activityTracker *ActivityTracker
VideoSortOrder string
subscribeLock sync.Mutex
@ -596,6 +597,7 @@ func (me *Server) initMux(mux *http.ServeMux) {
mux.HandleFunc(resPath, func(w http.ResponseWriter, r *http.Request) {
sceneId := r.URL.Query().Get("scene")
var scene *models.Scene
var videoDuration float64
repo := me.repository
err := repo.WithReadTxn(r.Context(), func(ctx context.Context) error {
sceneIdInt, err := strconv.Atoi(sceneId)
@ -603,6 +605,15 @@ func (me *Server) initMux(mux *http.ServeMux) {
return nil
}
scene, _ = repo.SceneFinder.Find(ctx, sceneIdInt)
if scene != nil {
// Load primary file to get duration for activity tracking
if err := scene.LoadPrimaryFile(ctx, repo.FileGetter); err != nil {
logger.Debugf("failed to load primary file for scene %d: %v", sceneIdInt, err)
}
if f := scene.Files.Primary(); f != nil {
videoDuration = f.Duration
}
}
return nil
})
if err != nil {
@ -615,6 +626,14 @@ func (me *Server) initMux(mux *http.ServeMux) {
w.Header().Set("transferMode.dlna.org", "Streaming")
w.Header().Set("contentFeatures.dlna.org", "DLNA.ORG_OP=01;DLNA.ORG_CI=0;DLNA.ORG_FLAGS=01500000000000000000000000000000")
// Track activity - uses time-based tracking, updated on each request
if me.activityTracker != nil {
sceneIdInt, _ := strconv.Atoi(sceneId)
clientIP, _, _ := net.SplitHostPort(r.RemoteAddr)
me.activityTracker.RecordRequest(sceneIdInt, clientIP, videoDuration)
}
me.sceneServer.StreamSceneDirect(scene, w, r)
})
mux.HandleFunc(rootDescPath, func(w http.ResponseWriter, r *http.Request) {

View file

@ -77,13 +77,29 @@ type Config interface {
GetDLNADefaultIPWhitelist() []string
GetVideoSortOrder() string
GetDLNAPortAsString() string
GetDLNAActivityTrackingEnabled() bool
}
// activityConfig wraps Config to implement ActivityConfig.
type activityConfig struct {
config Config
minPlayPercent int // cached from UI config
}
func (c *activityConfig) GetDLNAActivityTrackingEnabled() bool {
return c.config.GetDLNAActivityTrackingEnabled()
}
func (c *activityConfig) GetMinimumPlayPercent() int {
return c.minPlayPercent
}
type Service struct {
repository Repository
config Config
sceneServer sceneServer
ipWhitelistMgr *ipWhitelistManager
repository Repository
config Config
sceneServer sceneServer
ipWhitelistMgr *ipWhitelistManager
activityTracker *ActivityTracker
server *Server
running bool
@ -155,6 +171,7 @@ func (s *Service) init() error {
repository: s.repository,
sceneServer: s.sceneServer,
ipWhitelistManager: s.ipWhitelistMgr,
activityTracker: s.activityTracker,
Interfaces: interfaces,
HTTPConn: func() net.Listener {
conn, err := net.Listen("tcp", dmsConfig.Http)
@ -215,7 +232,14 @@ func (s *Service) init() error {
// }
// NewService initialises and returns a new DLNA service.
func NewService(repo Repository, cfg Config, sceneServer sceneServer) *Service {
// The sceneWriter parameter should implement SceneActivityWriter (typically models.SceneReaderWriter).
// The minPlayPercent parameter is the minimum percentage of video that must be played to increment play count.
func NewService(repo Repository, cfg Config, sceneServer sceneServer, sceneWriter SceneActivityWriter, minPlayPercent int) *Service {
activityCfg := &activityConfig{
config: cfg,
minPlayPercent: minPlayPercent,
}
ret := &Service{
repository: repo,
sceneServer: sceneServer,
@ -223,7 +247,8 @@ func NewService(repo Repository, cfg Config, sceneServer sceneServer) *Service {
ipWhitelistMgr: &ipWhitelistManager{
config: cfg,
},
mutex: sync.Mutex{},
activityTracker: NewActivityTracker(repo.TxnManager, sceneWriter, activityCfg),
mutex: sync.Mutex{},
}
return ret
@ -283,6 +308,12 @@ func (s *Service) Stop(duration *time.Duration) {
if s.running {
logger.Info("Stopping DLNA")
// Stop activity tracker first to process any pending sessions
if s.activityTracker != nil {
s.activityTracker.Stop()
}
err := s.server.Close()
if err != nil {
logger.Error(err)

View file

@ -219,6 +219,7 @@ const (
DisableDropdownCreateStudio = "disable_dropdown_create.studio"
DisableDropdownCreateTag = "disable_dropdown_create.tag"
DisableDropdownCreateMovie = "disable_dropdown_create.movie"
DisableDropdownCreateGallery = "disable_dropdown_create.gallery"
HandyKey = "handy_key"
FunscriptOffset = "funscript_offset"
@ -1311,6 +1312,7 @@ func (i *Config) GetDisableDropdownCreate() *ConfigDisableDropdownCreate {
Studio: i.getBool(DisableDropdownCreateStudio),
Tag: i.getBool(DisableDropdownCreateTag),
Movie: i.getBool(DisableDropdownCreateMovie),
Gallery: i.getBool(DisableDropdownCreateGallery),
}
}
@ -1321,6 +1323,26 @@ func (i *Config) GetUIConfiguration() map[string]interface{} {
return i.forKey(UI).Cut(UI).Raw()
}
// GetMinimumPlayPercent returns the minimum percentage of a video that must be
// watched before incrementing the play count. Returns 0 if not configured.
func (i *Config) GetMinimumPlayPercent() int {
uiConfig := i.GetUIConfiguration()
if uiConfig == nil {
return 0
}
if val, ok := uiConfig["minimumPlayPercent"]; ok {
switch v := val.(type) {
case int:
return v
case float64:
return int(v)
case int64:
return int(v)
}
}
return 0
}
func (i *Config) SetUIConfiguration(v map[string]interface{}) {
i.Lock()
defer i.Unlock()
@ -1613,6 +1635,22 @@ func (i *Config) GetDLNAPortAsString() string {
return ":" + strconv.Itoa(i.GetDLNAPort())
}
// GetDLNAActivityTrackingEnabled returns true if DLNA activity tracking is enabled.
// This uses the same "trackActivity" UI setting that controls frontend play history tracking.
// When enabled, scenes played via DLNA will have their play count and duration tracked.
func (i *Config) GetDLNAActivityTrackingEnabled() bool {
uiConfig := i.GetUIConfiguration()
if uiConfig == nil {
return true // Default to enabled
}
if val, ok := uiConfig["trackActivity"]; ok {
if v, ok := val.(bool); ok {
return v
}
}
return true // Default to enabled
}
// GetVideoSortOrder returns the sort order to display videos. If
// empty, videos will be sorted by titles.
func (i *Config) GetVideoSortOrder() string {

View file

@ -105,4 +105,5 @@ type ConfigDisableDropdownCreate struct {
Tag bool `json:"tag"`
Studio bool `json:"studio"`
Movie bool `json:"movie"`
Gallery bool `json:"gallery"`
}

View file

@ -78,7 +78,7 @@ func Initialize(cfg *config.Config, l *log.Logger) (*Manager, error) {
}
dlnaRepository := dlna.NewRepository(repo)
dlnaService := dlna.NewService(dlnaRepository, cfg, sceneServer)
dlnaService := dlna.NewService(dlnaRepository, cfg, sceneServer, repo.Scene, cfg.GetMinimumPlayPercent())
mgr := &Manager{
Config: cfg,
@ -313,6 +313,7 @@ func (s *Manager) RefreshFFMpeg(ctx context.Context) {
s.FFMpeg = ffmpeg.NewEncoder(ffmpegPath)
s.FFProbe = ffmpeg.NewFFProbe(ffprobePath)
s.FFMpeg.InitHWSupport(ctx)
// initialise hardware support with background context
s.FFMpeg.InitHWSupport(context.Background())
}
}

View file

@ -219,8 +219,11 @@ func (s *Manager) Setup(ctx context.Context, input SetupInput) error {
// paths since they must not be relative. The config file property is
// resolved to an absolute path when stash is run normally, so convert
// relative paths to absolute paths during setup.
configFile, _ := filepath.Abs(input.ConfigLocation)
// #6287 - this should no longer be necessary since the ffmpeg code
// converts to absolute paths. Converting the config location to
// absolute means that scraper and plugin paths default to absolute
// which we don't want.
configFile := input.ConfigLocation
configDir := filepath.Dir(configFile)
if exists, _ := fsutil.DirExists(configDir); !exists {

View file

@ -411,12 +411,13 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator,
}
}
if j.input.Markers {
if j.input.Markers || j.input.MarkerImagePreviews || j.input.MarkerScreenshots {
task := &GenerateMarkersTask{
repository: r,
Scene: scene,
Overwrite: j.overwrite,
fileNamingAlgorithm: j.fileNamingAlgo,
VideoPreview: j.input.Markers,
ImagePreview: j.input.MarkerImagePreviews,
Screenshot: j.input.MarkerScreenshots,
@ -488,6 +489,9 @@ func (j *GenerateJob) queueMarkerJob(g *generate.Generator, marker *models.Scene
Marker: marker,
Overwrite: j.overwrite,
fileNamingAlgorithm: j.fileNamingAlgo,
VideoPreview: j.input.Markers,
ImagePreview: j.input.MarkerImagePreviews,
Screenshot: j.input.MarkerScreenshots,
generator: g,
}
j.totals.markers++

View file

@ -18,6 +18,7 @@ type GenerateMarkersTask struct {
Overwrite bool
fileNamingAlgorithm models.HashAlgorithm
VideoPreview bool
ImagePreview bool
Screenshot bool
@ -115,9 +116,11 @@ func (t *GenerateMarkersTask) generateMarker(videoFile *models.VideoFile, scene
g := t.generator
if err := g.MarkerPreviewVideo(context.TODO(), videoFile.Path, sceneHash, seconds, sceneMarker.EndSeconds, instance.Config.GetPreviewAudio()); err != nil {
logger.Errorf("[generator] failed to generate marker video: %v", err)
logErrorOutput(err)
if t.VideoPreview {
if err := g.MarkerPreviewVideo(context.TODO(), videoFile.Path, sceneHash, seconds, sceneMarker.EndSeconds, instance.Config.GetPreviewAudio()); err != nil {
logger.Errorf("[generator] failed to generate marker video: %v", err)
logErrorOutput(err)
}
}
if t.ImagePreview {
@ -164,7 +167,7 @@ func (t *GenerateMarkersTask) markerExists(sceneChecksum string, seconds int) bo
return false
}
videoExists := t.videoExists(sceneChecksum, seconds)
videoExists := !t.VideoPreview || t.videoExists(sceneChecksum, seconds)
imageExists := !t.ImagePreview || t.imageExists(sceneChecksum, seconds)
screenshotExists := !t.Screenshot || t.screenshotExists(sceneChecksum, seconds)

View file

@ -88,7 +88,7 @@ func (t *stashBoxBatchPerformerTagTask) findStashBoxPerformer(ctx context.Contex
performer = mergedPerformer
}
}
case t.performer != nil:
case t.performer != nil: // tagging or updating existing performer
var remoteID string
if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
qb := r.Performer
@ -123,6 +123,9 @@ func (t *stashBoxBatchPerformerTagTask) findStashBoxPerformer(ctx context.Contex
performer = mergedPerformer
}
}
} else {
// find by performer name instead
performer, err = client.FindPerformerByName(ctx, t.performer.Name)
}
}
@ -328,6 +331,9 @@ func (t *stashBoxBatchStudioTagTask) findStashBoxStudio(ctx context.Context) (*m
if remoteID != "" {
studio, err = client.FindStudio(ctx, remoteID)
} else {
// find by studio name instead
studio, err = client.FindStudio(ctx, t.studio.Name)
}
}

View file

@ -36,6 +36,32 @@ const minHeight int = 480
// Tests all (given) hardware codec's
func (f *FFMpeg) InitHWSupport(ctx context.Context) {
// do the hardware codec tests in a separate goroutine to avoid blocking
done := make(chan struct{})
go func() {
f.initHWSupport(ctx)
close(done)
}()
// log if the initialization takes too long
const hwInitLogTimeoutSecondsDefault = 5
hwInitLogTimeoutSeconds := hwInitLogTimeoutSecondsDefault * time.Second
timer := time.NewTimer(hwInitLogTimeoutSeconds)
go func() {
select {
case <-timer.C:
logger.Warnf("[InitHWSupport] Hardware codec initialization is taking longer than %s...", hwInitLogTimeoutSeconds)
logger.Info("[InitHWSupport] Hardware encoding will not be available until initialization is complete.")
case <-done:
if !timer.Stop() {
<-timer.C
}
}
}()
}
func (f *FFMpeg) initHWSupport(ctx context.Context) {
var hwCodecSupport []VideoCodec
// Note that the first compatible codec is returned, so order is important
@ -83,6 +109,7 @@ func (f *FFMpeg) InitHWSupport(ctx context.Context) {
defer cancel()
cmd := f.Command(testCtx, args)
cmd.WaitDelay = time.Second
logger.Tracef("[InitHWSupport] Testing codec %s: %v", codec, cmd.Args)
var stderr bytes.Buffer
@ -112,6 +139,8 @@ func (f *FFMpeg) InitHWSupport(ctx context.Context) {
}
logger.Info(outstr)
f.hwCodecSupportMutex.Lock()
defer f.hwCodecSupportMutex.Unlock()
f.hwCodecSupport = hwCodecSupport
}
@ -334,8 +363,11 @@ func (f *FFMpeg) hwApplyFullHWFilter(args VideoFilter, codec VideoCodec, fullhw
args = args.Append("scale_qsv=format=nv12")
}
case VideoCodecRK264:
// For Rockchip, no extra mapping here. If there is no scale filter,
// leave frames in DRM_PRIME for the encoder.
// Full-hw decode on 10-bit sources often produces DRM_PRIME with sw_pix_fmt=nv15.
// h264_rkmpp does NOT accept nv15, so we must force a conversion to nv12
if fullhw {
args = args.Append("scale_rkrga=w=iw:h=ih:format=nv12")
}
}
return args
@ -370,7 +402,7 @@ func (f *FFMpeg) hwApplyScaleTemplate(sargs string, codec VideoCodec, match []in
// by downloading the scaled frame to system RAM and re-uploading it.
// The filter chain below uses a zero-copy approach, passing the hardware-scaled
// frame directly to the encoder. This is more efficient but may be less stable.
template = "scale_rkrga=$value"
template = "scale_rkrga=$value:format=nv12"
default:
return VideoFilter(sargs)
}
@ -411,7 +443,7 @@ func (f *FFMpeg) hwMaxResFilter(toCodec VideoCodec, vf *models.VideoFile, reqHei
// Return if a hardware accelerated for HLS is available
func (f *FFMpeg) hwCodecHLSCompatible() *VideoCodec {
for _, element := range f.hwCodecSupport {
for _, element := range f.getHWCodecSupport() {
switch element {
case VideoCodecN264,
VideoCodecN264H,
@ -429,7 +461,7 @@ func (f *FFMpeg) hwCodecHLSCompatible() *VideoCodec {
// Return if a hardware accelerated codec for MP4 is available
func (f *FFMpeg) hwCodecMP4Compatible() *VideoCodec {
for _, element := range f.hwCodecSupport {
for _, element := range f.getHWCodecSupport() {
switch element {
case VideoCodecN264,
VideoCodecN264H,
@ -445,7 +477,7 @@ func (f *FFMpeg) hwCodecMP4Compatible() *VideoCodec {
// Return if a hardware accelerated codec for WebM is available
func (f *FFMpeg) hwCodecWEBMCompatible() *VideoCodec {
for _, element := range f.hwCodecSupport {
for _, element := range f.getHWCodecSupport() {
switch element {
case VideoCodecIVP9,
VideoCodecVVP9:

View file

@ -10,6 +10,7 @@ import (
"regexp"
"strconv"
"strings"
"sync"
stashExec "github.com/stashapp/stash/pkg/exec"
"github.com/stashapp/stash/pkg/fsutil"
@ -216,9 +217,10 @@ func (v Version) String() string {
// FFMpeg provides an interface to ffmpeg.
type FFMpeg struct {
ffmpeg string
version Version
hwCodecSupport []VideoCodec
ffmpeg string
version Version
hwCodecSupport []VideoCodec
hwCodecSupportMutex sync.RWMutex
}
// Creates a new FFMpeg encoder
@ -241,3 +243,9 @@ func (f *FFMpeg) Command(ctx context.Context, args []string) *exec.Cmd {
func (f *FFMpeg) Path() string {
return f.ffmpeg
}
func (f *FFMpeg) getHWCodecSupport() []VideoCodec {
f.hwCodecSupportMutex.RLock()
defer f.hwCodecSupportMutex.RUnlock()
return f.hwCodecSupport
}

View file

@ -7,6 +7,7 @@ import (
"io/fs"
"os"
"path/filepath"
"runtime/debug"
"strings"
"sync"
"time"
@ -15,7 +16,6 @@ import (
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/txn"
"github.com/stashapp/stash/pkg/utils"
)
const (
@ -179,7 +179,16 @@ func (s *scanJob) execute(ctx context.Context) {
wg.Add(1)
go func() {
defer wg.Done()
defer func() {
wg.Done()
// handle panics in goroutine
if p := recover(); p != nil {
logger.Errorf("panic while queuing files for scan: %v", p)
logger.Errorf(string(debug.Stack()))
}
}()
if err := s.queueFiles(ctx, paths); err != nil {
if errors.Is(err, context.Canceled) {
return
@ -205,6 +214,15 @@ func (s *scanJob) execute(ctx context.Context) {
}
func (s *scanJob) queueFiles(ctx context.Context, paths []string) error {
defer func() {
close(s.fileQueue)
if s.ProgressReports != nil {
s.ProgressReports.AddTotal(s.count)
s.ProgressReports.Definite()
}
}()
var err error
s.ProgressReports.ExecuteTask("Walking directory tree", func() {
for _, p := range paths {
@ -215,13 +233,6 @@ func (s *scanJob) queueFiles(ctx context.Context, paths []string) error {
}
})
close(s.fileQueue)
if s.ProgressReports != nil {
s.ProgressReports.AddTotal(s.count)
s.ProgressReports.Definite()
}
return err
}
@ -719,7 +730,7 @@ func (s *scanJob) handleFile(ctx context.Context, f scanFile) error {
// scan zip files with a different context that is not cancellable
// cancelling while scanning zip file contents results in the scan
// contents being partially completed
zipCtx := utils.ValueOnlyContext{Context: ctx}
zipCtx := context.WithoutCancel(ctx)
if err := s.scanZipFile(zipCtx, f); err != nil {
logger.Errorf("Error scanning zip file %q: %v", f.Path, err)
@ -884,7 +895,8 @@ func (s *scanJob) getFileFS(f *models.BaseFile) (models.FS, error) {
}
zipPath := f.ZipFile.Base().Path
return fs.OpenZip(zipPath, f.Size)
zipSize := f.ZipFile.Base().Size
return fs.OpenZip(zipPath, zipSize)
}
func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.Fingerprint) (models.File, error) {

View file

@ -7,7 +7,6 @@ import (
"time"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/utils"
)
const maxGraveyardSize = 10
@ -179,7 +178,8 @@ func (m *Manager) dispatch(ctx context.Context, j *Job) (done chan struct{}) {
j.StartTime = &t
j.Status = StatusRunning
ctx, cancelFunc := context.WithCancel(utils.ValueOnlyContext{Context: ctx})
// create a cancellable context for the job that is not canceled by the outer context
ctx, cancelFunc := context.WithCancel(context.WithoutCancel(ctx))
j.cancelFunc = cancelFunc
done = make(chan struct{})

View file

@ -9,6 +9,8 @@ type CustomFieldsInput struct {
Full map[string]interface{} `json:"full"`
// If populated, only the keys in this map will be updated
Partial map[string]interface{} `json:"partial"`
// Remove any keys in this list
Remove []string `json:"remove"`
}
type CustomFieldsReader interface {

View file

@ -1,31 +1,63 @@
package models
import (
"fmt"
"time"
"github.com/stashapp/stash/pkg/utils"
)
type DatePrecision int
const (
// default precision is day
DatePrecisionDay DatePrecision = iota
DatePrecisionMonth
DatePrecisionYear
)
// Date wraps a time.Time with a format of "YYYY-MM-DD"
type Date struct {
time.Time
Precision DatePrecision
}
const dateFormat = "2006-01-02"
var dateFormatPrecision = []string{
"2006-01-02",
"2006-01",
"2006",
}
func (d Date) String() string {
return d.Format(dateFormat)
return d.Format(dateFormatPrecision[d.Precision])
}
func (d Date) After(o Date) bool {
return d.Time.After(o.Time)
}
// ParseDate uses utils.ParseDateStringAsTime to parse a string into a date.
// ParseDate tries to parse the input string into a date using utils.ParseDateStringAsTime.
// If that fails, it attempts to parse the string with decreasing precision (month, then year).
// It returns a Date struct with the appropriate precision set, or an error if all parsing attempts fail.
func ParseDate(s string) (Date, error) {
var errs []error
// default parse to day precision
ret, err := utils.ParseDateStringAsTime(s)
if err != nil {
return Date{}, err
if err == nil {
return Date{Time: ret, Precision: DatePrecisionDay}, nil
}
return Date{Time: ret}, nil
errs = append(errs, err)
// try month and year precision
for i, format := range dateFormatPrecision[1:] {
ret, err := time.Parse(format, s)
if err == nil {
return Date{Time: ret, Precision: DatePrecision(i + 1)}, nil
}
errs = append(errs, err)
}
return Date{}, fmt.Errorf("failed to parse date %q: %v", s, errs)
}

50
pkg/models/date_test.go Normal file
View file

@ -0,0 +1,50 @@
package models
import (
"testing"
"time"
)
// TestParseDateStringAsTime exercises ParseDate against full, partial, and
// invalid date strings, checking both the parsed time and its precision.
func TestParseDateStringAsTime(t *testing.T) {
	cases := []struct {
		name        string
		input       string
		output      Date
		expectError bool
	}{
		// Full-precision inputs (pre-existing support).
		{"RFC3339", "2014-01-02T15:04:05Z", Date{Time: time.Date(2014, 1, 2, 15, 4, 5, 0, time.UTC), Precision: DatePrecisionDay}, false},
		{"Date only", "2014-01-02", Date{Time: time.Date(2014, 1, 2, 0, 0, 0, 0, time.UTC), Precision: DatePrecisionDay}, false},
		{"Date with time", "2014-01-02 15:04:05", Date{Time: time.Date(2014, 1, 2, 15, 4, 5, 0, time.UTC), Precision: DatePrecisionDay}, false},
		// Reduced-precision inputs (new support).
		{"Year-Month", "2006-08", Date{Time: time.Date(2006, 8, 1, 0, 0, 0, 0, time.UTC), Precision: DatePrecisionMonth}, false},
		{"Year only", "2014", Date{Time: time.Date(2014, 1, 1, 0, 0, 0, 0, time.UTC), Precision: DatePrecisionYear}, false},
		// Unparseable inputs.
		{"Invalid format", "not-a-date", Date{}, true},
		{"Empty string", "", Date{}, true},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got, err := ParseDate(tc.input)
			switch {
			case tc.expectError && err == nil:
				t.Errorf("Expected error for input %q, but got none", tc.input)
			case !tc.expectError && err != nil:
				t.Errorf("Unexpected error for input %q: %v", tc.input, err)
			case !tc.expectError && (!got.Time.Equal(tc.output.Time) || got.Precision != tc.output.Precision):
				t.Errorf("For input %q, expected output %+v, got %+v", tc.input, tc.output, got)
			}
		})
	}
}

View file

@ -473,6 +473,20 @@ func (_m *PerformerReaderWriter) HasImage(ctx context.Context, performerID int)
return r0, r1
}
// Merge provides a mock function with given fields: ctx, source, destination
func (_m *PerformerReaderWriter) Merge(ctx context.Context, source []int, destination int) error {
ret := _m.Called(ctx, source, destination)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, []int, int) error); ok {
r0 = rf(ctx, source, destination)
} else {
r0 = ret.Error(0)
}
return r0
}
// Query provides a mock function with given fields: ctx, performerFilter, findFilter
func (_m *PerformerReaderWriter) Query(ctx context.Context, performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) {
ret := _m.Called(ctx, performerFilter, findFilter)

View file

@ -32,7 +32,7 @@ func (s *ScrapedStudio) ToStudio(endpoint string, excluded map[string]bool) *Stu
ret := NewStudio()
ret.Name = strings.TrimSpace(s.Name)
if s.RemoteSiteID != nil && endpoint != "" {
if s.RemoteSiteID != nil && endpoint != "" && *s.RemoteSiteID != "" {
ret.StashIDs = NewRelatedStashIDs([]StashID{
{
Endpoint: endpoint,
@ -141,7 +141,7 @@ func (s *ScrapedStudio) ToPartial(id string, endpoint string, excluded map[strin
}
}
if s.RemoteSiteID != nil && endpoint != "" {
if s.RemoteSiteID != nil && endpoint != "" && *s.RemoteSiteID != "" {
ret.StashIDs = &UpdateStashIDs{
StashIDs: existingStashIDs,
Mode: RelationshipUpdateModeSet,
@ -306,7 +306,7 @@ func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool
}
}
if p.RemoteSiteID != nil && endpoint != "" {
if p.RemoteSiteID != nil && endpoint != "" && *p.RemoteSiteID != "" {
ret.StashIDs = NewRelatedStashIDs([]StashID{
{
Endpoint: endpoint,
@ -435,7 +435,7 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool,
}
}
if p.RemoteSiteID != nil && endpoint != "" {
if p.RemoteSiteID != nil && endpoint != "" && *p.RemoteSiteID != "" {
ret.StashIDs = &UpdateStashIDs{
StashIDs: existingStashIDs,
Mode: RelationshipUpdateModeSet,
@ -464,7 +464,7 @@ func (t *ScrapedTag) ToTag(endpoint string, excluded map[string]bool) *Tag {
ret := NewTag()
ret.Name = t.Name
if t.RemoteSiteID != nil && endpoint != "" {
if t.RemoteSiteID != nil && endpoint != "" && *t.RemoteSiteID != "" {
ret.StashIDs = NewRelatedStashIDs([]StashID{
{
Endpoint: endpoint,

View file

@ -166,6 +166,8 @@ type PerformerFilterType struct {
StashID *StringCriterionInput `json:"stash_id"`
// Filter by StashID Endpoint
StashIDEndpoint *StashIDCriterionInput `json:"stash_id_endpoint"`
// Filter by StashIDs Endpoint
StashIDsEndpoint *StashIDsCriterionInput `json:"stash_ids_endpoint"`
// Filter by rating expressed as 1-100
Rating100 *IntCriterionInput `json:"rating100"`
// Filter by url

View file

@ -92,6 +92,8 @@ type PerformerWriter interface {
PerformerCreator
PerformerUpdater
PerformerDestroyer
Merge(ctx context.Context, source []int, destination int) error
}
// PerformerReaderWriter provides all performer methods.

View file

@ -79,6 +79,8 @@ type SceneFilterType struct {
StashID *StringCriterionInput `json:"stash_id"`
// Filter by StashID Endpoint
StashIDEndpoint *StashIDCriterionInput `json:"stash_id_endpoint"`
// Filter by StashIDs Endpoint
StashIDsEndpoint *StashIDsCriterionInput `json:"stash_ids_endpoint"`
// Filter by url
URL *StringCriterionInput `json:"url"`
// Filter by interactive

View file

@ -129,8 +129,16 @@ func (u *UpdateStashIDs) Set(v StashID) {
type StashIDCriterionInput struct {
// If present, this value is treated as a predicate.
// That is, it will filter based on stash_ids with the matching endpoint
// That is, it will filter based on stash_id with the matching endpoint
Endpoint *string `json:"endpoint"`
StashID *string `json:"stash_id"`
Modifier CriterionModifier `json:"modifier"`
}
type StashIDsCriterionInput struct {
// If present, this value is treated as a predicate.
// That is, it will filter based on stash_ids with the matching endpoint
Endpoint *string `json:"endpoint"`
StashIDs []*string `json:"stash_ids"`
Modifier CriterionModifier `json:"modifier"`
}

View file

@ -10,6 +10,8 @@ type StudioFilterType struct {
StashID *StringCriterionInput `json:"stash_id"`
// Filter by StashID Endpoint
StashIDEndpoint *StashIDCriterionInput `json:"stash_id_endpoint"`
// Filter by StashIDs Endpoint
StashIDsEndpoint *StashIDsCriterionInput `json:"stash_ids_endpoint"`
// Filter to only include studios missing this property
IsMissing *string `json:"is_missing"`
// Filter by rating expressed as 1-100

View file

@ -40,6 +40,10 @@ type TagFilterType struct {
ChildCount *IntCriterionInput `json:"child_count"`
// Filter by autotag ignore value
IgnoreAutoTag *bool `json:"ignore_auto_tag"`
// Filter by StashID Endpoint
StashIDEndpoint *StashIDCriterionInput `json:"stash_id_endpoint"`
// Filter by StashIDs Endpoint
StashIDsEndpoint *StashIDsCriterionInput `json:"stash_ids_endpoint"`
// Filter by related scenes that meet this criteria
ScenesFilter *SceneFilterType `json:"scenes_filter"`
// Filter by related images that meet this criteria

View file

@ -68,6 +68,12 @@ func processImageField(ctx context.Context, imageField *string, client *http.Cli
return nil
}
// don't try to get the image if it doesn't appear to be a URL
// this allows scrapers to return base64 data URIs directly
if !strings.HasPrefix(*imageField, "http") {
return nil
}
img, err := getImage(ctx, *imageField, client, globalConfig)
if err != nil {
return err

View file

@ -261,7 +261,7 @@ func (s *xpathScraper) scrapeImageByImage(ctx context.Context, image *models.Ima
func (s *xpathScraper) loadURL(ctx context.Context, url string) (*html.Node, error) {
r, err := loadURL(ctx, url, s.client, s.config, s.globalConfig)
if err != nil {
return nil, err
return nil, fmt.Errorf("failed to load URL %q: %w", url, err)
}
ret, err := html.Parse(r)

View file

@ -1012,6 +1012,11 @@ func (h *stashIDCriterionHandler) handle(ctx context.Context, f *filterBuilder)
return
}
// ideally, this handler should just convert to stashIDsCriterionHandler
// but there are some differences in how the existing handler works compared
// to the new code, specifically because this code uses the stringCriterionHandler.
// To minimise potential regressions, we'll keep the existing logic for now.
stashIDRepo := h.stashIDRepository
t := stashIDRepo.tableName
if h.stashIDTableAs != "" {
@ -1036,6 +1041,53 @@ func (h *stashIDCriterionHandler) handle(ctx context.Context, f *filterBuilder)
}, t+".stash_id")(ctx, f)
}
// stashIDsCriterionHandler applies a models.StashIDsCriterionInput to a
// filterBuilder by joining the object's stash ID table and matching on
// endpoint and stash ID values.
type stashIDsCriterionHandler struct {
	c *models.StashIDsCriterionInput // criterion input; nil means no filtering
	stashIDRepository *stashIDRepository // describes the stash ID join table and its columns
	stashIDTableAs string // optional alias for the joined stash ID table
	parentIDCol string // qualified id column of the parent object (e.g. "performers.id")
}
// handle applies the stash IDs criterion to the filter builder. It left-joins
// the stash ID table (optionally aliased, and restricted to the endpoint when
// one is given) and then adds WHERE clauses according to the modifier.
func (h *stashIDsCriterionHandler) handle(ctx context.Context, f *filterBuilder) {
	if h.c == nil {
		// no criterion supplied - nothing to do
		return
	}
	stashIDRepo := h.stashIDRepository
	t := stashIDRepo.tableName
	if h.stashIDTableAs != "" {
		// reference the table by its alias in join and where clauses
		t = h.stashIDTableAs
	}
	joinClause := fmt.Sprintf("%s.%s = %s", t, stashIDRepo.idColumn, h.parentIDCol)
	if h.c.Endpoint != nil && *h.c.Endpoint != "" {
		// NOTE(review): the endpoint value is interpolated directly into the
		// join SQL rather than bound as a parameter; if endpoint can ever
		// contain a single quote this is injection-prone - consider binding
		// it instead. TODO confirm filterBuilder supports join args.
		joinClause += fmt.Sprintf(" AND %s.endpoint = '%s'", t, *h.c.Endpoint)
	}
	f.addLeftJoin(stashIDRepo.tableName, h.stashIDTableAs, joinClause)
	switch h.c.Modifier {
	case models.CriterionModifierIsNull:
		f.addWhere(fmt.Sprintf("%s.stash_id IS NULL", t))
	case models.CriterionModifierNotNull:
		f.addWhere(fmt.Sprintf("%s.stash_id IS NOT NULL", t))
	case models.CriterionModifierEquals:
		// match objects having any of the given stash IDs (OR of equalities)
		var clauses []sqlClause
		for _, id := range h.c.StashIDs {
			clauses = append(clauses, makeClause(fmt.Sprintf("%s.stash_id = ?", t), id))
		}
		f.whereClauses = append(f.whereClauses, orClauses(clauses...))
	case models.CriterionModifierNotEquals:
		// exclude objects matching any of the given stash IDs (AND of inequalities)
		var clauses []sqlClause
		for _, id := range h.c.StashIDs {
			clauses = append(clauses, makeClause(fmt.Sprintf("%s.stash_id != ?", t), id))
		}
		f.whereClauses = append(f.whereClauses, andClauses(clauses...))
	default:
		f.setError(fmt.Errorf("invalid modifier %s for stash IDs criterion", h.c.Modifier))
	}
}
type relatedFilterHandler struct {
relatedIDCol string
relatedRepo repository

View file

@ -41,18 +41,31 @@ func (s *customFieldsStore) SetCustomFields(ctx context.Context, id int, values
case values.Partial != nil:
partial = true
valMap = values.Partial
default:
return nil
}
if err := s.validateCustomFields(valMap); err != nil {
if valMap != nil {
if err := s.validateCustomFields(valMap, values.Remove); err != nil {
return err
}
if err := s.setCustomFields(ctx, id, valMap, partial); err != nil {
return err
}
}
if err := s.deleteCustomFields(ctx, id, values.Remove); err != nil {
return err
}
return s.setCustomFields(ctx, id, valMap, partial)
return nil
}
func (s *customFieldsStore) validateCustomFields(values map[string]interface{}) error {
func (s *customFieldsStore) validateCustomFields(values map[string]interface{}, deleteKeys []string) error {
// if values is nil, nothing to validate
if values == nil {
return nil
}
// ensure that custom field names are valid
// no leading or trailing whitespace, no empty strings
for k := range values {
@ -61,6 +74,13 @@ func (s *customFieldsStore) validateCustomFields(values map[string]interface{})
}
}
// ensure delete keys are not also in values
for _, k := range deleteKeys {
if _, ok := values[k]; ok {
return fmt.Errorf("custom field name %q cannot be in both values and delete keys", k)
}
}
return nil
}
@ -130,6 +150,22 @@ func (s *customFieldsStore) setCustomFields(ctx context.Context, id int, values
return nil
}
// deleteCustomFields removes the given custom field keys for the object with
// the given id. An empty (or nil) key list is a no-op.
func (s *customFieldsStore) deleteCustomFields(ctx context.Context, id int, keys []string) error {
	if len(keys) == 0 {
		return nil
	}

	stmt := dialect.Delete(s.table).
		Where(s.fk.Eq(id)).
		Where(goqu.I("field").In(keys))

	_, err := exec(ctx, stmt)
	if err != nil {
		return fmt.Errorf("deleting custom fields: %w", err)
	}

	return nil
}
func (s *customFieldsStore) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) {
q := dialect.Select("field", "value").From(s.table).Where(s.fk.Eq(id))

View file

@ -64,6 +64,18 @@ func TestSetCustomFields(t *testing.T) {
}),
false,
},
{
"valid remove",
models.CustomFieldsInput{
Remove: []string{"real"},
},
func() map[string]interface{} {
m := getPerformerCustomFields(performerIdx)
delete(m, "real")
return m
}(),
false,
},
{
"leading space full",
models.CustomFieldsInput{
@ -144,16 +156,38 @@ func TestSetCustomFields(t *testing.T) {
nil,
true,
},
{
"invalid remove full",
models.CustomFieldsInput{
Full: map[string]interface{}{
"key": "value",
},
Remove: []string{"key"},
},
nil,
true,
},
{
"invalid remove partial",
models.CustomFieldsInput{
Partial: map[string]interface{}{
"real": float64(4.56),
},
Remove: []string{"real"},
},
nil,
true,
},
}
// use performer custom fields store
store := db.Performer
id := performerIDs[performerIdx]
assert := assert.New(t)
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
assert := assert.New(t)
err := store.SetCustomFields(ctx, id, tt.input)
if (err != nil) != tt.wantErr {
t.Errorf("SetCustomFields() error = %v, wantErr %v", err, tt.wantErr)

View file

@ -34,7 +34,7 @@ const (
cacheSizeEnv = "STASH_SQLITE_CACHE_SIZE"
)
var appSchemaVersion uint = 74
var appSchemaVersion uint = 75
//go:embed migrations/*.sql
var migrationsBox embed.FS

View file

@ -5,6 +5,7 @@ import (
"time"
"github.com/stashapp/stash/pkg/models"
"gopkg.in/guregu/null.v4"
)
const sqliteDateLayout = "2006-01-02"
@ -54,12 +55,12 @@ func (d NullDate) Value() (driver.Value, error) {
return d.Date.Format(sqliteDateLayout), nil
}
func (d *NullDate) DatePtr() *models.Date {
func (d *NullDate) DatePtr(precision null.Int) *models.Date {
if d == nil || !d.Valid {
return nil
}
return &models.Date{Time: d.Date}
return &models.Date{Time: d.Date, Precision: models.DatePrecision(precision.Int64)}
}
func NullDateFromDatePtr(d *models.Date) NullDate {
@ -68,3 +69,11 @@ func NullDateFromDatePtr(d *models.Date) NullDate {
}
return NullDate{Date: d.Time, Valid: true}
}
// datePrecisionFromDatePtr returns the precision stored on d as a nullable
// int for persistence. A nil date yields the null value, which is treated as
// the default (day) precision.
func datePrecisionFromDatePtr(d *models.Date) null.Int {
	if d != nil {
		return null.IntFrom(int64(d.Precision))
	}
	// default to day precision
	return null.Int{}
}

View file

@ -96,6 +96,9 @@ type join struct {
onClause string
joinType string
args []interface{}
// if true, indicates this is required for sorting only
sort bool
}
// equals returns true if the other join alias/table is equal to this one
@ -127,30 +130,45 @@ func (j join) toSQL() string {
type joins []join
// addUnique only adds if not already present
// returns true if added
// addUnique appends newJoin unless an equal join is already present.
// When an equal join exists but is marked sort-only while the new join is
// not, the existing join is promoted to a required (non-sort) join.
// Returns true if newJoin was appended.
func (j *joins) addUnique(newJoin join) bool {
	for i := range *j {
		existing := &(*j)[i]
		if !existing.equals(newJoin) {
			continue
		}
		// an equal join already exists; promote it if the new join is required
		if existing.sort && !newJoin.sort {
			existing.sort = false
		}
		return false
	}

	*j = append(*j, newJoin)
	return true
}
func (j *joins) add(newJoins ...join) {
// only add if not already joined
for _, newJoin := range newJoins {
found := false
for _, jj := range *j {
if jj.equals(newJoin) {
found = true
break
}
}
if !found {
*j = append(*j, newJoin)
}
j.addUnique(newJoin)
}
}
func (j *joins) toSQL() string {
func (j *joins) toSQL(includeSortPagination bool) string {
if len(*j) == 0 {
return ""
}
var ret []string
for _, jj := range *j {
// skip sort-only joins if not including sort/pagination
if !includeSortPagination && jj.sort {
continue
}
ret = append(ret, jj.toSQL())
}

View file

@ -30,12 +30,13 @@ const (
)
type galleryRow struct {
ID int `db:"id" goqu:"skipinsert"`
Title zero.String `db:"title"`
Code zero.String `db:"code"`
Date NullDate `db:"date"`
Details zero.String `db:"details"`
Photographer zero.String `db:"photographer"`
ID int `db:"id" goqu:"skipinsert"`
Title zero.String `db:"title"`
Code zero.String `db:"code"`
Date NullDate `db:"date"`
DatePrecision null.Int `db:"date_precision"`
Details zero.String `db:"details"`
Photographer zero.String `db:"photographer"`
// expressed as 1-100
Rating null.Int `db:"rating"`
Organized bool `db:"organized"`
@ -50,6 +51,7 @@ func (r *galleryRow) fromGallery(o models.Gallery) {
r.Title = zero.StringFrom(o.Title)
r.Code = zero.StringFrom(o.Code)
r.Date = NullDateFromDatePtr(o.Date)
r.DatePrecision = datePrecisionFromDatePtr(o.Date)
r.Details = zero.StringFrom(o.Details)
r.Photographer = zero.StringFrom(o.Photographer)
r.Rating = intFromPtr(o.Rating)
@ -74,7 +76,7 @@ func (r *galleryQueryRow) resolve() *models.Gallery {
ID: r.ID,
Title: r.Title.String,
Code: r.Code.String,
Date: r.Date.DatePtr(),
Date: r.Date.DatePtr(r.DatePrecision),
Details: r.Details.String,
Photographer: r.Photographer.String,
Rating: nullIntPtr(r.Rating),
@ -102,7 +104,7 @@ type galleryRowRecord struct {
func (r *galleryRowRecord) fromPartial(o models.GalleryPartial) {
r.setNullString("title", o.Title)
r.setNullString("code", o.Code)
r.setNullDate("date", o.Date)
r.setNullDate("date", "date_precision", o.Date)
r.setNullString("details", o.Details)
r.setNullString("photographer", o.Photographer)
r.setNullInt("rating", o.Rating)
@ -800,10 +802,12 @@ func (qb *GalleryStore) setGallerySort(query *queryBuilder, findFilter *models.F
addFileTable := func() {
query.addJoins(
join{
sort: true,
table: galleriesFilesTable,
onClause: "galleries_files.gallery_id = galleries.id",
},
join{
sort: true,
table: fileTable,
onClause: "galleries_files.file_id = files.id",
},
@ -813,10 +817,12 @@ func (qb *GalleryStore) setGallerySort(query *queryBuilder, findFilter *models.F
addFolderTable := func() {
query.addJoins(
join{
sort: true,
table: folderTable,
onClause: "folders.id = galleries.folder_id",
},
join{
sort: true,
table: folderTable,
as: "file_folder",
onClause: "files.parent_folder_id = file_folder.id",

View file

@ -32,11 +32,12 @@ const (
)
type groupRow struct {
ID int `db:"id" goqu:"skipinsert"`
Name zero.String `db:"name"`
Aliases zero.String `db:"aliases"`
Duration null.Int `db:"duration"`
Date NullDate `db:"date"`
ID int `db:"id" goqu:"skipinsert"`
Name zero.String `db:"name"`
Aliases zero.String `db:"aliases"`
Duration null.Int `db:"duration"`
Date NullDate `db:"date"`
DatePrecision null.Int `db:"date_precision"`
// expressed as 1-100
Rating null.Int `db:"rating"`
StudioID null.Int `db:"studio_id,omitempty"`
@ -56,6 +57,7 @@ func (r *groupRow) fromGroup(o models.Group) {
r.Aliases = zero.StringFrom(o.Aliases)
r.Duration = intFromPtr(o.Duration)
r.Date = NullDateFromDatePtr(o.Date)
r.DatePrecision = datePrecisionFromDatePtr(o.Date)
r.Rating = intFromPtr(o.Rating)
r.StudioID = intFromPtr(o.StudioID)
r.Director = zero.StringFrom(o.Director)
@ -70,7 +72,7 @@ func (r *groupRow) resolve() *models.Group {
Name: r.Name.String,
Aliases: r.Aliases.String,
Duration: nullIntPtr(r.Duration),
Date: r.Date.DatePtr(),
Date: r.Date.DatePtr(r.DatePrecision),
Rating: nullIntPtr(r.Rating),
StudioID: nullIntPtr(r.StudioID),
Director: r.Director.String,
@ -90,7 +92,7 @@ func (r *groupRowRecord) fromPartial(o models.GroupPartial) {
r.setNullString("name", o.Name)
r.setNullString("aliases", o.Aliases)
r.setNullInt("duration", o.Duration)
r.setNullDate("date", o.Date)
r.setNullDate("date", "date_precision", o.Date)
r.setNullInt("rating", o.Rating)
r.setNullInt("studio_id", o.StudioID)
r.setNullString("director", o.Director)
@ -518,7 +520,7 @@ func (qb *GroupStore) setGroupSort(query *queryBuilder, findFilter *models.FindF
} else {
// this will give unexpected results if the query is not filtered by a parent group and
// the group has multiple parents and order indexes
query.join(groupRelationsTable, "", "groups.id = groups_relations.sub_id")
query.joinSort(groupRelationsTable, "", "groups.id = groups_relations.sub_id")
query.sortAndPagination += getSort("order_index", direction, groupRelationsTable)
}
case "tag_count":

View file

@ -34,15 +34,16 @@ type imageRow struct {
Title zero.String `db:"title"`
Code zero.String `db:"code"`
// expressed as 1-100
Rating null.Int `db:"rating"`
Date NullDate `db:"date"`
Details zero.String `db:"details"`
Photographer zero.String `db:"photographer"`
Organized bool `db:"organized"`
OCounter int `db:"o_counter"`
StudioID null.Int `db:"studio_id,omitempty"`
CreatedAt Timestamp `db:"created_at"`
UpdatedAt Timestamp `db:"updated_at"`
Rating null.Int `db:"rating"`
Date NullDate `db:"date"`
DatePrecision null.Int `db:"date_precision"`
Details zero.String `db:"details"`
Photographer zero.String `db:"photographer"`
Organized bool `db:"organized"`
OCounter int `db:"o_counter"`
StudioID null.Int `db:"studio_id,omitempty"`
CreatedAt Timestamp `db:"created_at"`
UpdatedAt Timestamp `db:"updated_at"`
}
func (r *imageRow) fromImage(i models.Image) {
@ -51,6 +52,7 @@ func (r *imageRow) fromImage(i models.Image) {
r.Code = zero.StringFrom(i.Code)
r.Rating = intFromPtr(i.Rating)
r.Date = NullDateFromDatePtr(i.Date)
r.DatePrecision = datePrecisionFromDatePtr(i.Date)
r.Details = zero.StringFrom(i.Details)
r.Photographer = zero.StringFrom(i.Photographer)
r.Organized = i.Organized
@ -74,7 +76,7 @@ func (r *imageQueryRow) resolve() *models.Image {
Title: r.Title.String,
Code: r.Code.String,
Rating: nullIntPtr(r.Rating),
Date: r.Date.DatePtr(),
Date: r.Date.DatePtr(r.DatePrecision),
Details: r.Details.String,
Photographer: r.Photographer.String,
Organized: r.Organized,
@ -103,7 +105,7 @@ func (r *imageRowRecord) fromPartial(i models.ImagePartial) {
r.setNullString("title", i.Title)
r.setNullString("code", i.Code)
r.setNullInt("rating", i.Rating)
r.setNullDate("date", i.Date)
r.setNullDate("date", "date_precision", i.Date)
r.setNullString("details", i.Details)
r.setNullString("photographer", i.Photographer)
r.setBool("organized", i.Organized)
@ -940,6 +942,7 @@ var imageSortOptions = sortOptions{
"performer_count",
"random",
"rating",
"resolution",
"tag_count",
"title",
"updated_at",
@ -965,10 +968,12 @@ func (qb *ImageStore) setImageSortAndPagination(q *queryBuilder, findFilter *mod
addFilesJoin := func() {
q.addJoins(
join{
sort: true,
table: imagesFilesTable,
onClause: "images_files.image_id = images.id",
},
join{
sort: true,
table: fileTable,
onClause: "images_files.file_id = files.id",
},
@ -977,6 +982,7 @@ func (qb *ImageStore) setImageSortAndPagination(q *queryBuilder, findFilter *mod
addFolderJoin := func() {
q.addJoins(join{
sort: true,
table: folderTable,
onClause: "files.parent_folder_id = folders.id",
})
@ -996,6 +1002,14 @@ func (qb *ImageStore) setImageSortAndPagination(q *queryBuilder, findFilter *mod
case "mod_time", "filesize":
addFilesJoin()
sortClause = getSort(sort, direction, "files")
case "resolution":
addFilesJoin()
q.addJoins(join{
sort: true,
table: imageFileTable,
onClause: "images_files.file_id = image_files.file_id",
})
sortClause = " ORDER BY MIN(image_files.width, image_files.height) " + direction
case "title":
addFilesJoin()
addFolderJoin()

View file

@ -0,0 +1,13 @@
-- Add date precision columns recording how precise each stored date is.
-- NOTE(review): 0 appears to correspond to full day precision (the
-- pre-migration behavior) - confirm against the models.DatePrecision enum.
ALTER TABLE "scenes" ADD COLUMN "date_precision" TINYINT;
ALTER TABLE "images" ADD COLUMN "date_precision" TINYINT;
ALTER TABLE "galleries" ADD COLUMN "date_precision" TINYINT;
ALTER TABLE "groups" ADD COLUMN "date_precision" TINYINT;
ALTER TABLE "performers" ADD COLUMN "birthdate_precision" TINYINT;
ALTER TABLE "performers" ADD COLUMN "death_date_precision" TINYINT;
-- Backfill: all dates stored before this migration were day-precision (0).
-- Rows with a NULL date keep a NULL precision.
UPDATE "scenes" SET "date_precision" = 0 WHERE "date" IS NOT NULL;
UPDATE "images" SET "date_precision" = 0 WHERE "date" IS NOT NULL;
UPDATE "galleries" SET "date_precision" = 0 WHERE "date" IS NOT NULL;
UPDATE "groups" SET "date_precision" = 0 WHERE "date" IS NOT NULL;
UPDATE "performers" SET "birthdate_precision" = 0 WHERE "birthdate" IS NOT NULL;
UPDATE "performers" SET "death_date_precision" = 0 WHERE "death_date" IS NOT NULL;

View file

@ -30,32 +30,34 @@ const (
)
type performerRow struct {
ID int `db:"id" goqu:"skipinsert"`
Name null.String `db:"name"` // TODO: make schema non-nullable
Disambigation zero.String `db:"disambiguation"`
Gender zero.String `db:"gender"`
Birthdate NullDate `db:"birthdate"`
Ethnicity zero.String `db:"ethnicity"`
Country zero.String `db:"country"`
EyeColor zero.String `db:"eye_color"`
Height null.Int `db:"height"`
Measurements zero.String `db:"measurements"`
FakeTits zero.String `db:"fake_tits"`
PenisLength null.Float `db:"penis_length"`
Circumcised zero.String `db:"circumcised"`
CareerLength zero.String `db:"career_length"`
Tattoos zero.String `db:"tattoos"`
Piercings zero.String `db:"piercings"`
Favorite bool `db:"favorite"`
CreatedAt Timestamp `db:"created_at"`
UpdatedAt Timestamp `db:"updated_at"`
ID int `db:"id" goqu:"skipinsert"`
Name null.String `db:"name"` // TODO: make schema non-nullable
Disambigation zero.String `db:"disambiguation"`
Gender zero.String `db:"gender"`
Birthdate NullDate `db:"birthdate"`
BirthdatePrecision null.Int `db:"birthdate_precision"`
Ethnicity zero.String `db:"ethnicity"`
Country zero.String `db:"country"`
EyeColor zero.String `db:"eye_color"`
Height null.Int `db:"height"`
Measurements zero.String `db:"measurements"`
FakeTits zero.String `db:"fake_tits"`
PenisLength null.Float `db:"penis_length"`
Circumcised zero.String `db:"circumcised"`
CareerLength zero.String `db:"career_length"`
Tattoos zero.String `db:"tattoos"`
Piercings zero.String `db:"piercings"`
Favorite bool `db:"favorite"`
CreatedAt Timestamp `db:"created_at"`
UpdatedAt Timestamp `db:"updated_at"`
// expressed as 1-100
Rating null.Int `db:"rating"`
Details zero.String `db:"details"`
DeathDate NullDate `db:"death_date"`
HairColor zero.String `db:"hair_color"`
Weight null.Int `db:"weight"`
IgnoreAutoTag bool `db:"ignore_auto_tag"`
Rating null.Int `db:"rating"`
Details zero.String `db:"details"`
DeathDate NullDate `db:"death_date"`
DeathDatePrecision null.Int `db:"death_date_precision"`
HairColor zero.String `db:"hair_color"`
Weight null.Int `db:"weight"`
IgnoreAutoTag bool `db:"ignore_auto_tag"`
// not used in resolution or updates
ImageBlob zero.String `db:"image_blob"`
@ -69,6 +71,7 @@ func (r *performerRow) fromPerformer(o models.Performer) {
r.Gender = zero.StringFrom(o.Gender.String())
}
r.Birthdate = NullDateFromDatePtr(o.Birthdate)
r.BirthdatePrecision = datePrecisionFromDatePtr(o.Birthdate)
r.Ethnicity = zero.StringFrom(o.Ethnicity)
r.Country = zero.StringFrom(o.Country)
r.EyeColor = zero.StringFrom(o.EyeColor)
@ -88,6 +91,7 @@ func (r *performerRow) fromPerformer(o models.Performer) {
r.Rating = intFromPtr(o.Rating)
r.Details = zero.StringFrom(o.Details)
r.DeathDate = NullDateFromDatePtr(o.DeathDate)
r.DeathDatePrecision = datePrecisionFromDatePtr(o.DeathDate)
r.HairColor = zero.StringFrom(o.HairColor)
r.Weight = intFromPtr(o.Weight)
r.IgnoreAutoTag = o.IgnoreAutoTag
@ -98,7 +102,7 @@ func (r *performerRow) resolve() *models.Performer {
ID: r.ID,
Name: r.Name.String,
Disambiguation: r.Disambigation.String,
Birthdate: r.Birthdate.DatePtr(),
Birthdate: r.Birthdate.DatePtr(r.BirthdatePrecision),
Ethnicity: r.Ethnicity.String,
Country: r.Country.String,
EyeColor: r.EyeColor.String,
@ -115,7 +119,7 @@ func (r *performerRow) resolve() *models.Performer {
// expressed as 1-100
Rating: nullIntPtr(r.Rating),
Details: r.Details.String,
DeathDate: r.DeathDate.DatePtr(),
DeathDate: r.DeathDate.DatePtr(r.DeathDatePrecision),
HairColor: r.HairColor.String,
Weight: nullIntPtr(r.Weight),
IgnoreAutoTag: r.IgnoreAutoTag,
@ -142,7 +146,7 @@ func (r *performerRowRecord) fromPartial(o models.PerformerPartial) {
r.setString("name", o.Name)
r.setNullString("disambiguation", o.Disambiguation)
r.setNullString("gender", o.Gender)
r.setNullDate("birthdate", o.Birthdate)
r.setNullDate("birthdate", "birthdate_precision", o.Birthdate)
r.setNullString("ethnicity", o.Ethnicity)
r.setNullString("country", o.Country)
r.setNullString("eye_color", o.EyeColor)
@ -159,7 +163,7 @@ func (r *performerRowRecord) fromPartial(o models.PerformerPartial) {
r.setTimestamp("updated_at", o.UpdatedAt)
r.setNullInt("rating", o.Rating)
r.setNullString("details", o.Details)
r.setNullDate("death_date", o.DeathDate)
r.setNullDate("death_date", "death_date_precision", o.DeathDate)
r.setNullString("hair_color", o.HairColor)
r.setNullInt("weight", o.Weight)
r.setBool("ignore_auto_tag", o.IgnoreAutoTag)
@ -889,3 +893,58 @@ func (qb *PerformerStore) FindByStashIDStatus(ctx context.Context, hasStashID bo
return ret, nil
}
// Merge reassigns scene, gallery, image and tag relationships from the source
// performers to the destination performer, then destroys the source
// performers. An empty source list is a no-op. Returns an error if any source
// id equals the destination id.
func (qb *PerformerStore) Merge(ctx context.Context, source []int, destination int) error {
	if len(source) == 0 {
		return nil
	}

	inBinding := getInBinding(len(source))

	// args layout for the UPDATE below: destination (SET clause), source ids
	// (IN binding), destination again (NOT EXISTS subquery - appended later)
	args := []interface{}{destination}
	srcArgs := make([]interface{}, len(source))
	for i, id := range source {
		if id == destination {
			return errors.New("cannot merge where source == destination")
		}
		srcArgs[i] = id
	}
	args = append(args, srcArgs...)

	// join tables to rewrite, mapped to the column identifying the related object
	performerTables := map[string]string{
		performersScenesTable:    sceneIDColumn,
		performersGalleriesTable: galleryIDColumn,
		performersImagesTable:    imageIDColumn,
		performersTagsTable:      tagIDColumn,
	}

	// trailing destination placeholder consumed by the NOT EXISTS subquery
	args = append(args, destination)

	// for each table, update source performer ids to destination performer id, ignoring duplicates
	for table, idColumn := range performerTables {
		_, err := dbWrapper.Exec(ctx, `UPDATE OR IGNORE `+table+`
SET performer_id = ?
WHERE performer_id IN `+inBinding+`
AND NOT EXISTS(SELECT 1 FROM `+table+` o WHERE o.`+idColumn+` = `+table+`.`+idColumn+` AND o.performer_id = ?)`,
			args...,
		)
		if err != nil {
			return err
		}

		// delete source performer ids from the table where they couldn't be set
		// (i.e. the destination was already related to the same object)
		if _, err := dbWrapper.Exec(ctx, `DELETE FROM `+table+` WHERE performer_id IN `+inBinding, srcArgs...); err != nil {
			return err
		}
	}

	// finally remove the source performers themselves
	for _, id := range source {
		err := qb.Destroy(ctx, id)
		if err != nil {
			return err
		}
	}

	return nil
}

View file

@ -148,6 +148,12 @@ func (qb *performerFilterHandler) criterionHandler() criterionHandler {
stashIDTableAs: "performer_stash_ids",
parentIDCol: "performers.id",
},
&stashIDsCriterionHandler{
c: filter.StashIDsEndpoint,
stashIDRepository: &performerRepository.stashIDs,
stashIDTableAs: "performer_stash_ids",
parentIDCol: "performers.id",
},
qb.aliasCriterionHandler(filter.Aliases),
@ -447,7 +453,7 @@ func (qb *performerFilterHandler) studiosCriterionHandler(studios *models.Hierar
return
}
if len(studios.Value) == 0 {
if len(studios.Value) == 0 && len(studios.Excludes) == 0 {
return
}
@ -464,27 +470,54 @@ func (qb *performerFilterHandler) studiosCriterionHandler(studios *models.Hierar
return
}
const derivedPerformerStudioTable = "performer_studio"
valuesClause, err := getHierarchicalValues(ctx, studios.Value, studioTable, "", "parent_id", "child_id", studios.Depth)
if err != nil {
f.setError(err)
return
}
f.addWith("studio(root_id, item_id) AS (" + valuesClause + ")")
if len(studios.Value) > 0 {
const derivedPerformerStudioTable = "performer_studio"
valuesClause, err := getHierarchicalValues(ctx, studios.Value, studioTable, "", "parent_id", "child_id", studios.Depth)
if err != nil {
f.setError(err)
return
}
f.addWith("studio(root_id, item_id) AS (" + valuesClause + ")")
templStr := `SELECT performer_id FROM {primaryTable}
templStr := `SELECT performer_id FROM {primaryTable}
INNER JOIN {joinTable} ON {primaryTable}.id = {joinTable}.{primaryFK}
INNER JOIN studio ON {primaryTable}.studio_id = studio.item_id`
var unions []string
for _, c := range formatMaps {
unions = append(unions, utils.StrFormat(templStr, c))
}
f.addWith(fmt.Sprintf("%s AS (%s)", derivedPerformerStudioTable, strings.Join(unions, " UNION ")))
f.addLeftJoin(derivedPerformerStudioTable, "", fmt.Sprintf("performers.id = %s.performer_id", derivedPerformerStudioTable))
f.addWhere(fmt.Sprintf("%s.performer_id IS %s NULL", derivedPerformerStudioTable, clauseCondition))
}
// #6412 - handle excludes as well
if len(studios.Excludes) > 0 {
excludeValuesClause, err := getHierarchicalValues(ctx, studios.Excludes, studioTable, "", "parent_id", "child_id", studios.Depth)
if err != nil {
f.setError(err)
return
}
f.addWith("exclude_studio(root_id, item_id) AS (" + excludeValuesClause + ")")
excludeTemplStr := `SELECT performer_id FROM {primaryTable}
INNER JOIN {joinTable} ON {primaryTable}.id = {joinTable}.{primaryFK}
INNER JOIN studio ON {primaryTable}.studio_id = studio.item_id`
INNER JOIN exclude_studio ON {primaryTable}.studio_id = exclude_studio.item_id`
var unions []string
for _, c := range formatMaps {
unions = append(unions, utils.StrFormat(templStr, c))
var unions []string
for _, c := range formatMaps {
unions = append(unions, utils.StrFormat(excludeTemplStr, c))
}
const excludePerformerStudioTable = "performer_studio_exclude"
f.addWith(fmt.Sprintf("%s AS (%s)", excludePerformerStudioTable, strings.Join(unions, " UNION ")))
f.addLeftJoin(excludePerformerStudioTable, "", fmt.Sprintf("performers.id = %s.performer_id", excludePerformerStudioTable))
f.addWhere(fmt.Sprintf("%s.performer_id IS NULL", excludePerformerStudioTable))
}
f.addWith(fmt.Sprintf("%s AS (%s)", derivedPerformerStudioTable, strings.Join(unions, " UNION ")))
f.addLeftJoin(derivedPerformerStudioTable, "", fmt.Sprintf("performers.id = %s.performer_id", derivedPerformerStudioTable))
f.addWhere(fmt.Sprintf("%s.performer_id IS %s NULL", derivedPerformerStudioTable, clauseCondition))
}
}
}

View file

@ -1069,6 +1069,8 @@ func TestPerformerQuery(t *testing.T) {
var (
endpoint = performerStashID(performerIdxWithGallery).Endpoint
stashID = performerStashID(performerIdxWithGallery).StashID
stashID2 = performerStashID(performerIdx1WithGallery).StashID
stashIDs = []*string{&stashID, &stashID2}
)
tests := []struct {
@ -1133,6 +1135,60 @@ func TestPerformerQuery(t *testing.T) {
nil,
false,
},
{
"stash ids with endpoint",
nil,
&models.PerformerFilterType{
StashIDsEndpoint: &models.StashIDsCriterionInput{
Endpoint: &endpoint,
StashIDs: stashIDs,
Modifier: models.CriterionModifierEquals,
},
},
[]int{performerIdxWithGallery, performerIdx1WithGallery},
nil,
false,
},
{
"exclude stash ids with endpoint",
nil,
&models.PerformerFilterType{
StashIDsEndpoint: &models.StashIDsCriterionInput{
Endpoint: &endpoint,
StashIDs: stashIDs,
Modifier: models.CriterionModifierNotEquals,
},
},
nil,
[]int{performerIdxWithGallery, performerIdx1WithGallery},
false,
},
{
"null stash ids with endpoint",
nil,
&models.PerformerFilterType{
StashIDsEndpoint: &models.StashIDsCriterionInput{
Endpoint: &endpoint,
Modifier: models.CriterionModifierIsNull,
},
},
nil,
[]int{performerIdxWithGallery, performerIdx1WithGallery},
false,
},
{
"not null stash ids with endpoint",
nil,
&models.PerformerFilterType{
StashIDsEndpoint: &models.StashIDsCriterionInput{
Endpoint: &endpoint,
Modifier: models.CriterionModifierNotNull,
},
},
[]int{performerIdxWithGallery, performerIdx1WithGallery},
nil,
false,
},
{
"circumcised (cut)",
nil,
@ -1160,6 +1216,98 @@ func TestPerformerQuery(t *testing.T) {
[]int{performerIdx1WithScene, performerIdxWithScene},
false,
},
{
"include scene studio",
nil,
&models.PerformerFilterType{
Studios: &models.HierarchicalMultiCriterionInput{
Value: []string{strconv.Itoa(studioIDs[studioIdxWithScenePerformer])},
Modifier: models.CriterionModifierIncludes,
},
},
[]int{performerIdxWithSceneStudio},
nil,
false,
},
{
"include image studio",
nil,
&models.PerformerFilterType{
Studios: &models.HierarchicalMultiCriterionInput{
Value: []string{strconv.Itoa(studioIDs[studioIdxWithImagePerformer])},
Modifier: models.CriterionModifierIncludes,
},
},
[]int{performerIdxWithImageStudio},
nil,
false,
},
{
"include gallery studio",
nil,
&models.PerformerFilterType{
Studios: &models.HierarchicalMultiCriterionInput{
Value: []string{strconv.Itoa(studioIDs[studioIdxWithGalleryPerformer])},
Modifier: models.CriterionModifierIncludes,
},
},
[]int{performerIdxWithGalleryStudio},
nil,
false,
},
{
"exclude scene studio",
nil,
&models.PerformerFilterType{
Studios: &models.HierarchicalMultiCriterionInput{
Value: []string{strconv.Itoa(studioIDs[studioIdxWithScenePerformer])},
Modifier: models.CriterionModifierExcludes,
},
},
nil,
[]int{performerIdxWithSceneStudio},
false,
},
{
"exclude image studio",
nil,
&models.PerformerFilterType{
Studios: &models.HierarchicalMultiCriterionInput{
Value: []string{strconv.Itoa(studioIDs[studioIdxWithImagePerformer])},
Modifier: models.CriterionModifierExcludes,
},
},
nil,
[]int{performerIdxWithImageStudio},
false,
},
{
"exclude gallery studio",
nil,
&models.PerformerFilterType{
Studios: &models.HierarchicalMultiCriterionInput{
Value: []string{strconv.Itoa(studioIDs[studioIdxWithGalleryPerformer])},
Modifier: models.CriterionModifierExcludes,
},
},
nil,
[]int{performerIdxWithGalleryStudio},
false,
},
{
"include and exclude scene studio",
nil,
&models.PerformerFilterType{
Studios: &models.HierarchicalMultiCriterionInput{
Value: []string{strconv.Itoa(studioIDs[studioIdx1WithTwoScenePerformer])},
Modifier: models.CriterionModifierIncludes,
Excludes: []string{strconv.Itoa(studioIDs[studioIdx2WithTwoScenePerformer])},
},
},
nil,
[]int{performerIdxWithTwoSceneStudio},
false,
},
}
for _, tt := range tests {
@ -2260,7 +2408,7 @@ func TestPerformerQuerySortScenesCount(t *testing.T) {
assert.True(t, len(performers) > 0)
lastPerformer := performers[len(performers)-1]
assert.Equal(t, performerIDs[performerIdxWithTag], lastPerformer.ID)
assert.Equal(t, performerIDs[performerIdxWithTwoSceneStudio], lastPerformer.ID)
return nil
})
@ -2432,6 +2580,146 @@ func TestPerformerStore_FindByStashIDStatus(t *testing.T) {
}
}
// TestPerformerMerge exercises PerformerStore.Merge: merging a performer into
// itself must fail, and merging multiple performers must destroy the source
// rows, repoint their scenes/images/galleries at the destination, and union
// the source tags into the destination.
func TestPerformerMerge(t *testing.T) {
	tests := []struct {
		name    string
		srcIdxs []int
		destIdx int
		wantErr bool
	}{
		{
			name:    "merge into self",
			srcIdxs: []int{performerIdx1WithDupName},
			destIdx: performerIdx1WithDupName,
			wantErr: true,
		},
		{
			name: "merge multiple",
			srcIdxs: []int{
				performerIdx2WithScene,
				performerIdxWithTwoScenes,
				performerIdx1WithImage,
				performerIdxWithTwoImages,
				performerIdxWithGallery,
				performerIdxWithTwoGalleries,
				performerIdxWithTag,
				performerIdxWithTwoTags,
			},
			// NOTE(review): tagIdxWithPerformer is a tag index constant used
			// here as a performer index; it works as an arbitrary index not in
			// srcIdxs, but a performer index constant would be clearer — confirm.
			destIdx: tagIdxWithPerformer,
			wantErr: false,
		},
	}

	qb := db.Performer

	for _, tt := range tests {
		runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
			assert := assert.New(t)

			// load src tag ids to compare after merge
			performerTagIds := make(map[int][]int)
			for _, srcIdx := range tt.srcIdxs {
				srcPerformer, err := qb.Find(ctx, performerIDs[srcIdx])
				if err != nil {
					t.Errorf("Error finding performer: %s", err.Error())
				}

				if err := srcPerformer.LoadTagIDs(ctx, qb); err != nil {
					t.Errorf("Error loading performer tag IDs: %s", err.Error())
				}

				srcTagIDs := srcPerformer.TagIDs.List()
				performerTagIds[srcIdx] = srcTagIDs
			}

			// was indexesToIDs(tagIDs, ...) / tagIDs[tt.destIdx]: this test
			// merges performers, so the IDs passed to Merge must come from
			// performerIDs — matching every other lookup in this function.
			err := qb.Merge(ctx, indexesToIDs(performerIDs, tt.srcIdxs), performerIDs[tt.destIdx])
			if (err != nil) != tt.wantErr {
				t.Errorf("PerformerStore.Merge() error = %v, wantErr %v", err, tt.wantErr)
				return
			}

			if err != nil {
				return
			}

			// ensure source performers are destroyed
			for _, srcIdx := range tt.srcIdxs {
				p, err := qb.Find(ctx, performerIDs[srcIdx])
				// not found returns nil performer and nil error
				if err != nil {
					t.Errorf("Error finding performer: %s", err.Error())
					continue
				}

				assert.Nil(p)
			}

			// ensure items point to new performer
			for _, srcIdx := range tt.srcIdxs {
				sceneIdxs := scenePerformers.reverseLookup(srcIdx)
				for _, sceneIdx := range sceneIdxs {
					s, err := db.Scene.Find(ctx, sceneIDs[sceneIdx])
					if err != nil {
						t.Errorf("Error finding scene: %s", err.Error())
					}

					if err := s.LoadPerformerIDs(ctx, db.Scene); err != nil {
						t.Errorf("Error loading scene performer IDs: %s", err.Error())
					}

					scenePerformerIDs := s.PerformerIDs.List()
					assert.Contains(scenePerformerIDs, performerIDs[tt.destIdx])
					assert.NotContains(scenePerformerIDs, performerIDs[srcIdx])
				}

				imageIdxs := imagePerformers.reverseLookup(srcIdx)
				for _, imageIdx := range imageIdxs {
					i, err := db.Image.Find(ctx, imageIDs[imageIdx])
					if err != nil {
						t.Errorf("Error finding image: %s", err.Error())
					}

					if err := i.LoadPerformerIDs(ctx, db.Image); err != nil {
						t.Errorf("Error loading image performer IDs: %s", err.Error())
					}

					imagePerformerIDs := i.PerformerIDs.List()
					assert.Contains(imagePerformerIDs, performerIDs[tt.destIdx])
					assert.NotContains(imagePerformerIDs, performerIDs[srcIdx])
				}

				galleryIdxs := galleryPerformers.reverseLookup(srcIdx)
				for _, galleryIdx := range galleryIdxs {
					g, err := db.Gallery.Find(ctx, galleryIDs[galleryIdx])
					if err != nil {
						t.Errorf("Error finding gallery: %s", err.Error())
					}

					if err := g.LoadPerformerIDs(ctx, db.Gallery); err != nil {
						t.Errorf("Error loading gallery performer IDs: %s", err.Error())
					}

					galleryPerformerIDs := g.PerformerIDs.List()
					assert.Contains(galleryPerformerIDs, performerIDs[tt.destIdx])
					assert.NotContains(galleryPerformerIDs, performerIDs[srcIdx])
				}
			}

			// ensure tags were merged
			destPerformer, err := qb.Find(ctx, performerIDs[tt.destIdx])
			if err != nil {
				t.Errorf("Error finding performer: %s", err.Error())
			}

			if err := destPerformer.LoadTagIDs(ctx, qb); err != nil {
				t.Errorf("Error loading performer tag IDs: %s", err.Error())
			}

			destTagIDs := destPerformer.TagIDs.List()

			for _, srcIdx := range tt.srcIdxs {
				for _, tagID := range performerTagIds[srcIdx] {
					assert.Contains(destTagIDs, tagID)
				}
			}
		})
	}
}
// TODO Update
// TODO Destroy
// TODO Find

View file

@ -24,8 +24,8 @@ type queryBuilder struct {
sortAndPagination string
}
func (qb queryBuilder) body() string {
return fmt.Sprintf("SELECT %s FROM %s%s", strings.Join(qb.columns, ", "), qb.from, qb.joins.toSQL())
func (qb queryBuilder) body(includeSortPagination bool) string {
return fmt.Sprintf("SELECT %s FROM %s%s", strings.Join(qb.columns, ", "), qb.from, qb.joins.toSQL(includeSortPagination))
}
func (qb *queryBuilder) addColumn(column string) {
@ -33,7 +33,7 @@ func (qb *queryBuilder) addColumn(column string) {
}
func (qb queryBuilder) toSQL(includeSortPagination bool) string {
body := qb.body()
body := qb.body(includeSortPagination)
withClause := ""
if len(qb.withClauses) > 0 {
@ -59,12 +59,14 @@ func (qb queryBuilder) findIDs(ctx context.Context) ([]int, error) {
}
func (qb queryBuilder) executeFind(ctx context.Context) ([]int, int, error) {
body := qb.body()
const includeSortPagination = true
body := qb.body(includeSortPagination)
return qb.repository.executeFindQuery(ctx, body, qb.args, qb.sortAndPagination, qb.whereClauses, qb.havingClauses, qb.withClauses, qb.recursiveWith)
}
func (qb queryBuilder) executeCount(ctx context.Context) (int, error) {
body := qb.body()
const includeSortPagination = false
body := qb.body(includeSortPagination)
withClause := ""
if len(qb.withClauses) > 0 {
@ -131,10 +133,23 @@ func (qb *queryBuilder) join(table, as, onClause string) {
qb.joins.add(newJoin)
}
// joinSort registers a LEFT join that exists only to support sorting; the
// join is marked with sort=true so it can be omitted from queries (such as
// counts) that do not apply the sort/pagination clause.
func (qb *queryBuilder) joinSort(table, as, onClause string) {
	qb.joins.add(join{
		sort:     true,
		joinType: "LEFT",
		table:    table,
		as:       as,
		onClause: onClause,
	})
}
func (qb *queryBuilder) addJoins(joins ...join) {
qb.joins.add(joins...)
for _, j := range joins {
qb.args = append(qb.args, j.args...)
if qb.joins.addUnique(j) {
qb.args = append(qb.args, j.args...)
}
}
}

View file

@ -100,8 +100,9 @@ func (r *updateRecord) setNullTimestamp(destField string, v models.OptionalTime)
}
}
func (r *updateRecord) setNullDate(destField string, v models.OptionalDate) {
func (r *updateRecord) setNullDate(destField string, precisionField string, v models.OptionalDate) {
if v.Set {
r.set(destField, NullDateFromDatePtr(v.Ptr()))
r.set(precisionField, datePrecisionFromDatePtr(v.Ptr()))
}
}

View file

@ -96,7 +96,7 @@ func (r *repository) runIdsQuery(ctx context.Context, query string, args []inter
}
func (r *repository) queryFunc(ctx context.Context, query string, args []interface{}, single bool, f func(rows *sqlx.Rows) error) error {
rows, err := dbWrapper.Queryx(ctx, query, args...)
rows, err := dbWrapper.QueryxContext(ctx, query, args...)
if err != nil && !errors.Is(err, sql.ErrNoRows) {
return err
@ -119,13 +119,12 @@ func (r *repository) queryFunc(ctx context.Context, query string, args []interfa
return nil
}
// queryStruct executes a query and scans the result into the provided struct.
// Unlike the other query methods, this will return an error if no rows are found.
func (r *repository) queryStruct(ctx context.Context, query string, args []interface{}, out interface{}) error {
if err := r.queryFunc(ctx, query, args, true, func(rows *sqlx.Rows) error {
if err := rows.StructScan(out); err != nil {
return err
}
return nil
}); err != nil {
// changed from queryFunc, since it was not logging the performance correctly,
// since the query doesn't actually execute until Scan is called
if err := dbWrapper.Get(ctx, out, query, args...); err != nil {
return fmt.Errorf("executing query: %s [%v]: %w", query, args, err)
}

View file

@ -76,12 +76,13 @@ ORDER BY files.size DESC;
`
type sceneRow struct {
ID int `db:"id" goqu:"skipinsert"`
Title zero.String `db:"title"`
Code zero.String `db:"code"`
Details zero.String `db:"details"`
Director zero.String `db:"director"`
Date NullDate `db:"date"`
ID int `db:"id" goqu:"skipinsert"`
Title zero.String `db:"title"`
Code zero.String `db:"code"`
Details zero.String `db:"details"`
Director zero.String `db:"director"`
Date NullDate `db:"date"`
DatePrecision null.Int `db:"date_precision"`
// expressed as 1-100
Rating null.Int `db:"rating"`
Organized bool `db:"organized"`
@ -102,6 +103,7 @@ func (r *sceneRow) fromScene(o models.Scene) {
r.Details = zero.StringFrom(o.Details)
r.Director = zero.StringFrom(o.Director)
r.Date = NullDateFromDatePtr(o.Date)
r.DatePrecision = datePrecisionFromDatePtr(o.Date)
r.Rating = intFromPtr(o.Rating)
r.Organized = o.Organized
r.StudioID = intFromPtr(o.StudioID)
@ -127,7 +129,7 @@ func (r *sceneQueryRow) resolve() *models.Scene {
Code: r.Code.String,
Details: r.Details.String,
Director: r.Director.String,
Date: r.Date.DatePtr(),
Date: r.Date.DatePtr(r.DatePrecision),
Rating: nullIntPtr(r.Rating),
Organized: r.Organized,
StudioID: nullIntPtr(r.StudioID),
@ -159,7 +161,7 @@ func (r *sceneRowRecord) fromPartial(o models.ScenePartial) {
r.setNullString("code", o.Code)
r.setNullString("details", o.Details)
r.setNullString("director", o.Director)
r.setNullDate("date", o.Date)
r.setNullDate("date", "date_precision", o.Date)
r.setNullInt("rating", o.Rating)
r.setBool("organized", o.Organized)
r.setNullInt("studio_id", o.StudioID)
@ -1136,6 +1138,7 @@ var sceneSortOptions = sortOptions{
"perceptual_similarity",
"random",
"rating",
"resolution",
"studio",
"tag_count",
"title",
@ -1157,10 +1160,12 @@ func (qb *SceneStore) setSceneSort(query *queryBuilder, findFilter *models.FindF
addFileTable := func() {
query.addJoins(
join{
sort: true,
table: scenesFilesTable,
onClause: "scenes_files.scene_id = scenes.id",
},
join{
sort: true,
table: fileTable,
onClause: "scenes_files.file_id = files.id",
},
@ -1171,6 +1176,7 @@ func (qb *SceneStore) setSceneSort(query *queryBuilder, findFilter *models.FindF
addFileTable()
query.addJoins(
join{
sort: true,
table: videoFileTable,
onClause: "video_files.file_id = scenes_files.file_id",
},
@ -1180,6 +1186,7 @@ func (qb *SceneStore) setSceneSort(query *queryBuilder, findFilter *models.FindF
addFolderTable := func() {
query.addJoins(
join{
sort: true,
table: folderTable,
onClause: "files.parent_folder_id = folders.id",
},
@ -1189,10 +1196,10 @@ func (qb *SceneStore) setSceneSort(query *queryBuilder, findFilter *models.FindF
direction := findFilter.GetDirection()
switch sort {
case "movie_scene_number":
query.join(groupsScenesTable, "", "scenes.id = groups_scenes.scene_id")
query.joinSort(groupsScenesTable, "", "scenes.id = groups_scenes.scene_id")
query.sortAndPagination += getSort("scene_index", direction, groupsScenesTable)
case "group_scene_number":
query.join(groupsScenesTable, "scene_group", "scenes.id = scene_group.scene_id")
query.joinSort(groupsScenesTable, "scene_group", "scenes.id = scene_group.scene_id")
query.sortAndPagination += getSort("scene_index", direction, "scene_group")
case "tag_count":
query.sortAndPagination += getCountSort(sceneTable, scenesTagsTable, sceneIDColumn, direction)
@ -1210,6 +1217,7 @@ func (qb *SceneStore) setSceneSort(query *queryBuilder, findFilter *models.FindF
addFileTable()
query.addJoins(
join{
sort: true,
table: fingerprintTable,
as: "fingerprints_phash",
onClause: "scenes_files.file_id = fingerprints_phash.file_id AND fingerprints_phash.type = 'phash'",
@ -1229,6 +1237,9 @@ func (qb *SceneStore) setSceneSort(query *queryBuilder, findFilter *models.FindF
sort = "frame_rate"
addVideoFileTable()
query.sortAndPagination += getSort(sort, direction, videoFileTable)
case "resolution":
addVideoFileTable()
query.sortAndPagination += fmt.Sprintf(" ORDER BY MIN(%s.width, %s.height) %s", videoFileTable, videoFileTable, getSortDirection(direction))
case "filesize":
addFileTable()
query.sortAndPagination += getSort(sort, direction, fileTable)
@ -1274,7 +1285,7 @@ func (qb *SceneStore) setSceneSort(query *queryBuilder, findFilter *models.FindF
getSortDirection(direction),
)
case "studio":
query.join(studioTable, "", "scenes.studio_id = studios.id")
query.joinSort(studioTable, "", "scenes.studio_id = studios.id")
query.sortAndPagination += getSort("name", direction, studioTable)
default:
query.sortAndPagination += getSort(sort, direction, "scenes")

View file

@ -114,13 +114,18 @@ func (qb *sceneFilterHandler) criterionHandler() criterionHandler {
stringCriterionHandler(sceneFilter.StashID, "scene_stash_ids.stash_id")(ctx, f)
}
}),
&stashIDCriterionHandler{
c: sceneFilter.StashIDEndpoint,
stashIDRepository: &sceneRepository.stashIDs,
stashIDTableAs: "scene_stash_ids",
parentIDCol: "scenes.id",
},
&stashIDsCriterionHandler{
c: sceneFilter.StashIDsEndpoint,
stashIDRepository: &sceneRepository.stashIDs,
stashIDTableAs: "scene_stash_ids",
parentIDCol: "scenes.id",
},
boolCriterionHandler(sceneFilter.Interactive, "video_files.interactive", qb.addVideoFilesTable),
intCriterionHandler(sceneFilter.InteractiveSpeed, "video_files.interactive_speed", qb.addVideoFilesTable),

View file

@ -392,10 +392,10 @@ func (qb *SceneMarkerStore) setSceneMarkerSort(query *queryBuilder, findFilter *
switch sort {
case "scenes_updated_at":
sort = "updated_at"
query.join(sceneTable, "", "scenes.id = scene_markers.scene_id")
query.joinSort(sceneTable, "", "scenes.id = scene_markers.scene_id")
query.sortAndPagination += getSort(sort, direction, sceneTable)
case "title":
query.join(tagTable, "", "scene_markers.primary_tag_id = tags.id")
query.joinSort(tagTable, "", "scene_markers.primary_tag_id = tags.id")
query.sortAndPagination += " ORDER BY COALESCE(NULLIF(scene_markers.title,''), tags.name) COLLATE NATURAL_CI " + direction
case "duration":
sort = "(scene_markers.end_seconds - scene_markers.seconds)"

View file

@ -2098,6 +2098,8 @@ func TestSceneQuery(t *testing.T) {
var (
endpoint = sceneStashID(sceneIdxWithGallery).Endpoint
stashID = sceneStashID(sceneIdxWithGallery).StashID
stashID2 = sceneStashID(sceneIdxWithPerformer).StashID
stashIDs = []*string{&stashID, &stashID2}
depth = -1
)
@ -2203,6 +2205,60 @@ func TestSceneQuery(t *testing.T) {
nil,
false,
},
{
"stash ids with endpoint",
nil,
&models.SceneFilterType{
StashIDsEndpoint: &models.StashIDsCriterionInput{
Endpoint: &endpoint,
StashIDs: stashIDs,
Modifier: models.CriterionModifierEquals,
},
},
[]int{sceneIdxWithGallery, sceneIdxWithPerformer},
nil,
false,
},
{
"exclude stash ids with endpoint",
nil,
&models.SceneFilterType{
StashIDsEndpoint: &models.StashIDsCriterionInput{
Endpoint: &endpoint,
StashIDs: stashIDs,
Modifier: models.CriterionModifierNotEquals,
},
},
nil,
[]int{sceneIdxWithGallery, sceneIdxWithPerformer},
false,
},
{
"null stash ids with endpoint",
nil,
&models.SceneFilterType{
StashIDsEndpoint: &models.StashIDsCriterionInput{
Endpoint: &endpoint,
Modifier: models.CriterionModifierIsNull,
},
},
nil,
[]int{sceneIdxWithGallery, sceneIdxWithPerformer},
false,
},
{
"not null stash ids with endpoint",
nil,
&models.SceneFilterType{
StashIDsEndpoint: &models.StashIDsCriterionInput{
Endpoint: &endpoint,
Modifier: models.CriterionModifierNotNull,
},
},
[]int{sceneIdxWithGallery, sceneIdxWithPerformer},
nil,
false,
},
{
"with studio id 0 including child studios",
nil,

View file

@ -77,6 +77,8 @@ const (
sceneIdxWithPerformerTwoTags
sceneIdxWithSpacedName
sceneIdxWithStudioPerformer
sceneIdx1WithTwoStudioPerformer
sceneIdx2WithTwoStudioPerformer
sceneIdxWithGrandChildStudio
sceneIdxMissingPhash
sceneIdxWithPerformerParentTag
@ -138,6 +140,7 @@ const (
performerIdxWithSceneStudio
performerIdxWithImageStudio
performerIdxWithGalleryStudio
performerIdxWithTwoSceneStudio
performerIdxWithParentTag
// new indexes above
// performers with dup names start from the end
@ -257,6 +260,8 @@ const (
studioIdxWithScenePerformer
studioIdxWithImagePerformer
studioIdxWithGalleryPerformer
studioIdx1WithTwoScenePerformer
studioIdx2WithTwoScenePerformer
studioIdxWithTag
studioIdx2WithTag
studioIdxWithTwoTags
@ -384,16 +389,18 @@ var (
}
scenePerformers = linkMap{
sceneIdxWithPerformer: {performerIdxWithScene},
sceneIdxWithTwoPerformers: {performerIdx1WithScene, performerIdx2WithScene},
sceneIdxWithThreePerformers: {performerIdx1WithScene, performerIdx2WithScene, performerIdx3WithScene},
sceneIdxWithPerformerTag: {performerIdxWithTag},
sceneIdxWithTwoPerformerTag: {performerIdxWithTag, performerIdx2WithTag},
sceneIdxWithPerformerTwoTags: {performerIdxWithTwoTags},
sceneIdx1WithPerformer: {performerIdxWithTwoScenes},
sceneIdx2WithPerformer: {performerIdxWithTwoScenes},
sceneIdxWithStudioPerformer: {performerIdxWithSceneStudio},
sceneIdxWithPerformerParentTag: {performerIdxWithParentTag},
sceneIdxWithPerformer: {performerIdxWithScene},
sceneIdxWithTwoPerformers: {performerIdx1WithScene, performerIdx2WithScene},
sceneIdxWithThreePerformers: {performerIdx1WithScene, performerIdx2WithScene, performerIdx3WithScene},
sceneIdxWithPerformerTag: {performerIdxWithTag},
sceneIdxWithTwoPerformerTag: {performerIdxWithTag, performerIdx2WithTag},
sceneIdxWithPerformerTwoTags: {performerIdxWithTwoTags},
sceneIdx1WithPerformer: {performerIdxWithTwoScenes},
sceneIdx2WithPerformer: {performerIdxWithTwoScenes},
sceneIdxWithStudioPerformer: {performerIdxWithSceneStudio},
sceneIdx1WithTwoStudioPerformer: {performerIdxWithTwoSceneStudio},
sceneIdx2WithTwoStudioPerformer: {performerIdxWithTwoSceneStudio},
sceneIdxWithPerformerParentTag: {performerIdxWithParentTag},
}
sceneGalleries = linkMap{
@ -406,11 +413,13 @@ var (
}
sceneStudios = map[int]int{
sceneIdxWithStudio: studioIdxWithScene,
sceneIdx1WithStudio: studioIdxWithTwoScenes,
sceneIdx2WithStudio: studioIdxWithTwoScenes,
sceneIdxWithStudioPerformer: studioIdxWithScenePerformer,
sceneIdxWithGrandChildStudio: studioIdxWithGrandParent,
sceneIdxWithStudio: studioIdxWithScene,
sceneIdx1WithStudio: studioIdxWithTwoScenes,
sceneIdx2WithStudio: studioIdxWithTwoScenes,
sceneIdxWithStudioPerformer: studioIdxWithScenePerformer,
sceneIdx1WithTwoStudioPerformer: studioIdx1WithTwoScenePerformer,
sceneIdx2WithTwoStudioPerformer: studioIdx2WithTwoScenePerformer,
sceneIdxWithGrandChildStudio: studioIdxWithGrandParent,
}
)
@ -1070,7 +1079,7 @@ func getObjectDate(index int) *models.Date {
func sceneStashID(i int) models.StashID {
return models.StashID{
StashID: getSceneStringValue(i, "stashid"),
Endpoint: getSceneStringValue(i, "endpoint"),
Endpoint: getSceneStringValue(0, "endpoint"),
UpdatedAt: epochTime,
}
}
@ -1538,7 +1547,7 @@ func getIgnoreAutoTag(index int) bool {
func performerStashID(i int) models.StashID {
return models.StashID{
StashID: getPerformerStringValue(i, "stashid"),
Endpoint: getPerformerStringValue(i, "endpoint"),
Endpoint: getPerformerStringValue(0, "endpoint"),
}
}
@ -1688,6 +1697,13 @@ func getTagChildCount(id int) int {
return 0
}
// tagStashID builds the deterministic stash ID fixture for tag index i.
// The endpoint deliberately uses index 0 so that every tag shares a single
// endpoint value — mirroring sceneStashID and performerStashID above.
func tagStashID(i int) models.StashID {
return models.StashID{
StashID: getTagStringValue(i, "stashid"),
Endpoint: getTagStringValue(0, "endpoint"),
}
}
// createTags creates n tags with plain Name and o tags with camel cased NaMe included
func createTags(ctx context.Context, tqb models.TagReaderWriter, n int, o int) error {
const namePlain = "Name"
@ -1709,6 +1725,12 @@ func createTags(ctx context.Context, tqb models.TagReaderWriter, n int, o int) e
IgnoreAutoTag: getIgnoreAutoTag(i),
}
if (index+1)%5 != 0 {
tag.StashIDs = models.NewRelatedStashIDs([]models.StashID{
tagStashID(i),
})
}
err := tqb.Create(ctx, &tag)
if err != nil {

View file

@ -137,6 +137,8 @@ func getCountSort(primaryTable, joinTable, primaryFK, direction string) string {
return fmt.Sprintf(" ORDER BY (SELECT COUNT(*) FROM %s AS sort WHERE sort.%s = %s.id) %s", joinTable, primaryFK, primaryTable, getSortDirection(direction))
}
// getStringSearchClause returns a sqlClause for searching strings in the provided columns.
// It is used for includes and excludes string criteria.
func getStringSearchClause(columns []string, q string, not bool) sqlClause {
var likeClauses []string
var args []interface{}

View file

@ -72,6 +72,12 @@ func (qb *studioFilterHandler) criterionHandler() criterionHandler {
stashIDTableAs: "studio_stash_ids",
parentIDCol: "studios.id",
},
&stashIDsCriterionHandler{
c: studioFilter.StashIDsEndpoint,
stashIDRepository: &studioRepository.stashIDs,
stashIDTableAs: "studio_stash_ids",
parentIDCol: "studios.id",
},
qb.isMissingCriterionHandler(studioFilter.IsMissing),
qb.tagCountCriterionHandler(studioFilter.TagCount),

View file

@ -859,6 +859,8 @@ func (qb *TagStore) Merge(ctx context.Context, source []int, destination int) er
}
args = append(args, destination)
// for each table, update source tag ids to destination tag id, ignoring duplicates
for table, idColumn := range tagTables {
_, err := dbWrapper.Exec(ctx, `UPDATE OR IGNORE `+table+`
SET tag_id = ?

View file

@ -84,6 +84,20 @@ func (qb *tagFilterHandler) criterionHandler() criterionHandler {
tagHierarchyHandler.ChildrenCriterionHandler(tagFilter.Children),
tagHierarchyHandler.ParentCountCriterionHandler(tagFilter.ParentCount),
tagHierarchyHandler.ChildCountCriterionHandler(tagFilter.ChildCount),
&stashIDCriterionHandler{
c: tagFilter.StashIDEndpoint,
stashIDRepository: &tagRepository.stashIDs,
stashIDTableAs: "tag_stash_ids",
parentIDCol: "tags.id",
},
&stashIDsCriterionHandler{
c: tagFilter.StashIDsEndpoint,
stashIDRepository: &tagRepository.stashIDs,
stashIDTableAs: "tag_stash_ids",
parentIDCol: "tags.id",
},
&timestampCriterionHandler{tagFilter.CreatedAt, "tags.created_at", nil},
&timestampCriterionHandler{tagFilter.UpdatedAt, "tags.updated_at", nil},

View file

@ -343,6 +343,165 @@ func queryTags(ctx context.Context, t *testing.T, qb models.TagReader, tagFilter
return tags
}
// tagsToIDs returns the IDs of the given tags, preserving order.
// The parameter was previously named i, which the loop index then shadowed;
// renamed for clarity — behavior is unchanged.
func tagsToIDs(tags []*models.Tag) []int {
	ret := make([]int, len(tags))
	for i, t := range tags {
		ret[i] = t.ID
	}
	return ret
}
// TestTagQuery exercises TagStore.Query with the stash ID criterion handlers,
// covering the single-value StashIDEndpoint and multi-value StashIDsEndpoint
// inputs under the equals/not-equals/is-null/not-null modifiers.
func TestTagQuery(t *testing.T) {
	var (
		endpoint = tagStashID(tagIdxWithPerformer).Endpoint
		stashID  = tagStashID(tagIdxWithPerformer).StashID
		stashID2 = tagStashID(tagIdx1WithPerformer).StashID
		stashIDs = []*string{&stashID, &stashID2}
	)

	tests := []struct {
		name        string
		findFilter  *models.FindFilterType
		filter      *models.TagFilterType
		includeIdxs []int
		excludeIdxs []int
		wantErr     bool
	}{
		{
			"stash id with endpoint",
			nil,
			&models.TagFilterType{
				StashIDEndpoint: &models.StashIDCriterionInput{
					Endpoint: &endpoint,
					StashID:  &stashID,
					Modifier: models.CriterionModifierEquals,
				},
			},
			[]int{tagIdxWithPerformer},
			nil,
			false,
		},
		{
			"exclude stash id with endpoint",
			nil,
			&models.TagFilterType{
				StashIDEndpoint: &models.StashIDCriterionInput{
					Endpoint: &endpoint,
					StashID:  &stashID,
					Modifier: models.CriterionModifierNotEquals,
				},
			},
			nil,
			[]int{tagIdxWithPerformer},
			false,
		},
		{
			"null stash id with endpoint",
			nil,
			&models.TagFilterType{
				StashIDEndpoint: &models.StashIDCriterionInput{
					Endpoint: &endpoint,
					Modifier: models.CriterionModifierIsNull,
				},
			},
			nil,
			[]int{tagIdxWithPerformer},
			false,
		},
		{
			"not null stash id with endpoint",
			nil,
			&models.TagFilterType{
				StashIDEndpoint: &models.StashIDCriterionInput{
					Endpoint: &endpoint,
					Modifier: models.CriterionModifierNotNull,
				},
			},
			[]int{tagIdxWithPerformer},
			nil,
			false,
		},
		{
			"stash ids with endpoint",
			nil,
			&models.TagFilterType{
				StashIDsEndpoint: &models.StashIDsCriterionInput{
					Endpoint: &endpoint,
					StashIDs: stashIDs,
					Modifier: models.CriterionModifierEquals,
				},
			},
			[]int{tagIdxWithPerformer, tagIdx1WithPerformer},
			nil,
			false,
		},
		{
			"exclude stash ids with endpoint",
			nil,
			&models.TagFilterType{
				StashIDsEndpoint: &models.StashIDsCriterionInput{
					Endpoint: &endpoint,
					StashIDs: stashIDs,
					Modifier: models.CriterionModifierNotEquals,
				},
			},
			nil,
			[]int{tagIdxWithPerformer, tagIdx1WithPerformer},
			false,
		},
		{
			"null stash ids with endpoint",
			nil,
			&models.TagFilterType{
				StashIDsEndpoint: &models.StashIDsCriterionInput{
					Endpoint: &endpoint,
					Modifier: models.CriterionModifierIsNull,
				},
			},
			nil,
			[]int{tagIdxWithPerformer, tagIdx1WithPerformer},
			false,
		},
		{
			"not null stash ids with endpoint",
			nil,
			&models.TagFilterType{
				StashIDsEndpoint: &models.StashIDsCriterionInput{
					Endpoint: &endpoint,
					Modifier: models.CriterionModifierNotNull,
				},
			},
			[]int{tagIdxWithPerformer, tagIdx1WithPerformer},
			nil,
			false,
		},
	}

	for _, tt := range tests {
		runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
			assert := assert.New(t)

			tags, _, err := db.Tag.Query(ctx, tt.filter, tt.findFilter)
			// was "PerformerStore.Query()": this test queries the tag store
			if (err != nil) != tt.wantErr {
				t.Errorf("TagStore.Query() error = %v, wantErr %v", err, tt.wantErr)
				return
			}

			ids := tagsToIDs(tags)

			include := indexesToIDs(tagIDs, tt.includeIdxs)
			exclude := indexesToIDs(tagIDs, tt.excludeIdxs)

			for _, i := range include {
				assert.Contains(ids, i)
			}
			for _, e := range exclude {
				assert.NotContains(ids, e)
			}
		})
	}
}
func TestTagQueryIsMissingImage(t *testing.T) {
withTxn(func(ctx context.Context) error {
qb := db.Tag

View file

@ -16,8 +16,8 @@ const (
type dbReader interface {
Get(dest interface{}, query string, args ...interface{}) error
Select(dest interface{}, query string, args ...interface{}) error
Queryx(query string, args ...interface{}) (*sqlx.Rows, error)
GetContext(ctx context.Context, dest interface{}, query string, args ...interface{}) error
SelectContext(ctx context.Context, dest interface{}, query string, args ...interface{}) error
QueryxContext(ctx context.Context, query string, args ...interface{}) (*sqlx.Rows, error)
}
@ -54,7 +54,7 @@ func (*dbWrapperType) Get(ctx context.Context, dest interface{}, query string, a
}
start := time.Now()
err = tx.Get(dest, query, args...)
err = tx.GetContext(ctx, dest, query, args...)
logSQL(start, query, args...)
return sqlError(err, query, args...)
@ -67,7 +67,7 @@ func (*dbWrapperType) Select(ctx context.Context, dest interface{}, query string
}
start := time.Now()
err = tx.Select(dest, query, args...)
err = tx.SelectContext(ctx, dest, query, args...)
logSQL(start, query, args...)
return sqlError(err, query, args...)
@ -80,23 +80,14 @@ func (*dbWrapperType) Queryx(ctx context.Context, query string, args ...interfac
}
start := time.Now()
ret, err := tx.Queryx(query, args...)
ret, err := tx.QueryxContext(ctx, query, args...)
logSQL(start, query, args...)
return ret, sqlError(err, query, args...)
}
func (*dbWrapperType) QueryxContext(ctx context.Context, query string, args ...interface{}) (*sqlx.Rows, error) {
tx, err := getDBReader(ctx)
if err != nil {
return nil, sqlError(err, query, args...)
}
start := time.Now()
ret, err := tx.QueryxContext(ctx, query, args...)
logSQL(start, query, args...)
return ret, sqlError(err, query, args...)
return dbWrapper.Queryx(ctx, query, args...)
}
func (*dbWrapperType) NamedExec(ctx context.Context, query string, arg interface{}) (sql.Result, error) {
@ -106,7 +97,7 @@ func (*dbWrapperType) NamedExec(ctx context.Context, query string, arg interface
}
start := time.Now()
ret, err := tx.NamedExec(query, arg)
ret, err := tx.NamedExecContext(ctx, query, arg)
logSQL(start, query, arg)
return ret, sqlError(err, query, arg)
@ -119,7 +110,7 @@ func (*dbWrapperType) Exec(ctx context.Context, query string, args ...interface{
}
start := time.Now()
ret, err := tx.Exec(query, args...)
ret, err := tx.ExecContext(ctx, query, args...)
logSQL(start, query, args...)
return ret, sqlError(err, query, args...)

View file

@ -17,6 +17,8 @@ type StashBoxGraphQLClient interface {
FindPerformerByID(ctx context.Context, id string, interceptors ...clientv2.RequestInterceptor) (*FindPerformerByID, error)
FindSceneByID(ctx context.Context, id string, interceptors ...clientv2.RequestInterceptor) (*FindSceneByID, error)
FindStudio(ctx context.Context, id *string, name *string, interceptors ...clientv2.RequestInterceptor) (*FindStudio, error)
FindTag(ctx context.Context, id *string, name *string, interceptors ...clientv2.RequestInterceptor) (*FindTag, error)
QueryTags(ctx context.Context, input TagQueryInput, interceptors ...clientv2.RequestInterceptor) (*QueryTags, error)
SubmitFingerprint(ctx context.Context, input FingerprintSubmission, interceptors ...clientv2.RequestInterceptor) (*SubmitFingerprint, error)
Me(ctx context.Context, interceptors ...clientv2.RequestInterceptor) (*Me, error)
SubmitSceneDraft(ctx context.Context, input SceneDraftInput, interceptors ...clientv2.RequestInterceptor) (*SubmitSceneDraft, error)
@ -642,6 +644,24 @@ func (t *FindStudio_FindStudio_StudioFragment_Parent) GetName() string {
return t.Name
}
// QueryTags_QueryTags is the payload of the queryTags operation: a total
// result count plus one page of tags. This appears to be gqlgenc-generated
// client code — presumably regenerated from the GraphQL schema; avoid hand edits.
type QueryTags_QueryTags struct {
Count int "json:\"count\" graphql:\"count\""
Tags []*TagFragment "json:\"tags\" graphql:\"tags\""
}

// GetCount returns the total number of matching tags.
// Nil-safe: a nil receiver is replaced with a zero value so the getter
// returns the zero count instead of panicking.
func (t *QueryTags_QueryTags) GetCount() int {
if t == nil {
t = &QueryTags_QueryTags{}
}
return t.Count
}

// GetTags returns the page of tags in the result.
// Nil-safe: returns nil rather than panicking on a nil receiver.
func (t *QueryTags_QueryTags) GetTags() []*TagFragment {
if t == nil {
t = &QueryTags_QueryTags{}
}
return t.Tags
}
type Me_Me struct {
Name string "json:\"name\" graphql:\"name\""
}
@ -763,6 +783,28 @@ func (t *FindStudio) GetFindStudio() *StudioFragment {
return t.FindStudio
}
// FindTag wraps the response of the findTag operation; FindTag is nil when
// no tag matched. This appears to be gqlgenc-generated client code —
// presumably regenerated from the GraphQL schema; avoid hand edits.
type FindTag struct {
FindTag *TagFragment "json:\"findTag,omitempty\" graphql:\"findTag\""
}

// GetFindTag returns the matched tag fragment, or nil when absent.
// Nil-safe: a nil receiver yields nil rather than panicking.
func (t *FindTag) GetFindTag() *TagFragment {
if t == nil {
t = &FindTag{}
}
return t.FindTag
}

// QueryTags wraps the response of the queryTags operation.
type QueryTags struct {
QueryTags QueryTags_QueryTags "json:\"queryTags\" graphql:\"queryTags\""
}

// GetQueryTags returns a pointer to the embedded query result.
// Nil-safe: a nil receiver yields a pointer to a zero-value result.
func (t *QueryTags) GetQueryTags() *QueryTags_QueryTags {
if t == nil {
t = &QueryTags{}
}
return &t.QueryTags
}
type SubmitFingerprint struct {
SubmitFingerprint bool "json:\"submitFingerprint\" graphql:\"submitFingerprint\""
}
@ -1695,6 +1737,66 @@ func (c *Client) FindStudio(ctx context.Context, id *string, name *string, inter
return &res, nil
}
// FindTagDocument is the GraphQL document sent by Client.FindTag. The raw
// string content is part of the request body — do not reformat it.
const FindTagDocument = `query FindTag ($id: ID, $name: String) {
findTag(id: $id, name: $name) {
... TagFragment
}
}
fragment TagFragment on Tag {
name
id
}
`

// FindTag looks up a single stash-box tag by id or name.
// This appears to be gqlgenc-generated client code — presumably regenerated
// from the GraphQL schema; avoid hand edits.
func (c *Client) FindTag(ctx context.Context, id *string, name *string, interceptors ...clientv2.RequestInterceptor) (*FindTag, error) {
vars := map[string]any{
"id": id,
"name": name,
}

var res FindTag
// When ParseDataWhenErrors is set, partial data is returned alongside the error.
if err := c.Client.Post(ctx, "FindTag", FindTagDocument, &res, vars, interceptors...); err != nil {
if c.Client.ParseDataWhenErrors {
return &res, err
}

return nil, err
}

return &res, nil
}
// QueryTagsDocument is the GraphQL document sent by Client.QueryTags. The raw
// string content is part of the request body — do not reformat it.
const QueryTagsDocument = `query QueryTags ($input: TagQueryInput!) {
queryTags(input: $input) {
count
tags {
... TagFragment
}
}
}
fragment TagFragment on Tag {
name
id
}
`

// QueryTags searches stash-box tags using the given query input.
// This appears to be gqlgenc-generated client code — presumably regenerated
// from the GraphQL schema; avoid hand edits.
func (c *Client) QueryTags(ctx context.Context, input TagQueryInput, interceptors ...clientv2.RequestInterceptor) (*QueryTags, error) {
vars := map[string]any{
"input": input,
}

var res QueryTags
// When ParseDataWhenErrors is set, partial data is returned alongside the error.
if err := c.Client.Post(ctx, "QueryTags", QueryTagsDocument, &res, vars, interceptors...); err != nil {
if c.Client.ParseDataWhenErrors {
return &res, err
}

return nil, err
}

return &res, nil
}
const SubmitFingerprintDocument = `mutation SubmitFingerprint ($input: FingerprintSubmission!) {
submitFingerprint(input: $input)
}
@ -1796,6 +1898,8 @@ var DocumentOperationNames = map[string]string{
FindPerformerByIDDocument: "FindPerformerByID",
FindSceneByIDDocument: "FindSceneByID",
FindStudioDocument: "FindStudio",
FindTagDocument: "FindTag",
QueryTagsDocument: "QueryTags",
SubmitFingerprintDocument: "SubmitFingerprint",
MeDocument: "Me",
SubmitSceneDraftDocument: "SubmitSceneDraft",

View file

@ -125,8 +125,8 @@ func translateGender(gender *graphql.GenderEnum) *string {
return nil
}
func formatMeasurements(m graphql.MeasurementsFragment) *string {
if m.BandSize != nil && m.CupSize != nil && m.Hip != nil && m.Waist != nil {
func formatMeasurements(m *graphql.MeasurementsFragment) *string {
if m != nil && m.BandSize != nil && m.CupSize != nil && m.Hip != nil && m.Waist != nil {
ret := fmt.Sprintf("%d%s-%d-%d", *m.BandSize, *m.CupSize, *m.Waist, *m.Hip)
return &ret
}
@ -209,7 +209,7 @@ func performerFragmentToScrapedPerformer(p graphql.PerformerFragment) *models.Sc
Name: &p.Name,
Disambiguation: p.Disambiguation,
Country: p.Country,
Measurements: formatMeasurements(*p.Measurements),
Measurements: formatMeasurements(p.Measurements),
CareerLength: formatCareerLength(p.CareerStartYear, p.CareerEndYear),
Tattoos: formatBodyModifications(p.Tattoos),
Piercings: formatBodyModifications(p.Piercings),

67
pkg/stashbox/tag.go Normal file
View file

@ -0,0 +1,67 @@
package stashbox
import (
"context"
"github.com/google/uuid"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/stashbox/graphql"
)
// QueryTag searches for tags by name or ID.
// If query is a valid UUID, it searches by ID (returns single result).
// Otherwise, it searches by name (returns multiple results).
func (c Client) QueryTag(ctx context.Context, query string) ([]*models.ScrapedTag, error) {
	// A parseable UUID means the caller supplied a stash-box tag ID.
	if _, err := uuid.Parse(query); err == nil {
		return c.findTagByID(ctx, query)
	}

	// fall back to a name search
	return c.queryTagsByName(ctx, query)
}
// findTagByID looks up a single tag on the stash-box instance by its UUID.
// It returns a one-element slice on a match, or nil when the tag is unknown.
func (c Client) findTagByID(ctx context.Context, id string) ([]*models.ScrapedTag, error) {
	resp, err := c.client.FindTag(ctx, &id, nil)
	if err != nil {
		return nil, err
	}

	found := resp.FindTag
	if found == nil {
		return nil, nil
	}

	scraped := &models.ScrapedTag{
		Name:         found.Name,
		RemoteSiteID: &found.ID,
	}
	return []*models.ScrapedTag{scraped}, nil
}
// queryTagsByName searches the stash-box instance for tags matching the given
// name, returning up to one page (25) of results sorted by name ascending.
// Returns nil (not an empty slice) when there are no matches.
func (c Client) queryTagsByName(ctx context.Context, name string) ([]*models.ScrapedTag, error) {
	input := graphql.TagQueryInput{
		Name:      &name,
		Page:      1,
		PerPage:   25,
		Direction: graphql.SortDirectionEnumAsc,
		Sort:      graphql.TagSortEnumName,
	}

	result, err := c.client.QueryTags(ctx, input)
	if err != nil {
		return nil, err
	}

	// no explicit nil check needed: ranging over a nil Tags slice is a no-op,
	// so an empty result yields a nil return slice exactly as before
	var ret []*models.ScrapedTag
	for _, t := range result.QueryTags.Tags {
		ret = append(ret, &models.ScrapedTag{
			Name:         t.Name,
			RemoteSiteID: &t.ID,
		})
	}

	return ret, nil
}

View file

@ -1,22 +0,0 @@
package utils
import (
"context"
"time"
)
type ValueOnlyContext struct {
context.Context
}
func (ValueOnlyContext) Deadline() (deadline time.Time, ok bool) {
return
}
func (ValueOnlyContext) Done() <-chan struct{} {
return nil
}
func (ValueOnlyContext) Err() error {
return nil
}

View file

@ -23,17 +23,5 @@ func ParseDateStringAsTime(dateString string) (time.Time, error) {
return t, nil
}
// Support partial dates: year-month format
t, e = time.Parse("2006-01", dateString)
if e == nil {
return t, nil
}
// Support partial dates: year only format
t, e = time.Parse("2006", dateString)
if e == nil {
return t, nil
}
return time.Time{}, fmt.Errorf("ParseDateStringAsTime failed: dateString <%s>", dateString)
}

View file

@ -2,7 +2,6 @@ package utils
import (
"testing"
"time"
)
func TestParseDateStringAsTime(t *testing.T) {
@ -16,13 +15,11 @@ func TestParseDateStringAsTime(t *testing.T) {
{"Date only", "2014-01-02", false},
{"Date with time", "2014-01-02 15:04:05", false},
// Partial date formats (new support)
{"Year-Month", "2006-08", false},
{"Year only", "2014", false},
// Invalid formats
{"Invalid format", "not-a-date", true},
{"Empty string", "", true},
{"Year-Month", "2006-08", true},
{"Year only", "2014", true},
}
for _, tt := range tests {
@ -44,37 +41,3 @@ func TestParseDateStringAsTime(t *testing.T) {
})
}
}
// TestParseDateStringAsTime_YearOnly verifies that a bare year string
// parses to midnight on January 1st of that year.
func TestParseDateStringAsTime_YearOnly(t *testing.T) {
	got, err := ParseDateStringAsTime("2014")
	if err != nil {
		t.Fatalf("Failed to parse year-only date: %v", err)
	}
	if y := got.Year(); y != 2014 {
		t.Errorf("Expected year 2014, got %d", y)
	}
	if m := got.Month(); m != time.January {
		t.Errorf("Expected month January, got %s", m)
	}
	if d := got.Day(); d != 1 {
		t.Errorf("Expected day 1, got %d", d)
	}
}
// TestParseDateStringAsTime_YearMonth verifies that a year-month string
// parses to the first day of that month.
func TestParseDateStringAsTime_YearMonth(t *testing.T) {
	got, err := ParseDateStringAsTime("2006-08")
	if err != nil {
		t.Fatalf("Failed to parse year-month date: %v", err)
	}
	if y := got.Year(); y != 2006 {
		t.Errorf("Expected year 2006, got %d", y)
	}
	if m := got.Month(); m != time.August {
		t.Errorf("Expected month August, got %s", m)
	}
	if d := got.Day(); d != 1 {
		t.Errorf("Expected day 1, got %d", d)
	}
}

View file

@ -107,6 +107,7 @@ fragment ConfigInterfaceData on ConfigInterfaceResult {
tag
studio
movie
gallery
}
handyKey
funscriptOffset

View file

@ -1,3 +1,4 @@
# Full fragment for detail views - includes recursive counts
fragment GroupData on Group {
id
name
@ -39,3 +40,44 @@ fragment GroupData on Group {
title
}
}
# Lightweight fragment for list views - excludes expensive recursive counts
# The _all fields (depth: -1) cause 10+ second queries on large databases
fragment ListGroupData on Group {
  id
  name
  aliases
  duration
  date
  rating100
  director
  studio {
    ...SlimStudioData
  }
  tags {
    ...SlimTagData
  }
  # Containing groups with the per-relationship description.
  containing_groups {
    group {
      ...SlimGroupData
    }
    description
  }
  synopsis
  urls
  front_image_path
  back_image_path
  # Direct (non-recursive) counts only.
  scene_count
  performer_count
  sub_group_count
  o_counter
  scenes {
    id
    title
  }
}

View file

@ -6,4 +6,10 @@ fragment SlimTagData on Tag {
image_path
parent_count
child_count
stash_ids {
endpoint
stash_id
updated_at
}
}

View file

@ -50,4 +50,43 @@ fragment SelectTagData on Tag {
name
sort_name
}
stash_ids {
endpoint
stash_id
updated_at
}
}
# Optimized fragment for tag list page - excludes expensive recursive *_count_all fields
fragment TagListData on Tag {
  id
  name
  sort_name
  description
  aliases
  ignore_auto_tag
  favorite
  # External references (stash-box endpoint + remote id).
  stash_ids {
    endpoint
    stash_id
    updated_at
  }
  image_path
  # Direct counts only - no recursive depth queries
  scene_count
  scene_marker_count
  image_count
  gallery_count
  performer_count
  studio_count
  group_count
  parents {
    ...SlimTagData
  }
  children {
    ...SlimTagData
  }
}

View file

@ -23,3 +23,9 @@ mutation PerformerDestroy($id: ID!) {
mutation PerformersDestroy($ids: [ID!]!) {
performersDestroy(ids: $ids)
}
# Merges performers per the input and returns the surviving performer's id.
mutation PerformerMerge($input: PerformerMergeInput!) {
  performerMerge(input: $input) {
    id
  }
}

View file

@ -9,14 +9,27 @@ query FindImages(
image_ids: $image_ids
) {
count
megapixels
filesize
images {
...SlimImageData
}
}
}
# Fetches only the aggregate metadata (megapixels, filesize) for a matching
# image set, without retrieving the image list itself.
query FindImagesMetadata(
  $filter: FindFilterType
  $image_filter: ImageFilterType
  $image_ids: [Int!]
) {
  findImages(
    filter: $filter
    image_filter: $image_filter
    image_ids: $image_ids
  ) {
    megapixels
    filesize
  }
}
query FindImage($id: ID!, $checksum: String) {
findImage(id: $id, checksum: $checksum) {
...ImageData

View file

@ -2,7 +2,7 @@ query FindGroups($filter: FindFilterType, $group_filter: GroupFilterType) {
findGroups(filter: $filter, group_filter: $group_filter) {
count
groups {
...GroupData
...ListGroupData
}
}
}

Some files were not shown because too many files have changed in this diff Show more