Merge branch 'stashapp:develop' into line-break-titles

This commit is contained in:
randemgame 2024-12-04 03:12:57 +02:00 committed by GitHub
commit 8f8211eeee
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
97 changed files with 3386 additions and 495 deletions

3
go.mod
View file

@ -26,6 +26,7 @@ require (
github.com/gorilla/sessions v1.2.1
github.com/gorilla/websocket v1.5.0
github.com/hashicorp/golang-lru/v2 v2.0.7
github.com/hasura/go-graphql-client v0.13.1
github.com/jinzhu/copier v0.4.0
github.com/jmoiron/sqlx v1.4.0
github.com/json-iterator/go v1.1.12
@ -39,7 +40,6 @@ require (
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8
github.com/remeh/sizedwaitgroup v1.0.0
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd
github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f
github.com/sirupsen/logrus v1.9.3
github.com/spf13/cast v1.6.0
github.com/spf13/pflag v1.0.5
@ -67,6 +67,7 @@ require (
github.com/asticode/go-astikit v0.20.0 // indirect
github.com/asticode/go-astits v1.8.0 // indirect
github.com/chromedp/sysutil v1.0.0 // indirect
github.com/coder/websocket v1.8.12 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dlclark/regexp2 v1.7.0 // indirect

6
go.sum
View file

@ -153,6 +153,8 @@ github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWH
github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/coder/websocket v1.8.12 h1:5bUXkEPPIbewrnkU8LTCLVaxi4N4J8ahufH2vlo4NAo=
github.com/coder/websocket v1.8.12/go.mod h1:LNVeNrXQZfe5qhS9ALED3uA+l5pPqvwXg3CKoDBB2gs=
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
@ -394,6 +396,8 @@ github.com/hashicorp/vault/api v1.0.4/go.mod h1:gDcqh3WGcR1cpF5AJz/B1UFheUEneMoI
github.com/hashicorp/vault/sdk v0.1.13/go.mod h1:B+hVj7TpuQY1Y/GPbCpffmgd+tSEwvhkWnjtSYCaS2M=
github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM=
github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM=
github.com/hasura/go-graphql-client v0.13.1 h1:kKbjhxhpwz58usVl+Xvgah/TDha5K2akNTRQdsEHN6U=
github.com/hasura/go-graphql-client v0.13.1/go.mod h1:k7FF7h53C+hSNFRG3++DdVZWIuHdCaTbI7siTJ//zGQ=
github.com/hjson/hjson-go/v4 v4.0.0 h1:wlm6IYYqHjOdXH1gHev4VoXCaW20HdQAGCxdOEEg2cs=
github.com/hjson/hjson-go/v4 v4.0.0/go.mod h1:KaYt3bTw3zhBjYqnXkYywcYctk0A2nxeEFTse3rH13E=
github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo=
@ -591,8 +595,6 @@ github.com/sagikazarmark/crypt v0.4.0/go.mod h1:ALv2SRj7GxYV4HO9elxH9nS6M9gW+xDN
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f h1:tygelZueB1EtXkPI6mQ4o9DQ0+FKW41hTbunoXZCTqk=
github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f/go.mod h1:AuYgA5Kyo4c7HfUmvRGs/6rGlMMV/6B1bVnB9JxJEEg=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=

View file

@ -300,6 +300,7 @@ type Mutation {
sceneMarkerCreate(input: SceneMarkerCreateInput!): SceneMarker
sceneMarkerUpdate(input: SceneMarkerUpdateInput!): SceneMarker
sceneMarkerDestroy(id: ID!): Boolean!
sceneMarkersDestroy(ids: [ID!]!): Boolean!
sceneAssignFile(input: AssignSceneFileInput!): Boolean!

View file

@ -91,6 +91,12 @@ input StashIDCriterionInput {
modifier: CriterionModifier!
}
input CustomFieldCriterionInput {
field: String!
value: [Any!]
modifier: CriterionModifier!
}
input PerformerFilterType {
AND: PerformerFilterType
OR: PerformerFilterType
@ -182,6 +188,8 @@ input PerformerFilterType {
created_at: TimestampCriterionInput
"Filter by last update time"
updated_at: TimestampCriterionInput
custom_fields: [CustomFieldCriterionInput!]
}
input SceneMarkerFilterType {
@ -193,6 +201,8 @@ input SceneMarkerFilterType {
performers: MultiCriterionInput
"Filter to only include scene markers from these scenes"
scenes: MultiCriterionInput
"Filter by duration (in seconds)"
duration: FloatCriterionInput
"Filter by creation time"
created_at: TimestampCriterionInput
"Filter by last update time"

View file

@ -338,3 +338,10 @@ type SystemStatus {
input MigrateInput {
backupPath: String!
}
input CustomFieldsInput {
"If populated, the entire custom fields map will be replaced with this value"
full: Map
"If populated, only the keys in this map will be updated"
partial: Map
}

View file

@ -58,6 +58,8 @@ type Performer {
updated_at: Time!
groups: [Group!]!
movies: [Movie!]! @deprecated(reason: "use groups instead")
custom_fields: Map!
}
input PerformerCreateInput {
@ -93,6 +95,8 @@ input PerformerCreateInput {
hair_color: String
weight: Int
ignore_auto_tag: Boolean
custom_fields: Map
}
input PerformerUpdateInput {
@ -129,6 +133,8 @@ input PerformerUpdateInput {
hair_color: String
weight: Int
ignore_auto_tag: Boolean
custom_fields: CustomFieldsInput
}
input BulkUpdateStrings {
@ -167,6 +173,8 @@ input BulkPerformerUpdateInput {
hair_color: String
weight: Int
ignore_auto_tag: Boolean
custom_fields: CustomFieldsInput
}
input PerformerDestroyInput {

View file

@ -16,12 +16,12 @@ import (
const (
tripwireActivatedErrMsg = "Stash is exposed to the public internet without authentication, and is not serving any more content to protect your privacy. " +
"More information and fixes are available at https://docs.stashapp.cc/networking/authentication-required-when-accessing-stash-from-the-internet"
"More information and fixes are available at https://docs.stashapp.cc/faq/setup/#protecting-against-accidental-exposure-to-the-internet"
externalAccessErrMsg = "You have attempted to access Stash over the internet, and authentication is not enabled. " +
"This is extremely dangerous! The whole world can see your stash page and browse your files! " +
"Stash is not answering any other requests to protect your privacy. " +
"Please read the log entry or visit https://docs.stashapp.cc/networking/authentication-required-when-accessing-stash-from-the-internet"
"Please read the log entry or visit https://docs.stashapp.cc/faq/setup/#protecting-against-accidental-exposure-to-the-internet"
)
func allowUnauthenticated(r *http.Request) bool {

36
internal/api/json.go Normal file
View file

@ -0,0 +1,36 @@
package api
import (
"encoding/json"
"strings"
)
// JSONNumberToNumber converts a JSON number to either a float64 or int64.
func jsonNumberToNumber(n json.Number) interface{} {
if strings.Contains(string(n), ".") {
f, _ := n.Float64()
return f
}
ret, _ := n.Int64()
return ret
}
// ConvertMapJSONNumbers converts all JSON numbers in a map to either float64 or int64.
func convertMapJSONNumbers(m map[string]interface{}) (ret map[string]interface{}) {
if m == nil {
return nil
}
ret = make(map[string]interface{})
for k, v := range m {
if n, ok := v.(json.Number); ok {
ret[k] = jsonNumberToNumber(n)
} else if mm, ok := v.(map[string]interface{}); ok {
ret[k] = convertMapJSONNumbers(mm)
} else {
ret[k] = v
}
}
return ret
}

60
internal/api/json_test.go Normal file
View file

@ -0,0 +1,60 @@
package api
import (
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
)
func TestConvertMapJSONNumbers(t *testing.T) {
tests := []struct {
name string
input map[string]interface{}
expected map[string]interface{}
}{
{
name: "Convert JSON numbers to numbers",
input: map[string]interface{}{
"int": json.Number("12"),
"float": json.Number("12.34"),
"string": "foo",
},
expected: map[string]interface{}{
"int": int64(12),
"float": 12.34,
"string": "foo",
},
},
{
name: "Convert JSON numbers to numbers in nested maps",
input: map[string]interface{}{
"foo": map[string]interface{}{
"int": json.Number("56"),
"float": json.Number("56.78"),
"nested-string": "bar",
},
"int": json.Number("12"),
"float": json.Number("12.34"),
"string": "foo",
},
expected: map[string]interface{}{
"foo": map[string]interface{}{
"int": int64(56),
"float": 56.78,
"nested-string": "bar",
},
"int": int64(12),
"float": 12.34,
"string": "foo",
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := convertMapJSONNumbers(tt.input)
assert.Equal(t, tt.expected, result)
})
}
}

View file

@ -0,0 +1,221 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/models"
)
// CustomFieldsLoaderConfig captures the config to create a new CustomFieldsLoader
type CustomFieldsLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []int) ([]models.CustomFieldMap, []error)
// Wait is how long wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
MaxBatch int
}
// NewCustomFieldsLoader creates a new CustomFieldsLoader given a fetch, wait, and maxBatch
func NewCustomFieldsLoader(config CustomFieldsLoaderConfig) *CustomFieldsLoader {
return &CustomFieldsLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// CustomFieldsLoader batches and caches requests
type CustomFieldsLoader struct {
// this method provides the data for the loader
fetch func(keys []int) ([]models.CustomFieldMap, []error)
// how long to wait before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[int]models.CustomFieldMap
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *customFieldsLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type customFieldsLoaderBatch struct {
keys []int
data []models.CustomFieldMap
error []error
closing bool
done chan struct{}
}
// Load a CustomFieldMap by key, batching and caching will be applied automatically
func (l *CustomFieldsLoader) Load(key int) (models.CustomFieldMap, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a CustomFieldMap.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *CustomFieldsLoader) LoadThunk(key int) func() (models.CustomFieldMap, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() (models.CustomFieldMap, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &customFieldsLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() (models.CustomFieldMap, error) {
<-batch.done
var data models.CustomFieldMap
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// its convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *CustomFieldsLoader) LoadAll(keys []int) ([]models.CustomFieldMap, []error) {
results := make([]func() (models.CustomFieldMap, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
customFieldMaps := make([]models.CustomFieldMap, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
customFieldMaps[i], errors[i] = thunk()
}
return customFieldMaps, errors
}
// LoadAllThunk returns a function that when called will block waiting for a CustomFieldMaps.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *CustomFieldsLoader) LoadAllThunk(keys []int) func() ([]models.CustomFieldMap, []error) {
results := make([]func() (models.CustomFieldMap, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([]models.CustomFieldMap, []error) {
customFieldMaps := make([]models.CustomFieldMap, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
customFieldMaps[i], errors[i] = thunk()
}
return customFieldMaps, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *CustomFieldsLoader) Prime(key int, value models.CustomFieldMap) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
l.unsafeSet(key, value)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *CustomFieldsLoader) Clear(key int) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *CustomFieldsLoader) unsafeSet(key int, value models.CustomFieldMap) {
if l.cache == nil {
l.cache = map[int]models.CustomFieldMap{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *customFieldsLoaderBatch) keyIndex(l *CustomFieldsLoader, key int) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *customFieldsLoaderBatch) startTimer(l *CustomFieldsLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *customFieldsLoaderBatch) end(l *CustomFieldsLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}

View file

@ -13,6 +13,7 @@
//go:generate go run github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden CustomFieldsLoader int github.com/stashapp/stash/pkg/models.CustomFieldMap
//go:generate go run github.com/vektah/dataloaden SceneOCountLoader int int
//go:generate go run github.com/vektah/dataloaden ScenePlayCountLoader int int
//go:generate go run github.com/vektah/dataloaden SceneOHistoryLoader int []time.Time
@ -51,13 +52,16 @@ type Loaders struct {
ImageFiles *ImageFileIDsLoader
GalleryFiles *GalleryFileIDsLoader
GalleryByID *GalleryLoader
ImageByID *ImageLoader
PerformerByID *PerformerLoader
StudioByID *StudioLoader
TagByID *TagLoader
GroupByID *GroupLoader
FileByID *FileLoader
GalleryByID *GalleryLoader
ImageByID *ImageLoader
PerformerByID *PerformerLoader
PerformerCustomFields *CustomFieldsLoader
StudioByID *StudioLoader
TagByID *TagLoader
GroupByID *GroupLoader
FileByID *FileLoader
}
type Middleware struct {
@ -88,6 +92,11 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch,
fetch: m.fetchPerformers(ctx),
},
PerformerCustomFields: &CustomFieldsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchPerformerCustomFields(ctx),
},
StudioByID: &StudioLoader{
wait: wait,
maxBatch: maxBatch,
@ -214,6 +223,18 @@ func (m Middleware) fetchPerformers(ctx context.Context) func(keys []int) ([]*mo
}
}
func (m Middleware) fetchPerformerCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) {
return func(keys []int) (ret []models.CustomFieldMap, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Performer.GetCustomFieldsBulk(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchStudios(ctx context.Context) func(keys []int) ([]*models.Studio, []error) {
return func(keys []int) (ret []*models.Studio, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {

View file

@ -268,6 +268,19 @@ func (r *performerResolver) Groups(ctx context.Context, obj *models.Performer) (
return ret, nil
}
func (r *performerResolver) CustomFields(ctx context.Context, obj *models.Performer) (map[string]interface{}, error) {
m, err := loaders.From(ctx).PerformerCustomFields.Load(obj.ID)
if err != nil {
return nil, err
}
if m == nil {
return make(map[string]interface{}), nil
}
return m, nil
}
// deprecated
func (r *performerResolver) Movies(ctx context.Context, obj *models.Performer) (ret []*models.Group, err error) {
return r.Groups(ctx, obj)

View file

@ -2,6 +2,7 @@ package api
import (
"context"
"encoding/json"
"errors"
"fmt"
"path/filepath"
@ -643,10 +644,14 @@ func (r *mutationResolver) ConfigureUI(ctx context.Context, input map[string]int
c := config.GetInstance()
if input != nil {
// #5483 - convert JSON numbers to float64 or int64
input = convertMapJSONNumbers(input)
c.SetUIConfiguration(input)
}
if partial != nil {
// #5483 - convert JSON numbers to float64 or int64
partial = convertMapJSONNumbers(partial)
// merge partial into existing config
existing := c.GetUIConfiguration()
utils.MergeMaps(existing, partial)
@ -664,6 +669,14 @@ func (r *mutationResolver) ConfigureUISetting(ctx context.Context, key string, v
c := config.GetInstance()
cfg := utils.NestedMap(c.GetUIConfiguration())
// #5483 - convert JSON numbers to float64 or int64
if m, ok := value.(map[string]interface{}); ok {
value = convertMapJSONNumbers(m)
} else if n, ok := value.(json.Number); ok {
value = jsonNumberToNumber(n)
}
cfg.Set(key, value)
return r.ConfigureUI(ctx, cfg, nil)
@ -671,6 +684,9 @@ func (r *mutationResolver) ConfigureUISetting(ctx context.Context, key string, v
func (r *mutationResolver) ConfigurePlugin(ctx context.Context, pluginID string, input map[string]interface{}) (map[string]interface{}, error) {
c := config.GetInstance()
// #5483 - convert JSON numbers to float64 or int64
input = convertMapJSONNumbers(input)
c.SetPluginConfiguration(pluginID, input)
if err := c.Write(); err != nil {

View file

@ -108,7 +108,13 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
return err
}
err = qb.Create(ctx, &newPerformer)
i := &models.CreatePerformerInput{
Performer: &newPerformer,
// convert json.Numbers to int/float
CustomFields: convertMapJSONNumbers(input.CustomFields),
}
err = qb.Create(ctx, i)
if err != nil {
return err
}
@ -290,6 +296,11 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
return nil, fmt.Errorf("converting tag ids: %w", err)
}
updatedPerformer.CustomFields = input.CustomFields
// convert json.Numbers to int/float
updatedPerformer.CustomFields.Full = convertMapJSONNumbers(updatedPerformer.CustomFields.Full)
updatedPerformer.CustomFields.Partial = convertMapJSONNumbers(updatedPerformer.CustomFields.Partial)
var imageData []byte
imageIncluded := translator.hasField("image")
if input.Image != nil {

View file

@ -814,11 +814,16 @@ func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input SceneMar
}
func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (bool, error) {
markerID, err := strconv.Atoi(id)
return r.SceneMarkersDestroy(ctx, []string{id})
}
func (r *mutationResolver) SceneMarkersDestroy(ctx context.Context, markerIDs []string) (bool, error) {
ids, err := stringslice.StringSliceToIntSlice(markerIDs)
if err != nil {
return false, fmt.Errorf("converting id: %w", err)
return false, fmt.Errorf("converting ids: %w", err)
}
var markers []*models.SceneMarker
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
fileDeleter := &scene.FileDeleter{
@ -831,35 +836,45 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b
qb := r.repository.SceneMarker
sqb := r.repository.Scene
marker, err := qb.Find(ctx, markerID)
for _, markerID := range ids {
marker, err := qb.Find(ctx, markerID)
if err != nil {
return err
if err != nil {
return err
}
if marker == nil {
return fmt.Errorf("scene marker with id %d not found", markerID)
}
s, err := sqb.Find(ctx, marker.SceneID)
if err != nil {
return err
}
if s == nil {
return fmt.Errorf("scene with id %d not found", marker.SceneID)
}
markers = append(markers, marker)
if err := scene.DestroyMarker(ctx, s, marker, qb, fileDeleter); err != nil {
return err
}
}
if marker == nil {
return fmt.Errorf("scene marker with id %d not found", markerID)
}
s, err := sqb.Find(ctx, marker.SceneID)
if err != nil {
return err
}
if s == nil {
return fmt.Errorf("scene with id %d not found", marker.SceneID)
}
return scene.DestroyMarker(ctx, s, marker, qb, fileDeleter)
return nil
}); err != nil {
fileDeleter.Rollback()
return false, err
}
// perform the post-commit actions
fileDeleter.Commit()
r.hookExecutor.ExecutePostHooks(ctx, markerID, hook.SceneMarkerDestroyPost, id, nil)
for _, marker := range markers {
r.hookExecutor.ExecutePostHooks(ctx, marker.ID, hook.SceneMarkerDestroyPost, markerIDs, nil)
}
return true, nil
}

View file

@ -91,7 +91,7 @@ func createPerformer(ctx context.Context, pqb models.PerformerWriter) error {
Name: testName,
}
err := pqb.Create(ctx, &performer)
err := pqb.Create(ctx, &models.CreatePerformerInput{Performer: &performer})
if err != nil {
return err
}

View file

@ -41,7 +41,7 @@ func createMissingPerformer(ctx context.Context, endpoint string, w PerformerCre
return nil, err
}
err = w.Create(ctx, newPerformer)
err = w.Create(ctx, &models.CreatePerformerInput{Performer: newPerformer})
if err != nil {
return nil, fmt.Errorf("error creating performer: %w", err)
}

View file

@ -24,8 +24,8 @@ func Test_getPerformerID(t *testing.T) {
db := mocks.NewDatabase()
db.Performer.On("Create", testCtx, mock.Anything).Run(func(args mock.Arguments) {
p := args.Get(1).(*models.Performer)
db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Run(func(args mock.Arguments) {
p := args.Get(1).(*models.CreatePerformerInput)
p.ID = validStoredID
}).Return(nil)
@ -154,14 +154,14 @@ func Test_createMissingPerformer(t *testing.T) {
db := mocks.NewDatabase()
db.Performer.On("Create", testCtx, mock.MatchedBy(func(p *models.Performer) bool {
db.Performer.On("Create", testCtx, mock.MatchedBy(func(p *models.CreatePerformerInput) bool {
return p.Name == validName
})).Run(func(args mock.Arguments) {
p := args.Get(1).(*models.Performer)
p := args.Get(1).(*models.CreatePerformerInput)
p.ID = performerID
}).Return(nil)
db.Performer.On("Create", testCtx, mock.MatchedBy(func(p *models.Performer) bool {
db.Performer.On("Create", testCtx, mock.MatchedBy(func(p *models.CreatePerformerInput) bool {
return p.Name == invalidName
})).Return(errors.New("error creating performer"))

View file

@ -1533,7 +1533,7 @@ func (i *Config) GetDefaultGenerateSettings() *models.GenerateMetadataOptions {
}
// GetDangerousAllowPublicWithoutAuth determines if the security feature is enabled.
// See https://docs.stashapp.cc/networking/authentication-required-when-accessing-stash-from-the-internet
// See https://docs.stashapp.cc/faq/setup/#protecting-against-accidental-exposure-to-the-internet
func (i *Config) GetDangerousAllowPublicWithoutAuth() bool {
return i.getBool(dangerousAllowPublicWithoutAuth)
}

View file

@ -194,7 +194,7 @@ func (t *StashBoxBatchTagTask) processMatchedPerformer(ctx context.Context, p *m
return err
}
if err := qb.Create(ctx, newPerformer); err != nil {
if err := qb.Create(ctx, &models.CreatePerformerInput{Performer: newPerformer}); err != nil {
return err
}

View file

@ -188,7 +188,9 @@ func (i *Importer) createPerformers(ctx context.Context, names []string) ([]*mod
newPerformer := models.NewPerformer()
newPerformer.Name = name
err := i.PerformerWriter.Create(ctx, &newPerformer)
err := i.PerformerWriter.Create(ctx, &models.CreatePerformerInput{
Performer: &newPerformer,
})
if err != nil {
return nil, err
}

View file

@ -201,8 +201,8 @@ func TestImporterPreImportWithMissingPerformer(t *testing.T) {
}
db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3)
db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Run(func(args mock.Arguments) {
performer := args.Get(1).(*models.Performer)
db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Run(func(args mock.Arguments) {
performer := args.Get(1).(*models.CreatePerformerInput)
performer.ID = existingPerformerID
}).Return(nil)
@ -235,7 +235,7 @@ func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
}
db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Return(errors.New("Create error"))
db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Return(errors.New("Create error"))
err := i.PreImport(testCtx)
assert.NotNil(t, err)

View file

@ -274,7 +274,9 @@ func (i *Importer) createPerformers(ctx context.Context, names []string) ([]*mod
newPerformer := models.NewPerformer()
newPerformer.Name = name
err := i.PerformerWriter.Create(ctx, &newPerformer)
err := i.PerformerWriter.Create(ctx, &models.CreatePerformerInput{
Performer: &newPerformer,
})
if err != nil {
return nil, err
}

View file

@ -163,8 +163,8 @@ func TestImporterPreImportWithMissingPerformer(t *testing.T) {
}
db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3)
db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Run(func(args mock.Arguments) {
performer := args.Get(1).(*models.Performer)
db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Run(func(args mock.Arguments) {
performer := args.Get(1).(*models.CreatePerformerInput)
performer.ID = existingPerformerID
}).Return(nil)
@ -197,7 +197,7 @@ func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
}
db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Return(errors.New("Create error"))
db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Return(errors.New("Create error"))
err := i.PreImport(testCtx)
assert.NotNil(t, err)

View file

@ -0,0 +1,17 @@
package models
import "context"
// CustomFieldMap maps custom field names to their values.
type CustomFieldMap map[string]interface{}
// CustomFieldsInput describes how an object's custom fields should be
// modified. NOTE(review): callers appear to populate only one of Full or
// Partial at a time — confirm against the resolvers that build this input.
type CustomFieldsInput struct {
// If populated, the entire custom fields map will be replaced with this value
Full map[string]interface{} `json:"full"`
// If populated, only the keys in this map will be updated
Partial map[string]interface{} `json:"partial"`
}
// CustomFieldsReader provides methods for reading the custom fields of
// stored objects.
type CustomFieldsReader interface {
// GetCustomFields returns the custom fields for the object with the given id.
GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error)
// GetCustomFieldsBulk returns one CustomFieldMap per id — presumably in the
// same order as ids; verify against implementations before relying on it.
GetCustomFieldsBulk(ctx context.Context, ids []int) ([]CustomFieldMap, error)
}

View file

@ -194,3 +194,9 @@ type PhashDistanceCriterionInput struct {
type OrientationCriterionInput struct {
Value []OrientationEnum `json:"value"`
}
// CustomFieldCriterionInput filters objects on the value of a single
// custom field, using the given criterion modifier.
type CustomFieldCriterionInput struct {
// Field is the name of the custom field to filter on.
Field string `json:"field"`
// Value holds the value(s) to compare against; interpretation depends on Modifier.
Value []any `json:"value"`
Modifier CriterionModifier `json:"modifier"`
}

View file

@ -65,6 +65,8 @@ type Performer struct {
StashIDs []models.StashID `json:"stash_ids,omitempty"`
IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"`
CustomFields map[string]interface{} `json:"custom_fields,omitempty"`
// deprecated - for import only
URL string `json:"url,omitempty"`
Twitter string `json:"twitter,omitempty"`

View file

@ -80,11 +80,11 @@ func (_m *PerformerReaderWriter) CountByTagID(ctx context.Context, tagID int) (i
}
// Create provides a mock function with given fields: ctx, newPerformer
func (_m *PerformerReaderWriter) Create(ctx context.Context, newPerformer *models.Performer) error {
func (_m *PerformerReaderWriter) Create(ctx context.Context, newPerformer *models.CreatePerformerInput) error {
ret := _m.Called(ctx, newPerformer)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, *models.Performer) error); ok {
if rf, ok := ret.Get(0).(func(context.Context, *models.CreatePerformerInput) error); ok {
r0 = rf(ctx, newPerformer)
} else {
r0 = ret.Error(0)
@ -314,6 +314,52 @@ func (_m *PerformerReaderWriter) GetAliases(ctx context.Context, relatedID int)
return r0, r1
}
// GetCustomFields provides a mock function with given fields: ctx, id
func (_m *PerformerReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) {
ret := _m.Called(ctx, id)
var r0 map[string]interface{}
if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok {
r0 = rf(ctx, id)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(map[string]interface{})
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
r1 = rf(ctx, id)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids
func (_m *PerformerReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) {
ret := _m.Called(ctx, ids)
var r0 []models.CustomFieldMap
if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok {
r0 = rf(ctx, ids)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]models.CustomFieldMap)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok {
r1 = rf(ctx, ids)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetImage provides a mock function with given fields: ctx, performerID
func (_m *PerformerReaderWriter) GetImage(ctx context.Context, performerID int) ([]byte, error) {
ret := _m.Called(ctx, performerID)
@ -502,11 +548,11 @@ func (_m *PerformerReaderWriter) QueryForAutoTag(ctx context.Context, words []st
}
// Update provides a mock function with given fields: ctx, updatedPerformer
func (_m *PerformerReaderWriter) Update(ctx context.Context, updatedPerformer *models.Performer) error {
func (_m *PerformerReaderWriter) Update(ctx context.Context, updatedPerformer *models.UpdatePerformerInput) error {
ret := _m.Called(ctx, updatedPerformer)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, *models.Performer) error); ok {
if rf, ok := ret.Get(0).(func(context.Context, *models.UpdatePerformerInput) error); ok {
r0 = rf(ctx, updatedPerformer)
} else {
r0 = ret.Error(0)

View file

@ -39,6 +39,18 @@ type Performer struct {
StashIDs RelatedStashIDs `json:"stash_ids"`
}
// CreatePerformerInput bundles a new Performer with the initial values for
// its custom fields, for passing to PerformerCreator.Create.
type CreatePerformerInput struct {
*Performer
// CustomFields is the full custom-field map to store for the new performer.
CustomFields map[string]interface{} `json:"custom_fields"`
}
// UpdatePerformerInput bundles an updated Performer with a full or partial
// custom-field update, for passing to PerformerUpdater.Update.
type UpdatePerformerInput struct {
*Performer
CustomFields CustomFieldsInput `json:"custom_fields"`
}
func NewPerformer() Performer {
currentTime := time.Now()
return Performer{
@ -80,6 +92,8 @@ type PerformerPartial struct {
Aliases *UpdateStrings
TagIDs *UpdateIDs
StashIDs *UpdateStashIDs
CustomFields CustomFieldsInput
}
func NewPerformerPartial() PerformerPartial {

View file

@ -198,6 +198,9 @@ type PerformerFilterType struct {
CreatedAt *TimestampCriterionInput `json:"created_at"`
// Filter by updated at
UpdatedAt *TimestampCriterionInput `json:"updated_at"`
// Filter by custom fields
CustomFields []CustomFieldCriterionInput `json:"custom_fields"`
}
type PerformerCreateInput struct {
@ -234,6 +237,8 @@ type PerformerCreateInput struct {
HairColor *string `json:"hair_color"`
Weight *int `json:"weight"`
IgnoreAutoTag *bool `json:"ignore_auto_tag"`
CustomFields map[string]interface{} `json:"custom_fields"`
}
type PerformerUpdateInput struct {
@ -271,4 +276,6 @@ type PerformerUpdateInput struct {
HairColor *string `json:"hair_color"`
Weight *int `json:"weight"`
IgnoreAutoTag *bool `json:"ignore_auto_tag"`
CustomFields CustomFieldsInput `json:"custom_fields"`
}

View file

@ -43,12 +43,12 @@ type PerformerCounter interface {
// PerformerCreator provides methods to create performers.
type PerformerCreator interface {
Create(ctx context.Context, newPerformer *Performer) error
Create(ctx context.Context, newPerformer *CreatePerformerInput) error
}
// PerformerUpdater provides methods to update performers.
type PerformerUpdater interface {
Update(ctx context.Context, updatedPerformer *Performer) error
Update(ctx context.Context, updatedPerformer *UpdatePerformerInput) error
UpdatePartial(ctx context.Context, id int, updatedPerformer PerformerPartial) (*Performer, error)
UpdateImage(ctx context.Context, performerID int, image []byte) error
}
@ -80,6 +80,8 @@ type PerformerReader interface {
TagIDLoader
URLLoader
CustomFieldsReader
All(ctx context.Context) ([]*Performer, error)
GetImage(ctx context.Context, performerID int) ([]byte, error)
HasImage(ctx context.Context, performerID int) (bool, error)

View file

@ -11,6 +11,8 @@ type SceneMarkerFilterType struct {
Performers *MultiCriterionInput `json:"performers"`
// Filter to only include scene markers from these scenes
Scenes *MultiCriterionInput `json:"scenes"`
// Filter by duration (in seconds)
Duration *FloatCriterionInput `json:"duration"`
// Filter by created at
CreatedAt *TimestampCriterionInput `json:"created_at"`
// Filter by updated at

View file

@ -17,6 +17,7 @@ type ImageAliasStashIDGetter interface {
models.AliasLoader
models.StashIDLoader
models.URLLoader
models.CustomFieldsReader
}
// ToJSON converts a Performer object into its JSON equivalent.
@ -87,6 +88,12 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode
newPerformerJSON.StashIDs = performer.StashIDs.List()
var err error
newPerformerJSON.CustomFields, err = reader.GetCustomFields(ctx, performer.ID)
if err != nil {
return nil, fmt.Errorf("getting performer custom fields: %v", err)
}
image, err := reader.GetImage(ctx, performer.ID)
if err != nil {
logger.Errorf("Error getting performer image: %v", err)

View file

@ -15,9 +15,11 @@ import (
)
const (
performerID = 1
noImageID = 2
errImageID = 3
performerID = 1
noImageID = 2
errImageID = 3
customFieldsID = 4
errCustomFieldsID = 5
)
const (
@ -50,6 +52,11 @@ var (
penisLength = 1.23
circumcisedEnum = models.CircumisedEnumCut
circumcised = circumcisedEnum.String()
emptyCustomFields = make(map[string]interface{})
customFields = map[string]interface{}{
"customField1": "customValue1",
}
)
var imageBytes = []byte("imageBytes")
@ -118,8 +125,8 @@ func createEmptyPerformer(id int) models.Performer {
}
}
func createFullJSONPerformer(name string, image string) *jsonschema.Performer {
return &jsonschema.Performer{
func createFullJSONPerformer(name string, image string, withCustomFields bool) *jsonschema.Performer {
ret := &jsonschema.Performer{
Name: name,
Disambiguation: disambiguation,
URLs: []string{url, twitter, instagram},
@ -152,7 +159,13 @@ func createFullJSONPerformer(name string, image string) *jsonschema.Performer {
Weight: weight,
StashIDs: stashIDs,
IgnoreAutoTag: autoTagIgnored,
CustomFields: emptyCustomFields,
}
if withCustomFields {
ret.CustomFields = customFields
}
return ret
}
func createEmptyJSONPerformer() *jsonschema.Performer {
@ -166,13 +179,15 @@ func createEmptyJSONPerformer() *jsonschema.Performer {
UpdatedAt: json.JSONTime{
Time: updateTime,
},
CustomFields: emptyCustomFields,
}
}
type testScenario struct {
input models.Performer
expected *jsonschema.Performer
err bool
input models.Performer
customFields map[string]interface{}
expected *jsonschema.Performer
err bool
}
var scenarios []testScenario
@ -181,20 +196,36 @@ func initTestTable() {
scenarios = []testScenario{
{
*createFullPerformer(performerID, performerName),
createFullJSONPerformer(performerName, image),
emptyCustomFields,
createFullJSONPerformer(performerName, image, false),
false,
},
{
*createFullPerformer(customFieldsID, performerName),
customFields,
createFullJSONPerformer(performerName, image, true),
false,
},
{
createEmptyPerformer(noImageID),
emptyCustomFields,
createEmptyJSONPerformer(),
false,
},
{
*createFullPerformer(errImageID, performerName),
createFullJSONPerformer(performerName, ""),
emptyCustomFields,
createFullJSONPerformer(performerName, "", false),
// failure to get image should not cause an error
false,
},
{
*createFullPerformer(errCustomFieldsID, performerName),
customFields,
nil,
// failure to get custom fields should cause an error
true,
},
}
}
@ -204,11 +235,19 @@ func TestToJSON(t *testing.T) {
db := mocks.NewDatabase()
imageErr := errors.New("error getting image")
customFieldsErr := errors.New("error getting custom fields")
db.Performer.On("GetImage", testCtx, performerID).Return(imageBytes, nil).Once()
db.Performer.On("GetImage", testCtx, customFieldsID).Return(imageBytes, nil).Once()
db.Performer.On("GetImage", testCtx, noImageID).Return(nil, nil).Once()
db.Performer.On("GetImage", testCtx, errImageID).Return(nil, imageErr).Once()
db.Performer.On("GetCustomFields", testCtx, performerID).Return(emptyCustomFields, nil).Once()
db.Performer.On("GetCustomFields", testCtx, customFieldsID).Return(customFields, nil).Once()
db.Performer.On("GetCustomFields", testCtx, noImageID).Return(emptyCustomFields, nil).Once()
db.Performer.On("GetCustomFields", testCtx, errImageID).Return(emptyCustomFields, nil).Once()
db.Performer.On("GetCustomFields", testCtx, errCustomFieldsID).Return(nil, customFieldsErr).Once()
for i, s := range scenarios {
tag := s.input
json, err := ToJSON(testCtx, db.Performer, &tag)

View file

@ -25,13 +25,15 @@ type Importer struct {
Input jsonschema.Performer
MissingRefBehaviour models.ImportMissingRefEnum
ID int
performer models.Performer
imageData []byte
ID int
performer models.Performer
customFields models.CustomFieldMap
imageData []byte
}
func (i *Importer) PreImport(ctx context.Context) error {
i.performer = performerJSONToPerformer(i.Input)
i.customFields = i.Input.CustomFields
if err := i.populateTags(ctx); err != nil {
return err
@ -165,7 +167,10 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
}
func (i *Importer) Create(ctx context.Context) (*int, error) {
err := i.ReaderWriter.Create(ctx, &i.performer)
err := i.ReaderWriter.Create(ctx, &models.CreatePerformerInput{
Performer: &i.performer,
CustomFields: i.customFields,
})
if err != nil {
return nil, fmt.Errorf("error creating performer: %v", err)
}
@ -175,9 +180,13 @@ func (i *Importer) Create(ctx context.Context) (*int, error) {
}
func (i *Importer) Update(ctx context.Context, id int) error {
performer := i.performer
performer.ID = id
err := i.ReaderWriter.Update(ctx, &performer)
i.performer.ID = id
err := i.ReaderWriter.Update(ctx, &models.UpdatePerformerInput{
Performer: &i.performer,
CustomFields: models.CustomFieldsInput{
Full: i.customFields,
},
})
if err != nil {
return fmt.Errorf("error updating existing performer: %v", err)
}

View file

@ -53,13 +53,14 @@ func TestImporterPreImport(t *testing.T) {
assert.NotNil(t, err)
i.Input = *createFullJSONPerformer(performerName, image)
i.Input = *createFullJSONPerformer(performerName, image, true)
err = i.PreImport(testCtx)
assert.Nil(t, err)
expectedPerformer := *createFullPerformer(0, performerName)
assert.Equal(t, expectedPerformer, i.performer)
assert.Equal(t, models.CustomFieldMap(customFields), i.customFields)
}
func TestImporterPreImportWithTag(t *testing.T) {
@ -234,10 +235,18 @@ func TestCreate(t *testing.T) {
Name: performerName,
}
performerInput := models.CreatePerformerInput{
Performer: &performer,
}
performerErr := models.Performer{
Name: performerNameErr,
}
performerErrInput := models.CreatePerformerInput{
Performer: &performerErr,
}
i := Importer{
ReaderWriter: db.Performer,
TagWriter: db.Tag,
@ -245,11 +254,11 @@ func TestCreate(t *testing.T) {
}
errCreate := errors.New("Create error")
db.Performer.On("Create", testCtx, &performer).Run(func(args mock.Arguments) {
arg := args.Get(1).(*models.Performer)
db.Performer.On("Create", testCtx, &performerInput).Run(func(args mock.Arguments) {
arg := args.Get(1).(*models.CreatePerformerInput)
arg.ID = performerID
}).Return(nil).Once()
db.Performer.On("Create", testCtx, &performerErr).Return(errCreate).Once()
db.Performer.On("Create", testCtx, &performerErrInput).Return(errCreate).Once()
id, err := i.Create(testCtx)
assert.Equal(t, performerID, *id)
@ -284,7 +293,10 @@ func TestUpdate(t *testing.T) {
// id needs to be set for the mock input
performer.ID = performerID
db.Performer.On("Update", testCtx, &performer).Return(nil).Once()
performerInput := models.UpdatePerformerInput{
Performer: &performer,
}
db.Performer.On("Update", testCtx, &performerInput).Return(nil).Once()
err := i.Update(testCtx, performerID)
assert.Nil(t, err)
@ -293,7 +305,10 @@ func TestUpdate(t *testing.T) {
// need to set id separately
performerErr.ID = errImageID
db.Performer.On("Update", testCtx, &performerErr).Return(errUpdate).Once()
performerErrInput := models.UpdatePerformerInput{
Performer: &performerErr,
}
db.Performer.On("Update", testCtx, &performerErrInput).Return(errUpdate).Once()
err = i.Update(testCtx, errImageID)
assert.NotNil(t, err)

View file

@ -8,7 +8,7 @@ import (
"errors"
"fmt"
"github.com/shurcooL/graphql"
graphql "github.com/hasura/go-graphql-client"
"github.com/stashapp/stash/pkg/plugin/common/log"
)

View file

@ -8,7 +8,7 @@ import (
"net/url"
"strconv"
"github.com/shurcooL/graphql"
graphql "github.com/hasura/go-graphql-client"
"github.com/stashapp/stash/pkg/plugin/common"
)

View file

@ -325,7 +325,9 @@ func (i *Importer) createPerformers(ctx context.Context, names []string) ([]*mod
newPerformer := models.NewPerformer()
newPerformer.Name = name
err := i.PerformerWriter.Create(ctx, &newPerformer)
err := i.PerformerWriter.Create(ctx, &models.CreatePerformerInput{
Performer: &newPerformer,
})
if err != nil {
return nil, err
}

View file

@ -327,8 +327,8 @@ func TestImporterPreImportWithMissingPerformer(t *testing.T) {
}
db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3)
db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Run(func(args mock.Arguments) {
p := args.Get(1).(*models.Performer)
db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Run(func(args mock.Arguments) {
p := args.Get(1).(*models.CreatePerformerInput)
p.ID = existingPerformerID
}).Return(nil)
@ -361,7 +361,7 @@ func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
}
db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Return(errors.New("Create error"))
db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Return(errors.New("Create error"))
err := i.PreImport(testCtx)
assert.NotNil(t, err)

View file

@ -114,7 +114,8 @@ func (c config) validate() error {
}
type stashServer struct {
URL string `yaml:"url"`
URL string `yaml:"url"`
ApiKey string `yaml:"apiKey"`
}
type scraperTypeConfig struct {

55
pkg/scraper/graphql.go Normal file
View file

@ -0,0 +1,55 @@
package scraper
import (
"errors"
"strings"
"github.com/hasura/go-graphql-client"
)
type graphqlErrors []error
func (e graphqlErrors) Error() string {
b := strings.Builder{}
for _, err := range e {
_, _ = b.WriteString(err.Error())
}
return b.String()
}
type graphqlError struct {
err graphql.Error
}
func (e graphqlError) Error() string {
unwrapped := e.err.Unwrap()
if unwrapped != nil {
var networkErr graphql.NetworkError
if errors.As(unwrapped, &networkErr) {
if networkErr.StatusCode() == 422 {
return networkErr.Body()
}
}
}
return e.err.Error()
}
// convertGraphqlError converts a graphql.Error or graphql.Errors into an error with a useful message.
// graphql.Error swallows important information, so we need to convert it to a more useful error type.
func convertGraphqlError(err error) error {
var gqlErrs graphql.Errors
if errors.As(err, &gqlErrs) {
ret := make(graphqlErrors, len(gqlErrs))
for i, e := range gqlErrs {
ret[i] = convertGraphqlError(e)
}
return ret
}
var gqlErr graphql.Error
if errors.As(err, &gqlErr) {
return graphqlError{gqlErr}
}
return err
}

View file

@ -122,13 +122,19 @@ func setGroupBackImage(ctx context.Context, client *http.Client, m *models.Scrap
return nil
}
func getImage(ctx context.Context, url string, client *http.Client, globalConfig GlobalConfig) (*string, error) {
type imageGetter struct {
client *http.Client
globalConfig GlobalConfig
requestModifier func(req *http.Request)
}
func (i *imageGetter) getImage(ctx context.Context, url string) (*string, error) {
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
if err != nil {
return nil, err
}
userAgent := globalConfig.GetScraperUserAgent()
userAgent := i.globalConfig.GetScraperUserAgent()
if userAgent != "" {
req.Header.Set("User-Agent", userAgent)
}
@ -140,7 +146,11 @@ func getImage(ctx context.Context, url string, client *http.Client, globalConfig
req.Header.Set("Referer", req.URL.Scheme+"://"+req.Host+"/")
}
resp, err := client.Do(req)
if i.requestModifier != nil {
i.requestModifier(req)
}
resp, err := i.client.Do(req)
if err != nil {
return nil, err
@ -167,10 +177,19 @@ func getImage(ctx context.Context, url string, client *http.Client, globalConfig
return &img, nil
}
func getStashPerformerImage(ctx context.Context, stashURL string, performerID string, client *http.Client, globalConfig GlobalConfig) (*string, error) {
return getImage(ctx, stashURL+"/performer/"+performerID+"/image", client, globalConfig)
// getImage fetches the image at url as a data URL, using a one-off
// imageGetter built from the given HTTP client and global config.
func getImage(ctx context.Context, url string, client *http.Client, globalConfig GlobalConfig) (*string, error) {
	getter := imageGetter{
		client:       client,
		globalConfig: globalConfig,
	}

	return getter.getImage(ctx, url)
}
func getStashSceneImage(ctx context.Context, stashURL string, sceneID string, client *http.Client, globalConfig GlobalConfig) (*string, error) {
return getImage(ctx, stashURL+"/scene/"+sceneID+"/screenshot", client, globalConfig)
// getStashPerformerImage fetches a performer's image from a remote stash
// server via its /performer/{id}/image endpoint.
func getStashPerformerImage(ctx context.Context, stashURL string, performerID string, imageGetter imageGetter) (*string, error) {
	imageURL := stashURL + "/performer/" + performerID + "/image"
	return imageGetter.getImage(ctx, imageURL)
}
// getStashSceneImage fetches a scene's cover image from a remote stash server
// via its /scene/{id}/screenshot endpoint.
func getStashSceneImage(ctx context.Context, stashURL string, sceneID string, imageGetter imageGetter) (*string, error) {
	imageURL := stashURL + "/scene/" + sceneID + "/screenshot"
	return imageGetter.getImage(ctx, imageURL)
}

View file

@ -4,9 +4,11 @@ import (
"context"
"fmt"
"net/http"
"strconv"
"strings"
graphql "github.com/hasura/go-graphql-client"
"github.com/jinzhu/copier"
"github.com/shurcooL/graphql"
"github.com/stashapp/stash/pkg/models"
)
@ -27,9 +29,21 @@ func newStashScraper(scraper scraperTypeConfig, client *http.Client, config conf
}
}
func setApiKeyHeader(apiKey string) func(req *http.Request) {
return func(req *http.Request) {
req.Header.Set("ApiKey", apiKey)
}
}
// getStashClient builds a graphql client for the configured stash server,
// attaching the ApiKey request modifier when an API key is configured.
func (s *stashScraper) getStashClient() *graphql.Client {
	client := graphql.NewClient(s.config.StashServer.URL+"/graphql", s.client)

	if apiKey := s.config.StashServer.ApiKey; apiKey != "" {
		client = client.WithRequestModifier(setApiKeyHeader(apiKey))
	}

	return client
}
type stashFindPerformerNamePerformer struct {
@ -58,14 +72,12 @@ type scrapedTagStash struct {
type scrapedPerformerStash struct {
Name *string `graphql:"name" json:"name"`
Gender *string `graphql:"gender" json:"gender"`
URL *string `graphql:"url" json:"url"`
Twitter *string `graphql:"twitter" json:"twitter"`
Instagram *string `graphql:"instagram" json:"instagram"`
URLs []string `graphql:"urls" json:"urls"`
Birthdate *string `graphql:"birthdate" json:"birthdate"`
Ethnicity *string `graphql:"ethnicity" json:"ethnicity"`
Country *string `graphql:"country" json:"country"`
EyeColor *string `graphql:"eye_color" json:"eye_color"`
Height *string `graphql:"height" json:"height"`
Height *int `graphql:"height_cm" json:"height_cm"`
Measurements *string `graphql:"measurements" json:"measurements"`
FakeTits *string `graphql:"fake_tits" json:"fake_tits"`
PenisLength *string `graphql:"penis_length" json:"penis_length"`
@ -73,12 +85,25 @@ type scrapedPerformerStash struct {
CareerLength *string `graphql:"career_length" json:"career_length"`
Tattoos *string `graphql:"tattoos" json:"tattoos"`
Piercings *string `graphql:"piercings" json:"piercings"`
Aliases *string `graphql:"aliases" json:"aliases"`
Aliases []string `graphql:"alias_list" json:"alias_list"`
Tags []*scrapedTagStash `graphql:"tags" json:"tags"`
Details *string `graphql:"details" json:"details"`
DeathDate *string `graphql:"death_date" json:"death_date"`
HairColor *string `graphql:"hair_color" json:"hair_color"`
Weight *string `graphql:"weight" json:"weight"`
Weight *int `graphql:"weight" json:"weight"`
}
// imageGetter builds an imageGetter using the scraper's HTTP client and global
// config, attaching the ApiKey request modifier when an API key is configured.
func (s *stashScraper) imageGetter() imageGetter {
	ig := imageGetter{
		client:       s.client,
		globalConfig: s.globalConfig,
	}

	if apiKey := s.config.StashServer.ApiKey; apiKey != "" {
		ig.requestModifier = setApiKeyHeader(apiKey)
	}

	return ig
}
func (s *stashScraper) scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) {
@ -102,12 +127,12 @@ func (s *stashScraper) scrapeByFragment(ctx context.Context, input Input) (Scrap
// get the id from the URL field
vars := map[string]interface{}{
"f": performerID,
"f": graphql.ID(performerID),
}
err := client.Query(ctx, &q, vars)
if err != nil {
return nil, err
return nil, convertGraphqlError(err)
}
// need to copy back to a scraped performer
@ -117,11 +142,28 @@ func (s *stashScraper) scrapeByFragment(ctx context.Context, input Input) (Scrap
return nil, err
}
// convert alias list to aliases
aliasStr := strings.Join(q.FindPerformer.Aliases, ", ")
ret.Aliases = &aliasStr
// convert numeric to string
if q.FindPerformer.Height != nil {
heightStr := strconv.Itoa(*q.FindPerformer.Height)
ret.Height = &heightStr
}
if q.FindPerformer.Weight != nil {
weightStr := strconv.Itoa(*q.FindPerformer.Weight)
ret.Weight = &weightStr
}
// get the performer image directly
ret.Image, err = getStashPerformerImage(ctx, s.config.StashServer.URL, performerID, s.client, s.globalConfig)
ig := s.imageGetter()
img, err := getStashPerformerImage(ctx, s.config.StashServer.URL, performerID, ig)
if err != nil {
return nil, err
}
ret.Images = []string{*img}
ret.Image = img
return &ret, nil
}
@ -143,8 +185,15 @@ func (s *stashScraper) scrapedStashSceneToScrapedScene(ctx context.Context, scen
return nil, err
}
// get the performer image directly
ret.Image, err = getStashSceneImage(ctx, s.config.StashServer.URL, scene.ID, s.client, s.globalConfig)
// convert first in files to file
if len(scene.Files) > 0 {
f := scene.Files[0].SceneFileType()
ret.File = &f
}
// get the scene image directly
ig := s.imageGetter()
ret.Image, err = getStashSceneImage(ctx, s.config.StashServer.URL, scene.ID, ig)
if err != nil {
return nil, err
}
@ -175,7 +224,7 @@ func (s *stashScraper) scrapeByName(ctx context.Context, name string, ty ScrapeC
err := client.Query(ctx, &q, vars)
if err != nil {
return nil, err
return nil, convertGraphqlError(err)
}
for _, scene := range q.FindScenes.Scenes {
@ -207,13 +256,41 @@ func (s *stashScraper) scrapeByName(ctx context.Context, name string, ty ScrapeC
return nil, ErrNotSupported
}
// stashVideoFile mirrors the video file fields returned by a remote stash
// server's GraphQL API.
type stashVideoFile struct {
	Size       int64   `graphql:"size" json:"size"`
	Duration   float64 `graphql:"duration" json:"duration"`
	VideoCodec string  `graphql:"video_codec" json:"video_codec"`
	AudioCodec string  `graphql:"audio_codec" json:"audio_codec"`
	Width      int     `graphql:"width" json:"width"`
	Height     int     `graphql:"height" json:"height"`
	Framerate  float64 `graphql:"frame_rate" json:"frame_rate"`
	Bitrate    int     `graphql:"bit_rate" json:"bit_rate"`
}

// SceneFileType converts the remote file representation into the local
// SceneFileType model; Size is rendered as its decimal string form.
func (f stashVideoFile) SceneFileType() models.SceneFileType {
	sizeStr := strconv.FormatInt(f.Size, 10)

	return models.SceneFileType{
		Size:       &sizeStr,
		Duration:   &f.Duration,
		VideoCodec: &f.VideoCodec,
		AudioCodec: &f.AudioCodec,
		Width:      &f.Width,
		Height:     &f.Height,
		Framerate:  &f.Framerate,
		Bitrate:    &f.Bitrate,
	}
}
type scrapedSceneStash struct {
ID string `graphql:"id" json:"id"`
Title *string `graphql:"title" json:"title"`
Details *string `graphql:"details" json:"details"`
URL *string `graphql:"url" json:"url"`
URLs []string `graphql:"urls" json:"urls"`
Date *string `graphql:"date" json:"date"`
File *models.SceneFileType `graphql:"file" json:"file"`
Files []stashVideoFile `graphql:"files" json:"files"`
Studio *scrapedStudioStash `graphql:"studio" json:"studio"`
Tags []*scrapedTagStash `graphql:"tags" json:"tags"`
Performers []*scrapedPerformerStash `graphql:"performers" json:"performers"`
@ -239,12 +316,16 @@ func (s *stashScraper) scrapeSceneByScene(ctx context.Context, scene *models.Sce
}
vars := map[string]interface{}{
"c": &input,
"c": input,
}
client := s.getStashClient()
if err := client.Query(ctx, &q, vars); err != nil {
return nil, err
return nil, convertGraphqlError(err)
}
if q.FindScene == nil {
return nil, nil
}
// need to copy back to a scraped scene
@ -254,7 +335,8 @@ func (s *stashScraper) scrapeSceneByScene(ctx context.Context, scene *models.Sce
}
// get the performer image directly
ret.Image, err = getStashSceneImage(ctx, s.config.StashServer.URL, q.FindScene.ID, s.client, s.globalConfig)
ig := s.imageGetter()
ret.Image, err = getStashSceneImage(ctx, s.config.StashServer.URL, q.FindScene.ID, ig)
if err != nil {
return nil, err
}

View file

@ -81,6 +81,6 @@ func LogExternalAccessError(err ExternalAccessError) {
"You probably forwarded a port from your router. At the very least, add a password to stash in the settings. \n"+
"Stash will not serve requests until you edit config.yml, remove the security_tripwire_accessed_from_public_internet key and restart stash. \n"+
"This behaviour can be overridden (but not recommended) by setting dangerous_allow_public_without_auth to true in config.yml. \n"+
"More information is available at https://docs.stashapp.cc/networking/authentication-required-when-accessing-stash-from-the-internet \n"+
"More information is available at https://docs.stashapp.cc/faq/setup/#protecting-against-accidental-exposure-to-the-internet \n"+
"Stash is not answering any other requests to protect your privacy.", net.IP(err).String())
}

View file

@ -600,6 +600,10 @@ func (db *Anonymiser) anonymisePerformers(ctx context.Context) error {
return err
}
if err := db.anonymiseCustomFields(ctx, goqu.T(performersCustomFieldsTable.GetTable()), "performer_id"); err != nil {
return err
}
return nil
}
@ -1050,3 +1054,73 @@ func (db *Anonymiser) obfuscateString(in string, dict string) string {
return out.String()
}
// anonymiseCustomFields obfuscates both the field name and the value of every
// row in the given custom-fields table (e.g. performers custom fields).
// idColumn is the foreign-key column identifying the owning object.
//
// Rows are processed in batches of up to 1000, each batch in its own
// transaction. Pagination is keyset-based on the (id, field) pair ordered
// ascending, so each batch resumes strictly after the last row updated by the
// previous one; the loop ends when a batch returns no rows.
func (db *Anonymiser) anonymiseCustomFields(ctx context.Context, table exp.IdentifierExpression, idColumn string) error {
	// keyset cursor: last (id, field) pair processed
	lastID := 0
	lastField := ""
	total := 0
	const logEvery = 10000

	for gotSome := true; gotSome; {
		if err := txn.WithTxn(ctx, db, func(ctx context.Context) error {
			query := dialect.From(table).Select(
				table.Col(idColumn),
				table.Col("field"),
				table.Col("value"),
			).Where(
				// tuple comparison: (id, field) > (lastID, lastField)
				goqu.L("("+idColumn+", field)").Gt(goqu.L("(?, ?)", lastID, lastField)),
			).Order(
				table.Col(idColumn).Asc(), table.Col("field").Asc(),
			).Limit(1000)

			// reset before the scan; set true again for every row seen so the
			// outer loop knows whether to fetch another batch
			gotSome = false

			const single = false
			return queryFunc(ctx, query, single, func(rows *sqlx.Rows) error {
				var (
					id    int
					field string
					value string
				)

				if err := rows.Scan(
					&id,
					&field,
					&value,
				); err != nil {
					return err
				}

				// obfuscate both the field name and its value
				set := goqu.Record{}
				set["field"] = db.obfuscateString(field, letters)
				set["value"] = db.obfuscateString(value, letters)

				if len(set) > 0 {
					// the row is keyed by (id, field), so both are needed in
					// the WHERE clause to target exactly this row
					stmt := dialect.Update(table).Set(set).Where(
						table.Col(idColumn).Eq(id),
						table.Col("field").Eq(field),
					)

					if _, err := exec(ctx, stmt); err != nil {
						return fmt.Errorf("anonymising %s: %w", table.GetTable(), err)
					}
				}

				// advance the keyset cursor to this row
				lastID = id
				lastField = field
				gotSome = true
				total++

				if total%logEvery == 0 {
					logger.Infof("Anonymised %d %s custom fields", total, table.GetTable())
				}

				return nil
			})
		}); err != nil {
			return err
		}
	}

	return nil
}

308
pkg/sqlite/custom_fields.go Normal file
View file

@ -0,0 +1,308 @@
package sqlite
import (
"context"
"fmt"
"regexp"
"strings"
"github.com/doug-martin/goqu/v9"
"github.com/doug-martin/goqu/v9/exp"
"github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/models"
)
// maxCustomFieldNameLength is the longest permitted custom field name.
const maxCustomFieldNameLength = 64

// customFieldsStore persists the custom field key/value pairs attached to an
// object (e.g. a performer). table is the custom-fields table and fk its
// foreign-key column referencing the owning object.
type customFieldsStore struct {
	table exp.IdentifierExpression
	fk    exp.IdentifierExpression
}

// deleteForID removes all custom fields belonging to the object with the
// given id.
func (s *customFieldsStore) deleteForID(ctx context.Context, id int) error {
	stmt := dialect.Delete(s.table).Where(s.fk.Eq(id))

	if _, err := exec(ctx, stmt); err != nil {
		return fmt.Errorf("deleting from %s: %w", s.table.GetTable(), err)
	}

	return nil
}
// SetCustomFields stores the custom fields of the object with the given id.
// A Full input replaces the stored set; a Partial input merges into it.
// A zero-value input is a no-op.
func (s *customFieldsStore) SetCustomFields(ctx context.Context, id int, values models.CustomFieldsInput) error {
	var (
		valMap  map[string]interface{}
		partial bool
	)

	switch {
	case values.Full != nil:
		valMap, partial = values.Full, false
	case values.Partial != nil:
		valMap, partial = values.Partial, true
	default:
		// neither full nor partial supplied: nothing to do
		return nil
	}

	if err := s.validateCustomFields(valMap); err != nil {
		return err
	}

	return s.setCustomFields(ctx, id, valMap, partial)
}
// validateCustomFields validates every field name in the map, returning the
// first failure encountered (wrapped with the offending name).
func (s *customFieldsStore) validateCustomFields(values map[string]interface{}) error {
	for name := range values {
		if err := s.validateCustomFieldName(name); err != nil {
			return fmt.Errorf("custom field name %q: %w", name, err)
		}
	}

	return nil
}
// validateCustomFieldName checks that a custom field name is non-empty, has
// no leading or trailing whitespace, and does not exceed
// maxCustomFieldNameLength characters.
func (s *customFieldsStore) validateCustomFieldName(fieldName string) error {
	trimmed := strings.TrimSpace(fieldName)

	switch {
	case trimmed == "":
		return fmt.Errorf("custom field name cannot be empty")
	case trimmed != fieldName:
		return fmt.Errorf("custom field name cannot have leading or trailing whitespace")
	case len(fieldName) > maxCustomFieldNameLength:
		return fmt.Errorf("custom field name must be less than %d characters", maxCustomFieldNameLength+1)
	}

	return nil
}
// getSQLValueFromCustomFieldInput converts a custom field input value into a
// value suitable for binding into SQL. Scalars pass through unchanged; arrays
// and objects are rejected.
func getSQLValueFromCustomFieldInput(input interface{}) (interface{}, error) {
	switch input.(type) {
	case []interface{}, map[string]interface{}:
		// TODO - in future it would be nice to convert to a JSON string
		// however, we would need some way to differentiate between a JSON string and a regular string
		// for now, we will not support objects and arrays
		return nil, fmt.Errorf("unsupported custom field value type: %T", input)
	}

	return input, nil
}
// sqlValueToValue converts a raw value scanned from the database into the
// value exposed to callers. Currently a passthrough, since only scalar values
// are stored.
func (s *customFieldsStore) sqlValueToValue(value interface{}) interface{} {
	// TODO - if we ever support objects and arrays we will need to add support here
	return value
}
// setCustomFields writes the given field/value map for the object with the
// given id. When partial is false, existing fields are deleted first so the
// map fully replaces the stored set; when true, the map is upserted over the
// existing fields.
func (s *customFieldsStore) setCustomFields(ctx context.Context, id int, values map[string]interface{}, partial bool) error {
	if !partial {
		// full replace: clear out whatever is currently stored
		if err := s.deleteForID(ctx, id); err != nil {
			return err
		}
	}

	if len(values) == 0 {
		return nil
	}

	fkCol := s.fk.GetCol().(string)

	records := make([]interface{}, 0, len(values))
	for field, value := range values {
		v, err := getSQLValueFromCustomFieldInput(value)
		if err != nil {
			return fmt.Errorf("getting SQL value for field %q: %w", field, err)
		}
		records = append(records, goqu.Record{"field": field, "value": v, fkCol: id})
	}

	// upsert: on (fk, field) conflict, overwrite the stored value
	stmt := dialect.Insert(s.table).Prepared(true).Cols(s.fk, "field", "value").
		OnConflict(goqu.DoUpdate(fkCol+", field", goqu.Record{"value": goqu.I("excluded.value")})).
		Rows(records...)

	if _, err := exec(ctx, stmt); err != nil {
		return fmt.Errorf("inserting custom fields: %w", err)
	}

	return nil
}
// GetCustomFields returns the custom field map for the object with the given
// id. Objects without custom fields yield an empty (non-nil) map.
func (s *customFieldsStore) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) {
	stmt := dialect.Select("field", "value").From(s.table).Where(s.fk.Eq(id))

	out := make(map[string]interface{})

	const single = false
	if err := queryFunc(ctx, stmt, single, func(rows *sqlx.Rows) error {
		var (
			field string
			value interface{}
		)

		if err := rows.Scan(&field, &value); err != nil {
			return fmt.Errorf("scanning custom fields: %w", err)
		}

		out[field] = s.sqlValueToValue(value)
		return nil
	}); err != nil {
		return nil, fmt.Errorf("getting custom fields: %w", err)
	}

	return out, nil
}
// GetCustomFieldsBulk returns a custom field map for each of the given ids,
// positionally matching the ids slice. Entries for ids with no custom fields
// are left nil.
func (s *customFieldsStore) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) {
	stmt := dialect.Select(s.fk.As("id"), "field", "value").From(s.table).Where(s.fk.In(ids))

	// map each id to its position in the result slice
	indexByID := make(map[int]int, len(ids))
	for i, id := range ids {
		indexByID[id] = i
	}

	out := make([]models.CustomFieldMap, len(ids))

	const single = false
	if err := queryFunc(ctx, stmt, single, func(rows *sqlx.Rows) error {
		var (
			id    int
			field string
			value interface{}
		)

		if err := rows.Scan(&id, &field, &value); err != nil {
			return fmt.Errorf("scanning custom fields: %w", err)
		}

		// lazily allocate the map for this id on its first row
		i := indexByID[id]
		if out[i] == nil {
			out[i] = make(map[string]interface{})
		}
		out[i][field] = s.sqlValueToValue(value)
		return nil
	}); err != nil {
		return nil, fmt.Errorf("getting custom fields: %w", err)
	}

	return out, nil
}
// customFieldsFilterHandler applies custom field criteria against a
// custom-fields table. table/fkCol identify the custom-fields table and its
// foreign-key column; idCol is the qualified id column of the owning table;
// c holds the criteria to apply.
type customFieldsFilterHandler struct {
	table string
	fkCol string
	c     []models.CustomFieldCriterionInput
	idCol string
}

// innerJoin joins the custom-fields table under alias `as`, matched on the
// owning id and the criterion's field name; rows lacking the field are
// excluded.
func (h *customFieldsFilterHandler) innerJoin(f *filterBuilder, as string, field string) {
	on := fmt.Sprintf("%s = %s.%s AND %s.field = ?", h.idCol, as, h.fkCol, as)
	f.addInnerJoin(h.table, as, on, field)
}

// leftJoin is like innerJoin but keeps rows lacking the field (value NULL),
// as needed for exclusion/null-style modifiers.
func (h *customFieldsFilterHandler) leftJoin(f *filterBuilder, as string, field string) {
	on := fmt.Sprintf("%s = %s.%s AND %s.field = ?", h.idCol, as, h.fkCol, as)
	f.addLeftJoin(h.table, as, on, field)
}
// handleCriterion adds the joins and where clauses for a single custom field
// criterion, using joinAs as the table alias for this criterion's join.
// Invalid input is reported through f.setError rather than returned.
func (h *customFieldsFilterHandler) handleCriterion(f *filterBuilder, joinAs string, cc models.CustomFieldCriterionInput) {
	// convert input values to their SQL representations
	cv := make([]interface{}, len(cc.Value))
	for i, v := range cc.Value {
		var err error
		cv[i], err = getSQLValueFromCustomFieldInput(v)
		if err != nil {
			f.setError(err)
			return
		}
	}

	switch cc.Modifier {
	case models.CriterionModifierEquals:
		h.innerJoin(f, joinAs, cc.Field)
		f.addWhere(fmt.Sprintf("%[1]s.value IN %s", joinAs, getInBinding(len(cv))), cv...)
	case models.CriterionModifierNotEquals:
		h.innerJoin(f, joinAs, cc.Field)
		f.addWhere(fmt.Sprintf("%[1]s.value NOT IN %s", joinAs, getInBinding(len(cv))), cv...)
	case models.CriterionModifierIncludes:
		// substring match on any of the values
		clauses := make([]sqlClause, len(cv))
		for i, v := range cv {
			clauses[i] = makeClause(fmt.Sprintf("%s.value LIKE ?", joinAs), fmt.Sprintf("%%%v%%", v))
		}
		h.innerJoin(f, joinAs, cc.Field)
		f.whereClauses = append(f.whereClauses, clauses...)
	case models.CriterionModifierExcludes:
		for _, v := range cv {
			f.addWhere(fmt.Sprintf("%[1]s.value NOT LIKE ?", joinAs), fmt.Sprintf("%%%v%%", v))
		}
		h.leftJoin(f, joinAs, cc.Field)
	case models.CriterionModifierMatchesRegex:
		for _, v := range cv {
			vs, ok := v.(string)
			if !ok {
				f.setError(fmt.Errorf("unsupported custom field criterion value type: %T", v))
				// fix: was missing - fall-through would validate/apply an empty pattern
				return
			}
			// validate the pattern up front so a bad regex is a clear error
			if _, err := regexp.Compile(vs); err != nil {
				f.setError(err)
				return
			}
			f.addWhere(fmt.Sprintf("(%s.value regexp ?)", joinAs), v)
		}
		h.innerJoin(f, joinAs, cc.Field)
	case models.CriterionModifierNotMatchesRegex:
		for _, v := range cv {
			vs, ok := v.(string)
			if !ok {
				f.setError(fmt.Errorf("unsupported custom field criterion value type: %T", v))
				// fix: was missing - fall-through would validate/apply an empty pattern
				return
			}
			if _, err := regexp.Compile(vs); err != nil {
				f.setError(err)
				return
			}
			f.addWhere(fmt.Sprintf("(%s.value IS NULL OR %[1]s.value NOT regexp ?)", joinAs), v)
		}
		h.leftJoin(f, joinAs, cc.Field)
	case models.CriterionModifierIsNull:
		// missing field, or present with a blank value
		h.leftJoin(f, joinAs, cc.Field)
		f.addWhere(fmt.Sprintf("%s.value IS NULL OR TRIM(%[1]s.value) = ''", joinAs))
	case models.CriterionModifierNotNull:
		h.innerJoin(f, joinAs, cc.Field)
		f.addWhere(fmt.Sprintf("TRIM(%[1]s.value) != ''", joinAs))
	case models.CriterionModifierBetween:
		if len(cv) != 2 {
			f.setError(fmt.Errorf("expected 2 values for custom field criterion modifier BETWEEN, got %d", len(cv)))
			return
		}
		h.innerJoin(f, joinAs, cc.Field)
		f.addWhere(fmt.Sprintf("%s.value BETWEEN ? AND ?", joinAs), cv[0], cv[1])
	case models.CriterionModifierNotBetween:
		// fix: arity check was missing - cv[0]/cv[1] would panic on bad input
		if len(cv) != 2 {
			f.setError(fmt.Errorf("expected 2 values for custom field criterion modifier NOT_BETWEEN, got %d", len(cv)))
			return
		}
		h.innerJoin(f, joinAs, cc.Field)
		f.addWhere(fmt.Sprintf("%s.value NOT BETWEEN ? AND ?", joinAs), cv[0], cv[1])
	case models.CriterionModifierLessThan:
		if len(cv) != 1 {
			f.setError(fmt.Errorf("expected 1 value for custom field criterion modifier LESS_THAN, got %d", len(cv)))
			return
		}
		h.innerJoin(f, joinAs, cc.Field)
		f.addWhere(fmt.Sprintf("%s.value < ?", joinAs), cv[0])
	case models.CriterionModifierGreaterThan:
		if len(cv) != 1 {
			// fix: error message previously said LESS_THAN (copy-paste)
			f.setError(fmt.Errorf("expected 1 value for custom field criterion modifier GREATER_THAN, got %d", len(cv)))
			return
		}
		h.innerJoin(f, joinAs, cc.Field)
		f.addWhere(fmt.Sprintf("%s.value > ?", joinAs), cv[0])
	default:
		f.setError(fmt.Errorf("unsupported custom field criterion modifier: %s", cc.Modifier))
	}
}
// handle applies every configured custom-field criterion to the filter
// builder. Each criterion is handled against its own uniquely-aliased join
// of the custom fields table so that criteria on different fields can be
// combined within a single query.
func (h *customFieldsFilterHandler) handle(ctx context.Context, f *filterBuilder) {
	if len(h.c) == 0 {
		return
	}

	for idx, criterion := range h.c {
		// unique alias per criterion, e.g. custom_fields_0, custom_fields_1, ...
		alias := fmt.Sprintf("custom_fields_%d", idx)
		h.handleCriterion(f, alias, criterion)
	}
}

View file

@ -0,0 +1,176 @@
//go:build integration
// +build integration
package sqlite_test
import (
"context"
"testing"
"github.com/stashapp/stash/pkg/models"
"github.com/stretchr/testify/assert"
)
// TestSetCustomFields exercises SetCustomFields on the performer store:
// full replacement, partial merge/overwrite, and key validation failures
// (leading/trailing whitespace, over-long keys, empty keys).
func TestSetCustomFields(t *testing.T) {
	performerIdx := performerIdx1WithScene

	// mergeCustomFields returns the performer's seeded custom fields with the
	// given entries overlaid - the expected stored state after a Partial update.
	mergeCustomFields := func(i map[string]interface{}) map[string]interface{} {
		m := getPerformerCustomFields(performerIdx)
		for k, v := range i {
			m[k] = v
		}
		return m
	}

	tests := []struct {
		name     string
		input    models.CustomFieldsInput
		expected map[string]interface{} // expected stored fields after the call (ignored when wantErr)
		wantErr  bool
	}{
		{
			"valid full",
			models.CustomFieldsInput{
				Full: map[string]interface{}{
					"key": "value",
				},
			},
			// Full replaces everything previously stored
			map[string]interface{}{
				"key": "value",
			},
			false,
		},
		{
			"valid partial",
			models.CustomFieldsInput{
				Partial: map[string]interface{}{
					"key": "value",
				},
			},
			// Partial merges into the existing fields
			mergeCustomFields(map[string]interface{}{
				"key": "value",
			}),
			false,
		},
		{
			"valid partial overwrite",
			models.CustomFieldsInput{
				Partial: map[string]interface{}{
					"real": float64(4.56),
				},
			},
			// an existing key supplied in Partial is overwritten
			mergeCustomFields(map[string]interface{}{
				"real": float64(4.56),
			}),
			false,
		},
		{
			"leading space full",
			models.CustomFieldsInput{
				Full: map[string]interface{}{
					" key": "value",
				},
			},
			nil,
			true,
		},
		{
			"trailing space full",
			models.CustomFieldsInput{
				Full: map[string]interface{}{
					"key ": "value",
				},
			},
			nil,
			true,
		},
		{
			"leading space partial",
			models.CustomFieldsInput{
				Partial: map[string]interface{}{
					" key": "value",
				},
			},
			nil,
			true,
		},
		{
			"trailing space partial",
			models.CustomFieldsInput{
				Partial: map[string]interface{}{
					"key ": "value",
				},
			},
			nil,
			true,
		},
		{
			// 65-character key: exceeds the varchar(64) field limit
			"big key full",
			models.CustomFieldsInput{
				Full: map[string]interface{}{
					"12345678901234567890123456789012345678901234567890123456789012345": "value",
				},
			},
			nil,
			true,
		},
		{
			"big key partial",
			models.CustomFieldsInput{
				Partial: map[string]interface{}{
					"12345678901234567890123456789012345678901234567890123456789012345": "value",
				},
			},
			nil,
			true,
		},
		{
			"empty key full",
			models.CustomFieldsInput{
				Full: map[string]interface{}{
					"": "value",
				},
			},
			nil,
			true,
		},
		{
			"empty key partial",
			models.CustomFieldsInput{
				Partial: map[string]interface{}{
					"": "value",
				},
			},
			nil,
			true,
		},
	}

	// use performer custom fields store
	store := db.Performer
	id := performerIDs[performerIdx]

	assert := assert.New(t)
	for _, tt := range tests {
		runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
			err := store.SetCustomFields(ctx, id, tt.input)
			if (err != nil) != tt.wantErr {
				t.Errorf("SetCustomFields() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if tt.wantErr {
				return
			}

			// read back and verify the persisted state
			actual, err := store.GetCustomFields(ctx, id)
			if err != nil {
				t.Errorf("GetCustomFields() error = %v", err)
				return
			}

			assert.Equal(tt.expected, actual)
		})
	}
}

View file

@ -34,7 +34,7 @@ const (
cacheSizeEnv = "STASH_SQLITE_CACHE_SIZE"
)
var appSchemaVersion uint = 70
var appSchemaVersion uint = 71
//go:embed migrations/*.sql
var migrationsBox embed.FS

View file

@ -95,6 +95,7 @@ type join struct {
as string
onClause string
joinType string
args []interface{}
}
// equals returns true if the other join alias/table is equal to this one
@ -229,12 +230,13 @@ func (f *filterBuilder) not(n *filterBuilder) {
// The AS is omitted if as is empty.
// This method does not add a join if it its alias/table name is already
// present in another existing join.
func (f *filterBuilder) addLeftJoin(table, as, onClause string) {
func (f *filterBuilder) addLeftJoin(table, as, onClause string, args ...interface{}) {
newJoin := join{
table: table,
as: as,
onClause: onClause,
joinType: "LEFT",
args: args,
}
f.joins.add(newJoin)
@ -245,12 +247,13 @@ func (f *filterBuilder) addLeftJoin(table, as, onClause string) {
// The AS is omitted if as is empty.
// This method does not add a join if it its alias/table name is already
// present in another existing join.
func (f *filterBuilder) addInnerJoin(table, as, onClause string) {
func (f *filterBuilder) addInnerJoin(table, as, onClause string, args ...interface{}) {
newJoin := join{
table: table,
as: as,
onClause: onClause,
joinType: "INNER",
args: args,
}
f.joins.add(newJoin)

View file

@ -0,0 +1,9 @@
-- Arbitrary per-performer key/value custom fields.
-- One row per (performer, field); value is stored as a BLOB so any
-- scalar type can be persisted. Rows are removed with their performer.
CREATE TABLE `performer_custom_fields` (
  `performer_id` integer NOT NULL,
  `field` varchar(64) NOT NULL,
  `value` BLOB NOT NULL,
  PRIMARY KEY (`performer_id`, `field`),
  foreign key(`performer_id`) references `performers`(`id`) on delete CASCADE
);

-- Supports filtering/sorting performers by a given field's value.
CREATE INDEX `index_performer_custom_fields_field_value` ON `performer_custom_fields` (`field`, `value`);

View file

@ -226,6 +226,7 @@ var (
type PerformerStore struct {
blobJoinQueryBuilder
customFieldsStore
tableMgr *table
}
@ -236,6 +237,10 @@ func NewPerformerStore(blobStore *BlobStore) *PerformerStore {
blobStore: blobStore,
joinTable: performerTable,
},
customFieldsStore: customFieldsStore{
table: performersCustomFieldsTable,
fk: performersCustomFieldsTable.Col(performerIDColumn),
},
tableMgr: performerTableMgr,
}
}
@ -248,9 +253,9 @@ func (qb *PerformerStore) selectDataset() *goqu.SelectDataset {
return dialect.From(qb.table()).Select(qb.table().All())
}
func (qb *PerformerStore) Create(ctx context.Context, newObject *models.Performer) error {
func (qb *PerformerStore) Create(ctx context.Context, newObject *models.CreatePerformerInput) error {
var r performerRow
r.fromPerformer(*newObject)
r.fromPerformer(*newObject.Performer)
id, err := qb.tableMgr.insertID(ctx, r)
if err != nil {
@ -282,12 +287,17 @@ func (qb *PerformerStore) Create(ctx context.Context, newObject *models.Performe
}
}
const partial = false
if err := qb.setCustomFields(ctx, id, newObject.CustomFields, partial); err != nil {
return err
}
updated, err := qb.find(ctx, id)
if err != nil {
return fmt.Errorf("finding after create: %w", err)
}
*newObject = *updated
*newObject.Performer = *updated
return nil
}
@ -330,12 +340,16 @@ func (qb *PerformerStore) UpdatePartial(ctx context.Context, id int, partial mod
}
}
if err := qb.SetCustomFields(ctx, id, partial.CustomFields); err != nil {
return nil, err
}
return qb.find(ctx, id)
}
func (qb *PerformerStore) Update(ctx context.Context, updatedObject *models.Performer) error {
func (qb *PerformerStore) Update(ctx context.Context, updatedObject *models.UpdatePerformerInput) error {
var r performerRow
r.fromPerformer(*updatedObject)
r.fromPerformer(*updatedObject.Performer)
if err := qb.tableMgr.updateByID(ctx, updatedObject.ID, r); err != nil {
return err
@ -365,6 +379,10 @@ func (qb *PerformerStore) Update(ctx context.Context, updatedObject *models.Perf
}
}
if err := qb.SetCustomFields(ctx, updatedObject.ID, updatedObject.CustomFields); err != nil {
return err
}
return nil
}

View file

@ -203,6 +203,13 @@ func (qb *performerFilterHandler) criterionHandler() criterionHandler {
performerRepository.tags.innerJoin(f, "performer_tag", "performers.id")
},
},
&customFieldsFilterHandler{
table: performersCustomFieldsTable.GetTable(),
fkCol: performerIDColumn,
c: filter.CustomFields,
idCol: "performers.id",
},
}
}

View file

@ -16,6 +16,12 @@ import (
"github.com/stretchr/testify/assert"
)
var testCustomFields = map[string]interface{}{
"string": "aaa",
"int": int64(123), // int64 to match the type of the field in the database
"real": 1.23,
}
func loadPerformerRelationships(ctx context.Context, expected models.Performer, actual *models.Performer) error {
if expected.Aliases.Loaded() {
if err := actual.LoadAliases(ctx, db.Performer); err != nil {
@ -81,57 +87,62 @@ func Test_PerformerStore_Create(t *testing.T) {
tests := []struct {
name string
newObject models.Performer
newObject models.CreatePerformerInput
wantErr bool
}{
{
"full",
models.Performer{
Name: name,
Disambiguation: disambiguation,
Gender: &gender,
URLs: models.NewRelatedStrings(urls),
Birthdate: &birthdate,
Ethnicity: ethnicity,
Country: country,
EyeColor: eyeColor,
Height: &height,
Measurements: measurements,
FakeTits: fakeTits,
PenisLength: &penisLength,
Circumcised: &circumcised,
CareerLength: careerLength,
Tattoos: tattoos,
Piercings: piercings,
Favorite: favorite,
Rating: &rating,
Details: details,
DeathDate: &deathdate,
HairColor: hairColor,
Weight: &weight,
IgnoreAutoTag: ignoreAutoTag,
TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithPerformer], tagIDs[tagIdx1WithDupName]}),
Aliases: models.NewRelatedStrings(aliases),
StashIDs: models.NewRelatedStashIDs([]models.StashID{
{
StashID: stashID1,
Endpoint: endpoint1,
},
{
StashID: stashID2,
Endpoint: endpoint2,
},
}),
CreatedAt: createdAt,
UpdatedAt: updatedAt,
models.CreatePerformerInput{
Performer: &models.Performer{
Name: name,
Disambiguation: disambiguation,
Gender: &gender,
URLs: models.NewRelatedStrings(urls),
Birthdate: &birthdate,
Ethnicity: ethnicity,
Country: country,
EyeColor: eyeColor,
Height: &height,
Measurements: measurements,
FakeTits: fakeTits,
PenisLength: &penisLength,
Circumcised: &circumcised,
CareerLength: careerLength,
Tattoos: tattoos,
Piercings: piercings,
Favorite: favorite,
Rating: &rating,
Details: details,
DeathDate: &deathdate,
HairColor: hairColor,
Weight: &weight,
IgnoreAutoTag: ignoreAutoTag,
TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithPerformer], tagIDs[tagIdx1WithDupName]}),
Aliases: models.NewRelatedStrings(aliases),
StashIDs: models.NewRelatedStashIDs([]models.StashID{
{
StashID: stashID1,
Endpoint: endpoint1,
},
{
StashID: stashID2,
Endpoint: endpoint2,
},
}),
CreatedAt: createdAt,
UpdatedAt: updatedAt,
},
CustomFields: testCustomFields,
},
false,
},
{
"invalid tag id",
models.Performer{
Name: name,
TagIDs: models.NewRelatedIDs([]int{invalidID}),
models.CreatePerformerInput{
Performer: &models.Performer{
Name: name,
TagIDs: models.NewRelatedIDs([]int{invalidID}),
},
},
true,
},
@ -155,16 +166,16 @@ func Test_PerformerStore_Create(t *testing.T) {
assert.NotZero(p.ID)
copy := tt.newObject
copy := *tt.newObject.Performer
copy.ID = p.ID
// load relationships
if err := loadPerformerRelationships(ctx, copy, &p); err != nil {
if err := loadPerformerRelationships(ctx, copy, p.Performer); err != nil {
t.Errorf("loadPerformerRelationships() error = %v", err)
return
}
assert.Equal(copy, p)
assert.Equal(copy, *p.Performer)
// ensure can find the performer
found, err := qb.Find(ctx, p.ID)
@ -183,6 +194,15 @@ func Test_PerformerStore_Create(t *testing.T) {
}
assert.Equal(copy, *found)
// ensure custom fields are set
cf, err := qb.GetCustomFields(ctx, p.ID)
if err != nil {
t.Errorf("PerformerStore.GetCustomFields() error = %v", err)
return
}
assert.Equal(tt.newObject.CustomFields, cf)
return
})
}
@ -228,77 +248,109 @@ func Test_PerformerStore_Update(t *testing.T) {
tests := []struct {
name string
updatedObject *models.Performer
updatedObject models.UpdatePerformerInput
wantErr bool
}{
{
"full",
&models.Performer{
ID: performerIDs[performerIdxWithGallery],
Name: name,
Disambiguation: disambiguation,
Gender: &gender,
URLs: models.NewRelatedStrings(urls),
Birthdate: &birthdate,
Ethnicity: ethnicity,
Country: country,
EyeColor: eyeColor,
Height: &height,
Measurements: measurements,
FakeTits: fakeTits,
PenisLength: &penisLength,
Circumcised: &circumcised,
CareerLength: careerLength,
Tattoos: tattoos,
Piercings: piercings,
Favorite: favorite,
Rating: &rating,
Details: details,
DeathDate: &deathdate,
HairColor: hairColor,
Weight: &weight,
IgnoreAutoTag: ignoreAutoTag,
Aliases: models.NewRelatedStrings(aliases),
TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithPerformer], tagIDs[tagIdx1WithDupName]}),
StashIDs: models.NewRelatedStashIDs([]models.StashID{
{
StashID: stashID1,
Endpoint: endpoint1,
},
{
StashID: stashID2,
Endpoint: endpoint2,
},
}),
CreatedAt: createdAt,
UpdatedAt: updatedAt,
models.UpdatePerformerInput{
Performer: &models.Performer{
ID: performerIDs[performerIdxWithGallery],
Name: name,
Disambiguation: disambiguation,
Gender: &gender,
URLs: models.NewRelatedStrings(urls),
Birthdate: &birthdate,
Ethnicity: ethnicity,
Country: country,
EyeColor: eyeColor,
Height: &height,
Measurements: measurements,
FakeTits: fakeTits,
PenisLength: &penisLength,
Circumcised: &circumcised,
CareerLength: careerLength,
Tattoos: tattoos,
Piercings: piercings,
Favorite: favorite,
Rating: &rating,
Details: details,
DeathDate: &deathdate,
HairColor: hairColor,
Weight: &weight,
IgnoreAutoTag: ignoreAutoTag,
Aliases: models.NewRelatedStrings(aliases),
TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithPerformer], tagIDs[tagIdx1WithDupName]}),
StashIDs: models.NewRelatedStashIDs([]models.StashID{
{
StashID: stashID1,
Endpoint: endpoint1,
},
{
StashID: stashID2,
Endpoint: endpoint2,
},
}),
CreatedAt: createdAt,
UpdatedAt: updatedAt,
},
},
false,
},
{
"clear nullables",
&models.Performer{
ID: performerIDs[performerIdxWithGallery],
Aliases: models.NewRelatedStrings([]string{}),
URLs: models.NewRelatedStrings([]string{}),
TagIDs: models.NewRelatedIDs([]int{}),
StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
models.UpdatePerformerInput{
Performer: &models.Performer{
ID: performerIDs[performerIdxWithGallery],
Aliases: models.NewRelatedStrings([]string{}),
URLs: models.NewRelatedStrings([]string{}),
TagIDs: models.NewRelatedIDs([]int{}),
StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
},
},
false,
},
{
"clear tag ids",
&models.Performer{
ID: performerIDs[sceneIdxWithTag],
TagIDs: models.NewRelatedIDs([]int{}),
models.UpdatePerformerInput{
Performer: &models.Performer{
ID: performerIDs[sceneIdxWithTag],
TagIDs: models.NewRelatedIDs([]int{}),
},
},
false,
},
{
"set custom fields",
models.UpdatePerformerInput{
Performer: &models.Performer{
ID: performerIDs[performerIdxWithGallery],
},
CustomFields: models.CustomFieldsInput{
Full: testCustomFields,
},
},
false,
},
{
"clear custom fields",
models.UpdatePerformerInput{
Performer: &models.Performer{
ID: performerIDs[performerIdxWithGallery],
},
CustomFields: models.CustomFieldsInput{
Full: map[string]interface{}{},
},
},
false,
},
{
"invalid tag id",
&models.Performer{
ID: performerIDs[sceneIdxWithGallery],
TagIDs: models.NewRelatedIDs([]int{invalidID}),
models.UpdatePerformerInput{
Performer: &models.Performer{
ID: performerIDs[sceneIdxWithGallery],
TagIDs: models.NewRelatedIDs([]int{invalidID}),
},
},
true,
},
@ -309,9 +361,9 @@ func Test_PerformerStore_Update(t *testing.T) {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
assert := assert.New(t)
copy := *tt.updatedObject
copy := *tt.updatedObject.Performer
if err := qb.Update(ctx, tt.updatedObject); (err != nil) != tt.wantErr {
if err := qb.Update(ctx, &tt.updatedObject); (err != nil) != tt.wantErr {
t.Errorf("PerformerStore.Update() error = %v, wantErr %v", err, tt.wantErr)
}
@ -331,6 +383,17 @@ func Test_PerformerStore_Update(t *testing.T) {
}
assert.Equal(copy, *s)
// ensure custom fields are correct
if tt.updatedObject.CustomFields.Full != nil {
cf, err := qb.GetCustomFields(ctx, tt.updatedObject.ID)
if err != nil {
t.Errorf("PerformerStore.GetCustomFields() error = %v", err)
return
}
assert.Equal(tt.updatedObject.CustomFields.Full, cf)
}
})
}
}
@ -573,6 +636,79 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) {
}
}
// Test_PerformerStore_UpdatePartialCustomFields verifies that UpdatePartial
// applies CustomFieldsInput correctly: Full replaces the stored fields
// wholesale (an empty map clears them), while Partial merges into the
// already-stored fields.
func Test_PerformerStore_UpdatePartialCustomFields(t *testing.T) {
	tests := []struct {
		name     string
		id       int
		partial  models.PerformerPartial
		expected map[string]interface{} // nil to use the partial
	}{
		{
			"set custom fields",
			performerIDs[performerIdxWithGallery],
			models.PerformerPartial{
				CustomFields: models.CustomFieldsInput{
					Full: testCustomFields,
				},
			},
			nil,
		},
		{
			"clear custom fields",
			performerIDs[performerIdxWithGallery],
			models.PerformerPartial{
				CustomFields: models.CustomFieldsInput{
					Full: map[string]interface{}{},
				},
			},
			nil,
		},
		{
			"partial custom fields",
			performerIDs[performerIdxWithGallery],
			models.PerformerPartial{
				CustomFields: models.CustomFieldsInput{
					Partial: map[string]interface{}{
						"string":    "bbb",
						"new_field": "new",
					},
				},
			},
			// expected merge of the seeded fields with the partial values.
			// NOTE(review): assumes the seeded values for this performer are
			// int=3, real=1.3 (see getPerformerCustomFields) - confirm if the
			// fixture indexes change.
			map[string]interface{}{
				"int":       int64(3),
				"real":      1.3,
				"string":    "bbb",
				"new_field": "new",
			},
		},
	}

	for _, tt := range tests {
		qb := db.Performer

		runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
			assert := assert.New(t)

			_, err := qb.UpdatePartial(ctx, tt.id, tt.partial)
			if err != nil {
				t.Errorf("PerformerStore.UpdatePartial() error = %v", err)
				return
			}

			// ensure custom fields are correct
			cf, err := qb.GetCustomFields(ctx, tt.id)
			if err != nil {
				t.Errorf("PerformerStore.GetCustomFields() error = %v", err)
				return
			}

			if tt.expected == nil {
				assert.Equal(tt.partial.CustomFields.Full, cf)
			} else {
				assert.Equal(tt.expected, cf)
			}
		})
	}
}
func TestPerformerFindBySceneID(t *testing.T) {
withTxn(func(ctx context.Context) error {
pqb := db.Performer
@ -1042,6 +1178,242 @@ func TestPerformerQuery(t *testing.T) {
}
}
// TestPerformerQueryCustomFields tests querying performers with custom field
// criteria for each supported modifier, including the error paths for
// invalid regular expressions.
//
// Fix: the NOT NULL case was named "null", duplicating the IS NULL case's
// subtest name in t.Run output; it is now named "not null".
func TestPerformerQueryCustomFields(t *testing.T) {
	tests := []struct {
		name        string
		filter      *models.PerformerFilterType
		includeIdxs []int // performer indexes expected in the result
		excludeIdxs []int // performer indexes expected to be absent
		wantErr     bool
	}{
		{
			"equals",
			&models.PerformerFilterType{
				CustomFields: []models.CustomFieldCriterionInput{
					{
						Field:    "string",
						Modifier: models.CriterionModifierEquals,
						Value:    []any{getPerformerStringValue(performerIdxWithGallery, "custom")},
					},
				},
			},
			[]int{performerIdxWithGallery},
			nil,
			false,
		},
		{
			// Name criterion narrows the result so the exclusion is meaningful
			"not equals",
			&models.PerformerFilterType{
				Name: &models.StringCriterionInput{
					Value:    getPerformerStringValue(performerIdxWithGallery, "Name"),
					Modifier: models.CriterionModifierEquals,
				},
				CustomFields: []models.CustomFieldCriterionInput{
					{
						Field:    "string",
						Modifier: models.CriterionModifierNotEquals,
						Value:    []any{getPerformerStringValue(performerIdxWithGallery, "custom")},
					},
				},
			},
			nil,
			[]int{performerIdxWithGallery},
			false,
		},
		{
			// substring of the seeded value ([9:] drops the prefix)
			"includes",
			&models.PerformerFilterType{
				CustomFields: []models.CustomFieldCriterionInput{
					{
						Field:    "string",
						Modifier: models.CriterionModifierIncludes,
						Value:    []any{getPerformerStringValue(performerIdxWithGallery, "custom")[9:]},
					},
				},
			},
			[]int{performerIdxWithGallery},
			nil,
			false,
		},
		{
			"excludes",
			&models.PerformerFilterType{
				Name: &models.StringCriterionInput{
					Value:    getPerformerStringValue(performerIdxWithGallery, "Name"),
					Modifier: models.CriterionModifierEquals,
				},
				CustomFields: []models.CustomFieldCriterionInput{
					{
						Field:    "string",
						Modifier: models.CriterionModifierExcludes,
						Value:    []any{getPerformerStringValue(performerIdxWithGallery, "custom")[9:]},
					},
				},
			},
			nil,
			[]int{performerIdxWithGallery},
			false,
		},
		{
			"regex",
			&models.PerformerFilterType{
				CustomFields: []models.CustomFieldCriterionInput{
					{
						Field:    "string",
						Modifier: models.CriterionModifierMatchesRegex,
						Value:    []any{".*13_custom"},
					},
				},
			},
			[]int{performerIdxWithGallery},
			nil,
			false,
		},
		{
			// "[" is an invalid regex and must surface an error
			"invalid regex",
			&models.PerformerFilterType{
				CustomFields: []models.CustomFieldCriterionInput{
					{
						Field:    "string",
						Modifier: models.CriterionModifierMatchesRegex,
						Value:    []any{"["},
					},
				},
			},
			nil,
			nil,
			true,
		},
		{
			"not matches regex",
			&models.PerformerFilterType{
				Name: &models.StringCriterionInput{
					Value:    getPerformerStringValue(performerIdxWithGallery, "Name"),
					Modifier: models.CriterionModifierEquals,
				},
				CustomFields: []models.CustomFieldCriterionInput{
					{
						Field:    "string",
						Modifier: models.CriterionModifierNotMatchesRegex,
						Value:    []any{".*13_custom"},
					},
				},
			},
			nil,
			[]int{performerIdxWithGallery},
			false,
		},
		{
			"invalid not matches regex",
			&models.PerformerFilterType{
				CustomFields: []models.CustomFieldCriterionInput{
					{
						Field:    "string",
						Modifier: models.CriterionModifierNotMatchesRegex,
						Value:    []any{"["},
					},
				},
			},
			nil,
			nil,
			true,
		},
		{
			// a field the performer does not have is treated as null
			"null",
			&models.PerformerFilterType{
				Name: &models.StringCriterionInput{
					Value:    getPerformerStringValue(performerIdxWithGallery, "Name"),
					Modifier: models.CriterionModifierEquals,
				},
				CustomFields: []models.CustomFieldCriterionInput{
					{
						Field:    "not existing",
						Modifier: models.CriterionModifierIsNull,
					},
				},
			},
			[]int{performerIdxWithGallery},
			nil,
			false,
		},
		{
			"not null",
			&models.PerformerFilterType{
				Name: &models.StringCriterionInput{
					Value:    getPerformerStringValue(performerIdxWithGallery, "Name"),
					Modifier: models.CriterionModifierEquals,
				},
				CustomFields: []models.CustomFieldCriterionInput{
					{
						Field:    "string",
						Modifier: models.CriterionModifierNotNull,
					},
				},
			},
			[]int{performerIdxWithGallery},
			nil,
			false,
		},
		{
			"between",
			&models.PerformerFilterType{
				CustomFields: []models.CustomFieldCriterionInput{
					{
						Field:    "real",
						Modifier: models.CriterionModifierBetween,
						Value:    []any{0.05, 0.15},
					},
				},
			},
			[]int{performerIdx1WithScene},
			nil,
			false,
		},
		{
			"not between",
			&models.PerformerFilterType{
				Name: &models.StringCriterionInput{
					Value:    getPerformerStringValue(performerIdx1WithScene, "Name"),
					Modifier: models.CriterionModifierEquals,
				},
				CustomFields: []models.CustomFieldCriterionInput{
					{
						Field:    "real",
						Modifier: models.CriterionModifierNotBetween,
						Value:    []any{0.05, 0.15},
					},
				},
			},
			nil,
			[]int{performerIdx1WithScene},
			false,
		},
	}

	for _, tt := range tests {
		runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
			assert := assert.New(t)

			performers, _, err := db.Performer.Query(ctx, tt.filter, nil)
			if (err != nil) != tt.wantErr {
				t.Errorf("PerformerStore.Query() error = %v, wantErr %v", err, tt.wantErr)
				return
			}

			ids := performersToIDs(performers)

			include := indexesToIDs(performerIDs, tt.includeIdxs)
			exclude := indexesToIDs(performerIDs, tt.excludeIdxs)

			for _, i := range include {
				assert.Contains(ids, i)
			}
			for _, e := range exclude {
				assert.NotContains(ids, e)
			}
		})
	}
}
func TestPerformerQueryPenisLength(t *testing.T) {
var upper = 4.0
@ -1172,7 +1544,7 @@ func TestPerformerUpdatePerformerImage(t *testing.T) {
performer := models.Performer{
Name: name,
}
err := qb.Create(ctx, &performer)
err := qb.Create(ctx, &models.CreatePerformerInput{Performer: &performer})
if err != nil {
return fmt.Errorf("Error creating performer: %s", err.Error())
}
@ -1680,7 +2052,7 @@ func TestPerformerStashIDs(t *testing.T) {
performer := &models.Performer{
Name: name,
}
if err := qb.Create(ctx, performer); err != nil {
if err := qb.Create(ctx, &models.CreatePerformerInput{Performer: performer}); err != nil {
return fmt.Errorf("Error creating performer: %s", err.Error())
}

View file

@ -133,6 +133,9 @@ func (qb *queryBuilder) join(table, as, onClause string) {
func (qb *queryBuilder) addJoins(joins ...join) {
qb.joins.add(joins...)
for _, j := range joins {
qb.args = append(qb.args, j.args...)
}
}
func (qb *queryBuilder) addFilter(f *filterBuilder) error {
@ -151,6 +154,9 @@ func (qb *queryBuilder) addFilter(f *filterBuilder) error {
qb.args = append(args, qb.args...)
}
// add joins here to insert args
qb.addJoins(f.getAllJoins()...)
clause, args = f.generateWhereClauses()
if len(clause) > 0 {
qb.addWhere(clause)
@ -169,8 +175,6 @@ func (qb *queryBuilder) addFilter(f *filterBuilder) error {
qb.addArg(args...)
}
qb.addJoins(f.getAllJoins()...)
return nil
}

View file

@ -222,8 +222,8 @@ func (r *repository) innerJoin(j joiner, as string, parentIDCol string) {
}
type joiner interface {
addLeftJoin(table, as, onClause string)
addInnerJoin(table, as, onClause string)
addLeftJoin(table, as, onClause string, args ...interface{})
addInnerJoin(table, as, onClause string, args ...interface{})
}
type joinRepository struct {

View file

@ -367,6 +367,7 @@ var sceneMarkerSortOptions = sortOptions{
"scenes_updated_at",
"seconds",
"updated_at",
"duration",
}
func (qb *SceneMarkerStore) setSceneMarkerSort(query *queryBuilder, findFilter *models.FindFilterType) error {
@ -386,6 +387,9 @@ func (qb *SceneMarkerStore) setSceneMarkerSort(query *queryBuilder, findFilter *
case "title":
query.join(tagTable, "", "scene_markers.primary_tag_id = tags.id")
query.sortAndPagination += " ORDER BY COALESCE(NULLIF(scene_markers.title,''), tags.name) COLLATE NATURAL_CI " + direction
case "duration":
sort = "(scene_markers.end_seconds - scene_markers.seconds)"
query.sortAndPagination += getSort(sort, direction, sceneMarkerTable)
default:
query.sortAndPagination += getSort(sort, direction, sceneMarkerTable)
}

View file

@ -41,6 +41,7 @@ func (qb *sceneMarkerFilterHandler) criterionHandler() criterionHandler {
qb.sceneTagsCriterionHandler(sceneMarkerFilter.SceneTags),
qb.performersCriterionHandler(sceneMarkerFilter.Performers),
qb.scenesCriterionHandler(sceneMarkerFilter.Scenes),
floatCriterionHandler(sceneMarkerFilter.Duration, "COALESCE(scene_markers.end_seconds - scene_markers.seconds, NULL)", nil),
&timestampCriterionHandler{sceneMarkerFilter.CreatedAt, "scene_markers.created_at", nil},
&timestampCriterionHandler{sceneMarkerFilter.UpdatedAt, "scene_markers.updated_at", nil},
&dateCriterionHandler{sceneMarkerFilter.SceneDate, "scenes.date", qb.joinScenes},

View file

@ -391,6 +391,116 @@ func TestMarkerQuerySceneTags(t *testing.T) {
})
}
// markersToIDs returns the database IDs of the given scene markers,
// preserving order.
//
// Fix: the original named the slice parameter "i" and then shadowed it with
// the range index ("for i, v := range i"), which compiled but was needlessly
// confusing; the parameter is now named descriptively.
func markersToIDs(markers []*models.SceneMarker) []int {
	ret := make([]int, len(markers))
	for i, m := range markers {
		ret[i] = m.ID
	}
	return ret
}
// TestMarkerQueryDuration tests filtering scene markers by duration
// (end_seconds - seconds). Only markerIdxWithDuration and
// markerIdx2WithDuration are seeded with end_seconds (see
// getMarkerEndSeconds), so every other marker has a null duration.
// NOTE(review): the numeric comparisons below use the marker index as the
// duration value, which assumes seconds is zero for the seeded markers -
// confirm against the marker fixtures.
func TestMarkerQueryDuration(t *testing.T) {
	type test struct {
		name         string
		markerFilter *models.SceneMarkerFilterType
		include      []int // marker indexes expected in the result
		exclude      []int // marker indexes expected to be absent
	}

	cases := []test{
		{
			"is null",
			&models.SceneMarkerFilterType{
				Duration: &models.FloatCriterionInput{
					Modifier: models.CriterionModifierIsNull,
				},
			},
			[]int{markerIdxWithScene},
			[]int{markerIdxWithDuration},
		},
		{
			"not null",
			&models.SceneMarkerFilterType{
				Duration: &models.FloatCriterionInput{
					Modifier: models.CriterionModifierNotNull,
				},
			},
			[]int{markerIdxWithDuration},
			[]int{markerIdxWithScene},
		},
		{
			"equals",
			&models.SceneMarkerFilterType{
				Duration: &models.FloatCriterionInput{
					Modifier: models.CriterionModifierEquals,
					Value:    markerIdxWithDuration,
				},
			},
			[]int{markerIdxWithDuration},
			[]int{markerIdx2WithDuration, markerIdxWithScene},
		},
		{
			"not equals",
			&models.SceneMarkerFilterType{
				Duration: &models.FloatCriterionInput{
					Modifier: models.CriterionModifierNotEquals,
					Value:    markerIdx2WithDuration,
				},
			},
			[]int{markerIdxWithDuration},
			[]int{markerIdx2WithDuration, markerIdxWithScene},
		},
		{
			"greater than",
			&models.SceneMarkerFilterType{
				Duration: &models.FloatCriterionInput{
					Modifier: models.CriterionModifierGreaterThan,
					Value:    markerIdxWithDuration,
				},
			},
			[]int{markerIdx2WithDuration},
			[]int{markerIdxWithDuration, markerIdxWithScene},
		},
		{
			"less than",
			&models.SceneMarkerFilterType{
				Duration: &models.FloatCriterionInput{
					Modifier: models.CriterionModifierLessThan,
					Value:    markerIdx2WithDuration,
				},
			},
			[]int{markerIdxWithDuration},
			[]int{markerIdx2WithDuration, markerIdxWithScene},
		},
	}

	qb := db.SceneMarker

	for _, tt := range cases {
		runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
			assert := assert.New(t)

			got, _, err := qb.Query(ctx, tt.markerFilter, nil)
			if err != nil {
				t.Errorf("SceneMarkerStore.Query() error = %v", err)
				return
			}

			ids := markersToIDs(got)

			include := indexesToIDs(markerIDs, tt.include)
			exclude := indexesToIDs(markerIDs, tt.exclude)

			for _, i := range include {
				assert.Contains(ids, i)
			}
			for _, e := range exclude {
				assert.NotContains(ids, e)
			}
		})
	}
}
func queryMarkers(ctx context.Context, t *testing.T, sqb models.SceneMarkerReader, markerFilter *models.SceneMarkerFilterType, findFilter *models.FindFilterType) []*models.SceneMarker {
t.Helper()
result, _, err := sqb.Query(ctx, markerFilter, findFilter)

View file

@ -276,6 +276,8 @@ const (
markerIdxWithScene = iota
markerIdxWithTag
markerIdxWithSceneTag
markerIdxWithDuration
markerIdx2WithDuration
totalMarkers
)
@ -1506,6 +1508,18 @@ func performerAliases(i int) []string {
return []string{getPerformerStringValue(i, "alias")}
}
// getPerformerCustomFields returns the deterministic custom field fixture
// values for the performer at the given index. Every fifth performer
// (index divisible by 5) gets no custom fields, so tests can exercise the
// missing/null case.
func getPerformerCustomFields(index int) map[string]interface{} {
	if index%5 != 0 {
		return map[string]interface{}{
			"string": getPerformerStringValue(index, "custom"),
			"int":    int64(index % 5),
			"real":   float64(index) / 10,
		}
	}

	return nil
}
// createPerformers creates n performers with plain Name and o performers with camel cased NaMe included
func createPerformers(ctx context.Context, n int, o int) error {
pqb := db.Performer
@ -1556,7 +1570,10 @@ func createPerformers(ctx context.Context, n int, o int) error {
})
}
err := pqb.Create(ctx, &performer)
err := pqb.Create(ctx, &models.CreatePerformerInput{
Performer: &performer,
CustomFields: getPerformerCustomFields(i),
})
if err != nil {
return fmt.Errorf("Error creating performer %v+: %s", performer, err.Error())
@ -1754,10 +1771,20 @@ func createStudios(ctx context.Context, n int, o int) error {
return nil
}
// getMarkerEndSeconds returns the end-seconds fixture value for the marker at
// the given index: the index itself for the two duration-bearing markers,
// nil (no end time) for everything else.
func getMarkerEndSeconds(index int) *float64 {
	switch index {
	case markerIdxWithDuration, markerIdx2WithDuration:
		v := float64(index)
		return &v
	default:
		return nil
	}
}
func createMarker(ctx context.Context, mqb models.SceneMarkerReaderWriter, markerSpec markerSpec) error {
markerIdx := len(markerIDs)
marker := models.SceneMarker{
SceneID: sceneIDs[markerSpec.sceneIdx],
PrimaryTagID: tagIDs[markerSpec.primaryTagIdx],
EndSeconds: getMarkerEndSeconds(markerIdx),
}
err := mqb.Create(ctx, &marker)

View file

@ -32,6 +32,7 @@ var (
performersURLsJoinTable = goqu.T(performerURLsTable)
performersTagsJoinTable = goqu.T(performersTagsTable)
performersStashIDsJoinTable = goqu.T("performer_stash_ids")
performersCustomFieldsTable = goqu.T("performer_custom_fields")
studiosAliasesJoinTable = goqu.T(studioAliasesTable)
studiosTagsJoinTable = goqu.T(studiosTagsTable)

View file

@ -41,4 +41,6 @@ fragment PerformerData on Performer {
death_date
hair_color
weight
custom_fields
}

View file

@ -8,7 +8,7 @@ fragment SceneMarkerData on SceneMarker {
screenshot
scene {
id
...SceneMarkerSceneData
}
primary_tag {
@ -21,3 +21,18 @@ fragment SceneMarkerData on SceneMarker {
name
}
}
# Scene details selected when displaying a marker's parent scene:
# title, file dimensions/path, and performer identities with images.
fragment SceneMarkerSceneData on Scene {
  id
  title
  files {
    width
    height
    path
  }
  performers {
    id
    name
    image_path
  }
}

View file

@ -47,3 +47,7 @@ mutation SceneMarkerUpdate(
mutation SceneMarkerDestroy($id: ID!) {
sceneMarkerDestroy(id: $id)
}
# Deletes multiple scene markers in a single request.
mutation SceneMarkersDestroy($ids: [ID!]!) {
  sceneMarkersDestroy(ids: $ids)
}

View file

@ -18,7 +18,6 @@ import locales, { registerCountry } from "src/locales";
import {
useConfiguration,
useConfigureUI,
usePlugins,
useSystemStatus,
} from "src/core/StashService";
import flattenMessages from "./utils/flattenMessages";
@ -40,12 +39,9 @@ import { releaseNotes } from "./docs/en/ReleaseNotes";
import { getPlatformURL } from "./core/createClient";
import { lazyComponent } from "./utils/lazyComponent";
import { isPlatformUniquelyRenderedByApple } from "./utils/apple";
import useScript, { useCSS } from "./hooks/useScript";
import { useMemoOnce } from "./hooks/state";
import Event from "./hooks/event";
import { uniq } from "lodash-es";
import { PluginRoutes } from "./plugins";
import { PluginRoutes, PluginsLoader } from "./plugins";
// import plugin_api to run code
import "./pluginApi";
@ -97,54 +93,6 @@ function languageMessageString(language: string) {
return language.replace(/-/, "");
}
type PluginList = NonNullable<Required<GQL.PluginsQuery["plugins"]>>;
// Topologically sorts plugins so that each plugin appears after every plugin
// it `requires`. Throws on a dependency cycle ("closed chain").
function sortPlugins(plugins: PluginList) {
  // graph node: plugin id plus the ids that must sort AFTER it
  type Node = { id: string; afters: string[] };

  let nodes: Record<string, Node> = {};
  let sorted: PluginList = [];
  let visited: Record<string, boolean> = {};

  // build the dependency graph; edges point from a dependency to its dependents
  plugins.forEach((v) => {
    let from = v.id;

    if (!nodes[from]) nodes[from] = { id: from, afters: [] };

    v.requires?.forEach((to) => {
      // `to` may name a plugin not present in `plugins`; a node is still
      // created for it so the edge is tracked
      if (!nodes[to]) nodes[to] = { id: to, afters: [] };
      if (!nodes[to].afters.includes(from)) nodes[to].afters.push(from);
    });
  });

  // depth-first visit; `ancestors` holds the current path for cycle detection
  function visit(idstr: string, ancestors: string[] = []) {
    let node = nodes[idstr];
    const { id } = node;

    if (visited[idstr]) return;

    ancestors.push(id);
    visited[idstr] = true;
    node.afters.forEach(function (afterID) {
      // revisiting an ancestor means the requires graph has a cycle
      if (ancestors.indexOf(afterID) >= 0)
        throw new Error("closed chain : " + afterID + " is in " + id);

      // pass a copy so sibling branches don't share ancestry
      visit(afterID.toString(), ancestors.slice());
    });

    // unshift after visiting dependents so dependencies end up first;
    // nodes created only from a `requires` entry have no matching plugin
    // and are skipped
    const plugin = plugins.find((v) => v.id === id);
    if (plugin) {
      sorted.unshift(plugin);
    }
  }

  Object.keys(nodes).forEach((n) => {
    visit(n);
  });

  return sorted;
}
const AppContainer: React.FC<React.PropsWithChildren<{}>> = PatchFunction(
"App",
(props: React.PropsWithChildren<{}>) => {
@ -215,46 +163,6 @@ export const App: React.FC = () => {
setLocale();
}, [customMessages, language]);
const {
data: plugins,
loading: pluginsLoading,
error: pluginsError,
} = usePlugins();
const sortedPlugins = useMemoOnce(() => {
return [
sortPlugins(plugins?.plugins ?? []),
!pluginsLoading && !pluginsError,
];
}, [plugins?.plugins, pluginsLoading, pluginsError]);
const pluginJavascripts = useMemoOnce(() => {
return [
uniq(
sortedPlugins
?.filter((plugin) => plugin.enabled && plugin.paths.javascript)
.map((plugin) => plugin.paths.javascript!)
.flat() ?? []
),
!!sortedPlugins && !pluginsLoading && !pluginsError,
];
}, [sortedPlugins, pluginsLoading, pluginsError]);
const pluginCSS = useMemoOnce(() => {
return [
uniq(
sortedPlugins
?.filter((plugin) => plugin.enabled && plugin.paths.css)
.map((plugin) => plugin.paths.css!)
.flat() ?? []
),
!!sortedPlugins && !pluginsLoading && !pluginsError,
];
}, [sortedPlugins, pluginsLoading, pluginsError]);
useScript(pluginJavascripts ?? [], !pluginsLoading && !pluginsError);
useCSS(pluginCSS ?? [], !pluginsLoading && !pluginsError);
const location = useLocation();
const history = useHistory();
const setupMatch = useRouteMatch(["/setup", "/migrate"]);
@ -365,43 +273,45 @@ export const App: React.FC = () => {
const titleProps = makeTitleProps();
return (
<AppContainer>
<ErrorBoundary>
{messages ? (
<IntlProvider
locale={language}
messages={messages}
formats={intlFormats}
>
<ConfigurationProvider
configuration={config.data?.configuration}
loading={config.loading}
>
{maybeRenderReleaseNotes()}
<ToastProvider>
<ConnectionMonitor />
<Suspense fallback={<LoadingIndicator />}>
<LightboxProvider>
<ManualProvider>
<InteractiveProvider>
<Helmet {...titleProps} />
{maybeRenderNavbar()}
<div
className={`main container-fluid ${
appleRendering ? "apple" : ""
}`}
>
{renderContent()}
</div>
</InteractiveProvider>
</ManualProvider>
</LightboxProvider>
</Suspense>
</ToastProvider>
</ConfigurationProvider>
</IntlProvider>
) : null}
</ErrorBoundary>
</AppContainer>
<ErrorBoundary>
{messages ? (
<IntlProvider
locale={language}
messages={messages}
formats={intlFormats}
>
<PluginsLoader>
<AppContainer>
<ConfigurationProvider
configuration={config.data?.configuration}
loading={config.loading}
>
{maybeRenderReleaseNotes()}
<ToastProvider>
<ConnectionMonitor />
<Suspense fallback={<LoadingIndicator />}>
<LightboxProvider>
<ManualProvider>
<InteractiveProvider>
<Helmet {...titleProps} />
{maybeRenderNavbar()}
<div
className={`main container-fluid ${
appleRendering ? "apple" : ""
}`}
>
{renderContent()}
</div>
</InteractiveProvider>
</ManualProvider>
</LightboxProvider>
</Suspense>
</ToastProvider>
</ConfigurationProvider>
</AppContainer>
</PluginsLoader>
</IntlProvider>
) : null}
</ErrorBoundary>
);
};

View file

@ -243,11 +243,13 @@ export const EditFilterDialog: React.FC<IEditFilterProps> = ({
}, [currentFilter.mode]);
const criterionOptions = useMemo(() => {
return [...filterOptions.criterionOptions].sort((a, b) => {
return intl
.formatMessage({ id: a.messageID })
.localeCompare(intl.formatMessage({ id: b.messageID }));
});
return [...filterOptions.criterionOptions]
.filter((c) => !c.hidden)
.sort((a, b) => {
return intl
.formatMessage({ id: a.messageID })
.localeCompare(intl.formatMessage({ id: b.messageID }));
});
}, [intl, filterOptions.criterionOptions]);
const optionSelected = useCallback(

View file

@ -14,6 +14,7 @@ import {
FormatWeight,
} from "../PerformerList";
import { PatchComponent } from "src/patch";
import { CustomFields } from "src/components/Shared/CustomFields";
interface IPerformerDetails {
performer: GQL.PerformerDataFragment;
@ -176,6 +177,7 @@ export const PerformerDetailsPanel: React.FC<IPerformerDetails> =
value={renderStashIDs()}
fullWidth={fullWidth}
/>
{fullWidth && <CustomFields values={performer.custom_fields} />}
</PerformerDetailGroup>
);
});

View file

@ -47,6 +47,8 @@ import {
yupUniqueStringList,
} from "src/utils/yup";
import { useTagsEdit } from "src/hooks/tagsEdit";
import { CustomFieldsInput } from "src/components/Shared/CustomFields";
import { cloneDeep } from "@apollo/client/utilities";
const isScraper = (
scraper: GQL.Scraper | GQL.StashBox
@ -61,6 +63,16 @@ interface IPerformerDetails {
setEncodingImage: (loading: boolean) => void;
}
// Shapes the custom-field values for the save mutation: a brand new
// performer sends the map directly, while an update wraps it in
// { full: ... } - presumably signalling a full replacement of the
// existing set; confirm against the GraphQL schema.
function customFieldInput(isNew: boolean, input: {}) {
  return isNew ? input : { full: input };
}
export const PerformerEditPanel: React.FC<IPerformerDetails> = ({
performer,
isVisible,
@ -115,6 +127,7 @@ export const PerformerEditPanel: React.FC<IPerformerDetails> = ({
ignore_auto_tag: yup.boolean().defined(),
stash_ids: yup.mixed<GQL.StashIdInput[]>().defined(),
image: yup.string().nullable().optional(),
custom_fields: yup.object().required().defined(),
});
const initialValues = {
@ -142,15 +155,26 @@ export const PerformerEditPanel: React.FC<IPerformerDetails> = ({
tag_ids: (performer.tags ?? []).map((t) => t.id),
ignore_auto_tag: performer.ignore_auto_tag ?? false,
stash_ids: getStashIDs(performer.stash_ids),
custom_fields: cloneDeep(performer.custom_fields ?? {}),
};
type InputValues = yup.InferType<typeof schema>;
const [customFieldsError, setCustomFieldsError] = useState<string>();
function submit(values: InputValues) {
const input = {
...schema.cast(values),
custom_fields: customFieldInput(isNew, values.custom_fields),
};
onSave(input);
}
const formik = useFormik<InputValues>({
initialValues,
enableReinitialize: true,
validate: yupFormikValidate(schema),
onSubmit: (values) => onSave(schema.cast(values)),
onSubmit: submit,
});
const { tags, updateTagsStateFromScraper, tagsControl } = useTagsEdit(
@ -571,7 +595,11 @@ export const PerformerEditPanel: React.FC<IPerformerDetails> = ({
</div>
<Button
variant="success"
disabled={(!isNew && !formik.dirty) || !isEqual(formik.errors, {})}
disabled={
(!isNew && !formik.dirty) ||
!isEqual(formik.errors, {}) ||
customFieldsError !== undefined
}
onClick={() => formik.submitForm()}
>
<FormattedMessage id="actions.save" />
@ -680,6 +708,15 @@ export const PerformerEditPanel: React.FC<IPerformerDetails> = ({
{renderInputField("ignore_auto_tag", "checkbox")}
<hr />
<CustomFieldsInput
values={formik.values.custom_fields}
onChange={(v) => formik.setFieldValue("custom_fields", v)}
error={customFieldsError}
setError={(e) => setCustomFieldsError(e)}
/>
{renderButtons("mt-3")}
</Form>
</>

View file

@ -54,6 +54,17 @@
display: none;
}
}
.detail-group .custom-fields .collapse-button {
display: table-cell;
font-weight: 700;
padding-left: 0;
}
.custom-fields .detail-item-title,
.custom-fields .detail-item-value {
font-family: "Courier New", Courier, monospace;
}
/* stylelint-enable selector-class-pattern */
}

View file

@ -0,0 +1,83 @@
import React, { useState } from "react";
import { useSceneMarkersDestroy } from "src/core/StashService";
import * as GQL from "src/core/generated-graphql";
import { ModalComponent } from "src/components/Shared/Modal";
import { useToast } from "src/hooks/Toast";
import { useIntl } from "react-intl";
import { faTrashAlt } from "@fortawesome/free-solid-svg-icons";
interface IDeleteSceneMarkersDialogProps {
  selected: GQL.SceneMarkerDataFragment[];
  onClose: (confirmed: boolean) => void;
}

// Confirmation dialog for bulk-deleting the selected scene markers.
// Calls onClose(true) after a successful delete, onClose(false) on
// cancel or error.
export const DeleteSceneMarkersDialog: React.FC<
  IDeleteSceneMarkersDialogProps
> = ({ selected, onClose }) => {
  const intl = useIntl();
  const Toast = useToast();

  // true while the destroy mutation is in flight
  const [isDeleting, setIsDeleting] = useState(false);

  const singularEntity = intl.formatMessage({ id: "marker" });
  const pluralEntity = intl.formatMessage({ id: "markers" });
  const entityValues = {
    count: selected.length,
    singularEntity,
    pluralEntity,
  };

  const header = intl.formatMessage(
    { id: "dialogs.delete_object_title" },
    entityValues
  );
  const toastMessage = intl.formatMessage(
    { id: "toast.delete_past_tense" },
    entityValues
  );
  const message = intl.formatMessage(
    { id: "dialogs.delete_object_desc" },
    entityValues
  );

  const [deleteSceneMarkers] = useSceneMarkersDestroy({
    ids: selected.map((marker) => marker.id),
  });

  async function onDelete() {
    setIsDeleting(true);
    try {
      await deleteSceneMarkers();
      Toast.success(toastMessage);
      onClose(true);
    } catch (e) {
      Toast.error(e);
      onClose(false);
    }
    setIsDeleting(false);
  }

  return (
    <ModalComponent
      show
      icon={faTrashAlt}
      header={header}
      accept={{
        variant: "danger",
        onClick: onDelete,
        text: intl.formatMessage({ id: "actions.delete" }),
      }}
      cancel={{
        onClick: () => onClose(false),
        text: intl.formatMessage({ id: "actions.cancel" }),
        variant: "secondary",
      }}
      isRunning={isDeleting}
    >
      <p>{message}</p>
    </ModalComponent>
  );
};

export default DeleteSceneMarkersDialog;

View file

@ -94,7 +94,7 @@ export const PreviewScrubber: React.FC<IScenePreviewProps> = ({
onClick(s.start);
}
if (spriteInfo === null) return null;
if (spriteInfo === null || !vttPath) return null;
return (
<div className="preview-scrubber">

View file

@ -0,0 +1,214 @@
import React, { useEffect, useMemo, useState } from "react";
import { Button, ButtonGroup } from "react-bootstrap";
import * as GQL from "src/core/generated-graphql";
import { Icon } from "../Shared/Icon";
import { TagLink } from "../Shared/TagLink";
import { HoverPopover } from "../Shared/HoverPopover";
import NavUtils from "src/utils/navigation";
import TextUtils from "src/utils/text";
import { ConfigurationContext } from "src/hooks/Config";
import { GridCard, calculateCardWidth } from "../Shared/GridCard/GridCard";
import { faTag } from "@fortawesome/free-solid-svg-icons";
import ScreenUtils from "src/utils/screen";
import { markerTitle } from "src/core/markers";
import { Link } from "react-router-dom";
import { objectTitle } from "src/core/files";
import { PerformerPopoverButton } from "../Shared/PerformerPopoverButton";
import { ScenePreview } from "./SceneCard";
import { TruncatedText } from "../Shared/TruncatedText";
interface ISceneMarkerCardProps {
marker: GQL.SceneMarkerDataFragment;
containerWidth?: number;
previewHeight?: number;
index?: number;
compact?: boolean;
selecting?: boolean;
selected?: boolean | undefined;
zoomIndex?: number;
onSelectedChanged?: (selected: boolean, shiftKey: boolean) => void;
}
// Popover button row shown at the bottom of a marker card (performers
// and tags). Hidden entirely when the card is compact.
const SceneMarkerCardPopovers = (props: ISceneMarkerCardProps) => {
  // Performer popover; omitted when the scene has no performers.
  function maybeRenderPerformerPopoverButton() {
    if (props.marker.scene.performers.length <= 0) return;

    return (
      <PerformerPopoverButton
        performers={props.marker.scene.performers}
        linkType="scene_marker"
      />
    );
  }

  // Tag popover: the primary tag first, followed by any additional tags.
  function renderTagPopoverButton() {
    // was: tags.map() called purely for its push() side effect; build
    // the array declaratively instead
    const popoverContent = [
      <TagLink
        key={props.marker.primary_tag.id}
        tag={props.marker.primary_tag}
        linkType="scene_marker"
      />,
      ...props.marker.tags.map((tag) => (
        <TagLink key={tag.id} tag={tag} linkType="scene_marker" />
      )),
    ];

    return (
      <HoverPopover
        className="tag-count"
        placement="bottom"
        content={popoverContent}
      >
        <Button className="minimal">
          <Icon icon={faTag} />
          <span>{popoverContent.length}</span>
        </Button>
      </HoverPopover>
    );
  }

  function renderPopoverButtonGroup() {
    if (!props.compact) {
      return (
        <>
          <hr />
          <ButtonGroup className="card-popovers">
            {maybeRenderPerformerPopoverButton()}
            {renderTagPopoverButton()}
          </ButtonGroup>
        </>
      );
    }
  }

  return <>{renderPopoverButtonGroup()}</>;
};
// Card detail section: the marker's time range plus a truncated link to
// its parent scene.
const SceneMarkerCardDetails = (props: ISceneMarkerCardProps) => {
  const { marker } = props;

  const timeRange = TextUtils.formatTimestampRange(
    marker.seconds,
    marker.end_seconds ?? undefined
  );

  return (
    <div className="scene-marker-card__details">
      <span className="scene-marker-card__time">{timeRange}</span>
      <TruncatedText
        className="scene-marker-card__scene"
        lineCount={3}
        text={
          <Link to={NavUtils.makeSceneMarkersSceneUrl(marker.scene)}>
            {objectTitle(marker.scene)}
          </Link>
        }
      />
    </div>
  );
};
// Card thumbnail: the marker's preview video/screenshot plus a duration
// overlay when the marker has an explicit end time.
const SceneMarkerCardImage = (props: ISceneMarkerCardProps) => {
  const { configuration } = React.useContext(ConfigurationContext);
  const { marker } = props;

  // first file of the backing scene, if any - used only for orientation
  const file = useMemo(
    () => (marker.scene.files.length > 0 ? marker.scene.files[0] : undefined),
    [marker.scene]
  );

  function isPortrait() {
    const width = file?.width ? file.width : 0;
    const height = file?.height ? file.height : 0;
    return height > width;
  }

  function maybeRenderSceneSpecsOverlay() {
    const { end_seconds, seconds } = marker;
    return (
      <div className="scene-specs-overlay">
        {end_seconds && (
          <span className="overlay-duration">
            {TextUtils.secondsToTimestamp(end_seconds - seconds)}
          </span>
        )}
      </div>
    );
  }

  return (
    <>
      <ScenePreview
        image={marker.screenshot ?? undefined}
        video={marker.stream ?? undefined}
        soundActive={configuration?.interface?.soundOnPreview ?? false}
        isPortrait={isPortrait()}
      />
      {maybeRenderSceneSpecsOverlay()}
    </>
  );
};
// Grid card for a scene marker. Computes a fitted card width from the
// container width and zoom level (skipped on mobile, where CSS handles
// sizing).
export const SceneMarkerCard = (props: ISceneMarkerCardProps) => {
  const [cardWidth, setCardWidth] = useState<number>();

  // CSS zoom class; compact cards are never zoomed
  function zoomIndex() {
    if (!props.compact && props.zoomIndex !== undefined) {
      return `zoom-${props.zoomIndex}`;
    }
    return "";
  }

  useEffect(() => {
    if (
      !props.containerWidth ||
      props.zoomIndex === undefined ||
      ScreenUtils.isMobile()
    )
      return;

    // Preferred card width per zoom level.
    // 340 is intentionally higher than 320.
    // was: a switch with no default plus a non-null assertion, which
    // passed undefined to calculateCardWidth for zoomIndex > 3
    const preferredCardWidths = [240, 340, 480, 640];
    const preferredCardWidth =
      preferredCardWidths[props.zoomIndex] ??
      preferredCardWidths[preferredCardWidths.length - 1];

    const fittedCardWidth = calculateCardWidth(
      props.containerWidth,
      preferredCardWidth
    );
    setCardWidth(fittedCardWidth);
  }, [props, props.containerWidth, props.zoomIndex]);

  return (
    <GridCard
      className={`scene-marker-card ${zoomIndex()}`}
      url={NavUtils.makeSceneMarkerUrl(props.marker)}
      title={markerTitle(props.marker)}
      width={cardWidth}
      linkClassName="scene-marker-card-link"
      thumbnailSectionClassName="video-section"
      resumeTime={props.marker.seconds}
      image={<SceneMarkerCardImage {...props} />}
      details={<SceneMarkerCardDetails {...props} />}
      popovers={<SceneMarkerCardPopovers {...props} />}
      selected={props.selected}
      selecting={props.selecting}
      onSelectedChanged={props.onSelectedChanged}
    />
  );
};

View file

@ -0,0 +1,38 @@
import React from "react";
import * as GQL from "src/core/generated-graphql";
import { SceneMarkerCard } from "./SceneMarkerCard";
import { useContainerDimensions } from "../Shared/GridCard/GridCard";
interface ISceneMarkerCardsGrid {
  markers: GQL.SceneMarkerDataFragment[];
  selectedIds: Set<string>;
  zoomIndex: number;
  onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void;
}

// Responsive grid of SceneMarkerCard items; measures its own width so
// each card can fit itself to the container.
export const SceneMarkerCardsGrid: React.FC<ISceneMarkerCardsGrid> = ({
  markers,
  selectedIds,
  zoomIndex,
  onSelectChange,
}) => {
  const [gridRef, { width }] = useContainerDimensions();

  function renderCard(marker: GQL.SceneMarkerDataFragment, index: number) {
    return (
      <SceneMarkerCard
        key={marker.id}
        containerWidth={width}
        marker={marker}
        index={index}
        zoomIndex={zoomIndex}
        selecting={selectedIds.size > 0}
        selected={selectedIds.has(marker.id)}
        onSelectedChanged={(selected: boolean, shiftKey: boolean) =>
          onSelectChange(marker.id, selected, shiftKey)
        }
      />
    );
  }

  return (
    <div className="row justify-content-center" ref={gridRef}>
      {markers.map(renderCard)}
    </div>
  );
};

View file

@ -14,6 +14,8 @@ import { ListFilterModel } from "src/models/list-filter/filter";
import { DisplayMode } from "src/models/list-filter/types";
import { MarkerWallPanel } from "../Wall/WallPanel";
import { View } from "../List/views";
import { SceneMarkerCardsGrid } from "./SceneMarkerCardsGrid";
import { DeleteSceneMarkersDialog } from "./DeleteSceneMarkersDialog";
function getItems(result: GQL.FindSceneMarkersQueryResult) {
return result?.data?.findSceneMarkers?.scene_markers ?? [];
@ -27,6 +29,7 @@ interface ISceneMarkerList {
filterHook?: (filter: ListFilterModel) => ListFilterModel;
view?: View;
alterQuery?: boolean;
defaultSort?: string;
}
export const SceneMarkerList: React.FC<ISceneMarkerList> = ({
@ -84,7 +87,9 @@ export const SceneMarkerList: React.FC<ISceneMarkerList> = ({
function renderContent(
result: GQL.FindSceneMarkersQueryResult,
filter: ListFilterModel
filter: ListFilterModel,
selectedIds: Set<string>,
onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void
) {
if (!result.data?.findSceneMarkers) return;
@ -93,6 +98,29 @@ export const SceneMarkerList: React.FC<ISceneMarkerList> = ({
<MarkerWallPanel markers={result.data.findSceneMarkers.scene_markers} />
);
}
if (filter.displayMode === DisplayMode.Grid) {
return (
<SceneMarkerCardsGrid
markers={result.data.findSceneMarkers.scene_markers}
zoomIndex={filter.zoomIndex}
selectedIds={selectedIds}
onSelectChange={onSelectChange}
/>
);
}
}
function renderDeleteDialog(
selectedSceneMarkers: GQL.SceneMarkerDataFragment[],
onClose: (confirmed: boolean) => void
) {
return (
<DeleteSceneMarkersDialog
selected={selectedSceneMarkers}
onClose={onClose}
/>
);
}
return (
@ -104,12 +132,15 @@ export const SceneMarkerList: React.FC<ISceneMarkerList> = ({
alterQuery={alterQuery}
filterHook={filterHook}
view={view}
selectable
>
<ItemList
zoomable
view={view}
otherOperations={otherOperations}
addKeybinds={addKeybinds}
renderContent={renderContent}
renderDeleteDialog={renderDeleteDialog}
/>
</ItemListContext>
);

View file

@ -221,6 +221,7 @@ textarea.scene-description {
}
.scene-card,
.scene-marker-card,
.gallery-card {
.scene-specs-overlay {
transition: opacity 0.5s;
@ -278,7 +279,8 @@ textarea.scene-description {
}
}
.scene-card.card {
.scene-card.card,
.scene-marker-card.card {
overflow: hidden;
padding: 0;

View file

@ -110,9 +110,7 @@ export const GenerateOptions: React.FC<IGenerateOptions> = ({
}
/>
<BooleanSetting
advanced
id="marker-screenshot-task"
className="sub-setting"
checked={options.markerScreenshots ?? false}
disabled={!options.markers}
headingID="dialogs.scene_gen.marker_screenshots"

View file

@ -8,6 +8,7 @@ import { Button, Collapse } from "react-bootstrap";
import { Icon } from "./Icon";
interface IProps {
className?: string;
text: React.ReactNode;
}
@ -17,12 +18,12 @@ export const CollapseButton: React.FC<React.PropsWithChildren<IProps>> = (
const [open, setOpen] = useState(false);
return (
<div>
<div className={props.className}>
<Button
onClick={() => setOpen(!open)}
className="minimal collapse-button"
>
<Icon icon={open ? faChevronDown : faChevronRight} />
<Icon icon={open ? faChevronDown : faChevronRight} fixedWidth />
<span>{props.text}</span>
</Button>
<Collapse in={open}>
@ -44,7 +45,7 @@ export const ExpandCollapseButton: React.FC<{
className="minimal expand-collapse"
onClick={() => setCollapsed(!collapsed)}
>
<Icon className="fa-fw" icon={buttonIcon} />
<Icon icon={buttonIcon} fixedWidth />
</Button>
</span>
);

View file

@ -0,0 +1,308 @@
import React, { useEffect, useMemo, useRef, useState } from "react";
import { CollapseButton } from "./CollapseButton";
import { DetailItem } from "./DetailItem";
import { Button, Col, Form, FormGroup, InputGroup, Row } from "react-bootstrap";
import { FormattedMessage, useIntl } from "react-intl";
import { cloneDeep } from "@apollo/client/utilities";
import { Icon } from "./Icon";
import { faMinus, faPlus } from "@fortawesome/free-solid-svg-icons";
import cx from "classnames";
const maxFieldNameLength = 64;
export type CustomFieldMap = {
[key: string]: unknown;
};
interface ICustomFields {
values: CustomFieldMap;
}
// Renders an arbitrary custom-field value as display text: strings pass
// through, numbers and booleans are stringified, arrays are
// comma-joined, and anything else falls back to JSON.
function convertValue(value: unknown): string {
  switch (typeof value) {
    case "string":
      return value;
    case "number":
      return value.toString();
    case "boolean":
      return value ? "true" : "false";
    default:
      return Array.isArray(value) ? value.join(", ") : JSON.stringify(value);
  }
}
// Read-only display row for a single custom field.
const CustomField: React.FC<{ field: string; value: unknown }> = ({
  field,
  value,
}) => {
  // css ids cannot contain spaces - substitute hyphens
  const cssID = field.toLowerCase().replace(/ /g, "-");

  return (
    <DetailItem
      id={cssID}
      label={field}
      labelTitle={field}
      value={convertValue(value)}
      fullWidth={true}
      showEmpty
    />
  );
};
// Collapsible read-only list of custom fields; renders nothing when the
// map is empty.
export const CustomFields: React.FC<ICustomFields> = ({ values }) => {
  const intl = useIntl();

  const entries = Object.entries(values);
  if (entries.length === 0) {
    return null;
  }

  return (
    // css class uses hyphens to satisfy the linter's class-name rule
    <div className="custom-fields">
      <CollapseButton text={intl.formatMessage({ id: "custom_fields.title" })}>
        {entries.map(([key, value]) => (
          <CustomField key={key} field={key} value={value} />
        ))}
      </CollapseButton>
    </div>
  );
};
// Matches integers and decimals (optionally negative) with no leading
// zeroes, e.g. "0", "-12", "3.50" - but not "01", "1." or "abc".
function isNumeric(v: string) {
  const numericPattern = /^-?(?:0|(?:[1-9][0-9]*))(?:\.[0-9]+)?$/;
  return numericPattern.test(v);
}

// Numeric-looking strings are stored as numbers; everything else stays
// a string.
function convertCustomValue(v: string) {
  return isNumeric(v) ? Number(v) : v;
}
// One editable custom-field row. Edits are kept in local state and
// committed to the parent on blur; for existing rows the field name is
// fixed and a remove button reports an empty name to delete the row.
const CustomFieldInput: React.FC<{
  field: string;
  value: unknown;
  onChange: (field: string, value: unknown) => void;
  isNew?: boolean;
  error?: string;
}> = ({ field, value, onChange, isNew = false, error }) => {
  const intl = useIntl();

  // local copies of the pair, pushed upward only on blur
  const [fieldText, setFieldText] = useState(field);
  const [valueText, setValueText] = useState(value as string);

  const fieldRef = useRef<HTMLInputElement>(null);
  const valueRef = useRef<HTMLInputElement>(null);

  // resync local state whenever the parent-provided pair changes
  useEffect(() => {
    setFieldText(field);
    setValueText(value as string);
  }, [field, value]);

  // commit the edited pair, converting numeric strings to numbers
  function commit() {
    onChange(fieldText, convertCustomValue(valueText));
  }

  // an empty field name tells the parent to drop this entry
  function removeField() {
    onChange("", "");
  }

  const fieldColumn = isNew ? (
    <Form.Control
      ref={fieldRef}
      className="input-control"
      type="text"
      value={fieldText ?? ""}
      placeholder={intl.formatMessage({ id: "custom_fields.field" })}
      onChange={(event) => setFieldText(event.currentTarget.value)}
      onBlur={commit}
    />
  ) : (
    <Form.Label title={fieldText}>{fieldText}</Form.Label>
  );

  return (
    <FormGroup>
      <Row className={cx("custom-fields-row", { "custom-fields-new": isNew })}>
        <Col sm={3} xl={2} className="custom-fields-field">
          {fieldColumn}
        </Col>
        <Col sm={9} xl={7}>
          <InputGroup>
            <Form.Control
              ref={valueRef}
              className="input-control"
              type="text"
              value={(valueText as string) ?? ""}
              placeholder={fieldText}
              onChange={(event) => setValueText(event.currentTarget.value)}
              onBlur={commit}
            />
            <InputGroup.Append>
              {!isNew && (
                <Button
                  className="custom-fields-remove"
                  variant="danger"
                  onClick={removeField}
                >
                  <Icon icon={faMinus} />
                </Button>
              )}
            </InputGroup.Append>
          </InputGroup>
        </Col>
      </Row>
      <Form.Control.Feedback type="invalid">{error}</Form.Control.Feedback>
    </FormGroup>
  );
};
interface ICustomField {
  field: string;
  value: unknown;
}

interface ICustomFieldsInput {
  values: CustomFieldMap;
  error?: string;
  onChange: (values: CustomFieldMap) => void;
  setError: (error?: string) => void;
}

// Editable list of custom field name/value pairs. Committed fields are
// rendered from `values`; a trailing blank row stages a not-yet-added
// entry (`newCustomField`), which is merged into `values` via onChange
// as the user types and finalised with the add button.
export const CustomFieldsInput: React.FC<ICustomFieldsInput> = ({
  values,
  error,
  onChange,
  setError,
}) => {
  const intl = useIntl();

  // the staged (not yet committed) field being typed into the "new" row
  const [newCustomField, setNewCustomField] = useState<ICustomField>({
    field: "",
    value: "",
  });

  // Names of the committed fields to render, sorted alphabetically.
  // The staged field is removed so it only appears in the "new" row;
  // when there is a validation error the staged field was never written
  // into `values`, so nothing needs removing.
  const fields = useMemo(() => {
    const valueCopy = cloneDeep(values);
    if (newCustomField.field !== "" && error === undefined) {
      delete valueCopy[newCustomField.field];
    }
    const ret = Object.keys(valueCopy);
    ret.sort();
    return ret;
  }, [values, newCustomField, error]);

  // Called as the user edits the "new" row: validates the field name,
  // swaps the previously staged entry in `values` for the new one, and
  // reports any validation error upward. Later checks intentionally
  // overwrite earlier ones, so only the last failing rule's message is
  // shown.
  function onSetNewField(v: ICustomField) {
    // validate the field name
    let newError = undefined;
    if (v.field.length > maxFieldNameLength) {
      newError = intl.formatMessage({
        id: "errors.custom_fields.field_name_length",
      });
    }
    if (v.field.trim() === "" && v.value !== "") {
      newError = intl.formatMessage({
        id: "errors.custom_fields.field_name_required",
      });
    }
    if (v.field.trim() !== v.field) {
      newError = intl.formatMessage({
        id: "errors.custom_fields.field_name_whitespace",
      });
    }
    if (fields.includes(v.field)) {
      newError = intl.formatMessage({
        id: "errors.custom_fields.duplicate_field",
      });
    }

    // capture the previously staged name before replacing it
    const oldField = newCustomField;
    setNewCustomField(v);

    // the old staged entry is only present in `values` if it had a
    // non-empty name and passed validation - remove it before re-adding
    const valuesCopy = cloneDeep(values);
    if (oldField.field !== "" && error === undefined) {
      delete valuesCopy[oldField.field];
    }

    // if valid, pass up
    if (!newError && v.field !== "") {
      valuesCopy[v.field] = v.value;
    }

    onChange(valuesCopy);
    setError(newError);
  }

  // Commit the staged row: keep its pair in `values` and reset the
  // "new" row for the next entry.
  function onAdd() {
    const newValues = {
      ...values,
      [newCustomField.field]: newCustomField.value,
    };
    setNewCustomField({ field: "", value: "" });
    onChange(newValues);
  }

  // A committed row changed: `currentField` is its old name, `newField`
  // its (possibly renamed) name; an empty new name deletes the row.
  function fieldChanged(
    currentField: string,
    newField: string,
    value: unknown
  ) {
    let newValues = cloneDeep(values);
    delete newValues[currentField];
    if (newField !== "") {
      newValues[newField] = value;
    }
    onChange(newValues);
  }

  return (
    <CollapseButton
      className="custom-fields-input"
      text={intl.formatMessage({ id: "custom_fields.title" })}
    >
      <Row>
        <Col xl={12}>
          <Row className="custom-fields-input-header">
            <Form.Label column sm={3} xl={2}>
              <FormattedMessage id="custom_fields.field" />
            </Form.Label>
            <Form.Label column sm={9} xl={7}>
              <FormattedMessage id="custom_fields.value" />
            </Form.Label>
          </Row>
          {fields.map((field) => (
            <CustomFieldInput
              key={field}
              field={field}
              value={values[field]}
              onChange={(newField, newValue) =>
                fieldChanged(field, newField, newValue)
              }
            />
          ))}
          {/* trailing row used to stage a brand-new field */}
          <CustomFieldInput
            field={newCustomField.field}
            value={newCustomField.value}
            error={error}
            onChange={(field, value) => onSetNewField({ field, value })}
            isNew
          />
        </Col>
      </Row>
      <Button
        className="custom-fields-add"
        variant="success"
        onClick={() => onAdd()}
        disabled={newCustomField.field === "" || error !== undefined}
      >
        <Icon icon={faPlus} />
      </Button>
    </CollapseButton>
  );
};

View file

@ -3,34 +3,39 @@ import { FormattedMessage } from "react-intl";
interface IDetailItem {
id?: string | null;
label?: React.ReactNode;
value?: React.ReactNode;
labelTitle?: string;
title?: string;
fullWidth?: boolean;
showEmpty?: boolean;
}
export const DetailItem: React.FC<IDetailItem> = ({
id,
label,
value,
labelTitle,
title,
fullWidth,
showEmpty = false,
}) => {
if (!id || !value || value === "Na") {
if (!id || (!showEmpty && (!value || value === "Na"))) {
return <></>;
}
const message = <FormattedMessage id={id} />;
const message = label ?? <FormattedMessage id={id} />;
// according to linter rule CSS classes shouldn't use underscores
const sanitisedID = id.replace(/_/g, "-");
return (
// according to linter rule CSS classes shouldn't use underscores
<div className={`detail-item ${id}`}>
<span className={`detail-item-title ${id.replace("_", "-")}`}>
<span className={`detail-item-title ${sanitisedID}`} title={labelTitle}>
{message}
{fullWidth ? ":" : ""}
</span>
<span
className={`detail-item-value ${id.replace("_", "-")}`}
title={title}
>
<span className={`detail-item-value ${sanitisedID}`} title={title}>
{value}
</span>
</div>

View file

@ -38,7 +38,7 @@ const CommonLinkComponent: React.FC<ICommonLinkProps> = ({
interface IPerformerLinkProps {
performer: INamedObject & { disambiguation?: string | null };
linkType?: "scene" | "gallery" | "image";
linkType?: "scene" | "gallery" | "image" | "scene_marker";
className?: string;
}
@ -55,6 +55,8 @@ export const PerformerLink: React.FC<IPerformerLinkProps> = ({
return NavUtils.makePerformerGalleriesUrl(performer);
case "image":
return NavUtils.makePerformerImagesUrl(performer);
case "scene_marker":
return NavUtils.makePerformerSceneMarkersUrl(performer);
case "scene":
default:
return NavUtils.makePerformerScenesUrl(performer);
@ -209,7 +211,8 @@ interface ITagLinkProps {
| "details"
| "performer"
| "group"
| "studio";
| "studio"
| "scene_marker";
className?: string;
hoverPlacement?: Placement;
showHierarchyIcon?: boolean;
@ -238,6 +241,8 @@ export const TagLink: React.FC<ITagLinkProps> = ({
return NavUtils.makeTagImagesUrl(tag);
case "group":
return NavUtils.makeTagGroupsUrl(tag);
case "scene_marker":
return NavUtils.makeTagSceneMarkersUrl(tag);
case "details":
return NavUtils.makeTagUrl(tag.id ?? "");
}

View file

@ -197,6 +197,15 @@ button.collapse-button.btn-primary:not(:disabled):not(.disabled):active {
border: none;
box-shadow: none;
color: #f5f8fa;
text-align: left;
}
button.collapse-button {
.fa-icon {
margin-left: 0;
}
padding-left: 0;
}
.hover-popover-content {
@ -678,3 +687,44 @@ button.btn.favorite-button {
}
}
}
.custom-fields .detail-item .detail-item-title {
max-width: 130px;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
.custom-fields-input > .collapse-button {
font-weight: 700;
}
.custom-fields-row {
align-items: center;
font-family: "Courier New", Courier, monospace;
font-size: 0.875rem;
.form-label {
margin-bottom: 0;
max-width: 100%;
overflow: hidden;
text-overflow: ellipsis;
vertical-align: middle;
white-space: nowrap;
}
// labels with titles are styled with help cursor and dotted underline elsewhere
div.custom-fields-field label.form-label {
cursor: inherit;
text-decoration: inherit;
}
.form-control,
.btn {
font-size: 0.875rem;
}
&.custom-fields-new > div:not(:last-child) {
padding-right: 0;
}
}

View file

@ -1,5 +1,5 @@
import { faTimes } from "@fortawesome/free-solid-svg-icons";
import React, { useRef, useContext } from "react";
import React, { useContext, useState } from "react";
import {
Badge,
Button,
@ -14,6 +14,102 @@ import { Icon } from "src/components/Shared/Icon";
import { ParseMode, TagOperation } from "../constants";
import { TaggerStateContext } from "../context";
// Editor for the tagger's blacklist of regex patterns: an input with
// add-on-Enter, inline validation, and removable badges for each entry.
const Blacklist: React.FC<{
  list: string[];
  setList: (blacklist: string[]) => void;
}> = ({ list, setList }) => {
  const intl = useIntl();
  const [currentValue, setCurrentValue] = useState("");
  const [error, setError] = useState<string>();

  // Validates and appends the typed pattern: rejects duplicates and
  // strings that are not valid regular expressions.
  function addBlacklistItem() {
    if (!currentValue) return;

    // don't add duplicate items
    if (list.includes(currentValue)) {
      setError(
        intl.formatMessage({
          id: "component_tagger.config.errors.blacklist_duplicate",
        })
      );
      return;
    }

    // validate regex
    try {
      new RegExp(currentValue);
    } catch (e) {
      setError((e as SyntaxError).message);
      return;
    }

    setList([...list, currentValue]);
    setCurrentValue("");
  }

  function removeBlacklistItem(index: number) {
    // was: manual copy + splice
    setList(list.filter((_, i) => i !== index));
  }

  return (
    <div>
      <h5>
        <FormattedMessage id="component_tagger.config.blacklist_label" />
      </h5>
      <Form.Group>
        <InputGroup hasValidation>
          <Form.Control
            className="text-input"
            value={currentValue}
            onChange={(e) => {
              setCurrentValue(e.currentTarget.value);
              setError(undefined);
            }}
            onKeyDown={(e: React.KeyboardEvent<HTMLInputElement>) => {
              if (e.key === "Enter") {
                addBlacklistItem();
                e.preventDefault();
              }
            }}
            isInvalid={!!error}
          />
          <InputGroup.Append>
            <Button onClick={() => addBlacklistItem()}>
              <FormattedMessage id="actions.add" />
            </Button>
          </InputGroup.Append>
          <Form.Control.Feedback type="invalid">{error}</Form.Control.Feedback>
        </InputGroup>
      </Form.Group>
      <div>
        {intl.formatMessage(
          { id: "component_tagger.config.blacklist_desc" },
          { chars_require_escape: <code>[\^$.|?*+()</code> }
        )}
      </div>
      {list.map((item, index) => (
        <Badge
          className="tag-item d-inline-block"
          variant="secondary"
          key={item}
        >
          {/* was: item.toString() - item is already a string */}
          {item}
          <Button
            className="minimal ml-2"
            onClick={() => removeBlacklistItem(index)}
          >
            <Icon icon={faTimes} />
          </Button>
        </Badge>
      ))}
    </div>
  );
};
interface IConfigProps {
show: boolean;
}
@ -21,33 +117,6 @@ interface IConfigProps {
const Config: React.FC<IConfigProps> = ({ show }) => {
const { config, setConfig } = useContext(TaggerStateContext);
const intl = useIntl();
const blacklistRef = useRef<HTMLInputElement | null>(null);
function addBlacklistItem() {
if (!blacklistRef.current) return;
const input = blacklistRef.current.value;
if (!input) return;
// don't add duplicate items
if (!config.blacklist.includes(input)) {
setConfig({
...config,
blacklist: [...config.blacklist, input],
});
}
blacklistRef.current.value = "";
}
function removeBlacklistItem(index: number) {
const newBlacklist = [...config.blacklist];
newBlacklist.splice(index, 1);
setConfig({
...config,
blacklist: newBlacklist,
});
}
return (
<Collapse in={show}>
@ -198,47 +267,10 @@ const Config: React.FC<IConfigProps> = ({ show }) => {
</Form.Group>
</Form>
<div className="col-md-6">
<h5>
<FormattedMessage id="component_tagger.config.blacklist_label" />
</h5>
<InputGroup>
<Form.Control
className="text-input"
ref={blacklistRef}
onKeyPress={(e: React.KeyboardEvent<HTMLInputElement>) => {
if (e.key === "Enter") {
addBlacklistItem();
e.preventDefault();
}
}}
/>
<InputGroup.Append>
<Button onClick={() => addBlacklistItem()}>
<FormattedMessage id="actions.add" />
</Button>
</InputGroup.Append>
</InputGroup>
<div>
{intl.formatMessage(
{ id: "component_tagger.config.blacklist_desc" },
{ chars_require_escape: <code>[\^$.|?*+()</code> }
)}
</div>
{config.blacklist.map((item, index) => (
<Badge
className="tag-item d-inline-block"
variant="secondary"
key={item}
>
{item.toString()}
<Button
className="minimal ml-2"
onClick={() => removeBlacklistItem(index)}
>
<Icon icon={faTimes} />
</Button>
</Badge>
))}
<Blacklist
list={config.blacklist}
setList={(blacklist) => setConfig({ ...config, blacklist })}
/>
</div>
</div>
</Card>

View file

@ -69,6 +69,8 @@ const Scene: React.FC<{
: undefined
}
showLightboxImage={showLightboxImage}
queue={queue}
index={index}
>
{searchResult && searchResult.results?.length ? (
<SceneSearchResults scenes={searchResult.results} target={scene} />

View file

@ -1,6 +1,6 @@
import React, { useState, useContext, PropsWithChildren, useMemo } from "react";
import * as GQL from "src/core/generated-graphql";
import { Link } from "react-router-dom";
import { Link, useHistory } from "react-router-dom";
import { Button, Collapse, Form, InputGroup } from "react-bootstrap";
import { FormattedMessage } from "react-intl";
@ -19,6 +19,8 @@ import {
} from "@fortawesome/free-solid-svg-icons";
import { objectPath, objectTitle } from "src/core/files";
import { ExternalLink } from "src/components/Shared/ExternalLink";
import { ConfigurationContext } from "src/hooks/Config";
import { SceneQueue } from "src/models/sceneQueue";
interface ITaggerSceneDetails {
scene: GQL.SlimSceneDataFragment;
@ -91,6 +93,8 @@ interface ITaggerScene {
scrapeSceneFragment?: (scene: GQL.SlimSceneDataFragment) => void;
loading?: boolean;
showLightboxImage: (imagePath: string) => void;
queue?: SceneQueue;
index?: number;
}
export const TaggerScene: React.FC<PropsWithChildren<ITaggerScene>> = ({
@ -102,6 +106,8 @@ export const TaggerScene: React.FC<PropsWithChildren<ITaggerScene>> = ({
errorMessage,
children,
showLightboxImage,
queue,
index,
}) => {
const { config } = useContext(TaggerStateContext);
const [queryString, setQueryString] = useState<string>("");
@ -125,6 +131,11 @@ export const TaggerScene: React.FC<PropsWithChildren<ITaggerScene>> = ({
const height = file?.height ? file.height : 0;
const isPortrait = height > width;
const history = useHistory();
const { configuration } = React.useContext(ConfigurationContext);
const cont = configuration?.interface.continuePlaylistDefault ?? false;
async function query() {
if (!doSceneQuery) return;
@ -213,6 +224,18 @@ export const TaggerScene: React.FC<PropsWithChildren<ITaggerScene>> = ({
}
}
// Navigates to the scene at the clicked timestamp, preserving the play
// queue (and continue-playlist setting) when one is active.
function onScrubberClick(timestamp: number) {
  if (queue) {
    history.push(
      queue.makeLink(scene.id, {
        sceneIndex: index,
        continue: cont,
        start: timestamp,
      })
    );
  } else {
    history.push(`/scenes/${scene.id}?t=${timestamp}`);
  }
}
return (
<div key={scene.id} className="mt-3 search-item">
<div className="row">
@ -224,6 +247,8 @@ export const TaggerScene: React.FC<PropsWithChildren<ITaggerScene>> = ({
video={scene.paths.preview ?? undefined}
isPortrait={isPortrait}
soundActive={false}
vttPath={scene.paths.vtt ?? undefined}
onScrubberClick={onScrubberClick}
/>
{maybeRenderSpriteIcon()}
</Link>

View file

@ -12,22 +12,19 @@
.scene-card-preview {
border-radius: 3px;
color: $text-color;
height: 100px;
margin-bottom: 0;
max-height: 100px;
overflow: hidden;
width: 150px;
&-video {
background-color: #495b68;
}
width: auto;
}
.sprite-button {
bottom: 5px;
filter: drop-shadow(1px 1px 1px #222);
padding: 0;
position: absolute;
right: 5px;
top: 5px;
}
.sub-content {

View file

@ -83,6 +83,17 @@ export function prepareQueryString(
mode: ParseMode,
blacklist: string[]
) {
const regexs = blacklist
.map((b) => {
try {
return new RegExp(b, "gi");
} catch {
// ignore
return null;
}
})
.filter((r) => r !== null) as RegExp[];
if ((mode === "auto" && scene.date && scene.studio) || mode === "metadata") {
let str = [
scene.date,
@ -92,8 +103,8 @@ export function prepareQueryString(
]
.filter((s) => s !== "")
.join(" ");
blacklist.forEach((b) => {
str = str.replace(new RegExp(b, "gi"), " ");
regexs.forEach((re) => {
str = str.replace(re, " ");
});
return str;
}
@ -106,8 +117,9 @@ export function prepareQueryString(
} else if (mode === "dir" && paths.length) {
s = paths[paths.length - 1];
}
blacklist.forEach((b) => {
s = s.replace(new RegExp(b, "gi"), " ");
regexs.forEach((re) => {
s = s.replace(re, " ");
});
s = parseDate(s);
return s.replace(/\./g, " ").replace(/ +/g, " ");

View file

@ -1499,6 +1499,24 @@ export const useSceneMarkerDestroy = () =>
},
});
// Mutation hook that deletes multiple scene markers in one request and keeps
// the Apollo normalized cache consistent afterwards.
export const useSceneMarkersDestroy = (
  input: GQL.SceneMarkersDestroyMutationVariables
) =>
  GQL.useSceneMarkersDestroyMutation({
    variables: input,
    update(cache, result) {
      // Only touch the cache when the server confirmed the deletion.
      if (!result.data?.sceneMarkersDestroy) return;

      // Evict each deleted marker object from the normalized cache by its
      // cache identity.
      for (const id of input.ids) {
        const obj = { __typename: "SceneMarker", id };
        cache.evict({ id: cache.identify(obj) });
      }

      // Invalidate derived fields and cached queries that may have contained
      // the deleted markers, so they are refetched on next use.
      evictTypeFields(cache, sceneMarkerMutationImpactedTypeFields);
      evictQueries(cache, sceneMarkerMutationImpactedQueries);
    },
  });
const galleryMutationImpactedTypeFields = {
Scene: ["galleries"],
Performer: ["gallery_count", "performer_count"],

View file

@ -247,7 +247,7 @@ sceneByURL:
A different stash server can be configured as a scraping source. This action applies only to `performerByName`, `performerByFragment`, and `sceneByFragment` types. This action requires that the top-level `stashServer` field is configured.
`stashServer` contains a single `url` field for the remote stash server. The username and password can be embedded in this string using `username:password@host`.
`stashServer` contains a single `url` field for the remote stash server. The username and password can be embedded in this string using `username:password@host`. Alternatively, the `apiKey` field can be used to authenticate with the remote stash server.
An example stash scrape configuration is below:
@ -260,6 +260,7 @@ performerByFragment:
sceneByFragment:
action: stash
stashServer:
apiKey: <api key>
url: http://stashserver.com:9999
```

View file

@ -44,10 +44,15 @@ export function useTagsEdit(
}
// add the new tag to the new tags value
const newTagIds = tags
.map((t) => t.id)
.concat([result.data.tagCreate.id]);
setFieldValue(newTagIds);
onSetTags(
tags.concat([
{
id: result.data.tagCreate.id,
name: toCreate.name ?? "",
aliases: [],
},
])
);
// remove the tag from the list
const newTagsClone = newTags!.concat();
@ -73,20 +78,26 @@ export function useTagsEdit(
function updateTagsStateFromScraper(
scrapedTags?: Pick<GQL.ScrapedTag, "name" | "stored_id">[]
) {
if (scrapedTags) {
// map tags to their ids and filter out those not found
onSetTags(
scrapedTags.map((p) => {
return {
id: p.stored_id!,
name: p.name ?? "",
aliases: [],
};
})
);
setNewTags(scrapedTags.filter((t) => !t.stored_id));
if (!scrapedTags) {
return;
}
// map tags to their ids and filter out those not found
const idTags = scrapedTags.filter(
(t) => t.stored_id !== undefined && t.stored_id !== null
);
const newNewTags = scrapedTags.filter((t) => !t.stored_id);
onSetTags(
idTags.map((p) => {
return {
id: p.stored_id!,
name: p.name ?? "",
aliases: [],
};
})
);
setNewTags(newNewTags);
}
function renderNewTags() {

View file

@ -1,6 +1,9 @@
import { useEffect, useMemo } from "react";
import { useEffect, useMemo, useState } from "react";
const useScript = (urls: string | string[], condition: boolean = true) => {
// array of booleans to track the loading state of each script
const [loadStates, setLoadStates] = useState<boolean[]>();
const useScript = (urls: string | string[], condition?: boolean) => {
const urlArray = useMemo(() => {
if (!Array.isArray(urls)) {
return [urls];
@ -10,12 +13,25 @@ const useScript = (urls: string | string[], condition?: boolean) => {
}, [urls]);
useEffect(() => {
if (condition) {
setLoadStates(urlArray.map(() => false));
}
const scripts = urlArray.map((url) => {
const script = document.createElement("script");
script.src = url;
script.async = false;
script.defer = true;
function onLoad() {
setLoadStates((prev) =>
prev!.map((state, i) => (i === urlArray.indexOf(url) ? true : state))
);
}
script.addEventListener("load", onLoad);
script.addEventListener("error", onLoad); // handle error as well
return script;
});
@ -33,6 +49,12 @@ const useScript = (urls: string | string[], condition?: boolean) => {
}
};
}, [urlArray, condition]);
return (
condition &&
loadStates &&
(loadStates.length === 0 || loadStates.every((state) => state))
);
};
export const useCSS = (urls: string | string[], condition?: boolean) => {

View file

@ -173,6 +173,9 @@
"active_instance": "Active stash-box instance:",
"blacklist_desc": "Blacklist items are excluded from queries. Note that they are regular expressions and also case-insensitive. Certain characters must be escaped with a backslash: {chars_require_escape}",
"blacklist_label": "Blacklist",
"errors": {
"blacklist_duplicate": "Duplicate blacklist item"
},
"mark_organized_desc": "Immediately mark the scene as Organized after the Save button is clicked.",
"mark_organized_label": "Mark as Organized on save",
"query_mode_auto": "Auto",
@ -851,6 +854,11 @@
"only": "Only"
},
"custom": "Custom",
"custom_fields": {
"field": "Field",
"title": "Custom Fields",
"value": "Value"
},
"date": "Date",
"date_format": "YYYY-MM-DD",
"datetime_format": "YYYY-MM-DD HH:MM",
@ -936,7 +944,7 @@
"marker_image_previews": "Marker Animated Image Previews",
"marker_image_previews_tooltip": "Also generate animated (webp) previews, only required when Scene/Marker Wall Preview Type is set to Animated Image. When browsing they use less CPU than the video previews, but are generated in addition to them and are larger files.",
"marker_screenshots": "Marker Screenshots",
"marker_screenshots_tooltip": "Marker static JPG images, only required if Preview Type is set to Static Image.",
"marker_screenshots_tooltip": "Marker static JPG images",
"markers": "Marker Previews",
"markers_tooltip": "20 second videos which begin at the given timecode.",
"override_preview_generation_options": "Override Preview Generation Options",
@ -1032,6 +1040,12 @@
},
"empty_server": "Add some scenes to your server to view recommendations on this page.",
"errors": {
"custom_fields": {
"duplicate_field": "Field name must be unique",
"field_name_length": "Field name must be fewer than 65 characters",
"field_name_required": "Field name is required",
"field_name_whitespace": "Field name cannot have leading or trailing whitespace"
},
"header": "Error",
"image_index_greater_than_zero": "Image index must be greater than 0",
"invalid_javascript_string": "Invalid javascript code: {error}",
@ -1118,7 +1132,8 @@
"last_played_at": "Last Played At",
"library": "Library",
"loading": {
"generic": "Loading…"
"generic": "Loading…",
"plugins": "Loading plugins…"
},
"marker_count": "Marker Count",
"markers": "Markers",

View file

@ -192,6 +192,7 @@ interface ICriterionOptionsParams {
modifierOptions?: CriterionModifier[];
defaultModifier?: CriterionModifier;
options?: Option[];
hidden?: boolean;
makeCriterion: (
o: CriterionOption,
config?: ConfigDataFragment
@ -204,6 +205,10 @@ export class CriterionOption {
public readonly defaultModifier: CriterionModifier;
public readonly options: Option[] | undefined;
public readonly inputType: InputType;
// used for legacy criteria that are not shown in the UI
public readonly hidden: boolean = false;
public readonly makeCriterionFn: (
o: CriterionOption,
config?: ConfigDataFragment
@ -216,6 +221,7 @@ export class CriterionOption {
this.defaultModifier = options.defaultModifier ?? CriterionModifier.Equals;
this.options = options.options;
this.inputType = options.inputType;
this.hidden = options.hidden ?? false;
this.makeCriterionFn = options.makeCriterion;
}
@ -637,7 +643,11 @@ export function createNumberCriterionOption(
}
export class NullNumberCriterionOption extends CriterionOption {
constructor(messageID: string, value: CriterionType) {
constructor(
messageID: string,
value: CriterionType,
makeCriterion?: () => Criterion<CriterionValue>
) {
super({
messageID,
type: value,
@ -653,7 +663,9 @@ export class NullNumberCriterionOption extends CriterionOption {
],
defaultModifier: CriterionModifier.Equals,
inputType: "number",
makeCriterion: () => new NumberCriterion(this),
makeCriterion: makeCriterion
? makeCriterion
: () => new NumberCriterion(this),
});
}
}
@ -780,6 +792,19 @@ export function createDurationCriterionOption(
return new DurationCriterionOption(messageID ?? value, value);
}
export class NullDurationCriterionOption extends NullNumberCriterionOption {
constructor(messageID: string, value: CriterionType) {
super(messageID, value, () => new DurationCriterion(this));
}
}
export function createNullDurationCriterionOption(
value: CriterionType,
messageID?: string
) {
return new NullDurationCriterionOption(messageID ?? value, value);
}
export class DurationCriterion extends Criterion<INumberValue> {
constructor(type: CriterionOption) {
super(type, { value: undefined, value2: undefined });

View file

@ -50,5 +50,6 @@ export const LegacyMoviesCriterionOption = new CriterionOption({
modifierOptions,
defaultModifier,
inputType,
hidden: true,
makeCriterion: () => new GroupsCriterion(GroupsCriterionOption),
});

View file

@ -6,22 +6,25 @@ import { DisplayMode } from "./types";
import {
createDateCriterionOption,
createMandatoryTimestampCriterionOption,
createNullDurationCriterionOption,
} from "./criteria/criterion";
const defaultSortBy = "title";
const sortByOptions = [
"duration",
"title",
"seconds",
"scene_id",
"random",
"scenes_updated_at",
].map(ListFilterOptions.createSortBy);
const displayModeOptions = [DisplayMode.Wall];
const displayModeOptions = [DisplayMode.Grid, DisplayMode.Wall];
const criterionOptions = [
TagsCriterionOption,
MarkersScenesCriterionOption,
SceneTagsCriterionOption,
PerformersCriterionOption,
createNullDurationCriterionOption("duration"),
createMandatoryTimestampCriterionOption("created_at"),
createMandatoryTimestampCriterionOption("updated_at"),
createDateCriterionOption("scene_date"),

View file

@ -1,5 +1,122 @@
import React from "react";
import { PatchFunction } from "./patch";
import { usePlugins } from "./core/StashService";
import { useMemoOnce } from "./hooks/state";
import { uniq } from "lodash-es";
import useScript, { useCSS } from "./hooks/useScript";
import { PluginsQuery } from "./core/generated-graphql";
import { LoadingIndicator } from "./components/Shared/LoadingIndicator";
import { FormattedMessage } from "react-intl";
type PluginList = NonNullable<Required<PluginsQuery["plugins"]>>;
// sort plugins by their dependencies
// Topologically sorts plugins so that every plugin appears after the plugins
// it requires. Required plugins that are not present in the input list are
// tolerated (they simply contribute nothing to the output).
// Throws if the dependency graph contains a cycle.
function sortPlugins(plugins: PluginList) {
  type Node = { id: string; afters: string[] };

  const nodes: Record<string, Node> = {};
  const sorted: PluginList = [];
  const visited: Record<string, boolean> = {};

  // Fetch-or-create the graph node for a plugin id.
  function getNode(id: string): Node {
    if (!nodes[id]) {
      nodes[id] = { id, afters: [] };
    }
    return nodes[id];
  }

  // Build the graph: each dependency records the plugins that must load
  // after it.
  plugins.forEach((plugin) => {
    getNode(plugin.id);
    plugin.requires?.forEach((dep) => {
      const depNode = getNode(dep);
      if (!depNode.afters.includes(plugin.id)) {
        depNode.afters.push(plugin.id);
      }
    });
  });

  // Depth-first visit; prepending each finished node yields an order with
  // dependencies first. `ancestors` tracks the current path for cycle
  // detection.
  function visit(idstr: string, ancestors: string[] = []) {
    if (visited[idstr]) return;

    const { id, afters } = nodes[idstr];
    visited[idstr] = true;
    ancestors.push(id);

    afters.forEach((afterID) => {
      // A dependant that is also an ancestor means the graph is cyclic.
      if (ancestors.indexOf(afterID) >= 0)
        throw new Error("closed chain : " + afterID + " is in " + id);
      visit(afterID.toString(), ancestors.slice());
    });

    // Nodes created purely as missing dependencies have no matching plugin.
    const plugin = plugins.find((p) => p.id === id);
    if (plugin) {
      sorted.unshift(plugin);
    }
  }

  Object.keys(nodes).forEach((n) => {
    visit(n);
  });

  return sorted;
}
// load all plugins and their dependencies
// returns true when all plugins are loaded, regardless of success or failure
function useLoadPlugins() {
  const {
    data: plugins,
    loading: pluginsLoading,
    error: pluginsError,
  } = usePlugins();

  // Only compute the dependency-sorted list once the plugin query has
  // settled (second tuple element is the "ready" flag for useMemoOnce).
  const sortedPlugins = useMemoOnce(() => {
    return [
      sortPlugins(plugins?.plugins ?? []),
      !pluginsLoading && !pluginsError,
    ];
  }, [plugins?.plugins, pluginsLoading, pluginsError]);

  // Deduplicated javascript paths of enabled plugins, in dependency order.
  const pluginJavascripts = useMemoOnce(() => {
    return [
      uniq(
        sortedPlugins
          ?.filter((plugin) => plugin.enabled && plugin.paths.javascript)
          .map((plugin) => plugin.paths.javascript!)
          .flat() ?? []
      ),
      !!sortedPlugins && !pluginsLoading && !pluginsError,
    ];
  }, [sortedPlugins, pluginsLoading, pluginsError]);

  // Deduplicated CSS paths of enabled plugins, in dependency order.
  const pluginCSS = useMemoOnce(() => {
    return [
      uniq(
        sortedPlugins
          ?.filter((plugin) => plugin.enabled && plugin.paths.css)
          .map((plugin) => plugin.paths.css!)
          .flat() ?? []
      ),
      !!sortedPlugins && !pluginsLoading && !pluginsError,
    ];
  }, [sortedPlugins, pluginsLoading, pluginsError]);

  // Inject script tags; useScript reports true once every script has either
  // loaded or errored.
  const pluginJavascriptLoaded = useScript(
    pluginJavascripts ?? [],
    !!pluginJavascripts && !pluginsLoading && !pluginsError
  );
  // CSS load completion is not awaited; stylesheets apply as they arrive.
  useCSS(pluginCSS ?? [], !pluginsLoading && !pluginsError);

  return !pluginsLoading && !!pluginJavascripts && pluginJavascriptLoaded;
}
// Blocks rendering of children until every enabled plugin script has
// finished loading (successfully or not), showing an indicator meanwhile.
export const PluginsLoader: React.FC<React.PropsWithChildren<{}>> = ({
  children,
}) => {
  const loaded = useLoadPlugins();

  return loaded ? (
    <>{children}</>
  ) : (
    <LoadingIndicator message={<FormattedMessage id="loading.plugins" />} />
  );
};
export const PluginRoutes: React.FC<React.PropsWithChildren<{}>> =
PatchFunction("PluginRoutes", (props: React.PropsWithChildren<{}>) => {

View file

@ -30,6 +30,8 @@ import { PhashCriterion } from "src/models/list-filter/criteria/phash";
import { ILabeledId } from "src/models/list-filter/types";
import { IntlShape } from "react-intl";
import { galleryTitle } from "src/core/galleries";
import { MarkersScenesCriterion } from "src/models/list-filter/criteria/scenes";
import { objectTitle } from "src/core/files";
function addExtraCriteria(
dest: Criterion<CriterionValue>[],
@ -129,6 +131,20 @@ const makePerformerGroupsUrl = (
return `/groups?${filter.makeQueryParameters()}`;
};
// Builds a link to the scene markers page pre-filtered to the given
// performer; returns "#" when the performer has no id.
const makePerformerSceneMarkersUrl = (
  performer: Partial<GQL.PerformerDataFragment>
) => {
  if (!performer.id) return "#";

  const criterion = new PerformersCriterion();
  criterion.value.items = [
    { id: performer.id, label: performer.name || `Performer ${performer.id}` },
  ];

  const filter = new ListFilterModel(GQL.FilterMode.SceneMarkers, undefined);
  filter.criteria.push(criterion);

  return `/scenes/markers?${filter.makeQueryParameters()}`;
};
const makePerformersCountryUrl = (
performer: Partial<GQL.PerformerDataFragment>
) => {
@ -429,6 +445,15 @@ const makeSubGroupsUrl = (group: INamedObject) => {
return `/groups?${filter.makeQueryParameters()}`;
};
// Builds a link to the scene markers page pre-filtered to the given scene;
// returns "#" when the scene has no id.
const makeSceneMarkersSceneUrl = (scene: GQL.SceneMarkerSceneDataFragment) => {
  if (!scene.id) return "#";

  const criterion = new MarkersScenesCriterion();
  criterion.value = [{ id: scene.id, label: objectTitle(scene) }];

  const filter = new ListFilterModel(GQL.FilterMode.SceneMarkers, undefined);
  filter.criteria.push(criterion);

  return `/scenes/markers?${filter.makeQueryParameters()}`;
};
export function handleUnsavedChanges(
intl: IntlShape,
basepath: string,
@ -449,6 +474,7 @@ const NavUtils = {
makePerformerImagesUrl,
makePerformerGalleriesUrl,
makePerformerGroupsUrl,
makePerformerSceneMarkersUrl,
makePerformersCountryUrl,
makeStudioScenesUrl,
makeStudioImagesUrl,
@ -477,6 +503,7 @@ const NavUtils = {
makeDirectorGroupsUrl,
makeContainingGroupsUrl,
makeSubGroupsUrl,
makeSceneMarkersSceneUrl,
};
export default NavUtils;

View file

@ -3498,9 +3498,9 @@ cross-inspect@1.0.0:
tslib "^2.4.0"
cross-spawn@^7.0.2:
version "7.0.3"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
version "7.0.6"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f"
integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==
dependencies:
path-key "^3.1.0"
shebang-command "^2.0.0"