Performer custom fields (#5487)

* Backend changes
* Show custom field values
* Add custom fields table input
* Add custom field filtering
* Add unit tests
* Include custom fields in import/export
* Anonymise performer custom fields
* Move json.Number handler functions to api
* Handle json.Number conversion in api
WithoutPants 2024-12-03 13:49:55 +11:00 committed by GitHub
parent a0e09bbe5c
commit 8c8be22fe4
56 changed files with 2158 additions and 277 deletions


@ -91,6 +91,12 @@ input StashIDCriterionInput {
modifier: CriterionModifier!
}
input CustomFieldCriterionInput {
field: String!
value: [Any!]
modifier: CriterionModifier!
}
input PerformerFilterType {
AND: PerformerFilterType
OR: PerformerFilterType
@ -182,6 +188,8 @@ input PerformerFilterType {
created_at: TimestampCriterionInput
"Filter by last update time"
updated_at: TimestampCriterionInput
custom_fields: [CustomFieldCriterionInput!]
}
input SceneMarkerFilterType {


@ -338,3 +338,10 @@ type SystemStatus {
input MigrateInput {
backupPath: String!
}
input CustomFieldsInput {
"If populated, the entire custom fields map will be replaced with this value"
full: Map
"If populated, only the keys in this map will be updated"
partial: Map
}


@ -58,6 +58,8 @@ type Performer {
updated_at: Time!
groups: [Group!]!
movies: [Movie!]! @deprecated(reason: "use groups instead")
custom_fields: Map!
}
input PerformerCreateInput {
@ -93,6 +95,8 @@ input PerformerCreateInput {
hair_color: String
weight: Int
ignore_auto_tag: Boolean
custom_fields: Map
}
input PerformerUpdateInput {
@ -129,6 +133,8 @@ input PerformerUpdateInput {
hair_color: String
weight: Int
ignore_auto_tag: Boolean
custom_fields: CustomFieldsInput
}
input BulkUpdateStrings {
@ -167,6 +173,8 @@ input BulkPerformerUpdateInput {
hair_color: String
weight: Int
ignore_auto_tag: Boolean
custom_fields: CustomFieldsInput
}
input PerformerDestroyInput {

internal/api/json.go (new file, 36 lines)

@ -0,0 +1,36 @@
package api
import (
"encoding/json"
"strings"
)
// jsonNumberToNumber converts a json.Number to either a float64 or int64.
func jsonNumberToNumber(n json.Number) interface{} {
if strings.Contains(string(n), ".") {
f, _ := n.Float64()
return f
}
ret, _ := n.Int64()
return ret
}
// convertMapJSONNumbers converts all json.Number values in a map to either float64 or int64.
func convertMapJSONNumbers(m map[string]interface{}) (ret map[string]interface{}) {
if m == nil {
return nil
}
ret = make(map[string]interface{})
for k, v := range m {
if n, ok := v.(json.Number); ok {
ret[k] = jsonNumberToNumber(n)
} else if mm, ok := v.(map[string]interface{}); ok {
ret[k] = convertMapJSONNumbers(mm)
} else {
ret[k] = v
}
}
return ret
}
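
These helpers exist because GraphQL Map arguments can arrive carrying json.Number values (for example when a decoder is configured with UseNumber), and those need to be normalised before use. A minimal, self-contained sketch of where such values come from; the field names are invented for illustration:

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

func main() {
	// A decoder configured with UseNumber() yields json.Number values,
	// which is exactly the shape jsonNumberToNumber/convertMapJSONNumbers normalise.
	dec := json.NewDecoder(strings.NewReader(`{"height_cm": 180, "rating": 4.5}`))
	dec.UseNumber()

	var m map[string]interface{}
	if err := dec.Decode(&m); err != nil {
		panic(err)
	}

	fmt.Printf("%T %T\n", m["height_cm"], m["rating"]) // json.Number json.Number
}

After running such a map through convertMapJSONNumbers, "height_cm" would become an int64 and "rating" a float64.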

internal/api/json_test.go (new file, 60 lines)

@ -0,0 +1,60 @@
package api
import (
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
)
func TestConvertMapJSONNumbers(t *testing.T) {
tests := []struct {
name string
input map[string]interface{}
expected map[string]interface{}
}{
{
name: "Convert JSON numbers to numbers",
input: map[string]interface{}{
"int": json.Number("12"),
"float": json.Number("12.34"),
"string": "foo",
},
expected: map[string]interface{}{
"int": int64(12),
"float": 12.34,
"string": "foo",
},
},
{
name: "Convert JSON numbers to numbers in nested maps",
input: map[string]interface{}{
"foo": map[string]interface{}{
"int": json.Number("56"),
"float": json.Number("56.78"),
"nested-string": "bar",
},
"int": json.Number("12"),
"float": json.Number("12.34"),
"string": "foo",
},
expected: map[string]interface{}{
"foo": map[string]interface{}{
"int": int64(56),
"float": 56.78,
"nested-string": "bar",
},
"int": int64(12),
"float": 12.34,
"string": "foo",
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := convertMapJSONNumbers(tt.input)
assert.Equal(t, tt.expected, result)
})
}
}


@ -0,0 +1,221 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/models"
)
// CustomFieldsLoaderConfig captures the config to create a new CustomFieldsLoader
type CustomFieldsLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []int) ([]models.CustomFieldMap, []error)
// Wait is how long wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit
MaxBatch int
}
// NewCustomFieldsLoader creates a new CustomFieldsLoader given a fetch, wait, and maxBatch
func NewCustomFieldsLoader(config CustomFieldsLoaderConfig) *CustomFieldsLoader {
return &CustomFieldsLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// CustomFieldsLoader batches and caches requests
type CustomFieldsLoader struct {
// this method provides the data for the loader
fetch func(keys []int) ([]models.CustomFieldMap, []error)
// how long to done before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[int]models.CustomFieldMap
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *customFieldsLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type customFieldsLoaderBatch struct {
keys []int
data []models.CustomFieldMap
error []error
closing bool
done chan struct{}
}
// Load a CustomFieldMap by key, batching and caching will be applied automatically
func (l *CustomFieldsLoader) Load(key int) (models.CustomFieldMap, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a CustomFieldMap.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *CustomFieldsLoader) LoadThunk(key int) func() (models.CustomFieldMap, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() (models.CustomFieldMap, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &customFieldsLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() (models.CustomFieldMap, error) {
<-batch.done
var data models.CustomFieldMap
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// its convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *CustomFieldsLoader) LoadAll(keys []int) ([]models.CustomFieldMap, []error) {
results := make([]func() (models.CustomFieldMap, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
customFieldMaps := make([]models.CustomFieldMap, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
customFieldMaps[i], errors[i] = thunk()
}
return customFieldMaps, errors
}
// LoadAllThunk returns a function that when called will block waiting for a CustomFieldMaps.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *CustomFieldsLoader) LoadAllThunk(keys []int) func() ([]models.CustomFieldMap, []error) {
results := make([]func() (models.CustomFieldMap, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([]models.CustomFieldMap, []error) {
customFieldMaps := make([]models.CustomFieldMap, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
customFieldMaps[i], errors[i] = thunk()
}
return customFieldMaps, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *CustomFieldsLoader) Prime(key int, value models.CustomFieldMap) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
l.unsafeSet(key, value)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *CustomFieldsLoader) Clear(key int) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *CustomFieldsLoader) unsafeSet(key int, value models.CustomFieldMap) {
if l.cache == nil {
l.cache = map[int]models.CustomFieldMap{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *customFieldsLoaderBatch) keyIndex(l *CustomFieldsLoader, key int) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *customFieldsLoaderBatch) startTimer(l *CustomFieldsLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *customFieldsLoaderBatch) end(l *CustomFieldsLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}
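
For orientation, a hedged usage sketch of the generated loader. The Fetch function here is a stand-in for the real bulk query that the middleware below wires to Performer.GetCustomFieldsBulk; the import paths are assumed, and since the loaders package is internal this would have to live inside the stash module:

package main

import (
	"fmt"
	"time"

	"github.com/stashapp/stash/internal/api/loaders"
	"github.com/stashapp/stash/pkg/models"
)

func main() {
	loader := loaders.NewCustomFieldsLoader(loaders.CustomFieldsLoaderConfig{
		// Stand-in fetch: the real loader delegates to Performer.GetCustomFieldsBulk.
		Fetch: func(keys []int) ([]models.CustomFieldMap, []error) {
			out := make([]models.CustomFieldMap, len(keys))
			for i, k := range keys {
				out[i] = models.CustomFieldMap{"performer_id": k}
			}
			return out, nil
		},
		Wait:     2 * time.Millisecond, // collect keys for up to 2ms before fetching
		MaxBatch: 100,                  // or until 100 keys are queued
	})

	// Loads issued within the wait window share a single Fetch call.
	fields, err := loader.Load(1)
	fmt.Println(fields, err)
}

Concurrent Load calls made within the Wait window (or until MaxBatch keys are queued) are coalesced into one Fetch call, and results are cached per key for the lifetime of the request.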


@ -13,6 +13,7 @@
//go:generate go run github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden CustomFieldsLoader int github.com/stashapp/stash/pkg/models.CustomFieldMap
//go:generate go run github.com/vektah/dataloaden SceneOCountLoader int int
//go:generate go run github.com/vektah/dataloaden ScenePlayCountLoader int int
//go:generate go run github.com/vektah/dataloaden SceneOHistoryLoader int []time.Time
@ -51,13 +52,16 @@ type Loaders struct {
ImageFiles *ImageFileIDsLoader
GalleryFiles *GalleryFileIDsLoader
GalleryByID *GalleryLoader
ImageByID *ImageLoader
- PerformerByID *PerformerLoader
- StudioByID *StudioLoader
- TagByID *TagLoader
- GroupByID *GroupLoader
- FileByID *FileLoader
+ PerformerByID *PerformerLoader
+ PerformerCustomFields *CustomFieldsLoader
+ StudioByID *StudioLoader
+ TagByID *TagLoader
+ GroupByID *GroupLoader
+ FileByID *FileLoader
}
type Middleware struct {
@ -88,6 +92,11 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch,
fetch: m.fetchPerformers(ctx),
},
PerformerCustomFields: &CustomFieldsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchPerformerCustomFields(ctx),
},
StudioByID: &StudioLoader{
wait: wait,
maxBatch: maxBatch,
@ -214,6 +223,18 @@ func (m Middleware) fetchPerformers(ctx context.Context) func(keys []int) ([]*mo
}
}
func (m Middleware) fetchPerformerCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) {
return func(keys []int) (ret []models.CustomFieldMap, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Performer.GetCustomFieldsBulk(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchStudios(ctx context.Context) func(keys []int) ([]*models.Studio, []error) {
return func(keys []int) (ret []*models.Studio, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {


@ -268,6 +268,19 @@ func (r *performerResolver) Groups(ctx context.Context, obj *models.Performer) (
return ret, nil
}
func (r *performerResolver) CustomFields(ctx context.Context, obj *models.Performer) (map[string]interface{}, error) {
m, err := loaders.From(ctx).PerformerCustomFields.Load(obj.ID)
if err != nil {
return nil, err
}
if m == nil {
return make(map[string]interface{}), nil
}
return m, nil
}
// deprecated
func (r *performerResolver) Movies(ctx context.Context, obj *models.Performer) (ret []*models.Group, err error) {
return r.Groups(ctx, obj)


@ -645,13 +645,13 @@ func (r *mutationResolver) ConfigureUI(ctx context.Context, input map[string]int
if input != nil {
// #5483 - convert JSON numbers to float64 or int64
- input = utils.ConvertMapJSONNumbers(input)
+ input = convertMapJSONNumbers(input)
c.SetUIConfiguration(input)
}
if partial != nil {
// #5483 - convert JSON numbers to float64 or int64
- partial = utils.ConvertMapJSONNumbers(partial)
+ partial = convertMapJSONNumbers(partial)
// merge partial into existing config
existing := c.GetUIConfiguration()
utils.MergeMaps(existing, partial)
@ -672,9 +672,9 @@ func (r *mutationResolver) ConfigureUISetting(ctx context.Context, key string, v
// #5483 - convert JSON numbers to float64 or int64
if m, ok := value.(map[string]interface{}); ok {
- value = utils.ConvertMapJSONNumbers(m)
+ value = convertMapJSONNumbers(m)
} else if n, ok := value.(json.Number); ok {
- value = utils.JSONNumberToNumber(n)
+ value = jsonNumberToNumber(n)
}
cfg.Set(key, value)
@ -686,7 +686,7 @@ func (r *mutationResolver) ConfigurePlugin(ctx context.Context, pluginID string,
c := config.GetInstance()
// #5483 - convert JSON numbers to float64 or int64
- input = utils.ConvertMapJSONNumbers(input)
+ input = convertMapJSONNumbers(input)
c.SetPluginConfiguration(pluginID, input)
if err := c.Write(); err != nil {


@ -108,7 +108,13 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
return err
}
- err = qb.Create(ctx, &newPerformer)
+ i := &models.CreatePerformerInput{
+ Performer: &newPerformer,
+ // convert json.Numbers to int/float
+ CustomFields: convertMapJSONNumbers(input.CustomFields),
+ }
+ err = qb.Create(ctx, i)
if err != nil {
return err
}
@ -290,6 +296,11 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
return nil, fmt.Errorf("converting tag ids: %w", err)
}
updatedPerformer.CustomFields = input.CustomFields
// convert json.Numbers to int/float
updatedPerformer.CustomFields.Full = convertMapJSONNumbers(updatedPerformer.CustomFields.Full)
updatedPerformer.CustomFields.Partial = convertMapJSONNumbers(updatedPerformer.CustomFields.Partial)
var imageData []byte
imageIncluded := translator.hasField("image")
if input.Image != nil {


@ -91,7 +91,7 @@ func createPerformer(ctx context.Context, pqb models.PerformerWriter) error {
Name: testName,
}
- err := pqb.Create(ctx, &performer)
+ err := pqb.Create(ctx, &models.CreatePerformerInput{Performer: &performer})
if err != nil {
return err
}


@ -41,7 +41,7 @@ func createMissingPerformer(ctx context.Context, endpoint string, w PerformerCre
return nil, err
}
- err = w.Create(ctx, newPerformer)
+ err = w.Create(ctx, &models.CreatePerformerInput{Performer: newPerformer})
if err != nil {
return nil, fmt.Errorf("error creating performer: %w", err)
}


@ -24,8 +24,8 @@ func Test_getPerformerID(t *testing.T) {
db := mocks.NewDatabase()
- db.Performer.On("Create", testCtx, mock.Anything).Run(func(args mock.Arguments) {
- p := args.Get(1).(*models.Performer)
+ db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Run(func(args mock.Arguments) {
+ p := args.Get(1).(*models.CreatePerformerInput)
p.ID = validStoredID
}).Return(nil)
@ -154,14 +154,14 @@ func Test_createMissingPerformer(t *testing.T) {
db := mocks.NewDatabase()
- db.Performer.On("Create", testCtx, mock.MatchedBy(func(p *models.Performer) bool {
+ db.Performer.On("Create", testCtx, mock.MatchedBy(func(p *models.CreatePerformerInput) bool {
return p.Name == validName
})).Run(func(args mock.Arguments) {
- p := args.Get(1).(*models.Performer)
+ p := args.Get(1).(*models.CreatePerformerInput)
p.ID = performerID
}).Return(nil)
- db.Performer.On("Create", testCtx, mock.MatchedBy(func(p *models.Performer) bool {
+ db.Performer.On("Create", testCtx, mock.MatchedBy(func(p *models.CreatePerformerInput) bool {
return p.Name == invalidName
})).Return(errors.New("error creating performer"))


@ -194,7 +194,7 @@ func (t *StashBoxBatchTagTask) processMatchedPerformer(ctx context.Context, p *m
return err
}
- if err := qb.Create(ctx, newPerformer); err != nil {
+ if err := qb.Create(ctx, &models.CreatePerformerInput{Performer: newPerformer}); err != nil {
return err
}


@ -188,7 +188,9 @@ func (i *Importer) createPerformers(ctx context.Context, names []string) ([]*mod
newPerformer := models.NewPerformer()
newPerformer.Name = name
- err := i.PerformerWriter.Create(ctx, &newPerformer)
+ err := i.PerformerWriter.Create(ctx, &models.CreatePerformerInput{
+ Performer: &newPerformer,
+ })
if err != nil {
return nil, err
}


@ -201,8 +201,8 @@ func TestImporterPreImportWithMissingPerformer(t *testing.T) {
}
db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3)
- db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Run(func(args mock.Arguments) {
- performer := args.Get(1).(*models.Performer)
+ db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Run(func(args mock.Arguments) {
+ performer := args.Get(1).(*models.CreatePerformerInput)
performer.ID = existingPerformerID
}).Return(nil)
@ -235,7 +235,7 @@ func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
}
db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
- db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Return(errors.New("Create error"))
+ db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Return(errors.New("Create error"))
err := i.PreImport(testCtx)
assert.NotNil(t, err)


@ -274,7 +274,9 @@ func (i *Importer) createPerformers(ctx context.Context, names []string) ([]*mod
newPerformer := models.NewPerformer()
newPerformer.Name = name
- err := i.PerformerWriter.Create(ctx, &newPerformer)
+ err := i.PerformerWriter.Create(ctx, &models.CreatePerformerInput{
+ Performer: &newPerformer,
+ })
if err != nil {
return nil, err
}


@ -163,8 +163,8 @@ func TestImporterPreImportWithMissingPerformer(t *testing.T) {
}
db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3)
- db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Run(func(args mock.Arguments) {
- performer := args.Get(1).(*models.Performer)
+ db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Run(func(args mock.Arguments) {
+ performer := args.Get(1).(*models.CreatePerformerInput)
performer.ID = existingPerformerID
}).Return(nil)
@ -197,7 +197,7 @@ func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
}
db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
- db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Return(errors.New("Create error"))
+ db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Return(errors.New("Create error"))
err := i.PreImport(testCtx)
assert.NotNil(t, err)


@ -0,0 +1,17 @@
package models
import "context"
type CustomFieldMap map[string]interface{}
type CustomFieldsInput struct {
// If populated, the entire custom fields map will be replaced with this value
Full map[string]interface{} `json:"full"`
// If populated, only the keys in this map will be updated
Partial map[string]interface{} `json:"partial"`
}
type CustomFieldsReader interface {
GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error)
GetCustomFieldsBulk(ctx context.Context, ids []int) ([]CustomFieldMap, error)
}
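
The two update modes differ in scope: Full replaces the stored map wholesale, while Partial touches only the listed keys. A small illustrative sketch (the field names and values are made up):

package main

import (
	"fmt"

	"github.com/stashapp/stash/pkg/models"
)

func main() {
	// Replace every stored custom field with exactly this map.
	full := models.CustomFieldsInput{
		Full: map[string]interface{}{"eye color": "blue", "rank": int64(3)},
	}

	// Update or insert only "rank"; any other stored fields are left untouched.
	partial := models.CustomFieldsInput{
		Partial: map[string]interface{}{"rank": int64(4)},
	}

	fmt.Println(full, partial)
}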


@ -194,3 +194,9 @@ type PhashDistanceCriterionInput struct {
type OrientationCriterionInput struct {
Value []OrientationEnum `json:"value"`
}
type CustomFieldCriterionInput struct {
Field string `json:"field"`
Value []any `json:"value"`
Modifier CriterionModifier `json:"modifier"`
}
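
A hedged sketch of building a performer filter with one of these criteria; the field name and value are invented, and CriterionModifierEquals is one of the modifiers handled by the SQLite filter code later in this change:

package main

import (
	"fmt"

	"github.com/stashapp/stash/pkg/models"
)

func main() {
	// Match performers whose "eye color" custom field equals "blue".
	filter := models.PerformerFilterType{
		CustomFields: []models.CustomFieldCriterionInput{
			{
				Field:    "eye color",
				Value:    []any{"blue"},
				Modifier: models.CriterionModifierEquals,
			},
		},
	}

	fmt.Printf("%+v\n", filter)
}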


@ -65,6 +65,8 @@ type Performer struct {
StashIDs []models.StashID `json:"stash_ids,omitempty"`
IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"`
CustomFields map[string]interface{} `json:"custom_fields,omitempty"`
// deprecated - for import only
URL string `json:"url,omitempty"`
Twitter string `json:"twitter,omitempty"`


@ -80,11 +80,11 @@ func (_m *PerformerReaderWriter) CountByTagID(ctx context.Context, tagID int) (i
}
// Create provides a mock function with given fields: ctx, newPerformer
- func (_m *PerformerReaderWriter) Create(ctx context.Context, newPerformer *models.Performer) error {
+ func (_m *PerformerReaderWriter) Create(ctx context.Context, newPerformer *models.CreatePerformerInput) error {
ret := _m.Called(ctx, newPerformer)
var r0 error
- if rf, ok := ret.Get(0).(func(context.Context, *models.Performer) error); ok {
+ if rf, ok := ret.Get(0).(func(context.Context, *models.CreatePerformerInput) error); ok {
r0 = rf(ctx, newPerformer)
} else {
r0 = ret.Error(0)
@ -314,6 +314,52 @@ func (_m *PerformerReaderWriter) GetAliases(ctx context.Context, relatedID int)
return r0, r1
}
// GetCustomFields provides a mock function with given fields: ctx, id
func (_m *PerformerReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) {
ret := _m.Called(ctx, id)
var r0 map[string]interface{}
if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok {
r0 = rf(ctx, id)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(map[string]interface{})
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
r1 = rf(ctx, id)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids
func (_m *PerformerReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) {
ret := _m.Called(ctx, ids)
var r0 []models.CustomFieldMap
if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok {
r0 = rf(ctx, ids)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]models.CustomFieldMap)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok {
r1 = rf(ctx, ids)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetImage provides a mock function with given fields: ctx, performerID
func (_m *PerformerReaderWriter) GetImage(ctx context.Context, performerID int) ([]byte, error) {
ret := _m.Called(ctx, performerID)
@ -502,11 +548,11 @@ func (_m *PerformerReaderWriter) QueryForAutoTag(ctx context.Context, words []st
}
// Update provides a mock function with given fields: ctx, updatedPerformer
- func (_m *PerformerReaderWriter) Update(ctx context.Context, updatedPerformer *models.Performer) error {
+ func (_m *PerformerReaderWriter) Update(ctx context.Context, updatedPerformer *models.UpdatePerformerInput) error {
ret := _m.Called(ctx, updatedPerformer)
var r0 error
- if rf, ok := ret.Get(0).(func(context.Context, *models.Performer) error); ok {
+ if rf, ok := ret.Get(0).(func(context.Context, *models.UpdatePerformerInput) error); ok {
r0 = rf(ctx, updatedPerformer)
} else {
r0 = ret.Error(0)


@ -39,6 +39,18 @@ type Performer struct {
StashIDs RelatedStashIDs `json:"stash_ids"`
}
type CreatePerformerInput struct {
*Performer
CustomFields map[string]interface{} `json:"custom_fields"`
}
type UpdatePerformerInput struct {
*Performer
CustomFields CustomFieldsInput `json:"custom_fields"`
}
func NewPerformer() Performer {
currentTime := time.Now()
return Performer{
@ -80,6 +92,8 @@ type PerformerPartial struct {
Aliases *UpdateStrings
TagIDs *UpdateIDs
StashIDs *UpdateStashIDs
CustomFields CustomFieldsInput
}
func NewPerformerPartial() PerformerPartial {


@ -198,6 +198,9 @@ type PerformerFilterType struct {
CreatedAt *TimestampCriterionInput `json:"created_at"`
// Filter by updated at
UpdatedAt *TimestampCriterionInput `json:"updated_at"`
// Filter by custom fields
CustomFields []CustomFieldCriterionInput `json:"custom_fields"`
}
type PerformerCreateInput struct {
@ -234,6 +237,8 @@ type PerformerCreateInput struct {
HairColor *string `json:"hair_color"`
Weight *int `json:"weight"`
IgnoreAutoTag *bool `json:"ignore_auto_tag"`
CustomFields map[string]interface{} `json:"custom_fields"`
}
type PerformerUpdateInput struct {
@ -271,4 +276,6 @@ type PerformerUpdateInput struct {
HairColor *string `json:"hair_color"`
Weight *int `json:"weight"`
IgnoreAutoTag *bool `json:"ignore_auto_tag"`
CustomFields CustomFieldsInput `json:"custom_fields"`
}


@ -43,12 +43,12 @@ type PerformerCounter interface {
// PerformerCreator provides methods to create performers.
type PerformerCreator interface {
- Create(ctx context.Context, newPerformer *Performer) error
+ Create(ctx context.Context, newPerformer *CreatePerformerInput) error
}
// PerformerUpdater provides methods to update performers.
type PerformerUpdater interface {
- Update(ctx context.Context, updatedPerformer *Performer) error
+ Update(ctx context.Context, updatedPerformer *UpdatePerformerInput) error
UpdatePartial(ctx context.Context, id int, updatedPerformer PerformerPartial) (*Performer, error)
UpdateImage(ctx context.Context, performerID int, image []byte) error
}
@ -80,6 +80,8 @@ type PerformerReader interface {
TagIDLoader
URLLoader
CustomFieldsReader
All(ctx context.Context) ([]*Performer, error)
GetImage(ctx context.Context, performerID int) ([]byte, error)
HasImage(ctx context.Context, performerID int) (bool, error)

@ -17,6 +17,7 @@ type ImageAliasStashIDGetter interface {
models.AliasLoader
models.StashIDLoader
models.URLLoader
models.CustomFieldsReader
}
// ToJSON converts a Performer object into its JSON equivalent.
@ -87,6 +88,12 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode
newPerformerJSON.StashIDs = performer.StashIDs.List()
var err error
newPerformerJSON.CustomFields, err = reader.GetCustomFields(ctx, performer.ID)
if err != nil {
return nil, fmt.Errorf("getting performer custom fields: %v", err)
}
image, err := reader.GetImage(ctx, performer.ID)
if err != nil {
logger.Errorf("Error getting performer image: %v", err)


@ -15,9 +15,11 @@ import (
)
const (
performerID = 1
noImageID = 2
errImageID = 3
customFieldsID = 4
errCustomFieldsID = 5
)
const (
@ -50,6 +52,11 @@ var (
penisLength = 1.23
circumcisedEnum = models.CircumisedEnumCut
circumcised = circumcisedEnum.String()
emptyCustomFields = make(map[string]interface{})
customFields = map[string]interface{}{
"customField1": "customValue1",
}
)
var imageBytes = []byte("imageBytes")
@ -118,8 +125,8 @@ func createEmptyPerformer(id int) models.Performer {
}
}
- func createFullJSONPerformer(name string, image string) *jsonschema.Performer {
- return &jsonschema.Performer{
+ func createFullJSONPerformer(name string, image string, withCustomFields bool) *jsonschema.Performer {
+ ret := &jsonschema.Performer{
Name: name,
Disambiguation: disambiguation,
URLs: []string{url, twitter, instagram},
@ -152,7 +159,13 @@ func createFullJSONPerformer(name string, image string) *jsonschema.Performer {
Weight: weight,
StashIDs: stashIDs,
IgnoreAutoTag: autoTagIgnored,
CustomFields: emptyCustomFields,
}
if withCustomFields {
ret.CustomFields = customFields
}
return ret
}
func createEmptyJSONPerformer() *jsonschema.Performer {
@ -166,13 +179,15 @@ func createEmptyJSONPerformer() *jsonschema.Performer {
UpdatedAt: json.JSONTime{
Time: updateTime,
},
CustomFields: emptyCustomFields,
}
}
type testScenario struct {
input models.Performer
- expected *jsonschema.Performer
- err bool
+ customFields map[string]interface{}
+ expected *jsonschema.Performer
+ err bool
}
var scenarios []testScenario
@ -181,20 +196,36 @@ func initTestTable() {
scenarios = []testScenario{
{
*createFullPerformer(performerID, performerName),
- createFullJSONPerformer(performerName, image),
+ emptyCustomFields,
+ createFullJSONPerformer(performerName, image, false),
+ false,
+ },
+ {
+ *createFullPerformer(customFieldsID, performerName),
+ customFields,
+ createFullJSONPerformer(performerName, image, true),
false,
},
{
createEmptyPerformer(noImageID),
+ emptyCustomFields,
createEmptyJSONPerformer(),
false,
},
{
*createFullPerformer(errImageID, performerName),
- createFullJSONPerformer(performerName, ""),
+ emptyCustomFields,
+ createFullJSONPerformer(performerName, "", false),
// failure to get image should not cause an error
false,
},
+ {
+ *createFullPerformer(errCustomFieldsID, performerName),
+ customFields,
+ nil,
+ // failure to get custom fields should cause an error
+ true,
+ },
}
}
@ -204,11 +235,19 @@ func TestToJSON(t *testing.T) {
db := mocks.NewDatabase()
imageErr := errors.New("error getting image")
customFieldsErr := errors.New("error getting custom fields")
db.Performer.On("GetImage", testCtx, performerID).Return(imageBytes, nil).Once()
db.Performer.On("GetImage", testCtx, customFieldsID).Return(imageBytes, nil).Once()
db.Performer.On("GetImage", testCtx, noImageID).Return(nil, nil).Once()
db.Performer.On("GetImage", testCtx, errImageID).Return(nil, imageErr).Once()
db.Performer.On("GetCustomFields", testCtx, performerID).Return(emptyCustomFields, nil).Once()
db.Performer.On("GetCustomFields", testCtx, customFieldsID).Return(customFields, nil).Once()
db.Performer.On("GetCustomFields", testCtx, noImageID).Return(emptyCustomFields, nil).Once()
db.Performer.On("GetCustomFields", testCtx, errImageID).Return(emptyCustomFields, nil).Once()
db.Performer.On("GetCustomFields", testCtx, errCustomFieldsID).Return(nil, customFieldsErr).Once()
for i, s := range scenarios {
tag := s.input
json, err := ToJSON(testCtx, db.Performer, &tag)


@ -25,13 +25,15 @@ type Importer struct {
Input jsonschema.Performer
MissingRefBehaviour models.ImportMissingRefEnum
ID int
performer models.Performer
- imageData []byte
+ customFields models.CustomFieldMap
+ imageData []byte
}
func (i *Importer) PreImport(ctx context.Context) error {
i.performer = performerJSONToPerformer(i.Input)
i.customFields = i.Input.CustomFields
if err := i.populateTags(ctx); err != nil {
return err
@ -165,7 +167,10 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
}
func (i *Importer) Create(ctx context.Context) (*int, error) {
- err := i.ReaderWriter.Create(ctx, &i.performer)
+ err := i.ReaderWriter.Create(ctx, &models.CreatePerformerInput{
+ Performer: &i.performer,
+ CustomFields: i.customFields,
+ })
if err != nil {
return nil, fmt.Errorf("error creating performer: %v", err)
}
@ -175,9 +180,13 @@ func (i *Importer) Create(ctx context.Context) (*int, error) {
}
func (i *Importer) Update(ctx context.Context, id int) error {
- performer := i.performer
- performer.ID = id
- err := i.ReaderWriter.Update(ctx, &performer)
+ i.performer.ID = id
+ err := i.ReaderWriter.Update(ctx, &models.UpdatePerformerInput{
+ Performer: &i.performer,
+ CustomFields: models.CustomFieldsInput{
+ Full: i.customFields,
+ },
+ })
if err != nil {
return fmt.Errorf("error updating existing performer: %v", err)
}


@ -53,13 +53,14 @@ func TestImporterPreImport(t *testing.T) {
assert.NotNil(t, err)
- i.Input = *createFullJSONPerformer(performerName, image)
+ i.Input = *createFullJSONPerformer(performerName, image, true)
err = i.PreImport(testCtx)
assert.Nil(t, err)
expectedPerformer := *createFullPerformer(0, performerName)
assert.Equal(t, expectedPerformer, i.performer)
assert.Equal(t, models.CustomFieldMap(customFields), i.customFields)
}
func TestImporterPreImportWithTag(t *testing.T) {
@ -234,10 +235,18 @@ func TestCreate(t *testing.T) {
Name: performerName,
}
performerInput := models.CreatePerformerInput{
Performer: &performer,
}
performerErr := models.Performer{
Name: performerNameErr,
}
performerErrInput := models.CreatePerformerInput{
Performer: &performerErr,
}
i := Importer{
ReaderWriter: db.Performer,
TagWriter: db.Tag,
@ -245,11 +254,11 @@ func TestCreate(t *testing.T) {
}
errCreate := errors.New("Create error")
- db.Performer.On("Create", testCtx, &performer).Run(func(args mock.Arguments) {
- arg := args.Get(1).(*models.Performer)
+ db.Performer.On("Create", testCtx, &performerInput).Run(func(args mock.Arguments) {
+ arg := args.Get(1).(*models.CreatePerformerInput)
arg.ID = performerID
}).Return(nil).Once()
- db.Performer.On("Create", testCtx, &performerErr).Return(errCreate).Once()
+ db.Performer.On("Create", testCtx, &performerErrInput).Return(errCreate).Once()
id, err := i.Create(testCtx)
assert.Equal(t, performerID, *id)
@ -284,7 +293,10 @@ func TestUpdate(t *testing.T) {
// id needs to be set for the mock input
performer.ID = performerID
- db.Performer.On("Update", testCtx, &performer).Return(nil).Once()
+ performerInput := models.UpdatePerformerInput{
+ Performer: &performer,
+ }
+ db.Performer.On("Update", testCtx, &performerInput).Return(nil).Once()
err := i.Update(testCtx, performerID)
assert.Nil(t, err)
@ -293,7 +305,10 @@ func TestUpdate(t *testing.T) {
// need to set id separately
performerErr.ID = errImageID
- db.Performer.On("Update", testCtx, &performerErr).Return(errUpdate).Once()
+ performerErrInput := models.UpdatePerformerInput{
+ Performer: &performerErr,
+ }
+ db.Performer.On("Update", testCtx, &performerErrInput).Return(errUpdate).Once()
err = i.Update(testCtx, errImageID)
assert.NotNil(t, err)


@ -325,7 +325,9 @@ func (i *Importer) createPerformers(ctx context.Context, names []string) ([]*mod
newPerformer := models.NewPerformer()
newPerformer.Name = name
- err := i.PerformerWriter.Create(ctx, &newPerformer)
+ err := i.PerformerWriter.Create(ctx, &models.CreatePerformerInput{
+ Performer: &newPerformer,
+ })
if err != nil {
return nil, err
}


@ -327,8 +327,8 @@ func TestImporterPreImportWithMissingPerformer(t *testing.T) {
}
db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3)
- db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Run(func(args mock.Arguments) {
- p := args.Get(1).(*models.Performer)
+ db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Run(func(args mock.Arguments) {
+ p := args.Get(1).(*models.CreatePerformerInput)
p.ID = existingPerformerID
}).Return(nil)
@ -361,7 +361,7 @@ func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
}
db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
- db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Return(errors.New("Create error"))
+ db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Return(errors.New("Create error"))
err := i.PreImport(testCtx)
assert.NotNil(t, err)


@ -600,6 +600,10 @@ func (db *Anonymiser) anonymisePerformers(ctx context.Context) error {
return err
}
if err := db.anonymiseCustomFields(ctx, goqu.T(performersCustomFieldsTable.GetTable()), "performer_id"); err != nil {
return err
}
return nil
}
@ -1050,3 +1054,73 @@ func (db *Anonymiser) obfuscateString(in string, dict string) string {
return out.String()
}
func (db *Anonymiser) anonymiseCustomFields(ctx context.Context, table exp.IdentifierExpression, idColumn string) error {
lastID := 0
lastField := ""
total := 0
const logEvery = 10000
for gotSome := true; gotSome; {
if err := txn.WithTxn(ctx, db, func(ctx context.Context) error {
query := dialect.From(table).Select(
table.Col(idColumn),
table.Col("field"),
table.Col("value"),
).Where(
goqu.L("("+idColumn+", field)").Gt(goqu.L("(?, ?)", lastID, lastField)),
).Order(
table.Col(idColumn).Asc(), table.Col("field").Asc(),
).Limit(1000)
gotSome = false
const single = false
return queryFunc(ctx, query, single, func(rows *sqlx.Rows) error {
var (
id int
field string
value string
)
if err := rows.Scan(
&id,
&field,
&value,
); err != nil {
return err
}
set := goqu.Record{}
set["field"] = db.obfuscateString(field, letters)
set["value"] = db.obfuscateString(value, letters)
if len(set) > 0 {
stmt := dialect.Update(table).Set(set).Where(
table.Col(idColumn).Eq(id),
table.Col("field").Eq(field),
)
if _, err := exec(ctx, stmt); err != nil {
return fmt.Errorf("anonymising %s: %w", table.GetTable(), err)
}
}
lastID = id
lastField = field
gotSome = true
total++
if total%logEvery == 0 {
logger.Infof("Anonymised %d %s custom fields", total, table.GetTable())
}
return nil
})
}); err != nil {
return err
}
}
return nil
}

pkg/sqlite/custom_fields.go (new file, 308 lines)

@ -0,0 +1,308 @@
package sqlite
import (
"context"
"fmt"
"regexp"
"strings"
"github.com/doug-martin/goqu/v9"
"github.com/doug-martin/goqu/v9/exp"
"github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/models"
)
const maxCustomFieldNameLength = 64
type customFieldsStore struct {
table exp.IdentifierExpression
fk exp.IdentifierExpression
}
func (s *customFieldsStore) deleteForID(ctx context.Context, id int) error {
table := s.table
q := dialect.Delete(table).Where(s.fk.Eq(id))
_, err := exec(ctx, q)
if err != nil {
return fmt.Errorf("deleting from %s: %w", s.table.GetTable(), err)
}
return nil
}
func (s *customFieldsStore) SetCustomFields(ctx context.Context, id int, values models.CustomFieldsInput) error {
var partial bool
var valMap map[string]interface{}
switch {
case values.Full != nil:
partial = false
valMap = values.Full
case values.Partial != nil:
partial = true
valMap = values.Partial
default:
return nil
}
if err := s.validateCustomFields(valMap); err != nil {
return err
}
return s.setCustomFields(ctx, id, valMap, partial)
}
func (s *customFieldsStore) validateCustomFields(values map[string]interface{}) error {
// ensure that custom field names are valid
// no leading or trailing whitespace, no empty strings
for k := range values {
if err := s.validateCustomFieldName(k); err != nil {
return fmt.Errorf("custom field name %q: %w", k, err)
}
}
return nil
}
func (s *customFieldsStore) validateCustomFieldName(fieldName string) error {
// ensure that custom field names are valid
// no leading or trailing whitespace, no empty strings
if strings.TrimSpace(fieldName) == "" {
return fmt.Errorf("custom field name cannot be empty")
}
if fieldName != strings.TrimSpace(fieldName) {
return fmt.Errorf("custom field name cannot have leading or trailing whitespace")
}
if len(fieldName) > maxCustomFieldNameLength {
return fmt.Errorf("custom field name must be less than %d characters", maxCustomFieldNameLength+1)
}
return nil
}
func getSQLValueFromCustomFieldInput(input interface{}) (interface{}, error) {
switch v := input.(type) {
case []interface{}, map[string]interface{}:
// TODO - in future it would be nice to convert to a JSON string
// however, we would need some way to differentiate between a JSON string and a regular string
// for now, we will not support objects and arrays
return nil, fmt.Errorf("unsupported custom field value type: %T", input)
default:
return v, nil
}
}
func (s *customFieldsStore) sqlValueToValue(value interface{}) interface{} {
// TODO - if we ever support objects and arrays we will need to add support here
return value
}
func (s *customFieldsStore) setCustomFields(ctx context.Context, id int, values map[string]interface{}, partial bool) error {
if !partial {
// delete existing custom fields
if err := s.deleteForID(ctx, id); err != nil {
return err
}
}
if len(values) == 0 {
return nil
}
conflictKey := s.fk.GetCol().(string) + ", field"
// upsert new custom fields
q := dialect.Insert(s.table).Prepared(true).Cols(s.fk, "field", "value").
OnConflict(goqu.DoUpdate(conflictKey, goqu.Record{"value": goqu.I("excluded.value")}))
r := make([]interface{}, len(values))
var i int
for key, value := range values {
v, err := getSQLValueFromCustomFieldInput(value)
if err != nil {
return fmt.Errorf("getting SQL value for field %q: %w", key, err)
}
r[i] = goqu.Record{"field": key, "value": v, s.fk.GetCol().(string): id}
i++
}
if _, err := exec(ctx, q.Rows(r...)); err != nil {
return fmt.Errorf("inserting custom fields: %w", err)
}
return nil
}
func (s *customFieldsStore) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) {
q := dialect.Select("field", "value").From(s.table).Where(s.fk.Eq(id))
const single = false
ret := make(map[string]interface{})
err := queryFunc(ctx, q, single, func(rows *sqlx.Rows) error {
var field string
var value interface{}
if err := rows.Scan(&field, &value); err != nil {
return fmt.Errorf("scanning custom fields: %w", err)
}
ret[field] = s.sqlValueToValue(value)
return nil
})
if err != nil {
return nil, fmt.Errorf("getting custom fields: %w", err)
}
return ret, nil
}
func (s *customFieldsStore) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) {
q := dialect.Select(s.fk.As("id"), "field", "value").From(s.table).Where(s.fk.In(ids))
const single = false
ret := make([]models.CustomFieldMap, len(ids))
idi := make(map[int]int, len(ids))
for i, id := range ids {
idi[id] = i
}
err := queryFunc(ctx, q, single, func(rows *sqlx.Rows) error {
var id int
var field string
var value interface{}
if err := rows.Scan(&id, &field, &value); err != nil {
return fmt.Errorf("scanning custom fields: %w", err)
}
i := idi[id]
m := ret[i]
if m == nil {
m = make(map[string]interface{})
ret[i] = m
}
m[field] = s.sqlValueToValue(value)
return nil
})
if err != nil {
return nil, fmt.Errorf("getting custom fields: %w", err)
}
return ret, nil
}
type customFieldsFilterHandler struct {
table string
fkCol string
c []models.CustomFieldCriterionInput
idCol string
}
func (h *customFieldsFilterHandler) innerJoin(f *filterBuilder, as string, field string) {
joinOn := fmt.Sprintf("%s = %s.%s AND %s.field = ?", h.idCol, as, h.fkCol, as)
f.addInnerJoin(h.table, as, joinOn, field)
}
func (h *customFieldsFilterHandler) leftJoin(f *filterBuilder, as string, field string) {
joinOn := fmt.Sprintf("%s = %s.%s AND %s.field = ?", h.idCol, as, h.fkCol, as)
f.addLeftJoin(h.table, as, joinOn, field)
}
func (h *customFieldsFilterHandler) handleCriterion(f *filterBuilder, joinAs string, cc models.CustomFieldCriterionInput) {
// convert values
cv := make([]interface{}, len(cc.Value))
for i, v := range cc.Value {
var err error
cv[i], err = getSQLValueFromCustomFieldInput(v)
if err != nil {
f.setError(err)
return
}
}
switch cc.Modifier {
case models.CriterionModifierEquals:
h.innerJoin(f, joinAs, cc.Field)
f.addWhere(fmt.Sprintf("%[1]s.value IN %s", joinAs, getInBinding(len(cv))), cv...)
case models.CriterionModifierNotEquals:
h.innerJoin(f, joinAs, cc.Field)
f.addWhere(fmt.Sprintf("%[1]s.value NOT IN %s", joinAs, getInBinding(len(cv))), cv...)
case models.CriterionModifierIncludes:
clauses := make([]sqlClause, len(cv))
for i, v := range cv {
clauses[i] = makeClause(fmt.Sprintf("%s.value LIKE ?", joinAs), fmt.Sprintf("%%%v%%", v))
}
h.innerJoin(f, joinAs, cc.Field)
f.whereClauses = append(f.whereClauses, clauses...)
case models.CriterionModifierExcludes:
for _, v := range cv {
f.addWhere(fmt.Sprintf("%[1]s.value NOT LIKE ?", joinAs), fmt.Sprintf("%%%v%%", v))
}
h.leftJoin(f, joinAs, cc.Field)
case models.CriterionModifierMatchesRegex:
for _, v := range cv {
vs, ok := v.(string)
if !ok {
f.setError(fmt.Errorf("unsupported custom field criterion value type: %T", v))
}
if _, err := regexp.Compile(vs); err != nil {
f.setError(err)
return
}
f.addWhere(fmt.Sprintf("(%s.value regexp ?)", joinAs), v)
}
h.innerJoin(f, joinAs, cc.Field)
case models.CriterionModifierNotMatchesRegex:
for _, v := range cv {
vs, ok := v.(string)
if !ok {
f.setError(fmt.Errorf("unsupported custom field criterion value type: %T", v))
}
if _, err := regexp.Compile(vs); err != nil {
f.setError(err)
return
}
f.addWhere(fmt.Sprintf("(%s.value IS NULL OR %[1]s.value NOT regexp ?)", joinAs), v)
}
h.leftJoin(f, joinAs, cc.Field)
case models.CriterionModifierIsNull:
h.leftJoin(f, joinAs, cc.Field)
f.addWhere(fmt.Sprintf("%s.value IS NULL OR TRIM(%[1]s.value) = ''", joinAs))
case models.CriterionModifierNotNull:
h.innerJoin(f, joinAs, cc.Field)
f.addWhere(fmt.Sprintf("TRIM(%[1]s.value) != ''", joinAs))
case models.CriterionModifierBetween:
if len(cv) != 2 {
f.setError(fmt.Errorf("expected 2 values for custom field criterion modifier BETWEEN, got %d", len(cv)))
return
}
h.innerJoin(f, joinAs, cc.Field)
f.addWhere(fmt.Sprintf("%s.value BETWEEN ? AND ?", joinAs), cv[0], cv[1])
case models.CriterionModifierNotBetween:
h.innerJoin(f, joinAs, cc.Field)
f.addWhere(fmt.Sprintf("%s.value NOT BETWEEN ? AND ?", joinAs), cv[0], cv[1])
case models.CriterionModifierLessThan:
if len(cv) != 1 {
f.setError(fmt.Errorf("expected 1 value for custom field criterion modifier LESS_THAN, got %d", len(cv)))
return
}
h.innerJoin(f, joinAs, cc.Field)
f.addWhere(fmt.Sprintf("%s.value < ?", joinAs), cv[0])
case models.CriterionModifierGreaterThan:
if len(cv) != 1 {
f.setError(fmt.Errorf("expected 1 value for custom field criterion modifier LESS_THAN, got %d", len(cv)))
return
}
h.innerJoin(f, joinAs, cc.Field)
f.addWhere(fmt.Sprintf("%s.value > ?", joinAs), cv[0])
default:
f.setError(fmt.Errorf("unsupported custom field criterion modifier: %s", cc.Modifier))
}
}
func (h *customFieldsFilterHandler) handle(ctx context.Context, f *filterBuilder) {
if len(h.c) == 0 {
return
}
for i, cc := range h.c {
join := fmt.Sprintf("custom_fields_%d", i)
h.handleCriterion(f, join, cc)
}
}
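
To tie the store methods together, a hedged usage sketch. It assumes the pkg/sqlite import path, a *PerformerStore like the one extended at the end of this change (customFieldsStore is embedded, so its methods are promoted), and a write transaction already open on ctx, which is what the integration test below arranges with its rollback-transaction helper:

package example

import (
	"context"
	"fmt"

	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/sqlite"
)

// updateRank is a sketch only: SetCustomFields and GetCustomFields are the
// promoted customFieldsStore methods, and ctx must already carry an open
// write transaction for the store's exec/query helpers to use.
func updateRank(ctx context.Context, store *sqlite.PerformerStore, performerID int) error {
	// A Partial input upserts just this key on (performer_id, field);
	// a Full input would first delete the performer's existing rows.
	input := models.CustomFieldsInput{
		Partial: map[string]interface{}{"rank": int64(5)},
	}
	if err := store.SetCustomFields(ctx, performerID, input); err != nil {
		return err
	}

	fields, err := store.GetCustomFields(ctx, performerID)
	if err != nil {
		return err
	}
	fmt.Println(fields)
	return nil
}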


@ -0,0 +1,176 @@
//go:build integration
// +build integration
package sqlite_test
import (
"context"
"testing"
"github.com/stashapp/stash/pkg/models"
"github.com/stretchr/testify/assert"
)
func TestSetCustomFields(t *testing.T) {
performerIdx := performerIdx1WithScene
mergeCustomFields := func(i map[string]interface{}) map[string]interface{} {
m := getPerformerCustomFields(performerIdx)
for k, v := range i {
m[k] = v
}
return m
}
tests := []struct {
name string
input models.CustomFieldsInput
expected map[string]interface{}
wantErr bool
}{
{
"valid full",
models.CustomFieldsInput{
Full: map[string]interface{}{
"key": "value",
},
},
map[string]interface{}{
"key": "value",
},
false,
},
{
"valid partial",
models.CustomFieldsInput{
Partial: map[string]interface{}{
"key": "value",
},
},
mergeCustomFields(map[string]interface{}{
"key": "value",
}),
false,
},
{
"valid partial overwrite",
models.CustomFieldsInput{
Partial: map[string]interface{}{
"real": float64(4.56),
},
},
mergeCustomFields(map[string]interface{}{
"real": float64(4.56),
}),
false,
},
{
"leading space full",
models.CustomFieldsInput{
Full: map[string]interface{}{
" key": "value",
},
},
nil,
true,
},
{
"trailing space full",
models.CustomFieldsInput{
Full: map[string]interface{}{
"key ": "value",
},
},
nil,
true,
},
{
"leading space partial",
models.CustomFieldsInput{
Partial: map[string]interface{}{
" key": "value",
},
},
nil,
true,
},
{
"trailing space partial",
models.CustomFieldsInput{
Partial: map[string]interface{}{
"key ": "value",
},
},
nil,
true,
},
{
"big key full",
models.CustomFieldsInput{
Full: map[string]interface{}{
"12345678901234567890123456789012345678901234567890123456789012345": "value",
},
},
nil,
true,
},
{
"big key partial",
models.CustomFieldsInput{
Partial: map[string]interface{}{
"12345678901234567890123456789012345678901234567890123456789012345": "value",
},
},
nil,
true,
},
{
"empty key full",
models.CustomFieldsInput{
Full: map[string]interface{}{
"": "value",
},
},
nil,
true,
},
{
"empty key partial",
models.CustomFieldsInput{
Partial: map[string]interface{}{
"": "value",
},
},
nil,
true,
},
}
// use performer custom fields store
store := db.Performer
id := performerIDs[performerIdx]
assert := assert.New(t)
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
err := store.SetCustomFields(ctx, id, tt.input)
if (err != nil) != tt.wantErr {
t.Errorf("SetCustomFields() error = %v, wantErr %v", err, tt.wantErr)
return
}
if tt.wantErr {
return
}
actual, err := store.GetCustomFields(ctx, id)
if err != nil {
t.Errorf("GetCustomFields() error = %v", err)
return
}
assert.Equal(tt.expected, actual)
})
}
}
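
The cases above pin down the semantics of `CustomFieldsInput`: `Full` replaces the stored map wholesale, `Partial` merges the given keys into it, and keys are rejected when empty, longer than 64 characters, or padded with leading/trailing whitespace. A hedged usage sketch; the `customFieldsReaderWriter` interface is declared here only to mirror the store methods the test calls (the concrete type is the performer store), and the field keys/values are examples:

```go
package example

import (
	"context"

	"github.com/stashapp/stash/pkg/models"
)

// customFieldsReaderWriter mirrors the methods exercised by the test above.
type customFieldsReaderWriter interface {
	SetCustomFields(ctx context.Context, id int, values models.CustomFieldsInput) error
	GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error)
}

// replaceThenTweak replaces all custom fields for a performer, then updates a
// single key without touching the rest, and reads the merged result back.
func replaceThenTweak(ctx context.Context, store customFieldsReaderWriter, performerID int) (map[string]interface{}, error) {
	// Full: the stored map is replaced wholesale with this value.
	if err := store.SetCustomFields(ctx, performerID, models.CustomFieldsInput{
		Full: map[string]interface{}{"string": "aaa", "int": int64(123)},
	}); err != nil {
		return nil, err
	}

	// Partial: only the listed keys are written; existing keys are kept.
	if err := store.SetCustomFields(ctx, performerID, models.CustomFieldsInput{
		Partial: map[string]interface{}{"int": int64(456)},
	}); err != nil {
		return nil, err
	}

	return store.GetCustomFields(ctx, performerID)
}
```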

View file

@ -34,7 +34,7 @@ const (
cacheSizeEnv = "STASH_SQLITE_CACHE_SIZE" cacheSizeEnv = "STASH_SQLITE_CACHE_SIZE"
) )
var appSchemaVersion uint = 70 var appSchemaVersion uint = 71
//go:embed migrations/*.sql //go:embed migrations/*.sql
var migrationsBox embed.FS var migrationsBox embed.FS

View file

@ -95,6 +95,7 @@ type join struct {
as string
onClause string
joinType string
args []interface{}
}
// equals returns true if the other join alias/table is equal to this one // equals returns true if the other join alias/table is equal to this one
@ -229,12 +230,13 @@ func (f *filterBuilder) not(n *filterBuilder) {
// The AS is omitted if as is empty.
// This method does not add a join if its alias/table name is already
// present in another existing join.
func (f *filterBuilder) addLeftJoin(table, as, onClause string, args ...interface{}) {
newJoin := join{
table: table,
as: as,
onClause: onClause,
joinType: "LEFT",
args: args,
}
f.joins.add(newJoin)
@ -245,12 +247,13 @@ func (f *filterBuilder) addLeftJoin(table, as, onClause string) {
// The AS is omitted if as is empty.
// This method does not add a join if its alias/table name is already
// present in another existing join.
func (f *filterBuilder) addInnerJoin(table, as, onClause string, args ...interface{}) {
newJoin := join{
table: table,
as: as,
onClause: onClause,
joinType: "INNER",
args: args,
}
f.joins.add(newJoin)

View file

@ -0,0 +1,9 @@
CREATE TABLE `performer_custom_fields` (
`performer_id` integer NOT NULL,
`field` varchar(64) NOT NULL,
`value` BLOB NOT NULL,
PRIMARY KEY (`performer_id`, `field`),
foreign key(`performer_id`) references `performers`(`id`) on delete CASCADE
);
CREATE INDEX `index_performer_custom_fields_field_value` ON `performer_custom_fields` (`field`, `value`);

View file

@ -226,6 +226,7 @@ var (
type PerformerStore struct {
blobJoinQueryBuilder
customFieldsStore
tableMgr *table
}
@ -236,6 +237,10 @@ func NewPerformerStore(blobStore *BlobStore) *PerformerStore {
blobStore: blobStore,
joinTable: performerTable,
},
customFieldsStore: customFieldsStore{
table: performersCustomFieldsTable,
fk: performersCustomFieldsTable.Col(performerIDColumn),
},
tableMgr: performerTableMgr,
}
}
@ -248,9 +253,9 @@ func (qb *PerformerStore) selectDataset() *goqu.SelectDataset {
return dialect.From(qb.table()).Select(qb.table().All())
}
func (qb *PerformerStore) Create(ctx context.Context, newObject *models.CreatePerformerInput) error {
var r performerRow
r.fromPerformer(*newObject.Performer)
id, err := qb.tableMgr.insertID(ctx, r)
if err != nil {
@ -282,12 +287,17 @@ func (qb *PerformerStore) Create(ctx context.Context, newObject *models.Performe
}
}
const partial = false
if err := qb.setCustomFields(ctx, id, newObject.CustomFields, partial); err != nil {
return err
}
updated, err := qb.find(ctx, id)
if err != nil {
return fmt.Errorf("finding after create: %w", err)
}
*newObject.Performer = *updated
return nil
}
@ -330,12 +340,16 @@ func (qb *PerformerStore) UpdatePartial(ctx context.Context, id int, partial mod
}
}
if err := qb.SetCustomFields(ctx, id, partial.CustomFields); err != nil {
return nil, err
}
return qb.find(ctx, id)
}
func (qb *PerformerStore) Update(ctx context.Context, updatedObject *models.UpdatePerformerInput) error {
var r performerRow
r.fromPerformer(*updatedObject.Performer)
if err := qb.tableMgr.updateByID(ctx, updatedObject.ID, r); err != nil {
return err
@ -365,6 +379,10 @@ func (qb *PerformerStore) Update(ctx context.Context, updatedObject *models.Perf
}
}
if err := qb.SetCustomFields(ctx, updatedObject.ID, updatedObject.CustomFields); err != nil {
return err
}
return nil
}
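
With these changes `Create` and `Update` take wrapper inputs rather than a bare `models.Performer`, so custom fields travel alongside the row data. A rough sketch of the calling pattern, mirroring the unit tests further down; the `performerWriter` interface and the field keys are illustrative, not part of the commit:

```go
package example

import (
	"context"

	"github.com/stashapp/stash/pkg/models"
)

// performerWriter mirrors the Create/Update signatures introduced above.
type performerWriter interface {
	Create(ctx context.Context, input *models.CreatePerformerInput) error
	Update(ctx context.Context, input *models.UpdatePerformerInput) error
}

// createWithCustomFields creates a performer with an initial custom field map,
// then merges one more key in on update.
func createWithCustomFields(ctx context.Context, w performerWriter) error {
	create := models.CreatePerformerInput{
		Performer:    &models.Performer{Name: "example"},
		CustomFields: map[string]interface{}{"eye_colour": "green"}, // example key only
	}
	if err := w.Create(ctx, &create); err != nil {
		return err
	}

	// On update, custom fields are passed as a CustomFieldsInput so the caller
	// can choose between a full replacement and a partial merge.
	update := models.UpdatePerformerInput{
		Performer: create.Performer, // ID is populated by Create
		CustomFields: models.CustomFieldsInput{
			Partial: map[string]interface{}{"hair_length": "short"}, // example key only
		},
	}
	return w.Update(ctx, &update)
}
```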

View file

@ -203,6 +203,13 @@ func (qb *performerFilterHandler) criterionHandler() criterionHandler {
performerRepository.tags.innerJoin(f, "performer_tag", "performers.id")
},
},
&customFieldsFilterHandler{
table: performersCustomFieldsTable.GetTable(),
fkCol: performerIDColumn,
c: filter.CustomFields,
idCol: "performers.id",
},
}
}

View file

@ -16,6 +16,12 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
var testCustomFields = map[string]interface{}{
"string": "aaa",
"int": int64(123), // int64 to match the type of the field in the database
"real": 1.23,
}
func loadPerformerRelationships(ctx context.Context, expected models.Performer, actual *models.Performer) error { func loadPerformerRelationships(ctx context.Context, expected models.Performer, actual *models.Performer) error {
if expected.Aliases.Loaded() { if expected.Aliases.Loaded() {
if err := actual.LoadAliases(ctx, db.Performer); err != nil { if err := actual.LoadAliases(ctx, db.Performer); err != nil {
@ -81,57 +87,62 @@ func Test_PerformerStore_Create(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
newObject models.Performer newObject models.CreatePerformerInput
wantErr bool wantErr bool
}{ }{
{ {
"full", "full",
models.Performer{ models.CreatePerformerInput{
Name: name, Performer: &models.Performer{
Disambiguation: disambiguation, Name: name,
Gender: &gender, Disambiguation: disambiguation,
URLs: models.NewRelatedStrings(urls), Gender: &gender,
Birthdate: &birthdate, URLs: models.NewRelatedStrings(urls),
Ethnicity: ethnicity, Birthdate: &birthdate,
Country: country, Ethnicity: ethnicity,
EyeColor: eyeColor, Country: country,
Height: &height, EyeColor: eyeColor,
Measurements: measurements, Height: &height,
FakeTits: fakeTits, Measurements: measurements,
PenisLength: &penisLength, FakeTits: fakeTits,
Circumcised: &circumcised, PenisLength: &penisLength,
CareerLength: careerLength, Circumcised: &circumcised,
Tattoos: tattoos, CareerLength: careerLength,
Piercings: piercings, Tattoos: tattoos,
Favorite: favorite, Piercings: piercings,
Rating: &rating, Favorite: favorite,
Details: details, Rating: &rating,
DeathDate: &deathdate, Details: details,
HairColor: hairColor, DeathDate: &deathdate,
Weight: &weight, HairColor: hairColor,
IgnoreAutoTag: ignoreAutoTag, Weight: &weight,
TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithPerformer], tagIDs[tagIdx1WithDupName]}), IgnoreAutoTag: ignoreAutoTag,
Aliases: models.NewRelatedStrings(aliases), TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithPerformer], tagIDs[tagIdx1WithDupName]}),
StashIDs: models.NewRelatedStashIDs([]models.StashID{ Aliases: models.NewRelatedStrings(aliases),
{ StashIDs: models.NewRelatedStashIDs([]models.StashID{
StashID: stashID1, {
Endpoint: endpoint1, StashID: stashID1,
}, Endpoint: endpoint1,
{ },
StashID: stashID2, {
Endpoint: endpoint2, StashID: stashID2,
}, Endpoint: endpoint2,
}), },
CreatedAt: createdAt, }),
UpdatedAt: updatedAt, CreatedAt: createdAt,
UpdatedAt: updatedAt,
},
CustomFields: testCustomFields,
}, },
false, false,
}, },
{ {
"invalid tag id", "invalid tag id",
models.Performer{ models.CreatePerformerInput{
Name: name, Performer: &models.Performer{
TagIDs: models.NewRelatedIDs([]int{invalidID}), Name: name,
TagIDs: models.NewRelatedIDs([]int{invalidID}),
},
}, },
true, true,
}, },
@ -155,16 +166,16 @@ func Test_PerformerStore_Create(t *testing.T) {
assert.NotZero(p.ID) assert.NotZero(p.ID)
copy := tt.newObject copy := *tt.newObject.Performer
copy.ID = p.ID copy.ID = p.ID
// load relationships // load relationships
if err := loadPerformerRelationships(ctx, copy, &p); err != nil { if err := loadPerformerRelationships(ctx, copy, p.Performer); err != nil {
t.Errorf("loadPerformerRelationships() error = %v", err) t.Errorf("loadPerformerRelationships() error = %v", err)
return return
} }
assert.Equal(copy, p) assert.Equal(copy, *p.Performer)
// ensure can find the performer // ensure can find the performer
found, err := qb.Find(ctx, p.ID) found, err := qb.Find(ctx, p.ID)
@ -183,6 +194,15 @@ func Test_PerformerStore_Create(t *testing.T) {
} }
assert.Equal(copy, *found) assert.Equal(copy, *found)
// ensure custom fields are set
cf, err := qb.GetCustomFields(ctx, p.ID)
if err != nil {
t.Errorf("PerformerStore.GetCustomFields() error = %v", err)
return
}
assert.Equal(tt.newObject.CustomFields, cf)
return return
}) })
} }
@ -228,77 +248,109 @@ func Test_PerformerStore_Update(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
updatedObject *models.Performer updatedObject models.UpdatePerformerInput
wantErr bool wantErr bool
}{ }{
{ {
"full", "full",
&models.Performer{ models.UpdatePerformerInput{
ID: performerIDs[performerIdxWithGallery], Performer: &models.Performer{
Name: name, ID: performerIDs[performerIdxWithGallery],
Disambiguation: disambiguation, Name: name,
Gender: &gender, Disambiguation: disambiguation,
URLs: models.NewRelatedStrings(urls), Gender: &gender,
Birthdate: &birthdate, URLs: models.NewRelatedStrings(urls),
Ethnicity: ethnicity, Birthdate: &birthdate,
Country: country, Ethnicity: ethnicity,
EyeColor: eyeColor, Country: country,
Height: &height, EyeColor: eyeColor,
Measurements: measurements, Height: &height,
FakeTits: fakeTits, Measurements: measurements,
PenisLength: &penisLength, FakeTits: fakeTits,
Circumcised: &circumcised, PenisLength: &penisLength,
CareerLength: careerLength, Circumcised: &circumcised,
Tattoos: tattoos, CareerLength: careerLength,
Piercings: piercings, Tattoos: tattoos,
Favorite: favorite, Piercings: piercings,
Rating: &rating, Favorite: favorite,
Details: details, Rating: &rating,
DeathDate: &deathdate, Details: details,
HairColor: hairColor, DeathDate: &deathdate,
Weight: &weight, HairColor: hairColor,
IgnoreAutoTag: ignoreAutoTag, Weight: &weight,
Aliases: models.NewRelatedStrings(aliases), IgnoreAutoTag: ignoreAutoTag,
TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithPerformer], tagIDs[tagIdx1WithDupName]}), Aliases: models.NewRelatedStrings(aliases),
StashIDs: models.NewRelatedStashIDs([]models.StashID{ TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithPerformer], tagIDs[tagIdx1WithDupName]}),
{ StashIDs: models.NewRelatedStashIDs([]models.StashID{
StashID: stashID1, {
Endpoint: endpoint1, StashID: stashID1,
}, Endpoint: endpoint1,
{ },
StashID: stashID2, {
Endpoint: endpoint2, StashID: stashID2,
}, Endpoint: endpoint2,
}), },
CreatedAt: createdAt, }),
UpdatedAt: updatedAt, CreatedAt: createdAt,
UpdatedAt: updatedAt,
},
}, },
false, false,
}, },
{ {
"clear nullables", "clear nullables",
&models.Performer{ models.UpdatePerformerInput{
ID: performerIDs[performerIdxWithGallery], Performer: &models.Performer{
Aliases: models.NewRelatedStrings([]string{}), ID: performerIDs[performerIdxWithGallery],
URLs: models.NewRelatedStrings([]string{}), Aliases: models.NewRelatedStrings([]string{}),
TagIDs: models.NewRelatedIDs([]int{}), URLs: models.NewRelatedStrings([]string{}),
StashIDs: models.NewRelatedStashIDs([]models.StashID{}), TagIDs: models.NewRelatedIDs([]int{}),
StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
},
}, },
false, false,
}, },
{ {
"clear tag ids", "clear tag ids",
&models.Performer{ models.UpdatePerformerInput{
ID: performerIDs[sceneIdxWithTag], Performer: &models.Performer{
TagIDs: models.NewRelatedIDs([]int{}), ID: performerIDs[sceneIdxWithTag],
TagIDs: models.NewRelatedIDs([]int{}),
},
},
false,
},
{
"set custom fields",
models.UpdatePerformerInput{
Performer: &models.Performer{
ID: performerIDs[performerIdxWithGallery],
},
CustomFields: models.CustomFieldsInput{
Full: testCustomFields,
},
},
false,
},
{
"clear custom fields",
models.UpdatePerformerInput{
Performer: &models.Performer{
ID: performerIDs[performerIdxWithGallery],
},
CustomFields: models.CustomFieldsInput{
Full: map[string]interface{}{},
},
}, },
false, false,
}, },
{ {
"invalid tag id", "invalid tag id",
&models.Performer{ models.UpdatePerformerInput{
ID: performerIDs[sceneIdxWithGallery], Performer: &models.Performer{
TagIDs: models.NewRelatedIDs([]int{invalidID}), ID: performerIDs[sceneIdxWithGallery],
TagIDs: models.NewRelatedIDs([]int{invalidID}),
},
}, },
true, true,
}, },
@ -309,9 +361,9 @@ func Test_PerformerStore_Update(t *testing.T) {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
assert := assert.New(t) assert := assert.New(t)
copy := *tt.updatedObject copy := *tt.updatedObject.Performer
if err := qb.Update(ctx, tt.updatedObject); (err != nil) != tt.wantErr { if err := qb.Update(ctx, &tt.updatedObject); (err != nil) != tt.wantErr {
t.Errorf("PerformerStore.Update() error = %v, wantErr %v", err, tt.wantErr) t.Errorf("PerformerStore.Update() error = %v, wantErr %v", err, tt.wantErr)
} }
@ -331,6 +383,17 @@ func Test_PerformerStore_Update(t *testing.T) {
} }
assert.Equal(copy, *s) assert.Equal(copy, *s)
// ensure custom fields are correct
if tt.updatedObject.CustomFields.Full != nil {
cf, err := qb.GetCustomFields(ctx, tt.updatedObject.ID)
if err != nil {
t.Errorf("PerformerStore.GetCustomFields() error = %v", err)
return
}
assert.Equal(tt.updatedObject.CustomFields.Full, cf)
}
}) })
} }
} }
@ -573,6 +636,79 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) {
} }
} }
func Test_PerformerStore_UpdatePartialCustomFields(t *testing.T) {
tests := []struct {
name string
id int
partial models.PerformerPartial
expected map[string]interface{} // nil to use the partial
}{
{
"set custom fields",
performerIDs[performerIdxWithGallery],
models.PerformerPartial{
CustomFields: models.CustomFieldsInput{
Full: testCustomFields,
},
},
nil,
},
{
"clear custom fields",
performerIDs[performerIdxWithGallery],
models.PerformerPartial{
CustomFields: models.CustomFieldsInput{
Full: map[string]interface{}{},
},
},
nil,
},
{
"partial custom fields",
performerIDs[performerIdxWithGallery],
models.PerformerPartial{
CustomFields: models.CustomFieldsInput{
Partial: map[string]interface{}{
"string": "bbb",
"new_field": "new",
},
},
},
map[string]interface{}{
"int": int64(3),
"real": 1.3,
"string": "bbb",
"new_field": "new",
},
},
}
for _, tt := range tests {
qb := db.Performer
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
assert := assert.New(t)
_, err := qb.UpdatePartial(ctx, tt.id, tt.partial)
if err != nil {
t.Errorf("PerformerStore.UpdatePartial() error = %v", err)
return
}
// ensure custom fields are correct
cf, err := qb.GetCustomFields(ctx, tt.id)
if err != nil {
t.Errorf("PerformerStore.GetCustomFields() error = %v", err)
return
}
if tt.expected == nil {
assert.Equal(tt.partial.CustomFields.Full, cf)
} else {
assert.Equal(tt.expected, cf)
}
})
}
}
func TestPerformerFindBySceneID(t *testing.T) { func TestPerformerFindBySceneID(t *testing.T) {
withTxn(func(ctx context.Context) error { withTxn(func(ctx context.Context) error {
pqb := db.Performer pqb := db.Performer
@ -1042,6 +1178,242 @@ func TestPerformerQuery(t *testing.T) {
} }
} }
func TestPerformerQueryCustomFields(t *testing.T) {
tests := []struct {
name string
filter *models.PerformerFilterType
includeIdxs []int
excludeIdxs []int
wantErr bool
}{
{
"equals",
&models.PerformerFilterType{
CustomFields: []models.CustomFieldCriterionInput{
{
Field: "string",
Modifier: models.CriterionModifierEquals,
Value: []any{getPerformerStringValue(performerIdxWithGallery, "custom")},
},
},
},
[]int{performerIdxWithGallery},
nil,
false,
},
{
"not equals",
&models.PerformerFilterType{
Name: &models.StringCriterionInput{
Value: getPerformerStringValue(performerIdxWithGallery, "Name"),
Modifier: models.CriterionModifierEquals,
},
CustomFields: []models.CustomFieldCriterionInput{
{
Field: "string",
Modifier: models.CriterionModifierNotEquals,
Value: []any{getPerformerStringValue(performerIdxWithGallery, "custom")},
},
},
},
nil,
[]int{performerIdxWithGallery},
false,
},
{
"includes",
&models.PerformerFilterType{
CustomFields: []models.CustomFieldCriterionInput{
{
Field: "string",
Modifier: models.CriterionModifierIncludes,
Value: []any{getPerformerStringValue(performerIdxWithGallery, "custom")[9:]},
},
},
},
[]int{performerIdxWithGallery},
nil,
false,
},
{
"excludes",
&models.PerformerFilterType{
Name: &models.StringCriterionInput{
Value: getPerformerStringValue(performerIdxWithGallery, "Name"),
Modifier: models.CriterionModifierEquals,
},
CustomFields: []models.CustomFieldCriterionInput{
{
Field: "string",
Modifier: models.CriterionModifierExcludes,
Value: []any{getPerformerStringValue(performerIdxWithGallery, "custom")[9:]},
},
},
},
nil,
[]int{performerIdxWithGallery},
false,
},
{
"regex",
&models.PerformerFilterType{
CustomFields: []models.CustomFieldCriterionInput{
{
Field: "string",
Modifier: models.CriterionModifierMatchesRegex,
Value: []any{".*13_custom"},
},
},
},
[]int{performerIdxWithGallery},
nil,
false,
},
{
"invalid regex",
&models.PerformerFilterType{
CustomFields: []models.CustomFieldCriterionInput{
{
Field: "string",
Modifier: models.CriterionModifierMatchesRegex,
Value: []any{"["},
},
},
},
nil,
nil,
true,
},
{
"not matches regex",
&models.PerformerFilterType{
Name: &models.StringCriterionInput{
Value: getPerformerStringValue(performerIdxWithGallery, "Name"),
Modifier: models.CriterionModifierEquals,
},
CustomFields: []models.CustomFieldCriterionInput{
{
Field: "string",
Modifier: models.CriterionModifierNotMatchesRegex,
Value: []any{".*13_custom"},
},
},
},
nil,
[]int{performerIdxWithGallery},
false,
},
{
"invalid not matches regex",
&models.PerformerFilterType{
CustomFields: []models.CustomFieldCriterionInput{
{
Field: "string",
Modifier: models.CriterionModifierNotMatchesRegex,
Value: []any{"["},
},
},
},
nil,
nil,
true,
},
{
"null",
&models.PerformerFilterType{
Name: &models.StringCriterionInput{
Value: getPerformerStringValue(performerIdxWithGallery, "Name"),
Modifier: models.CriterionModifierEquals,
},
CustomFields: []models.CustomFieldCriterionInput{
{
Field: "not existing",
Modifier: models.CriterionModifierIsNull,
},
},
},
[]int{performerIdxWithGallery},
nil,
false,
},
{
"null",
&models.PerformerFilterType{
Name: &models.StringCriterionInput{
Value: getPerformerStringValue(performerIdxWithGallery, "Name"),
Modifier: models.CriterionModifierEquals,
},
CustomFields: []models.CustomFieldCriterionInput{
{
Field: "string",
Modifier: models.CriterionModifierNotNull,
},
},
},
[]int{performerIdxWithGallery},
nil,
false,
},
{
"between",
&models.PerformerFilterType{
CustomFields: []models.CustomFieldCriterionInput{
{
Field: "real",
Modifier: models.CriterionModifierBetween,
Value: []any{0.05, 0.15},
},
},
},
[]int{performerIdx1WithScene},
nil,
false,
},
{
"not between",
&models.PerformerFilterType{
Name: &models.StringCriterionInput{
Value: getPerformerStringValue(performerIdx1WithScene, "Name"),
Modifier: models.CriterionModifierEquals,
},
CustomFields: []models.CustomFieldCriterionInput{
{
Field: "real",
Modifier: models.CriterionModifierNotBetween,
Value: []any{0.05, 0.15},
},
},
},
nil,
[]int{performerIdx1WithScene},
false,
},
}
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
assert := assert.New(t)
performers, _, err := db.Performer.Query(ctx, tt.filter, nil)
if (err != nil) != tt.wantErr {
t.Errorf("PerformerStore.Query() error = %v, wantErr %v", err, tt.wantErr)
return
}
ids := performersToIDs(performers)
include := indexesToIDs(performerIDs, tt.includeIdxs)
exclude := indexesToIDs(performerIDs, tt.excludeIdxs)
for _, i := range include {
assert.Contains(ids, i)
}
for _, e := range exclude {
assert.NotContains(ids, e)
}
})
}
}
func TestPerformerQueryPenisLength(t *testing.T) { func TestPerformerQueryPenisLength(t *testing.T) {
var upper = 4.0 var upper = 4.0
@ -1172,7 +1544,7 @@ func TestPerformerUpdatePerformerImage(t *testing.T) {
performer := models.Performer{
Name: name,
}
err := qb.Create(ctx, &models.CreatePerformerInput{Performer: &performer})
if err != nil {
return fmt.Errorf("Error creating performer: %s", err.Error())
}
@ -1680,7 +2052,7 @@ func TestPerformerStashIDs(t *testing.T) {
performer := &models.Performer{ performer := &models.Performer{
Name: name, Name: name,
} }
if err := qb.Create(ctx, performer); err != nil { if err := qb.Create(ctx, &models.CreatePerformerInput{Performer: performer}); err != nil {
return fmt.Errorf("Error creating performer: %s", err.Error()) return fmt.Errorf("Error creating performer: %s", err.Error())
} }

View file

@ -133,6 +133,9 @@ func (qb *queryBuilder) join(table, as, onClause string) {
func (qb *queryBuilder) addJoins(joins ...join) {
qb.joins.add(joins...)
for _, j := range joins {
qb.args = append(qb.args, j.args...)
}
}
func (qb *queryBuilder) addFilter(f *filterBuilder) error {
@ -151,6 +154,9 @@ func (qb *queryBuilder) addFilter(f *filterBuilder) error {
qb.args = append(args, qb.args...)
}
// add joins here to insert args
qb.addJoins(f.getAllJoins()...)
clause, args = f.generateWhereClauses()
if len(clause) > 0 {
qb.addWhere(clause)
@ -169,8 +175,6 @@ func (qb *queryBuilder) addFilter(f *filterBuilder) error {
qb.addArg(args...)
}
qb.addJoins(f.getAllJoins()...)
return nil
}
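
Joins now carry their own placeholder args, and `addFilter` appends them before the WHERE-clause args so bind parameters line up with their placeholders in the generated SQL (joins precede WHERE). A speculative sketch, written as if inside the sqlite package, of a handler supplying an ON-clause arg; the table name and ON-clause shape are assumptions for illustration, not taken verbatim from the custom fields handler:

```go
// addFieldJoin joins the custom fields table once per criterion; the "?" in
// the ON clause is bound by the trailing arg, which addJoins copies into
// qb.args ahead of any WHERE args.
func addFieldJoin(f *filterBuilder, joinAs string, field string) {
	f.addInnerJoin(
		"performer_custom_fields",
		joinAs,
		fmt.Sprintf("%s.performer_id = performers.id AND %[1]s.field = ?", joinAs),
		field,
	)
}
```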

View file

@ -222,8 +222,8 @@ func (r *repository) innerJoin(j joiner, as string, parentIDCol string) {
}
type joiner interface {
addLeftJoin(table, as, onClause string, args ...interface{})
addInnerJoin(table, as, onClause string, args ...interface{})
}
type joinRepository struct {

View file

@ -1508,6 +1508,18 @@ func performerAliases(i int) []string {
return []string{getPerformerStringValue(i, "alias")}
}
func getPerformerCustomFields(index int) map[string]interface{} {
if index%5 == 0 {
return nil
}
return map[string]interface{}{
"string": getPerformerStringValue(index, "custom"),
"int": int64(index % 5),
"real": float64(index) / 10,
}
}
// createPerformers creates n performers with plain Name and o performers with camel cased NaMe included
func createPerformers(ctx context.Context, n int, o int) error {
pqb := db.Performer
@ -1558,7 +1570,10 @@ func createPerformers(ctx context.Context, n int, o int) error {
})
}
err := pqb.Create(ctx, &models.CreatePerformerInput{
Performer: &performer,
CustomFields: getPerformerCustomFields(i),
})
if err != nil {
return fmt.Errorf("Error creating performer %v+: %s", performer, err.Error())

View file

@ -32,6 +32,7 @@ var (
performersURLsJoinTable = goqu.T(performerURLsTable)
performersTagsJoinTable = goqu.T(performersTagsTable)
performersStashIDsJoinTable = goqu.T("performer_stash_ids")
performersCustomFieldsTable = goqu.T("performer_custom_fields")
studiosAliasesJoinTable = goqu.T(studioAliasesTable)
studiosTagsJoinTable = goqu.T(studiosTagsTable)

View file

@ -1,16 +0,0 @@
package utils
import (
"encoding/json"
"strings"
)
// JSONNumberToNumber converts a JSON number to either a float64 or int64.
func JSONNumberToNumber(n json.Number) interface{} {
if strings.Contains(string(n), ".") {
f, _ := n.Float64()
return f
}
ret, _ := n.Int64()
return ret
}

View file

@ -1,7 +1,6 @@
package utils
import (
"encoding/json"
"strings"
)
@ -80,19 +79,3 @@ func MergeMaps(dest map[string]interface{}, src map[string]interface{}) {
dest[k] = v
}
}
// ConvertMapJSONNumbers converts all JSON numbers in a map to either float64 or int64.
func ConvertMapJSONNumbers(m map[string]interface{}) (ret map[string]interface{}) {
ret = make(map[string]interface{})
for k, v := range m {
if n, ok := v.(json.Number); ok {
ret[k] = JSONNumberToNumber(n)
} else if mm, ok := v.(map[string]interface{}); ok {
ret[k] = ConvertMapJSONNumbers(mm)
} else {
ret[k] = v
}
}
return ret
}

View file

@ -1,11 +1,8 @@
package utils
import (
"encoding/json"
"reflect"
"testing"
"github.com/stretchr/testify/assert"
)
func TestNestedMapGet(t *testing.T) {
@ -282,55 +279,3 @@ func TestMergeMaps(t *testing.T) {
})
}
}
func TestConvertMapJSONNumbers(t *testing.T) {
tests := []struct {
name string
input map[string]interface{}
expected map[string]interface{}
}{
{
name: "Convert JSON numbers to numbers",
input: map[string]interface{}{
"int": json.Number("12"),
"float": json.Number("12.34"),
"string": "foo",
},
expected: map[string]interface{}{
"int": int64(12),
"float": 12.34,
"string": "foo",
},
},
{
name: "Convert JSON numbers to numbers in nested maps",
input: map[string]interface{}{
"foo": map[string]interface{}{
"int": json.Number("56"),
"float": json.Number("56.78"),
"nested-string": "bar",
},
"int": json.Number("12"),
"float": json.Number("12.34"),
"string": "foo",
},
expected: map[string]interface{}{
"foo": map[string]interface{}{
"int": int64(56),
"float": 56.78,
"nested-string": "bar",
},
"int": int64(12),
"float": 12.34,
"string": "foo",
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := ConvertMapJSONNumbers(tt.input)
assert.Equal(t, tt.expected, result)
})
}
}

View file

@ -41,4 +41,6 @@ fragment PerformerData on Performer {
death_date
hair_color
weight
custom_fields
}

View file

@ -14,6 +14,7 @@ import {
FormatWeight,
} from "../PerformerList";
import { PatchComponent } from "src/patch";
import { CustomFields } from "src/components/Shared/CustomFields";
interface IPerformerDetails {
performer: GQL.PerformerDataFragment;
@ -176,6 +177,7 @@ export const PerformerDetailsPanel: React.FC<IPerformerDetails> =
value={renderStashIDs()}
fullWidth={fullWidth}
/>
{fullWidth && <CustomFields values={performer.custom_fields} />}
</PerformerDetailGroup>
);
});

View file

@ -47,6 +47,8 @@ import {
yupUniqueStringList, yupUniqueStringList,
} from "src/utils/yup"; } from "src/utils/yup";
import { useTagsEdit } from "src/hooks/tagsEdit"; import { useTagsEdit } from "src/hooks/tagsEdit";
import { CustomFieldsInput } from "src/components/Shared/CustomFields";
import { cloneDeep } from "@apollo/client/utilities";
const isScraper = ( const isScraper = (
scraper: GQL.Scraper | GQL.StashBox scraper: GQL.Scraper | GQL.StashBox
@ -61,6 +63,16 @@ interface IPerformerDetails {
setEncodingImage: (loading: boolean) => void; setEncodingImage: (loading: boolean) => void;
} }
function customFieldInput(isNew: boolean, input: {}) {
if (isNew) {
return input;
} else {
return {
full: input,
};
}
}
export const PerformerEditPanel: React.FC<IPerformerDetails> = ({ export const PerformerEditPanel: React.FC<IPerformerDetails> = ({
performer, performer,
isVisible, isVisible,
@ -115,6 +127,7 @@ export const PerformerEditPanel: React.FC<IPerformerDetails> = ({
ignore_auto_tag: yup.boolean().defined(), ignore_auto_tag: yup.boolean().defined(),
stash_ids: yup.mixed<GQL.StashIdInput[]>().defined(), stash_ids: yup.mixed<GQL.StashIdInput[]>().defined(),
image: yup.string().nullable().optional(), image: yup.string().nullable().optional(),
custom_fields: yup.object().required().defined(),
}); });
const initialValues = { const initialValues = {
@ -142,15 +155,26 @@ export const PerformerEditPanel: React.FC<IPerformerDetails> = ({
tag_ids: (performer.tags ?? []).map((t) => t.id), tag_ids: (performer.tags ?? []).map((t) => t.id),
ignore_auto_tag: performer.ignore_auto_tag ?? false, ignore_auto_tag: performer.ignore_auto_tag ?? false,
stash_ids: getStashIDs(performer.stash_ids), stash_ids: getStashIDs(performer.stash_ids),
custom_fields: cloneDeep(performer.custom_fields ?? {}),
}; };
type InputValues = yup.InferType<typeof schema>; type InputValues = yup.InferType<typeof schema>;
const [customFieldsError, setCustomFieldsError] = useState<string>();
function submit(values: InputValues) {
const input = {
...schema.cast(values),
custom_fields: customFieldInput(isNew, values.custom_fields),
};
onSave(input);
}
const formik = useFormik<InputValues>({ const formik = useFormik<InputValues>({
initialValues, initialValues,
enableReinitialize: true, enableReinitialize: true,
validate: yupFormikValidate(schema), validate: yupFormikValidate(schema),
onSubmit: (values) => onSave(schema.cast(values)), onSubmit: submit,
}); });
const { tags, updateTagsStateFromScraper, tagsControl } = useTagsEdit( const { tags, updateTagsStateFromScraper, tagsControl } = useTagsEdit(
@ -571,7 +595,11 @@ export const PerformerEditPanel: React.FC<IPerformerDetails> = ({
</div> </div>
<Button <Button
variant="success" variant="success"
disabled={(!isNew && !formik.dirty) || !isEqual(formik.errors, {})} disabled={
(!isNew && !formik.dirty) ||
!isEqual(formik.errors, {}) ||
customFieldsError !== undefined
}
onClick={() => formik.submitForm()} onClick={() => formik.submitForm()}
> >
<FormattedMessage id="actions.save" /> <FormattedMessage id="actions.save" />
@ -680,6 +708,15 @@ export const PerformerEditPanel: React.FC<IPerformerDetails> = ({
{renderInputField("ignore_auto_tag", "checkbox")} {renderInputField("ignore_auto_tag", "checkbox")}
<hr />
<CustomFieldsInput
values={formik.values.custom_fields}
onChange={(v) => formik.setFieldValue("custom_fields", v)}
error={customFieldsError}
setError={(e) => setCustomFieldsError(e)}
/>
{renderButtons("mt-3")} {renderButtons("mt-3")}
</Form> </Form>
</> </>

View file

@ -54,6 +54,17 @@
display: none;
}
}
.detail-group .custom-fields .collapse-button {
display: table-cell;
font-weight: 700;
padding-left: 0;
}
.custom-fields .detail-item-title,
.custom-fields .detail-item-value {
font-family: "Courier New", Courier, monospace;
}
/* stylelint-enable selector-class-pattern */
}

View file

@ -8,6 +8,7 @@ import { Button, Collapse } from "react-bootstrap";
import { Icon } from "./Icon"; import { Icon } from "./Icon";
interface IProps { interface IProps {
className?: string;
text: React.ReactNode; text: React.ReactNode;
} }
@ -17,12 +18,12 @@ export const CollapseButton: React.FC<React.PropsWithChildren<IProps>> = (
const [open, setOpen] = useState(false); const [open, setOpen] = useState(false);
return ( return (
<div> <div className={props.className}>
<Button <Button
onClick={() => setOpen(!open)} onClick={() => setOpen(!open)}
className="minimal collapse-button" className="minimal collapse-button"
> >
<Icon icon={open ? faChevronDown : faChevronRight} /> <Icon icon={open ? faChevronDown : faChevronRight} fixedWidth />
<span>{props.text}</span> <span>{props.text}</span>
</Button> </Button>
<Collapse in={open}> <Collapse in={open}>
@ -44,7 +45,7 @@ export const ExpandCollapseButton: React.FC<{
className="minimal expand-collapse" className="minimal expand-collapse"
onClick={() => setCollapsed(!collapsed)} onClick={() => setCollapsed(!collapsed)}
> >
<Icon className="fa-fw" icon={buttonIcon} /> <Icon icon={buttonIcon} fixedWidth />
</Button> </Button>
</span> </span>
); );

View file

@ -0,0 +1,308 @@
import React, { useEffect, useMemo, useRef, useState } from "react";
import { CollapseButton } from "./CollapseButton";
import { DetailItem } from "./DetailItem";
import { Button, Col, Form, FormGroup, InputGroup, Row } from "react-bootstrap";
import { FormattedMessage, useIntl } from "react-intl";
import { cloneDeep } from "@apollo/client/utilities";
import { Icon } from "./Icon";
import { faMinus, faPlus } from "@fortawesome/free-solid-svg-icons";
import cx from "classnames";
const maxFieldNameLength = 64;
export type CustomFieldMap = {
[key: string]: unknown;
};
interface ICustomFields {
values: CustomFieldMap;
}
function convertValue(value: unknown): string {
if (typeof value === "string") {
return value;
} else if (typeof value === "number") {
return value.toString();
} else if (typeof value === "boolean") {
return value ? "true" : "false";
} else if (Array.isArray(value)) {
return value.join(", ");
} else {
return JSON.stringify(value);
}
}
const CustomField: React.FC<{ field: string; value: unknown }> = ({
field,
value,
}) => {
const valueStr = convertValue(value);
// replace spaces with hyphen characters for css id
const id = field.toLowerCase().replace(/ /g, "-");
return (
<DetailItem
id={id}
label={field}
labelTitle={field}
value={valueStr}
fullWidth={true}
showEmpty
/>
);
};
export const CustomFields: React.FC<ICustomFields> = ({ values }) => {
const intl = useIntl();
if (Object.keys(values).length === 0) {
return null;
}
return (
// according to linter rule CSS classes shouldn't use underscores
<div className="custom-fields">
<CollapseButton text={intl.formatMessage({ id: "custom_fields.title" })}>
{Object.entries(values).map(([key, value]) => (
<CustomField key={key} field={key} value={value} />
))}
</CollapseButton>
</div>
);
};
function isNumeric(v: string) {
return /^-?(?:0|(?:[1-9][0-9]*))(?:\.[0-9]+)?$/.test(v);
}
function convertCustomValue(v: string) {
// if the value is numeric, convert it to a number
if (isNumeric(v)) {
return Number(v);
} else {
return v;
}
}
const CustomFieldInput: React.FC<{
field: string;
value: unknown;
onChange: (field: string, value: unknown) => void;
isNew?: boolean;
error?: string;
}> = ({ field, value, onChange, isNew = false, error }) => {
const intl = useIntl();
const [currentField, setCurrentField] = useState(field);
const [currentValue, setCurrentValue] = useState(value as string);
const fieldRef = useRef<HTMLInputElement>(null);
const valueRef = useRef<HTMLInputElement>(null);
useEffect(() => {
setCurrentField(field);
setCurrentValue(value as string);
}, [field, value]);
function onBlur() {
onChange(currentField, convertCustomValue(currentValue));
}
function onDelete() {
onChange("", "");
}
return (
<FormGroup>
<Row className={cx("custom-fields-row", { "custom-fields-new": isNew })}>
<Col sm={3} xl={2} className="custom-fields-field">
{isNew ? (
<>
<Form.Control
ref={fieldRef}
className="input-control"
type="text"
value={currentField ?? ""}
placeholder={intl.formatMessage({ id: "custom_fields.field" })}
onChange={(event) => setCurrentField(event.currentTarget.value)}
onBlur={onBlur}
/>
</>
) : (
<Form.Label title={currentField}>{currentField}</Form.Label>
)}
</Col>
<Col sm={9} xl={7}>
<InputGroup>
<Form.Control
ref={valueRef}
className="input-control"
type="text"
value={(currentValue as string) ?? ""}
placeholder={currentField}
onChange={(event) => setCurrentValue(event.currentTarget.value)}
onBlur={onBlur}
/>
<InputGroup.Append>
{!isNew && (
<Button
className="custom-fields-remove"
variant="danger"
onClick={() => onDelete()}
>
<Icon icon={faMinus} />
</Button>
)}
</InputGroup.Append>
</InputGroup>
</Col>
</Row>
<Form.Control.Feedback type="invalid">{error}</Form.Control.Feedback>
</FormGroup>
);
};
interface ICustomField {
field: string;
value: unknown;
}
interface ICustomFieldsInput {
values: CustomFieldMap;
error?: string;
onChange: (values: CustomFieldMap) => void;
setError: (error?: string) => void;
}
export const CustomFieldsInput: React.FC<ICustomFieldsInput> = ({
values,
error,
onChange,
setError,
}) => {
const intl = useIntl();
const [newCustomField, setNewCustomField] = useState<ICustomField>({
field: "",
value: "",
});
const fields = useMemo(() => {
const valueCopy = cloneDeep(values);
if (newCustomField.field !== "" && error === undefined) {
delete valueCopy[newCustomField.field];
}
const ret = Object.keys(valueCopy);
ret.sort();
return ret;
}, [values, newCustomField, error]);
function onSetNewField(v: ICustomField) {
// validate the field name
let newError = undefined;
if (v.field.length > maxFieldNameLength) {
newError = intl.formatMessage({
id: "errors.custom_fields.field_name_length",
});
}
if (v.field.trim() === "" && v.value !== "") {
newError = intl.formatMessage({
id: "errors.custom_fields.field_name_required",
});
}
if (v.field.trim() !== v.field) {
newError = intl.formatMessage({
id: "errors.custom_fields.field_name_whitespace",
});
}
if (fields.includes(v.field)) {
newError = intl.formatMessage({
id: "errors.custom_fields.duplicate_field",
});
}
const oldField = newCustomField;
setNewCustomField(v);
const valuesCopy = cloneDeep(values);
if (oldField.field !== "" && error === undefined) {
delete valuesCopy[oldField.field];
}
// if valid, pass up
if (!newError && v.field !== "") {
valuesCopy[v.field] = v.value;
}
onChange(valuesCopy);
setError(newError);
}
function onAdd() {
const newValues = {
...values,
[newCustomField.field]: newCustomField.value,
};
setNewCustomField({ field: "", value: "" });
onChange(newValues);
}
function fieldChanged(
currentField: string,
newField: string,
value: unknown
) {
let newValues = cloneDeep(values);
delete newValues[currentField];
if (newField !== "") {
newValues[newField] = value;
}
onChange(newValues);
}
return (
<CollapseButton
className="custom-fields-input"
text={intl.formatMessage({ id: "custom_fields.title" })}
>
<Row>
<Col xl={12}>
<Row className="custom-fields-input-header">
<Form.Label column sm={3} xl={2}>
<FormattedMessage id="custom_fields.field" />
</Form.Label>
<Form.Label column sm={9} xl={7}>
<FormattedMessage id="custom_fields.value" />
</Form.Label>
</Row>
{fields.map((field) => (
<CustomFieldInput
key={field}
field={field}
value={values[field]}
onChange={(newField, newValue) =>
fieldChanged(field, newField, newValue)
}
/>
))}
<CustomFieldInput
field={newCustomField.field}
value={newCustomField.value}
error={error}
onChange={(field, value) => onSetNewField({ field, value })}
isNew
/>
</Col>
</Row>
<Button
className="custom-fields-add"
variant="success"
onClick={() => onAdd()}
disabled={newCustomField.field === "" || error !== undefined}
>
<Icon icon={faPlus} />
</Button>
</CollapseButton>
);
};

View file

@ -3,34 +3,39 @@ import { FormattedMessage } from "react-intl";
interface IDetailItem { interface IDetailItem {
id?: string | null; id?: string | null;
label?: React.ReactNode;
value?: React.ReactNode; value?: React.ReactNode;
labelTitle?: string;
title?: string; title?: string;
fullWidth?: boolean; fullWidth?: boolean;
showEmpty?: boolean;
} }
export const DetailItem: React.FC<IDetailItem> = ({ export const DetailItem: React.FC<IDetailItem> = ({
id, id,
label,
value, value,
labelTitle,
title, title,
fullWidth, fullWidth,
showEmpty = false,
}) => { }) => {
if (!id || !value || value === "Na") { if (!id || (!showEmpty && (!value || value === "Na"))) {
return <></>; return <></>;
} }
const message = <FormattedMessage id={id} />; const message = label ?? <FormattedMessage id={id} />;
// according to linter rule CSS classes shouldn't use underscores
const sanitisedID = id.replace(/_/g, "-");
return ( return (
// according to linter rule CSS classes shouldn't use underscores
<div className={`detail-item ${id}`}> <div className={`detail-item ${id}`}>
<span className={`detail-item-title ${id.replace("_", "-")}`}> <span className={`detail-item-title ${sanitisedID}`} title={labelTitle}>
{message} {message}
{fullWidth ? ":" : ""} {fullWidth ? ":" : ""}
</span> </span>
<span <span className={`detail-item-value ${sanitisedID}`} title={title}>
className={`detail-item-value ${id.replace("_", "-")}`}
title={title}
>
{value} {value}
</span> </span>
</div> </div>

View file

@ -197,6 +197,15 @@ button.collapse-button.btn-primary:not(:disabled):not(.disabled):active {
border: none;
box-shadow: none;
color: #f5f8fa;
text-align: left;
}
button.collapse-button {
.fa-icon {
margin-left: 0;
}
padding-left: 0;
}
.hover-popover-content {
@ -678,3 +687,44 @@ button.btn.favorite-button {
}
}
}
.custom-fields .detail-item .detail-item-title {
max-width: 130px;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
.custom-fields-input > .collapse-button {
font-weight: 700;
}
.custom-fields-row {
align-items: center;
font-family: "Courier New", Courier, monospace;
font-size: 0.875rem;
.form-label {
margin-bottom: 0;
max-width: 100%;
overflow: hidden;
text-overflow: ellipsis;
vertical-align: middle;
white-space: nowrap;
}
// labels with titles are styled with help cursor and dotted underline elsewhere
div.custom-fields-field label.form-label {
cursor: inherit;
text-decoration: inherit;
}
.form-control,
.btn {
font-size: 0.875rem;
}
&.custom-fields-new > div:not(:last-child) {
padding-right: 0;
}
}

View file

@ -854,6 +854,11 @@
"only": "Only" "only": "Only"
}, },
"custom": "Custom", "custom": "Custom",
"custom_fields": {
"field": "Field",
"title": "Custom Fields",
"value": "Value"
},
"date": "Date", "date": "Date",
"date_format": "YYYY-MM-DD", "date_format": "YYYY-MM-DD",
"datetime_format": "YYYY-MM-DD HH:MM", "datetime_format": "YYYY-MM-DD HH:MM",
@ -1035,6 +1040,12 @@
}, },
"empty_server": "Add some scenes to your server to view recommendations on this page.", "empty_server": "Add some scenes to your server to view recommendations on this page.",
"errors": { "errors": {
"custom_fields": {
"duplicate_field": "Field name must be unique",
"field_name_length": "Field name must fewer than 65 characters",
"field_name_required": "Field name is required",
"field_name_whitespace": "Field name cannot have leading or trailing whitespace"
},
"header": "Error", "header": "Error",
"image_index_greater_than_zero": "Image index must be greater than 0", "image_index_greater_than_zero": "Image index must be greater than 0",
"invalid_javascript_string": "Invalid javascript code: {error}", "invalid_javascript_string": "Invalid javascript code: {error}",