mirror of
https://github.com/stashapp/stash.git
synced 2026-04-17 04:24:07 +02:00
Merge 0c39702792 into fd480c5a3e
This commit is contained in:
commit
057fa9aade
24 changed files with 1289 additions and 31 deletions
|
|
@ -12,6 +12,8 @@ type Query {
|
|||
"Queries for Files"
|
||||
findFiles(
|
||||
file_filter: FileFilterType
|
||||
"Provide the ID of a saved filter instead of providing a file_filter. Cannot be used with file_filter "
|
||||
saved_filter_id: ID
|
||||
filter: FindFilterType
|
||||
ids: [ID!]
|
||||
): FindFilesResultType!
|
||||
|
|
@ -22,6 +24,8 @@ type Query {
|
|||
"Queries for Files"
|
||||
findFolders(
|
||||
folder_filter: FolderFilterType
|
||||
"Provide the ID of a saved filter instead of providing a folder_filter. Cannot be used with folder_filter "
|
||||
saved_filter_id: ID
|
||||
filter: FindFilterType
|
||||
ids: [ID!]
|
||||
): FindFoldersResultType!
|
||||
|
|
@ -33,6 +37,8 @@ type Query {
|
|||
"A function which queries Scene objects"
|
||||
findScenes(
|
||||
scene_filter: SceneFilterType
|
||||
"Provide the ID of a saved filter instead of providing a scene_filter. Cannot be used with scene_filter "
|
||||
saved_filter_id: ID
|
||||
scene_ids: [Int!] @deprecated(reason: "use ids")
|
||||
ids: [ID!]
|
||||
filter: FindFilterType
|
||||
|
|
@ -64,6 +70,8 @@ type Query {
|
|||
"A function which queries SceneMarker objects"
|
||||
findSceneMarkers(
|
||||
scene_marker_filter: SceneMarkerFilterType
|
||||
"Provide the ID of a saved filter instead of providing a scene_marker_filter. Cannot be used with scene_marker_filter "
|
||||
saved_filter_id: ID
|
||||
filter: FindFilterType
|
||||
ids: [ID!]
|
||||
): FindSceneMarkersResultType!
|
||||
|
|
@ -73,6 +81,8 @@ type Query {
|
|||
"A function which queries Scene objects"
|
||||
findImages(
|
||||
image_filter: ImageFilterType
|
||||
"Provide the ID of a saved filter instead of providing a image_filter. Cannot be used with image_filter "
|
||||
saved_filter_id: ID
|
||||
image_ids: [Int!] @deprecated(reason: "use ids")
|
||||
ids: [ID!]
|
||||
filter: FindFilterType
|
||||
|
|
@ -83,6 +93,8 @@ type Query {
|
|||
"A function which queries Performer objects"
|
||||
findPerformers(
|
||||
performer_filter: PerformerFilterType
|
||||
"Provide the ID of a saved filter instead of providing a performer_filter. Cannot be used with performer_filter "
|
||||
saved_filter_id: ID
|
||||
filter: FindFilterType
|
||||
performer_ids: [Int!] @deprecated(reason: "use ids")
|
||||
ids: [ID!]
|
||||
|
|
@ -93,6 +105,8 @@ type Query {
|
|||
"A function which queries Studio objects"
|
||||
findStudios(
|
||||
studio_filter: StudioFilterType
|
||||
"Provide the ID of a saved filter instead of providing a studio_filter. Cannot be used with studio_filter "
|
||||
saved_filter_id: ID
|
||||
filter: FindFilterType
|
||||
ids: [ID!]
|
||||
): FindStudiosResultType!
|
||||
|
|
@ -102,6 +116,8 @@ type Query {
|
|||
"A function which queries Movie objects"
|
||||
findMovies(
|
||||
movie_filter: MovieFilterType
|
||||
"Provide the ID of a saved filter instead of providing a movie_filter. Cannot be used with movie_filter "
|
||||
saved_filter_id: ID
|
||||
filter: FindFilterType
|
||||
ids: [ID!]
|
||||
): FindMoviesResultType! @deprecated(reason: "Use findGroups instead")
|
||||
|
|
@ -111,6 +127,8 @@ type Query {
|
|||
"A function which queries Group objects"
|
||||
findGroups(
|
||||
group_filter: GroupFilterType
|
||||
"Provide the ID of a saved filter instead of providing a group_filter. Cannot be used with group_filter "
|
||||
saved_filter_id: ID
|
||||
filter: FindFilterType
|
||||
ids: [ID!]
|
||||
): FindGroupsResultType!
|
||||
|
|
@ -118,6 +136,8 @@ type Query {
|
|||
findGallery(id: ID!): Gallery
|
||||
findGalleries(
|
||||
gallery_filter: GalleryFilterType
|
||||
"Provide the ID of a saved filter instead of providing a gallery_filter. Cannot be used with gallery_filter "
|
||||
saved_filter_id: ID
|
||||
filter: FindFilterType
|
||||
ids: [ID!]
|
||||
): FindGalleriesResultType!
|
||||
|
|
@ -125,6 +145,8 @@ type Query {
|
|||
findTag(id: ID!): Tag
|
||||
findTags(
|
||||
tag_filter: TagFilterType
|
||||
"Provide the ID of a saved filter instead of providing a tag_filter. Cannot be used with tag_filter "
|
||||
saved_filter_id: ID
|
||||
filter: FindFilterType
|
||||
ids: [ID!]
|
||||
): FindTagsResultType!
|
||||
|
|
|
|||
|
|
@ -987,6 +987,8 @@ type SavedFilter {
|
|||
object_filter: Map
|
||||
# generic map for ui options
|
||||
ui_options: Map
|
||||
# mapping of object IDs to labels for criteria in object_filter (e.g. tag IDs to names)
|
||||
label_mapping: LabelMappingType
|
||||
}
|
||||
|
||||
input SaveFilterInput {
|
||||
|
|
@ -1012,3 +1014,19 @@ input SetDefaultFilterInput {
|
|||
# generic map for ui options
|
||||
ui_options: Map
|
||||
}
|
||||
|
||||
type LabelMappingEntry {
|
||||
id: ID!
|
||||
label: String!
|
||||
}
|
||||
|
||||
type LabelMappingType {
|
||||
tags: [LabelMappingEntry!]!
|
||||
studios: [LabelMappingEntry!]!
|
||||
performers: [LabelMappingEntry!]!
|
||||
groups: [LabelMappingEntry!]!
|
||||
galleries: [LabelMappingEntry!]!
|
||||
folders: [LabelMappingEntry!]!
|
||||
scenes: [LabelMappingEntry!]!
|
||||
movies: [LabelMappingEntry!]!
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ package api
|
|||
|
||||
import (
|
||||
"context"
|
||||
"strconv"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
|
@ -9,3 +10,152 @@ import (
|
|||
func (r *savedFilterResolver) Filter(ctx context.Context, obj *models.SavedFilter) (string, error) {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
func (r *savedFilterResolver) LabelMapping(ctx context.Context, obj *models.SavedFilter) (*LabelMappingType, error) {
|
||||
mapping := &LabelMappingType{}
|
||||
if obj.ObjectFilter == nil {
|
||||
return mapping, nil
|
||||
}
|
||||
|
||||
// Helper to extract IDs from a list of strings
|
||||
extractIDs := func(v interface{}) []int {
|
||||
var ids []int
|
||||
if list, ok := v.([]interface{}); ok {
|
||||
for _, item := range list {
|
||||
if strID, ok := item.(string); ok {
|
||||
if intID, err := strconv.Atoi(strID); err == nil {
|
||||
ids = append(ids, intID)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return ids
|
||||
}
|
||||
|
||||
// Helper to fetch and populate mapping
|
||||
populateMapping := func(criteriaKeys []string, fetchLabels func([]int) []*LabelMappingEntry) []*LabelMappingEntry {
|
||||
var allIDs []int
|
||||
|
||||
for _, criteriaKey := range criteriaKeys {
|
||||
criterion, ok := obj.ObjectFilter[criteriaKey].(map[string]interface{})
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
if val, ok := criterion["value"]; ok {
|
||||
allIDs = append(allIDs, extractIDs(val)...)
|
||||
}
|
||||
if excl, ok := criterion["excludes"]; ok {
|
||||
allIDs = append(allIDs, extractIDs(excl)...)
|
||||
}
|
||||
}
|
||||
|
||||
if len(allIDs) > 0 {
|
||||
// deduplicate IDs
|
||||
idMap := make(map[int]bool)
|
||||
var dedupedIDs []int
|
||||
for _, id := range allIDs {
|
||||
if !idMap[id] {
|
||||
idMap[id] = true
|
||||
dedupedIDs = append(dedupedIDs, id)
|
||||
}
|
||||
}
|
||||
|
||||
return fetchLabels(dedupedIDs)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
err := r.withReadTxn(ctx, func(ctx context.Context) error {
|
||||
// Tags
|
||||
mapping.Tags = populateMapping([]string{"tags", "scene_tags", "performer_tags", "studio_tags", "parents", "children"}, func(ids []int) []*LabelMappingEntry {
|
||||
var res []*LabelMappingEntry
|
||||
tags, _ := r.repository.Tag.FindMany(ctx, ids)
|
||||
for _, t := range tags {
|
||||
res = append(res, &LabelMappingEntry{ID: strconv.Itoa(t.ID), Label: t.Name})
|
||||
}
|
||||
return res
|
||||
})
|
||||
|
||||
// Performers
|
||||
mapping.Performers = populateMapping([]string{"performers"}, func(ids []int) []*LabelMappingEntry {
|
||||
var res []*LabelMappingEntry
|
||||
performers, _ := r.repository.Performer.FindMany(ctx, ids)
|
||||
for _, p := range performers {
|
||||
res = append(res, &LabelMappingEntry{ID: strconv.Itoa(p.ID), Label: p.Name})
|
||||
}
|
||||
return res
|
||||
})
|
||||
|
||||
// Studios
|
||||
mapping.Studios = populateMapping([]string{"studios"}, func(ids []int) []*LabelMappingEntry {
|
||||
var res []*LabelMappingEntry
|
||||
studios, _ := r.repository.Studio.FindMany(ctx, ids)
|
||||
for _, s := range studios {
|
||||
res = append(res, &LabelMappingEntry{ID: strconv.Itoa(s.ID), Label: s.Name})
|
||||
}
|
||||
return res
|
||||
})
|
||||
|
||||
// Groups
|
||||
mapping.Groups = populateMapping([]string{"groups", "containing_groups", "sub_groups"}, func(ids []int) []*LabelMappingEntry {
|
||||
var res []*LabelMappingEntry
|
||||
groups, _ := r.repository.Group.FindMany(ctx, ids)
|
||||
for _, g := range groups {
|
||||
res = append(res, &LabelMappingEntry{ID: strconv.Itoa(g.ID), Label: g.Name})
|
||||
}
|
||||
return res
|
||||
})
|
||||
|
||||
// Galleries
|
||||
mapping.Galleries = populateMapping([]string{"galleries"}, func(ids []int) []*LabelMappingEntry {
|
||||
var res []*LabelMappingEntry
|
||||
galleries, _ := r.repository.Gallery.FindMany(ctx, ids)
|
||||
for _, g := range galleries {
|
||||
res = append(res, &LabelMappingEntry{ID: strconv.Itoa(g.ID), Label: g.Title})
|
||||
}
|
||||
return res
|
||||
})
|
||||
|
||||
// Folders
|
||||
mapping.Folders = populateMapping([]string{"folders", "parent_folder"}, func(ids []int) []*LabelMappingEntry {
|
||||
var res []*LabelMappingEntry
|
||||
folderIDs := make([]models.FolderID, len(ids))
|
||||
for i, id := range ids {
|
||||
folderIDs[i] = models.FolderID(id)
|
||||
}
|
||||
folders, _ := r.repository.Folder.FindMany(ctx, folderIDs)
|
||||
for _, f := range folders {
|
||||
res = append(res, &LabelMappingEntry{ID: strconv.Itoa(int(f.ID)), Label: f.Path})
|
||||
}
|
||||
return res
|
||||
})
|
||||
|
||||
// Scenes
|
||||
mapping.Scenes = populateMapping([]string{"scenes"}, func(ids []int) []*LabelMappingEntry {
|
||||
var res []*LabelMappingEntry
|
||||
scenes, _ := r.repository.Scene.FindMany(ctx, ids)
|
||||
for _, s := range scenes {
|
||||
label := s.Title
|
||||
if label == "" && s.Details != "" {
|
||||
label = s.Details
|
||||
}
|
||||
if label == "" {
|
||||
label = s.Checksum
|
||||
}
|
||||
res = append(res, &LabelMappingEntry{ID: strconv.Itoa(s.ID), Label: label})
|
||||
}
|
||||
return res
|
||||
})
|
||||
|
||||
// Movies
|
||||
mapping.Movies = populateMapping([]string{"movies"}, func(ids []int) []*LabelMappingEntry {
|
||||
return nil
|
||||
})
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
return mapping, err
|
||||
}
|
||||
|
|
|
|||
190
internal/api/resolver_model_saved_filter_test.go
Normal file
190
internal/api/resolver_model_saved_filter_test.go
Normal file
|
|
@ -0,0 +1,190 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
// We verify the `LabelMapping` function handles parsing the interface mapping without panic and extracts correct lists.
|
||||
func TestSavedFilterLabelMappingEmpty(t *testing.T) {
|
||||
// Basic instantiation to just ensure it does not panic and returns empty correctly.
|
||||
resolver := &savedFilterResolver{}
|
||||
|
||||
obj := &models.SavedFilter{
|
||||
ObjectFilter: nil,
|
||||
}
|
||||
|
||||
mapping, err := resolver.LabelMapping(context.Background(), obj)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if mapping == nil || mapping.Tags != nil {
|
||||
t.Errorf("expected empty mapping, got %v", mapping)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSavedFilterLabelMappingComprehensive(t *testing.T) {
|
||||
mockDB := mocks.NewDatabase()
|
||||
resolver := &savedFilterResolver{
|
||||
Resolver: &Resolver{
|
||||
repository: mockDB.Repository(),
|
||||
},
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
obj := &models.SavedFilter{
|
||||
ObjectFilter: map[string]interface{}{
|
||||
"tags": map[string]interface{}{
|
||||
"value": []interface{}{"1", "2"},
|
||||
"excludes": []interface{}{"3"},
|
||||
},
|
||||
"scene_tags": map[string]interface{}{
|
||||
"value": []interface{}{"4"},
|
||||
},
|
||||
"performers": map[string]interface{}{
|
||||
"value": []interface{}{"10"},
|
||||
},
|
||||
"studios": map[string]interface{}{
|
||||
"value": []interface{}{"20"},
|
||||
},
|
||||
"groups": map[string]interface{}{
|
||||
"value": []interface{}{"30"},
|
||||
},
|
||||
"galleries": map[string]interface{}{
|
||||
"value": []interface{}{"40"},
|
||||
},
|
||||
"folders": map[string]interface{}{
|
||||
"value": []interface{}{"50"},
|
||||
},
|
||||
"scenes": map[string]interface{}{
|
||||
"value": []interface{}{"60", "61", "62"},
|
||||
},
|
||||
"movies": map[string]interface{}{
|
||||
"value": []interface{}{"70"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
mockDB.Tag.On("FindMany", mock.Anything, mock.MatchedBy(func(ids []int) bool {
|
||||
return len(ids) == 4
|
||||
})).Return([]*models.Tag{
|
||||
{ID: 1, Name: "Tag1"},
|
||||
{ID: 2, Name: "Tag2"},
|
||||
{ID: 3, Name: "Tag3"},
|
||||
{ID: 4, Name: "Tag4"},
|
||||
}, nil).Once()
|
||||
|
||||
mockDB.Performer.On("FindMany", mock.Anything, []int{10}).Return([]*models.Performer{
|
||||
{ID: 10, Name: "Performer10"},
|
||||
}, nil).Once()
|
||||
|
||||
mockDB.Studio.On("FindMany", mock.Anything, []int{20}).Return([]*models.Studio{
|
||||
{ID: 20, Name: "Studio20"},
|
||||
}, nil).Once()
|
||||
|
||||
mockDB.Group.On("FindMany", mock.Anything, []int{30}).Return([]*models.Group{
|
||||
{ID: 30, Name: "Group30"},
|
||||
}, nil).Once()
|
||||
|
||||
mockDB.Gallery.On("FindMany", mock.Anything, []int{40}).Return([]*models.Gallery{
|
||||
{ID: 40, Title: "Gallery40"},
|
||||
}, nil).Once()
|
||||
|
||||
mockDB.Folder.On("FindMany", mock.Anything, []models.FolderID{50}).Return([]*models.Folder{
|
||||
{ID: 50, Path: "/folder/50"},
|
||||
}, nil).Once()
|
||||
|
||||
mockDB.Scene.On("FindMany", mock.Anything, mock.MatchedBy(func(ids []int) bool {
|
||||
return len(ids) == 3
|
||||
})).Return([]*models.Scene{
|
||||
{ID: 60, Title: "Scene60"},
|
||||
{ID: 61, Details: "Scene61 Details"},
|
||||
{ID: 62, Checksum: "checksum62"},
|
||||
}, nil).Once()
|
||||
|
||||
mapping, err := resolver.LabelMapping(ctx, obj)
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, mapping)
|
||||
|
||||
assert.Len(t, mapping.Tags, 4)
|
||||
assert.Equal(t, "Tag1", mapping.Tags[0].Label)
|
||||
assert.Equal(t, "1", mapping.Tags[0].ID)
|
||||
|
||||
assert.Len(t, mapping.Performers, 1)
|
||||
assert.Equal(t, "Performer10", mapping.Performers[0].Label)
|
||||
|
||||
assert.Len(t, mapping.Studios, 1)
|
||||
assert.Equal(t, "Studio20", mapping.Studios[0].Label)
|
||||
|
||||
assert.Len(t, mapping.Groups, 1)
|
||||
assert.Equal(t, "Group30", mapping.Groups[0].Label)
|
||||
assert.Equal(t, "30", mapping.Groups[0].ID)
|
||||
|
||||
assert.Len(t, mapping.Galleries, 1)
|
||||
assert.Equal(t, "Gallery40", mapping.Galleries[0].Label)
|
||||
assert.Equal(t, "40", mapping.Galleries[0].ID)
|
||||
|
||||
assert.Len(t, mapping.Folders, 1)
|
||||
assert.Equal(t, "/folder/50", mapping.Folders[0].Label)
|
||||
assert.Equal(t, "50", mapping.Folders[0].ID)
|
||||
|
||||
assert.Len(t, mapping.Scenes, 3)
|
||||
assert.Equal(t, "Scene60", mapping.Scenes[0].Label)
|
||||
assert.Equal(t, "60", mapping.Scenes[0].ID)
|
||||
assert.Equal(t, "Scene61 Details", mapping.Scenes[1].Label)
|
||||
assert.Equal(t, "checksum62", mapping.Scenes[2].Label)
|
||||
|
||||
// Movies isn't implemented and should be nil
|
||||
assert.Nil(t, mapping.Movies)
|
||||
}
|
||||
|
||||
func TestSavedFilterLabelMappingDeduplication(t *testing.T) {
|
||||
mockDB := mocks.NewDatabase()
|
||||
resolver := &savedFilterResolver{
|
||||
Resolver: &Resolver{
|
||||
repository: mockDB.Repository(),
|
||||
},
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
obj := &models.SavedFilter{
|
||||
ObjectFilter: map[string]interface{}{
|
||||
"tags": map[string]interface{}{
|
||||
"value": []interface{}{"1", "2"},
|
||||
"excludes": []interface{}{"2", "3"},
|
||||
},
|
||||
"scene_tags": map[string]interface{}{
|
||||
"value": []interface{}{"1", "3"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
mockDB.Tag.On("FindMany", mock.Anything, mock.MatchedBy(func(ids []int) bool {
|
||||
if len(ids) != 3 {
|
||||
return false
|
||||
}
|
||||
// IDs should be 1, 2, 3
|
||||
idMap := map[int]bool{}
|
||||
for _, id := range ids {
|
||||
idMap[id] = true
|
||||
}
|
||||
return idMap[1] && idMap[2] && idMap[3]
|
||||
})).Return([]*models.Tag{
|
||||
{ID: 1, Name: "Tag1"},
|
||||
{ID: 2, Name: "Tag2"},
|
||||
{ID: 3, Name: "Tag3"},
|
||||
}, nil).Once()
|
||||
|
||||
mapping, err := resolver.LabelMapping(ctx, obj)
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, mapping)
|
||||
|
||||
assert.Len(t, mapping.Tags, 3)
|
||||
}
|
||||
|
|
@ -1,6 +1,8 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"context"
|
||||
"errors"
|
||||
"strconv"
|
||||
|
|
@ -48,9 +50,21 @@ func (r *queryResolver) FindFile(ctx context.Context, id *string, path *string)
|
|||
func (r *queryResolver) FindFiles(
|
||||
ctx context.Context,
|
||||
fileFilter *models.FileFilterType,
|
||||
savedFilterID *string,
|
||||
filter *models.FindFilterType,
|
||||
ids []string,
|
||||
) (ret *FindFilesResultType, err error) {
|
||||
if fileFilter != nil && savedFilterID != nil {
|
||||
return nil, errors.New("cannot provide both fileFilter and saved_filter_id")
|
||||
}
|
||||
|
||||
var finalFilter *models.FileFilterType
|
||||
if savedFilterID != nil {
|
||||
finalFilter = &models.FileFilterType{}
|
||||
return nil, fmt.Errorf("saved filters are not supported for %s", "fileFilter")
|
||||
} else {
|
||||
finalFilter = fileFilter
|
||||
}
|
||||
var fileIDs []models.FileID
|
||||
if len(ids) > 0 {
|
||||
fileIDsInt, err := stringslice.StringSliceToIntSlice(ids)
|
||||
|
|
@ -89,7 +103,7 @@ func (r *queryResolver) FindFiles(
|
|||
FindFilter: filter,
|
||||
Count: fields.Has("count"),
|
||||
},
|
||||
FileFilter: fileFilter,
|
||||
FileFilter: finalFilter,
|
||||
TotalDuration: fields.Has("duration"),
|
||||
Megapixels: fields.Has("megapixels"),
|
||||
TotalSize: fields.Has("size"),
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"context"
|
||||
"errors"
|
||||
"strconv"
|
||||
|
|
@ -43,9 +45,21 @@ func (r *queryResolver) FindFolder(ctx context.Context, id *string, path *string
|
|||
func (r *queryResolver) FindFolders(
|
||||
ctx context.Context,
|
||||
folderFilter *models.FolderFilterType,
|
||||
savedFilterID *string,
|
||||
filter *models.FindFilterType,
|
||||
ids []string,
|
||||
) (ret *FindFoldersResultType, err error) {
|
||||
if folderFilter != nil && savedFilterID != nil {
|
||||
return nil, errors.New("cannot provide both folderFilter and saved_filter_id")
|
||||
}
|
||||
|
||||
var finalFilter *models.FolderFilterType
|
||||
if savedFilterID != nil {
|
||||
finalFilter = &models.FolderFilterType{}
|
||||
return nil, fmt.Errorf("saved filters are not supported for %s", "folderFilter")
|
||||
} else {
|
||||
finalFilter = folderFilter
|
||||
}
|
||||
var folderIDs []models.FolderID
|
||||
if len(ids) > 0 {
|
||||
folderIDsInt, err := handleIDList(ids, "ids")
|
||||
|
|
@ -74,7 +88,7 @@ func (r *queryResolver) FindFolders(
|
|||
FindFilter: filter,
|
||||
Count: fields.Has("count"),
|
||||
},
|
||||
FolderFilter: folderFilter,
|
||||
FolderFilter: finalFilter,
|
||||
})
|
||||
if err == nil {
|
||||
folders, err = result.Resolve(ctx)
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"context"
|
||||
"strconv"
|
||||
|
||||
|
|
@ -23,7 +25,30 @@ func (r *queryResolver) FindGallery(ctx context.Context, id string) (ret *models
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindGalleries(ctx context.Context, galleryFilter *models.GalleryFilterType, filter *models.FindFilterType, ids []string) (ret *FindGalleriesResultType, err error) {
|
||||
func (r *queryResolver) FindGalleries(
|
||||
ctx context.Context,
|
||||
galleryFilter *models.GalleryFilterType,
|
||||
savedFilterID *string,
|
||||
filter *models.FindFilterType,
|
||||
ids []string,
|
||||
) (ret *FindGalleriesResultType, err error) {
|
||||
if galleryFilter != nil && savedFilterID != nil {
|
||||
return nil, errors.New("cannot provide both galleryFilter and saved_filter_id")
|
||||
}
|
||||
|
||||
var finalFilter *models.GalleryFilterType
|
||||
if savedFilterID != nil {
|
||||
finalFilter = &models.GalleryFilterType{}
|
||||
mode := models.FilterModeGalleries
|
||||
|
||||
mergedFindFilter, err := r.resolveSavedFilter(ctx, *savedFilterID, mode, finalFilter, filter)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
filter = mergedFindFilter
|
||||
} else {
|
||||
finalFilter = galleryFilter
|
||||
}
|
||||
idInts, err := handleIDList(ids, "ids")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
@ -38,7 +63,7 @@ func (r *queryResolver) FindGalleries(ctx context.Context, galleryFilter *models
|
|||
galleries, err = r.repository.Gallery.FindMany(ctx, idInts)
|
||||
total = len(galleries)
|
||||
} else {
|
||||
galleries, total, err = r.repository.Gallery.Query(ctx, galleryFilter, filter)
|
||||
galleries, total, err = r.repository.Gallery.Query(ctx, finalFilter, filter)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"context"
|
||||
"strconv"
|
||||
|
||||
|
|
@ -23,7 +25,30 @@ func (r *queryResolver) FindGroup(ctx context.Context, id string) (ret *models.G
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindGroups(ctx context.Context, groupFilter *models.GroupFilterType, filter *models.FindFilterType, ids []string) (ret *FindGroupsResultType, err error) {
|
||||
func (r *queryResolver) FindGroups(
|
||||
ctx context.Context,
|
||||
groupFilter *models.GroupFilterType,
|
||||
savedFilterID *string,
|
||||
filter *models.FindFilterType,
|
||||
ids []string,
|
||||
) (ret *FindGroupsResultType, err error) {
|
||||
if groupFilter != nil && savedFilterID != nil {
|
||||
return nil, errors.New("cannot provide both groupFilter and saved_filter_id")
|
||||
}
|
||||
|
||||
var finalFilter *models.GroupFilterType
|
||||
if savedFilterID != nil {
|
||||
finalFilter = &models.GroupFilterType{}
|
||||
mode := models.FilterModeGroups
|
||||
|
||||
mergedFindFilter, err := r.resolveSavedFilter(ctx, *savedFilterID, mode, finalFilter, filter)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
filter = mergedFindFilter
|
||||
} else {
|
||||
finalFilter = groupFilter
|
||||
}
|
||||
idInts, err := handleIDList(ids, "ids")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
@ -38,7 +63,7 @@ func (r *queryResolver) FindGroups(ctx context.Context, groupFilter *models.Grou
|
|||
groups, err = r.repository.Group.FindMany(ctx, idInts)
|
||||
total = len(groups)
|
||||
} else {
|
||||
groups, total, err = r.repository.Group.Query(ctx, groupFilter, filter)
|
||||
groups, total, err = r.repository.Group.Query(ctx, finalFilter, filter)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"context"
|
||||
"slices"
|
||||
"strconv"
|
||||
|
|
@ -49,10 +51,28 @@ func (r *queryResolver) FindImage(ctx context.Context, id *string, checksum *str
|
|||
func (r *queryResolver) FindImages(
|
||||
ctx context.Context,
|
||||
imageFilter *models.ImageFilterType,
|
||||
savedFilterID *string,
|
||||
imageIds []int,
|
||||
ids []string,
|
||||
filter *models.FindFilterType,
|
||||
) (ret *FindImagesResultType, err error) {
|
||||
if imageFilter != nil && savedFilterID != nil {
|
||||
return nil, errors.New("cannot provide both imageFilter and saved_filter_id")
|
||||
}
|
||||
|
||||
var finalFilter *models.ImageFilterType
|
||||
if savedFilterID != nil {
|
||||
finalFilter = &models.ImageFilterType{}
|
||||
mode := models.FilterModeImages
|
||||
|
||||
mergedFindFilter, err := r.resolveSavedFilter(ctx, *savedFilterID, mode, finalFilter, filter)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
filter = mergedFindFilter
|
||||
} else {
|
||||
finalFilter = imageFilter
|
||||
}
|
||||
if len(ids) > 0 {
|
||||
imageIds, err = handleIDList(ids, "ids")
|
||||
if err != nil {
|
||||
|
|
@ -96,7 +116,7 @@ func (r *queryResolver) FindImages(
|
|||
FindFilter: filter,
|
||||
Count: slices.Contains(fields, "count"),
|
||||
},
|
||||
ImageFilter: imageFilter,
|
||||
ImageFilter: finalFilter,
|
||||
Megapixels: slices.Contains(fields, "megapixels"),
|
||||
TotalSize: slices.Contains(fields, "filesize"),
|
||||
})
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"context"
|
||||
"strconv"
|
||||
|
||||
|
|
@ -23,7 +25,30 @@ func (r *queryResolver) FindMovie(ctx context.Context, id string) (ret *models.G
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.GroupFilterType, filter *models.FindFilterType, ids []string) (ret *FindMoviesResultType, err error) {
|
||||
func (r *queryResolver) FindMovies(
|
||||
ctx context.Context,
|
||||
movieFilter *models.GroupFilterType,
|
||||
savedFilterID *string,
|
||||
filter *models.FindFilterType,
|
||||
ids []string,
|
||||
) (ret *FindMoviesResultType, err error) {
|
||||
if movieFilter != nil && savedFilterID != nil {
|
||||
return nil, errors.New("cannot provide both movieFilter and saved_filter_id")
|
||||
}
|
||||
|
||||
var finalFilter *models.GroupFilterType
|
||||
if savedFilterID != nil {
|
||||
finalFilter = &models.GroupFilterType{}
|
||||
mode := models.FilterModeMovies
|
||||
|
||||
mergedFindFilter, err := r.resolveSavedFilter(ctx, *savedFilterID, mode, finalFilter, filter)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
filter = mergedFindFilter
|
||||
} else {
|
||||
finalFilter = movieFilter
|
||||
}
|
||||
idInts, err := handleIDList(ids, "ids")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
@ -38,7 +63,7 @@ func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.Grou
|
|||
groups, err = r.repository.Group.FindMany(ctx, idInts)
|
||||
total = len(groups)
|
||||
} else {
|
||||
groups, total, err = r.repository.Group.Query(ctx, movieFilter, filter)
|
||||
groups, total, err = r.repository.Group.Query(ctx, finalFilter, filter)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"context"
|
||||
"strconv"
|
||||
|
||||
|
|
@ -23,7 +25,30 @@ func (r *queryResolver) FindPerformer(ctx context.Context, id string) (ret *mode
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *models.PerformerFilterType, filter *models.FindFilterType, performerIDs []int, ids []string) (ret *FindPerformersResultType, err error) {
|
||||
func (r *queryResolver) FindPerformers(
|
||||
ctx context.Context,
|
||||
performerFilter *models.PerformerFilterType,
|
||||
savedFilterID *string,
|
||||
filter *models.FindFilterType, performerIDs []int,
|
||||
ids []string,
|
||||
) (ret *FindPerformersResultType, err error) {
|
||||
if performerFilter != nil && savedFilterID != nil {
|
||||
return nil, errors.New("cannot provide both performerFilter and saved_filter_id")
|
||||
}
|
||||
|
||||
var finalFilter *models.PerformerFilterType
|
||||
if savedFilterID != nil {
|
||||
finalFilter = &models.PerformerFilterType{}
|
||||
mode := models.FilterModePerformers
|
||||
|
||||
mergedFindFilter, err := r.resolveSavedFilter(ctx, *savedFilterID, mode, finalFilter, filter)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
filter = mergedFindFilter
|
||||
} else {
|
||||
finalFilter = performerFilter
|
||||
}
|
||||
if len(ids) > 0 {
|
||||
performerIDs, err = handleIDList(ids, "ids")
|
||||
if err != nil {
|
||||
|
|
@ -32,8 +57,8 @@ func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *mod
|
|||
}
|
||||
|
||||
// #5682 - convert JSON numbers to float64 or int64
|
||||
if performerFilter != nil {
|
||||
performerFilter.CustomFields = convertCustomFieldCriterionInputJSONNumbers(performerFilter.CustomFields)
|
||||
if finalFilter != nil {
|
||||
finalFilter.CustomFields = convertCustomFieldCriterionInputJSONNumbers(finalFilter.CustomFields)
|
||||
}
|
||||
|
||||
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
|
||||
|
|
@ -45,7 +70,7 @@ func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *mod
|
|||
performers, err = r.repository.Performer.FindMany(ctx, performerIDs)
|
||||
total = len(performers)
|
||||
} else {
|
||||
performers, total, err = r.repository.Performer.Query(ctx, performerFilter, filter)
|
||||
performers, total, err = r.repository.Performer.Query(ctx, finalFilter, filter)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"context"
|
||||
"slices"
|
||||
"strconv"
|
||||
|
|
@ -77,10 +79,28 @@ func (r *queryResolver) FindSceneByHash(ctx context.Context, input SceneHashInpu
|
|||
func (r *queryResolver) FindScenes(
|
||||
ctx context.Context,
|
||||
sceneFilter *models.SceneFilterType,
|
||||
savedFilterID *string,
|
||||
sceneIDs []int,
|
||||
ids []string,
|
||||
filter *models.FindFilterType,
|
||||
) (ret *FindScenesResultType, err error) {
|
||||
if sceneFilter != nil && savedFilterID != nil {
|
||||
return nil, errors.New("cannot provide both sceneFilter and saved_filter_id")
|
||||
}
|
||||
|
||||
var finalFilter *models.SceneFilterType
|
||||
if savedFilterID != nil {
|
||||
finalFilter = &models.SceneFilterType{}
|
||||
mode := models.FilterModeScenes
|
||||
|
||||
mergedFindFilter, err := r.resolveSavedFilter(ctx, *savedFilterID, mode, finalFilter, filter)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
filter = mergedFindFilter
|
||||
} else {
|
||||
finalFilter = sceneFilter
|
||||
}
|
||||
if len(ids) > 0 {
|
||||
sceneIDs, err = handleIDList(ids, "ids")
|
||||
if err != nil {
|
||||
|
|
@ -120,7 +140,7 @@ func (r *queryResolver) FindScenes(
|
|||
FindFilter: filter,
|
||||
Count: slices.Contains(fields, "count"),
|
||||
},
|
||||
SceneFilter: sceneFilter,
|
||||
SceneFilter: finalFilter,
|
||||
TotalDuration: slices.Contains(fields, "duration"),
|
||||
TotalSize: slices.Contains(fields, "filesize"),
|
||||
})
|
||||
|
|
@ -151,10 +171,10 @@ func (r *queryResolver) FindScenes(
|
|||
func (r *queryResolver) FindScenesByPathRegex(ctx context.Context, filter *models.FindFilterType) (ret *FindScenesResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
|
||||
|
||||
sceneFilter := &models.SceneFilterType{}
|
||||
finalFilter := &models.SceneFilterType{}
|
||||
|
||||
if filter != nil && filter.Q != nil {
|
||||
sceneFilter.Path = &models.StringCriterionInput{
|
||||
finalFilter.Path = &models.StringCriterionInput{
|
||||
Modifier: models.CriterionModifierMatchesRegex,
|
||||
Value: "(?i)" + *filter.Q,
|
||||
}
|
||||
|
|
@ -175,7 +195,7 @@ func (r *queryResolver) FindScenesByPathRegex(ctx context.Context, filter *model
|
|||
FindFilter: queryFilter,
|
||||
Count: slices.Contains(fields, "count"),
|
||||
},
|
||||
SceneFilter: sceneFilter,
|
||||
SceneFilter: finalFilter,
|
||||
TotalDuration: slices.Contains(fields, "duration"),
|
||||
TotalSize: slices.Contains(fields, "filesize"),
|
||||
})
|
||||
|
|
|
|||
|
|
@ -1,12 +1,37 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"context"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindSceneMarkers(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, filter *models.FindFilterType, ids []string) (ret *FindSceneMarkersResultType, err error) {
|
||||
func (r *queryResolver) FindSceneMarkers(
|
||||
ctx context.Context,
|
||||
sceneMarkerFilter *models.SceneMarkerFilterType,
|
||||
savedFilterID *string,
|
||||
filter *models.FindFilterType,
|
||||
ids []string,
|
||||
) (ret *FindSceneMarkersResultType, err error) {
|
||||
if sceneMarkerFilter != nil && savedFilterID != nil {
|
||||
return nil, errors.New("cannot provide both sceneMarkerFilter and saved_filter_id")
|
||||
}
|
||||
|
||||
var finalFilter *models.SceneMarkerFilterType
|
||||
if savedFilterID != nil {
|
||||
finalFilter = &models.SceneMarkerFilterType{}
|
||||
var mode models.FilterMode = models.FilterModeSceneMarkers
|
||||
|
||||
mergedFindFilter, err := r.resolveSavedFilter(ctx, *savedFilterID, mode, finalFilter, filter)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
filter = mergedFindFilter
|
||||
} else {
|
||||
finalFilter = sceneMarkerFilter
|
||||
}
|
||||
idInts, err := handleIDList(ids, "ids")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
@ -21,7 +46,7 @@ func (r *queryResolver) FindSceneMarkers(ctx context.Context, sceneMarkerFilter
|
|||
sceneMarkers, err = r.repository.SceneMarker.FindMany(ctx, idInts)
|
||||
total = len(sceneMarkers)
|
||||
} else {
|
||||
sceneMarkers, total, err = r.repository.SceneMarker.Query(ctx, sceneMarkerFilter, filter)
|
||||
sceneMarkers, total, err = r.repository.SceneMarker.Query(ctx, finalFilter, filter)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"context"
|
||||
"strconv"
|
||||
|
||||
|
|
@ -24,7 +26,30 @@ func (r *queryResolver) FindStudio(ctx context.Context, id string) (ret *models.
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindStudios(ctx context.Context, studioFilter *models.StudioFilterType, filter *models.FindFilterType, ids []string) (ret *FindStudiosResultType, err error) {
|
||||
func (r *queryResolver) FindStudios(
|
||||
ctx context.Context,
|
||||
studioFilter *models.StudioFilterType,
|
||||
savedFilterID *string,
|
||||
filter *models.FindFilterType,
|
||||
ids []string,
|
||||
) (ret *FindStudiosResultType, err error) {
|
||||
if studioFilter != nil && savedFilterID != nil {
|
||||
return nil, errors.New("cannot provide both studioFilter and saved_filter_id")
|
||||
}
|
||||
|
||||
var finalFilter *models.StudioFilterType
|
||||
if savedFilterID != nil {
|
||||
finalFilter = &models.StudioFilterType{}
|
||||
mode := models.FilterModeStudios
|
||||
|
||||
mergedFindFilter, err := r.resolveSavedFilter(ctx, *savedFilterID, mode, finalFilter, filter)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
filter = mergedFindFilter
|
||||
} else {
|
||||
finalFilter = studioFilter
|
||||
}
|
||||
idInts, err := handleIDList(ids, "ids")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
@ -39,7 +64,7 @@ func (r *queryResolver) FindStudios(ctx context.Context, studioFilter *models.St
|
|||
studios, err = r.repository.Studio.FindMany(ctx, idInts)
|
||||
total = len(studios)
|
||||
} else {
|
||||
studios, total, err = r.repository.Studio.Query(ctx, studioFilter, filter)
|
||||
studios, total, err = r.repository.Studio.Query(ctx, finalFilter, filter)
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"context"
|
||||
"strconv"
|
||||
|
||||
|
|
@ -23,7 +25,30 @@ func (r *queryResolver) FindTag(ctx context.Context, id string) (ret *models.Tag
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindTags(ctx context.Context, tagFilter *models.TagFilterType, filter *models.FindFilterType, ids []string) (ret *FindTagsResultType, err error) {
|
||||
func (r *queryResolver) FindTags(
|
||||
ctx context.Context,
|
||||
tagFilter *models.TagFilterType,
|
||||
savedFilterID *string,
|
||||
filter *models.FindFilterType,
|
||||
ids []string,
|
||||
) (ret *FindTagsResultType, err error) {
|
||||
if tagFilter != nil && savedFilterID != nil {
|
||||
return nil, errors.New("cannot provide both tagFilter and saved_filter_id")
|
||||
}
|
||||
|
||||
var finalFilter *models.TagFilterType
|
||||
if savedFilterID != nil {
|
||||
finalFilter = &models.TagFilterType{}
|
||||
mode := models.FilterModeTags
|
||||
|
||||
mergedFindFilter, err := r.resolveSavedFilter(ctx, *savedFilterID, mode, finalFilter, filter)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
filter = mergedFindFilter
|
||||
} else {
|
||||
finalFilter = tagFilter
|
||||
}
|
||||
idInts, err := handleIDList(ids, "ids")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
@ -38,7 +63,7 @@ func (r *queryResolver) FindTags(ctx context.Context, tagFilter *models.TagFilte
|
|||
tags, err = r.repository.Tag.FindMany(ctx, idInts)
|
||||
total = len(tags)
|
||||
} else {
|
||||
tags, total, err = r.repository.Tag.Query(ctx, tagFilter, filter)
|
||||
tags, total, err = r.repository.Tag.Query(ctx, finalFilter, filter)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
|
|
|
|||
74
internal/api/resolver_saved_filter_helper.go
Normal file
74
internal/api/resolver_saved_filter_helper.go
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strconv"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
// resolveSavedFilter is a helper that looks up a saved filter by ID, enforces that it matches
|
||||
// the expected mode (e.g. SCENES), and returns the populated object filter (e.g., SceneFilterType)
|
||||
// and an updated find filter that merges the saved one with the overrides.
|
||||
func (r *queryResolver) resolveSavedFilter(ctx context.Context, savedFilterID string, mode models.FilterMode, outObjectFilter interface{}, currentFindFilter *models.FindFilterType) (*models.FindFilterType, error) {
|
||||
id, err := strconv.Atoi(savedFilterID)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid saved_filter_id: %w", err)
|
||||
}
|
||||
|
||||
var savedFilter *models.SavedFilter
|
||||
err = r.withReadTxn(ctx, func(ctx context.Context) error {
|
||||
var err error
|
||||
savedFilter, err = r.repository.SavedFilter.Find(ctx, id)
|
||||
return err
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to fetch saved filter: %w", err)
|
||||
}
|
||||
if savedFilter == nil {
|
||||
return nil, fmt.Errorf("saved filter %s not found", savedFilterID)
|
||||
}
|
||||
|
||||
if savedFilter.Mode != mode {
|
||||
return nil, fmt.Errorf("saved filter is of mode %s, but expected %s", savedFilter.Mode, mode)
|
||||
}
|
||||
|
||||
if savedFilter.ObjectFilter != nil {
|
||||
b, err := json.Marshal(savedFilter.ObjectFilter)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to marshal object filter: %w", err)
|
||||
}
|
||||
if err := json.Unmarshal(b, outObjectFilter); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal object filter into target struct: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Merge find filter
|
||||
finalFindFilter := savedFilter.FindFilter
|
||||
if finalFindFilter == nil {
|
||||
finalFindFilter = &models.FindFilterType{}
|
||||
}
|
||||
|
||||
if currentFindFilter != nil {
|
||||
if currentFindFilter.Q != nil {
|
||||
finalFindFilter.Q = currentFindFilter.Q
|
||||
}
|
||||
if currentFindFilter.Page != nil {
|
||||
finalFindFilter.Page = currentFindFilter.Page
|
||||
}
|
||||
if currentFindFilter.PerPage != nil {
|
||||
finalFindFilter.PerPage = currentFindFilter.PerPage
|
||||
}
|
||||
if currentFindFilter.Sort != nil {
|
||||
finalFindFilter.Sort = currentFindFilter.Sort
|
||||
}
|
||||
if currentFindFilter.Direction != nil {
|
||||
finalFindFilter.Direction = currentFindFilter.Direction
|
||||
}
|
||||
}
|
||||
|
||||
return finalFindFilter, nil
|
||||
}
|
||||
145
internal/api/resolver_saved_filter_helper_test.go
Normal file
145
internal/api/resolver_saved_filter_helper_test.go
Normal file
|
|
@ -0,0 +1,145 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
// TestResolveSavedFilter exercises resolveSavedFilter against a mocked
// SavedFilter repository: bad IDs, missing filters, mode mismatches, and a
// successful lookup that merges saved and caller-supplied find filters.
func TestResolveSavedFilter(t *testing.T) {
	mockDB := mocks.NewDatabase()
	resolver := &queryResolver{
		Resolver: &Resolver{
			repository: mockDB.Repository(),
		},
	}

	ctx := context.Background()

	// Non-numeric IDs must be rejected before hitting the repository.
	t.Run("invalid ID", func(t *testing.T) {
		_, err := resolver.resolveSavedFilter(ctx, "abc", models.FilterModeScenes, &models.SceneFilterType{}, nil)
		assert.Error(t, err)
		assert.Contains(t, err.Error(), "invalid saved_filter_id")
	})

	// A nil repository result (no error) is reported as "not found".
	t.Run("not found", func(t *testing.T) {
		mockDB.SavedFilter.On("Find", mock.Anything, 123).Return(nil, nil).Once()
		_, err := resolver.resolveSavedFilter(ctx, "123", models.FilterModeScenes, &models.SceneFilterType{}, nil)
		assert.Error(t, err)
		assert.Contains(t, err.Error(), "saved filter 123 not found")
	})

	// Requesting a SCENES filter but finding an IMAGES one must fail.
	t.Run("mode mismatch", func(t *testing.T) {
		savedFilter := &models.SavedFilter{
			ID:   123,
			Mode: models.FilterModeImages,
		}
		mockDB.SavedFilter.On("Find", mock.Anything, 123).Return(savedFilter, nil).Once()
		_, err := resolver.resolveSavedFilter(ctx, "123", models.FilterModeScenes, &models.SceneFilterType{}, nil)
		assert.Error(t, err)
		assert.Contains(t, err.Error(), "expected SCENES")
	})

	// Happy path: the object filter is unmarshaled into the target struct and
	// the find filter merges saved values with caller overrides.
	t.Run("success with merge", func(t *testing.T) {
		q1 := "search1"
		q2 := "search2"
		page := 2
		savedFilter := &models.SavedFilter{
			ID:   123,
			Mode: models.FilterModeScenes,
			ObjectFilter: map[string]interface{}{
				"rating100": map[string]interface{}{
					"modifier": "GREATER_THAN",
					"value":    60,
				},
			},
			FindFilter: &models.FindFilterType{
				Q:    &q1,
				Page: &page,
			},
		}
		mockDB.SavedFilter.On("Find", mock.Anything, 123).Return(savedFilter, nil).Once()

		sceneFilter := &models.SceneFilterType{}
		currentFindFilter := &models.FindFilterType{
			Q: &q2,
		}

		mergedFilter, err := resolver.resolveSavedFilter(ctx, "123", models.FilterModeScenes, sceneFilter, currentFindFilter)
		assert.NoError(t, err)

		// Verify object filter unmarshaled correctly
		assert.NotNil(t, sceneFilter.Rating100)
		assert.Equal(t, models.CriterionModifierGreaterThan, sceneFilter.Rating100.Modifier)
		assert.Equal(t, 60, sceneFilter.Rating100.Value)

		// Verify find filter merged correctly (override Q, keep Page)
		assert.Equal(t, q2, *mergedFilter.Q)
		assert.Equal(t, page, *mergedFilter.Page)
	})
}
|
||||
|
||||
// TestLabelMapping verifies that the savedFilterResolver resolves stored
// criterion IDs (tags, performers, studios, ...) into id/label pairs using the
// mocked repositories. "value" and "excludes" IDs are looked up together.
func TestLabelMapping(t *testing.T) {
	mockDB := mocks.NewDatabase()
	resolver := &savedFilterResolver{
		Resolver: &Resolver{
			repository: mockDB.Repository(),
		},
	}

	ctx := context.Background()

	t.Run("mapping with various criteria", func(t *testing.T) {
		// Saved filter whose object filter references three tags (two included,
		// one excluded), one performer and one studio by ID.
		obj := &models.SavedFilter{
			ObjectFilter: map[string]interface{}{
				"tags": map[string]interface{}{
					"value":    []interface{}{"1", "2"},
					"excludes": []interface{}{"3"},
				},
				"performers": map[string]interface{}{
					"value": []interface{}{"10"},
				},
				"studios": map[string]interface{}{
					"value": []interface{}{"20"},
				},
			},
		}

		// Expect a single batched lookup per entity type; the tags call covers
		// both included and excluded IDs.
		mockDB.Tag.On("FindMany", mock.Anything, []int{1, 2, 3}).Return([]*models.Tag{
			{ID: 1, Name: "Tag1"},
			{ID: 2, Name: "Tag2"},
			{ID: 3, Name: "Tag3"},
		}, nil).Once()

		mockDB.Performer.On("FindMany", mock.Anything, []int{10}).Return([]*models.Performer{
			{ID: 10, Name: "Performer10"},
		}, nil).Once()

		mockDB.Studio.On("FindMany", mock.Anything, []int{20}).Return([]*models.Studio{
			{ID: 20, Name: "Studio20"},
		}, nil).Once()

		// Other mock calls for empty slices
		mockDB.Group.On("FindMany", mock.Anything, []int(nil)).Return([]*models.Group{}, nil).Maybe()
		mockDB.Gallery.On("FindMany", mock.Anything, []int(nil)).Return([]*models.Gallery{}, nil).Maybe()
		mockDB.Folder.On("FindMany", mock.Anything, []models.FolderID{}).Return([]*models.Folder{}, nil).Maybe()
		mockDB.Scene.On("FindMany", mock.Anything, []int(nil)).Return([]*models.Scene{}, nil).Maybe()

		mapping, err := resolver.LabelMapping(ctx, obj)
		assert.NoError(t, err)
		assert.NotNil(t, mapping)

		// Every referenced ID should come back with its display label.
		assert.Len(t, mapping.Tags, 3)
		assert.Equal(t, "Tag1", mapping.Tags[0].Label)

		assert.Len(t, mapping.Performers, 1)
		assert.Equal(t, "Performer10", mapping.Performers[0].Label)

		assert.Len(t, mapping.Studios, 1)
		assert.Equal(t, "Studio20", mapping.Studios[0].Label)
	})
}
|
||||
|
|
@ -34,7 +34,7 @@ const (
|
|||
cacheSizeEnv = "STASH_SQLITE_CACHE_SIZE"
|
||||
)
|
||||
|
||||
var appSchemaVersion uint = 85
|
||||
var appSchemaVersion uint = 86
|
||||
|
||||
//go:embed migrations/*.sql
|
||||
var migrationsBox embed.FS
|
||||
|
|
|
|||
155
pkg/sqlite/migrations/86_postmigrate.go
Normal file
155
pkg/sqlite/migrations/86_postmigrate.go
Normal file
|
|
@ -0,0 +1,155 @@
|
|||
package migrations
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/sqlite"
|
||||
)
|
||||
|
||||
func post86(ctx context.Context, db *sqlx.DB) error {
|
||||
logger.Info("Running post-migration for schema version 86")
|
||||
|
||||
m := schema86Migrator{
|
||||
migrator: migrator{
|
||||
db: db,
|
||||
},
|
||||
}
|
||||
|
||||
return m.migrateSavedFilters(ctx)
|
||||
}
|
||||
|
||||
// schema86Migrator converts saved_filters.object_filter JSON from the legacy
// UI shape into the API input shape for schema version 86.
type schema86Migrator struct {
	migrator
}
|
||||
|
||||
func (m *schema86Migrator) migrateSavedFilters(ctx context.Context) error {
|
||||
if err := m.withTxn(ctx, func(tx *sqlx.Tx) error {
|
||||
rows, err := tx.Query("SELECT id, object_filter FROM saved_filters ORDER BY id")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
for rows.Next() {
|
||||
var (
|
||||
id int
|
||||
objectFilter []byte
|
||||
)
|
||||
|
||||
err := rows.Scan(&id, &objectFilter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(objectFilter) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
newObjectFilter, err := m.convertObjectFilter(objectFilter)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to convert object filter for saved filter %d: %w", id, err)
|
||||
}
|
||||
|
||||
if newObjectFilter != nil {
|
||||
_, err = tx.Exec("UPDATE saved_filters SET object_filter = ? WHERE id = ?", newObjectFilter, id)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to update saved filter %d: %w", id, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return rows.Err()
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *schema86Migrator) convertObjectFilter(data []byte) ([]byte, error) {
|
||||
var filter map[string]interface{}
|
||||
if err := json.Unmarshal(data, &filter); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal object filter: %w", err)
|
||||
}
|
||||
|
||||
for _, value := range filter {
|
||||
criterion, ok := value.(map[string]interface{})
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
v, hasValue := criterion["value"]
|
||||
if !hasValue || v == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
if valueObj, isObj := v.(map[string]interface{}); isObj {
|
||||
_, hasItems := valueObj["items"]
|
||||
_, hasExcluded := valueObj["excluded"]
|
||||
|
||||
if hasItems || hasExcluded {
|
||||
var values []string
|
||||
if items, ok := valueObj["items"].([]interface{}); ok {
|
||||
for _, item := range items {
|
||||
if itemMap, isMap := item.(map[string]interface{}); isMap {
|
||||
if idStr, ok := itemMap["id"].(string); ok {
|
||||
values = append(values, idStr)
|
||||
} else if idFloat, ok := itemMap["id"].(float64); ok {
|
||||
values = append(values, fmt.Sprintf("%d", int(idFloat)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var excludes []string
|
||||
if excluded, ok := valueObj["excluded"].([]interface{}); ok {
|
||||
for _, item := range excluded {
|
||||
if itemMap, isMap := item.(map[string]interface{}); isMap {
|
||||
if idStr, ok := itemMap["id"].(string); ok {
|
||||
excludes = append(excludes, idStr)
|
||||
} else if idFloat, ok := itemMap["id"].(float64); ok {
|
||||
excludes = append(excludes, fmt.Sprintf("%d", int(idFloat)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var depth interface{}
|
||||
if d, ok := valueObj["depth"]; ok {
|
||||
depth = d
|
||||
} else if d, ok := valueObj["Depth"]; ok {
|
||||
depth = d
|
||||
}
|
||||
|
||||
if len(values) > 0 {
|
||||
criterion["value"] = values
|
||||
} else {
|
||||
criterion["value"] = []string{}
|
||||
}
|
||||
|
||||
if len(excludes) > 0 || hasExcluded {
|
||||
if excludes == nil {
|
||||
criterion["excludes"] = []string{}
|
||||
} else {
|
||||
criterion["excludes"] = excludes
|
||||
}
|
||||
}
|
||||
|
||||
if depth != nil {
|
||||
criterion["depth"] = depth
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return json.Marshal(filter)
|
||||
}
|
||||
|
||||
// Register the post-migration hook so it runs after the schema 86 SQL
// migration has been applied.
func init() {
	sqlite.RegisterPostMigration(86, post86)
}
|
||||
108
pkg/sqlite/migrations/86_postmigrate_test.go
Normal file
108
pkg/sqlite/migrations/86_postmigrate_test.go
Normal file
|
|
@ -0,0 +1,108 @@
|
|||
package migrations
|
||||
|
||||
import (
	"encoding/json"
	"reflect"
	"testing"
)
|
||||
|
||||
func TestConvertObjectFilter(t *testing.T) {
|
||||
migrator := schema86Migrator{}
|
||||
input := `{
|
||||
"tags": {
|
||||
"modifier": "INCLUDES",
|
||||
"value": {
|
||||
"depth": 0,
|
||||
"excluded": [
|
||||
{
|
||||
"id": "27",
|
||||
"label": "JAV Actress"
|
||||
}
|
||||
],
|
||||
"items": [
|
||||
{
|
||||
"id": "28",
|
||||
"label": "xyz"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}`
|
||||
|
||||
expected := `{"tags":{"depth":0,"excludes":["27"],"modifier":"INCLUDES","value":["28"]}}`
|
||||
|
||||
output, err := migrator.convertObjectFilter([]byte(input))
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
|
||||
var outMap, expMap map[string]interface{}
|
||||
if err := json.Unmarshal(output, &outMap); err != nil {
|
||||
t.Fatalf("unexpected unmarshal error: %v", err)
|
||||
}
|
||||
if err := json.Unmarshal([]byte(expected), &expMap); err != nil {
|
||||
t.Fatalf("unexpected unmarshal error: %v", err)
|
||||
}
|
||||
|
||||
outJSON, err := json.Marshal(outMap)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected marshal error: %v", err)
|
||||
}
|
||||
expJSON, err := json.Marshal(expMap)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected marshal error: %v", err)
|
||||
}
|
||||
|
||||
if string(outJSON) != string(expJSON) {
|
||||
t.Errorf("expected %s, got %s", string(expJSON), string(outJSON))
|
||||
}
|
||||
}
|
||||
|
||||
func TestConvertObjectFilterPrimitive(t *testing.T) {
|
||||
migrator := schema86Migrator{}
|
||||
input := `{
|
||||
"galleries": {
|
||||
"modifier": "INCLUDES",
|
||||
"value": {
|
||||
"excluded": [],
|
||||
"items": [
|
||||
{
|
||||
"id": "1",
|
||||
"label": "gallery 1"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"has_markers": {
|
||||
"modifier": "EQUALS",
|
||||
"value": "true"
|
||||
}
|
||||
}`
|
||||
|
||||
expected := `{"galleries":{"excludes":[],"modifier":"INCLUDES","value":["1"]},"has_markers":{"modifier":"EQUALS","value":"true"}}`
|
||||
|
||||
output, err := migrator.convertObjectFilter([]byte(input))
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
|
||||
var outMap, expMap map[string]interface{}
|
||||
if err := json.Unmarshal(output, &outMap); err != nil {
|
||||
t.Fatalf("unexpected unmarshal error: %v", err)
|
||||
}
|
||||
if err := json.Unmarshal([]byte(expected), &expMap); err != nil {
|
||||
t.Fatalf("unexpected unmarshal error: %v", err)
|
||||
}
|
||||
|
||||
outJSON, err := json.Marshal(outMap)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected marshal error: %v", err)
|
||||
}
|
||||
expJSON, err := json.Marshal(expMap)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected marshal error: %v", err)
|
||||
}
|
||||
|
||||
if string(outJSON) != string(expJSON) {
|
||||
t.Errorf("expected %s, got %s", string(expJSON), string(outJSON))
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
PRAGMA foreign_keys=OFF;

-- This migration updates the object_filter JSON structure in saved_filters
-- to match the API input shape. The actual logic runs in 86_postmigrate.go.

-- Rebuild the saved_filters table. The new column definitions are identical
-- to the old ones; the JSON rewrite itself happens in the Go post-migration.
-- NOTE(review): confirm the rebuild is intentional given the unchanged schema.
CREATE TABLE `saved_filters_new` (
`id` integer not null primary key autoincrement,
`name` varchar(510) not null,
`mode` varchar(255) not null,
`find_filter` blob,
`object_filter` blob,
`ui_options` blob
);

-- Copy every row across unchanged (IDs preserved).
INSERT INTO `saved_filters_new`
(
`id`,
`name`,
`mode`,
`find_filter`,
`object_filter`,
`ui_options`
)
SELECT
`id`,
`name`,
`mode`,
`find_filter`,
`object_filter`,
`ui_options`
FROM `saved_filters`;

-- Swap the new table into place and restore the unique (mode, name) index.
DROP INDEX `index_saved_filters_on_mode_name_unique`;
DROP TABLE `saved_filters`;
ALTER TABLE `saved_filters_new` rename to `saved_filters`;

CREATE UNIQUE INDEX `index_saved_filters_on_mode_name_unique` on `saved_filters` (`mode`, `name`);

PRAGMA foreign_keys=ON;
|
||||
|
|
@ -11,4 +11,38 @@ fragment SavedFilterData on SavedFilter {
|
|||
}
|
||||
object_filter
|
||||
ui_options
|
||||
label_mapping {
|
||||
tags {
|
||||
id
|
||||
label
|
||||
}
|
||||
studios {
|
||||
id
|
||||
label
|
||||
}
|
||||
performers {
|
||||
id
|
||||
label
|
||||
}
|
||||
groups {
|
||||
id
|
||||
label
|
||||
}
|
||||
galleries {
|
||||
id
|
||||
label
|
||||
}
|
||||
folders {
|
||||
id
|
||||
label
|
||||
}
|
||||
scenes {
|
||||
id
|
||||
label
|
||||
}
|
||||
movies {
|
||||
id
|
||||
label
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -213,8 +213,23 @@ export abstract class ModifierCriterion<
|
|||
|
||||
public setFromSavedCriterion(criterion: unknown) {
|
||||
const c = criterion as ISavedCriterion<V>;
|
||||
const labelMapping =
|
||||
((criterion as Record<string, unknown>)._labelMapping as
|
||||
| Record<string, string>
|
||||
| undefined) || {};
|
||||
if (c.value !== undefined && c.value !== null) {
|
||||
this.value = c.value;
|
||||
if (
|
||||
Array.isArray(c.value) &&
|
||||
c.value.length > 0 &&
|
||||
typeof c.value[0] === "string"
|
||||
) {
|
||||
this.value = (c.value as unknown as string[]).map((id: string) => ({
|
||||
id,
|
||||
label: labelMapping[id] || id,
|
||||
})) as unknown as V;
|
||||
} else {
|
||||
this.value = c.value;
|
||||
}
|
||||
}
|
||||
this.modifier = c.modifier;
|
||||
}
|
||||
|
|
@ -417,14 +432,34 @@ export class IHierarchicalLabeledIdCriterion extends ModifierCriterion<IHierarch
|
|||
public setFromSavedCriterion(
|
||||
criterion: ISavedCriterion<IHierarchicalLabelValue>
|
||||
) {
|
||||
const { modifier, value } = criterion;
|
||||
const c = criterion as unknown as Record<string, unknown>;
|
||||
const modifier = c.modifier as CriterionModifier;
|
||||
const value = c.value as IHierarchicalLabelValue | string[] | undefined;
|
||||
const excludes = c.excludes as string[] | undefined;
|
||||
const depth = c.depth as number | undefined;
|
||||
const _labelMapping =
|
||||
(c._labelMapping as Record<string, string> | undefined) || {};
|
||||
|
||||
if (value !== undefined) {
|
||||
this.value = {
|
||||
items: value.items || [],
|
||||
excluded: value.excluded || [],
|
||||
depth: value.depth || 0,
|
||||
};
|
||||
if (Array.isArray(value)) {
|
||||
this.value = {
|
||||
items: value.map((id: string) => ({
|
||||
id,
|
||||
label: _labelMapping[id] || id,
|
||||
})),
|
||||
excluded: (excludes || []).map((id: string) => ({
|
||||
id,
|
||||
label: _labelMapping[id] || id,
|
||||
})),
|
||||
depth: depth || 0,
|
||||
};
|
||||
} else {
|
||||
this.value = {
|
||||
items: value.items || [],
|
||||
excluded: value.excluded || [],
|
||||
depth: value.depth || 0,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const modifierOptions =
|
||||
|
|
|
|||
|
|
@ -292,6 +292,7 @@ export class ListFilterModel {
|
|||
find_filter: findFilter,
|
||||
object_filter: objectFilter,
|
||||
ui_options: uiOptions,
|
||||
label_mapping: labelMapping,
|
||||
} = savedFilter;
|
||||
|
||||
this.itemsPerPage = findFilter?.per_page ?? this.itemsPerPage;
|
||||
|
|
@ -311,9 +312,53 @@ export class ListFilterModel {
|
|||
this.currentPage = 1;
|
||||
|
||||
this.criteria = [];
|
||||
|
||||
// Convert array mapping to map for O(1) lookup
|
||||
const resolvedMapping: Record<string, Record<string, string>> = {};
|
||||
if (labelMapping) {
|
||||
for (const [groupKey, entries] of Object.entries(
|
||||
labelMapping as unknown as Record<
|
||||
string,
|
||||
{ id: string; label: string }[]
|
||||
>
|
||||
)) {
|
||||
if (Array.isArray(entries)) {
|
||||
resolvedMapping[groupKey] = entries.reduce(
|
||||
(
|
||||
acc: Record<string, string>,
|
||||
item: { id: string; label: string }
|
||||
) => {
|
||||
if (item && item.id && item.label) {
|
||||
acc[item.id] = item.label;
|
||||
}
|
||||
return acc;
|
||||
},
|
||||
{}
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (objectFilter) {
|
||||
for (const [k, v] of Object.entries(objectFilter)) {
|
||||
const criterion = this.makeCriterion(k as CriterionType);
|
||||
|
||||
// Map criteria back to their respective groups
|
||||
let groupName = k;
|
||||
if (k.endsWith("_tags") || k === "parents" || k === "children") {
|
||||
groupName = "tags";
|
||||
} else if (k === "containing_groups" || k === "sub_groups") {
|
||||
groupName = "groups";
|
||||
} else if (k === "parent_folder") {
|
||||
groupName = "folders";
|
||||
}
|
||||
|
||||
if (labelMapping && resolvedMapping[groupName]) {
|
||||
if (typeof v === "object" && v !== null) {
|
||||
(v as unknown as Record<string, unknown>)._labelMapping =
|
||||
resolvedMapping[groupName];
|
||||
}
|
||||
}
|
||||
criterion.setFromSavedCriterion(v);
|
||||
this.criteria.push(criterion);
|
||||
}
|
||||
|
|
|
|||
Loading…
Reference in a new issue