Expand folder select hierarchy based on initial selected folder (#6738)

* Add sub_folders field to Folder type
* Expand folder select for the initial value
This commit is contained in:
WithoutPants 2026-03-23 16:15:23 +11:00 committed by GitHub
parent c9d0afee56
commit c5034422cb
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
10 changed files with 249 additions and 22 deletions

View file

@ -16,6 +16,9 @@ type Folder {
parent_folders: [Folder!]!
zip_file: BasicFile
"Returns direct sub-folders"
sub_folders: [Folder!]!
mod_time: Time!
created_at: Time!

View file

@ -11,7 +11,7 @@
//go:generate go run github.com/vektah/dataloaden GroupLoader int *github.com/stashapp/stash/pkg/models.Group
//go:generate go run github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/models.FileID github.com/stashapp/stash/pkg/models.File
//go:generate go run github.com/vektah/dataloaden FolderLoader github.com/stashapp/stash/pkg/models.FolderID *github.com/stashapp/stash/pkg/models.Folder
//go:generate go run github.com/vektah/dataloaden FolderParentFolderIDsLoader github.com/stashapp/stash/pkg/models.FolderID []github.com/stashapp/stash/pkg/models.FolderID
//go:generate go run github.com/vektah/dataloaden FolderRelatedFolderIDsLoader github.com/stashapp/stash/pkg/models.FolderID []github.com/stashapp/stash/pkg/models.FolderID
//go:generate go run github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
@ -75,7 +75,8 @@ type Loaders struct {
FileByID *FileLoader
FolderByID *FolderLoader
FolderParentFolderIDs *FolderParentFolderIDsLoader
FolderParentFolderIDs *FolderRelatedFolderIDsLoader
FolderSubFolderIDs *FolderRelatedFolderIDsLoader
}
type Middleware struct {
@ -166,11 +167,16 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch,
fetch: m.fetchFolders(ctx),
},
FolderParentFolderIDs: &FolderParentFolderIDsLoader{
FolderParentFolderIDs: &FolderRelatedFolderIDsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchFoldersParentFolderIDs(ctx),
},
FolderSubFolderIDs: &FolderRelatedFolderIDsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchFoldersSubFolderIDs(ctx),
},
SceneFiles: &SceneFileIDsLoader{
wait: wait,
maxBatch: maxBatch,
@ -427,6 +433,17 @@ func (m Middleware) fetchFoldersParentFolderIDs(ctx context.Context) func(keys [
}
}
// fetchFoldersSubFolderIDs returns the dataloader batch function that
// resolves the direct sub-folder IDs for each of the given folder IDs.
// The outer slice of the result is positionally aligned with keys.
func (m Middleware) fetchFoldersSubFolderIDs(ctx context.Context) func(keys []models.FolderID) ([][]models.FolderID, []error) {
	return func(keys []models.FolderID) ([][]models.FolderID, []error) {
		var subFolderIDs [][]models.FolderID
		err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
			ids, dbErr := m.Repository.Folder.GetManySubFolderIDs(ctx, keys)
			subFolderIDs = ids
			return dbErr
		})
		return subFolderIDs, toErrorSlice(err)
	}
}
func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([][]models.FileID, []error) {
return func(keys []int) (ret [][]models.FileID, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {

View file

@ -22,16 +22,16 @@ type FolderParentFolderIDsLoaderConfig struct {
}
// NewFolderParentFolderIDsLoader creates a new FolderParentFolderIDsLoader given a fetch, wait, and maxBatch
func NewFolderParentFolderIDsLoader(config FolderParentFolderIDsLoaderConfig) *FolderParentFolderIDsLoader {
return &FolderParentFolderIDsLoader{
func NewFolderParentFolderIDsLoader(config FolderParentFolderIDsLoaderConfig) *FolderRelatedFolderIDsLoader {
return &FolderRelatedFolderIDsLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// FolderParentFolderIDsLoader batches and caches requests
type FolderParentFolderIDsLoader struct {
// FolderRelatedFolderIDsLoader batches and caches requests
type FolderRelatedFolderIDsLoader struct {
// this method provides the data for the loader
fetch func(keys []models.FolderID) ([][]models.FolderID, []error)
@ -63,14 +63,14 @@ type folderParentFolderIDsLoaderBatch struct {
}
// Load a FolderID by key, batching and caching will be applied automatically
func (l *FolderParentFolderIDsLoader) Load(key models.FolderID) ([]models.FolderID, error) {
func (l *FolderRelatedFolderIDsLoader) Load(key models.FolderID) ([]models.FolderID, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a FolderID.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *FolderParentFolderIDsLoader) LoadThunk(key models.FolderID) func() ([]models.FolderID, error) {
func (l *FolderRelatedFolderIDsLoader) LoadThunk(key models.FolderID) func() ([]models.FolderID, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
@ -113,7 +113,7 @@ func (l *FolderParentFolderIDsLoader) LoadThunk(key models.FolderID) func() ([]m
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *FolderParentFolderIDsLoader) LoadAll(keys []models.FolderID) ([][]models.FolderID, []error) {
func (l *FolderRelatedFolderIDsLoader) LoadAll(keys []models.FolderID) ([][]models.FolderID, []error) {
results := make([]func() ([]models.FolderID, error), len(keys))
for i, key := range keys {
@ -131,7 +131,7 @@ func (l *FolderParentFolderIDsLoader) LoadAll(keys []models.FolderID) ([][]model
// LoadAllThunk returns a function that when called will block waiting for a FolderIDs.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *FolderParentFolderIDsLoader) LoadAllThunk(keys []models.FolderID) func() ([][]models.FolderID, []error) {
func (l *FolderRelatedFolderIDsLoader) LoadAllThunk(keys []models.FolderID) func() ([][]models.FolderID, []error) {
results := make([]func() ([]models.FolderID, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
@ -149,7 +149,7 @@ func (l *FolderParentFolderIDsLoader) LoadAllThunk(keys []models.FolderID) func(
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *FolderParentFolderIDsLoader) Prime(key models.FolderID, value []models.FolderID) bool {
func (l *FolderRelatedFolderIDsLoader) Prime(key models.FolderID, value []models.FolderID) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
@ -164,13 +164,13 @@ func (l *FolderParentFolderIDsLoader) Prime(key models.FolderID, value []models.
}
// Clear the value at key from the cache, if it exists
func (l *FolderParentFolderIDsLoader) Clear(key models.FolderID) {
func (l *FolderRelatedFolderIDsLoader) Clear(key models.FolderID) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *FolderParentFolderIDsLoader) unsafeSet(key models.FolderID, value []models.FolderID) {
func (l *FolderRelatedFolderIDsLoader) unsafeSet(key models.FolderID, value []models.FolderID) {
if l.cache == nil {
l.cache = map[models.FolderID][]models.FolderID{}
}
@ -179,7 +179,7 @@ func (l *FolderParentFolderIDsLoader) unsafeSet(key models.FolderID, value []mod
// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *folderParentFolderIDsLoaderBatch) keyIndex(l *FolderParentFolderIDsLoader, key models.FolderID) int {
func (b *folderParentFolderIDsLoaderBatch) keyIndex(l *FolderRelatedFolderIDsLoader, key models.FolderID) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
@ -203,7 +203,7 @@ func (b *folderParentFolderIDsLoaderBatch) keyIndex(l *FolderParentFolderIDsLoad
return pos
}
func (b *folderParentFolderIDsLoaderBatch) startTimer(l *FolderParentFolderIDsLoader) {
func (b *folderParentFolderIDsLoaderBatch) startTimer(l *FolderRelatedFolderIDsLoader) {
time.Sleep(l.wait)
l.mu.Lock()
@ -219,7 +219,7 @@ func (b *folderParentFolderIDsLoaderBatch) startTimer(l *FolderParentFolderIDsLo
b.end(l)
}
func (b *folderParentFolderIDsLoaderBatch) end(l *FolderParentFolderIDsLoader) {
func (b *folderParentFolderIDsLoaderBatch) end(l *FolderRelatedFolderIDsLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}

View file

@ -31,6 +31,17 @@ func (r *folderResolver) ParentFolders(ctx context.Context, obj *models.Folder)
return ret, firstError(errs)
}
// SubFolders resolves the direct sub-folders of the given folder,
// batching lookups through the request-scoped dataloaders: first the
// sub-folder IDs, then the folder objects themselves in a single batch.
func (r *folderResolver) SubFolders(ctx context.Context, obj *models.Folder) ([]*models.Folder, error) {
	ids, err := loaders.From(ctx).FolderSubFolderIDs.Load(obj.ID)
	if err != nil {
		return nil, err
	}

	// removed the redundant `var errs []error` declaration that was
	// immediately reassigned by the `:=` below
	ret, errs := loaders.From(ctx).FolderByID.LoadAll(ids)
	return ret, firstError(errs)
}
func (r *folderResolver) ZipFile(ctx context.Context, obj *models.Folder) (*BasicFile, error) {
return zipFileResolver(ctx, obj.ZipFileID)
}

View file

@ -224,6 +224,29 @@ func (_m *FolderReaderWriter) GetManyParentFolderIDs(ctx context.Context, folder
return r0, r1
}
// GetManySubFolderIDs provides a mock function with given fields: ctx, folderIDs
// NOTE(review): mockery-generated code — regenerate from the FolderReaderWriter
// interface rather than hand-editing this body.
func (_m *FolderReaderWriter) GetManySubFolderIDs(ctx context.Context, folderIDs []models.FolderID) ([][]models.FolderID, error) {
	ret := _m.Called(ctx, folderIDs)
	// first return value: computed by a registered func if one was supplied,
	// otherwise taken verbatim from the stubbed return values
	var r0 [][]models.FolderID
	if rf, ok := ret.Get(0).(func(context.Context, []models.FolderID) [][]models.FolderID); ok {
		r0 = rf(ctx, folderIDs)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([][]models.FolderID)
		}
	}
	// second return value: the error, computed or verbatim in the same way
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, []models.FolderID) error); ok {
		r1 = rf(ctx, folderIDs)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// Query provides a mock function with given fields: ctx, options
func (_m *FolderReaderWriter) Query(ctx context.Context, options models.FolderQueryOptions) (*models.FolderQueryResult, error) {
ret := _m.Called(ctx, options)

View file

@ -16,6 +16,7 @@ type FolderFinder interface {
FindByZipFileID(ctx context.Context, zipFileID FileID) ([]*Folder, error)
FindByParentFolderID(ctx context.Context, parentFolderID FolderID) ([]*Folder, error)
GetManyParentFolderIDs(ctx context.Context, folderIDs []FolderID) ([][]FolderID, error)
GetManySubFolderIDs(ctx context.Context, folderIDs []FolderID) ([][]FolderID, error)
}
type FolderQueryer interface {

View file

@ -409,6 +409,42 @@ func (qb *FolderStore) GetManyParentFolderIDs(ctx context.Context, folderIDs []m
return ret, nil
}
// GetManySubFolderIDs returns the direct sub-folder IDs for each of the
// given parent folder IDs. The returned outer slice is positionally aligned
// with parentFolderIDs; a parent with no sub-folders yields a nil entry.
func (qb *FolderStore) GetManySubFolderIDs(ctx context.Context, parentFolderIDs []models.FolderID) ([][]models.FolderID, error) {
	ret := make([][]models.FolderID, len(parentFolderIDs))

	// nothing to look up; avoid issuing a query with an empty IN clause
	if len(parentFolderIDs) == 0 {
		return ret, nil
	}

	table := qb.table()

	// use the already-bound table expression consistently (the original mixed
	// table and qb.table() in the same query)
	q := dialect.From(table).Select(
		table.Col(idColumn),
		table.Col("parent_folder_id"),
	).Where(table.Col("parent_folder_id").In(parentFolderIDs))

	sql, args, err := q.ToSQL()
	if err != nil {
		return nil, fmt.Errorf("building query: %w", err)
	}

	var results []struct {
		FolderID       int             `db:"id"`
		ParentFolderID models.FolderID `db:"parent_folder_id"`
	}

	if err := querySelect(ctx, sql, args, &results); err != nil {
		return nil, fmt.Errorf("getting folders by parent folder ids %v: %w", parentFolderIDs, err)
	}

	// group the returned child IDs under their parent folder
	retMap := make(map[models.FolderID][]models.FolderID, len(parentFolderIDs))
	for _, v := range results {
		retMap[v.ParentFolderID] = append(retMap[v.ParentFolderID], models.FolderID(v.FolderID))
	}

	for i, parentID := range parentFolderIDs {
		ret[i] = retMap[parentID]
	}

	return ret, nil
}
func (qb *FolderStore) allInPaths(q *goqu.SelectDataset, p []string) *goqu.SelectDataset {
table := qb.table()

View file

@ -1209,6 +1209,14 @@ func querySimple(ctx context.Context, query *goqu.SelectDataset, out interface{}
return nil
}
// querySelect executes the given SQL with args and scans all result rows
// into dest. A sql.ErrNoRows result is treated as success (dest left empty).
func querySelect(ctx context.Context, query string, args []interface{}, dest interface{}) error {
	err := dbWrapper.Select(ctx, dest, query, args...)
	if err == nil || errors.Is(err, sql.ErrNoRows) {
		return nil
	}
	return fmt.Errorf("running query: %s [%v]: %w", query, args, err)
}
// func cols(table exp.IdentifierExpression, cols []string) []interface{} {
// var ret []interface{}
// for _, c := range cols {

View file

@ -22,3 +22,20 @@ query FindFoldersForQuery(
}
}
}
# Fetches the given folders together with their parent folder chain.
# Each parent also returns its direct sub-folders so that the folder
# select hierarchy can be pre-expanded down to the initial selection.
query FindFolderHierarchyForIDs($ids: [ID!]!) {
  findFolders(ids: $ids) {
    count
    folders {
      ...SelectFolderData
      parent_folders {
        ...SelectFolderData
        # the parent folders will be expanded, so we need the child folders
        sub_folders {
          ...SelectFolderData
        }
      }
    }
  }
}

View file

@ -1,6 +1,7 @@
import React, { useCallback, useEffect, useMemo, useState } from "react";
import {
FolderDataFragment,
useFindFolderHierarchyForIDsQuery,
useFindFoldersForQueryQuery,
useFindRootFoldersForSelectQuery,
} from "src/core/generated-graphql";
@ -141,7 +142,30 @@ function replaceFolder(folder: IFolder): (f: IFolder) => IFolder {
};
}
function useFolderMap(query: string, skip?: boolean) {
// Merges update into base by folder id: folders already present are replaced
// in place (keeping their position), new folders are appended in order.
// Neither input array is mutated.
function mergeFolderMaps(base: IFolder[], update: IFolder[]): IFolder[] {
  const ret = [...base];

  // index positions by id up front for O(1) lookups, replacing the original
  // per-item findIndex scan (O(n*m)); keep the FIRST index for duplicate ids
  // to match findIndex semantics
  const indexById = new Map<string, number>();
  ret.forEach((f, i) => {
    if (!indexById.has(f.id)) indexById.set(f.id, i);
  });

  update.forEach((updateFolder) => {
    const existingIndex = indexById.get(updateFolder.id);
    if (existingIndex === undefined) {
      // not found, add to the end
      indexById.set(updateFolder.id, ret.length);
      ret.push(updateFolder);
    } else {
      // found, replace
      ret[existingIndex] = updateFolder;
    }
  });
  return ret;
}
function useFolderMap(
query: string,
skip?: boolean,
initialSelected?: string[]
) {
const [cachedInitialSelected] = useState<string[]>(initialSelected ?? []);
const { data: rootFoldersResult } = useFindRootFoldersForSelectQuery({
skip,
});
@ -153,11 +177,94 @@ function useFolderMap(query: string, skip?: boolean) {
},
});
const { data: initialSelectedResult } = useFindFolderHierarchyForIDsQuery({
skip: !initialSelected || cachedInitialSelected.length === 0,
variables: {
ids: cachedInitialSelected ?? [],
},
});
const rootFolders: IFolder[] = useMemo(() => {
const ret = rootFoldersResult?.findFolders.folders ?? [];
return ret.map((f) => ({ ...f, expanded: false, children: undefined }));
}, [rootFoldersResult]);
const initialSelectedFolders: IFolder[] = useMemo(() => {
const ret: IFolder[] = [];
(initialSelectedResult?.findFolders.folders ?? []).forEach((folder) => {
if (!folder.parent_folders.length) {
// add root folder if not present
if (!ret.find((f) => f.id === folder.id)) {
ret.push({ ...folder, expanded: true, children: [] });
}
return;
}
let currentParent: IFolder | undefined;
for (let i = folder.parent_folders.length - 1; i >= 0; i--) {
const thisFolder = folder.parent_folders[i];
let existing: IFolder | undefined;
if (i === folder.parent_folders.length - 1) {
// last parent, add the folder as root if not present
existing = ret.find((f) => f.id === thisFolder.id);
if (!existing) {
existing = {
...folder.parent_folders[i],
expanded: true,
children: folder.parent_folders[i].sub_folders.map((f) => ({
...f,
expanded: false,
children: undefined,
})),
};
ret.push(existing);
}
currentParent = existing;
continue;
}
const existingIndex =
currentParent!.children?.findIndex((f) => f.id === thisFolder.id) ??
-1;
if (existingIndex === -1) {
// should be guaranteed
throw new Error(
`Parent folder ${thisFolder.id} not found in children of ${
currentParent!.id
}`
);
}
existing = currentParent!.children![existingIndex];
// replace children
existing = {
...existing,
expanded: true,
children: thisFolder.sub_folders.map((f) => ({
...f,
expanded: false,
children: undefined,
})),
};
currentParent!.children![existingIndex] = existing;
currentParent = existing;
}
});
return ret;
}, [initialSelectedResult]);
const mergedRootFolders = useMemo(() => {
if (query) {
return rootFolders;
}
return mergeFolderMaps(rootFolders, initialSelectedFolders);
}, [rootFolders, initialSelectedFolders, query]);
const queryFolders: IFolder[] = useMemo(() => {
// construct the folder list from the query result
const ret: IFolder[] = [];
@ -229,11 +336,11 @@ function useFolderMap(query: string, skip?: boolean) {
useEffect(() => {
if (!query) {
setFolderMap(rootFolders);
setFolderMap(mergedRootFolders);
} else {
setFolderMap(queryFolders);
}
}, [query, rootFolders, queryFolders]);
}, [query, mergedRootFolders, queryFolders]);
async function onToggleExpanded(folder: IFolder) {
setFolderMap(folderMap.map(toggleExpandedFn(folder)));
@ -472,8 +579,6 @@ export const SidebarFolderFilter: React.FC<
props.onOpen?.();
}
const { folderMap, onToggleExpanded } = useFolderMap(query, skip);
const option = props.criterionOption ?? FolderCriterionOption;
const { filter, setFilter } = props;
@ -494,6 +599,12 @@ export const SidebarFolderFilter: React.FC<
const multipleSelected =
criterion.value.items.length > 1 || criterion.value.excluded.length > 0;
const { folderMap, onToggleExpanded } = useFolderMap(
query,
skip,
criterion.value.items.map((i) => i.id)
);
function onSelect(folder: IFolder) {
// maintain sub-folder select if present
const depth = subDirsSelected ? -1 : 0;