mirror of
https://github.com/stashapp/stash.git
synced 2026-03-27 15:42:04 +01:00
perf(ui): optimize duplicate checker UI to prevent browser freezing
This fixes an issue where Chrome would become unresponsive and prompt the user to kill the page when a large number of duplicates (e.g. 30,000+ groups) were found. 1. Changed the fetchPolicy on FindDuplicateImages to 'no-cache'. Loading 30,000+ complex objects into the Apollo normalized cache blocked the main thread for an extended period; bypassing the cache for this massive one-off query resolves the blocking. 2. Optimized the sorting algorithm in both the Image and Scene duplicate checkers. Previously, each group's total size was recalculated by iterating over all nested files inside the sort's comparison function, resulting in millions of unnecessary iterations (O(N log N) comparisons, each with a heavy inner loop). Now, group sizes are precalculated into a map (O(N)) before sorting.
This commit is contained in:
parent
eeba66e598
commit
00ff6d9936
2 changed files with 15 additions and 3 deletions
|
|
@@ -86,7 +86,7 @@ const ImageDuplicateChecker: React.FC = () => {
   const { data, loading, refetch } = useFindDuplicateImagesQuery({
     variables: { distance: hashDistance },
-    fetchPolicy: "network-only",
+    fetchPolicy: "no-cache",
   });

   const getGroupTotalSize = (group: GQL.SlimImageDataFragment[]) => {
|
@@ -101,8 +101,14 @@ const ImageDuplicateChecker: React.FC = () => {
   const allGroups = useMemo(() => {
     const groups = data?.findDuplicateImages ?? [];

+    const groupSizes = new Map<GQL.SlimImageDataFragment[], number>();
+    groups.forEach((group) => {
+      groupSizes.set(group, getGroupTotalSize(group));
+    });
+
     return [...groups].sort((a, b) => {
-      return getGroupTotalSize(b) - getGroupTotalSize(a);
+      return (groupSizes.get(b) ?? 0) - (groupSizes.get(a) ?? 0);
     });
   }, [data?.findDuplicateImages]);
|
|
|||
|
|
@@ -92,9 +92,15 @@ export const SceneDuplicateChecker: React.FC = () => {
   const scenes = useMemo(() => {
     const groups = data?.findDuplicateScenes ?? [];

+    const groupSizes = new Map<GQL.SlimSceneDataFragment[], number>();
+    groups.forEach((group) => {
+      groupSizes.set(group, getGroupTotalSize(group));
+    });
+
     // Sort by total file size descending (largest groups first)
     return [...groups].sort((a, b) => {
-      return getGroupTotalSize(b) - getGroupTotalSize(a);
+      return (groupSizes.get(b) ?? 0) - (groupSizes.get(a) ?? 0);
     });
   }, [data?.findDuplicateScenes]);
|
|
|||
Loading…
Reference in a new issue