fix: relation widget performance (#3975)
@@ -1,4 +1,10 @@
-import { resolveBackend, Backend, extractSearchFields } from '../backend';
+import {
+  resolveBackend,
+  Backend,
+  extractSearchFields,
+  expandSearchEntries,
+  mergeExpandedEntries,
+} from '../backend';
 import registry from 'Lib/registry';
 import { FOLDER } from 'Constants/collectionTypes';
 import { Map, List, fromJS } from 'immutable';
@@ -696,4 +702,242 @@ describe('Backend', () => {
       });
     });
   });
+
+  describe('expandSearchEntries', () => {
+    it('should expand entry with list to multiple entries', () => {
+      const entry = {
+        data: {
+          field: {
+            nested: {
+              list: [
+                { id: 1, name: '1' },
+                { id: 2, name: '2' },
+              ],
+            },
+          },
+          list: [1, 2],
+        },
+      };
+
+      expect(expandSearchEntries([entry], ['list.*', 'field.nested.list.*.name'])).toEqual([
+        {
+          data: {
+            field: {
+              nested: {
+                list: [
+                  { id: 1, name: '1' },
+                  { id: 2, name: '2' },
+                ],
+              },
+            },
+            list: [1, 2],
+          },
+          field: 'list.0',
+        },
+        {
+          data: {
+            field: {
+              nested: {
+                list: [
+                  { id: 1, name: '1' },
+                  { id: 2, name: '2' },
+                ],
+              },
+            },
+            list: [1, 2],
+          },
+          field: 'list.1',
+        },
+        {
+          data: {
+            field: {
+              nested: {
+                list: [
+                  { id: 1, name: '1' },
+                  { id: 2, name: '2' },
+                ],
+              },
+            },
+            list: [1, 2],
+          },
+          field: 'field.nested.list.0.name',
+        },
+        {
+          data: {
+            field: {
+              nested: {
+                list: [
+                  { id: 1, name: '1' },
+                  { id: 2, name: '2' },
+                ],
+              },
+            },
+            list: [1, 2],
+          },
+          field: 'field.nested.list.1.name',
+        },
+      ]);
+    });
+  });
+
+  describe('mergeExpandedEntries', () => {
+    it('should merge entries and filter data', () => {
+      const expanded = [
+        {
+          data: {
+            field: {
+              nested: {
+                list: [
+                  { id: 1, name: '1' },
+                  { id: 2, name: '2' },
+                  { id: 3, name: '3' },
+                  { id: 4, name: '4' },
+                ],
+              },
+            },
+            list: [1, 2],
+          },
+          field: 'field.nested.list.0.name',
+        },
+        {
+          data: {
+            field: {
+              nested: {
+                list: [
+                  { id: 1, name: '1' },
+                  { id: 2, name: '2' },
+                  { id: 3, name: '3' },
+                  { id: 4, name: '4' },
+                ],
+              },
+            },
+            list: [1, 2],
+          },
+          field: 'field.nested.list.3.name',
+        },
+      ];
+
+      expect(mergeExpandedEntries(expanded)).toEqual([
+        {
+          data: {
+            field: {
+              nested: {
+                list: [
+                  { id: 1, name: '1' },
+                  { id: 4, name: '4' },
+                ],
+              },
+            },
+            list: [1, 2],
+          },
+        },
+      ]);
+    });
+
+    it('should merge entries and filter data based on different fields', () => {
+      const expanded = [
+        {
+          data: {
+            field: {
+              nested: {
+                list: [
+                  { id: 1, name: '1' },
+                  { id: 2, name: '2' },
+                  { id: 3, name: '3' },
+                  { id: 4, name: '4' },
+                ],
+              },
+            },
+            list: [1, 2],
+          },
+          field: 'field.nested.list.0.name',
+        },
+        {
+          data: {
+            field: {
+              nested: {
+                list: [
+                  { id: 1, name: '1' },
+                  { id: 2, name: '2' },
+                  { id: 3, name: '3' },
+                  { id: 4, name: '4' },
+                ],
+              },
+            },
+            list: [1, 2],
+          },
+          field: 'field.nested.list.3.name',
+        },
+        {
+          data: {
+            field: {
+              nested: {
+                list: [
+                  { id: 1, name: '1' },
+                  { id: 2, name: '2' },
+                  { id: 3, name: '3' },
+                  { id: 4, name: '4' },
+                ],
+              },
+            },
+            list: [1, 2],
+          },
+          field: 'list.1',
+        },
+      ];
+
+      expect(mergeExpandedEntries(expanded)).toEqual([
+        {
+          data: {
+            field: {
+              nested: {
+                list: [
+                  { id: 1, name: '1' },
+                  { id: 4, name: '4' },
+                ],
+              },
+            },
+            list: [2],
+          },
+        },
+      ]);
+    });
+
+    it('should merge entries and keep sort by entry index', () => {
+      const expanded = [
+        {
+          data: {
+            list: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
+          },
+          field: 'list.5',
+        },
+        {
+          data: {
+            list: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
+          },
+          field: 'list.0',
+        },
+        {
+          data: {
+            list: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
+          },
+          field: 'list.11',
+        },
+        {
+          data: {
+            list: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
+          },
+          field: 'list.1',
+        },
+      ];
+
+      expect(mergeExpandedEntries(expanded)).toEqual([
+        {
+          data: {
+            list: [5, 0, 11, 1],
+          },
+        },
+      ]);
+    });
+  });
 });
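The wildcard paths exercised above ('list.*', 'field.nested.list.*.name') are resolved by `expandPath`, which comes from the `stringTemplate` helpers that backend.ts already imports and is not defined in this diff. As a rough, illustrative sketch only, assuming a wildcard always stands for an array index under the preceding key, the expansion these tests rely on behaves roughly like this:

```ts
// Illustrative sketch only (the real expandPath lives in stringTemplate):
// turn a path containing `*` wildcards into every concrete path present in
// `data`, e.g. 'field.nested.list.*.name' ->
// ['field.nested.list.0.name', 'field.nested.list.1.name'].
import { get } from 'lodash';

const expandWildcardPath = (data: Record<string, unknown>, path: string): string[] => {
  const starIndex = path.indexOf('*');
  if (starIndex < 0) {
    return [path];
  }

  // Assumes the wildcard always follows a key, i.e. the segment before `*`
  // resolves to an array (as in the search fields used by the tests above).
  const prefix = path.slice(0, starIndex - 1); // drop the '.' before '*'
  const suffix = path.slice(starIndex + 1); // '' or a '.name'-style remainder
  const target = get(data, prefix);
  if (!Array.isArray(target)) {
    return [];
  }

  // Substitute each index and recurse to resolve any further wildcards.
  return target.flatMap((_, index) => expandWildcardPath(data, `${prefix}.${index}${suffix}`));
};

// expandWildcardPath({ list: [1, 2] }, 'list.*') -> ['list.0', 'list.1']
```

Each concrete path then becomes its own pseudo-entry via `expandSearchEntries`, so the fuzzy matcher scores one field value at a time instead of a concatenated blob.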
@@ -75,17 +75,22 @@ export function querying(
   };
 }
 
-type Response = {
+type SearchResponse = {
   entries: EntryValue[];
   pagination: number;
 };
 
+type QueryResponse = {
+  hits: EntryValue[];
+  query: string;
+};
+
 export function querySuccess(
   namespace: string,
   collection: string,
   searchFields: string[],
   searchTerm: string,
-  response: Response,
+  response: QueryResponse,
 ) {
   return {
     type: QUERY_SUCCESS,
@@ -174,7 +179,7 @@ export function searchEntries(
     );
 
     return searchPromise.then(
-      (response: Response) =>
+      (response: SearchResponse) =>
         dispatch(
           searchSuccess(
             searchTerm,
@@ -195,8 +200,10 @@ export function query(
   collectionName: string,
   searchFields: string[],
   searchTerm: string,
+  file?: string,
+  limit?: number,
 ) {
-  return (dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
+  return async (dispatch: ThunkDispatch<State, {}, AnyAction>, getState: () => State) => {
    dispatch(querying(namespace, collectionName, searchFields, searchTerm));
 
     const state = getState();
@@ -212,13 +219,13 @@ export function query(
           collectionName,
           searchTerm,
         )
-      : backend.query(collection, searchFields, searchTerm);
+      : backend.query(collection, searchFields, searchTerm, file, limit);
 
-    return queryPromise.then(
-      (response: Response) =>
-        dispatch(querySuccess(namespace, collectionName, searchFields, searchTerm, response)),
-      (error: Error) =>
-        dispatch(queryFailure(namespace, collectionName, searchFields, searchTerm, error)),
-    );
+    try {
+      const response: QueryResponse = await queryPromise;
+      return dispatch(querySuccess(namespace, collectionName, searchFields, searchTerm, response));
+    } catch (error) {
+      return dispatch(queryFailure(namespace, collectionName, searchFields, searchTerm, error));
+    }
   };
 }
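With the thunk now async and accepting optional `file` and `limit` arguments, a caller such as the relation widget can restrict a query to a single file-collection entry and cap the number of hits before they are merged. A hypothetical call site, where the dispatch wiring, the 'posts' collection and the search fields are assumptions rather than part of this commit:

```ts
// Hypothetical usage of the reworked `query` thunk; import path, collection
// name and field names below are illustrative only.
import { query } from './actions/search';

async function searchRelationOptions(
  dispatch: (thunk: unknown) => Promise<unknown>,
  term: string,
) {
  // No `file` filter here; `limit: 20` keeps the comparatively expensive merge
  // step small, so the relation widget only processes entries it can display.
  return dispatch(query('relationWidget', 'posts', ['title', 'tags.*'], term, undefined, 20));
}
```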
@@ -1,5 +1,5 @@
-import { attempt, flatten, isError, uniq, trim, sortBy } from 'lodash';
-import { List, Map, fromJS } from 'immutable';
+import { attempt, flatten, isError, uniq, trim, sortBy, get, set } from 'lodash';
+import { List, Map, fromJS, Set } from 'immutable';
 import * as fuzzy from 'fuzzy';
 import { resolveFormat } from './formats/formats';
 import { selectUseWorkflow } from './reducers/config';
@@ -56,7 +56,7 @@ import AssetProxy from './valueObjects/AssetProxy';
 import { FOLDER, FILES } from './constants/collectionTypes';
 import { selectCustomPath } from './reducers/entryDraft';
 
-const { extractTemplateVars, dateParsers } = stringTemplate;
+const { extractTemplateVars, dateParsers, expandPath } = stringTemplate;
 
 export class LocalStorageAuthStore {
   storageKey = 'netlify-cms-user';
@@ -84,25 +84,104 @@ function getEntryBackupKey(collectionName?: string, slug?: string) {
   return `${baseKey}.${collectionName}${suffix}`;
 }
 
+const getEntryField = (field: string, entry: EntryValue) => {
+  const value = get(entry.data, field);
+  if (value) {
+    return String(value);
+  } else {
+    const firstFieldPart = field.split('.')[0];
+    if (entry[firstFieldPart as keyof EntryValue]) {
+      // allows searching using entry.slug/entry.path etc.
+      return entry[firstFieldPart as keyof EntryValue];
+    } else {
+      return '';
+    }
+  }
+};
+
 export const extractSearchFields = (searchFields: string[]) => (entry: EntryValue) =>
   searchFields.reduce((acc, field) => {
-    const nestedFields = field.split('.');
-    let f = entry.data;
-    for (let i = 0; i < nestedFields.length; i++) {
-      f = f[nestedFields[i]];
-      if (!f) break;
-    }
-
-    if (f) {
-      return `${acc} ${f}`;
-    } else if (entry[nestedFields[0] as keyof EntryValue]) {
-      // allows searching using entry.slug/entry.path etc.
-      return `${acc} ${entry[nestedFields[0] as keyof EntryValue]}`;
+    const value = getEntryField(field, entry);
+    if (value) {
+      return `${acc} ${value}`;
     } else {
       return acc;
     }
   }, '');
 
+export const expandSearchEntries = (entries: EntryValue[], searchFields: string[]) => {
+  // expand the entries for the purpose of the search
+  const expandedEntries = entries.reduce((acc, e) => {
+    const expandedFields = searchFields.reduce((acc, f) => {
+      const fields = expandPath({ data: e.data, path: f });
+      acc.push(...fields);
+      return acc;
+    }, [] as string[]);
+
+    for (let i = 0; i < expandedFields.length; i++) {
+      acc.push({ ...e, field: expandedFields[i] });
+    }
+
+    return acc;
+  }, [] as (EntryValue & { field: string })[]);
+
+  return expandedEntries;
+};
+
+export const mergeExpandedEntries = (entries: (EntryValue & { field: string })[]) => {
+  // merge the search results by slug and only keep data that matched the search
+  const fields = entries.map(f => f.field);
+  const arrayPaths: Record<string, Set<string>> = {};
+
+  const merged = entries.reduce((acc, e) => {
+    if (!acc[e.slug]) {
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      const { field, ...rest } = e;
+      acc[e.slug] = rest;
+      arrayPaths[e.slug] = Set();
+    }
+
+    const nestedFields = e.field.split('.');
+    let value = acc[e.slug].data;
+    for (let i = 0; i < nestedFields.length; i++) {
+      value = value[nestedFields[i]];
+      if (Array.isArray(value)) {
+        const path = nestedFields.slice(0, i + 1).join('.');
+        arrayPaths[e.slug] = arrayPaths[e.slug].add(path);
+      }
+    }
+
+    return acc;
+  }, {} as Record<string, EntryValue>);
+
+  // this keeps the search score sorting order designated by the order in entries
+  // and filters non matching items
+  Object.keys(merged).forEach(slug => {
+    const data = merged[slug].data;
+    for (const path of arrayPaths[slug].toArray()) {
+      const array = get(data, path) as unknown[];
+      const filtered = array.filter((_, index) => {
+        return fields.some(f => `${f}.`.startsWith(`${path}.${index}.`));
+      });
+      filtered.sort((a, b) => {
+        const indexOfA = array.indexOf(a);
+        const indexOfB = array.indexOf(b);
+        const pathOfA = `${path}.${indexOfA}.`;
+        const pathOfB = `${path}.${indexOfB}.`;
+
+        const matchingFieldIndexA = fields.findIndex(f => `${f}.`.startsWith(pathOfA));
+        const matchingFieldIndexB = fields.findIndex(f => `${f}.`.startsWith(pathOfB));
+
+        return matchingFieldIndexA - matchingFieldIndexB;
+      });
+
+      set(data, path, filtered);
+    }
+  });
+
+  return Object.values(merged);
+};
+
 const sortByScore = (a: fuzzy.FilterResult<EntryValue>, b: fuzzy.FilterResult<EntryValue>) => {
   if (a.score > b.score) return -1;
   if (a.score < b.score) return 1;
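The subtle part of `mergeExpandedEntries` above is the final pass that drops list items no matched field points into and re-orders the survivors by the rank of the field that matched them. Using the values from the 'keep sort by entry index' test, the predicate and comparator boil down to this standalone sketch:

```ts
// Standalone illustration of the pruning/ordering step in mergeExpandedEntries,
// using the data from the 'keep sort by entry index' test above.
const fields = ['list.5', 'list.0', 'list.11', 'list.1']; // matched paths, in search-rank order
const path = 'list';
const array = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12];

// Keep only the indices that some matched field path points into.
const filtered = array.filter((_, index) =>
  fields.some(f => `${f}.`.startsWith(`${path}.${index}.`)),
);
// filtered (array order): [0, 1, 5, 11]

// Re-order the survivors by the rank of the field that matched them.
filtered.sort((a, b) => {
  const rank = (item: number) =>
    fields.findIndex(f => `${f}.`.startsWith(`${path}.${array.indexOf(item)}.`));
  return rank(a) - rank(b);
});
// filtered (search-rank order): [5, 0, 11, 1], matching the test's expectation
```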
@@ -497,13 +576,35 @@ export class Backend {
     return { entries: hits };
   }
 
-  async query(collection: Collection, searchFields: string[], searchTerm: string) {
-    const entries = await this.listAllEntries(collection);
-    const hits = fuzzy
-      .filter(searchTerm, entries, { extract: extractSearchFields(searchFields) })
+  async query(
+    collection: Collection,
+    searchFields: string[],
+    searchTerm: string,
+    file?: string,
+    limit?: number,
+  ) {
+    let entries = await this.listAllEntries(collection);
+    if (file) {
+      entries = entries.filter(e => e.slug === file);
+    }
+
+    const expandedEntries = expandSearchEntries(entries, searchFields);
+
+    let hits = fuzzy
+      .filter(searchTerm, expandedEntries, {
+        extract: entry => {
+          return getEntryField(entry.field, entry);
+        },
+      })
       .sort(sortByScore)
       .map(f => f.original);
-    return { query: searchTerm, hits };
+
+    if (limit !== undefined && limit > 0) {
+      hits = hits.slice(0, limit);
+    }
+
+    const merged = mergeExpandedEntries(hits);
+    return { query: searchTerm, hits: merged };
   }
 
   traverseCursor(cursor: Cursor, action: string) {
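Taken together, the new `query` method is an expand, fuzzy-match, limit, merge pipeline. A condensed sketch of that flow outside the class, using the exported helpers plus lodash `get` in place of the module-private `getEntryField` (the import path and the `queryEntries` wrapper are illustrative, not part of the commit):

```ts
// Condensed sketch of the new query flow, not the Backend method itself.
import * as fuzzy from 'fuzzy';
import { get } from 'lodash';
import { expandSearchEntries, mergeExpandedEntries } from './backend';

type Entries = Parameters<typeof expandSearchEntries>[0];

function queryEntries(entries: Entries, searchFields: string[], searchTerm: string, limit?: number) {
  // One pseudo-entry per concrete path, e.g. 'tags.*' -> 'tags.0', 'tags.1', ...
  const expanded = expandSearchEntries(entries, searchFields);

  // Score each pseudo-entry on the single field it represents.
  let hits = fuzzy
    .filter(searchTerm, expanded, { extract: e => String(get(e.data, e.field) ?? '') })
    .sort((a, b) => b.score - a.score)
    .map(result => result.original);

  // Cap the hits before the comparatively expensive merge.
  if (limit !== undefined && limit > 0) {
    hits = hits.slice(0, limit);
  }

  // Collapse pseudo-entries back to unique entries, keeping only matched list items.
  return mergeExpandedEntries(hits);
}
```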