Feat: entry sorting (#3494)

* refactor: typescript search actions, add tests, avoid duplicate search

* refactor: switch from promise chain to async/await in loadEntries

* feat: add sorting, initial commit

* fix: set isFetching to true on entries request

* fix: ui improvements and bug fixes

* test: fix tests

* feat(backend-gitlab): cache local tree

* fix: fix prop type warning

* refactor: code cleanup

* feat(backend-bitbucket): add local tree caching support

* feat: switch to orderBy and support multiple sort keys

* fix: backoff function

* fix: improve backoff

* feat: infer sortable fields

* feat: fetch file commit metadata - initial commit

* feat: extract file author and date, finalize GitLab & Bitbucket

* refactor: code cleanup

* feat: handle github rate limit errors

* refactor: code cleanup

* fix(github): add missing author and date when traversing cursor

* fix: add missing author and date when traversing cursor

* refactor: code cleanup

* refactor: code cleanup

* refactor: code cleanup

* test: fix tests

* fix: rebuild local tree when head doesn't exist in remote branch

* fix: allow sortable fields to be an empty array

* fix: allow translation of built-in sort fields

* build: fix proxy server build

* fix: hide commit author and date fields by default on non-git backends

* fix(algolia): add listAllEntries method for algolia integration

* fix: handle sort fields overflow

* test(bitbucket): re-record some bitbucket e2e tests

* test(bitbucket): fix media library test

* refactor(gitgateway-gitlab): share request code and handle 404 errors

* fix: always show commit date by default

* docs: add sortableFields

* refactor: code cleanup

* improvement: drop multi-sort, rework sort UI

* chore: force main package bumps

Co-authored-by: Shawn Erquhart <shawn@erquh.art>
Author: Erez Rokah
Date: 2020-04-01 06:13:27 +03:00 (committed by GitHub)
Commit: 174d86f0a0 (parent: cbb3927101)
82 changed files with 15128 additions and 12621 deletions


@@ -1,3 +1,6 @@
import { asyncLock, AsyncLock } from './asyncLock';
import unsentRequest from './unsentRequest';
export const CMS_BRANCH_PREFIX = 'cms';
export const DEFAULT_PR_BODY = 'Automatically generated by Netlify CMS';
export const MERGE_COMMIT_MESSAGE = 'Automatically generated. Merged on Netlify CMS.';
@@ -27,6 +30,93 @@ export interface FetchError extends Error {
status: number;
}
interface API {
rateLimiter?: AsyncLock;
buildRequest: (req: ApiRequest) => ApiRequest | Promise<ApiRequest>;
requestFunction?: (req: ApiRequest) => Promise<Response>;
}
export type ApiRequestObject = {
url: string;
params?: Record<string, string | boolean | number>;
method?: 'POST' | 'PUT' | 'DELETE' | 'HEAD';
headers?: Record<string, string>;
body?: string | FormData;
cache?: 'no-store';
};
export type ApiRequest = ApiRequestObject | string;
class RateLimitError extends Error {
resetSeconds: number;
constructor(message: string, resetSeconds: number) {
super(message);
if (resetSeconds < 0) {
this.resetSeconds = 1;
} else if (resetSeconds > 60 * 60) {
this.resetSeconds = 60 * 60;
} else {
this.resetSeconds = resetSeconds;
}
}
}
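For intuition, the clamp above keeps any pause between one second and one hour; the cap also bounds the damage if an absolute epoch timestamp is passed in (as the 403 branch below can do when it forwards X-RateLimit-Reset). A quick worked illustration, using only the constructor shown above:

new RateLimitError('API rate limit exceeded', -5).resetSeconds; // 1 (negatives floor to one second)
new RateLimitError('API rate limit exceeded', 120).resetSeconds; // 120 (in-range values pass through)
new RateLimitError('API rate limit exceeded', 1585713207).resetSeconds; // 3600 (capped at one hour)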
export const requestWithBackoff = async (
api: API,
req: ApiRequest,
attempt = 1,
): Promise<Response> => {
if (api.rateLimiter) {
await api.rateLimiter.acquire();
}
try {
const builtRequest = await api.buildRequest(req);
const requestFunction = api.requestFunction || unsentRequest.performRequest;
const response: Response = await requestFunction(builtRequest);
if (response.status === 429) {
// GitLab/Bitbucket too many requests
const text = await response.text().catch(() => 'Too many requests');
throw new Error(text);
} else if (response.status === 403) {
// GitHub too many requests
const { message } = await response.json().catch(() => ({ message: '' }));
if (message.match('API rate limit exceeded')) {
const now = new Date();
const nextWindowInSeconds = response.headers.has('X-RateLimit-Reset')
? parseInt(response.headers.get('X-RateLimit-Reset')!)
: now.getTime() / 1000 + 60;
throw new RateLimitError(message, nextWindowInSeconds);
}
}
return response;
} catch (err) {
if (attempt <= 5) {
if (!api.rateLimiter) {
const timeout = err.resetSeconds || attempt * attempt;
console.log(
`Pausing requests for ${timeout} ${timeout === 1 ? 'second' : 'seconds'} due to fetch failures:`,
err.message,
);
api.rateLimiter = asyncLock();
api.rateLimiter.acquire();
setTimeout(() => {
api.rateLimiter?.release();
api.rateLimiter = undefined;
console.log(`Done pausing requests`);
}, 1000 * timeout);
}
return requestWithBackoff(api, req, attempt + 1);
} else {
throw err;
}
}
};
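To show how this is meant to be consumed, here is a minimal sketch of a backend wiring up requestWithBackoff; the auth header and URL are illustrative placeholders, not part of this diff:

// A consumer only has to supply buildRequest; requestFunction falls back to
// unsentRequest.performRequest, and rateLimiter is created on demand above.
const api: API = {
  buildRequest: (req: ApiRequest) =>
    typeof req === 'string'
      ? { url: req, headers: { Authorization: 'token <personal-access-token>' } }
      : { ...req, headers: { ...req.headers, Authorization: 'token <personal-access-token>' } },
};

// Retries up to 5 times with quadratic backoff (1s, 4s, 9s, ...), or pauses for
// RateLimitError.resetSeconds when the provider reports rate limiting.
const response = await requestWithBackoff(api, 'https://api.github.com/rate_limit');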
export const readFile = async (
id: string | null | undefined,
fetchContent: () => Promise<string | Blob>,
@@ -46,6 +136,29 @@ export const readFile = async (
return content;
};
export type FileMetadata = {
author: string;
updatedOn: string;
};
const getFileMetadataKey = (id: string) => `gh.${id}.meta`;
export const readFileMetadata = async (
id: string,
fetchMetadata: () => Promise<FileMetadata>,
localForage: LocalForage,
) => {
const key = getFileMetadataKey(id);
const cached = await localForage.getItem<FileMetadata>(key);
if (cached) {
return cached;
} else {
const metadata = await fetchMetadata();
await localForage.setItem<FileMetadata>(key, metadata);
return metadata;
}
};
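A short usage sketch, assuming a hypothetical backend helper fetchCommitInfo that resolves the last commit's author and date for a path:

// The cache key is derived from the file id (typically a blob SHA), so cached
// metadata is only bypassed when the file content itself changes.
const metadata: FileMetadata = await readFileMetadata(
  file.id, // e.g. a blob SHA
  () => fetchCommitInfo(file.path), // hypothetical; resolves to { author, updatedOn }
  localForage,
);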
/**
* Keywords for inferring a status that will provide a deploy preview URL.
*/


@@ -1,9 +1,4 @@
import {
parseLinkHeader,
getAllResponses,
getPathDepth,
filterByPropExtension,
} from '../backendUtil';
import { parseLinkHeader, getAllResponses, getPathDepth, filterByExtension } from '../backendUtil';
import { oneLine } from 'common-tags';
import nock from 'nock';
@@ -85,13 +80,14 @@ describe('getPathDepth', () => {
});
});
describe('filterByPropExtension', () => {
it('should return filtered array based on extension', () => {
expect(
filterByPropExtension('.html.md', 'path')([{ path: 'file.html.md' }, { path: 'file.json' }]),
).toEqual([{ path: 'file.html.md' }]);
expect(
filterByPropExtension('html.md', 'path')([{ path: 'file.html.md' }, { path: 'file.json' }]),
).toEqual([{ path: 'file.html.md' }]);
describe('filterByExtension', () => {
it('should return true when extension matches', () => {
expect(filterByExtension({ path: 'file.html.md' }, '.html.md')).toBe(true);
expect(filterByExtension({ path: 'file.html.md' }, 'html.md')).toBe(true);
});
it("should return false when extension doesn't match", () => {
expect(filterByExtension({ path: 'file.json' }, '.html.md')).toBe(false);
expect(filterByExtension({ path: 'file.json' }, 'html.md')).toBe(false);
});
});


@@ -1,4 +1,4 @@
import { flow, fromPairs, get } from 'lodash';
import { flow, fromPairs } from 'lodash';
import { map } from 'lodash/fp';
import { fromJS } from 'immutable';
import unsentRequest from './unsentRequest';
@@ -6,10 +6,10 @@ import APIError from './APIError';
type Formatter = (res: Response) => Promise<string | Blob | unknown>;
export const filterByPropExtension = (extension: string, propName: string) => <T>(arr: T[]) =>
arr.filter(el =>
get(el, propName, '').endsWith(extension.startsWith('.') ? extension : `.${extension}`),
);
export const filterByExtension = (file: { path: string }, extension: string) => {
const path = file?.path || '';
return path.endsWith(extension.startsWith('.') ? extension : `.${extension}`);
};
const catchFormatErrors = (format: string, formatter: Formatter) => (res: Response) => {
try {
@@ -64,18 +64,23 @@ export const responseParser = (options: {
apiName: string;
}) => (res: Response) => parseResponse(res, options);
export const parseLinkHeader = flow([
linksString => linksString.split(','),
map((str: string) => str.trim().split(';')),
map(([linkStr, keyStr]) => [
keyStr.match(/rel="(.*?)"/)[1],
linkStr
.trim()
.match(/<(.*?)>/)[1]
.replace(/\+/g, '%20'),
]),
fromPairs,
]);
export const parseLinkHeader = (header: string | null) => {
if (!header) {
return {};
}
return flow([
linksString => linksString.split(','),
map((str: string) => str.trim().split(';')),
map(([linkStr, keyStr]) => [
keyStr.match(/rel="(.*?)"/)[1],
linkStr
.trim()
.match(/<(.*?)>/)[1]
.replace(/\+/g, '%20'),
]),
fromPairs,
])(header);
};
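For reference, a worked example of the parser's output on a GitHub-style pagination header; note the new null guard means a missing header now yields an empty object instead of throwing:

const links = parseLinkHeader(
  '<https://api.github.com/repos?page=2>; rel="next", <https://api.github.com/repos?page=10>; rel="last"',
);
// links -> { next: 'https://api.github.com/repos?page=2', last: 'https://api.github.com/repos?page=10' }

parseLinkHeader(null); // -> {}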
export const getAllResponses = async (
url: string,


@@ -1,6 +1,9 @@
import semaphore, { Semaphore } from 'semaphore';
import { unionBy, sortBy } from 'lodash';
import Cursor from './Cursor';
import { AsyncLock } from './asyncLock';
import { FileMetadata } from './API';
import { basename } from './path';
export type DisplayURLObject = { id: string; path: string };
@@ -25,7 +28,7 @@ export interface UnpublishedEntryMediaFile {
export interface ImplementationEntry {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
data: string;
file: { path: string; label?: string; id?: string | null };
file: { path: string; label?: string; id?: string | null; author?: string; updatedOn?: string };
slug?: string;
mediaFiles?: ImplementationMediaFile[];
metaData?: { collection: string; status: string };
@@ -135,6 +138,8 @@ export interface Implementation {
cursor: Cursor,
action: string,
) => Promise<{ entries: ImplementationEntry[]; cursor: Cursor }>;
isGitBackend?: () => boolean;
}
const MAX_CONCURRENT_DOWNLOADS = 10;
@@ -156,28 +161,40 @@ type ReadFile = (
id: string | null | undefined,
options: { parseText: boolean },
) => Promise<string | Blob>;
type ReadFileMetadata = (path: string, id: string) => Promise<FileMetadata>;
type ReadUnpublishedFile = (
key: string,
) => Promise<{ metaData: Metadata; fileData: string; isModification: boolean; slug: string }>;
const fetchFiles = async (files: ImplementationFile[], readFile: ReadFile, apiName: string) => {
const fetchFiles = async (
files: ImplementationFile[],
readFile: ReadFile,
readFileMetadata: ReadFileMetadata,
apiName: string,
) => {
const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
const promises = [] as Promise<ImplementationEntry | { error: boolean }>[];
files.forEach(file => {
promises.push(
new Promise(resolve =>
sem.take(() =>
readFile(file.path, file.id, { parseText: true })
.then(data => {
resolve({ file, data: data as string });
sem.leave();
})
.catch((error = true) => {
sem.leave();
console.error(`failed to load file from ${apiName}: ${file.path}`);
resolve({ error });
}),
),
sem.take(async () => {
try {
const [data, fileMetadata] = await Promise.all([
readFile(file.path, file.id, { parseText: true }),
file.id
? readFileMetadata(file.path, file.id)
: Promise.resolve({ author: '', updatedOn: '' }),
]);
resolve({ file: { ...file, ...fileMetadata }, data: data as string });
sem.leave();
} catch (error) {
sem.leave();
console.error(`failed to load file from ${apiName}: ${file.path}`);
resolve({ error: true });
}
}),
),
);
});
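A side note on the concurrency pattern above: the semaphore caps parallel reads at MAX_CONCURRENT_DOWNLOADS (10), so large folders are fetched in bounded batches rather than all at once. A self-contained sketch of the same pattern, with a placeholder download standing in for the readFile/readFileMetadata pair:

import semaphore from 'semaphore';

const sem = semaphore(10); // at most 10 downloads in flight
const results = await Promise.all(
  paths.map(
    path =>
      new Promise(resolve =>
        sem.take(async () => {
          try {
            const data = await download(path); // placeholder fetch
            sem.leave(); // free the slot before resolving
            resolve({ path, data });
          } catch (error) {
            sem.leave();
            resolve({ path, error: true }); // swallow failures, like fetchFiles above
          }
        }),
      ),
  ),
);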
@@ -230,18 +247,20 @@ const fetchUnpublishedFiles = async (
export const entriesByFolder = async (
listFiles: () => Promise<ImplementationFile[]>,
readFile: ReadFile,
readFileMetadata: ReadFileMetadata,
apiName: string,
) => {
const files = await listFiles();
return fetchFiles(files, readFile, apiName);
return fetchFiles(files, readFile, readFileMetadata, apiName);
};
export const entriesByFiles = async (
files: ImplementationFile[],
readFile: ReadFile,
readFileMetadata: ReadFileMetadata,
apiName: string,
) => {
return fetchFiles(files, readFile, apiName);
return fetchFiles(files, readFile, readFileMetadata, apiName);
};
export const unpublishedEntries = async (
@@ -306,3 +325,255 @@ export const runWithLock = async (lock: AsyncLock, func: Function, message: string
lock.release();
}
};
const LOCAL_KEY = 'git.local';
type LocalTree = {
head: string;
files: { id: string; name: string; path: string }[];
};
type GetKeyArgs = {
branch: string;
folder: string;
extension: string;
depth: number;
};
const getLocalKey = ({ branch, folder, extension, depth }: GetKeyArgs) => {
return `${LOCAL_KEY}.${branch}.${folder}.${extension}.${depth}`;
};
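For example, with the template above:

// getLocalKey({ branch: 'master', folder: 'content/posts', extension: 'md', depth: 1 })
// -> 'git.local.master.content/posts.md.1'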
type PersistLocalTreeArgs = GetKeyArgs & {
localForage: LocalForage;
localTree: LocalTree;
};
type GetLocalTreeArgs = GetKeyArgs & {
localForage: LocalForage;
};
export const persistLocalTree = async ({
localForage,
localTree,
branch,
folder,
extension,
depth,
}: PersistLocalTreeArgs) => {
await localForage.setItem<LocalTree>(
getLocalKey({ branch, folder, extension, depth }),
localTree,
);
};
export const getLocalTree = async ({
localForage,
branch,
folder,
extension,
depth,
}: GetLocalTreeArgs) => {
const localTree = await localForage.getItem<LocalTree>(
getLocalKey({ branch, folder, extension, depth }),
);
return localTree;
};
type GetDiffFromLocalTreeMethods = {
getDifferences: (
to: string,
from: string,
) => Promise<
{
oldPath: string;
newPath: string;
status: string;
binary: boolean;
}[]
>;
filterFile: (file: { path: string; name: string }) => boolean;
getFileId: (path: string) => Promise<string>;
};
type GetDiffFromLocalTreeArgs = GetDiffFromLocalTreeMethods & {
branch: { name: string; sha: string };
localTree: LocalTree;
folder: string;
extension: string;
depth: number;
};
const getDiffFromLocalTree = async ({
branch,
localTree,
folder,
getDifferences,
filterFile,
getFileId,
}: GetDiffFromLocalTreeArgs) => {
const diff = await getDifferences(branch.sha, localTree.head);
const diffFiles = diff
.filter(d => (d.oldPath?.startsWith(folder) || d.newPath?.startsWith(folder)) && !d.binary)
.reduce((acc, d) => {
if (d.status === 'renamed') {
acc.push({
path: d.oldPath,
name: basename(d.oldPath),
deleted: true,
});
acc.push({
path: d.newPath,
name: basename(d.newPath),
deleted: false,
});
} else if (d.status === 'deleted') {
acc.push({
path: d.oldPath,
name: basename(d.oldPath),
deleted: true,
});
} else {
acc.push({
path: d.newPath || d.oldPath,
name: basename(d.newPath || d.oldPath),
deleted: false,
});
}
return acc;
}, [] as { path: string; name: string; deleted: boolean }[])
.filter(filterFile);
const diffFilesWithIds = await Promise.all(
diffFiles.map(async file => {
if (!file.deleted) {
const id = await getFileId(file.path);
return { ...file, id };
} else {
return { ...file, id: '' };
}
}),
);
return diffFilesWithIds;
};
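To make the reduce above concrete: a renamed entry fans out into a deletion of the old path plus an addition of the new one, so the later union/pruning step sees both sides (values below are illustrative):

// input difference:
// { oldPath: 'posts/a.md', newPath: 'posts/b.md', status: 'renamed', binary: false }
// resulting entries:
// { path: 'posts/a.md', name: 'a.md', deleted: true }
// { path: 'posts/b.md', name: 'b.md', deleted: false }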
type AllEntriesByFolderArgs = GetKeyArgs &
GetDiffFromLocalTreeMethods & {
listAllFiles: (
folder: string,
extension: string,
depth: number,
) => Promise<ImplementationFile[]>;
readFile: ReadFile;
readFileMetadata: ReadFileMetadata;
getDefaultBranch: () => Promise<{ name: string; sha: string }>;
isShaExistsInBranch: (branch: string, sha: string) => Promise<boolean>;
apiName: string;
localForage: LocalForage;
};
export const allEntriesByFolder = async ({
listAllFiles,
readFile,
readFileMetadata,
apiName,
branch,
localForage,
folder,
extension,
depth,
getDefaultBranch,
isShaExistsInBranch,
getDifferences,
getFileId,
filterFile,
}: AllEntriesByFolderArgs) => {
const listAllFilesAndPersist = async () => {
const files = await listAllFiles(folder, extension, depth);
const branch = await getDefaultBranch();
await persistLocalTree({
localForage,
localTree: {
head: branch.sha,
files: files.map(f => ({ id: f.id!, path: f.path, name: basename(f.path) })),
},
branch: branch.name,
depth,
extension,
folder,
});
return files;
};
const listFiles = async () => {
const localTree = await getLocalTree({ localForage, branch, folder, extension, depth });
if (localTree) {
const branch = await getDefaultBranch();
// if the branch was force pushed, the local tree head may no longer exist in the remote branch
const localTreeInBranch = await isShaExistsInBranch(branch.name, localTree.head);
if (!localTreeInBranch) {
console.log(
`Can't find local tree head '${localTree.head}' in branch '${branch.name}', rebuilding local tree`,
);
return listAllFilesAndPersist();
}
const diff = await getDiffFromLocalTree({
branch,
localTree,
folder,
extension,
depth,
getDifferences,
getFileId,
filterFile,
}).catch(e => {
console.log('Failed getting diff from local tree:', e);
return null;
});
if (!diff) {
console.log(`Diff is null, rebuilding local tree`);
return listAllFilesAndPersist();
}
if (diff.length === 0) {
// return local copy
return localTree.files;
} else {
// refresh local copy
const identity = (file: { path: string }) => file.path;
const deleted = diff.reduce((acc, d) => {
acc[d.path] = d.deleted;
return acc;
}, {} as Record<string, boolean>);
const newCopy = sortBy(
unionBy(
diff.filter(d => !deleted[d.path]),
localTree.files.filter(f => !deleted[f.path]),
identity,
),
identity,
);
await persistLocalTree({
localForage,
localTree: { head: branch.sha, files: newCopy },
branch: branch.name,
depth,
extension,
folder,
});
return newCopy;
}
} else {
return listAllFilesAndPersist();
}
};
const files = await listFiles();
return fetchFiles(files, readFile, readFileMetadata, apiName);
};
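Finally, a hedged sketch of how a git backend might wire this together; the api object and its method names are illustrative placeholders, not the exact backend classes touched by this commit:

const entries = await allEntriesByFolder({
  listAllFiles: (folder, extension, depth) => api.listAllFiles(folder, extension, depth),
  readFile: (path, id, options) => api.readFile(path, id, options),
  readFileMetadata: (path, id) => api.readFileMetadata(path, id),
  apiName: 'GitLab',
  branch: 'master',
  localForage,
  folder: 'content/posts',
  extension: 'md',
  depth: 1,
  getDefaultBranch: () => api.getDefaultBranch(), // -> { name, sha }
  isShaExistsInBranch: (branch, sha) => api.isShaExistsInBranch(branch, sha),
  getDifferences: (to, from) => api.getDifferences(to, from),
  getFileId: path => api.getFileId(path),
  filterFile: file => filterByExtension(file, 'md'),
});
// First run: lists everything and persists a local tree keyed by
// branch/folder/extension/depth. Later runs: diff against localTree.head only.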


@@ -6,7 +6,7 @@ import { isAbsolutePath, basename, fileExtensionWithSeparator, fileExtension } from './path';
import { onlySuccessfulPromises, flowAsync, then } from './promise';
import unsentRequest from './unsentRequest';
import {
filterByPropExtension,
filterByExtension,
getAllResponses,
parseLinkHeader,
parseResponse,
@@ -37,9 +37,11 @@ import {
Config as C,
UnpublishedEntryMediaFile as UEMF,
blobToFileObj,
allEntriesByFolder,
} from './implementation';
import {
readFile,
readFileMetadata,
CMS_BRANCH_PREFIX,
generateContentKey,
isCMSLabel,
@@ -54,6 +56,8 @@ import {
parseContentKey,
branchFromContentKey,
contentKeyFromBranch,
ApiRequest as AR,
requestWithBackoff,
} from './API';
import {
createPointerFile,
@@ -77,16 +81,7 @@ export type Entry = E;
export type UnpublishedEntryMediaFile = UEMF;
export type PersistOptions = PO;
export type AssetProxy = AP;
export type ApiRequest =
| {
url: string;
params?: Record<string, string | boolean | number>;
method?: 'POST' | 'PUT' | 'DELETE' | 'HEAD';
headers?: Record<string, string>;
body?: string | FormData;
cache?: 'no-store';
}
| string;
export type ApiRequest = AR;
export type Config = C;
export type FetchError = FE;
export type PointerFile = PF;
@@ -105,7 +100,7 @@ export const NetlifyCmsLibUtil = {
flowAsync,
then,
unsentRequest,
filterByPropExtension,
filterByExtension,
parseLinkHeader,
parseResponse,
responseParser,
@@ -118,6 +113,7 @@ export const NetlifyCmsLibUtil = {
getMediaDisplayURL,
getMediaAsBlob,
readFile,
readFileMetadata,
CMS_BRANCH_PREFIX,
generateContentKey,
isCMSLabel,
@@ -138,6 +134,8 @@ export const NetlifyCmsLibUtil = {
branchFromContentKey,
contentKeyFromBranch,
blobToFileObj,
requestWithBackoff,
allEntriesByFolder,
};
export {
APIError,
@@ -153,7 +151,7 @@ export {
flowAsync,
then,
unsentRequest,
filterByPropExtension,
filterByExtension,
parseLinkHeader,
getAllResponses,
parseResponse,
@@ -169,6 +167,7 @@ export {
getMediaDisplayURL,
getMediaAsBlob,
readFile,
readFileMetadata,
CMS_BRANCH_PREFIX,
generateContentKey,
isCMSLabel,
@@ -189,4 +188,6 @@ export {
branchFromContentKey,
contentKeyFromBranch,
blobToFileObj,
requestWithBackoff,
allEntriesByFolder,
};