Feat: entry sorting (#3494)

* refactor: typescript search actions, add tests avoid duplicate search

* refactor: switch from promise chain to async/await in loadEntries

* feat: add sorting, initial commit

* fix: set isFetching to true on entries request

* fix: ui improvements and bug fixes

* test: fix tests

* feat(backend-gitlab): cache local tree

* fix: fix prop type warning

* refactor: code cleanup

* feat(backend-bitbucket): add local tree caching support

* feat: switch to orderBy and support multiple sort keys

* fix: backoff function

* fix: improve backoff

* feat: infer sortable fields

* feat: fetch file commit metadata - initial commit

* feat: extract file author and date, finalize GitLab & Bitbucket

* refactor: code cleanup

* feat: handle github rate limit errors

* refactor: code cleanup

* fix(github): add missing author and date when traversing cursor

* fix: add missing author and date when traversing cursor

* refactor: code cleanup

* refactor: code cleanup

* refactor: code cleanup

* test: fix tests

* fix: rebuild local tree when head doesn't exist in remote branch

* fix: allow sortable fields to be an empty array

* fix: allow translation of built-in sort fields

* build: fix proxy server build

* fix: hide commit author and date fields by default on non-git backends

* fix(algolia): add listAllEntries method for algolia integration

* fix: handle sort fields overflow

* test(bitbucket): re-record some bitbucket e2e tests

* test(bitbucket): fix media library test

* refactor(gitgateway-gitlab): share request code and handle 404 errors

* fix: always show commit date by default

* docs: add sortableFields

* refactor: code cleanup

* improvement: drop multi-sort, rework sort UI

* chore: force main package bumps

Co-authored-by: Shawn Erquhart <shawn@erquh.art>
Author: Erez Rokah
Date: 2020-04-01 06:13:27 +03:00
Committed by: GitHub
Parent: cbb3927101
Commit: 174d86f0a0
82 changed files with 15128 additions and 12621 deletions

@@ -1,3 +1,6 @@
import { asyncLock, AsyncLock } from './asyncLock';
import unsentRequest from './unsentRequest';
export const CMS_BRANCH_PREFIX = 'cms';
export const DEFAULT_PR_BODY = 'Automatically generated by Netlify CMS';
export const MERGE_COMMIT_MESSAGE = 'Automatically generated. Merged on Netlify CMS.';
@@ -27,6 +30,93 @@ export interface FetchError extends Error {
status: number;
}
interface API {
rateLimiter?: AsyncLock;
buildRequest: (req: ApiRequest) => ApiRequest | Promise<ApiRequest>;
requestFunction?: (req: ApiRequest) => Promise<Response>;
}
export type ApiRequestObject = {
url: string;
params?: Record<string, string | boolean | number>;
method?: 'POST' | 'PUT' | 'DELETE' | 'HEAD';
headers?: Record<string, string>;
body?: string | FormData;
cache?: 'no-store';
};
export type ApiRequest = ApiRequestObject | string;
class RateLimitError extends Error {
resetSeconds: number;
constructor(message: string, resetSeconds: number) {
super(message);
if (resetSeconds < 0) {
this.resetSeconds = 1;
} else if (resetSeconds > 60 * 60) {
this.resetSeconds = 60 * 60;
} else {
this.resetSeconds = resetSeconds;
}
}
}
export const requestWithBackoff = async (
api: API,
req: ApiRequest,
attempt = 1,
): Promise<Response> => {
if (api.rateLimiter) {
await api.rateLimiter.acquire();
}
try {
const builtRequest = await api.buildRequest(req);
const requestFunction = api.requestFunction || unsentRequest.performRequest;
const response: Response = await requestFunction(builtRequest);
if (response.status === 429) {
// GitLab/Bitbucket too many requests
const text = await response.text().catch(() => 'Too many requests');
throw new Error(text);
} else if (response.status === 403) {
// GitHub too many requests
const { message } = await response.json().catch(() => ({ message: '' }));
if (message.match('API rate limit exceeded')) {
const now = new Date();
const nextWindowInSeconds = response.headers.has('X-RateLimit-Reset')
? parseInt(response.headers.get('X-RateLimit-Reset')!)
: now.getTime() / 1000 + 60;
throw new RateLimitError(message, nextWindowInSeconds);
}
}
return response;
} catch (err) {
if (attempt <= 5) {
if (!api.rateLimiter) {
const timeout = err.resetSeconds || attempt * attempt;
console.log(
`Pausing requests for ${timeout} ${
attempt === 1 ? 'second' : 'seconds'
} due to fetch failures:`,
err.message,
);
api.rateLimiter = asyncLock();
api.rateLimiter.acquire();
setTimeout(() => {
api.rateLimiter?.release();
api.rateLimiter = undefined;
console.log(`Done pausing requests`);
}, 1000 * timeout);
}
return requestWithBackoff(api, req, attempt + 1);
} else {
throw err;
}
}
};
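
A minimal usage sketch (not part of this diff) of how a backend might route its calls through requestWithBackoff. The ExampleBackendAPI class, the api.example.com host, and the assumption that ApiRequest and requestWithBackoff are re-exported from the netlify-cms-lib-util package entry point are illustrative; only the API interface fields above come from the source.

// Hypothetical sketch: a backend satisfies the API interface above (buildRequest is
// required, requestFunction and rateLimiter are optional) and funnels every call
// through requestWithBackoff, so 429/403 rate-limit responses pause further
// requests and retry with a quadratic backoff.
import { ApiRequest, requestWithBackoff } from 'netlify-cms-lib-util';

class ExampleBackendAPI {
  buildRequest = (req: ApiRequest): ApiRequest =>
    typeof req === 'string' ? `https://api.example.com${req}` : req;

  // Use plain fetch instead of the default unsentRequest.performRequest
  // (params handling is omitted for brevity).
  requestFunction = (req: ApiRequest): Promise<Response> =>
    typeof req === 'string'
      ? fetch(req)
      : fetch(req.url, { method: req.method, headers: req.headers, body: req.body, cache: req.cache });

  request(req: ApiRequest): Promise<Response> {
    // rateLimiter is left undefined; requestWithBackoff creates it on demand
    // when a rate-limit error or fetch failure occurs.
    return requestWithBackoff(this, req);
  }
}

// const api = new ExampleBackendAPI();
// const response = await api.request('/repos/owner/repo/contents');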
export const readFile = async (
id: string | null | undefined,
fetchContent: () => Promise<string | Blob>,
@@ -46,6 +136,29 @@ export const readFile = async (
return content;
};
export type FileMetadata = {
author: string;
updatedOn: string;
};
const getFileMetadataKey = (id: string) => `gh.${id}.meta`;
export const readFileMetadata = async (
id: string,
fetchMetadata: () => Promise<FileMetadata>,
localForage: LocalForage,
) => {
const key = getFileMetadataKey(id);
const cached = await localForage.getItem<FileMetadata>(key);
if (cached) {
return cached;
} else {
const metadata = await fetchMetadata();
await localForage.setItem<FileMetadata>(key, metadata);
return metadata;
}
};
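
A minimal sketch (not part of this diff) of how readFileMetadata might be used to cache per-file commit author and date for entry sorting. fetchCommitMetadata and getEntryMetadata are hypothetical stand-ins for backend-specific calls, and the assumption that FileMetadata and readFileMetadata are re-exported from the netlify-cms-lib-util package entry point is illustrative.

import localForage from 'localforage';
import { FileMetadata, readFileMetadata } from 'netlify-cms-lib-util';

// Hypothetical stand-in for a backend call that returns the last commit that
// touched the file (e.g. via the GitHub/GitLab/Bitbucket commits endpoints).
const fetchCommitMetadata = async (path: string): Promise<FileMetadata> => {
  // ...backend-specific request for `path`...
  return { author: 'Jane Doe', updatedOn: '2020-04-01T06:13:27+03:00' };
};

// Keyed by the file's id (e.g. its blob SHA), so the cached author/updatedOn
// pair is reused until the file changes and gets a new id.
const getEntryMetadata = (id: string, path: string) =>
  readFileMetadata(id, () => fetchCommitMetadata(path), localForage);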
/**
* Keywords for inferring a status that will provide a deploy preview URL.
*/