* refactor: typescript search actions, add tests, avoid duplicate search
* refactor: switch from promise chain to async/await in loadEntries
* feat: add sorting, initial commit
* fix: set isFetching to true on entries request
* fix: ui improvements and bug fixes
* test: fix tests
* feat(backend-gitlab): cache local tree
* fix: fix prop type warning
* refactor: code cleanup
* feat(backend-bitbucket): add local tree caching support
* feat: switch to orderBy and support multiple sort keys
* fix: backoff function
* fix: improve backoff
* feat: infer sortable fields
* feat: fetch file commit metadata - initial commit
* feat: extract file author and date, finalize GitLab & Bitbucket
* refactor: code cleanup
* feat: handle github rate limit errors
* refactor: code cleanup
* fix(github): add missing author and date when traversing cursor
* fix: add missing author and date when traversing cursor
* refactor: code cleanup
* refactor: code cleanup
* refactor: code cleanup
* test: fix tests
* fix: rebuild local tree when head doesn't exist in remote branch
* fix: allow sortable fields to be an empty array
* fix: allow translation of built in sort fields
* build: fix proxy server build
* fix: hide commit author and date fields by default on non git backends
* fix(algolia): add listAllEntries method for algolia integration
* fix: handle sort fields overflow
* test(bitbucket): re-record some bitbucket e2e tests
* test(bitbucket): fix media library test
* refactor(gitgateway-gitlab): share request code and handle 404 errors
* fix: always show commit date by default
* docs: add sortableFields
* refactor: code cleanup
* improvement: drop multi-sort, rework sort UI
* chore: force main package bumps

Co-authored-by: Shawn Erquhart <shawn@erquh.art>
import { flow, fromPairs } from 'lodash';
import { map } from 'lodash/fp';
import { fromJS } from 'immutable';
import unsentRequest from './unsentRequest';
import APIError from './APIError';

type Formatter = (res: Response) => Promise<string | Blob | unknown>;

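// Checks whether a file path ends with the given extension; the leading dot on
// `extension` is optional. Illustrative calls (example paths are not from the
// original file):
//   filterByExtension({ path: 'content/posts/hello.md' }, 'md');    // true
//   filterByExtension({ path: 'content/posts/hello.md' }, '.json'); // false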
export const filterByExtension = (file: { path: string }, extension: string) => {
  const path = file?.path || '';
  return path.endsWith(extension.startsWith('.') ? extension : `.${extension}`);
};

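// Wraps a formatter so that a failure while reading the body surfaces as a single
// Error naming the expected format. Since the formatters below are async, only
// synchronously thrown errors are re-wrapped here; rejected promises propagate and
// are converted to APIError inside parseResponse.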
const catchFormatErrors = (format: string, formatter: Formatter) => (res: Response) => {
  try {
    return formatter(res);
  } catch (err) {
    throw new Error(
      `Response cannot be parsed into the expected format (${format}): ${err.message}`,
    );
  }
};

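// Immutable.js Map of supported response formats. `json` validates the Content-Type
// header before parsing; `text` and `blob` delegate to the Fetch API. mapEntries
// wraps every formatter with catchFormatErrors, so lookups such as
// responseFormatters.get('json', false) return the error-wrapped version.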
const responseFormatters = fromJS({
  json: async (res: Response) => {
    const contentType = res.headers.get('Content-Type') || '';
    if (!contentType.startsWith('application/json') && !contentType.startsWith('text/json')) {
      throw new Error(`${contentType} is not a valid JSON Content-Type`);
    }
    return res.json();
  },
  text: async (res: Response) => res.text(),
  blob: async (res: Response) => res.blob(),
}).mapEntries(([format, formatter]: [string, Formatter]) => [
  format,
  catchFormatErrors(format, formatter),
]);

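// Reads a fetch Response in the requested format and converts failures into
// APIError instances: an unsupported format, a body that fails to parse, or a
// non-OK status when `expectingOk` is true. Sketch of a typical call, with a
// hypothetical URL and apiName:
//   const data = await parseResponse(await fetch('https://example.com/api/items'), {
//     format: 'json',
//     apiName: 'Example API',
//   });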
export const parseResponse = async (
  res: Response,
  { expectingOk = true, format = 'text', apiName = '' },
) => {
  let body;
  try {
    const formatter = responseFormatters.get(format, false);
    if (!formatter) {
      throw new Error(`${format} is not a supported response format.`);
    }
    body = await formatter(res);
  } catch (err) {
    throw new APIError(err.message, res.status, apiName);
  }
  if (expectingOk && !res.ok) {
    const isJSON = format === 'json';
    const message = isJSON ? body.message || body.msg || body.error?.message : body;
    throw new APIError(isJSON && message ? message : body, res.status, apiName);
  }
  return body;
};

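// Curried variant of parseResponse, convenient at the end of a fetch chain.
// Illustrative usage (URL and apiName are placeholders):
//   fetch('https://example.com/api/items').then(
//     responseParser({ format: 'json', apiName: 'Example API' }),
//   );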
export const responseParser = (options: {
  expectingOk?: boolean;
  format: string;
  apiName: string;
}) => (res: Response) => parseResponse(res, options);

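// Parses an RFC 5988 `Link` response header into a map of rel name -> URL,
// replacing `+` with `%20` in each URL. Illustrative input/output (the URL is a
// placeholder):
//   parseLinkHeader('<https://example.com/items?page=2>; rel="next"');
//   // => { next: 'https://example.com/items?page=2' }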
export const parseLinkHeader = (header: string | null) => {
  if (!header) {
    return {};
  }
  return flow([
    linksString => linksString.split(','),
    map((str: string) => str.trim().split(';')),
    map(([linkStr, keyStr]) => [
      keyStr.match(/rel="(.*?)"/)[1],
      linkStr
        .trim()
        .match(/<(.*?)>/)[1]
        .replace(/\+/g, '%20'),
    ]),
    fromPairs,
  ])(header);
};

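// Follows paginated responses by reading each page's `Link` header and requesting
// the URL whose rel matches `linkHeaderRelName`, up to a fixed maximum number of
// responses (maxResponses). `nextUrlProcessor` lets callers rewrite each next-page
// URL before it is fetched. The raw Response objects are returned, not parsed
// bodies. Illustrative call (the URL and rel name are placeholders):
//   const pages = await getAllResponses('https://example.com/api/items', {}, 'next', url => url);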
export const getAllResponses = async (
  url: string,
  options: { headers?: {} } = {},
  linkHeaderRelName: string,
  nextUrlProcessor: (url: string) => string,
) => {
  const maxResponses = 30;
  let responseCount = 1;

  let req = unsentRequest.fromFetchArguments(url, options);

  const pageResponses = [];

  while (req && responseCount < maxResponses) {
    const pageResponse = await unsentRequest.performRequest(req);
    const linkHeader = pageResponse.headers.get('Link');
    const nextURL = linkHeader && parseLinkHeader(linkHeader)[linkHeaderRelName];

    const { headers = {} } = options;
    req = nextURL && unsentRequest.fromFetchArguments(nextUrlProcessor(nextURL), { headers });

    pageResponses.push(pageResponse);
    responseCount++;
  }

  return pageResponses;
};

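// Returns the number of '/'-separated segments in a path. Illustrative call (the
// path is a placeholder):
//   getPathDepth('{{year}}/{{month}}/{{slug}}'); // 3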
export const getPathDepth = (path: string) => {
  const depth = path.split('/').length;
  return depth;
};