refactor: convert function expressions to declarations (#4926)

Author: Vladislav Shkodin
Date: 2021-02-08 20:01:21 +02:00
Committed by: GitHub
Parent: c0236536dd
Commit: 141a2eba56
241 changed files with 3444 additions and 2933 deletions

View File

@ -38,11 +38,11 @@ class RateLimitError extends Error {
}
}
export const requestWithBackoff = async (
export async function requestWithBackoff(
api: API,
req: ApiRequest,
attempt = 1,
): Promise<Response> => {
): Promise<Response> {
if (api.rateLimiter) {
await api.rateLimiter.acquire();
}
@ -92,14 +92,14 @@ export const requestWithBackoff = async (
return requestWithBackoff(api, req, attempt + 1);
}
}
};
}
export const readFile = async (
export async function readFile(
id: string | null | undefined,
fetchContent: () => Promise<string | Blob>,
localForage: LocalForage,
isText: boolean,
) => {
) {
const key = id ? (isText ? `gh.${id}` : `gh.${id}.blob`) : null;
const cached = key ? await localForage.getItem<string | Blob>(key) : null;
if (cached) {
@ -111,20 +111,22 @@ export const readFile = async (
await localForage.setItem(key, content);
}
return content;
};
}
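
For context, the readFile helper refactored above caches file contents in localForage under a gh.<id> key (gh.<id>.blob for binary content) and only calls fetchContent on a cache miss. A hedged usage sketch; the import path and the inline fetch callback are assumptions:

import localForage from 'localforage';
import { readFile } from './API'; // path assumed

async function loadPostBody(sha: string): Promise<string> {
  const content = await readFile(
    sha,                         // cache key becomes `gh.<sha>` because isText is true
    async () => '# Hello world', // stand-in for a real API call; only runs on a cache miss
    localForage,
    true,                        // isText: cache and return a string rather than a Blob
  );
  return content as string;
}
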
export type FileMetadata = {
author: string;
updatedOn: string;
};
const getFileMetadataKey = (id: string) => `gh.${id}.meta`;
function getFileMetadataKey(id: string) {
return `gh.${id}.meta`;
}
export const readFileMetadata = async (
export async function readFileMetadata(
id: string | null | undefined,
fetchMetadata: () => Promise<FileMetadata>,
localForage: LocalForage,
) => {
) {
const key = id ? getFileMetadataKey(id) : null;
const cached = key && (await localForage.getItem<FileMetadata>(key));
if (cached) {
@ -136,7 +138,7 @@ export const readFileMetadata = async (
await localForage.setItem<FileMetadata>(key, metadata);
}
return metadata;
};
}
/**
* Keywords for inferring a status that will provide a deploy preview URL.
@ -148,12 +150,12 @@ const PREVIEW_CONTEXT_KEYWORDS = ['deploy'];
* deploy preview. Checks for an exact match against `previewContext` if given,
* otherwise checks for inclusion of a value from `PREVIEW_CONTEXT_KEYWORDS`.
*/
export const isPreviewContext = (context: string, previewContext: string) => {
export function isPreviewContext(context: string, previewContext: string) {
if (previewContext) {
return context === previewContext;
}
return PREVIEW_CONTEXT_KEYWORDS.some(keyword => context.includes(keyword));
};
}
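
A quick illustration of the matching rules described in the doc comment above (values are hypothetical):

import { isPreviewContext } from './API'; // path assumed

isPreviewContext('deploy/netlify', '');                       // true: contains the 'deploy' keyword
isPreviewContext('ci/circleci: build', '');                   // false: no keyword match
isPreviewContext('ci/circleci: build', 'ci/circleci: build'); // true: exact previewContext match
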
export enum PreviewState {
Other = 'other',
@ -164,20 +166,20 @@ export enum PreviewState {
* Retrieve a deploy preview URL from an array of statuses. By default, a
* matching status is inferred via `isPreviewContext`.
*/
export const getPreviewStatus = (
export function getPreviewStatus(
statuses: {
context: string;
target_url: string;
state: PreviewState;
}[],
previewContext: string,
) => {
) {
return statuses.find(({ context }) => {
return isPreviewContext(context, previewContext);
});
};
}
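
getPreviewStatus then simply returns the first status whose context passes that check. A hedged sketch with made-up statuses:

import { getPreviewStatus, PreviewState } from './API'; // path assumed

const preview = getPreviewStatus(
  [
    { context: 'ci/tests', target_url: 'https://ci.example.com/builds/1', state: PreviewState.Other },
    { context: 'deploy/preview', target_url: 'https://deploy-preview-1.example.com', state: PreviewState.Other },
  ],
  '',
);
// preview?.target_url === 'https://deploy-preview-1.example.com' (only the context is inspected)
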
const getConflictingBranches = (branchName: string) => {
function getConflictingBranches(branchName: string) {
// for cms/posts/post-1, conflicting branches are cms/posts, cms
const parts = branchName.split('/');
parts.pop();
@ -188,13 +190,13 @@ const getConflictingBranches = (branchName: string) => {
}, [] as string[]);
return conflictingBranches;
};
}
export const throwOnConflictingBranches = async (
export async function throwOnConflictingBranches(
branchName: string,
getBranch: (name: string) => Promise<{ name: string }>,
apiName: string,
) => {
) {
const possibleConflictingBranches = getConflictingBranches(branchName);
const conflictingBranches = await Promise.all(
@ -213,4 +215,4 @@ export const throwOnConflictingBranches = async (
apiName,
);
}
};
}
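
These two helpers guard against git's restriction that cms/posts and cms/posts/post-1 cannot both exist as branch refs. A hedged usage sketch with a stubbed getBranch; the import path and the stub are assumptions:

import { throwOnConflictingBranches } from './API'; // path assumed

const existingBranches = new Set(['master', 'cms/posts']);

async function createEntryBranch() {
  // expected to reject, since the prefix branch 'cms/posts' already exists
  await throwOnConflictingBranches(
    'cms/posts/post-1',
    async (name: string) => {
      if (!existingBranches.has(name)) {
        throw new Error(`Branch ${name} not found`);
      }
      return { name };
    },
    'GitHub',
  );
}
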

View File

@ -3,27 +3,36 @@ export const DEFAULT_PR_BODY = 'Automatically generated by Netlify CMS';
export const MERGE_COMMIT_MESSAGE = 'Automatically generated. Merged on Netlify CMS.';
const DEFAULT_NETLIFY_CMS_LABEL_PREFIX = 'netlify-cms/';
const getLabelPrefix = (labelPrefix: string) => labelPrefix || DEFAULT_NETLIFY_CMS_LABEL_PREFIX;
export const isCMSLabel = (label: string, labelPrefix: string) =>
label.startsWith(getLabelPrefix(labelPrefix));
export const labelToStatus = (label: string, labelPrefix: string) =>
label.substr(getLabelPrefix(labelPrefix).length);
export const statusToLabel = (status: string, labelPrefix: string) =>
`${getLabelPrefix(labelPrefix)}${status}`;
function getLabelPrefix(labelPrefix: string) {
return labelPrefix || DEFAULT_NETLIFY_CMS_LABEL_PREFIX;
}
export const generateContentKey = (collectionName: string, slug: string) =>
`${collectionName}/${slug}`;
export function isCMSLabel(label: string, labelPrefix: string) {
return label.startsWith(getLabelPrefix(labelPrefix));
}
export const parseContentKey = (contentKey: string) => {
export function labelToStatus(label: string, labelPrefix: string) {
return label.substr(getLabelPrefix(labelPrefix).length);
}
export function statusToLabel(status: string, labelPrefix: string) {
return `${getLabelPrefix(labelPrefix)}${status}`;
}
export function generateContentKey(collectionName: string, slug: string) {
return `${collectionName}/${slug}`;
}
export function parseContentKey(contentKey: string) {
const index = contentKey.indexOf('/');
return { collection: contentKey.substr(0, index), slug: contentKey.substr(index + 1) };
};
}
export const contentKeyFromBranch = (branch: string) => {
export function contentKeyFromBranch(branch: string) {
return branch.substring(`${CMS_BRANCH_PREFIX}/`.length);
};
}
export const branchFromContentKey = (contentKey: string) => {
export function branchFromContentKey(contentKey: string) {
return `${CMS_BRANCH_PREFIX}/${contentKey}`;
};
}
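
Taken together, the helpers refactored in this file round-trip between workflow statuses, PR labels, content keys, and branch names. A hedged sketch, assuming CMS_BRANCH_PREFIX is 'cms' and passing an empty labelPrefix so the 'netlify-cms/' default applies:

import {
  isCMSLabel,
  labelToStatus,
  statusToLabel,
  generateContentKey,
  parseContentKey,
  contentKeyFromBranch,
  branchFromContentKey,
} from './APIUtils'; // path assumed

statusToLabel('pending_review', '');             // 'netlify-cms/pending_review'
labelToStatus('netlify-cms/pending_review', ''); // 'pending_review'
isCMSLabel('netlify-cms/pending_review', '');    // true
isCMSLabel('bug', '');                           // false

const contentKey = generateContentKey('posts', 'my-first-post'); // 'posts/my-first-post'
parseContentKey(contentKey);                     // { collection: 'posts', slug: 'my-first-post' }
branchFromContentKey(contentKey);                // 'cms/posts/my-first-post'
contentKeyFromBranch('cms/posts/my-first-post'); // 'posts/my-first-post'
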

View File

@ -27,7 +27,7 @@ export type CursorStore = {
type ActionHandler = (action: string) => unknown;
const jsToMap = (obj: {}) => {
function jsToMap(obj: {}) {
if (obj === undefined) {
return Map();
}
@ -36,7 +36,7 @@ const jsToMap = (obj: {}) => {
throw new Error('Object must be equivalent to a Map.');
}
return immutableObj;
};
}
const knownMetaKeys = Set([
'index',
@ -49,8 +49,10 @@ const knownMetaKeys = Set([
'folder',
'depth',
]);
const filterUnknownMetaKeys = (meta: Map<string, string>) =>
meta.filter((_v, k) => knownMetaKeys.has(k as string));
function filterUnknownMetaKeys(meta: Map<string, string>) {
return meta.filter((_v, k) => knownMetaKeys.has(k as string));
}
/*
createCursorMap takes one of three signatures:
@ -58,7 +60,7 @@ const filterUnknownMetaKeys = (meta: Map<string, string>) =>
- (cursorMap: <object/Map with optional actions, data, and meta keys>) -> cursor
- (actions: <array/List>, data: <object/Map>, meta: <optional object/Map>) -> cursor
*/
const createCursorStore = (...args: {}[]) => {
function createCursorStore(...args: {}[]) {
const { actions, data, meta } =
args.length === 1
? jsToMap(args[0]).toObject()
@ -71,15 +73,18 @@ const createCursorStore = (...args: {}[]) => {
data: jsToMap(data),
meta: jsToMap(meta).update(filterUnknownMetaKeys),
}) as CursorStore;
};
}
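
The comment above lists three accepted signatures; inside this module they would look roughly like the following (a hedged sketch, createCursorStore is module-private and the meta keys shown are taken from knownMetaKeys):

// inside this module:
const empty = createCursorStore();
const fromObject = createCursorStore({
  actions: ['next', 'prev'],
  data: { page: 1 },
  meta: { index: 0, folder: 'posts' }, // unknown meta keys would be filtered out
});
const fromArgs = createCursorStore(['next', 'prev'], { page: 1 }, { index: 0 });
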
const hasAction = (store: CursorStore, action: string) => store.hasIn(['actions', action]);
function hasAction(store: CursorStore, action: string) {
return store.hasIn(['actions', action]);
}
const getActionHandlers = (store: CursorStore, handler: ActionHandler) =>
store
function getActionHandlers(store: CursorStore, handler: ActionHandler) {
return store
.get('actions', Set<string>())
.toMap()
.map(action => handler(action as string));
}
// The cursor logic is entirely functional, so this class simply
// provides a chainable interface

View File

@ -21,17 +21,20 @@ describe('parseLinkHeader', () => {
});
describe('getAllResponses', () => {
const generatePulls = length => {
function generatePulls(length) {
return Array.from({ length }, (_, id) => {
return { id: id + 1, number: `134${id}`, state: 'open' };
});
};
}
function createLinkHeaders({ page, pageCount }) {
const pageNum = parseInt(page, 10);
const pageCountNum = parseInt(pageCount, 10);
const url = 'https://api.github.com/pulls';
const link = linkPage => `<${url}?page=${linkPage}>`;
function link(linkPage) {
return `<${url}?page=${linkPage}>`;
}
const linkHeader = oneLine`
${pageNum === 1 ? '' : `${link(1)}; rel="first",`}

View File

@ -2,10 +2,10 @@ import semaphore from 'semaphore';
export type AsyncLock = { release: () => void; acquire: () => Promise<boolean> };
export const asyncLock = (): AsyncLock => {
export function asyncLock(): AsyncLock {
let lock = semaphore(1);
const acquire = (timeout = 15000) => {
function acquire(timeout = 15000) {
const promise = new Promise<boolean>(resolve => {
// this makes sure a caller doesn't get stuck forever awaiting the lock
const timeoutId = setTimeout(() => {
@ -21,9 +21,9 @@ export const asyncLock = (): AsyncLock => {
});
return promise;
};
}
const release = () => {
function release() {
try {
// suppress too many calls to leave error
lock.leave();
@ -37,7 +37,7 @@ export const asyncLock = (): AsyncLock => {
lock = semaphore(1);
}
}
};
}
return { acquire, release };
};
}
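
asyncLock wraps a one-slot semaphore: acquire resolves to false instead of hanging if the slot cannot be taken within the timeout, and release re-creates the semaphore if it got into a bad state. A hedged usage sketch; the import path is assumed:

import { asyncLock } from './asyncLock'; // path assumed

const lock = asyncLock();

async function writeSafely(doWrite: () => Promise<void>) {
  const acquired = await lock.acquire(); // resolves false after the timeout instead of hanging
  if (!acquired) {
    console.warn('Failed to acquire lock, proceeding without it');
  }
  try {
    await doWrite();
  } finally {
    lock.release();
  }
}
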

View File

@ -6,20 +6,22 @@ import APIError from './APIError';
type Formatter = (res: Response) => Promise<string | Blob | unknown>;
export const filterByExtension = (file: { path: string }, extension: string) => {
export function filterByExtension(file: { path: string }, extension: string) {
const path = file?.path || '';
return path.endsWith(extension.startsWith('.') ? extension : `.${extension}`);
};
}
const catchFormatErrors = (format: string, formatter: Formatter) => (res: Response) => {
try {
return formatter(res);
} catch (err) {
throw new Error(
`Response cannot be parsed into the expected format (${format}): ${err.message}`,
);
}
};
function catchFormatErrors(format: string, formatter: Formatter) {
return (res: Response) => {
try {
return formatter(res);
} catch (err) {
throw new Error(
`Response cannot be parsed into the expected format (${format}): ${err.message}`,
);
}
};
}
const responseFormatters = fromJS({
json: async (res: Response) => {
@ -36,10 +38,10 @@ const responseFormatters = fromJS({
catchFormatErrors(format, formatter),
]);
export const parseResponse = async (
export async function parseResponse(
res: Response,
{ expectingOk = true, format = 'text', apiName = '' },
) => {
) {
let body;
try {
const formatter = responseFormatters.get(format, false);
@ -56,15 +58,17 @@ export const parseResponse = async (
throw new APIError(isJSON && message ? message : body, res.status, apiName);
}
return body;
};
}
export const responseParser = (options: {
export function responseParser(options: {
expectingOk?: boolean;
format: string;
apiName: string;
}) => (res: Response) => parseResponse(res, options);
}) {
return (res: Response) => parseResponse(res, options);
}
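
responseParser is the curried form intended for fetch chains. A hedged sketch; the import path and URL are illustrative:

import { responseParser } from './backendUtil'; // path assumed

function listPullRequests() {
  return fetch('https://api.github.com/repos/owner/repo/pulls').then(
    responseParser({ format: 'json', apiName: 'GitHub' }),
  );
}
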
export const parseLinkHeader = (header: string | null) => {
export function parseLinkHeader(header: string | null) {
if (!header) {
return {};
}
@ -80,14 +84,14 @@ export const parseLinkHeader = (header: string | null) => {
]),
fromPairs,
])(header);
};
}
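
parseLinkHeader turns a GitHub-style Link header into a rel-to-URL map. An illustrative call, with the output shape inferred from the pipeline above and the pagination test earlier in this commit:

import { parseLinkHeader } from './backendUtil'; // path assumed

parseLinkHeader('<https://api.github.com/pulls?page=2>; rel="next", <https://api.github.com/pulls?page=5>; rel="last"');
// { next: 'https://api.github.com/pulls?page=2', last: 'https://api.github.com/pulls?page=5' }
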
export const getAllResponses = async (
export async function getAllResponses(
url: string,
options: { headers?: {} } = {},
linkHeaderRelName: string,
nextUrlProcessor: (url: string) => string,
) => {
) {
const maxResponses = 30;
let responseCount = 1;
@ -107,9 +111,9 @@ export const getAllResponses = async (
}
return pageResponses;
};
}
export const getPathDepth = (path: string) => {
export function getPathDepth(path: string) {
const depth = path.split('/').length;
return depth;
};
}

View File

@ -10,9 +10,18 @@ export interface PointerFile {
sha: string;
}
const splitIntoLines = (str: string) => str.split('\n');
const splitIntoWords = (str: string) => str.split(/\s+/g);
const isNonEmptyString = (str: string) => str !== '';
function splitIntoLines(str: string) {
return str.split('\n');
}
function splitIntoWords(str: string) {
return str.split(/\s+/g);
}
function isNonEmptyString(str: string) {
return str !== '';
}
const withoutEmptyLines = flow([map((str: string) => str.trim()), filter(isNonEmptyString)]);
export const parsePointerFile: (data: string) => PointerFile = flow([
splitIntoLines,
@ -29,9 +38,11 @@ export const parsePointerFile: (data: string) => PointerFile = flow([
//
// .gitattributes file parsing
const removeGitAttributesCommentsFromLine = (line: string) => line.split('#')[0];
function removeGitAttributesCommentsFromLine(line: string) {
return line.split('#')[0];
}
const parseGitPatternAttribute = (attributeString: string) => {
function parseGitPatternAttribute(attributeString: string) {
// There are three kinds of attribute settings:
// - a key=val pair sets an attribute to a specific value
// - a key without a value and a leading hyphen sets an attribute to false
@ -44,7 +55,7 @@ const parseGitPatternAttribute = (attributeString: string) => {
return [attributeString.slice(1), false];
}
return [attributeString, true];
};
}
const parseGitPatternAttributes = flow([map(parseGitPatternAttribute), fromPairs]);
@ -69,11 +80,13 @@ export const getLargeMediaPatternsFromGitAttributesFile = flow([
map(([pattern]) => pattern),
]);
export const createPointerFile = ({ size, sha }: PointerFile) => `\
export function createPointerFile({ size, sha }: PointerFile) {
return `\
version https://git-lfs.github.com/spec/v1
oid sha256:${sha}
size ${size}
`;
}
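
createPointerFile emits the standard Git LFS pointer text (version, oid, and size lines), and parsePointerFile above is its inverse. A hedged round-trip sketch; the sha is a dummy value and the import path is assumed:

import { createPointerFile, parsePointerFile } from './git-lfs'; // path assumed

const pointerText = createPointerFile({
  sha: 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
  size: 1024,
});
// version https://git-lfs.github.com/spec/v1
// oid sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
// size 1024

const pointer = parsePointerFile(pointerText); // expected to recover { sha, size }
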
export async function getPointerFileForMediaFileObj(
client: { uploadResource: (pointer: PointerFile, resource: Blob) => Promise<string> },

View File

@ -200,12 +200,12 @@ type ReadFile = (
type ReadFileMetadata = (path: string, id: string | null | undefined) => Promise<FileMetadata>;
const fetchFiles = async (
async function fetchFiles(
files: ImplementationFile[],
readFile: ReadFile,
readFileMetadata: ReadFileMetadata,
apiName: string,
) => {
) {
const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
const promises = [] as Promise<ImplementationEntry | { error: boolean }>[];
files.forEach(file => {
@ -231,28 +231,28 @@ const fetchFiles = async (
return Promise.all(promises).then(loadedEntries =>
loadedEntries.filter(loadedEntry => !(loadedEntry as { error: boolean }).error),
) as Promise<ImplementationEntry[]>;
};
}
export const entriesByFolder = async (
export async function entriesByFolder(
listFiles: () => Promise<ImplementationFile[]>,
readFile: ReadFile,
readFileMetadata: ReadFileMetadata,
apiName: string,
) => {
) {
const files = await listFiles();
return fetchFiles(files, readFile, readFileMetadata, apiName);
};
}
export const entriesByFiles = async (
export async function entriesByFiles(
files: ImplementationFile[],
readFile: ReadFile,
readFileMetadata: ReadFileMetadata,
apiName: string,
) => {
) {
return fetchFiles(files, readFile, readFileMetadata, apiName);
};
}
export const unpublishedEntries = async (listEntriesKeys: () => Promise<string[]>) => {
export async function unpublishedEntries(listEntriesKeys: () => Promise<string[]>) {
try {
const keys = await listEntriesKeys();
return keys;
@ -262,14 +262,14 @@ export const unpublishedEntries = async (listEntriesKeys: () => Promise<string[]
}
throw error;
}
};
}
export const blobToFileObj = (name: string, blob: Blob) => {
export function blobToFileObj(name: string, blob: Blob) {
const options = name.match(/.svg$/) ? { type: 'image/svg+xml' } : {};
return new File([blob], name, options);
};
}
export const getMediaAsBlob = async (path: string, id: string | null, readFile: ReadFile) => {
export async function getMediaAsBlob(path: string, id: string | null, readFile: ReadFile) {
let blob: Blob;
if (path.match(/.svg$/)) {
const text = (await readFile(path, id, { parseText: true })) as string;
@ -278,13 +278,13 @@ export const getMediaAsBlob = async (path: string, id: string | null, readFile:
blob = (await readFile(path, id, { parseText: false })) as Blob;
}
return blob;
};
}
export const getMediaDisplayURL = async (
export async function getMediaDisplayURL(
displayURL: DisplayURL,
readFile: ReadFile,
semaphore: Semaphore,
) => {
) {
const { path, id } = displayURL as DisplayURLObject;
return new Promise<string>((resolve, reject) =>
semaphore.take(() =>
@ -294,9 +294,9 @@ export const getMediaDisplayURL = async (
.finally(() => semaphore.leave()),
),
);
};
}
export const runWithLock = async (lock: AsyncLock, func: Function, message: string) => {
export async function runWithLock(lock: AsyncLock, func: Function, message: string) {
try {
const acquired = await lock.acquire();
if (!acquired) {
@ -308,7 +308,7 @@ export const runWithLock = async (lock: AsyncLock, func: Function, message: stri
} finally {
lock.release();
}
};
}
const LOCAL_KEY = 'git.local';
@ -324,9 +324,9 @@ type GetKeyArgs = {
depth: number;
};
const getLocalKey = ({ branch, folder, extension, depth }: GetKeyArgs) => {
function getLocalKey({ branch, folder, extension, depth }: GetKeyArgs) {
return `${LOCAL_KEY}.${branch}.${folder}.${extension}.${depth}`;
};
}
type PersistLocalTreeArgs = GetKeyArgs & {
localForage: LocalForage;
@ -337,32 +337,32 @@ type GetLocalTreeArgs = GetKeyArgs & {
localForage: LocalForage;
};
export const persistLocalTree = async ({
export async function persistLocalTree({
localForage,
localTree,
branch,
folder,
extension,
depth,
}: PersistLocalTreeArgs) => {
}: PersistLocalTreeArgs) {
await localForage.setItem<LocalTree>(
getLocalKey({ branch, folder, extension, depth }),
localTree,
);
};
}
export const getLocalTree = async ({
export async function getLocalTree({
localForage,
branch,
folder,
extension,
depth,
}: GetLocalTreeArgs) => {
}: GetLocalTreeArgs) {
const localTree = await localForage.getItem<LocalTree>(
getLocalKey({ branch, folder, extension, depth }),
);
return localTree;
};
}
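
persistLocalTree and getLocalTree cache a branch's file listing in localForage under a key derived from branch, folder, extension, and depth. A hedged sketch, assuming LocalTree is roughly { head, files } and that the import path is correct:

import localForage from 'localforage';
import { persistLocalTree, getLocalTree } from './implementation'; // path assumed

const treeArgs = { localForage, branch: 'master', folder: 'content/posts', extension: 'md', depth: 1 };

async function demoLocalTreeCache() {
  await persistLocalTree({ ...treeArgs, localTree: { head: 'abc123', files: [] } });
  return getLocalTree(treeArgs); // resolves to the cached { head: 'abc123', files: [] } on a later load
}
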
type GetDiffFromLocalTreeMethods = {
getDifferences: (
@ -387,14 +387,14 @@ type GetDiffFromLocalTreeArgs = GetDiffFromLocalTreeMethods & {
depth: number;
};
const getDiffFromLocalTree = async ({
async function getDiffFromLocalTree({
branch,
localTree,
folder,
getDifferences,
filterFile,
getFileId,
}: GetDiffFromLocalTreeArgs) => {
}: GetDiffFromLocalTreeArgs) {
const diff = await getDifferences(branch.sha, localTree.head);
const diffFiles = diff
.filter(d => d.oldPath?.startsWith(folder) || d.newPath?.startsWith(folder))
@ -441,7 +441,7 @@ const getDiffFromLocalTree = async ({
);
return diffFilesWithIds;
};
}
type AllEntriesByFolderArgs = GetKeyArgs &
GetDiffFromLocalTreeMethods & {
@ -458,7 +458,7 @@ type AllEntriesByFolderArgs = GetKeyArgs &
localForage: LocalForage;
};
export const allEntriesByFolder = async ({
export async function allEntriesByFolder({
listAllFiles,
readFile,
readFileMetadata,
@ -473,8 +473,8 @@ export const allEntriesByFolder = async ({
getDifferences,
getFileId,
filterFile,
}: AllEntriesByFolderArgs) => {
const listAllFilesAndPersist = async () => {
}: AllEntriesByFolderArgs) {
async function listAllFilesAndPersist() {
const files = await listAllFiles(folder, extension, depth);
const branch = await getDefaultBranch();
await persistLocalTree({
@ -489,9 +489,9 @@ export const allEntriesByFolder = async ({
folder,
});
return files;
};
}
const listFiles = async () => {
async function listFiles() {
const localTree = await getLocalTree({ localForage, branch, folder, extension, depth });
if (localTree) {
const branch = await getDefaultBranch();
@ -526,8 +526,6 @@ export const allEntriesByFolder = async ({
// return local copy
return localTree.files;
} else {
// refresh local copy
const identity = (file: { path: string }) => file.path;
const deleted = diff.reduce((acc, d) => {
acc[d.path] = d.deleted;
return acc;
@ -536,9 +534,9 @@ export const allEntriesByFolder = async ({
unionBy(
diff.filter(d => !deleted[d.path]),
localTree.files.filter(f => !deleted[f.path]),
identity,
file => file.path,
),
identity,
file => file.path,
);
await persistLocalTree({
@ -555,8 +553,8 @@ export const allEntriesByFolder = async ({
} else {
return listAllFilesAndPersist();
}
};
}
const files = await listFiles();
return fetchFiles(files, readFile, readFileMetadata, apiName);
};
}
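
When a cached tree exists, the else branch above refreshes it by applying the diff since the cached head: paths marked deleted drop out of both lists, and changed files take precedence over their cached copies (the surrounding sort and persist steps are omitted here). Condensed into a standalone sketch with simplified types:

import unionBy from 'lodash/unionBy';

type TreeFile = { path: string; id: string; deleted?: boolean };

function mergeTrees(diffFiles: TreeFile[], cachedFiles: TreeFile[]): TreeFile[] {
  const deleted = diffFiles.reduce((acc, d) => {
    acc[d.path] = Boolean(d.deleted);
    return acc;
  }, {} as Record<string, boolean>);
  return unionBy(
    diffFiles.filter(d => !deleted[d.path]),
    cachedFiles.filter(f => !deleted[f.path]),
    file => file.path,
  );
}
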

View File

@ -1,5 +1,8 @@
const absolutePath = new RegExp('^(?:[a-z]+:)?//', 'i');
const normalizePath = (path: string) => path.replace(/[\\/]+/g, '/');
function normalizePath(path: string) {
return path.replace(/[\\/]+/g, '/');
}
export function isAbsolutePath(path: string) {
return absolutePath.test(path);

View File

@ -1,14 +1,21 @@
import flow from 'lodash/flow';
export const then = <T, V>(fn: (r: T) => V) => (p: Promise<T>) => Promise.resolve(p).then(fn);
export function then<T, V>(fn: (r: T) => V) {
return (p: Promise<T>) => Promise.resolve(p).then(fn);
}
const filterPromiseSymbol = Symbol('filterPromiseSymbol');
export const onlySuccessfulPromises = (promises: Promise<unknown>[]) => {
export function onlySuccessfulPromises(promises: Promise<unknown>[]) {
return Promise.all(promises.map(p => p.catch(() => filterPromiseSymbol))).then(results =>
results.filter(result => result !== filterPromiseSymbol),
);
};
}
const wrapFlowAsync = (fn: Function) => async (arg: unknown) => fn(await arg);
export const flowAsync = (fns: Function[]) => flow(fns.map(fn => wrapFlowAsync(fn)));
function wrapFlowAsync(fn: Function) {
return async (arg: unknown) => fn(await arg);
}
export function flowAsync(fns: Function[]) {
return flow(fns.map(fn => wrapFlowAsync(fn)));
}
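
onlySuccessfulPromises maps rejections to a private sentinel symbol and filters them back out, and flowAsync composes possibly-async functions left to right. A hedged sketch; the import path is assumed:

import { onlySuccessfulPromises, flowAsync } from './promise'; // path assumed

async function demo() {
  const values = await onlySuccessfulPromises([
    Promise.resolve(1),
    Promise.reject(new Error('boom')),
    Promise.resolve(3),
  ]); // [1, 3]

  const addThenDouble = flowAsync([async (n: number) => n + 1, (n: number) => n * 2]);
  return addThenDouble(2); // resolves to 6
}
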

View File

@ -3,15 +3,16 @@ import curry from 'lodash/curry';
import flow from 'lodash/flow';
import isString from 'lodash/isString';
const isAbortControllerSupported = () => {
function isAbortControllerSupported() {
if (typeof window !== 'undefined') {
return !!window.AbortController;
}
return false;
};
}
const timeout = 60;
const fetchWithTimeout = (input, init) => {
function fetchWithTimeout(input, init) {
if ((init && init.signal) || !isAbortControllerSupported()) {
return fetch(input, init);
}
@ -28,42 +29,47 @@ const fetchWithTimeout = (input, init) => {
}
throw e;
});
};
}
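
fetchWithTimeout only adds its own signal when the caller did not pass one and AbortController is available; the elided body presumably follows the usual abort-after-a-timeout pattern, roughly as below. This is a sketch, not this module's exact code; it assumes the timeout constant is in seconds and omits the error translation visible in the catch block above:

function fetchWithAbortTimeout(input: RequestInfo, init: RequestInit = {}, seconds = 60) {
  const controller = new AbortController();
  const timeoutId = setTimeout(() => controller.abort(), seconds * 1000);
  return fetch(input, { ...init, signal: controller.signal }).finally(() => clearTimeout(timeoutId));
}
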
const decodeParams = paramsString =>
List(paramsString.split('&'))
function decodeParams(paramsString) {
return List(paramsString.split('&'))
.map(s => List(s.split('=')).map(decodeURIComponent))
.update(Map);
}
const fromURL = wholeURL => {
function fromURL(wholeURL) {
const [url, allParamsString] = wholeURL.split('?');
return Map({ url, ...(allParamsString ? { params: decodeParams(allParamsString) } : {}) });
};
}
const fromFetchArguments = (wholeURL, options) => {
function fromFetchArguments(wholeURL, options) {
return fromURL(wholeURL).merge(
(options ? fromJS(options) : Map()).remove('url').remove('params'),
);
};
}
const encodeParams = params =>
params
function encodeParams(params) {
return params
.entrySeq()
.map(([k, v]) => `${encodeURIComponent(k)}=${encodeURIComponent(v)}`)
.join('&');
}
const toURL = req =>
`${req.get('url')}${req.get('params') ? `?${encodeParams(req.get('params'))}` : ''}`;
function toURL(req) {
return `${req.get('url')}${req.get('params') ? `?${encodeParams(req.get('params'))}` : ''}`;
}
const toFetchArguments = req => [
toURL(req),
req
.remove('url')
.remove('params')
.toJS(),
];
function toFetchArguments(req) {
return [
toURL(req),
req
.remove('url')
.remove('params')
.toJS(),
];
}
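
fromURL and decodeParams split a URL into an Immutable Map of url plus params, while encodeParams, toURL, and toFetchArguments reassemble it into the arguments handed to fetch. These helpers are private to the module; an illustrative round trip with a single query parameter (Immutable.Map does not guarantee key order, so one parameter keeps the example exact):

// inside this module:
const req = fromURL('https://api.github.com/pulls?state=open');
// Map { url: 'https://api.github.com/pulls', params: Map { state: 'open' } }

toURL(req);            // 'https://api.github.com/pulls?state=open'
toFetchArguments(req); // ['https://api.github.com/pulls?state=open', {}]
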
const maybeRequestArg = req => {
function maybeRequestArg(req) {
if (isString(req)) {
return fromURL(req);
}
@ -71,9 +77,15 @@ const maybeRequestArg = req => {
return fromJS(req);
}
return Map();
};
const ensureRequestArg = func => req => func(maybeRequestArg(req));
const ensureRequestArg2 = func => (arg, req) => func(arg, maybeRequestArg(req));
}
function ensureRequestArg(func) {
return req => func(maybeRequestArg(req));
}
function ensureRequestArg2(func) {
return (arg, req) => func(arg, maybeRequestArg(req));
}
// This actually performs the built request
const performRequest = ensureRequestArg(req => {
@ -84,9 +96,14 @@ const performRequest = ensureRequestArg(req => {
// Each of the following functions takes options and returns another
// function that performs the requested action on a request.
const getCurriedRequestProcessor = flow([ensureRequestArg2, curry]);
const getPropSetFunction = path => getCurriedRequestProcessor((val, req) => req.setIn(path, val));
const getPropMergeFunction = path =>
getCurriedRequestProcessor((obj, req) => req.updateIn(path, (p = Map()) => p.merge(obj)));
function getPropSetFunction(path) {
return getCurriedRequestProcessor((val, req) => req.setIn(path, val));
}
function getPropMergeFunction(path) {
return getCurriedRequestProcessor((obj, req) => req.updateIn(path, (p = Map()) => p.merge(obj)));
}
const withMethod = getPropSetFunction(['method']);
const withBody = getPropSetFunction(['body']);