Open Authoring bugfixes and pagination improvements (#2523)

* Fix handling of displayURLs which are strings

* Add fromFetchArguments to unsentRequest

* Add parseLinkHeader to backendUtil

* Handle paginated endpoints in GitHub API

* Rename fork workflow to Open Authoring across the whole repo

* Fixes for bugs in GitHub API introduced by Open Authoring changes

* Fix getDeployPreview

* Fix incorrect auth header formatting in the GitHub implementation

cf. https://github.com/netlify/netlify-cms/pull/2456#discussion_r309633387

* Remove unused and broken method from GitHub API

cf. https://github.com/netlify/netlify-cms/pull/2456#discussion_r308687145

* Fix editorialWorkflowGit method in GitHub API

* Request published entry content from origin repo

* Better error when deleting a published post in Open Authoring

* Rename to Open Authoring in fork request message

Also adds a note to the fork request message that an existing fork of
the same repo will be used automatically.

* fix linting
This commit is contained in:
Benaiah Mischenko
2019-08-24 10:54:59 -07:00
committed by Shawn Erquhart
parent 66da66affd
commit 34e1f09105
16 changed files with 223 additions and 151 deletions

View File

@ -1,6 +1,8 @@
import { get } from 'lodash';
import { flow, fromPairs, get } from 'lodash';
import { map } from 'lodash/fp';
import { fromJS } from 'immutable';
import { fileExtension } from './path';
import unsentRequest from './unsentRequest';
// Curried filter: given a file extension and a property name (or lodash-style
// property path), return a function that keeps only the array elements whose
// named property has that file extension.
export const filterByPropExtension = (extension, propName) => arr =>
  arr.filter(entry => {
    const propValue = get(entry, propName);
    return fileExtension(propValue) === extension;
  });
@ -40,3 +42,36 @@ export const parseResponse = async (res, { expectingOk = true, format = 'text' }
};
// Curried convenience wrapper around `parseResponse`: fix the parsing options
// first, then apply the resulting one-argument function to each response.
export const responseParser = opts => response => parseResponse(response, opts);
// Parse an RFC 5988 `Link` response header (as sent by paginated GitHub API
// endpoints) into a plain object mapping each `rel` name to its URL, e.g.
// `{ next: 'https://…?page=2', last: 'https://…?page=34' }`.
//
// Literal `+` characters in the URLs are rewritten to `%20` — presumably
// because the upstream API encodes spaces as `+` while follow-up requests
// expect percent-encoding (carried over from the original implementation;
// confirm against the consuming request code).
//
// Like the original lodash-based version, this throws a TypeError if a
// segment lacks a `rel="…"` attribute or a `<…>`-wrapped URL.
export const parseLinkHeader = linksString =>
  linksString
    .split(',')
    .map(segment => segment.trim().split(';'))
    .reduce((links, [linkStr, keyStr]) => {
      const rel = keyStr.match(/rel="(.*?)"/)[1];
      links[rel] = linkStr
        .trim()
        .match(/<(.*?)>/)[1]
        .replace(/\+/g, '%20');
      return links;
    }, {});
// Lazily iterate over every page of a paginated endpoint. The returned object
// implements the async-iterable protocol: each `next()` performs exactly one
// request and yields the raw response, then follows the URL advertised under
// `rel="<linkHeaderRelName>"` in the `Link` response header until no further
// page is advertised.
export const getPaginatedRequestIterator = (url, options = {}, linkHeaderRelName = 'next') => {
  // The not-yet-performed request for the upcoming page; falsy once exhausted.
  let pendingRequest = unsentRequest.fromFetchArguments(url, options);

  const fetchNextPage = async () => {
    if (!pendingRequest) {
      return { done: true };
    }
    const response = await unsentRequest.performRequest(pendingRequest);
    const linkHeader = response.headers.get('Link');
    const followURL = linkHeader && parseLinkHeader(linkHeader)[linkHeaderRelName];
    pendingRequest = followURL ? unsentRequest.fromURL(followURL) : null;
    return { value: response };
  };

  return {
    [Symbol.asyncIterator]: () => ({ next: fetchNextPage }),
  };
};

View File

@ -11,7 +11,13 @@ import {
then,
} from './promise';
import unsentRequest from './unsentRequest';
import { filterByPropExtension, parseResponse, responseParser } from './backendUtil';
import {
filterByPropExtension,
getPaginatedRequestIterator,
parseLinkHeader,
parseResponse,
responseParser,
} from './backendUtil';
import loadScript from './loadScript';
import getBlobSHA from './getBlobSHA';
@ -33,6 +39,7 @@ export const NetlifyCmsLibUtil = {
then,
unsentRequest,
filterByPropExtension,
parseLinkHeader,
parseResponse,
responseParser,
loadScript,
@ -56,6 +63,8 @@ export {
then,
unsentRequest,
filterByPropExtension,
parseLinkHeader,
getPaginatedRequestIterator,
parseResponse,
responseParser,
loadScript,

View File

@ -13,6 +13,12 @@ const fromURL = wholeURL => {
return Map({ url, ...(allParamsString ? { params: decodeParams(allParamsString) } : {}) });
};
// Build an unsent-request map from standard `fetch(url, options)` arguments.
// `url` and `params` keys are stripped from the options because those pieces
// are owned by the URL map produced by `fromURL`.
const fromFetchArguments = (wholeURL, options) => {
  const optionsMap = options ? fromJS(options) : Map();
  return fromURL(wholeURL).merge(optionsMap.remove('url').remove('params'));
};
const encodeParams = params =>
params
.entrySeq()
@ -25,8 +31,8 @@ const toURL = req =>
// Convert an unsent-request map back into `[url, options]` arguments suitable
// for `fetch`. The `url`/`params` entries are folded into the URL string by
// `toURL`, so they are dropped from the options object. (Immutable.js `delete`
// is an alias of `remove`, and removing an absent key is a no-op, so the
// single `.remove(...)` pair is equivalent to the original chain.)
const toFetchArguments = req => [
  toURL(req),
  req
    .remove('url')
    .remove('params')
    .toJS(),
];
@ -85,6 +91,7 @@ const withTimestamp = ensureRequestArg(req => withParams({ ts: new Date().getTim
export default {
toURL,
fromURL,
fromFetchArguments,
performRequest,
withMethod,
withDefaultMethod,