Feat: editorial workflow bitbucket gitlab (#3014)

* refactor: typescript the backends

* feat: support multiple files upload for GitLab and BitBucket

* fix: load entry media files from media folder or UI state

* chore: cleanup log message

* chore: code cleanup

* refactor: typescript the test backend

* refactor: cleanup getEntry unused variables

* refactor: moved shared backend code to lib util

* chore: rename files to preserve history

* fix: bind readFile method to API classes

* test(e2e): switch to chrome in cypress tests

* refactor: extract common api methods

* refactor: remove most of immutable js usage from backends

* feat(backend-gitlab): initial editorial workflow support

* feat(backend-gitlab): implement missing workflow methods

* chore: fix lint error

* feat(backend-gitlab): support files deletion

* test(e2e): add gitlab cypress tests

* feat(backend-bitbucket): implement missing editorial workflow methods

* test(e2e): add BitBucket backend e2e tests

* build: update node version to 12 on netlify builds

* fix(backend-bitbucket): extract BitBucket avatar url

* test: fix git-gateway AuthenticationPage test

* test(e2e): fix some backend tests

* test(e2e): fix tests

* test(e2e): add git-gateway editorial workflow test

* chore: code cleanup

* test(e2e): revert back to electron

* test(e2e): add non editorial workflow tests

* fix(git-gateway-gitlab): don't call unpublishedEntry in simple workflow

gitlab git-gateway doesn't support editorial workflow APIs yet. This change makes sure not to call them in simple workflow

* refactor(backend-bitbucket): switch to diffstat API instead of raw diff

* chore: fix test

* test(e2e): add more git-gateway tests

* fix: post rebase typescript fixes

* test(e2e): fix tests

* fix: fix parsing of content key and add tests

* refactor: rename test file

* test(unit): add getStatuses unit tests

* chore: update cypress

* docs: update beta docs
This commit is contained in:
Erez Rokah
2020-01-15 00:15:14 +02:00
committed by Shawn Erquhart
parent 4ff5bc2ee0
commit 6f221ab3c1
251 changed files with 70910 additions and 15974 deletions

View File

@ -1,55 +0,0 @@
// Ambient type declarations for the `netlify-cms-lib-util` package, consumed
// by TypeScript backends until the package ships its own types.
declare module 'netlify-cms-lib-util' {
  export const isAbsolutePath: (path: string) => boolean;
  export const basename: (path: string, extension?: string) => string;
  export const EDITORIAL_WORKFLOW_ERROR: 'EDITORIAL_WORKFLOW_ERROR';
  // getBlobSHA hashes via FileReader, so it resolves asynchronously to the digest.
  export const getBlobSHA: (blob: Blob) => Promise<string>;
  export interface CursorType {
    // `Cursor` is a value (const) in this module, so the type position must
    // reference the interface, not the const.
    create: (args: unknown) => CursorType;
    updateStore: (args: unknown) => void;
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    unwrapData: () => [Map<string, any>, CursorType];
    actions: Set<string>;
    data: Map<string, unknown>;
    meta: Map<string, unknown>;
  }
  export const Cursor: CursorType;
  // Ambient declarations cannot carry initializers; declare the type only.
  export const CURSOR_COMPATIBILITY_SYMBOL: unique symbol;
  export class APIError extends Error {
    status: number;
    // `message` must be required: a required parameter cannot follow an optional one.
    constructor(message: string, responseStatus: number, backend: string);
  }
  export class EditorialWorkflowError extends Error {
    constructor(message: string, notUnderEditorialWorkflow: boolean);
    notUnderEditorialWorkflow: boolean;
  }
  export const getAllResponses: (url: string, options: RequestInit) => Promise<Response[]>;
  // eslint-disable-next-line @typescript-eslint/ban-types
  export const flowAsync: (funcs: Function[]) => () => Promise<unknown>;
  export const localForage: {
    setItem: <T>(key: string, item: T) => Promise<T>;
    getItem: <T>(key: string) => Promise<T | null>;
    removeItem: (key: string) => Promise<void>;
  };
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  export const onlySuccessfulPromises: (...args: any[]) => any;
  export const resolvePromiseProperties: (
    object: Record<string, Promise<unknown>>,
  ) => Promise<unknown>;
  export type ResponseParser<T> = (res: Response) => Promise<T>;
  // `ResponseParser` is generic, so a type argument is mandatory here.
  export const responseParser: ({
    format,
  }: {
    format: 'blob' | 'json' | 'text';
  }) => ResponseParser<unknown>;
}

View File

@ -14,11 +14,12 @@
"scripts": {
"develop": "yarn build:esm --watch",
"build": "cross-env NODE_ENV=production webpack",
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward"
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward --extensions \".js,.jsx,.ts,.tsx\""
},
"dependencies": {
"js-sha256": "^0.9.0",
"localforage": "^1.7.3"
"localforage": "^1.7.3",
"semaphore": "^1.1.0"
},
"peerDependencies": {
"immutable": "^3.7.6",

View File

@ -0,0 +1,78 @@
export const CMS_BRANCH_PREFIX = 'cms';
export const DEFAULT_PR_BODY = 'Automatically generated by Netlify CMS';
export const MERGE_COMMIT_MESSAGE = 'Automatically generated. Merged on Netlify CMS.';

const NETLIFY_CMS_LABEL_PREFIX = 'netlify-cms/';

// A label is CMS-managed when it carries the `netlify-cms/` prefix.
export const isCMSLabel = (label: string) => label.startsWith(NETLIFY_CMS_LABEL_PREFIX);

// `netlify-cms/<status>` -> `<status>`
export const labelToStatus = (label: string) => label.slice(NETLIFY_CMS_LABEL_PREFIX.length);

// `<status>` -> `netlify-cms/<status>`
export const statusToLabel = (status: string) => NETLIFY_CMS_LABEL_PREFIX + status;

// A content key identifies an editorial-workflow entry as `<collection>/<slug>`.
export const generateContentKey = (collectionName: string, slug: string) =>
  [collectionName, slug].join('/');

// Split on the FIRST separator only — the slug itself may contain slashes.
export const parseContentKey = (contentKey: string) => {
  const separatorIndex = contentKey.indexOf('/');
  return {
    collection: contentKey.slice(0, separatorIndex),
    slug: contentKey.slice(separatorIndex + 1),
  };
};
// Error shape produced by the fetch helpers: a plain Error enriched with the
// HTTP status code of the failed response.
export interface FetchError extends Error {
  status: number;
}

/**
 * Read a file's content, using localForage as a cache keyed by the file id.
 *
 * @param id           content identifier (e.g. a blob SHA); when falsy, caching is skipped
 * @param fetchContent performs the actual fetch on a cache miss
 * @param localForage  storage instance used as the cache
 * @param isText       whether the content is text (`string`) or binary (`Blob`)
 * @returns the cached or freshly fetched content
 */
export const readFile = async (
  id: string | null | undefined,
  fetchContent: () => Promise<string | Blob>,
  localForage: LocalForage,
  isText: boolean,
) => {
  // NOTE(review): the `gh.` key prefix looks like a GitHub-era cache key kept
  // for backwards compatibility — confirm before changing.
  const key = id ? (isText ? `gh.${id}` : `gh.${id}.blob`) : null;
  const cached = key ? await localForage.getItem<string | Blob>(key) : null;
  if (cached) {
    return cached;
  }
  const content = await fetchContent();
  if (key) {
    // Fire-and-forget cache write: the caller gets the content without waiting
    // for (or observing errors from) the storage layer.
    localForage.setItem(key, content);
  }
  return content;
};
/**
 * Keywords for inferring a status that will provide a deploy preview URL.
 */
const PREVIEW_CONTEXT_KEYWORDS = ['deploy'];

/**
 * Check a given status context string to determine if it provides a link to a
 * deploy preview. Checks for an exact match against `previewContext` if given,
 * otherwise checks for inclusion of a value from `PREVIEW_CONTEXT_KEYWORDS`.
 */
export const isPreviewContext = (context: string, previewContext: string) => {
  if (!previewContext) {
    // No explicit context configured: fall back to keyword inference.
    return PREVIEW_CONTEXT_KEYWORDS.some(keyword => context.includes(keyword));
  }
  return context === previewContext;
};

export enum PreviewState {
  Other = 'other',
  Success = 'success',
}

/**
 * Retrieve a deploy preview URL from an array of statuses. By default, a
 * matching status is inferred via `isPreviewContext`.
 */
export const getPreviewStatus = (
  statuses: {
    context: string;
    target_url: string;
    state: PreviewState;
  }[],
  previewContext: string,
) => statuses.find(status => isPreviewContext(status.context, previewContext));

View File

@ -1,7 +1,12 @@
export const API_ERROR = 'API_ERROR';
export default class APIError extends Error {
constructor(message, status, api, meta = {}) {
message: string;
status: null | number;
api: string;
meta: {};
constructor(message: string, status: null | number, api: string, meta = {}) {
super(message);
this.message = message;
this.status = status;

View File

@ -1,122 +0,0 @@
import { fromJS, Map, Set } from 'immutable';
// Normalize a plain JS object (or undefined) to an Immutable Map; values that
// do not convert to a Map (arrays, primitives) are rejected.
const jsToMap = obj => {
  if (obj === undefined) {
    return Map();
  }
  const immutableObj = fromJS(obj);
  if (!Map.isMap(immutableObj)) {
    throw new Error('Object must be equivalent to a Map.');
  }
  return immutableObj;
};

// Only these meta keys survive cursor creation; anything else is dropped.
const knownMetaKeys = Set(['index', 'count', 'pageSize', 'pageCount', 'usingOldPaginationAPI']);
const filterUnknownMetaKeys = meta => meta.filter((v, k) => knownMetaKeys.has(k));

/*
createCursorMap takes one of three signatures:
- () -> cursor with empty actions, data, and meta
- (cursorMap: <object/Map with optional actions, data, and meta keys>) -> cursor
- (actions: <array/List>, data: <object/Map>, meta: <optional object/Map>) -> cursor
*/
const createCursorMap = (...args) => {
  const { actions, data, meta } =
    args.length === 1
      ? jsToMap(args[0]).toObject()
      : { actions: args[0], data: args[1], meta: args[2] };
  return Map({
    // actions are a Set, rather than a List, to ensure an efficient .has
    actions: Set(actions),
    // data and meta are Maps
    data: jsToMap(data),
    meta: jsToMap(meta).update(filterUnknownMetaKeys),
  });
};

const hasAction = (cursorMap, action) => cursorMap.hasIn(['actions', action]);

// Build a Map of action name -> handler(action) for every action in the cursor.
const getActionHandlers = (cursorMap, handler) =>
  cursorMap
    .get('actions', Set())
    .toMap()
    .map(action => handler(action));

// The cursor logic is entirely functional, so this class simply
// provides a chainable interface
export default class Cursor {
  static create(...args) {
    return new Cursor(...args);
  }

  constructor(...args) {
    // Copy constructor: a Cursor passed in is returned unchanged.
    if (args[0] instanceof Cursor) {
      return args[0];
    }
    this.store = createCursorMap(...args);
    this.actions = this.store.get('actions');
    this.data = this.store.get('data');
    this.meta = this.store.get('meta');
  }

  updateStore(...args) {
    return new Cursor(this.store.update(...args));
  }
  updateInStore(...args) {
    return new Cursor(this.store.updateIn(...args));
  }

  hasAction(action) {
    return hasAction(this.store, action);
  }
  addAction(action) {
    return this.updateStore('actions', actions => actions.add(action));
  }
  removeAction(action) {
    return this.updateStore('actions', actions => actions.delete(action));
  }
  setActions(actions) {
    return this.updateStore(store => store.set('actions', Set(actions)));
  }
  mergeActions(actions) {
    return this.updateStore('actions', oldActions => oldActions.union(actions));
  }
  getActionHandlers(handler) {
    return getActionHandlers(this.store, handler);
  }

  setData(data) {
    return new Cursor(this.store.set('data', jsToMap(data)));
  }
  mergeData(data) {
    return new Cursor(this.store.mergeIn(['data'], jsToMap(data)));
  }
  wrapData(data) {
    // Stash the current data under a reserved key so unwrapData can restore it.
    return this.updateStore('data', oldData => jsToMap(data).set('wrapped_cursor_data', oldData));
  }
  unwrapData() {
    // Returns [unwrapped data, cursor restored to the previously wrapped data].
    return [
      this.store.get('data').delete('wrapped_cursor_data'),
      this.updateStore('data', data => data.get('wrapped_cursor_data')),
    ];
  }
  clearData() {
    return this.updateStore('data', () => Map());
  }

  setMeta(meta) {
    return this.updateStore(store => store.set('meta', jsToMap(meta)));
  }
  mergeMeta(meta) {
    return this.updateStore(store => store.update('meta', oldMeta => oldMeta.merge(jsToMap(meta))));
  }
}

// This is a temporary hack to allow cursors to be added to the
// interface between backend.js and backends without modifying old
// backends at all. This should be removed in favor of wrapping old
// backends with a compatibility layer, as part of the backend API
// refactor.
export const CURSOR_COMPATIBILITY_SYMBOL = Symbol('cursor key for compatibility with old backends');

View File

@ -0,0 +1,161 @@
import { fromJS, Map, Set } from 'immutable';
// Shape of the immutable Map that backs a Cursor.
type CursorStoreObject = {
  actions: Set<string>;
  data: Map<string, unknown>;
  meta: Map<string, unknown>;
};

// Typed facade over the immutable Map so Cursor methods get key-aware
// get/set signatures instead of immutable's untyped ones.
export type CursorStore = {
  get<K extends keyof CursorStoreObject>(
    key: K,
    defaultValue?: CursorStoreObject[K],
  ): CursorStoreObject[K];
  getIn<V>(path: string[]): V;
  set<K extends keyof CursorStoreObject, V extends CursorStoreObject[K]>(
    key: K,
    value: V,
  ): CursorStoreObject[K];
  setIn(path: string[], value: unknown): CursorStore;
  hasIn(path: string[]): boolean;
  mergeIn(path: string[], value: unknown): CursorStore;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  update: (...args: any[]) => CursorStore;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  updateIn: (...args: any[]) => CursorStore;
};

type ActionHandler = (action: string) => unknown;

// Normalize a plain object (or undefined) to an immutable Map; values that do
// not convert to a Map (arrays, primitives) are rejected.
const jsToMap = (obj: {}) => {
  if (obj === undefined) {
    return Map();
  }
  const immutableObj = fromJS(obj);
  if (!Map.isMap(immutableObj)) {
    throw new Error('Object must be equivalent to a Map.');
  }
  return immutableObj;
};

// Only these meta keys survive cursor creation; anything else is dropped.
const knownMetaKeys = Set(['index', 'count', 'pageSize', 'pageCount', 'usingOldPaginationAPI']);
const filterUnknownMetaKeys = (meta: Map<string, string>) =>
  meta.filter((_v, k) => knownMetaKeys.has(k as string));

/*
  createCursorMap takes one of three signatures:
  - () -> cursor with empty actions, data, and meta
  - (cursorMap: <object/Map with optional actions, data, and meta keys>) -> cursor
  - (actions: <array/List>, data: <object/Map>, meta: <optional object/Map>) -> cursor
*/
const createCursorStore = (...args: {}[]) => {
  const { actions, data, meta } =
    args.length === 1
      ? jsToMap(args[0]).toObject()
      : { actions: args[0], data: args[1], meta: args[2] };
  return Map({
    // actions are a Set, rather than a List, to ensure an efficient .has
    actions: Set(actions),
    // data and meta are Maps
    data: jsToMap(data),
    meta: jsToMap(meta).update(filterUnknownMetaKeys),
  }) as CursorStore;
};

const hasAction = (store: CursorStore, action: string) => store.hasIn(['actions', action]);

// Build a Map of action name -> handler(action) for every action in the store.
const getActionHandlers = (store: CursorStore, handler: ActionHandler) =>
  store
    .get('actions', Set<string>())
    .toMap()
    .map(action => handler(action as string));
// The cursor logic is entirely functional, so this class simply
// provides a chainable interface
export default class Cursor {
  // Optional only because the copy-constructor path below returns the
  // existing instance before any assignment happens.
  store?: CursorStore;
  actions?: Set<string>;
  data?: Map<string, unknown>;
  meta?: Map<string, unknown>;

  static create(...args: {}[]) {
    return new Cursor(...args);
  }

  constructor(...args: {}[]) {
    if (args[0] instanceof Cursor) {
      // Copy constructor: a Cursor passed in is returned unchanged.
      return args[0] as Cursor;
    }
    this.store = createCursorStore(...args);
    this.actions = this.store.get('actions');
    this.data = this.store.get('data');
    this.meta = this.store.get('meta');
  }

  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  updateStore(...args: any[]) {
    return new Cursor(this.store!.update(...args));
  }
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  updateInStore(...args: any[]) {
    return new Cursor(this.store!.updateIn(...args));
  }

  hasAction(action: string) {
    return hasAction(this.store!, action);
  }
  addAction(action: string) {
    return this.updateStore('actions', (actions: Set<string>) => actions.add(action));
  }
  removeAction(action: string) {
    return this.updateStore('actions', (actions: Set<string>) => actions.delete(action));
  }
  setActions(actions: Iterable<string>) {
    return this.updateStore((store: CursorStore) => store.set('actions', Set<string>(actions)));
  }
  mergeActions(actions: Set<string>) {
    return this.updateStore('actions', (oldActions: Set<string>) => oldActions.union(actions));
  }
  getActionHandlers(handler: ActionHandler) {
    return getActionHandlers(this.store!, handler);
  }

  setData(data: {}) {
    return new Cursor(this.store!.set('data', jsToMap(data)));
  }
  mergeData(data: {}) {
    return new Cursor(this.store!.mergeIn(['data'], jsToMap(data)));
  }
  wrapData(data: {}) {
    // Stash the current data under a reserved key so unwrapData can restore it.
    return this.updateStore('data', (oldData: Map<string, unknown>) =>
      jsToMap(data).set('wrapped_cursor_data', oldData),
    );
  }
  unwrapData() {
    // Returns [unwrapped data, cursor restored to the previously wrapped data].
    return [
      this.store!.get('data').delete('wrapped_cursor_data'),
      this.updateStore('data', (data: Map<string, unknown>) => data.get('wrapped_cursor_data')),
    ] as [Map<string, unknown>, Cursor];
  }
  clearData() {
    return this.updateStore('data', () => Map());
  }

  setMeta(meta: {}) {
    return this.updateStore((store: CursorStore) => store.set('meta', jsToMap(meta)));
  }
  mergeMeta(meta: {}) {
    return this.updateStore((store: CursorStore) =>
      store.update('meta', (oldMeta: Map<string, unknown>) => oldMeta.merge(jsToMap(meta))),
    );
  }
}

// This is a temporary hack to allow cursors to be added to the
// interface between backend.js and backends without modifying old
// backends at all. This should be removed in favor of wrapping old
// backends with a compatibility layer, as part of the backend API
// refactor.
export const CURSOR_COMPATIBILITY_SYMBOL = Symbol('cursor key for compatibility with old backends');

View File

@ -0,0 +1,65 @@
import * as api from '../API';
// Unit tests for the shared editorial-workflow API helpers in `../API`.
describe('Api', () => {
  describe('generateContentKey', () => {
    it('should generate content key', () => {
      expect(api.generateContentKey('posts', 'dir1/dir2/post-title')).toBe(
        'posts/dir1/dir2/post-title',
      );
    });
  });

  describe('parseContentKey', () => {
    it('should parse content key', () => {
      expect(api.parseContentKey('posts/dir1/dir2/post-title')).toEqual({
        collection: 'posts',
        slug: 'dir1/dir2/post-title',
      });
    });
  });

  describe('isCMSLabel', () => {
    it('should return true for CMS label', () => {
      expect(api.isCMSLabel('netlify-cms/draft')).toBe(true);
    });
    it('should return false for non CMS label', () => {
      expect(api.isCMSLabel('other/label')).toBe(false);
    });
  });

  describe('labelToStatus', () => {
    it('should get status from label', () => {
      expect(api.labelToStatus('netlify-cms/draft')).toBe('draft');
    });
  });

  describe('statusToLabel', () => {
    it('should generate label from status', () => {
      expect(api.statusToLabel('draft')).toBe('netlify-cms/draft');
    });
  });

  describe('isPreviewContext', () => {
    it('should return true for default preview context', () => {
      expect(api.isPreviewContext('deploy', '')).toBe(true);
    });
    it('should return false for non default preview context', () => {
      expect(api.isPreviewContext('other', '')).toBe(false);
    });
    it('should return true for custom preview context', () => {
      expect(api.isPreviewContext('ci/custom_preview', 'ci/custom_preview')).toBe(true);
    });
  });

  describe('getPreviewStatus', () => {
    it('should return preview status on matching context', () => {
      // `previewContext` is a required parameter — pass it explicitly.
      expect(api.getPreviewStatus([{ context: 'deploy' }], '')).toEqual({ context: 'deploy' });
    });
    // Description fixed: this asserts the NON matching case.
    it('should return undefined on non matching context', () => {
      expect(api.getPreviewStatus([{ context: 'other' }], '')).toBeUndefined();
    });
  });
});

View File

@ -1,6 +1,5 @@
import { parseLinkHeader, getAllResponses, getCollectionDepth } from '../backendUtil';
import { parseLinkHeader, getAllResponses, getPathDepth } from '../backendUtil';
import { oneLine } from 'common-tags';
import { Map } from 'immutable';
import nock from 'nock';
describe('parseLinkHeader', () => {
@ -71,12 +70,12 @@ describe('getAllResponses', () => {
});
});
describe('getCollectionDepth', () => {
it('should return 1 for collection with no path', () => {
expect(getCollectionDepth(Map({}))).toBe(1);
describe('getPathDepth', () => {
it('should return 1 for empty string', () => {
expect(getPathDepth('')).toBe(1);
});
it('should return 2 for collection with path of one nested folder', () => {
expect(getCollectionDepth(Map({ path: '{{year}}/{{slug}}' }))).toBe(2);
it('should return 2 for path of one nested folder', () => {
expect(getPathDepth('{{year}}/{{slug}}')).toBe(2);
});
});

View File

@ -0,0 +1,58 @@
import { getMediaAsBlob, getMediaDisplayURL } from '../implementation';
// Unit tests for the shared backend helpers in `../implementation`,
// exercised with stubbed readFile/semaphore callbacks.
describe('implementation', () => {
  describe('getMediaAsBlob', () => {
    it('should return response blob on non svg file', async () => {
      const blob = {};
      const readFile = jest.fn().mockResolvedValue(blob);

      await expect(getMediaAsBlob('static/media/image.png', 'sha', readFile)).resolves.toBe(blob);

      expect(readFile).toHaveBeenCalledTimes(1);
      expect(readFile).toHaveBeenCalledWith('static/media/image.png', 'sha', {
        parseText: false,
      });
    });

    it('should return text blob on svg file', async () => {
      const text = 'svg';
      const readFile = jest.fn().mockResolvedValue(text);

      await expect(getMediaAsBlob('static/media/logo.svg', 'sha', readFile)).resolves.toEqual(
        new Blob([text], { type: 'image/svg+xml' }),
      );

      expect(readFile).toHaveBeenCalledTimes(1);
      expect(readFile).toHaveBeenCalledWith('static/media/logo.svg', 'sha', {
        parseText: true,
      });
    });
  });

  describe('getMediaDisplayURL', () => {
    it('should return createObjectURL result', async () => {
      const blob = {};
      const readFile = jest.fn().mockResolvedValue(blob);
      // Stub semaphore that runs the taken callback synchronously.
      const semaphore = { take: jest.fn(callback => callback()), leave: jest.fn() };
      global.URL.createObjectURL = jest
        .fn()
        .mockResolvedValue('blob:http://localhost:8080/blob-id');

      await expect(
        getMediaDisplayURL({ path: 'static/media/image.png', id: 'sha' }, readFile, semaphore),
      ).resolves.toBe('blob:http://localhost:8080/blob-id');

      expect(semaphore.take).toHaveBeenCalledTimes(1);
      expect(semaphore.leave).toHaveBeenCalledTimes(1);
      expect(readFile).toHaveBeenCalledTimes(1);
      expect(readFile).toHaveBeenCalledWith('static/media/image.png', 'sha', {
        parseText: false,
      });
      expect(global.URL.createObjectURL).toHaveBeenCalledTimes(1);
      expect(global.URL.createObjectURL).toHaveBeenCalledWith(blob);
    });
  });
});

View File

@ -0,0 +1,20 @@
import unsentRequest from '../unsentRequest';
// Unit tests for the immutable request builder in `../unsentRequest`.
describe('unsentRequest', () => {
  describe('withHeaders', () => {
    it('should create new request with headers', () => {
      // Curried form: withHeaders(headers) returns a function over the request.
      expect(
        unsentRequest
          .withHeaders({ Authorization: 'token' })('path')
          .toJS(),
      ).toEqual({ url: 'path', headers: { Authorization: 'token' } });
    });

    it('should add headers to existing request', () => {
      expect(unsentRequest.withHeaders({ Authorization: 'token' }, 'path').toJS()).toEqual({
        url: 'path',
        headers: { Authorization: 'token' },
      });
    });
  });
});

View File

@ -1,10 +1,12 @@
import semaphore from 'semaphore';
export const asyncLock = () => {
export type AsyncLock = { release: () => void; acquire: () => Promise<boolean> };
export const asyncLock = (): AsyncLock => {
let lock = semaphore(1);
const acquire = (timeout = 15000) => {
const promise = new Promise(resolve => {
const promise = new Promise<boolean>(resolve => {
// this makes sure a caller doesn't get stuck forever awaiting on the lock
const timeoutId = setTimeout(() => {
// we reset the lock in that case to allow future consumers to use it without being blocked

View File

@ -3,11 +3,14 @@ import { map } from 'lodash/fp';
import { fromJS } from 'immutable';
import { fileExtension } from './path';
import unsentRequest from './unsentRequest';
import APIError from './APIError';
export const filterByPropExtension = (extension, propName) => arr =>
type Formatter = (res: Response) => Promise<string | Blob | unknown>;
export const filterByPropExtension = (extension: string, propName: string) => <T>(arr: T[]) =>
arr.filter(el => fileExtension(get(el, propName)) === extension);
const catchFormatErrors = (format, formatter) => res => {
const catchFormatErrors = (format: string, formatter: Formatter) => (res: Response) => {
try {
return formatter(res);
} catch (err) {
@ -18,34 +21,51 @@ const catchFormatErrors = (format, formatter) => res => {
};
const responseFormatters = fromJS({
json: async res => {
const contentType = res.headers.get('Content-Type');
json: async (res: Response) => {
const contentType = res.headers.get('Content-Type') || '';
if (!contentType.startsWith('application/json') && !contentType.startsWith('text/json')) {
throw new Error(`${contentType} is not a valid JSON Content-Type`);
}
return res.json();
},
text: async res => res.text(),
blob: async res => res.blob(),
}).mapEntries(([format, formatter]) => [format, catchFormatErrors(format, formatter)]);
text: async (res: Response) => res.text(),
blob: async (res: Response) => res.blob(),
}).mapEntries(([format, formatter]: [string, Formatter]) => [
format,
catchFormatErrors(format, formatter),
]);
export const parseResponse = async (res, { expectingOk = true, format = 'text' } = {}) => {
export const parseResponse = async (
res: Response,
{ expectingOk = true, format = 'text', apiName = '' },
) => {
let body;
try {
const formatter = responseFormatters.get(format, false);
if (!formatter) {
throw new Error(`${format} is not a supported response format.`);
}
body = await formatter(res);
} catch (err) {
throw new APIError(err.message, res.status, apiName);
}
if (expectingOk && !res.ok) {
throw new Error(`Expected an ok response, but received an error status: ${res.status}.`);
const isJSON = format === 'json';
const message = isJSON ? body.message || body.msg || body.error?.message : body;
throw new APIError(isJSON && message ? message : body, res.status, apiName);
}
const formatter = responseFormatters.get(format, false);
if (!formatter) {
throw new Error(`${format} is not a supported response format.`);
}
const body = await formatter(res);
return body;
};
export const responseParser = options => res => parseResponse(res, options);
export const responseParser = (options: {
expectingOk?: boolean;
format: string;
apiName: string;
}) => (res: Response) => parseResponse(res, options);
export const parseLinkHeader = flow([
linksString => linksString.split(','),
map(str => str.trim().split(';')),
map((str: string) => str.trim().split(';')),
map(([linkStr, keyStr]) => [
keyStr.match(/rel="(.*?)"/)[1],
linkStr
@ -56,7 +76,11 @@ export const parseLinkHeader = flow([
fromPairs,
]);
export const getAllResponses = async (url, options = {}, linkHeaderRelName = 'next') => {
export const getAllResponses = async (
url: string,
options: { headers?: {} } = {},
linkHeaderRelName = 'next',
) => {
const maxResponses = 30;
let responseCount = 1;
@ -78,7 +102,7 @@ export const getAllResponses = async (url, options = {}, linkHeaderRelName = 'ne
return pageResponses;
};
export const getCollectionDepth = collection => {
const depth = collection.get('path', '').split('/').length;
export const getPathDepth = (path: string) => {
const depth = path.split('/').length;
return depth;
};

View File

@ -1,9 +1,9 @@
import sha256 from 'js-sha256';
export default blob =>
export default (blob: Blob): Promise<string> =>
new Promise((resolve, reject) => {
const fr = new FileReader();
fr.onload = ({ target: { result } }) => resolve(sha256(result));
fr.onload = ({ target }) => resolve(sha256(target?.result));
fr.onerror = err => {
fr.abort();
reject(err);

View File

@ -0,0 +1,305 @@
import semaphore, { Semaphore } from 'semaphore';
import Cursor from './Cursor';
import { AsyncLock } from './asyncLock';
// A media file's display URL is either a ready-to-use string, an id/path pair
// that the backend resolves lazily, or a wrapper around another DisplayURL
// (the `largeMedia` variant).
export type DisplayURLObject = { id: string; path: string };
export type DisplayURL =
  | DisplayURLObject
  | string
  | { original: DisplayURL; path?: string; largeMedia?: string };

// Media file as surfaced by a backend implementation to the core app.
export interface ImplementationMediaFile {
  name: string;
  id: string;
  size?: number;
  displayURL?: DisplayURL;
  path: string;
  draft?: boolean;
  url?: string;
  file?: File;
}

// Minimal reference to a media file attached to an unpublished entry.
export interface UnpublishedEntryMediaFile {
  id: string;
  path: string;
}

// Raw entry as returned by a backend: file content plus workflow metadata.
export interface ImplementationEntry {
  data: string;
  file: { path: string; label?: string; id?: string | null };
  slug?: string;
  mediaFiles?: ImplementationMediaFile[];
  metaData?: { collection: string; status: string };
  isModification?: boolean;
}

// Structural subset of an Immutable.js Map used at the backend boundary.
export interface Map {
  get: <T>(key: string, defaultValue?: T) => T;
  getIn: <T>(key: string[], defaultValue?: T) => T;
  setIn: <T>(key: string[], value: T) => Map;
  set: <T>(key: string, value: T) => Map;
}

// Asset waiting to be persisted; binary assets carry the File object,
// otherwise content is available via toBase64.
export type AssetProxy = {
  path: string;
  fileObj?: File;
  toBase64?: () => Promise<string>;
};

export type Entry = { path: string; slug: string; raw: string };

// Options accompanying a persistEntry/persistMedia call.
export type PersistOptions = {
  newEntry?: boolean;
  parsedData?: { title: string; description: string };
  commitMessage: string;
  collectionName?: string;
  useWorkflow?: boolean;
  unpublished?: boolean;
  status?: string;
};

export type DeleteOptions = {};

export type Credentials = { token: string | {}; refresh_token?: string };

export type User = Credentials & {
  backendName?: string;
  login?: string;
  name: string;
  useOpenAuthoring?: boolean;
};

// Subset of the CMS configuration relevant to backend implementations.
export type Config = {
  backend: {
    repo?: string | null;
    open_authoring?: boolean;
    branch?: string;
    api_root?: string;
    squash_merges?: boolean;
    use_graphql?: boolean;
    preview_context?: string;
    identity_url?: string;
    gateway_url?: string;
    large_media_url?: string;
    use_large_media_transforms_in_media_library?: boolean;
  };
  media_folder: string;
  base_url?: string;
  site_id?: string;
};
// Contract every backend (GitHub, GitLab, BitBucket, git-gateway, ...) must
// implement. Optional members are capabilities not all backends support.
export interface Implementation {
  // Authentication lifecycle.
  authComponent: () => void;
  restoreUser: (user: User) => Promise<User>;
  authenticate: (credentials: Credentials) => Promise<User>;
  logout: () => Promise<void> | void | null;
  getToken: () => Promise<string | null>;

  // Entry reads.
  getEntry: (path: string) => Promise<ImplementationEntry>;
  entriesByFolder: (
    folder: string,
    extension: string,
    depth: number,
  ) => Promise<ImplementationEntry[]>;
  entriesByFiles: (files: ImplementationFile[]) => Promise<ImplementationEntry[]>;

  // Media handling.
  getMediaDisplayURL?: (displayURL: DisplayURL) => Promise<string>;
  getMedia: (folder?: string) => Promise<ImplementationMediaFile[]>;
  getMediaFile: (path: string) => Promise<ImplementationMediaFile>;

  // Writes.
  persistEntry: (obj: Entry, assetProxies: AssetProxy[], opts: PersistOptions) => Promise<void>;
  persistMedia: (file: AssetProxy, opts: PersistOptions) => Promise<ImplementationMediaFile>;
  deleteFile: (path: string, commitMessage: string) => Promise<void>;

  // Editorial workflow operations.
  unpublishedEntries: () => Promise<ImplementationEntry[]>;
  unpublishedEntry: (collection: string, slug: string) => Promise<ImplementationEntry>;
  updateUnpublishedEntryStatus: (
    collection: string,
    slug: string,
    newStatus: string,
  ) => Promise<void>;
  publishUnpublishedEntry: (collection: string, slug: string) => Promise<void>;
  deleteUnpublishedEntry: (collection: string, slug: string) => Promise<void>;
  getDeployPreview: (
    collectionName: string,
    slug: string,
  ) => Promise<{ url: string; status: string } | null>;

  // Optional bulk/pagination support.
  allEntriesByFolder?: (
    folder: string,
    extension: string,
    depth: number,
  ) => Promise<ImplementationEntry[]>;
  traverseCursor?: (
    cursor: Cursor,
    action: string,
  ) => Promise<{ entries: ImplementationEntry[]; cursor: Cursor }>;
}
// Cap on simultaneous file downloads per fetch batch.
const MAX_CONCURRENT_DOWNLOADS = 10;

// Reference to a file a backend should read.
export type ImplementationFile = {
  id?: string | null | undefined;
  label?: string;
  path: string;
};

// Workflow metadata stored alongside an unpublished entry.
type Metadata = {
  objects: { entry: { path: string } };
  collection: string;
  status: string;
};

// Backend-provided callback that reads a single file (text or blob).
type ReadFile = (
  path: string,
  id: string | null | undefined,
  options: { parseText: boolean },
) => Promise<string | Blob>;

// Backend-provided callback that reads one unpublished entry by its key.
type ReadUnpublishedFile = (
  key: string,
) => Promise<{ metaData: Metadata; fileData: string; isModification: boolean; slug: string }>;
// Read up to MAX_CONCURRENT_DOWNLOADS files in parallel, resolving each to an
// entry-like object. Files that fail to load are logged and filtered out of
// the result rather than failing the whole batch.
const fetchFiles = async (files: ImplementationFile[], readFile: ReadFile, apiName: string) => {
  const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
  const promises = [] as Promise<ImplementationEntry | { error: boolean }>[];
  files.forEach(file => {
    promises.push(
      new Promise(resolve =>
        sem.take(() =>
          readFile(file.path, file.id, { parseText: true })
            .then(data => {
              resolve({ file, data: data as string });
              sem.leave();
            })
            // Defaulting the parameter means an `undefined` rejection still
            // marks this file as errored.
            .catch((error = true) => {
              sem.leave();
              console.error(`failed to load file from ${apiName}: ${file.path}`);
              resolve({ error });
            }),
        ),
      ),
    );
  });
  // Drop errored loads; only successfully read entries are returned.
  return Promise.all(promises).then(loadedEntries =>
    loadedEntries.filter(loadedEntry => !(loadedEntry as { error: boolean }).error),
  ) as Promise<ImplementationEntry[]>;
};
// Read up to MAX_CONCURRENT_DOWNLOADS unpublished entries in parallel by key.
// Missing or failing entries are logged and filtered out of the result.
const fetchUnpublishedFiles = async (
  keys: string[],
  readUnpublishedFile: ReadUnpublishedFile,
  apiName: string,
) => {
  const sem = semaphore(MAX_CONCURRENT_DOWNLOADS);
  const promises = [] as Promise<ImplementationEntry | { error: boolean }>[];
  keys.forEach(key => {
    promises.push(
      new Promise(resolve =>
        sem.take(() =>
          readUnpublishedFile(key)
            .then(data => {
              if (data === null || data === undefined) {
                // Key resolved to nothing: treat as an errored (skipped) entry.
                resolve({ error: true });
                sem.leave();
              } else {
                resolve({
                  slug: data.slug,
                  file: { path: data.metaData.objects.entry.path, id: null },
                  data: data.fileData,
                  metaData: data.metaData,
                  isModification: data.isModification,
                });
                sem.leave();
              }
            })
            // Defaulting the parameter marks `undefined` rejections as errors too.
            .catch((error = true) => {
              sem.leave();
              console.error(`failed to load file from ${apiName}: ${key}`);
              resolve({ error });
            }),
        ),
      ),
    );
  });
  // Keep only successfully loaded entries.
  return Promise.all(promises).then(loadedEntries =>
    loadedEntries.filter(loadedEntry => !(loadedEntry as { error: boolean }).error),
  ) as Promise<ImplementationEntry[]>;
};
/**
 * List every file under `folder` and load each one's content through the
 * shared concurrency-limited fetcher.
 */
export const entriesByFolder = async (
  listFiles: () => Promise<ImplementationFile[]>,
  readFile: ReadFile,
  apiName: string,
) => {
  const listedFiles = await listFiles();
  return fetchFiles(listedFiles, readFile, apiName);
};

/**
 * Load the content of an explicit set of files.
 */
export const entriesByFiles = async (
  files: ImplementationFile[],
  readFile: ReadFile,
  apiName: string,
) => fetchFiles(files, readFile, apiName);
/**
 * Load all unpublished (editorial workflow) entries. A missing workflow
 * branch surfaces as a "Not Found" error and is treated as "no entries"
 * rather than a failure.
 */
export const unpublishedEntries = async (
  listEntriesKeys: () => Promise<string[]>,
  readUnpublishedFile: ReadUnpublishedFile,
  apiName: string,
) => {
  try {
    const keys = await listEntriesKeys();
    return await fetchUnpublishedFiles(keys, readUnpublishedFile, apiName);
  } catch (error) {
    if (error.message === 'Not Found') {
      return [];
    }
    throw error;
  }
};
/**
 * Load a media file as a Blob. SVGs are fetched as text and wrapped in a
 * typed Blob so they can be rendered inline; everything else is fetched as a
 * binary Blob.
 */
export const getMediaAsBlob = async (path: string, id: string | null, readFile: ReadFile) => {
  let blob: Blob;
  // The dot must be escaped: /.svg$/ would also match any path merely ending
  // in "svg" (e.g. "endsinsvg"), not just the ".svg" extension.
  if (path.match(/\.svg$/)) {
    const text = (await readFile(path, id, { parseText: true })) as string;
    blob = new Blob([text], { type: 'image/svg+xml' });
  } else {
    blob = (await readFile(path, id, { parseText: false })) as Blob;
  }
  return blob;
};
/**
 * Produces an object URL for a media item, gated by the provided semaphore
 * so concurrent display-URL requests stay bounded. The slot is released
 * whether loading succeeds or fails.
 */
export const getMediaDisplayURL = async (
  displayURL: DisplayURL,
  readFile: ReadFile,
  semaphore: Semaphore,
) => {
  const { path, id } = displayURL as DisplayURLObject;
  return new Promise<string>((resolve, reject) => {
    semaphore.take(async () => {
      try {
        const blob = await getMediaAsBlob(path, id, readFile);
        resolve(URL.createObjectURL(blob));
      } catch (error) {
        reject(error);
      } finally {
        semaphore.leave();
      }
    });
  });
};
/**
 * Runs `func` while holding `lock`, always releasing the lock afterwards.
 * If the lock cannot be acquired, `message` is logged as a warning and the
 * function is executed anyway (best-effort locking).
 */
export const runWithLock = async (lock: AsyncLock, func: Function, message: string) => {
  try {
    if (!(await lock.acquire())) {
      console.warn(message);
    }
    return await func();
  } finally {
    lock.release();
  }
};

View File

@ -1,79 +0,0 @@
import APIError from './APIError';
import Cursor, { CURSOR_COMPATIBILITY_SYMBOL } from './Cursor';
import EditorialWorkflowError, { EDITORIAL_WORKFLOW_ERROR } from './EditorialWorkflowError';
import localForage from './localForage';
import { isAbsolutePath, basename, fileExtensionWithSeparator, fileExtension } from './path';
import {
filterPromises,
filterPromisesWith,
onlySuccessfulPromises,
resolvePromiseProperties,
flowAsync,
then,
} from './promise';
import unsentRequest from './unsentRequest';
import {
filterByPropExtension,
getAllResponses,
parseLinkHeader,
parseResponse,
responseParser,
getCollectionDepth,
} from './backendUtil';
import loadScript from './loadScript';
import getBlobSHA from './getBlobSHA';
import { asyncLock } from './asyncLock';
// Aggregate bundle object mirroring the named exports below, for consumers
// that import the util library as a single value.
export const NetlifyCmsLibUtil = {
APIError,
Cursor,
CURSOR_COMPATIBILITY_SYMBOL,
EditorialWorkflowError,
EDITORIAL_WORKFLOW_ERROR,
localForage,
basename,
fileExtensionWithSeparator,
fileExtension,
filterPromises,
filterPromisesWith,
onlySuccessfulPromises,
resolvePromiseProperties,
flowAsync,
then,
unsentRequest,
filterByPropExtension,
parseLinkHeader,
parseResponse,
responseParser,
loadScript,
getBlobSHA,
getCollectionDepth,
};
// Named re-exports of the shared backend utilities (the preferred import
// form; the object above exists for bundle-style consumers).
export {
APIError,
Cursor,
CURSOR_COMPATIBILITY_SYMBOL,
EditorialWorkflowError,
EDITORIAL_WORKFLOW_ERROR,
localForage,
basename,
fileExtensionWithSeparator,
fileExtension,
filterPromises,
filterPromisesWith,
onlySuccessfulPromises,
resolvePromiseProperties,
flowAsync,
then,
unsentRequest,
filterByPropExtension,
parseLinkHeader,
getAllResponses,
parseResponse,
responseParser,
loadScript,
getBlobSHA,
asyncLock,
isAbsolutePath,
getCollectionDepth,
};

View File

@ -0,0 +1,164 @@
import APIError from './APIError';
import Cursor, { CURSOR_COMPATIBILITY_SYMBOL } from './Cursor';
import EditorialWorkflowError, { EDITORIAL_WORKFLOW_ERROR } from './EditorialWorkflowError';
import localForage from './localForage';
import { isAbsolutePath, basename, fileExtensionWithSeparator, fileExtension } from './path';
import { onlySuccessfulPromises, flowAsync, then } from './promise';
import unsentRequest from './unsentRequest';
import {
filterByPropExtension,
getAllResponses,
parseLinkHeader,
parseResponse,
responseParser,
getPathDepth,
} from './backendUtil';
import loadScript from './loadScript';
import getBlobSHA from './getBlobSHA';
import { asyncLock, AsyncLock as AL } from './asyncLock';
import {
Implementation as I,
ImplementationEntry as IE,
ImplementationMediaFile as IMF,
ImplementationFile as IF,
DisplayURLObject as DUO,
DisplayURL as DU,
Credentials as Cred,
User as U,
Entry as E,
PersistOptions as PO,
AssetProxy as AP,
entriesByFiles,
entriesByFolder,
unpublishedEntries,
getMediaDisplayURL,
getMediaAsBlob,
runWithLock,
Config as C,
UnpublishedEntryMediaFile as UEMF,
} from './implementation';
import {
readFile,
CMS_BRANCH_PREFIX,
generateContentKey,
isCMSLabel,
labelToStatus,
statusToLabel,
DEFAULT_PR_BODY,
MERGE_COMMIT_MESSAGE,
isPreviewContext,
getPreviewStatus,
PreviewState,
FetchError as FE,
parseContentKey,
} from './API';
// Re-export the imported types under their public names. The short import
// aliases (AL, I, IE, ...) exist only so these export declarations can use
// the original identifiers without clashing.
export type AsyncLock = AL;
export type Implementation = I;
export type ImplementationEntry = IE;
export type ImplementationMediaFile = IMF;
export type ImplementationFile = IF;
export type DisplayURL = DU;
export type DisplayURLObject = DUO;
export type Credentials = Cred;
export type User = U;
export type Entry = E;
export type UnpublishedEntryMediaFile = UEMF;
export type PersistOptions = PO;
export type AssetProxy = AP;
// A backend API request: either a bare URL string or a request descriptor
// consumed by the request helpers (e.g. unsentRequest).
export type ApiRequest =
| {
url: string;
params?: Record<string, string | boolean | number>;
method?: 'POST' | 'PUT' | 'DELETE' | 'HEAD';
headers?: Record<string, string>;
body?: string | FormData;
cache?: 'no-store';
}
| string;
export type Config = C;
export type FetchError = FE;
// Aggregate bundle object mirroring the named exports below, for consumers
// that import the util library as a single value.
export const NetlifyCmsLibUtil = {
APIError,
Cursor,
CURSOR_COMPATIBILITY_SYMBOL,
EditorialWorkflowError,
EDITORIAL_WORKFLOW_ERROR,
localForage,
basename,
fileExtensionWithSeparator,
fileExtension,
onlySuccessfulPromises,
flowAsync,
then,
unsentRequest,
filterByPropExtension,
parseLinkHeader,
parseResponse,
responseParser,
loadScript,
getBlobSHA,
getPathDepth,
entriesByFiles,
entriesByFolder,
unpublishedEntries,
getMediaDisplayURL,
getMediaAsBlob,
readFile,
CMS_BRANCH_PREFIX,
generateContentKey,
isCMSLabel,
labelToStatus,
statusToLabel,
DEFAULT_PR_BODY,
MERGE_COMMIT_MESSAGE,
isPreviewContext,
getPreviewStatus,
runWithLock,
PreviewState,
parseContentKey,
};
// Named re-exports of the shared backend utilities (the preferred import
// form; the object above exists for bundle-style consumers).
export {
APIError,
Cursor,
CURSOR_COMPATIBILITY_SYMBOL,
EditorialWorkflowError,
EDITORIAL_WORKFLOW_ERROR,
localForage,
basename,
fileExtensionWithSeparator,
fileExtension,
onlySuccessfulPromises,
flowAsync,
then,
unsentRequest,
filterByPropExtension,
parseLinkHeader,
getAllResponses,
parseResponse,
responseParser,
loadScript,
getBlobSHA,
asyncLock,
isAbsolutePath,
getPathDepth,
entriesByFiles,
entriesByFolder,
unpublishedEntries,
getMediaDisplayURL,
getMediaAsBlob,
readFile,
CMS_BRANCH_PREFIX,
generateContentKey,
isCMSLabel,
labelToStatus,
statusToLabel,
DEFAULT_PR_BODY,
MERGE_COMMIT_MESSAGE,
isPreviewContext,
getPreviewStatus,
runWithLock,
PreviewState,
parseContentKey,
};

View File

@ -1,7 +1,7 @@
const absolutePath = new RegExp('^(?:[a-z]+:)?//', 'i');
const normalizePath = path => path.replace(/[\\/]+/g, '/');
const normalizePath = (path: string) => path.replace(/[\\/]+/g, '/');
export function isAbsolutePath(path) {
export function isAbsolutePath(path: string) {
return absolutePath.test(path);
}
@ -16,7 +16,7 @@ export function isAbsolutePath(path) {
* // returns
* 'quux'
*/
export function basename(p, ext = '') {
export function basename(p: string, ext = '') {
// Special case: Normalize will modify this to '.'
if (p === '') {
return p;
@ -50,13 +50,13 @@ export function basename(p, ext = '') {
* // returns
* '.html'
*/
export function fileExtensionWithSeparator(p) {
export function fileExtensionWithSeparator(p: string) {
p = normalizePath(p);
const sections = p.split('/');
p = sections.pop();
p = sections.pop() as string;
// Special case: foo/file.ext/ should return '.ext'
if (p === '' && sections.length > 0) {
p = sections.pop();
p = sections.pop() as string;
}
if (p === '..') {
return '';
@ -77,7 +77,7 @@ export function fileExtensionWithSeparator(p) {
* // returns
* 'html'
*/
export function fileExtension(p) {
export function fileExtension(p: string) {
const ext = fileExtensionWithSeparator(p);
return ext === '' ? ext : ext.substr(1);
}

View File

@ -1,38 +0,0 @@
import constant from 'lodash/constant';
import filter from 'lodash/fp/filter';
import map from 'lodash/fp/map';
import flow from 'lodash/flow';
import zipObject from 'lodash/zipObject';
export const filterPromises = (arr, filter) =>
Promise.all(arr.map(entry => Promise.resolve(entry).then(filter))).then(bits =>
arr.filter(() => bits.shift()),
);
export const filterPromisesWith = filter => arr => filterPromises(arr, filter);
/**
 * Given an object whose values may be promises, resolves every promise value
 * and returns a copy of the object with the resolved values in place.
 * Non-promise values are copied through unchanged.
 */
export const resolvePromiseProperties = obj => {
  // Keys whose values are thenable. Guard against null/undefined values,
  // which previously threw a TypeError on the `.then` property lookup.
  const promiseKeys = Object.keys(obj).filter(
    key => obj[key] && typeof obj[key].then === 'function',
  );
  const promises = promiseKeys.map(key => obj[key]);
  // Resolve all promises, then overwrite each promise-valued key with its
  // resolved value (stdlib Object.fromEntries replaces lodash zipObject).
  return Promise.all(promises).then(resolvedPromises =>
    Object.assign(
      {},
      obj,
      Object.fromEntries(promiseKeys.map((key, index) => [key, resolvedPromises[index]])),
    ),
  );
};
export const then = fn => p => Promise.resolve(p).then(fn);
// Sentinel marking a rejected promise so it can be filtered out below.
const filterPromiseSymbol = Symbol('filterPromiseSymbol');
// Point-free pipeline: resolves with only the fulfilled values of an array
// of promises. Each rejection is mapped to the sentinel symbol, the whole
// batch is awaited with Promise.all, and sentinel entries are filtered out.
export const onlySuccessfulPromises = flow([
then(map(p => p.catch(constant(filterPromiseSymbol)))),
then(Promise.all.bind(Promise)),
then(filter(maybeValue => maybeValue !== filterPromiseSymbol)),
]);
// Wrap a step so it awaits its (possibly promised) argument before running.
const wrapFlowAsync = fn => async value => fn(await value);
// Left-to-right composition of async steps: an async-aware lodash `flow`.
export const flowAsync = fns => flow(fns.map(step => wrapFlowAsync(step)));

View File

@ -0,0 +1,14 @@
import flow from 'lodash/flow';
// Curried `.then`: lifts a plain function into one that maps a promise.
export const then = <T, V>(fn: (r: T) => V) => {
  return (p: Promise<T>) => Promise.resolve(p).then(fn);
};
// Sentinel marking a rejected promise so it can be filtered out below.
const filterPromiseSymbol = Symbol('filterPromiseSymbol');

/**
 * Resolves with only the fulfilled values of `promises`; rejected entries
 * are silently dropped instead of failing the whole batch.
 */
export const onlySuccessfulPromises = (promises: Promise<unknown>[]) => {
  const shielded = promises.map(promise => promise.catch(() => filterPromiseSymbol));
  return Promise.all(shielded).then(values =>
    values.filter(value => value !== filterPromiseSymbol),
  );
};
// Wrap a step so it awaits its (possibly promised) argument before running.
const wrapFlowAsync = (fn: Function) => async (value: unknown) => fn(await value);
// Left-to-right composition of async steps: an async-aware lodash `flow`.
export const flowAsync = (fns: Function[]) => flow(fns.map(step => wrapFlowAsync(step)));

View File

@ -0,0 +1,4 @@
// Minimal ambient typing for the 'js-sha256' package: the default export
// hashes a string or buffer and returns the digest as a string.
declare module 'js-sha256' {
const sha256: (reader: string | ArrayBuffer | null | undefined) => string;
export default sha256;
}

View File

@ -0,0 +1,5 @@
// Minimal ambient typing for the 'semaphore' package, as used here for
// bounding concurrent downloads: take() schedules a callback for when a
// slot is available and leave() frees a slot (presumably FIFO — only the
// take/leave surface is relied on in this codebase).
declare module 'semaphore' {
export type Semaphore = { take: (f: Function) => void; leave: () => void };
const semaphore: (count: number) => Semaphore;
export default semaphore;
}