Feat: editorial workflow bitbucket gitlab (#3014)

* refactor: typescript the backends

* feat: support multiple files upload for GitLab and BitBucket

* fix: load entry media files from media folder or UI state

* chore: cleanup log message

* chore: code cleanup

* refactor: typescript the test backend

* refactor: cleanup getEntry unused variables

* refactor: moved shared backend code to lib util

* chore: rename files to preserve history

* fix: bind readFile method to API classes

* test(e2e): switch to chrome in cypress tests

* refactor: extract common api methods

* refactor: remove most of immutable js usage from backends

* feat(backend-gitlab): initial editorial workflow support

* feat(backend-gitlab): implement missing workflow methods

* chore: fix lint error

* feat(backend-gitlab): support files deletion

* test(e2e): add gitlab cypress tests

* feat(backend-bitbucket): implement missing editorial workflow methods

* test(e2e): add BitBucket backend e2e tests

* build: update node version to 12 on netlify builds

* fix(backend-bitbucket): extract BitBucket avatar url

* test: fix git-gateway AuthenticationPage test

* test(e2e): fix some backend tests

* test(e2e): fix tests

* test(e2e): add git-gateway editorial workflow test

* chore: code cleanup

* test(e2e): revert back to electron

* test(e2e): add non editorial workflow tests

* fix(git-gateway-gitlab): don't call unpublishedEntry in simple workflow

gitlab git-gateway doesn't support editorial workflow APIs yet. This change makes sure not to call them in simple workflow

* refactor(backend-bitbucket): switch to diffstat API instead of raw diff

* chore: fix test

* test(e2e): add more git-gateway tests

* fix: post rebase typescript fixes

* test(e2e): fix tests

* fix: fix parsing of content key and add tests

* refactor: rename test file

* test(unit): add getStatuses unit tests

* chore: update cypress

* docs: update beta docs
This commit is contained in:
Erez Rokah
2020-01-15 00:15:14 +02:00
committed by Shawn Erquhart
parent 4ff5bc2ee0
commit 6f221ab3c1
251 changed files with 70910 additions and 15974 deletions

View File

@ -16,18 +16,16 @@
"scripts": {
"develop": "yarn build:esm --watch",
"build": "cross-env NODE_ENV=production webpack",
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward"
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward --extensions \".js,.jsx,.ts,.tsx\""
},
"peerDependencies": {
"@emotion/core": "^10.0.9",
"@emotion/styled": "^10.0.9",
"immutable": "^3.8.2",
"lodash": "^4.17.11",
"netlify-cms-lib-util": "^2.3.0",
"netlify-cms-ui-default": "^2.6.0",
"prop-types": "^15.7.2",
"react": "^16.8.4",
"react-immutable-proptypes": "^2.1.0",
"uuid": "^3.3.2"
}
}

View File

@ -1,6 +1,5 @@
import React from 'react';
import PropTypes from 'prop-types';
import ImmutablePropTypes from 'react-immutable-proptypes';
import styled from '@emotion/styled';
import { Icon, buttons, shadows, GoBackButton } from 'netlify-cms-ui-default';
@ -38,14 +37,14 @@ export default class AuthenticationPage extends React.Component {
static propTypes = {
onLogin: PropTypes.func.isRequired,
inProgress: PropTypes.bool,
config: ImmutablePropTypes.map.isRequired,
config: PropTypes.object.isRequired,
};
componentDidMount() {
/**
* Allow login screen to be skipped for demo purposes.
*/
const skipLogin = this.props.config.getIn(['backend', 'login']) === false;
const skipLogin = this.props.config.backend.login === false;
if (skipLogin) {
this.props.onLogin(this.state);
}
@ -65,7 +64,7 @@ export default class AuthenticationPage extends React.Component {
<LoginButton disabled={inProgress} onClick={this.handleLogin}>
{inProgress ? 'Logging in...' : 'Login'}
</LoginButton>
{config.get('site_url') && <GoBackButton href={config.get('site_url')}></GoBackButton>}
{config.site_url && <GoBackButton href={config.site_url}></GoBackButton>}
</StyledAuthenticationPage>
);
}

View File

@ -17,8 +17,8 @@ describe('test backend implementation', () => {
const backend = new TestBackend();
await expect(backend.getEntry(null, null, 'posts/some-post.md')).resolves.toEqual({
file: { path: 'posts/some-post.md' },
await expect(backend.getEntry('posts/some-post.md')).resolves.toEqual({
file: { path: 'posts/some-post.md', id: null },
data: 'post content',
});
});
@ -38,8 +38,8 @@ describe('test backend implementation', () => {
const backend = new TestBackend();
await expect(backend.getEntry(null, null, 'posts/dir1/dir2/some-post.md')).resolves.toEqual({
file: { path: 'posts/dir1/dir2/some-post.md' },
await expect(backend.getEntry('posts/dir1/dir2/some-post.md')).resolves.toEqual({
file: { path: 'posts/dir1/dir2/some-post.md', id: null },
data: 'post content',
});
});
@ -224,31 +224,31 @@ describe('test backend implementation', () => {
expect(getFolderEntries(tree, 'pages', 'md', 1)).toEqual([
{
file: { path: 'pages/root-page.md' },
file: { path: 'pages/root-page.md', id: null },
data: 'root page content',
},
]);
expect(getFolderEntries(tree, 'pages', 'md', 2)).toEqual([
{
file: { path: 'pages/dir1/nested-page-1.md' },
file: { path: 'pages/dir1/nested-page-1.md', id: null },
data: 'nested page 1 content',
},
{
file: { path: 'pages/root-page.md' },
file: { path: 'pages/root-page.md', id: null },
data: 'root page content',
},
]);
expect(getFolderEntries(tree, 'pages', 'md', 3)).toEqual([
{
file: { path: 'pages/dir1/dir2/nested-page-2.md' },
file: { path: 'pages/dir1/dir2/nested-page-2.md', id: null },
data: 'nested page 2 content',
},
{
file: { path: 'pages/dir1/nested-page-1.md' },
file: { path: 'pages/dir1/nested-page-1.md', id: null },
data: 'nested page 1 content',
},
{
file: { path: 'pages/root-page.md' },
file: { path: 'pages/root-page.md', id: null },
data: 'root page content',
},
]);

View File

@ -5,25 +5,49 @@ import {
Cursor,
CURSOR_COMPATIBILITY_SYMBOL,
basename,
getCollectionDepth,
Implementation,
Entry,
ImplementationEntry,
AssetProxy,
PersistOptions,
ImplementationMediaFile,
User,
Config,
ImplementationFile,
} from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
type RepoFile = { file?: { path: string }; content: string };
type RepoTree = { [key: string]: RepoFile | RepoTree };
declare global {
interface Window {
repoFiles: RepoTree;
repoFilesUnpublished: ImplementationEntry[];
}
}
window.repoFiles = window.repoFiles || {};
window.repoFilesUnpublished = window.repoFilesUnpublished || [];
function getFile(path) {
function getFile(path: string) {
const segments = path.split('/');
let obj = window.repoFiles;
let obj: RepoTree = window.repoFiles;
while (obj && segments.length) {
obj = obj[segments.shift()];
obj = obj[segments.shift() as string] as RepoTree;
}
return obj || {};
return ((obj as unknown) as RepoFile) || {};
}
const pageSize = 10;
const getCursor = (collection, extension, entries, index) => {
const getCursor = (
folder: string,
extension: string,
entries: ImplementationEntry[],
index: number,
depth: number,
) => {
const count = entries.length;
const pageCount = Math.floor(count / pageSize);
return Cursor.create({
@ -32,24 +56,31 @@ const getCursor = (collection, extension, entries, index) => {
...(index > 0 ? ['prev', 'first'] : []),
],
meta: { index, count, pageSize, pageCount },
data: { collection, extension, index, pageCount },
data: { folder, extension, index, pageCount, depth },
});
};
export const getFolderEntries = (tree, folder, extension, depth, files = [], path = folder) => {
export const getFolderEntries = (
tree: RepoTree,
folder: string,
extension: string,
depth: number,
files = [] as ImplementationEntry[],
path = folder,
) => {
if (depth <= 0) {
return files;
}
Object.keys(tree[folder] || {}).forEach(key => {
if (key.endsWith(`.${extension}`)) {
const file = tree[folder][key];
const file = (tree[folder] as RepoTree)[key] as RepoFile;
files.unshift({
file: { path: `${path}/${key}` },
file: { path: `${path}/${key}`, id: null },
data: file.content,
});
} else {
const subTree = tree[folder];
const subTree = tree[folder] as RepoTree;
return getFolderEntries(subTree, key, extension, depth - 1, files, `${path}/${key}`);
}
});
@ -57,9 +88,11 @@ export const getFolderEntries = (tree, folder, extension, depth, files = [], pat
return files;
};
export default class TestBackend {
constructor(config, options = {}) {
this.config = config;
export default class TestBackend implements Implementation {
assets: ImplementationMediaFile[];
options: { initialWorkflowStatus?: string };
constructor(_config: Config, options = {}) {
this.assets = [];
this.options = options;
}
@ -68,12 +101,12 @@ export default class TestBackend {
return AuthenticationPage;
}
restoreUser(user) {
return this.authenticate(user);
restoreUser() {
return this.authenticate();
}
authenticate() {
return Promise.resolve();
return (Promise.resolve() as unknown) as Promise<User>;
}
logout() {
@ -84,14 +117,20 @@ export default class TestBackend {
return Promise.resolve('');
}
traverseCursor(cursor, action) {
const { collection, extension, index, pageCount } = cursor.data.toObject();
traverseCursor(cursor: Cursor, action: string) {
const { folder, extension, index, pageCount, depth } = cursor.data!.toObject() as {
folder: string;
extension: string;
index: number;
pageCount: number;
depth: number;
};
const newIndex = (() => {
if (action === 'next') {
return index + 1;
return (index as number) + 1;
}
if (action === 'prev') {
return index - 1;
return (index as number) - 1;
}
if (action === 'first') {
return 0;
@ -99,35 +138,26 @@ export default class TestBackend {
if (action === 'last') {
return pageCount;
}
return 0;
})();
// TODO: stop assuming cursors are for collections
const depth = getCollectionDepth(collection);
const allEntries = getFolderEntries(
window.repoFiles,
collection.get('folder'),
extension,
depth,
);
const allEntries = getFolderEntries(window.repoFiles, folder, extension, depth);
const entries = allEntries.slice(newIndex * pageSize, newIndex * pageSize + pageSize);
const newCursor = getCursor(collection, extension, allEntries, newIndex);
const newCursor = getCursor(folder, extension, allEntries, newIndex, depth);
return Promise.resolve({ entries, cursor: newCursor });
}
entriesByFolder(collection, extension) {
const folder = collection.get('folder');
const depth = getCollectionDepth(collection);
entriesByFolder(folder: string, extension: string, depth: number) {
const entries = folder ? getFolderEntries(window.repoFiles, folder, extension, depth) : [];
const cursor = getCursor(collection, extension, entries, 0);
const cursor = getCursor(folder, extension, entries, 0, depth);
const ret = take(entries, pageSize);
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
// @ts-ignore
ret[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
return Promise.resolve(ret);
}
entriesByFiles(collection) {
const files = collection.get('files').map(collectionFile => ({
path: collectionFile.get('file'),
label: collectionFile.get('label'),
}));
entriesByFiles(files: ImplementationFile[]) {
return Promise.all(
files.map(file => ({
file,
@ -136,9 +166,9 @@ export default class TestBackend {
);
}
getEntry(collection, slug, path) {
getEntry(path: string) {
return Promise.resolve({
file: { path },
file: { path, id: null },
data: getFile(path).content,
});
}
@ -147,18 +177,18 @@ export default class TestBackend {
return Promise.resolve(window.repoFilesUnpublished);
}
getMediaFiles(entry) {
const mediaFiles = entry.mediaFiles.map(file => ({
getMediaFiles(entry: ImplementationEntry) {
const mediaFiles = entry.mediaFiles!.map(file => ({
...file,
...this.mediaFileToAsset(file),
file: file.fileObj,
...this.normalizeAsset(file),
file: file.file as File,
}));
return mediaFiles;
}
unpublishedEntry(collection, slug) {
unpublishedEntry(collection: string, slug: string) {
const entry = window.repoFilesUnpublished.find(
e => e.metaData.collection === collection.get('name') && e.slug === slug,
e => e.metaData!.collection === collection && e.slug === slug,
);
if (!entry) {
return Promise.reject(
@ -170,25 +200,32 @@ export default class TestBackend {
return Promise.resolve(entry);
}
deleteUnpublishedEntry(collection, slug) {
deleteUnpublishedEntry(collection: string, slug: string) {
const unpubStore = window.repoFilesUnpublished;
const existingEntryIndex = unpubStore.findIndex(
e => e.metaData.collection === collection && e.slug === slug,
e => e.metaData!.collection === collection && e.slug === slug,
);
unpubStore.splice(existingEntryIndex, 1);
return Promise.resolve();
}
async persistEntry({ path, raw, slug }, mediaFiles, options = {}) {
async persistEntry(
{ path, raw, slug }: Entry,
assetProxies: AssetProxy[],
options: PersistOptions,
) {
if (options.useWorkflow) {
const unpubStore = window.repoFilesUnpublished;
const existingEntryIndex = unpubStore.findIndex(e => e.file.path === path);
if (existingEntryIndex >= 0) {
const unpubEntry = { ...unpubStore[existingEntryIndex], data: raw };
unpubEntry.title = options.parsedData && options.parsedData.title;
unpubEntry.description = options.parsedData && options.parsedData.description;
unpubEntry.mediaFiles = mediaFiles;
const unpubEntry = {
...unpubStore[existingEntryIndex],
data: raw,
title: options.parsedData && options.parsedData.title,
description: options.parsedData && options.parsedData.description,
mediaFiles: assetProxies.map(this.normalizeAsset),
};
unpubStore.splice(existingEntryIndex, 1, unpubEntry);
} else {
@ -196,15 +233,16 @@ export default class TestBackend {
data: raw,
file: {
path,
id: null,
},
metaData: {
collection: options.collectionName,
status: options.status || this.options.initialWorkflowStatus,
collection: options.collectionName as string,
status: (options.status || this.options.initialWorkflowStatus) as string,
title: options.parsedData && options.parsedData.title,
description: options.parsedData && options.parsedData.description,
},
slug,
mediaFiles,
mediaFiles: assetProxies.map(this.normalizeAsset),
};
unpubStore.push(unpubEntry);
}
@ -218,78 +256,91 @@ export default class TestBackend {
let obj = window.repoFiles;
while (segments.length > 1) {
const segment = segments.shift();
const segment = segments.shift() as string;
obj[segment] = obj[segment] || {};
obj = obj[segment];
obj = obj[segment] as RepoTree;
}
obj[segments.shift()] = entry;
(obj[segments.shift() as string] as RepoFile) = entry;
await Promise.all(mediaFiles.map(file => this.persistMedia(file)));
await Promise.all(assetProxies.map(file => this.persistMedia(file)));
return Promise.resolve();
}
updateUnpublishedEntryStatus(collection, slug, newStatus) {
updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
const unpubStore = window.repoFilesUnpublished;
const entryIndex = unpubStore.findIndex(
e => e.metaData.collection === collection && e.slug === slug,
e => e.metaData!.collection === collection && e.slug === slug,
);
unpubStore[entryIndex].metaData.status = newStatus;
unpubStore[entryIndex]!.metaData!.status = newStatus;
return Promise.resolve();
}
async publishUnpublishedEntry(collection, slug) {
async publishUnpublishedEntry(collection: string, slug: string) {
const unpubStore = window.repoFilesUnpublished;
const unpubEntryIndex = unpubStore.findIndex(
e => e.metaData.collection === collection && e.slug === slug,
e => e.metaData!.collection === collection && e.slug === slug,
);
const unpubEntry = unpubStore[unpubEntryIndex];
const entry = { raw: unpubEntry.data, slug: unpubEntry.slug, path: unpubEntry.file.path };
const entry = {
raw: unpubEntry.data,
slug: unpubEntry.slug as string,
path: unpubEntry.file.path,
};
unpubStore.splice(unpubEntryIndex, 1);
await this.persistEntry(entry, unpubEntry.mediaFiles);
return { mediaFiles: this.getMediaFiles(unpubEntry) };
await this.persistEntry(entry, unpubEntry.mediaFiles!, { commitMessage: '' });
}
getMedia() {
return Promise.resolve(this.assets);
}
async getMediaFile(path) {
const asset = this.assets.find(asset => asset.path === path);
async getMediaFile(path: string) {
const asset = this.assets.find(asset => asset.path === path) as ImplementationMediaFile;
const url = asset.url as string;
const name = basename(path);
const blob = await fetch(asset.url).then(res => res.blob());
const blob = await fetch(url).then(res => res.blob());
const fileObj = new File([blob], name);
return {
displayURL: asset.url,
id: url,
displayURL: url,
path,
name,
size: fileObj.size,
file: fileObj,
url: asset.url,
url,
};
}
mediaFileToAsset(mediaFile) {
const { fileObj } = mediaFile;
normalizeAsset(assetProxy: AssetProxy) {
const fileObj = assetProxy.fileObj as File;
const { name, size } = fileObj;
const objectUrl = attempt(window.URL.createObjectURL, fileObj);
const url = isError(objectUrl) ? '' : objectUrl;
const normalizedAsset = { id: uuid(), name, size, path: mediaFile.path, url, displayURL: url };
const normalizedAsset = {
id: uuid(),
name,
size,
path: assetProxy.path,
url,
displayURL: url,
fileObj,
};
return normalizedAsset;
}
persistMedia(mediaFile) {
const normalizedAsset = this.mediaFileToAsset(mediaFile);
persistMedia(assetProxy: AssetProxy) {
const normalizedAsset = this.normalizeAsset(assetProxy);
this.assets.push(normalizedAsset);
return Promise.resolve(normalizedAsset);
}
deleteFile(path) {
deleteFile(path: string) {
const assetIndex = this.assets.findIndex(asset => asset.path === path);
if (assetIndex > -1) {
this.assets.splice(assetIndex, 1);
@ -299,4 +350,8 @@ export default class TestBackend {
return Promise.resolve();
}
async getDeployPreview() {
return null;
}
}