chore: add proxy backend (#3126)

* feat(backends): add proxy backend

* feat: add proxy server initial commit

* fix: move from joi to @hapi/joi

* test: add joi validation tests

* feat: proxy server initial implementations

* test: add tests, fix build

* chore: update yarn.lock

* build: fix develop command

* fix(back-proxy): fix bugs

* test(backend-proxy): add cypress tests

* chore: cleanup

* chore: support node 10

* chore: code cleanup

* chore: run cypress on ubuntu 16.04

* test(e2e): fix proxy backend cypress tests

* chore: don't start proxy server on yarn develop
This commit is contained in:
Erez Rokah 2020-01-22 23:47:34 +02:00 committed by Shawn Erquhart
parent cf57da223d
commit 7e8084be87
38 changed files with 2895 additions and 106 deletions

View File

@ -54,7 +54,7 @@ jobs:
e2e-with-cypress-record:
needs: build
if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork == false)
runs-on: ubuntu-latest
runs-on: ubuntu-16.04
strategy:
matrix:
@ -95,7 +95,7 @@ jobs:
e2e-no-cypress-record:
needs: build
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork == true
runs-on: ubuntu-latest
runs-on: ubuntu-16.04
steps:
- uses: actions/checkout@v1

2
.gitignore vendored
View File

@ -13,7 +13,7 @@ website/data/contributors.json
cypress/videos
cypress/screenshots
__diff_output__
/coverage/
coverage/
.cache
*.log
.env

View File

@ -0,0 +1,30 @@
import fixture from './common/editorial_workflow';
import * as specUtils from './common/spec_utils';
import { entry1, entry2, entry3 } from './common/entries';

const backend = 'proxy';

describe('Proxy Backend Editorial Workflow', () => {
  // Shared task state for the suite; specUtils.before populates
  // taskResult.data (e.g. the logged-in user). The binding is never
  // reassigned — only the object is mutated — so declare it with const.
  const taskResult = { data: {} };

  before(() => {
    specUtils.before(taskResult, { publish_mode: 'editorial_workflow' }, backend);
  });

  after(() => {
    specUtils.after(taskResult, backend);
  });

  beforeEach(() => {
    specUtils.beforeEach(taskResult, backend);
  });

  afterEach(() => {
    specUtils.afterEach(taskResult, backend);
  });

  fixture({
    entries: [entry1, entry2, entry3],
    getUser: () => taskResult.data.user,
  });
});

View File

@ -0,0 +1,27 @@
import fixture from './common/media_library';
import * as specUtils from './common/spec_utils';
import { entry1 } from './common/entries';

const backend = 'proxy';

describe('Proxy Backend Media Library - REST API', () => {
  // Shared task state for the suite; specUtils.before populates
  // taskResult.data (e.g. the logged-in user). The binding is never
  // reassigned — only the object is mutated — so declare it with const.
  const taskResult = { data: {} };

  before(() => {
    specUtils.before(taskResult, { publish_mode: 'editorial_workflow' }, backend);
  });

  after(() => {
    specUtils.after(taskResult, backend);
  });

  beforeEach(() => {
    specUtils.beforeEach(taskResult, backend);
  });

  afterEach(() => {
    specUtils.afterEach(taskResult, backend);
  });

  fixture({ entries: [entry1], getUser: () => taskResult.data.user });
});

View File

@ -0,0 +1,30 @@
import fixture from './common/simple_workflow';
import * as specUtils from './common/spec_utils';
import { entry1, entry2, entry3 } from './common/entries';

const backend = 'proxy';

describe('Proxy Backend Simple Workflow', () => {
  // Shared task state for the suite; specUtils.before populates
  // taskResult.data (e.g. the logged-in user). The binding is never
  // reassigned — only the object is mutated — so declare it with const.
  const taskResult = { data: {} };

  before(() => {
    specUtils.before(taskResult, { publish_mode: 'simple' }, backend);
  });

  after(() => {
    specUtils.after(taskResult, backend);
  });

  beforeEach(() => {
    specUtils.beforeEach(taskResult, backend);
  });

  afterEach(() => {
    specUtils.afterEach(taskResult, backend);
  });

  fixture({
    entries: [entry1, entry2, entry3],
    getUser: () => taskResult.data.user,
  });
});

View File

@ -27,6 +27,7 @@ const {
setupBitBucketTest,
teardownBitBucketTest,
} = require('./bitbucket');
const { setupProxy, teardownProxy, setupProxyTest, teardownProxyTest } = require('./proxy');
const { copyBackendFiles } = require('../utils/config');
@ -51,6 +52,9 @@ module.exports = async (on, config) => {
case 'bitbucket':
result = await setupBitBucket(options);
break;
case 'proxy':
result = await setupProxy(options);
break;
}
return result;
@ -72,6 +76,9 @@ module.exports = async (on, config) => {
case 'bitbucket':
await teardownBitBucket(taskData);
break;
case 'proxy':
await teardownProxy(taskData);
break;
}
console.log('Restoring defaults');
@ -96,6 +103,9 @@ module.exports = async (on, config) => {
case 'bitbucket':
await setupBitBucketTest(taskData);
break;
case 'proxy':
await setupProxyTest(taskData);
break;
}
return null;
@ -118,6 +128,9 @@ module.exports = async (on, config) => {
case 'bitbucket':
await teardownBitBucketTest(taskData);
break;
case 'proxy':
await teardownProxyTest(taskData);
break;
}
return null;

96
cypress/plugins/proxy.js Normal file
View File

@ -0,0 +1,96 @@
const fs = require('fs-extra');
const path = require('path');
const { spawn } = require('child_process');
const { updateConfig } = require('../utils/config');
const { merge } = require('lodash');
const { getGitClient } = require('./common');
const initRepo = async dir => {
await fs.remove(dir);
await fs.mkdirp(dir);
const git = getGitClient(dir);
await git.init();
await git.addConfig('user.email', 'cms-cypress-test@netlify.com');
await git.addConfig('user.name', 'cms-cypress-test');
const readme = 'README.md';
await fs.writeFile(path.join(dir, readme), '');
await git.add(readme);
await git.commit('initial commit', readme, { '--no-verify': true, '--no-gpg-sign': true });
};
const startServer = async repoDir => {
const tsNode = path.join(__dirname, '..', '..', 'node_modules', '.bin', 'ts-node');
const serverDir = path.join(__dirname, '..', '..', 'packages', 'netlify-cms-proxy-server');
const distIndex = path.join(serverDir, 'dist', 'index.js');
const tsIndex = path.join(serverDir, 'src', 'index.ts');
const env = { ...process.env, GIT_REPO_DIRECTORY: path.resolve(repoDir), PORT: 8082 };
if (await fs.pathExists(distIndex)) {
serverProcess = spawn('node', [distIndex], { env, cwd: serverDir });
} else {
serverProcess = spawn(tsNode, ['--files', tsIndex], { env, cwd: serverDir });
}
return new Promise((resolve, reject) => {
serverProcess.stdout.on('data', data => {
const message = data.toString().trim();
console.log(`server:stdout: ${message}`);
if (message.startsWith('Netlify CMS Proxy Server listening on port')) {
resolve(serverProcess);
}
});
serverProcess.stderr.on('data', data => {
console.error(`server:stderr: ${data.toString().trim()}`);
reject(data.toString());
});
});
};
let serverProcess;
async function setupProxy(options) {
const postfix = Math.random()
.toString(32)
.substring(2);
const testRepoName = `proxy-test-repo-${Date.now()}-${postfix}`;
const tempDir = path.join('.temp', testRepoName);
await updateConfig(config => {
merge(config, options);
});
return { tempDir };
}
async function teardownProxy(taskData) {
if (serverProcess) {
serverProcess.kill();
}
await fs.remove(taskData.tempDir);
return null;
}
async function setupProxyTest(taskData) {
await initRepo(taskData.tempDir);
serverProcess = await startServer(taskData.tempDir);
return null;
}
async function teardownProxyTest(taskData) {
if (serverProcess) {
serverProcess.kill();
}
await fs.remove(taskData.tempDir);
return null;
}
module.exports = {
setupProxy,
teardownProxy,
setupProxyTest,
teardownProxyTest,
};

View File

@ -133,19 +133,21 @@ function deleteEntryInEditor() {
function assertOnCollectionsPage() {
cy.url().should('contain', '/#/collections/posts');
cy.contains('h2', 'Collections');
}
function assertEntryDeleted(entry) {
if (Array.isArray(entry)) {
const titles = entry.map(e => e.title);
cy.get('a h2').each(el => {
expect(titles).not.to.include(el.text());
});
} else {
cy.get('a h2').each(el => {
expect(entry.title).not.to.equal(el.text());
});
const hasEntries = Cypress.$('a h2').length > 0;
if (hasEntries) {
if (Array.isArray(entry)) {
const titles = entry.map(e => e.title);
cy.get('a h2').each(el => {
expect(titles).not.to.include(el.text());
});
} else {
cy.get('a h2').each(el => {
expect(entry.title).not.to.equal(el.text());
});
}
}
}

View File

@ -0,0 +1,63 @@
# Netlify CMS site configuration used by the proxy backend Cypress tests.
backend:
  name: proxy
  # Git branch the proxy server reads from and commits to.
  branch: master
  # Endpoint of the locally running netlify-cms-proxy-server instance.
  proxy_url: http://localhost:8082/api/v1

publish_mode: editorial_workflow

media_folder: static/media
public_folder: /media

collections:
  # Dated blog posts.
  - name: posts
    label: Posts
    label_singular: 'Post'
    folder: content/posts
    create: true
    slug: '{{year}}-{{month}}-{{day}}-{{slug}}'
    fields:
      - label: Template
        name: template
        widget: hidden
        default: post
      - label: Title
        name: title
        widget: string
      - label: 'Cover Image'
        name: 'image'
        widget: 'image'
        required: false
      - label: Publish Date
        name: date
        widget: datetime
      - label: Description
        name: description
        widget: text
      - label: Category
        name: category
        widget: string
      - label: Body
        name: body
        widget: markdown
      - label: Tags
        name: tags
        widget: list
  # Standalone pages.
  - name: pages
    label: Pages
    label_singular: 'Page'
    folder: content/pages
    create: true
    slug: '{{slug}}'
    fields:
      - label: Template
        name: template
        widget: hidden
        default: page
      - label: Title
        name: title
        widget: string
      - label: Draft
        name: draft
        widget: boolean
        default: true
      - label: Body
        name: body
        widget: markdown

View File

@ -0,0 +1,41 @@
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8" />
    <title>Netlify CMS Development Test</title>
  </head>
  <body>
    <script src="dist/netlify-cms.js"></script>
    <script>
      // Posts and pages share an identical preview layout, so build both
      // preview components from a single factory instead of duplicating
      // the same createClass body twice.
      var makePreview = function() {
        return createClass({
          render: function() {
            var entry = this.props.entry;
            return h(
              'div',
              {},
              h('div', { className: 'cover' }, h('h1', {}, entry.getIn(['data', 'title']))),
              h('p', {}, h('small', {}, 'Written ' + entry.getIn(['data', 'date']))),
              h('div', { className: 'text' }, this.props.widgetFor('body')),
            );
          },
        });
      };

      CMS.registerPreviewTemplate('posts', makePreview());
      CMS.registerPreviewTemplate('pages', makePreview());
    </script>
  </body>
</html>

View File

@ -2,7 +2,7 @@
"scripts": {
"bootstrap": "lerna bootstrap",
"start": "run-s clean bootstrap build:esm develop",
"develop": "lerna run develop --parallel",
"develop": "lerna run develop --parallel --ignore netlify-cms-proxy-server",
"build": "run-s clean build:esm build:lerna",
"build:lerna": "lerna run build",
"build:esm": "lerna run build:esm",

View File

@ -7,6 +7,7 @@ import { GitLabBackend } from 'netlify-cms-backend-gitlab';
import { GitGatewayBackend } from 'netlify-cms-backend-git-gateway';
import { BitbucketBackend } from 'netlify-cms-backend-bitbucket';
import { TestBackend } from 'netlify-cms-backend-test';
import { ProxyBackend } from 'netlify-cms-backend-proxy';
// Widgets
import NetlifyCmsWidgetString from 'netlify-cms-widget-string';
@ -37,6 +38,7 @@ CMS.registerBackend('github', GitHubBackend);
CMS.registerBackend('gitlab', GitLabBackend);
CMS.registerBackend('bitbucket', BitbucketBackend);
CMS.registerBackend('test-repo', TestBackend);
CMS.registerBackend('proxy', ProxyBackend);
CMS.registerWidget([
NetlifyCmsWidgetString.Widget(),
NetlifyCmsWidgetNumber.Widget(),

View File

@ -0,0 +1,11 @@
# Docs coming soon!
Netlify CMS was recently converted from a single npm package to a "monorepo" of over 20 packages.
That's over 20 READMEs! We haven't created one for this package yet, but we will soon.
In the meantime, you can:
1. Check out the [main readme](https://github.com/netlify/netlify-cms/#readme) or the [documentation
site](https://www.netlifycms.org) for more info.
2. Reach out to the [community chat](https://netlifycms.org/chat/) if you need help.
3. Help out and [write the readme yourself](https://github.com/netlify/netlify-cms/edit/master/packages/netlify-cms-backend-proxy/README.md)!

View File

@ -0,0 +1,29 @@
{
"name": "netlify-cms-backend-proxy",
"description": "Proxy backend for Netlify CMS",
"version": "1.0.1",
"repository": "https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-backend-proxy",
"bugs": "https://github.com/netlify/netlify-cms/issues",
"license": "MIT",
"module": "dist/esm/index.js",
"main": "dist/netlify-cms-backend-proxy.js",
"keywords": [
"netlify",
"netlify-cms",
"backend"
],
"sideEffects": false,
"scripts": {
"develop": "yarn build:esm --watch",
"build": "cross-env NODE_ENV=production webpack",
"build:esm": "cross-env NODE_ENV=esm babel src --out-dir dist/esm --ignore \"**/__tests__\" --root-mode upward --extensions \".js,.jsx,.ts,.tsx\""
},
"peerDependencies": {
"@emotion/core": "^10.0.9",
"@emotion/styled": "^10.0.9",
"netlify-cms-lib-util": "^2.3.0",
"netlify-cms-ui-default": "^2.6.0",
"prop-types": "^15.7.2",
"react": "^16.8.4"
}
}

View File

@ -0,0 +1,61 @@
import React from 'react';
import PropTypes from 'prop-types';
import styled from '@emotion/styled';
import { Icon, buttons, shadows, GoBackButton } from 'netlify-cms-ui-default';

// Full-viewport centered column hosting the logo and the login button.
const StyledAuthenticationPage = styled.section`
  display: flex;
  flex-flow: column nowrap;
  align-items: center;
  justify-content: center;
  height: 100vh;
`;

const PageLogoIcon = styled(Icon)`
  color: #c4c6d2;
  margin-top: -300px;
`;

const LoginButton = styled.button`
  ${buttons.button};
  ${shadows.dropDeep};
  ${buttons.default};
  ${buttons.gray};

  padding: 0 30px;
  margin-top: -40px;
  display: flex;
  align-items: center;
  position: relative;

  ${Icon} {
    margin-right: 18px;
  }
`;

/**
 * Login screen for the proxy backend. There are no real credentials — the
 * backend talks to a local proxy server — so the button simply invokes
 * the onLogin callback supplied by the CMS core.
 */
export default class AuthenticationPage extends React.Component {
  static propTypes = {
    onLogin: PropTypes.func.isRequired,
    inProgress: PropTypes.bool,
    config: PropTypes.object.isRequired,
  };

  handleLogin = e => {
    e.preventDefault();
    // NOTE(review): this.state is never initialized in this component, so
    // onLogin receives null here — presumably the proxy backend ignores the
    // credentials object; confirm against the backend's authenticate().
    this.props.onLogin(this.state);
  };

  render() {
    const { config, inProgress } = this.props;

    return (
      <StyledAuthenticationPage>
        <PageLogoIcon size="300px" type="netlify-cms" />
        <LoginButton disabled={inProgress} onClick={this.handleLogin}>
          {inProgress ? 'Logging in...' : 'Login'}
        </LoginButton>
        {/* Offer a way back to the site when a site_url is configured. */}
        {config.site_url && <GoBackButton href={config.site_url}></GoBackButton>}
      </StyledAuthenticationPage>
    );
  }
}

View File

@ -0,0 +1,215 @@
import {
Entry,
AssetProxy,
PersistOptions,
User,
Config,
Implementation,
ImplementationFile,
EditorialWorkflowError,
APIError,
} from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
// Convert an AssetProxy into the JSON-friendly payload the proxy server
// expects: the asset's repo path plus its content encoded as base64.
const serializeAsset = async (assetProxy: AssetProxy) => {
  const content = await assetProxy.toBase64!();
  return { path: assetProxy.path, content, encoding: 'base64' };
};
// Wire format of a media file as returned by the proxy server
// (content is a string in the given encoding — see deserializeMediaFile).
type MediaFile = {
  id: string;
  content: string;
  encoding: string;
  name: string;
  path: string;
};
// Convert the proxy server's serialized media file into the in-memory shape
// the media library consumes: a File object plus an object URL for display.
const deserializeMediaFile = ({ id, content, encoding, path, name }: MediaFile) => {
  let byteArray = new Uint8Array(0);
  if (encoding !== 'base64') {
    // Only base64 payloads are supported; log and fall through with an empty
    // file so the UI degrades instead of crashing.
    console.error(`Unsupported encoding '${encoding}' for file '${path}'`);
  } else {
    // Decode base64 -> binary string -> byte array.
    const decodedContent = atob(content);
    byteArray = new Uint8Array(decodedContent.length);
    for (let i = 0; i < decodedContent.length; i++) {
      byteArray[i] = decodedContent.charCodeAt(i);
    }
  }

  const file = new File([byteArray], name);
  const url = URL.createObjectURL(file);
  return { id, name, path, file, size: file.size, url, displayURL: url };
};
/**
 * Backend implementation that forwards every CMS operation to a local proxy
 * server (netlify-cms-proxy-server) as a single JSON POST per action.
 */
export default class ProxyBackend implements Implementation {
  proxyUrl: string;
  mediaFolder: string;
  options: { initialWorkflowStatus?: string };
  branch: string;

  constructor(config: Config, options = {}) {
    if (!config.backend.proxy_url) {
      throw new Error('The Proxy backend needs a "proxy_url" in the backend configuration.');
    }

    this.branch = config.backend.branch || 'master';
    this.proxyUrl = config.backend.proxy_url;
    this.mediaFolder = config.media_folder;
    this.options = options;
  }

  authComponent() {
    return AuthenticationPage;
  }

  // No real auth against a local proxy server: restoring a user and
  // authenticating both resolve immediately with an empty user.
  restoreUser() {
    return this.authenticate();
  }

  authenticate() {
    return (Promise.resolve() as unknown) as Promise<User>;
  }

  logout() {
    return null;
  }

  getToken() {
    return Promise.resolve('');
  }

  /**
   * POST `payload` to the proxy server (the current branch is always
   * included at the top level). Returns the parsed JSON response on
   * success; throws an APIError on failure.
   */
  async request(payload: { action: string; params: Record<string, unknown> }) {
    const response = await fetch(this.proxyUrl, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json; charset=utf-8' },
      body: JSON.stringify({ branch: this.branch, ...payload }),
    });

    if (response.ok) {
      return response.json();
    }

    // The server normally replies with a JSON error body, but a crashed
    // server or an intermediary may return non-JSON (e.g. an HTML 502 page).
    // Don't let a JSON parse error mask the underlying HTTP failure.
    let message = response.statusText;
    try {
      const json = await response.json();
      if (json && json.message) {
        message = json.message;
      }
    } catch (e) {
      // Body wasn't JSON; fall back to the HTTP status text.
    }
    throw new APIError(message, response.status, 'Proxy');
  }

  // NOTE: branch is repeated inside params because the server-side schema
  // validates params.branch for each action.
  entriesByFolder(folder: string, extension: string, depth: number) {
    return this.request({
      action: 'entriesByFolder',
      params: { branch: this.branch, folder, extension, depth },
    });
  }

  entriesByFiles(files: ImplementationFile[]) {
    return this.request({
      action: 'entriesByFiles',
      params: { branch: this.branch, files },
    });
  }

  getEntry(path: string) {
    return this.request({
      action: 'getEntry',
      params: { branch: this.branch, path },
    });
  }

  unpublishedEntries() {
    return this.request({
      action: 'unpublishedEntries',
      params: { branch: this.branch },
    });
  }

  /**
   * Fetch a single unpublished entry and deserialize its media files.
   * A 404 from the server means the entry is not under editorial workflow,
   * which callers expect as an EditorialWorkflowError.
   */
  async unpublishedEntry(collection: string, slug: string) {
    try {
      const entry = await this.request({
        action: 'unpublishedEntry',
        params: { branch: this.branch, collection, slug },
      });
      const mediaFiles = entry.mediaFiles.map(deserializeMediaFile);
      return { ...entry, mediaFiles };
    } catch (e) {
      if (e.status === 404) {
        throw new EditorialWorkflowError('content is not under editorial workflow', true);
      }
      throw e;
    }
  }

  deleteUnpublishedEntry(collection: string, slug: string) {
    return this.request({
      action: 'deleteUnpublishedEntry',
      params: { branch: this.branch, collection, slug },
    });
  }

  /**
   * Persist an entry plus its assets (serialized to base64). When no status
   * is provided, fall back to the configured initial workflow status.
   */
  async persistEntry(entry: Entry, assetProxies: AssetProxy[], options: PersistOptions) {
    const assets = await Promise.all(assetProxies.map(serializeAsset));
    return this.request({
      action: 'persistEntry',
      params: {
        branch: this.branch,
        entry,
        assets,
        options: { ...options, status: options.status || this.options.initialWorkflowStatus },
      },
    });
  }

  updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
    return this.request({
      action: 'updateUnpublishedEntryStatus',
      params: { branch: this.branch, collection, slug, newStatus },
    });
  }

  publishUnpublishedEntry(collection: string, slug: string) {
    return this.request({
      action: 'publishUnpublishedEntry',
      params: { branch: this.branch, collection, slug },
    });
  }

  // List media files, defaulting to the configured media folder.
  async getMedia(mediaFolder = this.mediaFolder) {
    const files: MediaFile[] = await this.request({
      action: 'getMedia',
      params: { branch: this.branch, mediaFolder },
    });
    return files.map(deserializeMediaFile);
  }

  async getMediaFile(path: string) {
    const file = await this.request({
      action: 'getMediaFile',
      params: { branch: this.branch, path },
    });
    return deserializeMediaFile(file);
  }

  async persistMedia(assetProxy: AssetProxy, options: PersistOptions) {
    const asset = await serializeAsset(assetProxy);
    const file: MediaFile = await this.request({
      action: 'persistMedia',
      params: { branch: this.branch, asset, options: { commitMessage: options.commitMessage } },
    });
    return deserializeMediaFile(file);
  }

  deleteFile(path: string, commitMessage: string) {
    return this.request({
      action: 'deleteFile',
      params: { branch: this.branch, path, options: { commitMessage } },
    });
  }

  getDeployPreview(collection: string, slug: string) {
    return this.request({
      action: 'getDeployPreview',
      params: { branch: this.branch, collection, slug },
    });
  }
}

View File

@ -0,0 +1,8 @@
// Public entry point for the netlify-cms-backend-proxy package.
import ProxyBackend from './implementation';
import AuthenticationPage from './AuthenticationPage';

// Namespaced export used when the package is consumed as a bundled global.
export const NetlifyCmsBackendProxy = {
  ProxyBackend,
  AuthenticationPage,
};
export { ProxyBackend, AuthenticationPage };

View File

@ -0,0 +1,3 @@
// Build this package with the repo-wide shared webpack configuration.
const { getConfig } = require('../../scripts/webpack.js');
module.exports = getConfig();

View File

@ -489,18 +489,20 @@ export async function getMediaDisplayURL(
// url loading had an error
url = null;
} else {
if (!displayURLState.get('isFetching')) {
// load display url
dispatch(loadMediaDisplayURL(file));
}
const key = file.id;
url = await waitUntilWithTimeout<string>(dispatch, resolve => ({
const promise = waitUntilWithTimeout<string>(dispatch, resolve => ({
predicate: ({ type, payload }) =>
(type === MEDIA_DISPLAY_URL_SUCCESS || type === MEDIA_DISPLAY_URL_FAILURE) &&
payload.key === key,
run: (_dispatch, _getState, action) => resolve(action.payload.url),
}));
if (!displayURLState.get('isFetching')) {
// load display url
dispatch(loadMediaDisplayURL(file));
}
url = await promise;
}
return url;

View File

@ -22,17 +22,26 @@ export const waitUntilWithTimeout = async <T>(
dispatch(waitUntil(waitActionArgs(resolve)));
});
const timeoutPromise = new Promise<T>((resolve, reject) => {
setTimeout(() => (waitDone ? resolve() : reject(new Error('Wait Action timed out'))), timeout);
const timeoutPromise = new Promise<T | null>(resolve => {
setTimeout(() => {
if (waitDone) {
resolve();
} else {
console.warn('Wait Action timed out');
resolve(null);
}
}, timeout);
});
const result = await Promise.race([
waitPromise.then(result => {
waitDone = true;
return result;
}),
waitPromise
.then(result => {
waitDone = true;
return result;
})
.catch(null),
timeoutPromise,
]).catch(null);
]);
return result;
};

View File

@ -15,6 +15,14 @@ export const parseContentKey = (contentKey: string) => {
return { collection: contentKey.substr(0, index), slug: contentKey.substr(index + 1) };
};
// Derive the content key from a CMS branch name by stripping the
// "<CMS_BRANCH_PREFIX>/" prefix.
export const contentKeyFromBranch = (branch: string) => {
  const prefix = `${CMS_BRANCH_PREFIX}/`;
  return branch.slice(prefix.length);
};

// Inverse of contentKeyFromBranch: build the branch name for a content key.
export const branchFromContentKey = (contentKey: string) => {
  return `${CMS_BRANCH_PREFIX}/${contentKey}`;
};

// An Error augmented with the HTTP status code that produced it.
export interface FetchError extends Error {
  status: number;
}

View File

@ -1,7 +1,9 @@
export const EDITORIAL_WORKFLOW_ERROR = 'EDITORIAL_WORKFLOW_ERROR';
export default class EditorialWorkflowError extends Error {
constructor(message, notUnderEditorialWorkflow) {
message: string;
notUnderEditorialWorkflow: boolean;
constructor(message: string, notUnderEditorialWorkflow: boolean) {
super(message);
this.message = message;
this.notUnderEditorialWorkflow = notUnderEditorialWorkflow;

View File

@ -84,6 +84,7 @@ export type Config = {
gateway_url?: string;
large_media_url?: string;
use_large_media_transforms_in_media_library?: boolean;
proxy_url?: string;
};
media_folder: string;
base_url?: string;

View File

@ -51,6 +51,8 @@ import {
PreviewState,
FetchError as FE,
parseContentKey,
branchFromContentKey,
contentKeyFromBranch,
} from './API';
import {
createPointerFile,
@ -132,6 +134,8 @@ export const NetlifyCmsLibUtil = {
getLargeMediaPatternsFromGitAttributesFile,
parsePointerFile,
getPointerFileForMediaFileObj,
branchFromContentKey,
contentKeyFromBranch,
};
export {
APIError,
@ -180,4 +184,6 @@ export {
getLargeMediaPatternsFromGitAttributesFile,
parsePointerFile,
getPointerFileForMediaFileObj,
branchFromContentKey,
contentKeyFromBranch,
};

View File

@ -0,0 +1,11 @@
# Docs coming soon!
Netlify CMS was recently converted from a single npm package to a "monorepo" of over 20 packages.
That's over 20 READMEs! We haven't created one for this package yet, but we will soon.
In the meantime, you can:
1. Check out the [main readme](https://github.com/netlify/netlify-cms/#readme) or the [documentation
site](https://www.netlifycms.org) for more info.
2. Reach out to the [community chat](https://netlifycms.org/chat/) if you need help.
3. Help out and [write the readme yourself](https://github.com/netlify/netlify-cms/edit/master/packages/netlify-cms-proxy-server/README.md)!

View File

@ -0,0 +1,7 @@
// Jest configuration for the proxy server: TypeScript via ts-jest in a Node
// environment.
module.exports = {
  preset: 'ts-jest',
  testEnvironment: 'node',
  moduleNameMapper: {
    // Resolve the util package to its built ESM output rather than source.
    'netlify-cms-lib-util': '<rootDir>/../netlify-cms-lib-util/dist/esm',
  },
};

View File

@ -0,0 +1,54 @@
{
"name": "netlify-cms-proxy-server",
"description": "Proxy server to be used with Netlify CMS proxy backend",
"version": "1.0.1",
"repository": "https://github.com/netlify/netlify-cms/tree/master/packages/netlify-cms-proxy-server",
"bugs": "https://github.com/netlify/netlify-cms/issues",
"license": "MIT",
"main": "dist/index",
"keywords": [
"netlify",
"netlify-cms",
"backend"
],
"sideEffects": false,
"scripts": {
"build": "webpack",
"prestart": "yarn build",
"start": "node dist/index.js",
"develop": "nodemon --watch 'src/**/*.ts' --ignore 'src/**/*.spec.ts' --exec 'ts-node' --files src/index.ts",
"test": "jest",
"test:watch": "yarn test --watch",
"test:coverage": "yarn test --coverage"
},
"dependencies": {
"@hapi/joi": "^17.0.2",
"cors": "^2.8.5",
"dotenv": "^8.2.0",
"express": "^4.17.1",
"joi": "^14.3.1",
"morgan": "^1.9.1",
"netlify-cms-lib-util": "^2.3.0",
"simple-git": "^1.129.0"
},
"devDependencies": {
"@types/cors": "^2.8.6",
"@types/express": "^4.17.2",
"@types/hapi__joi": "^16.0.6",
"@types/jest": "^24.9.0",
"@types/morgan": "^1.7.37",
"@types/node": "^13.1.7",
"@types/vfile-message": "^2.0.0",
"jest": "^24.9.0",
"nodemon": "^2.0.2",
"ts-jest": "^24.3.0",
"ts-loader": "^6.2.1",
"ts-node": "^8.6.2",
"tsconfig-paths-webpack-plugin": "^3.2.0",
"webpack": "^4.41.5",
"webpack-node-externals": "^1.7.2"
},
"engines": {
"node": ">=v10"
}
}

View File

@ -0,0 +1,4 @@
// Minimal structural type for the subset of the localForage API used here:
// an async key/value store with typed get/set.
type LocalForage = {
  getItem: <T>(key: string) => Promise<T>;
  setItem: <T>(key: string, value: T) => Promise<void>;
};

View File

@ -0,0 +1,25 @@
// Load .env so process.env.PORT (read below) can be configured locally.
// NOTE(review): ES imports are hoisted above this require after compilation —
// confirm none of the imported modules read env vars at load time.
require('dotenv').config();
import express from 'express';
import morgan from 'morgan';
import cors from 'cors';
import { registerMiddleware as registerLocalGit } from './middlewares/localGit';

const app = express();
const port = process.env.PORT || 8081;

(async () => {
  // Request logging, permissive CORS (the CMS runs on a different origin)
  // and JSON body parsing for the API endpoint.
  app.use(morgan('combined'));
  app.use(cors());
  app.use(express.json());

  try {
    await registerLocalGit(app);
  } catch (e) {
    // Fail fast if the git middleware can't start (e.g. bad repo directory).
    console.error(e.message);
    process.exit(1);
  }

  return app.listen(port, () => {
    // This exact banner is watched for by tooling to detect server startup.
    console.log(`Netlify CMS Proxy Server listening on port ${port}`);
  });
})();

View File

@ -0,0 +1,516 @@
import { defaultSchema, joi } from '.';
import express from 'express';
import Joi from '@hapi/joi';
// Assert that a joi validation result failed with exactly one error whose
// message equals `expectedMessage`.
const assetFailure = (result: Joi.ValidationResult, expectedMessage: string) => {
  const { error } = result;
  expect(error).not.toBeNull();
  expect(error.details).toHaveLength(1);
  const message = error.details.map(({ message }) => message)[0];
  expect(message).toBe(expectedMessage);
};

// Minimal params object shared by the action schemas under test
// (params.branch is required by every branch-scoped action).
const defaultParams = {
  branch: 'master',
};
describe('defaultSchema', () => {
it('should fail on unsupported body', () => {
const schema = defaultSchema();
assetFailure(schema.validate({}), '"action" is required');
});
it('should fail on unsupported action', () => {
const schema = defaultSchema();
assetFailure(
schema.validate({ action: 'unknown', params: {} }),
'"action" must be one of [info, entriesByFolder, entriesByFiles, getEntry, unpublishedEntries, unpublishedEntry, deleteUnpublishedEntry, persistEntry, updateUnpublishedEntryStatus, publishUnpublishedEntry, getMedia, getMediaFile, persistMedia, deleteFile, getDeployPreview]',
);
});
describe('info', () => {
it('should pass with no params', () => {
const schema = defaultSchema();
const { error } = schema.validate({
action: 'info',
});
expect(error).toBeUndefined();
});
});
describe('entriesByFolder', () => {
it('should fail on invalid params', () => {
const schema = defaultSchema();
assetFailure(
schema.validate({ action: 'entriesByFolder', params: { ...defaultParams } }),
'"params.folder" is required',
);
assetFailure(
schema.validate({
action: 'entriesByFolder',
params: { ...defaultParams, folder: 'folder' },
}),
'"params.extension" is required',
);
assetFailure(
schema.validate({
action: 'entriesByFolder',
params: { ...defaultParams, folder: 'folder', extension: 'md' },
}),
'"params.depth" is required',
);
});
it('should pass on valid params', () => {
const schema = defaultSchema();
const { error } = schema.validate({
action: 'entriesByFolder',
params: { ...defaultParams, folder: 'folder', extension: 'md', depth: 1 },
});
expect(error).toBeUndefined();
});
});
describe('entriesByFiles', () => {
it('should fail on invalid params', () => {
const schema = defaultSchema();
assetFailure(
schema.validate({ action: 'entriesByFiles', params: { ...defaultParams } }),
'"params.files" is required',
);
assetFailure(
schema.validate({ action: 'entriesByFiles', params: { ...defaultParams, files: {} } }),
'"params.files" must be an array',
);
assetFailure(
schema.validate({
action: 'entriesByFiles',
params: { ...defaultParams, files: [{ id: 'id' }] },
}),
'"params.files[0].path" is required',
);
});
it('should pass on valid params', () => {
const schema = defaultSchema();
const { error } = schema.validate({
action: 'entriesByFiles',
params: { ...defaultParams, files: [{ path: 'path' }] },
});
expect(error).toBeUndefined();
});
});
describe('getEntry', () => {
it('should fail on invalid params', () => {
const schema = defaultSchema();
assetFailure(
schema.validate({ action: 'getEntry', params: { ...defaultParams } }),
'"params.path" is required',
);
assetFailure(
schema.validate({ action: 'getEntry', params: { ...defaultParams, path: 1 } }),
'"params.path" must be a string',
);
});
it('should pass on valid params', () => {
const schema = defaultSchema();
const { error } = schema.validate({
action: 'getEntry',
params: { ...defaultParams, path: 'path' },
});
expect(error).toBeUndefined();
});
});
describe('unpublishedEntries', () => {
it('should fail on invalid params', () => {
const schema = defaultSchema();
assetFailure(
schema.validate({ action: 'unpublishedEntries', params: {} }),
'"params.branch" is required',
);
});
it('should pass on valid params', () => {
const schema = defaultSchema();
const { error } = schema.validate({
action: 'unpublishedEntries',
params: { ...defaultParams, branch: 'master' },
});
expect(error).toBeUndefined();
});
});
describe('unpublishedEntry', () => {
it('should fail on invalid params', () => {
const schema = defaultSchema();
assetFailure(
schema.validate({ action: 'unpublishedEntry', params: { ...defaultParams } }),
'"params.collection" is required',
);
assetFailure(
schema.validate({
action: 'unpublishedEntry',
params: { ...defaultParams, collection: 'collection' },
}),
'"params.slug" is required',
);
assetFailure(
schema.validate({
action: 'unpublishedEntry',
params: { ...defaultParams, collection: 'collection', slug: 1 },
}),
'"params.slug" must be a string',
);
});
it('should pass on valid params', () => {
const schema = defaultSchema();
const { error } = schema.validate({
action: 'unpublishedEntry',
params: { ...defaultParams, collection: 'collection', slug: 'slug' },
});
expect(error).toBeUndefined();
});
});
// deleteUnpublishedEntry requires both collection and slug; slug must be a string.
describe('deleteUnpublishedEntry', () => {
  it('should fail on invalid params', () => {
    const schema = defaultSchema();
    assetFailure(
      schema.validate({ action: 'deleteUnpublishedEntry', params: { ...defaultParams } }),
      '"params.collection" is required',
    );
    assetFailure(
      schema.validate({
        action: 'deleteUnpublishedEntry',
        params: { ...defaultParams, collection: 'collection' },
      }),
      '"params.slug" is required',
    );
    assetFailure(
      schema.validate({
        action: 'deleteUnpublishedEntry',
        params: { ...defaultParams, collection: 'collection', slug: 1 },
      }),
      '"params.slug" must be a string',
    );
  });
  it('should pass on valid params', () => {
    const schema = defaultSchema();
    const { error } = schema.validate({
      action: 'deleteUnpublishedEntry',
      params: { ...defaultParams, collection: 'collection', slug: 'slug' },
    });
    expect(error).toBeUndefined();
  });
});
// persistEntry requires entry, assets and options (with a commitMessage).
describe('persistEntry', () => {
  it('should fail on invalid params', () => {
    const schema = defaultSchema();
    assetFailure(
      schema.validate({ action: 'persistEntry', params: { ...defaultParams } }),
      '"params.entry" is required',
    );
    assetFailure(
      schema.validate({
        action: 'persistEntry',
        params: { ...defaultParams, entry: { slug: 'slug', path: 'path', raw: 'content' } },
      }),
      '"params.assets" is required',
    );
    assetFailure(
      schema.validate({
        action: 'persistEntry',
        params: {
          ...defaultParams,
          entry: { slug: 'slug', path: 'path', raw: 'content' },
          assets: [],
        },
      }),
      '"params.options" is required',
    );
    assetFailure(
      schema.validate({
        action: 'persistEntry',
        params: {
          ...defaultParams,
          entry: { slug: 'slug', path: 'path', raw: 'content' },
          assets: [],
          options: {},
        },
      }),
      '"params.options.commitMessage" is required',
    );
  });
  it('should pass on valid params', () => {
    const schema = defaultSchema();
    const { error } = schema.validate({
      action: 'persistEntry',
      params: {
        ...defaultParams,
        entry: { slug: 'slug', path: 'path', raw: 'content' },
        assets: [{ path: 'path', content: 'content', encoding: 'base64' }],
        options: {
          commitMessage: 'commitMessage',
          useWorkflow: true,
          status: 'draft',
        },
      },
    });
    expect(error).toBeUndefined();
  });
});
// updateUnpublishedEntryStatus requires collection, slug and the new status.
describe('updateUnpublishedEntryStatus', () => {
  it('should fail on invalid params', () => {
    const schema = defaultSchema();
    assetFailure(
      schema.validate({ action: 'updateUnpublishedEntryStatus', params: { ...defaultParams } }),
      '"params.collection" is required',
    );
    assetFailure(
      schema.validate({
        action: 'updateUnpublishedEntryStatus',
        params: { ...defaultParams, collection: 'collection' },
      }),
      '"params.slug" is required',
    );
    assetFailure(
      schema.validate({
        action: 'updateUnpublishedEntryStatus',
        params: { ...defaultParams, collection: 'collection', slug: 'slug' },
      }),
      '"params.newStatus" is required',
    );
  });
  it('should pass on valid params', () => {
    const schema = defaultSchema();
    const { error } = schema.validate({
      action: 'updateUnpublishedEntryStatus',
      params: { ...defaultParams, collection: 'collection', slug: 'slug', newStatus: 'draft' },
    });
    expect(error).toBeUndefined();
  });
});
// publishUnpublishedEntry requires collection and slug.
describe('publishUnpublishedEntry', () => {
  it('should fail on invalid params', () => {
    const schema = defaultSchema();
    assetFailure(
      schema.validate({ action: 'publishUnpublishedEntry', params: { ...defaultParams } }),
      '"params.collection" is required',
    );
    assetFailure(
      schema.validate({
        action: 'publishUnpublishedEntry',
        params: { ...defaultParams, collection: 'collection' },
      }),
      '"params.slug" is required',
    );
  });
  it('should pass on valid params', () => {
    const schema = defaultSchema();
    const { error } = schema.validate({
      action: 'publishUnpublishedEntry',
      params: { ...defaultParams, collection: 'collection', slug: 'slug' },
    });
    expect(error).toBeUndefined();
  });
});
// getMedia requires the media folder to list.
describe('getMedia', () => {
  it('should fail on invalid params', () => {
    const schema = defaultSchema();
    assetFailure(
      schema.validate({ action: 'getMedia', params: { ...defaultParams } }),
      '"params.mediaFolder" is required',
    );
  });
  it('should pass on valid params', () => {
    const schema = defaultSchema();
    const { error } = schema.validate({
      action: 'getMedia',
      params: { ...defaultParams, mediaFolder: 'src/static/images' },
    });
    expect(error).toBeUndefined();
  });
});
// getMediaFile requires the path of the file to read.
describe('getMediaFile', () => {
  it('should fail on invalid params', () => {
    const schema = defaultSchema();
    assetFailure(
      schema.validate({ action: 'getMediaFile', params: { ...defaultParams } }),
      '"params.path" is required',
    );
  });
  it('should pass on valid params', () => {
    const schema = defaultSchema();
    const { error } = schema.validate({
      action: 'getMediaFile',
      params: { ...defaultParams, path: 'src/static/images/image.png' },
    });
    expect(error).toBeUndefined();
  });
});
// persistMedia requires a complete asset (path + content).
describe('persistMedia', () => {
  it('should fail on invalid params', () => {
    const schema = defaultSchema();
    assetFailure(
      schema.validate({ action: 'persistMedia', params: { ...defaultParams } }),
      '"params.asset" is required',
    );
    assetFailure(
      schema.validate({
        action: 'persistMedia',
        params: { ...defaultParams, asset: { path: 'path' } },
      }),
      '"params.asset.content" is required',
    );
  });
  it('should pass on valid params', () => {
    const schema = defaultSchema();
    const { error } = schema.validate({
      action: 'persistMedia',
      params: {
        ...defaultParams,
        asset: { path: 'path', content: 'content', encoding: 'base64' },
        options: { commitMessage: 'commitMessage' },
      },
    });
    expect(error).toBeUndefined();
  });
});
// deleteFile requires the path of the file to delete.
describe('deleteFile', () => {
  it('should fail on invalid params', () => {
    const schema = defaultSchema();
    assetFailure(
      schema.validate({ action: 'deleteFile', params: { ...defaultParams } }),
      '"params.path" is required',
    );
  });
  it('should pass on valid params', () => {
    const schema = defaultSchema();
    const { error } = schema.validate({
      action: 'deleteFile',
      params: {
        ...defaultParams,
        path: 'src/static/images/image.png',
        options: { commitMessage: 'commitMessage' },
      },
    });
    expect(error).toBeUndefined();
  });
});
// getDeployPreview requires collection and slug.
describe('getDeployPreview', () => {
  it('should fail on invalid params', () => {
    const schema = defaultSchema();
    assetFailure(
      schema.validate({ action: 'getDeployPreview', params: { ...defaultParams } }),
      '"params.collection" is required',
    );
    assetFailure(
      schema.validate({
        action: 'getDeployPreview',
        params: { ...defaultParams, collection: 'collection' },
      }),
      '"params.slug" is required',
    );
  });
  it('should pass on valid params', () => {
    const schema = defaultSchema();
    const { error } = schema.validate({
      action: 'getDeployPreview',
      params: { ...defaultParams, collection: 'collection', slug: 'slug' },
    });
    expect(error).toBeUndefined();
  });
});
});
// Verifies the `joi` express middleware: a valid body calls next(), an
// invalid one gets a 422 with the joined Joi error message.
describe('joi', () => {
  it('should call next on valid schema', () => {
    const next = jest.fn();
    const req = {
      body: {
        action: 'entriesByFolder',
        params: { branch: 'master', folder: 'folder', extension: 'md', depth: 1 },
      },
    } as express.Request;
    // res is never touched on the success path, so an empty stub suffices.
    const res: express.Response = {} as express.Response;
    joi(defaultSchema())(req, res, next);
    expect(next).toHaveBeenCalledTimes(1);
  });
  it('should send error on invalid schema', () => {
    const next = jest.fn();
    const req = {
      body: {
        action: 'entriesByFolder',
      },
    } as express.Request;
    // Stub res.status(...).json(...) chain to capture the error response.
    const json = jest.fn();
    const status = jest.fn(() => ({ json }));
    const res: express.Response = ({ status } as unknown) as express.Response;
    joi(defaultSchema())(req, res, next);
    expect(next).toHaveBeenCalledTimes(0);
    expect(status).toHaveBeenCalledTimes(1);
    expect(json).toHaveBeenCalledTimes(1);
    expect(status).toHaveBeenCalledWith(422);
    expect(json).toHaveBeenCalledWith({ error: '"params" is required' });
  });
});

View File

@ -0,0 +1,202 @@
import express from 'express';
import Joi from '@hapi/joi';
// Whitelist of RPC actions the proxy accepts; any other action is rejected
// by the schema below.
const allowedActions = [
  'info',
  'entriesByFolder',
  'entriesByFiles',
  'getEntry',
  'unpublishedEntries',
  'unpublishedEntry',
  'deleteUnpublishedEntry',
  'persistEntry',
  'updateUnpublishedEntryStatus',
  'publishUnpublishedEntry',
  'getMedia',
  'getMediaFile',
  'persistMedia',
  'deleteFile',
  'getDeployPreview',
];
// Shared Joi building blocks reused across the per-action param schemas.
const requiredString = Joi.string().required();
const requiredNumber = Joi.number().required();
const requiredBool = Joi.bool().required();
// Aliases so the per-action schemas below read naturally.
const collection = requiredString;
const slug = requiredString;
/**
 * Builds the Joi schema for the proxy's request body.
 *
 * Body shape is `{ action, params }`: `action` must be one of
 * `allowedActions`, and `params` is validated per action via `Joi.when`.
 * The `path` validator is injectable so a backend can supply a stricter
 * rule (e.g. one rejecting paths that escape the repository — see the
 * localGit backend's getSchema).
 */
export const defaultSchema = ({ path = requiredString } = {}) => {
  // Every action (except 'info') requires the target branch.
  const defaultParams = Joi.object({
    branch: requiredString,
  });
  // A serialized media/entry asset; only base64 content is accepted.
  const asset = Joi.object({
    path,
    content: requiredString,
    encoding: requiredString.valid('base64'),
  });
  const params = Joi.when('action', {
    switch: [
      {
        is: 'info',
        // 'info' takes no params.
        then: Joi.allow(),
      },
      {
        is: 'entriesByFolder',
        then: defaultParams
          .keys({
            folder: path,
            extension: requiredString,
            depth: requiredNumber,
          })
          .required(),
      },
      {
        is: 'entriesByFiles',
        then: defaultParams.keys({
          files: Joi.array()
            .items(Joi.object({ path }))
            .required(),
        }),
      },
      {
        is: 'getEntry',
        then: defaultParams
          .keys({
            path,
          })
          .required(),
      },
      {
        is: 'unpublishedEntries',
        then: defaultParams.keys({ branch: requiredString }).required(),
      },
      {
        is: 'unpublishedEntry',
        then: defaultParams
          .keys({
            collection,
            slug,
          })
          .required(),
      },
      {
        is: 'deleteUnpublishedEntry',
        then: defaultParams
          .keys({
            collection,
            slug,
          })
          .required(),
      },
      {
        is: 'persistEntry',
        then: defaultParams
          .keys({
            entry: Joi.object({ slug: requiredString, path, raw: requiredString }).required(),
            assets: Joi.array()
              .items(asset)
              .required(),
            options: Joi.object({
              collectionName: Joi.string(),
              commitMessage: requiredString,
              useWorkflow: requiredBool,
              status: requiredString,
            }).required(),
          })
          .required(),
      },
      {
        is: 'updateUnpublishedEntryStatus',
        then: defaultParams
          .keys({
            collection,
            slug,
            newStatus: requiredString,
          })
          .required(),
      },
      {
        is: 'publishUnpublishedEntry',
        then: defaultParams
          .keys({
            collection,
            slug,
          })
          .required(),
      },
      {
        is: 'getMedia',
        then: defaultParams
          .keys({
            mediaFolder: path,
          })
          .required(),
      },
      {
        is: 'getMediaFile',
        then: defaultParams
          .keys({
            path,
          })
          .required(),
      },
      {
        is: 'persistMedia',
        then: defaultParams
          .keys({
            asset: asset.required(),
            options: Joi.object({
              commitMessage: requiredString,
            }).required(),
          })
          .required(),
      },
      {
        is: 'deleteFile',
        then: defaultParams
          .keys({
            path,
            options: Joi.object({
              commitMessage: requiredString,
            }).required(),
          })
          .required(),
      },
      {
        is: 'getDeployPreview',
        then: defaultParams
          .keys({
            collection,
            slug,
          })
          .required(),
      },
    ],
    // Unknown actions never get params validated — they fail on `action`.
    otherwise: Joi.forbidden(),
  });
  return Joi.object({
    action: Joi.valid(...allowedActions).required(),
    params,
  });
};
/**
 * Express middleware that validates `req.body` against `schema`.
 * Unknown keys are tolerated; on failure it responds 422 with all Joi
 * error messages joined by commas, otherwise it delegates to `next()`.
 */
export const joi = (schema: Joi.Schema) => (
  req: express.Request,
  res: express.Response,
  next: express.NextFunction,
) => {
  const { error } = schema.validate(req.body, { allowUnknown: true });
  if (error == null) {
    next();
    return;
  }
  const message = error.details.map(detail => detail.message).join(',');
  res.status(422).json({ error: message });
};

View File

@ -0,0 +1,151 @@
/* eslint-disable @typescript-eslint/no-var-requires */
import Joi from '@hapi/joi';
import express from 'express';
import { validateRepo, getSchema, localGitMiddleware } from '.';
jest.mock('netlify-cms-lib-util', () => jest.fn());
jest.mock('simple-git/promise');
// Assert that a Joi validation result failed with exactly one error whose
// message equals `expectedMessage`.
const assetFailure = (result: Joi.ValidationResult, expectedMessage: string) => {
  const { error } = result;
  expect(error).not.toBeNull();
  expect(error.details).toHaveLength(1);
  const [firstMessage] = error.details.map(({ message }) => message);
  expect(firstMessage).toBe(expectedMessage);
};
// Params every action requires; spread into each test's params object.
const defaultParams = {
  branch: 'master',
};
describe('localGitMiddleware', () => {
  // simple-git is mocked (see jest.mock above); return a chainable stub
  // so `simpleGit(path).silent(...)` yields the same mock object.
  const simpleGit = require('simple-git/promise');
  const git = {
    checkIsRepo: jest.fn(),
    silent: jest.fn(),
    branchLocal: jest.fn(),
    checkout: jest.fn(),
  };
  git.silent.mockReturnValue(git);
  simpleGit.mockReturnValue(git);
  beforeEach(() => {
    jest.clearAllMocks();
  });
  describe('validateRepo', () => {
    it('should throw on non valid git repo', async () => {
      git.checkIsRepo.mockResolvedValue(false);
      await expect(validateRepo({ repoPath: '/Users/user/code/repo' })).rejects.toEqual(
        new Error('/Users/user/code/repo is not a valid git repository'),
      );
    });
    it('should not throw on valid git repo', async () => {
      git.checkIsRepo.mockResolvedValue(true);
      await expect(validateRepo({ repoPath: '/Users/user/code/repo' })).resolves.toBeUndefined();
    });
  });
  // getSchema augments defaultSchema with a path rule that rejects any
  // value resolving outside the configured repository.
  describe('getSchema', () => {
    it('should throw on path traversal', () => {
      const schema = getSchema({ repoPath: '/Users/user/documents/code/repo' });
      assetFailure(
        schema.validate({
          action: 'getEntry',
          params: { ...defaultParams, path: '../' },
        }),
        '"params.path" must resolve to a path under the configured repository',
      );
    });
    it('should not throw on valid path', () => {
      const schema = getSchema({ repoPath: '/Users/user/documents/code/repo' });
      const { error } = schema.validate({
        action: 'getEntry',
        params: { ...defaultParams, path: 'src/content/posts/title.md' },
      });
      expect(error).toBeUndefined();
    });
    it('should throw on folder traversal', () => {
      const schema = getSchema({ repoPath: '/Users/user/documents/code/repo' });
      assetFailure(
        schema.validate({
          action: 'entriesByFolder',
          params: { ...defaultParams, folder: '../', extension: 'md', depth: 1 },
        }),
        '"params.folder" must resolve to a path under the configured repository',
      );
    });
    it('should not throw on valid folder', () => {
      const schema = getSchema({ repoPath: '/Users/user/documents/code/repo' });
      const { error } = schema.validate({
        action: 'entriesByFolder',
        params: { ...defaultParams, folder: 'src/posts', extension: 'md', depth: 1 },
      });
      expect(error).toBeUndefined();
    });
    it('should throw on media folder traversal', () => {
      const schema = getSchema({ repoPath: '/Users/user/documents/code/repo' });
      assetFailure(
        schema.validate({
          action: 'getMedia',
          params: { ...defaultParams, mediaFolder: '../' },
        }),
        '"params.mediaFolder" must resolve to a path under the configured repository',
      );
    });
    // NOTE(review): duplicate test title with the folder case above —
    // consider renaming to 'should not throw on valid media folder'.
    it('should not throw on valid folder', () => {
      const schema = getSchema({ repoPath: '/Users/user/documents/code/repo' });
      const { error } = schema.validate({
        action: 'getMedia',
        params: { ...defaultParams, mediaFolder: 'static/images' },
      });
      expect(error).toBeUndefined();
    });
  });
  describe('localGitMiddleware', () => {
    // Capture res.status(...).json(...) to assert on the error response.
    const json = jest.fn();
    const status = jest.fn(() => ({ json }));
    const res: express.Response = ({ status } as unknown) as express.Response;
    const repoPath = '.';
    it("should return error when default branch doesn't exist", async () => {
      git.branchLocal.mockResolvedValue({ all: ['master'] });
      const req = {
        body: {
          action: 'getMedia',
          params: {
            mediaFolder: 'mediaFolder',
            branch: 'develop',
          },
        },
      } as express.Request;
      await localGitMiddleware({ repoPath })(req, res);
      expect(status).toHaveBeenCalledTimes(1);
      expect(status).toHaveBeenCalledWith(422);
      expect(json).toHaveBeenCalledTimes(1);
      expect(json).toHaveBeenCalledWith({ error: "Default branch 'develop' doesn't exist" });
    });
  });
});

View File

@ -0,0 +1,478 @@
import express from 'express';
import path from 'path';
import crypto from 'crypto';
import { promises as fs } from 'fs';
import Joi from '@hapi/joi';
import {
parseContentKey,
branchFromContentKey,
generateContentKey,
contentKeyFromBranch,
CMS_BRANCH_PREFIX,
statusToLabel,
labelToStatus,
} from 'netlify-cms-lib-util/src/API';
import { defaultSchema, joi } from '../joi';
import {
EntriesByFolderParams,
EntriesByFilesParams,
GetEntryParams,
DefaultParams,
UnpublishedEntryParams,
PersistEntryParams,
GetMediaParams,
Asset,
PublishUnpublishedEntryParams,
PersistMediaParams,
DeleteFileParams,
UpdateUnpublishedEntryStatusParams,
Entry,
GetMediaFileParams,
} from '../types';
// eslint-disable-next-line import/default
import simpleGit from 'simple-git/promise';
// Hex-encoded SHA-256 digest of a buffer, used as a stable content id.
const sha256 = (buffer: Buffer) => {
  const hash = crypto.createHash('sha256');
  hash.update(buffer);
  return hash.digest('hex');
};
// Write `content` to `filePath`, creating missing parent directories first.
const writeFile = async (filePath: string, content: Buffer | string) => {
  const parentDir = path.dirname(filePath);
  await fs.mkdir(parentDir, { recursive: true });
  await fs.writeFile(filePath, content);
};
// Stage `files` and commit them with `commitMessage`, skipping git hooks
// and GPG signing (proxy commits are local, machine-generated commits).
const commit = async (git: simpleGit.SimpleGit, commitMessage: string, files: string[]) => {
  await git.add(files);
  const commitOptions = {
    '--no-verify': true,
    '--no-gpg-sign': true,
  };
  await git.commit(commitMessage, files, commitOptions);
};
// Name of the branch currently checked out in the work tree.
const getCurrentBranch = async (git: simpleGit.SimpleGit) => {
  const summary = await git.branchLocal();
  return summary.current;
};
const runOnBranch = async <T>(git: simpleGit.SimpleGit, branch: string, func: () => Promise<T>) => {
const currentBranch = await getCurrentBranch(git);
try {
if (currentBranch !== branch) {
await git.checkout(branch);
}
const result = await func();
return result;
} finally {
await git.checkout(currentBranch);
}
};
// Recursively list files under `dir` whose names end with `extension`,
// descending at most `depth` directory levels. Unreadable or missing
// directories yield an empty list instead of throwing.
const listFiles = async (dir: string, extension: string, depth: number): Promise<string[]> => {
  if (depth <= 0) {
    return [];
  }
  try {
    const entries = await fs.readdir(dir, { withFileTypes: true });
    const nested = await Promise.all(
      entries.map(entry => {
        const fullPath = path.join(dir, entry.name);
        if (entry.isDirectory()) {
          return listFiles(fullPath, extension, depth - 1);
        }
        return fullPath.endsWith(extension) ? [fullPath] : [];
      }),
    );
    return ([] as string[]).concat(...nested);
  } catch (e) {
    return [];
  }
};
// List files under `folder` (relative to the repo root) with the given
// extension, up to `depth` levels deep, returning repo-relative paths.
const listRepoFiles = async (
  repoPath: string,
  folder: string,
  extension: string,
  depth: number,
) => {
  const files = await listFiles(path.join(repoPath, folder), extension, depth);
  // Strip the repo prefix plus its trailing separator to get relative paths.
  // (slice replaces the deprecated String.prototype.substr.)
  return files.map(f => f.slice(repoPath.length + 1));
};
// Read each repo-relative file and pair its text content with a sha256
// content id. Unreadable files yield `data: null` / `id: null` rather
// than failing the whole batch.
const entriesFromFiles = async (repoPath: string, files: string[]) => {
  return Promise.all(
    files.map(async file => {
      try {
        const buffer = await fs.readFile(path.join(repoPath, file));
        return {
          data: buffer.toString(),
          file: { path: file, id: sha256(buffer) },
        };
      } catch (e) {
        return { data: null, file: { path: file, id: null } };
      }
    }),
  );
};
// Git config key under which a branch's CMS status label is stored.
const branchDescription = (branch: string) => {
  return `branch.${branch}.description`;
};
// Derive unpublished-entry metadata from the files changed on a CMS branch:
// the changed file containing the slug is the entry itself, every other
// changed file is treated as a media file. Status comes from the branch's
// git description label, if set.
// (Local renamed from `path` to avoid shadowing the `path` module import.)
const getEntryDataFromDiff = async (git: simpleGit.SimpleGit, branch: string, diff: string[]) => {
  const contentKey = contentKeyFromBranch(branch);
  const { collection, slug } = parseContentKey(contentKey);
  const entryPath = diff.find(file => file.includes(slug)) as string;
  const mediaFiles = diff.filter(file => file !== entryPath);
  const label = await git.raw(['config', branchDescription(branch)]);
  const status = label && labelToStatus(label.trim());
  return {
    slug,
    metaData: {
      branch,
      collection,
      objects: { entry: { path: entryPath, mediaFiles } },
      status,
    },
  };
};
// Options shared by the local git handlers.
type Options = {
  // Absolute path to the git repository served by the proxy.
  repoPath: string;
};
// Build unpublished entries for each CMS branch / diff-summary pair.
// Branches are processed serially on purpose: each iteration checks out
// branches in the shared work tree, so this must not be parallelized.
const entriesFromDiffs = async (
  git: simpleGit.SimpleGit,
  branch: string,
  repoPath: string,
  cmsBranches: string[],
  diffs: simpleGit.DiffResult[],
) => {
  const entries = [];
  for (let index = 0; index < diffs.length; index++) {
    const cmsBranch = cmsBranches[index];
    const changedFiles = diffs[index].files.map(f => f.file);
    const data = await getEntryDataFromDiff(git, cmsBranch, changedFiles);
    const entryPath = data.metaData.objects.entry.path;
    const [entry] = await runOnBranch(git, cmsBranch, () =>
      entriesFromFiles(repoPath, [entryPath]),
    );
    // A diff that introduces the entry file ('new file') means the entry
    // is brand new; otherwise it modifies a published entry.
    const rawDiff = await git.diff([branch, cmsBranch, '--', entryPath]);
    entries.push({
      ...data,
      ...entry,
      isModification: !rawDiff.includes('new file'),
    });
  }
  return entries;
};
// Read a repo-relative media file and serialize it (base64) for the client.
const readMediaFile = async (repoPath: string, file: string) => {
  const encoding = 'base64';
  const buffer = await fs.readFile(path.join(repoPath, file));
  return {
    id: sha256(buffer),
    content: buffer.toString(encoding),
    encoding,
    path: file,
    name: path.basename(file),
  };
};
const getEntryMediaFiles = async (
git: simpleGit.SimpleGit,
repoPath: string,
cmsBranch: string,
files: string[],
) => {
const mediaFiles = await runOnBranch(git, cmsBranch, async () => {
const serializedFiles = await Promise.all(files.map(file => readMediaFile(repoPath, file)));
return serializedFiles;
});
return mediaFiles;
};
// Write the entry content and all of its assets to disk, then commit them
// together in a single commit.
const commitEntry = async (
  git: simpleGit.SimpleGit,
  repoPath: string,
  entry: Entry,
  assets: Asset[],
  commitMessage: string,
) => {
  // Persist the entry body.
  await writeFile(path.join(repoPath, entry.path), entry.raw);
  // Persist every asset (decoded from its declared encoding).
  await Promise.all(
    assets.map(asset =>
      writeFile(path.join(repoPath, asset.path), Buffer.from(asset.content, asset.encoding)),
    ),
  );
  // Commit the entry together with its assets.
  const filesToCommit = [entry.path, ...assets.map(asset => asset.path)];
  await commit(git, commitMessage, filesToCommit);
};
// True when `branch` exists as a local branch in the repository.
const isBranchExists = async (git: simpleGit.SimpleGit, branch: string) => {
  const { all } = await git.branchLocal();
  return all.includes(branch);
};
// Throw unless `repoPath` points at a valid git repository.
export const validateRepo = async ({ repoPath }: Options) => {
  const git = simpleGit(repoPath).silent(false);
  const isRepo = await git.checkIsRepo();
  if (isRepo) {
    return;
  }
  throw Error(`${repoPath} is not a valid git repository`);
};
// Builds the proxy request schema with a hardened `path` rule: any path
// param must resolve to a location inside the configured repository.
export const getSchema = ({ repoPath }: Options) => {
  const custom = Joi.extend({
    type: 'path',
    base: Joi.string().required(),
    messages: {
      'path.invalid': '{{#label}} must resolve to a path under the configured repository',
    },
    validate(value, helpers) {
      // path.join normalizes '..' segments, so traversal collapses here.
      const resolvedPath = path.join(repoPath, value);
      // Require the repo path itself or a child of it. Comparing against
      // `repoPath + path.sep` (not a bare prefix) prevents sibling
      // directories such as '/repo-other' from passing a check against
      // '/repo'.
      const isInsideRepo =
        resolvedPath === repoPath || resolvedPath.startsWith(repoPath + path.sep);
      if (!isInsideRepo) {
        return { value, errors: helpers.error('path.invalid') };
      }
    },
  });
  const schema = defaultSchema({ path: custom.path() });
  return schema;
};
/**
 * Express handler that maps CMS proxy actions onto local git operations.
 * Assumes `req.body` was already validated by the joi middleware; any
 * unexpected failure is reported as a generic 500.
 */
export const localGitMiddleware = ({ repoPath }: Options) => {
  const git = simpleGit(repoPath).silent(false);
  return async function(req: express.Request, res: express.Response) {
    try {
      const { body } = req;
      if (body.action === 'info') {
        res.json({ repo: path.basename(repoPath) });
        return;
      }
      // Every remaining action targets a branch that must already exist.
      const { branch } = body.params as DefaultParams;
      const branchExists = await isBranchExists(git, branch);
      if (!branchExists) {
        const message = `Default branch '${branch}' doesn't exist`;
        res.status(422).json({ error: message });
        return;
      }
      switch (body.action) {
        case 'entriesByFolder': {
          const payload = body.params as EntriesByFolderParams;
          const { folder, extension, depth } = payload;
          const entries = await runOnBranch(git, branch, () =>
            listRepoFiles(repoPath, folder, extension, depth).then(files =>
              entriesFromFiles(repoPath, files),
            ),
          );
          res.json(entries);
          break;
        }
        case 'entriesByFiles': {
          const payload = body.params as EntriesByFilesParams;
          // File paths are repo relative and entriesFromFiles joins them
          // with repoPath itself — joining here as well duplicated the
          // repo prefix and broke the file lookups.
          const entries = await runOnBranch(git, branch, () =>
            entriesFromFiles(
              repoPath,
              payload.files.map(file => file.path),
            ),
          );
          res.json(entries);
          break;
        }
        case 'getEntry': {
          const payload = body.params as GetEntryParams;
          const [entry] = await runOnBranch(git, branch, () =>
            entriesFromFiles(repoPath, [payload.path]),
          );
          res.json(entry);
          break;
        }
        case 'unpublishedEntries': {
          // Editorial-workflow entries live on cms/* branches.
          const cmsBranches = await git
            .branchLocal()
            .then(result => result.all.filter(b => b.startsWith(`${CMS_BRANCH_PREFIX}/`)));
          const diffs = await Promise.all(
            cmsBranches.map(cmsBranch => git.diffSummary([branch, cmsBranch])),
          );
          const entries = await entriesFromDiffs(git, branch, repoPath, cmsBranches, diffs);
          res.json(entries);
          break;
        }
        case 'unpublishedEntry': {
          const { collection, slug } = body.params as UnpublishedEntryParams;
          const contentKey = generateContentKey(collection, slug);
          const cmsBranch = branchFromContentKey(contentKey);
          const branchExists = await isBranchExists(git, cmsBranch);
          if (branchExists) {
            const diff = await git.diffSummary([branch, cmsBranch]);
            const [entry] = await entriesFromDiffs(git, branch, repoPath, [cmsBranch], [diff]);
            const mediaFiles = await getEntryMediaFiles(
              git,
              repoPath,
              cmsBranch,
              entry.metaData.objects.entry.mediaFiles,
            );
            res.json({ ...entry, mediaFiles });
          } else {
            return res.status(404).json({ message: 'Not Found' });
          }
          break;
        }
        case 'deleteUnpublishedEntry': {
          const { collection, slug } = body.params as UnpublishedEntryParams;
          const contentKey = generateContentKey(collection, slug);
          const cmsBranch = branchFromContentKey(contentKey);
          const currentBranch = await getCurrentBranch(git);
          // Can't delete the branch that is checked out — switch to the
          // default branch first. Plain checkout, not checkoutLocalBranch
          // (checkout -b), since the default branch already exists.
          if (currentBranch === cmsBranch) {
            await git.checkout(branch);
          }
          await git.branch(['-D', cmsBranch]);
          res.json({ message: `deleted branch: ${cmsBranch}` });
          break;
        }
        case 'persistEntry': {
          const { entry, assets, options } = body.params as PersistEntryParams;
          if (!options.useWorkflow) {
            // Direct publish: commit straight to the default branch.
            // Awaiting here makes failures surface as a 500 instead of an
            // unhandled rejection racing the response.
            await runOnBranch(git, branch, async () => {
              await commitEntry(git, repoPath, entry, assets, options.commitMessage);
            });
          } else {
            // Editorial workflow: commit to a dedicated cms/* branch.
            const slug = entry.slug;
            const collection = options.collectionName as string;
            const contentKey = generateContentKey(collection, slug);
            const cmsBranch = branchFromContentKey(contentKey);
            await runOnBranch(git, branch, async () => {
              const branchExists = await isBranchExists(git, cmsBranch);
              if (branchExists) {
                await git.checkout(cmsBranch);
              } else {
                await git.checkoutLocalBranch(cmsBranch);
              }
              // Keep the CMS branch based on the default branch, skipping
              // hooks and signing (the original duplicated --no-gpg-sign).
              await git.rebase([branch, '--no-verify', '--no-gpg-sign']);
              const diff = await git.diffSummary([branch, cmsBranch]);
              const data = await getEntryDataFromDiff(
                git,
                branch,
                diff.files.map(f => f.file),
              );
              // delete media files that have been removed from the entry
              const toDelete = data.metaData.objects.entry.mediaFiles.filter(
                f => !assets.map(a => a.path).includes(f),
              );
              await Promise.all(toDelete.map(f => fs.unlink(path.join(repoPath, f))));
              await commitEntry(git, repoPath, entry, assets, options.commitMessage);
              // add status for new entries
              if (!data.metaData.status) {
                const description = statusToLabel(options.status);
                await git.addConfig(branchDescription(cmsBranch), description);
              }
              // set path for new entries
              if (!data.metaData.objects.entry.path) {
                data.metaData.objects.entry.path = entry.path;
              }
            });
          }
          res.json({ message: 'entry persisted' });
          break;
        }
        case 'updateUnpublishedEntryStatus': {
          const { collection, slug, newStatus } = body.params as UpdateUnpublishedEntryStatusParams;
          const contentKey = generateContentKey(collection, slug);
          const cmsBranch = branchFromContentKey(contentKey);
          const description = statusToLabel(newStatus);
          await git.addConfig(branchDescription(cmsBranch), description);
          // Report the branch whose description was actually updated
          // (the CMS branch, not the default branch).
          res.json({ message: `${cmsBranch} description was updated to ${description}` });
          break;
        }
        case 'publishUnpublishedEntry': {
          const { collection, slug } = body.params as PublishUnpublishedEntryParams;
          const contentKey = generateContentKey(collection, slug);
          const cmsBranch = branchFromContentKey(contentKey);
          await git.mergeFromTo(cmsBranch, branch);
          await git.deleteLocalBranch(cmsBranch);
          res.json({ message: `branch ${cmsBranch} merged to ${branch}` });
          break;
        }
        case 'getMedia': {
          const { mediaFolder } = body.params as GetMediaParams;
          const mediaFiles = await runOnBranch(git, branch, async () => {
            // Media folders are listed non-recursively (depth 1).
            const files = await listRepoFiles(repoPath, mediaFolder, '', 1);
            return Promise.all(files.map(file => readMediaFile(repoPath, file)));
          });
          res.json(mediaFiles);
          break;
        }
        case 'getMediaFile': {
          // Renamed to avoid shadowing the `path` module import.
          const { path: mediaPath } = body.params as GetMediaFileParams;
          const mediaFile = await runOnBranch(git, branch, () =>
            readMediaFile(repoPath, mediaPath),
          );
          res.json(mediaFile);
          break;
        }
        case 'persistMedia': {
          const {
            asset,
            options: { commitMessage },
          } = body.params as PersistMediaParams;
          const file = await runOnBranch(git, branch, async () => {
            await writeFile(
              path.join(repoPath, asset.path),
              Buffer.from(asset.content, asset.encoding),
            );
            await commit(git, commitMessage, [asset.path]);
            return readMediaFile(repoPath, asset.path);
          });
          res.json(file);
          break;
        }
        case 'deleteFile': {
          const {
            path: filePath,
            options: { commitMessage },
          } = body.params as DeleteFileParams;
          await runOnBranch(git, branch, async () => {
            await fs.unlink(path.join(repoPath, filePath));
            await commit(git, commitMessage, [filePath]);
          });
          res.json({ message: `deleted file ${filePath}` });
          break;
        }
        case 'getDeployPreview': {
          // Deploy previews are not supported by the local git backend.
          res.json(null);
          break;
        }
        default: {
          const message = `Unknown action ${body.action}`;
          res.status(422).json({ error: message });
          break;
        }
      }
    } catch (e) {
      console.error(`Error handling ${JSON.stringify(req.body)}: ${e.message}`);
      res.status(500).json({ error: 'Unknown error' });
    }
  };
};
export const registerMiddleware = async (app: express.Express) => {
const repoPath = path.resolve(process.env.GIT_REPO_DIRECTORY || process.cwd());
await validateRepo({ repoPath });
app.post('/api/v1', joi(getSchema({ repoPath })));
app.post('/api/v1', localGitMiddleware({ repoPath }));
console.log(`Netlify CMS Proxy Server configured with ${repoPath}`);
};

View File

@ -0,0 +1,70 @@
// Params common to every proxy action: the branch to operate on.
export type DefaultParams = {
  branch: string;
};
// List entries in a folder, filtered by extension, up to `depth` levels.
export type EntriesByFolderParams = {
  folder: string;
  extension: string;
  depth: 1;
};
// Read a specific set of entry files by repo-relative path.
export type EntriesByFilesParams = {
  files: { path: string }[];
};
// Read a single entry by repo-relative path.
export type GetEntryParams = {
  path: string;
};
// Identify an unpublished (editorial workflow) entry.
export type UnpublishedEntryParams = {
  collection: string;
  slug: string;
};
// Move an unpublished entry to a new workflow status (e.g. draft/review).
export type UpdateUnpublishedEntryStatusParams = {
  collection: string;
  slug: string;
  newStatus: string;
};
// Merge an unpublished entry's branch into the default branch.
export type PublishUnpublishedEntryParams = {
  collection: string;
  slug: string;
};
// An entry's content and where to write it.
export type Entry = { slug: string; path: string; raw: string };
// A media/entry asset; content is always base64 encoded.
export type Asset = { path: string; content: string; encoding: 'base64' };
// Save an entry (and its assets), directly or via the editorial workflow.
export type PersistEntryParams = {
  entry: Entry;
  assets: Asset[];
  options: {
    collectionName?: string;
    commitMessage: string;
    useWorkflow: boolean;
    status: string;
  };
};
// List media files in a folder.
export type GetMediaParams = {
  mediaFolder: string;
};
// Read a single media file by repo-relative path.
export type GetMediaFileParams = {
  path: string;
};
// Save a single media asset with a commit message.
export type PersistMediaParams = {
  asset: Asset;
  options: {
    commitMessage: string;
  };
};
// Delete a file by repo-relative path with a commit message.
export type DeleteFileParams = {
  path: string;
  options: {
    commitMessage: string;
  };
};

View File

@ -0,0 +1,19 @@
{
"compilerOptions": {
"module": "commonjs",
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"target": "ES2018",
"moduleResolution": "node",
"outDir": "dist",
"baseUrl": ".",
"allowJs": true,
"strict": true,
"noImplicitAny": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"sourceMap": true
},
"include": ["src/**/*"],
"exclude": ["src/**/*spec.ts"]
}

View File

@ -0,0 +1,36 @@
const path = require('path');
const nodeExternals = require('webpack-node-externals');
const TsconfigPathsPlugin = require('tsconfig-paths-webpack-plugin');
const { NODE_ENV = 'production' } = process.env;
// Packages bundled into the output instead of left as externals; the
// monorepo's lib-util is consumed from source.
const allowList = [/^netlify-cms-lib-util/];
module.exports = {
  entry: path.join('src', 'index.ts'),
  mode: NODE_ENV,
  target: 'node',
  devtool: 'source-map',
  output: {
    path: path.resolve(__dirname, 'dist'),
    filename: 'index.js',
  },
  resolve: {
    // Honor tsconfig path mappings when resolving imports.
    plugins: [new TsconfigPathsPlugin()],
    extensions: ['.ts', '.js'],
  },
  module: {
    rules: [
      {
        test: /\.ts$/,
        use: ['ts-loader'],
      },
    ],
  },
  // Exclude node_modules from the bundle twice: once for the package's own
  // modules and once for the monorepo root's hoisted modules.
  // (`whitelist` is the webpack-node-externals v1 option name.)
  externals: [
    nodeExternals({ whitelist: allowList }),
    nodeExternals({
      whitelist: allowList,
      modulesDir: path.resolve(__dirname, path.join('..', '..', 'node_modules')),
    }),
  ],
};

713
yarn.lock

File diff suppressed because it is too large Load Diff