Feat: multi content authoring (#4139)

This commit is contained in:
Erez Rokah
2020-09-20 10:30:46 -07:00
committed by GitHub
parent 7968e01e29
commit cb2ad687ee
65 changed files with 4331 additions and 1521 deletions

View File

@ -23,6 +23,7 @@
},
"dependencies": {
"@hapi/joi": "^17.0.2",
"async-mutex": "^0.2.4",
"cors": "^2.8.5",
"dotenv": "^8.2.0",
"express": "^4.17.1",

View File

@ -26,7 +26,7 @@ describe('defaultSchema', () => {
assetFailure(
schema.validate({ action: 'unknown', params: {} }),
'"action" must be one of [info, entriesByFolder, entriesByFiles, getEntry, unpublishedEntries, unpublishedEntry, unpublishedEntryDataFile, unpublishedEntryMediaFile, deleteUnpublishedEntry, persistEntry, updateUnpublishedEntryStatus, publishUnpublishedEntry, getMedia, getMediaFile, persistMedia, deleteFile, getDeployPreview]',
'"action" must be one of [info, entriesByFolder, entriesByFiles, getEntry, unpublishedEntries, unpublishedEntry, unpublishedEntryDataFile, unpublishedEntryMediaFile, deleteUnpublishedEntry, persistEntry, updateUnpublishedEntryStatus, publishUnpublishedEntry, getMedia, getMediaFile, persistMedia, deleteFile, deleteFiles, getDeployPreview]',
);
});
@ -274,8 +274,19 @@ describe('defaultSchema', () => {
const schema = defaultSchema();
assetFailure(
schema.validate({ action: 'persistEntry', params: { ...defaultParams } }),
'"params.entry" is required',
schema.validate({
action: 'persistEntry',
params: {
...defaultParams,
assets: [],
options: {
commitMessage: 'commitMessage',
useWorkflow: true,
status: 'draft',
},
},
}),
'"params" must contain at least one of [entry, dataFiles]',
);
assetFailure(
schema.validate({
@ -309,7 +320,7 @@ describe('defaultSchema', () => {
);
});
it('should pass on valid params', () => {
it('should pass on valid params (entry argument)', () => {
const schema = defaultSchema();
const { error } = schema.validate({
action: 'persistEntry',
@ -327,6 +338,25 @@ describe('defaultSchema', () => {
expect(error).toBeUndefined();
});
it('should pass on valid params (dataFiles argument)', () => {
const schema = defaultSchema();
const { error } = schema.validate({
action: 'persistEntry',
params: {
...defaultParams,
dataFiles: [{ slug: 'slug', path: 'path', raw: 'content' }],
assets: [{ path: 'path', content: 'content', encoding: 'base64' }],
options: {
commitMessage: 'commitMessage',
useWorkflow: true,
status: 'draft',
},
},
});
expect(error).toBeUndefined();
});
});
describe('updateUnpublishedEntryStatus', () => {
@ -491,6 +521,31 @@ describe('defaultSchema', () => {
});
});
describe('deleteFiles', () => {
it('should fail on invalid params', () => {
const schema = defaultSchema();
assetFailure(
schema.validate({ action: 'deleteFiles', params: { ...defaultParams } }),
'"params.paths" is required',
);
});
it('should pass on valid params', () => {
const schema = defaultSchema();
const { error } = schema.validate({
action: 'deleteFiles',
params: {
...defaultParams,
paths: ['src/static/images/image.png'],
options: { commitMessage: 'commitMessage' },
},
});
expect(error).toBeUndefined();
});
});
describe('getDeployPreview', () => {
it('should fail on invalid params', () => {
const schema = defaultSchema();

View File

@ -18,6 +18,7 @@ const allowedActions = [
'getMediaFile',
'persistMedia',
'deleteFile',
'deleteFiles',
'getDeployPreview',
];
@ -39,6 +40,13 @@ export const defaultSchema = ({ path = requiredString } = {}) => {
encoding: requiredString.valid('base64'),
});
const dataFile = Joi.object({
slug: requiredString,
path,
raw: requiredString,
newPath: path.optional(),
});
const params = Joi.when('action', {
switch: [
{
@ -122,12 +130,8 @@ export const defaultSchema = ({ path = requiredString } = {}) => {
then: defaultParams
.keys({
cmsLabelPrefix: Joi.string().optional(),
entry: Joi.object({
slug: requiredString,
path,
raw: requiredString,
newPath: path.optional(),
}).required(),
entry: dataFile, // entry is kept for backwards compatibility
dataFiles: Joi.array().items(dataFile),
assets: Joi.array()
.items(asset)
.required(),
@ -138,6 +142,7 @@ export const defaultSchema = ({ path = requiredString } = {}) => {
status: requiredString,
}).required(),
})
.xor('entry', 'dataFiles')
.required(),
},
{
@ -198,6 +203,20 @@ export const defaultSchema = ({ path = requiredString } = {}) => {
})
.required(),
},
{
is: 'deleteFiles',
then: defaultParams
.keys({
paths: Joi.array()
.items(path)
.min(1)
.required(),
options: Joi.object({
commitMessage: requiredString,
}).required(),
})
.required(),
},
{
is: 'getDeployPreview',
then: defaultParams

View File

@ -12,6 +12,8 @@ import {
GetMediaFileParams,
PersistMediaParams,
DeleteFileParams,
DeleteFilesParams,
DataFile,
} from '../types';
import { listRepoFiles, deleteFile, writeFile, move } from '../utils/fs';
import { entriesFromFiles, readMediaFile } from '../utils/entries';
@ -61,16 +63,27 @@ export const localFsMiddleware = ({ repoPath, logger }: FsOptions) => {
break;
}
case 'persistEntry': {
const { entry, assets } = body.params as PersistEntryParams;
await writeFile(path.join(repoPath, entry.path), entry.raw);
const {
entry,
dataFiles = [entry as DataFile],
assets,
} = body.params as PersistEntryParams;
await Promise.all(
dataFiles.map(dataFile => writeFile(path.join(repoPath, dataFile.path), dataFile.raw)),
);
// save assets
await Promise.all(
assets.map(a =>
writeFile(path.join(repoPath, a.path), Buffer.from(a.content, a.encoding)),
),
);
if (entry.newPath) {
await move(path.join(repoPath, entry.path), path.join(repoPath, entry.newPath));
if (dataFiles.every(dataFile => dataFile.newPath)) {
dataFiles.forEach(async dataFile => {
await move(
path.join(repoPath, dataFile.path),
path.join(repoPath, dataFile.newPath!),
);
});
}
res.json({ message: 'entry persisted' });
break;
@ -104,6 +117,12 @@ export const localFsMiddleware = ({ repoPath, logger }: FsOptions) => {
res.json({ message: `deleted file ${filePath}` });
break;
}
case 'deleteFiles': {
const { paths } = body.params as DeleteFilesParams;
await Promise.all(paths.map(filePath => deleteFile(repoPath, filePath)));
res.json({ message: `deleted files ${paths.join(', ')}` });
break;
}
case 'getDeployPreview': {
res.json(null);
break;

View File

@ -26,16 +26,18 @@ import {
PersistMediaParams,
DeleteFileParams,
UpdateUnpublishedEntryStatusParams,
Entry,
DataFile,
GetMediaFileParams,
DeleteEntryParams,
DeleteFilesParams,
UnpublishedEntryDataFileParams,
UnpublishedEntryMediaFileParams,
} from '../types';
// eslint-disable-next-line import/default
import simpleGit from 'simple-git/promise';
import { Mutex, withTimeout } from 'async-mutex';
import { pathTraversal } from '../joi/customValidators';
import { listRepoFiles, writeFile, move } from '../utils/fs';
import { listRepoFiles, writeFile, move, deleteFile, getUpdateDate } from '../utils/fs';
import { entriesFromFiles, readMediaFile } from '../utils/entries';
const commit = async (git: simpleGit.SimpleGit, commitMessage: string) => {
@ -76,18 +78,22 @@ type GitOptions = {
const commitEntry = async (
git: simpleGit.SimpleGit,
repoPath: string,
entry: Entry,
dataFiles: DataFile[],
assets: Asset[],
commitMessage: string,
) => {
// save entry content
await writeFile(path.join(repoPath, entry.path), entry.raw);
await Promise.all(
dataFiles.map(dataFile => writeFile(path.join(repoPath, dataFile.path), dataFile.raw)),
);
// save assets
await Promise.all(
assets.map(a => writeFile(path.join(repoPath, a.path), Buffer.from(a.content, a.encoding))),
);
if (entry.newPath) {
await move(path.join(repoPath, entry.path), path.join(repoPath, entry.newPath));
if (dataFiles.every(dataFile => dataFile.newPath)) {
dataFiles.forEach(async dataFile => {
await move(path.join(repoPath, dataFile.path), path.join(repoPath, dataFile.newPath!));
});
}
// commits files
@ -162,8 +168,13 @@ export const getSchema = ({ repoPath }: { repoPath: string }) => {
export const localGitMiddleware = ({ repoPath, logger }: GitOptions) => {
const git = simpleGit(repoPath).silent(false);
// we can only perform a single git operation at any given time
const mutex = withTimeout(new Mutex(), 3000, new Error('Request timed out'));
return async function(req: express.Request, res: express.Response) {
let release;
try {
release = await mutex.acquire();
const { body } = req;
if (body.action === 'info') {
res.json({
@ -233,11 +244,23 @@ export const localGitMiddleware = ({ repoPath, logger }: GitOptions) => {
const diffs = await getDiffs(git, branch, cmsBranch);
const label = await git.raw(['config', branchDescription(cmsBranch)]);
const status = label && labelToStatus(label.trim(), cmsLabelPrefix || '');
const updatedAt =
diffs.length >= 0
? await runOnBranch(git, cmsBranch, async () => {
const dates = await Promise.all(
diffs.map(({ newPath }) => getUpdateDate(repoPath, newPath)),
);
return dates.reduce((a, b) => {
return a > b ? a : b;
});
})
: new Date();
const unpublishedEntry = {
collection,
slug,
status,
diffs,
updatedAt,
};
res.json(unpublishedEntry);
} else {
@ -276,13 +299,20 @@ export const localGitMiddleware = ({ repoPath, logger }: GitOptions) => {
break;
}
case 'persistEntry': {
const { entry, assets, options, cmsLabelPrefix } = body.params as PersistEntryParams;
const {
cmsLabelPrefix,
entry,
dataFiles = [entry as DataFile],
assets,
options,
} = body.params as PersistEntryParams;
if (!options.useWorkflow) {
await runOnBranch(git, branch, async () => {
await commitEntry(git, repoPath, entry, assets, options.commitMessage);
await commitEntry(git, repoPath, dataFiles, assets, options.commitMessage);
});
} else {
const slug = entry.slug;
const slug = dataFiles[0].slug;
const collection = options.collectionName as string;
const contentKey = generateContentKey(collection, slug);
const cmsBranch = branchFromContentKey(contentKey);
@ -300,7 +330,7 @@ export const localGitMiddleware = ({ repoPath, logger }: GitOptions) => {
d => d.binary && !assets.map(a => a.path).includes(d.path),
);
await Promise.all(toDelete.map(f => fs.unlink(path.join(repoPath, f.path))));
await commitEntry(git, repoPath, entry, assets, options.commitMessage);
await commitEntry(git, repoPath, dataFiles, assets, options.commitMessage);
// add status for new entries
if (!branchExists) {
@ -378,12 +408,24 @@ export const localGitMiddleware = ({ repoPath, logger }: GitOptions) => {
options: { commitMessage },
} = body.params as DeleteFileParams;
await runOnBranch(git, branch, async () => {
await fs.unlink(path.join(repoPath, filePath));
await deleteFile(repoPath, filePath);
await commit(git, commitMessage);
});
res.json({ message: `deleted file ${filePath}` });
break;
}
case 'deleteFiles': {
const {
paths,
options: { commitMessage },
} = body.params as DeleteFilesParams;
await runOnBranch(git, branch, async () => {
await Promise.all(paths.map(filePath => deleteFile(repoPath, filePath)));
await commit(git, commitMessage);
});
res.json({ message: `deleted files ${paths.join(', ')}` });
break;
}
case 'getDeployPreview': {
res.json(null);
break;
@ -397,6 +439,8 @@ export const localGitMiddleware = ({ repoPath, logger }: GitOptions) => {
} catch (e) {
logger.error(`Error handling ${JSON.stringify(req.body)}: ${e.message}`);
res.status(500).json({ error: 'Unknown error' });
} finally {
release && release();
}
};
};

View File

@ -54,13 +54,14 @@ export type PublishUnpublishedEntryParams = {
slug: string;
};
export type Entry = { slug: string; path: string; raw: string; newPath?: string };
export type DataFile = { slug: string; path: string; raw: string; newPath?: string };
export type Asset = { path: string; content: string; encoding: 'base64' };
export type PersistEntryParams = {
cmsLabelPrefix?: string;
entry: Entry;
entry?: DataFile;
dataFiles?: DataFile[];
assets: Asset[];
options: {
collectionName?: string;
@ -91,3 +92,10 @@ export type DeleteFileParams = {
commitMessage: string;
};
};
/**
 * Request payload for the `deleteFiles` action: delete several repository
 * files at once (the git middleware removes them all and records a single
 * commit; the fs middleware just unlinks each path).
 */
export type DeleteFilesParams = {
  // Repo-relative paths of the files to delete; joined onto repoPath by the handlers.
  paths: string[];
  options: {
    // Message used for the deletion commit (git middleware only).
    commitMessage: string;
  };
};

View File

@ -38,7 +38,7 @@ export const writeFile = async (filePath: string, content: Buffer | string) => {
};
export const deleteFile = async (repoPath: string, filePath: string) => {
await fs.unlink(path.join(repoPath, filePath));
await fs.unlink(path.join(repoPath, filePath)).catch(() => undefined);
};
const moveFile = async (from: string, to: string) => {
@ -56,3 +56,10 @@ export const move = async (from: string, to: string) => {
const allFiles = await listFiles(sourceDir, '', 100);
await Promise.all(allFiles.map(file => moveFile(file, file.replace(sourceDir, destDir))));
};
/**
 * Resolve the last-modified time of a file under the repo root.
 * Falls back to the current date when the file cannot be stat-ed
 * (e.g. it was deleted on this branch).
 */
export const getUpdateDate = async (repoPath: string, filePath: string) => {
  const absolutePath = path.join(repoPath, filePath);
  try {
    const stats = await fs.stat(absolutePath);
    return stats.mtime;
  } catch {
    // Missing/unreadable file — treat "now" as the update date.
    return new Date();
  }
};