Feat: multi content authoring (#4139)

Erez Rokah
2020-09-20 10:30:46 -07:00
committed by GitHub
parent 7968e01e29
commit cb2ad687ee
65 changed files with 4331 additions and 1521 deletions


@@ -9,7 +9,6 @@ import {
  APIError,
  ApiRequest,
  AssetProxy,
-  Entry,
  PersistOptions,
  readFile,
  CMS_BRANCH_PREFIX,
@@ -27,6 +26,7 @@ import {
  requestWithBackoff,
  readFileMetadata,
  throwOnConflictingBranches,
+  DataFile,
} from 'netlify-cms-lib-util';
import { dirname } from 'path';
import { oneLine } from 'common-tags';
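
The import hunks above swap Entry for DataFile. The DataFile type itself lives in netlify-cms-lib-util and is not part of this diff; the casts and calls below only rely on a shape roughly like this sketch (the exact field set is an assumption, not taken from this commit):

// Rough sketch of the DataFile shape assumed by the changes below (hypothetical, not from this diff).
type DataFile = {
  path: string; // repository path of the serialized entry file
  slug: string; // entry slug, used to derive the editorial workflow branch
  raw: string; // serialized file content persisted to the backend
  newPath?: string; // present when the file is being moved/renamed
};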
@@ -437,11 +437,11 @@ export default class API {
        // delete the file
        formData.append('files', file.path);
      } else if (file.newPath) {
-        const contentBlob = get(file, 'fileObj', new Blob([(file as Entry).raw]));
+        const contentBlob = get(file, 'fileObj', new Blob([(file as DataFile).raw]));
        toMove.push({ from: file.path, to: file.newPath, contentBlob });
      } else {
        // add/modify the file
-        const contentBlob = get(file, 'fileObj', new Blob([(file as Entry).raw]));
+        const contentBlob = get(file, 'fileObj', new Blob([(file as DataFile).raw]));
        // Third param is filename header, in case path is `message`, `branch`, etc.
        formData.append(file.path, contentBlob, basename(file.path));
      }
@@ -502,10 +502,11 @@
    return files;
  }

-  async persistFiles(entry: Entry | null, mediaFiles: AssetProxy[], options: PersistOptions) {
-    const files = entry ? [entry, ...mediaFiles] : mediaFiles;
+  async persistFiles(dataFiles: DataFile[], mediaFiles: AssetProxy[], options: PersistOptions) {
+    const files = [...dataFiles, ...mediaFiles];
    if (options.useWorkflow) {
-      return this.editorialWorkflowGit(files, entry as Entry, options);
+      const slug = dataFiles[0].slug;
+      return this.editorialWorkflowGit(files, slug, options);
    } else {
      return this.uploadFiles(files, { commitMessage: options.commitMessage, branch: this.branch });
    }
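
persistFiles now receives every data file of the entry (with multi content authoring that can be several files, e.g. one per locale) plus the media files, and derives the editorial workflow slug from the first data file. A minimal, hypothetical call-site sketch (the api instance and literal values are illustrative, not taken from this diff):

// Hypothetical usage of the new signature; paths, slugs and options values are made up.
const dataFiles: DataFile[] = [
  { path: 'content/posts/hello.md', slug: 'hello', raw: '---\ntitle: Hello\n---\n' },
  { path: 'content/posts/hello.fr.md', slug: 'hello', raw: '---\ntitle: Bonjour\n---\n' },
];
await api.persistFiles(dataFiles, assetProxies, {
  commitMessage: 'Create Post "hello"',
  collectionName: 'posts',
  useWorkflow: true,
  unpublished: false,
});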
@@ -587,8 +588,12 @@
    return diffs;
  }

-  async editorialWorkflowGit(files: (Entry | AssetProxy)[], entry: Entry, options: PersistOptions) {
-    const contentKey = generateContentKey(options.collectionName as string, entry.slug);
+  async editorialWorkflowGit(
+    files: (DataFile | AssetProxy)[],
+    slug: string,
+    options: PersistOptions,
+  ) {
+    const contentKey = generateContentKey(options.collectionName as string, slug);
    const branch = branchFromContentKey(contentKey);
    const unpublished = options.unpublished || false;
    if (!unpublished) {
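
The slug argument replaces entry.slug in the branch naming. Assuming the usual netlify-cms-lib-util helpers (generateContentKey joins collection name and slug, branchFromContentKey prepends the CMS branch prefix imported above), the derivation looks roughly like this sketch:

// Sketch of the branch naming this relies on (helper outputs assumed, not shown in this diff).
const contentKey = generateContentKey('posts', 'hello'); // e.g. 'posts/hello'
const branch = branchFromContentKey(contentKey); // e.g. 'cms/posts/hello'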
@@ -620,9 +625,11 @@
    }
  }

-  deleteFile = (path: string, message: string) => {
+  deleteFiles = (paths: string[], message: string) => {
    const body = new FormData();
-    body.append('files', path);
+    paths.forEach(path => {
+      body.append('files', path);
+    });
    body.append('branch', this.branch);
    if (message) {
      body.append('message', message);

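deleteFiles lets a single commit remove all files belonging to an entry, which matters once an entry can span several data files. A hypothetical call (paths are illustrative only):

// Hypothetical usage; paths and message are made up for illustration.
await api.deleteFiles(
  ['content/posts/hello.md', 'content/posts/hello.fr.md'],
  'Delete Post "hello"',
);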

@@ -432,15 +432,17 @@ export default class BitbucketBackend implements Implementation {
    };
  }

-  async persistEntry(entry: Entry, mediaFiles: AssetProxy[], options: PersistOptions) {
+  async persistEntry(entry: Entry, options: PersistOptions) {
    const client = await this.getLargeMediaClient();
    // persistEntry is a transactional operation
    return runWithLock(
      this.lock,
      async () =>
        this.api!.persistFiles(
-          entry,
-          client.enabled ? await getLargeMediaFilteredMediaFiles(client, mediaFiles) : mediaFiles,
+          entry.dataFiles,
+          client.enabled
+            ? await getLargeMediaFilteredMediaFiles(client, entry.assets)
+            : entry.assets,
          options,
        ),
      'Failed to acquire persist entry lock',
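
The backend-facing entry now bundles its own files, so persistEntry no longer takes a separate mediaFiles argument. The shape this code relies on is roughly the following sketch, inferred from the entry.dataFiles and entry.assets accesses above rather than copied from the diff:

// Sketch of the entry shape implied by the usage above; the type name is just a label here.
type BackendEntry = {
  dataFiles: DataFile[]; // one or more serialized content files for the entry
  assets: AssetProxy[]; // media files attached to the entry
};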
@@ -468,7 +470,7 @@
    const [id] = await Promise.all([
      getBlobSHA(fileObj),
-      this.api!.persistFiles(null, [mediaFile], options),
+      this.api!.persistFiles([], [mediaFile], options),
    ]);

    const url = URL.createObjectURL(fileObj);
@@ -484,8 +486,8 @@
    };
  }

-  deleteFile(path: string, commitMessage: string) {
-    return this.api!.deleteFile(path, commitMessage);
+  deleteFiles(paths: string[], commitMessage: string) {
+    return this.api!.deleteFiles(paths, commitMessage);
  }

  traverseCursor(cursor: Cursor, action: string) {