Feat: nested collections (#3716)

Erez Rokah authored on 2020-06-18 10:11:37 +03:00, committed by GitHub
parent b4c47caf59
commit af7bbbd9a9
89 changed files with 8269 additions and 5619 deletions

@ -28,6 +28,7 @@ import {
readFileMetadata,
throwOnConflictingBranches,
} from 'netlify-cms-lib-util';
import { dirname } from 'path';
import { oneLine } from 'common-tags';
import { parse } from 'what-the-diff';
@ -364,8 +365,8 @@ export default class API {
};
};
listFiles = async (path: string, depth = 1, pagelen = 20) => {
const node = await this.branchCommitSha(this.branch);
listFiles = async (path: string, depth = 1, pagelen: number, branch: string) => {
const node = await this.branchCommitSha(branch);
const result: BitBucketSrcResult = await this.requestJSON({
url: `${this.repoURL}/src/${node}/${path}`,
params: {
@ -398,11 +399,12 @@ export default class API {
})),
])(cursor.data!.getIn(['links', action]));
listAllFiles = async (path: string, depth = 1) => {
listAllFiles = async (path: string, depth: number, branch: string) => {
const { cursor: initialCursor, entries: initialEntries } = await this.listFiles(
path,
depth,
100,
branch,
);
const entries = [...initialEntries];
let currentCursor = initialCursor;
@ -418,7 +420,7 @@ export default class API {
};
async uploadFiles(
files: (Entry | AssetProxy | DeleteEntry)[],
files: { path: string; newPath?: string; delete?: boolean }[],
{
commitMessage,
branch,
@ -426,10 +428,14 @@ export default class API {
}: { commitMessage: string; branch: string; parentSha?: string },
) {
const formData = new FormData();
const toMove: { from: string; to: string; contentBlob: Blob }[] = [];
files.forEach(file => {
if ((file as DeleteEntry).delete) {
if (file.delete) {
// delete the file
formData.append('files', file.path);
} else if (file.newPath) {
const contentBlob = get(file, 'fileObj', new Blob([(file as Entry).raw]));
toMove.push({ from: file.path, to: file.newPath, contentBlob });
} else {
// add/modify the file
const contentBlob = get(file, 'fileObj', new Blob([(file as Entry).raw]));
@ -437,6 +443,30 @@ export default class API {
formData.append(file.path, contentBlob, basename(file.path));
}
});
for (const { from, to, contentBlob } of toMove) {
const sourceDir = dirname(from);
const destDir = dirname(to);
const filesBranch = parentSha ? this.branch : branch;
const files = await this.listAllFiles(sourceDir, 100, filesBranch);
for (const file of files) {
// to move a file in Bitbucket we need to delete the old path
// and upload the file content to the new path
// NOTE: this is very wasteful, and also the Bitbucket `diff` API
// reports these files as deleted+added instead of renamed
// delete current path
formData.append('files', file.path);
// create in new path
const content =
file.path === from
? contentBlob
: await this.readFile(file.path, null, {
branch: filesBranch,
parseText: false,
});
formData.append(file.path.replace(sourceDir, destDir), content, basename(file.path));
}
}
if (commitMessage) {
formData.append('message', commitMessage);
}
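
For readers unfamiliar with the endpoint used here, below is a minimal, hypothetical sketch of the move strategy described in the comment above, assuming the Bitbucket 2.0 `src` endpoint semantics this class already relies on: paths appended under the `files` field are deleted, and a part keyed by a path writes content at that path, all in one commit. The `repoURL`/`token` plumbing is a placeholder rather than the class's own request helper.

// Hypothetical standalone sketch of a "rename" commit against the Bitbucket 2.0 src endpoint.
async function moveFile(
  repoURL: string, // placeholder, e.g. https://api.bitbucket.org/2.0/repositories/{workspace}/{repo}
  token: string,
  branch: string,
  from: string,
  to: string,
  content: Blob,
  message: string,
) {
  const form = new FormData();
  form.append('files', from); // a path listed under `files` is deleted
  form.append(to, content, to.split('/').pop() ?? to); // a part keyed by the new path creates it
  form.append('branch', branch);
  form.append('message', message);
  const response = await fetch(`${repoURL}/src`, {
    method: 'POST',
    headers: { Authorization: `Bearer ${token}` },
    body: form,
  });
  if (!response.ok) {
    throw new Error(`Move failed with status ${response.status}`);
  }
}
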
@ -538,19 +568,20 @@ export default class API {
},
});
return parse(rawDiff).map(d => {
const diffs = parse(rawDiff).map(d => {
const oldPath = d.oldPath?.replace(/b\//, '') || '';
const newPath = d.newPath?.replace(/b\//, '') || '';
const path = newPath || (oldPath as string);
return {
oldPath,
newPath,
binary: d.binary || /.svg$/.test(path),
status: d.status,
newFile: d.status === 'added',
path,
binary: d.binary || /.svg$/.test(path),
};
});
return diffs;
}
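
For reference, each parsed diff above is normalized into an object of roughly this shape (the type alias is illustrative, inferred from the mapping; it is not part of the codebase):

// Illustrative type for the entries returned by getDifferences.
type DiffEntry = {
  oldPath: string; // '' for newly added files
  newPath: string; // '' for deleted files
  status: string; // e.g. 'added' | 'modified' | 'deleted'
  newFile: boolean; // true when status === 'added'
  path: string; // newPath, falling back to oldPath for deletions
  binary: boolean; // reported as binary by the diff, or any path ending in .svg
};
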
async editorialWorkflowGit(files: (Entry | AssetProxy)[], entry: Entry, options: PersistOptions) {
@ -573,8 +604,8 @@ export default class API {
// mark files for deletion
const diffs = await this.getDifferences(branch);
const toDelete: DeleteEntry[] = [];
for (const diff of diffs) {
if (!files.some(file => file.path === diff.newPath)) {
for (const diff of diffs.filter(d => d.binary && d.status !== 'deleted')) {
if (!files.some(file => file.path === diff.path)) {
toDelete.push({ path: diff.path, delete: true });
}
}
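
A small worked example of the deletion pass above, using hypothetical paths, to show why only binary (media) diffs that are not already deleted are considered:

// Hypothetical data: the persisted entry still references image-a.png but no longer image-b.png.
const branchDiffs = [
  { path: 'static/img/image-a.png', binary: true, status: 'added' },
  { path: 'static/img/image-b.png', binary: true, status: 'added' },
  { path: 'content/posts/post.md', binary: false, status: 'modified' },
];
const persistedFiles = [{ path: 'static/img/image-a.png' }, { path: 'content/posts/post.md' }];
const mediaToDelete = branchDiffs
  .filter(d => d.binary && d.status !== 'deleted')
  .filter(d => !persistedFiles.some(f => f.path === d.path))
  .map(d => ({ path: d.path, delete: true }));
// mediaToDelete === [{ path: 'static/img/image-b.png', delete: true }]
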
@ -637,47 +668,6 @@ export default class API {
return pullRequests[0];
}
async retrieveMetadata(contentKey: string) {
const { collection, slug } = parseContentKey(contentKey);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
const diff = await this.getDifferences(branch);
const { newPath: path, newFile } = diff.find(d => !d.binary) as {
newPath: string;
newFile: boolean;
};
// TODO: get real file id
const mediaFiles = await Promise.all(
diff.filter(d => d.newPath !== path).map(d => ({ path: d.newPath, id: null })),
);
const label = await this.getPullRequestLabel(pullRequest.id);
const status = labelToStatus(label);
const timeStamp = pullRequest.updated_on;
return { branch, collection, slug, path, status, newFile, mediaFiles, timeStamp };
}
async readUnpublishedBranchFile(contentKey: string) {
const {
branch,
collection,
slug,
path,
status,
newFile,
mediaFiles,
timeStamp,
} = await this.retrieveMetadata(contentKey);
const fileData = (await this.readFile(path, null, { branch })) as string;
return {
slug,
metaData: { branch, collection, objects: { entry: { path, mediaFiles } }, status, timeStamp },
fileData,
isModification: !newFile,
};
}
async listUnpublishedBranches() {
console.log(
'%c Checking for Unpublished entries',
@ -690,6 +680,26 @@ export default class API {
return branches;
}
async retrieveUnpublishedEntryData(contentKey: string) {
const { collection, slug } = parseContentKey(contentKey);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
const diffs = await this.getDifferences(branch);
const label = await this.getPullRequestLabel(pullRequest.id);
const status = labelToStatus(label);
const updatedAt = pullRequest.updated_on;
return {
collection,
slug,
status,
// TODO: get real id
diffs: diffs
.filter(d => d.status !== 'deleted')
.map(d => ({ path: d.path, newFile: d.newFile, id: '' })),
updatedAt,
};
}
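
Roughly, the data returned for an unpublished entry now looks like this (field names come from the method above; the concrete values are made up for illustration):

// Illustrative result of retrieveUnpublishedEntryData('posts/my-post'):
const example = {
  collection: 'posts',
  slug: 'my-post',
  status: 'draft',
  diffs: [{ path: 'content/posts/my-post/index.md', newFile: true, id: '' }],
  updatedAt: '2020-06-18T07:11:37+00:00',
};
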
async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
const contentKey = generateContentKey(collection, slug);
const branch = branchFromContentKey(contentKey);

@ -23,7 +23,6 @@ import {
Config,
ImplementationFile,
unpublishedEntries,
UnpublishedEntryMediaFile,
runWithLock,
AsyncLock,
asyncLock,
@ -38,6 +37,7 @@ import {
localForage,
allEntriesByFolder,
AccessTokenError,
branchFromContentKey,
} from 'netlify-cms-lib-util';
import { NetlifyAuthenticator } from 'netlify-cms-lib-auth';
import AuthenticationPage from './AuthenticationPage';
@ -299,7 +299,7 @@ export default class BitbucketBackend implements Implementation {
let cursor: Cursor;
const listFiles = () =>
this.api!.listFiles(folder, depth).then(({ entries, cursor: c }) => {
this.api!.listFiles(folder, depth, 20, this.branch).then(({ entries, cursor: c }) => {
cursor = c.mergeMeta({ extension });
return entries.filter(e => filterByExtension(e, extension));
});
@ -323,7 +323,7 @@ export default class BitbucketBackend implements Implementation {
}
async listAllFiles(folder: string, extension: string, depth: number) {
const files = await this.api!.listAllFiles(folder, depth);
const files = await this.api!.listAllFiles(folder, depth, this.branch);
const filtered = files.filter(file => filterByExtension(file, extension));
return filtered;
}
@ -371,7 +371,7 @@ export default class BitbucketBackend implements Implementation {
}
getMedia(mediaFolder = this.mediaFolder) {
return this.api!.listAllFiles(mediaFolder).then(files =>
return this.api!.listAllFiles(mediaFolder, 1, this.branch).then(files =>
files.map(({ id, name, path }) => ({ id, name, path, displayURL: { id, path } })),
);
}
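
The listing helpers now take an explicit page length and branch, as the call sites above show. A sketch of the updated call shapes, assuming an `api` instance of the API class from the first file and placeholder folder and branch names:

// Sketch only: 'content/posts', 'static/img' and 'master' are placeholders.
async function listingExamples(api: API) {
  // one page of up to 20 entries from a folder, plus a cursor for further pages
  const page = await api.listFiles('content/posts', 1, 20, 'master');
  // every file under a folder, following pagination cursors internally
  const all = await api.listAllFiles('static/img', 1, 'master');
  return { page, all };
}
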
@ -509,31 +509,26 @@ export default class BitbucketBackend implements Implementation {
});
}
loadMediaFile(branch: string, file: UnpublishedEntryMediaFile) {
const readFile = (
async loadMediaFile(path: string, id: string, { branch }: { branch: string }) {
const readFile = async (
path: string,
id: string | null | undefined,
{ parseText }: { parseText: boolean },
) => this.api!.readFile(path, id, { branch, parseText });
return getMediaAsBlob(file.path, null, readFile).then(blob => {
const name = basename(file.path);
const fileObj = blobToFileObj(name, blob);
return {
id: file.path,
displayURL: URL.createObjectURL(fileObj),
path: file.path,
name,
size: fileObj.size,
file: fileObj,
};
});
}
async loadEntryMediaFiles(branch: string, files: UnpublishedEntryMediaFile[]) {
const mediaFiles = await Promise.all(files.map(file => this.loadMediaFile(branch, file)));
return mediaFiles;
) => {
const content = await this.api!.readFile(path, id, { branch, parseText });
return content;
};
const blob = await getMediaAsBlob(path, id, readFile);
const name = basename(path);
const fileObj = blobToFileObj(name, blob);
return {
id: path,
displayURL: URL.createObjectURL(fileObj),
path,
name,
size: fileObj.size,
file: fileObj,
};
}
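
loadMediaFile now receives a path, an id, and the branch to read from, and resolves to an object shaped like this (illustrative values only):

// Illustrative resolved value of loadMediaFile('static/img/logo.png', 'abc123', { branch: 'cms/posts/my-post' }):
// {
//   id: 'static/img/logo.png',
//   displayURL: 'blob:http://localhost/1234-abcd', // object URL created from the downloaded blob
//   path: 'static/img/logo.png',
//   name: 'logo.png',
//   size: 1234,
//   file: File,
// }
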
async unpublishedEntries() {
@ -542,37 +537,47 @@ export default class BitbucketBackend implements Implementation {
branches.map(branch => contentKeyFromBranch(branch)),
);
const readUnpublishedBranchFile = (contentKey: string) =>
this.api!.readUnpublishedBranchFile(contentKey);
return unpublishedEntries(listEntriesKeys, readUnpublishedBranchFile, API_NAME);
const ids = await unpublishedEntries(listEntriesKeys);
return ids;
}
async unpublishedEntry(
collection: string,
slug: string,
{
loadEntryMediaFiles = (branch: string, files: UnpublishedEntryMediaFile[]) =>
this.loadEntryMediaFiles(branch, files),
} = {},
) {
async unpublishedEntry({
id,
collection,
slug,
}: {
id?: string;
collection?: string;
slug?: string;
}) {
if (id) {
const data = await this.api!.retrieveUnpublishedEntryData(id);
return data;
} else if (collection && slug) {
const entryId = generateContentKey(collection, slug);
const data = await this.api!.retrieveUnpublishedEntryData(entryId);
return data;
} else {
throw new Error('Missing unpublished entry id or collection and slug');
}
}
getBranch(collection: string, slug: string) {
const contentKey = generateContentKey(collection, slug);
const data = await this.api!.readUnpublishedBranchFile(contentKey);
const mediaFiles = await loadEntryMediaFiles(
data.metaData.branch,
// TODO: fix this
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
// @ts-ignore
data.metaData.objects.entry.mediaFiles,
);
return {
slug,
file: { path: data.metaData.objects.entry.path, id: null },
data: data.fileData as string,
metaData: data.metaData,
mediaFiles,
isModification: data.isModification,
};
const branch = branchFromContentKey(contentKey);
return branch;
}
async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
const branch = this.getBranch(collection, slug);
const data = (await this.api!.readFile(path, id, { branch })) as string;
return data;
}
async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
const branch = this.getBranch(collection, slug);
const mediaFile = await this.loadMediaFile(path, id, { branch });
return mediaFile;
}
async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
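
Taken together, these changes split unpublished-entry loading into a summary call plus on-demand per-file reads. A hypothetical consumer of the new contract (an assumption about how the core orchestrates it, not code from this commit):

// Hypothetical consumer of the new backend contract (not the actual netlify-cms-core code).
async function loadUnpublishedEntry(backend: BitbucketBackend, collection: string, slug: string) {
  // 1. summary: workflow status plus the changed paths on the entry branch
  const entry = await backend.unpublishedEntry({ collection, slug });
  // 2. raw content of each changed file, read from that branch on demand
  const dataFiles = await Promise.all(
    entry.diffs.map(d => backend.unpublishedEntryDataFile(collection, slug, d.path, d.id)),
  );
  return { entry, dataFiles };
}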