Feat: nested collections (#3716)

Author: Erez Rokah
Authored: 2020-06-18 10:11:37 +03:00
Committed by: GitHub
Parent: b4c47caf59
Commit: af7bbbd9a9
89 changed files with 8269 additions and 5619 deletions


@@ -29,6 +29,7 @@ import {
ApiRequest,
throwOnConflictingBranches,
} from 'netlify-cms-lib-util';
import { dirname } from 'path';
import { Octokit } from '@octokit/rest';
type GitHubUser = Octokit.UsersGetAuthenticatedResponse;
@@ -154,6 +155,24 @@ const getTreeFiles = (files: GitHubCompareFiles) => {
return treeFiles;
};
type Diff = {
path: string;
newFile: boolean;
sha: string;
binary: boolean;
};
const diffFromFile = (diff: Octokit.ReposCompareCommitsResponseFilesItem): Diff => {
return {
path: diff.filename,
newFile: diff.status === 'added',
sha: diff.sha,
// media file diffs don't have a patch attribute, except for svg files
// renamed files don't have a patch attribute either
binary: (diff.status !== 'renamed' && !diff.patch) || diff.filename.endsWith('.svg'),
};
};
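The binary flag above is a heuristic over what GitHub's compare endpoint returns: media blobs come back without a patch attribute, renamed files omit it regardless of content, and svg files carry a textual patch even though the CMS treats them as media. A minimal sketch of how the mapping behaves, with hypothetical inputs that carry only the fields diffFromFile reads (casts to Octokit's response type omitted for brevity):

// Hypothetical compare-API items; paths and shas are illustrative.
const post = { filename: 'content/posts/new-post.md', status: 'added', sha: 'a1', patch: '@@ -0,0 +1,5 @@ ...' };
const image = { filename: 'static/media/photo.png', status: 'added', sha: 'b2' }; // no patch attribute
const logo = { filename: 'static/media/logo.svg', status: 'modified', sha: 'c3', patch: '@@ -1 +1 @@ ...' };

diffFromFile(post);  // { path: 'content/posts/new-post.md', newFile: true,  sha: 'a1', binary: false }
diffFromFile(image); // { path: 'static/media/photo.png',    newFile: true,  sha: 'b2', binary: true }
diffFromFile(logo);  // { path: 'static/media/logo.svg',     newFile: false, sha: 'c3', binary: true }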
let migrationNotified = false;
export default class API {
@@ -497,7 +516,9 @@ export default class API {
// since the contributor doesn't have access to set labels
// a branch without a pr (or a closed pr) means a 'draft' entry
// a branch with an open pr means a 'pending_review' entry
const data = await this.getBranch(branch);
const data = await this.getBranch(branch).catch(() => {
throw new EditorialWorkflowError('content is not under editorial workflow', true);
});
// since we get all (open and closed) pull requests by branch name, make sure to filter by head sha
const pullRequest = pullRequests.filter(pr => pr.head.sha === data.commit.sha)[0];
// if no pull request is found for the branch we return a mocked one
@@ -552,65 +573,22 @@ export default class API {
}
}
matchingEntriesFromDiffs(diffs: Octokit.ReposCompareCommitsResponseFilesItem[]) {
// media files don't have a patch attribute, except for svg files
const matchingEntries = diffs
.filter(d => d.patch && !d.filename.endsWith('.svg'))
.map(f => ({ path: f.filename, newFile: f.status === 'added' }));
return matchingEntries;
}
async retrieveMetadata(contentKey: string) {
async retrieveUnpublishedEntryData(contentKey: string) {
const { collection, slug } = this.parseContentKey(contentKey);
const branch = branchFromContentKey(contentKey);
const pullRequest = await this.getBranchPullRequest(branch);
const { files: diffs } = await this.getDifferences(this.branch, pullRequest.head.sha);
const matchingEntries = this.matchingEntriesFromDiffs(diffs);
let entry = matchingEntries[0];
if (matchingEntries.length <= 0) {
// this can happen if there is an empty diff for some reason
// we traverse the commits history to infer the entry
const commits = await this.getPullRequestCommits(pullRequest.number);
for (const commit of commits) {
const { files: diffs } = await this.getDifferences(this.branch, commit.sha);
const matchingEntries = this.matchingEntriesFromDiffs(diffs);
entry = matchingEntries[0];
if (entry) {
break;
}
}
if (!entry) {
console.error(
'Unable to locate entry from diff',
JSON.stringify({ branch, pullRequest, diffs, matchingEntries }),
);
throw new EditorialWorkflowError('content is not under editorial workflow', true);
}
} else if (matchingEntries.length > 1) {
// this only works for folder collections
const entryBySlug = matchingEntries.filter(e => e.path.includes(slug))[0];
entry = entryBySlug || entry;
if (!entryBySlug) {
console.warn(
`Expected 1 matching entry from diff, but received '${matchingEntries.length}'. Matched '${entry.path}'`,
JSON.stringify({ branch, pullRequest, diffs, matchingEntries }),
);
}
}
const { path, newFile } = entry;
const mediaFiles = diffs
.filter(d => d.filename !== path)
.map(({ filename: path, sha: id }) => ({
path,
id,
}));
const { files } = await this.getDifferences(this.branch, pullRequest.head.sha);
const diffs = files.map(diffFromFile);
const label = pullRequest.labels.find(l => isCMSLabel(l.name)) as { name: string };
const status = labelToStatus(label.name);
const timeStamp = pullRequest.updated_at;
return { branch, collection, slug, path, status, newFile, mediaFiles, timeStamp, pullRequest };
const updatedAt = pullRequest.updated_at;
return {
collection,
slug,
status,
diffs: diffs.map(d => ({ path: d.path, newFile: d.newFile, id: d.sha })),
updatedAt,
};
}
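retrieveUnpublishedEntryData replaces retrieveMetadata's entry-guessing with a plain summary of the pull request's diff; nothing is read eagerly anymore. The shape it resolves to is roughly the following (a sketch inferred from the return statement, not a type the commit exports):

// Inferred result shape; field types are assumptions based on how the
// values are produced above.
type UnpublishedEntryData = {
  collection: string;
  slug: string;
  status: string; // from labelToStatus(label.name)
  diffs: { path: string; newFile: boolean; id: string }[]; // id is the blob sha
  updatedAt: string; // pullRequest.updated_at timestamp
};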
async readFile(
@@ -712,45 +690,6 @@ export default class API {
}
}
async readUnpublishedBranchFile(contentKey: string) {
try {
const {
branch,
collection,
slug,
path,
status,
newFile,
mediaFiles,
timeStamp,
} = await this.retrieveMetadata(contentKey);
const repoURL = this.useOpenAuthoring
? `/repos/${contentKey
.split('/')
.slice(0, 2)
.join('/')}`
: this.repoURL;
const fileData = (await this.readFile(path, null, { branch, repoURL })) as string;
return {
slug,
metaData: {
branch,
collection,
objects: { entry: { path, mediaFiles } },
status,
timeStamp,
},
fileData,
isModification: !newFile,
};
} catch (e) {
throw new EditorialWorkflowError('content is not under editorial workflow', true);
}
}
filterOpenAuthoringBranches = async (branch: string) => {
try {
const pullRequest = await this.getBranchPullRequest(branch);
@@ -1044,16 +983,17 @@ export default class API {
}
} else {
// Entry is already on editorial review workflow - commit to existing branch
const { files: diffs } = await this.getDifferences(
const { files: diffFiles } = await this.getDifferences(
this.branch,
await this.getHeadReference(branch),
);
const diffs = diffFiles.map(diffFromFile);
// mark media files to remove
const mediaFilesToRemove: { path: string; sha: string | null }[] = [];
for (const diff of diffs) {
if (!mediaFilesList.some(file => file.path === diff.filename)) {
mediaFilesToRemove.push({ path: diff.filename, sha: null });
for (const diff of diffs.filter(d => d.binary)) {
if (!mediaFilesList.some(file => file.path === diff.path)) {
mediaFilesToRemove.push({ path: diff.path, sha: null });
}
}
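Filtering on d.binary keeps this loop to media assets; a null sha is a deletion marker for updateTree, since the Git Trees API removes a path whose tree entry has sha set to null. A self-contained sketch of the selection (values are hypothetical):

// Hypothetical inputs mirroring the loop above.
const entryMedia = [{ path: 'static/media/kept.png' }];
const branchDiffs = [
  { path: 'static/media/kept.png', binary: true, sha: 'a1' },
  { path: 'static/media/removed.png', binary: true, sha: 'b2' },
  { path: 'content/posts/post.md', binary: false, sha: 'c3' },
];
const toRemove = branchDiffs
  .filter(d => d.binary)
  .filter(d => !entryMedia.some(f => f.path === d.path))
  .map(d => ({ path: d.path, sha: null }));
// toRemove: [{ path: 'static/media/removed.png', sha: null }]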
@@ -1414,30 +1354,67 @@ export default class API {
return Promise.resolve(Base64.encode(str));
}
uploadBlob(item: { raw?: string; sha?: string; toBase64?: () => Promise<string> }) {
const content = result(item, 'toBase64', partial(this.toBase64, item.raw as string));
return content.then(contentBase64 =>
this.request(`${this.repoURL}/git/blobs`, {
method: 'POST',
body: JSON.stringify({
content: contentBase64,
encoding: 'base64',
}),
}).then(response => {
item.sha = response.sha;
return item;
}),
async uploadBlob(item: { raw?: string; sha?: string; toBase64?: () => Promise<string> }) {
const contentBase64 = await result(
item,
'toBase64',
partial(this.toBase64, item.raw as string),
);
const response = await this.request(`${this.repoURL}/git/blobs`, {
method: 'POST',
body: JSON.stringify({
content: contentBase64,
encoding: 'base64',
}),
});
item.sha = response.sha;
return item;
}
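The async/await rewrite keeps uploadBlob's behavior: base64-encode the content, POST it to the repository's git/blobs endpoint, and record the returned blob sha on the item. A hypothetical call, assuming api is an instance of this class:

// Hypothetical usage; the sha comes from GitHub's POST /git/blobs response.
const item = { raw: 'hello world' };
await api.uploadBlob(item);
console.log(item.sha); // blob sha for the uploaded content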
async updateTree(baseSha: string, files: { path: string; sha: string | null }[]) {
const tree: TreeEntry[] = files.map(file => ({
path: trimStart(file.path, '/'),
mode: '100644',
type: 'blob',
sha: file.sha,
}));
async updateTree(
baseSha: string,
files: { path: string; sha: string | null; newPath?: string }[],
branch = this.branch,
) {
const toMove: { from: string; to: string; sha: string }[] = [];
const tree = files.reduce((acc, file) => {
const entry = {
path: trimStart(file.path, '/'),
mode: '100644',
type: 'blob',
sha: file.sha,
} as TreeEntry;
if (file.newPath) {
toMove.push({ from: file.path, to: file.newPath, sha: file.sha as string });
} else {
acc.push(entry);
}
return acc;
}, [] as TreeEntry[]);
for (const { from, to, sha } of toMove) {
const sourceDir = dirname(from);
const destDir = dirname(to);
const files = await this.listFiles(sourceDir, { branch, depth: 100 });
for (const file of files) {
// delete current path
tree.push({
path: file.path,
mode: '100644',
type: 'blob',
sha: null,
});
// create at the new path
tree.push({
path: file.path.replace(sourceDir, destDir),
mode: '100644',
type: 'blob',
sha: file.path === from ? sha : file.id,
});
}
}
const newTree = await this.createTree(baseSha, tree);
return { ...newTree, parentSha: baseSha };
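This is the piece that lets nested collections rename an entry's folder without re-uploading content: for each move, listFiles walks the source directory (to a depth of 100), every existing path gets a tombstone entry with sha: null (a deletion in the Git Trees API), and the same blob shas are re-attached under the destination path, with the moved entry file itself taking the freshly uploaded sha. For a hypothetical folder entry holding one media file, the loop would push four tree entries:

// Sketch: moving 'content/posts/old/index.md' (new blob sha 'e1') where the
// folder also holds image.png (existing blob id 'f2'); paths are hypothetical.
[
  { path: 'content/posts/old/index.md', mode: '100644', type: 'blob', sha: null },
  { path: 'content/posts/new/index.md', mode: '100644', type: 'blob', sha: 'e1' },
  { path: 'content/posts/old/image.png', mode: '100644', type: 'blob', sha: null },
  { path: 'content/posts/new/image.png', mode: '100644', type: 'blob', sha: 'f2' },
]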


@@ -403,6 +403,9 @@ export default class GraphQLAPI extends API {
...this.getBranchQuery(branch, this.repoOwner, this.repoName),
fetchPolicy: CACHE_FIRST,
});
if (!data.repository.branch) {
throw new APIError('Branch not found', 404, API_NAME);
}
return data.repository.branch;
}
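Where the REST client gets a 404 for a missing branch, the GraphQL query resolves data.repository.branch to null, so the explicit APIError restores parity between the two clients; together with the catch added around getBranch in the REST API above, a deleted workflow branch now surfaces as the usual EditorialWorkflowError instead of a null dereference. A hedged sketch of that failure path (the call site and branch name are assumptions):

// Hypothetical: the branch for this content key was deleted out of band.
try {
  await api.getBranchPullRequest('cms/posts/deleted-entry');
} catch (e) {
  // EditorialWorkflowError: 'content is not under editorial workflow'
}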
@@ -539,12 +542,9 @@ export default class GraphQLAPI extends API {
try {
const contentKey = this.generateContentKey(collectionName, slug);
const branchName = branchFromContentKey(contentKey);
const metadata = await this.retrieveMetadata(contentKey);
if (metadata.pullRequest.number !== MOCK_PULL_REQUEST) {
const { branch, pullRequest } = await this.getPullRequestAndBranch(
branchName,
metadata.pullRequest.number,
);
const pr = await this.getBranchPullRequest(branchName);
if (pr.number !== MOCK_PULL_REQUEST) {
const { branch, pullRequest } = await this.getPullRequestAndBranch(branchName, pr.number);
const { data } = await this.mutate({
mutation: mutations.closePullRequestAndDeleteBranch,


@@ -132,48 +132,16 @@ describe('github backend implementation', () => {
});
});
describe('loadEntryMediaFiles', () => {
const readFile = jest.fn();
const mockAPI = {
readFile,
};
it('should return media files from metadata', async () => {
const gitHubImplementation = new GitHubImplementation(config);
gitHubImplementation.api = mockAPI;
const blob = new Blob(['']);
readFile.mockResolvedValue(blob);
const file = new File([blob], name);
await expect(
gitHubImplementation.loadEntryMediaFiles('branch', [
{ path: 'static/media/image.png', id: 'sha' },
]),
).resolves.toEqual([
{
id: 'sha',
displayURL: 'displayURL',
path: 'static/media/image.png',
name: 'image.png',
size: file.size,
file,
},
]);
});
});
describe('unpublishedEntry', () => {
const generateContentKey = jest.fn();
const readUnpublishedBranchFile = jest.fn();
const retrieveUnpublishedEntryData = jest.fn();
const mockAPI = {
generateContentKey,
readUnpublishedBranchFile,
retrieveUnpublishedEntryData,
};
it('should return unpublished entry', async () => {
it('should return unpublished entry data', async () => {
const gitHubImplementation = new GitHubImplementation(config);
gitHubImplementation.api = mockAPI;
gitHubImplementation.loadEntryMediaFiles = jest
@@ -183,37 +151,25 @@ describe('github backend implementation', () => {
generateContentKey.mockReturnValue('contentKey');
const data = {
fileData: 'fileData',
isModification: true,
metaData: {
branch: 'branch',
objects: {
entry: { path: 'entry-path', mediaFiles: [{ path: 'image.png', id: 'sha' }] },
},
},
collection: 'collection',
slug: 'slug',
status: 'draft',
diffs: [],
updatedAt: 'updatedAt',
};
readUnpublishedBranchFile.mockResolvedValue(data);
retrieveUnpublishedEntryData.mockResolvedValue(data);
const collection = 'posts';
await expect(gitHubImplementation.unpublishedEntry(collection, 'slug')).resolves.toEqual({
slug: 'slug',
file: { path: 'entry-path', id: null },
data: 'fileData',
metaData: data.metaData,
mediaFiles: [{ path: 'image.png', id: 'sha' }],
isModification: true,
});
const slug = 'slug';
await expect(gitHubImplementation.unpublishedEntry({ collection, slug })).resolves.toEqual(
data,
);
expect(generateContentKey).toHaveBeenCalledTimes(1);
expect(generateContentKey).toHaveBeenCalledWith('posts', 'slug');
expect(readUnpublishedBranchFile).toHaveBeenCalledTimes(1);
expect(readUnpublishedBranchFile).toHaveBeenCalledWith('contentKey');
expect(gitHubImplementation.loadEntryMediaFiles).toHaveBeenCalledTimes(1);
expect(gitHubImplementation.loadEntryMediaFiles).toHaveBeenCalledWith('branch', [
{ path: 'image.png', id: 'sha' },
]);
expect(retrieveUnpublishedEntryData).toHaveBeenCalledTimes(1);
expect(retrieveUnpublishedEntryData).toHaveBeenCalledWith('contentKey');
});
});


@@ -29,6 +29,7 @@ import {
blobToFileObj,
contentKeyFromBranch,
unsentRequest,
branchFromContentKey,
} from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
import { Octokit } from '@octokit/rest';
@@ -546,68 +547,73 @@ export default class GitHub implements Implementation {
};
}
loadMediaFile(branch: string, file: UnpublishedEntryMediaFile) {
async loadMediaFile(branch: string, file: UnpublishedEntryMediaFile) {
const readFile = (
path: string,
id: string | null | undefined,
{ parseText }: { parseText: boolean },
) => this.api!.readFile(path, id, { branch, parseText });
return getMediaAsBlob(file.path, file.id, readFile).then(blob => {
const name = basename(file.path);
const fileObj = blobToFileObj(name, blob);
return {
id: file.id,
displayURL: URL.createObjectURL(fileObj),
path: file.path,
name,
size: fileObj.size,
file: fileObj,
};
});
const blob = await getMediaAsBlob(file.path, file.id, readFile);
const name = basename(file.path);
const fileObj = blobToFileObj(name, blob);
return {
id: file.id,
displayURL: URL.createObjectURL(fileObj),
path: file.path,
name,
size: fileObj.size,
file: fileObj,
};
}
async loadEntryMediaFiles(branch: string, files: UnpublishedEntryMediaFile[]) {
const mediaFiles = await Promise.all(files.map(file => this.loadMediaFile(branch, file)));
return mediaFiles;
}
unpublishedEntries() {
async unpublishedEntries() {
const listEntriesKeys = () =>
this.api!.listUnpublishedBranches().then(branches =>
branches.map(branch => contentKeyFromBranch(branch)),
);
const readUnpublishedBranchFile = (contentKey: string) =>
this.api!.readUnpublishedBranchFile(contentKey);
return unpublishedEntries(listEntriesKeys, readUnpublishedBranchFile, 'GitHub');
const ids = await unpublishedEntries(listEntriesKeys);
return ids;
}
async unpublishedEntry(
collection: string,
slug: string,
{
loadEntryMediaFiles = (branch: string, files: UnpublishedEntryMediaFile[]) =>
this.loadEntryMediaFiles(branch, files),
} = {},
) {
async unpublishedEntry({
id,
collection,
slug,
}: {
id?: string;
collection?: string;
slug?: string;
}) {
if (id) {
const data = await this.api!.retrieveUnpublishedEntryData(id);
return data;
} else if (collection && slug) {
const entryId = this.api!.generateContentKey(collection, slug);
const data = await this.api!.retrieveUnpublishedEntryData(entryId);
return data;
} else {
throw new Error('Missing unpublished entry id or collection and slug');
}
}
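unpublishedEntry now accepts either an id, which is the content key that generateContentKey builds from collection and slug, or the collection/slug pair; both paths end in the same retrieveUnpublishedEntryData call. Hypothetical calls, assuming github is a configured instance of this backend and that the content key format is collection/slug:

// Hypothetical calls; the key format is an assumption based on generateContentKey.
await github.unpublishedEntry({ id: 'posts/hello-world' });
await github.unpublishedEntry({ collection: 'posts', slug: 'hello-world' });
await github.unpublishedEntry({}); // rejects: Missing unpublished entry id or collection and slug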
getBranch(collection: string, slug: string) {
const contentKey = this.api!.generateContentKey(collection, slug);
const data = await this.api!.readUnpublishedBranchFile(contentKey);
const files = data.metaData.objects.entry.mediaFiles || [];
const mediaFiles = await loadEntryMediaFiles(
data.metaData.branch,
files.map(({ id, path }) => ({ id, path })),
);
return {
slug,
file: { path: data.metaData.objects.entry.path, id: null },
data: data.fileData as string,
metaData: data.metaData,
mediaFiles,
isModification: data.isModification,
};
const branch = branchFromContentKey(contentKey);
return branch;
}
async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
const branch = this.getBranch(collection, slug);
const data = (await this.api!.readFile(path, id, { branch })) as string;
return data;
}
async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
const branch = this.getBranch(collection, slug);
const mediaFile = await this.loadMediaFile(branch, { path, id });
return mediaFile;
}
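These two methods complete the move away from the deleted readUnpublishedBranchFile flow: the workflow listing only carries content keys, and the UI fetches file contents and media blobs on demand from the entry's branch, using the paths and blob ids that retrieveUnpublishedEntryData reports in diffs. A hypothetical sequence (paths and ids are illustrative):

// Hypothetical on-demand reads for an entry surfaced by unpublishedEntries().
const raw = await github.unpublishedEntryDataFile(
  'posts', 'hello-world', 'content/posts/hello-world.md', 'c3',
);
const asset = await github.unpublishedEntryMediaFile(
  'posts', 'hello-world', 'static/media/photo.png', 'b2',
);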
async getDeployPreview(collection: string, slug: string) {