Feat: nested collections (#3716)

Author: Erez Rokah
Date: 2020-06-18 10:11:37 +03:00
Committed by: GitHub
Parent: b4c47caf59
Commit: af7bbbd9a9
89 changed files with 8269 additions and 5619 deletions


@@ -30,6 +30,7 @@ import {
import { Base64 } from 'js-base64';
import { Map } from 'immutable';
import { flow, partial, result, trimStart } from 'lodash';
import { dirname } from 'path';
export const API_NAME = 'GitLab';
@@ -57,6 +58,7 @@ enum CommitAction {
type CommitItem = {
base64Content?: string;
path: string;
oldPath?: string;
action: CommitAction;
};
@@ -68,6 +70,7 @@ interface CommitsParams {
actions?: {
action: string;
file_path: string;
previous_path?: string;
content?: string;
encoding?: string;
}[];
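// Illustrative sketch, not part of the commit: `previous_path` feeds the `move` action of
// GitLab's commits API. A move that also rewrites the file body is sent roughly as
// (hypothetical paths):
// {
//   action: 'move',
//   file_path: 'content/posts/new-title/index.md',
//   previous_path: 'content/posts/old-title/index.md',
//   content: '<base64-encoded file body>',
//   encoding: 'base64',
// }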
@@ -386,14 +389,14 @@ export default class API {
};
};
listAllFiles = async (path: string, recursive = false) => {
listAllFiles = async (path: string, recursive = false, branch = this.branch) => {
const entries = [];
// eslint-disable-next-line prefer-const
let { cursor, entries: initialEntries } = await this.fetchCursorAndEntries({
url: `${this.repoURL}/repository/tree`,
// Get the maximum number of entries per page
// eslint-disable-next-line @typescript-eslint/camelcase
params: { path, ref: this.branch, per_page: 100, recursive },
params: { path, ref: branch, per_page: 100, recursive },
});
entries.push(...initialEntries);
while (cursor && cursor.actions!.has('next')) {
@@ -423,7 +426,11 @@ export default class API {
action: item.action,
// eslint-disable-next-line @typescript-eslint/camelcase
file_path: item.path,
...(item.base64Content ? { content: item.base64Content, encoding: 'base64' } : {}),
// eslint-disable-next-line @typescript-eslint/camelcase
...(item.oldPath ? { previous_path: item.oldPath } : {}),
...(item.base64Content !== undefined
? { content: item.base64Content, encoding: 'base64' }
: {}),
}));
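// Note on the `!== undefined` check above: an empty file encodes to an empty base64 string,
// which is falsy, so a plain truthiness test would silently drop its `content`/`encoding`
// fields; checking against `undefined` keeps empty files intact.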
const commitParams: CommitsParams = {
@@ -459,21 +466,49 @@ export default class API {
}
}
async getCommitItems(files: (Entry | AssetProxy)[], branch: string) {
const items = await Promise.all(
async getCommitItems(files: { path: string; newPath?: string }[], branch: string) {
const items: CommitItem[] = await Promise.all(
files.map(async file => {
const [base64Content, fileExists] = await Promise.all([
result(file, 'toBase64', partial(this.toBase64, (file as Entry).raw)),
this.isFileExists(file.path, branch),
]);
let action = CommitAction.CREATE;
let path = trimStart(file.path, '/');
let oldPath = undefined;
if (fileExists) {
action = file.newPath ? CommitAction.MOVE : CommitAction.UPDATE;
oldPath = file.newPath && path;
path = file.newPath ? trimStart(file.newPath, '/') : path;
}
return {
action: fileExists ? CommitAction.UPDATE : CommitAction.CREATE,
action,
base64Content,
path: trimStart(file.path, '/'),
path,
oldPath,
};
}),
);
return items as CommitItem[];
// move children
for (const item of items.filter(i => i.oldPath && i.action === CommitAction.MOVE)) {
const sourceDir = dirname(item.oldPath as string);
const destDir = dirname(item.path);
const children = await this.listAllFiles(sourceDir, true, branch);
children
.filter(f => f.path !== item.oldPath)
.forEach(file => {
items.push({
action: CommitAction.MOVE,
path: file.path.replace(sourceDir, destDir),
oldPath: file.path,
});
});
}
return items;
}
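// Sketch with hypothetical paths: when a nested entry folder is renamed, e.g. its entry file
// moves from 'content/posts/old-title/index.md' to 'content/posts/new-title/index.md', the
// loop above lists everything under 'content/posts/old-title' on the target branch and queues
// one extra MOVE per sibling file, for instance:
//   { action: CommitAction.MOVE, path: 'content/posts/new-title/cover.jpg', oldPath: 'content/posts/old-title/cover.jpg' }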
async persistFiles(entry: Entry | null, mediaFiles: AssetProxy[], options: PersistOptions) {
@@ -604,54 +639,33 @@ export default class API {
oldPath: d.old_path,
newPath: d.new_path,
newFile: d.new_file,
path: d.new_path || d.old_path,
binary: d.diff.startsWith('Binary') || /.svg$/.test(d.new_path),
};
});
}
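// Resulting shape, hypothetical values: each diff entry exposes a single `path` (the new
// location, falling back to the old one) and a `binary` flag derived from GitLab's textual
// 'Binary files ... differ' marker, with .svg additionally treated as binary:
//   { oldPath: 'a.md', newPath: 'b.md', newFile: false, path: 'b.md', binary: false }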
async retrieveMetadata(contentKey: string) {
async retrieveUnpublishedEntryData(contentKey: string) {
const { collection, slug } = parseContentKey(contentKey);
const branch = branchFromContentKey(contentKey);
const mergeRequest = await this.getBranchMergeRequest(branch);
const diff = await this.getDifferences(mergeRequest.sha);
const { oldPath: path, newFile: newFile } = diff.find(d => !d.binary) as {
oldPath: string;
newFile: boolean;
};
const mediaFiles = await Promise.all(
diff
.filter(d => d.oldPath !== path)
.map(async d => {
const path = d.newPath;
const id = await this.getFileId(path, branch);
return { path, id };
}),
const diffs = await this.getDifferences(mergeRequest.sha);
const diffsWithIds = await Promise.all(
diffs.map(async d => {
const { path, newFile } = d;
const id = await this.getFileId(path, branch);
return { id, path, newFile };
}),
);
const label = mergeRequest.labels.find(isCMSLabel) as string;
const status = labelToStatus(label);
const timeStamp = mergeRequest.updated_at;
return { branch, collection, slug, path, status, newFile, mediaFiles, timeStamp };
}
async readUnpublishedBranchFile(contentKey: string) {
const {
branch,
const updatedAt = mergeRequest.updated_at;
return {
collection,
slug,
path,
status,
newFile,
mediaFiles,
timeStamp,
} = await this.retrieveMetadata(contentKey);
const fileData = (await this.readFile(path, null, { branch })) as string;
return {
slug,
metaData: { branch, collection, objects: { entry: { path, mediaFiles } }, status, timeStamp },
fileData,
isModification: !newFile,
diffs: diffsWithIds,
updatedAt,
};
}
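// Sketch of the resolved value, hypothetical data:
// {
//   collection: 'posts',
//   slug: 'hello-world',
//   status: 'draft',
//   diffs: [{ id: '<file id>', path: 'content/posts/hello-world/index.md', newFile: true }],
//   updatedAt: '2020-06-18T07:11:37.000Z',
// }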
@@ -726,10 +740,9 @@ export default class API {
this.getCommitItems(files, branch),
this.getDifferences(branch),
]);
// mark files for deletion
for (const diff of diffs) {
if (!items.some(item => item.path === diff.newPath)) {
for (const diff of diffs.filter(d => d.binary)) {
if (!items.some(item => item.path === diff.path)) {
items.push({ action: CommitAction.DELETE, path: diff.newPath });
}
}
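// The deletion pass above now compares against diff.path and only inspects binary diffs:
// files committed earlier on this branch (presumably media assets) that are absent from the
// current set of commit items are marked for deletion.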


@@ -32,6 +32,7 @@ import {
localForage,
allEntriesByFolder,
filterByExtension,
branchFromContentKey,
} from 'netlify-cms-lib-util';
import AuthenticationPage from './AuthenticationPage';
import API, { API_NAME } from './API';
@@ -351,34 +352,47 @@ export default class GitLab implements Implementation {
branches.map(branch => contentKeyFromBranch(branch)),
);
const readUnpublishedBranchFile = (contentKey: string) =>
this.api!.readUnpublishedBranchFile(contentKey);
return unpublishedEntries(listEntriesKeys, readUnpublishedBranchFile, API_NAME);
const ids = await unpublishedEntries(listEntriesKeys);
return ids;
}
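// After this change unpublishedEntries resolves to bare content keys, one per editorial
// workflow branch; the entry data itself is fetched on demand via unpublishedEntry({ id }) below.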
async unpublishedEntry(
collection: string,
slug: string,
{
loadEntryMediaFiles = (branch: string, files: UnpublishedEntryMediaFile[]) =>
this.loadEntryMediaFiles(branch, files),
} = {},
) {
async unpublishedEntry({
id,
collection,
slug,
}: {
id?: string;
collection?: string;
slug?: string;
}) {
if (id) {
const data = await this.api!.retrieveUnpublishedEntryData(id);
return data;
} else if (collection && slug) {
const entryId = generateContentKey(collection, slug);
const data = await this.api!.retrieveUnpublishedEntryData(entryId);
return data;
} else {
throw new Error('Missing unpublished entry id or collection and slug');
}
}
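// Sketch, not part of the commit: the two accepted call shapes, using a hypothetical entry.
//   await gitlab.unpublishedEntry({ id: 'posts/hello-world' });
//   await gitlab.unpublishedEntry({ collection: 'posts', slug: 'hello-world' });
// Both paths end in retrieveUnpublishedEntryData; with collection/slug the id is derived
// via generateContentKey first.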
getBranch(collection: string, slug: string) {
const contentKey = generateContentKey(collection, slug);
const data = await this.api!.readUnpublishedBranchFile(contentKey);
const mediaFiles = await loadEntryMediaFiles(
data.metaData.branch,
data.metaData.objects.entry.mediaFiles,
);
return {
slug,
file: { path: data.metaData.objects.entry.path, id: null },
data: data.fileData as string,
metaData: data.metaData,
mediaFiles,
isModification: data.isModification,
};
const branch = branchFromContentKey(contentKey);
return branch;
}
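// Assumed behaviour of the netlify-cms-lib-util helpers used above (hypothetical slug):
//   generateContentKey('posts', 'hello-world')  // -> 'posts/hello-world'
//   branchFromContentKey('posts/hello-world')   // -> 'cms/posts/hello-world'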
async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
const branch = this.getBranch(collection, slug);
const data = (await this.api!.readFile(path, id, { branch })) as string;
return data;
}
async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
const branch = this.getBranch(collection, slug);
const mediaFile = await this.loadMediaFile(branch, { path, id });
return mediaFile;
}
async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {