Feat: nested collections (#3716)

This commit is contained in:
Erez Rokah
2020-06-18 10:11:37 +03:00
committed by GitHub
parent b4c47caf59
commit af7bbbd9a9
89 changed files with 8269 additions and 5619 deletions

View File

@@ -1,4 +1,4 @@
import TestBackend, { getFolderEntries } from '../implementation';
import TestBackend, { getFolderFiles } from '../implementation';
describe('test backend implementation', () => {
beforeEach(() => {
@@ -15,7 +15,7 @@ describe('test backend implementation', () => {
},
};
const backend = new TestBackend();
const backend = new TestBackend({});
await expect(backend.getEntry('posts/some-post.md')).resolves.toEqual({
file: { path: 'posts/some-post.md', id: null },
@@ -36,7 +36,7 @@ describe('test backend implementation', () => {
},
};
const backend = new TestBackend();
const backend = new TestBackend({});
await expect(backend.getEntry('posts/dir1/dir2/some-post.md')).resolves.toEqual({
file: { path: 'posts/dir1/dir2/some-post.md', id: null },
@@ -49,7 +49,7 @@ describe('test backend implementation', () => {
it('should persist entry', async () => {
window.repoFiles = {};
const backend = new TestBackend();
const backend = new TestBackend({});
const entry = { path: 'posts/some-post.md', raw: 'content', slug: 'some-post.md' };
await backend.persistEntry(entry, [], { newEntry: true });
@@ -58,6 +58,7 @@ describe('test backend implementation', () => {
posts: {
'some-post.md': {
content: 'content',
path: 'posts/some-post.md',
},
},
});
@@ -77,7 +78,7 @@ describe('test backend implementation', () => {
},
};
const backend = new TestBackend();
const backend = new TestBackend({});
const entry = { path: 'posts/new-post.md', raw: 'content', slug: 'new-post.md' };
await backend.persistEntry(entry, [], { newEntry: true });
@@ -91,6 +92,7 @@ describe('test backend implementation', () => {
posts: {
'new-post.md': {
content: 'content',
path: 'posts/new-post.md',
},
'other-post.md': {
content: 'content',
@@ -102,7 +104,7 @@ describe('test backend implementation', () => {
it('should persist nested entry', async () => {
window.repoFiles = {};
const backend = new TestBackend();
const backend = new TestBackend({});
const slug = 'dir1/dir2/some-post.md';
const path = `posts/${slug}`;
@@ -115,6 +117,7 @@ describe('test backend implementation', () => {
dir2: {
'some-post.md': {
content: 'content',
path: 'posts/dir1/dir2/some-post.md',
},
},
},
@@ -136,7 +139,7 @@ describe('test backend implementation', () => {
},
};
const backend = new TestBackend();
const backend = new TestBackend({});
const slug = 'dir1/dir2/some-post.md';
const path = `posts/${slug}`;
@@ -148,7 +151,7 @@ describe('test backend implementation', () => {
dir1: {
dir2: {
'some-post.md': {
mediaFiles: ['file1'],
path: 'posts/dir1/dir2/some-post.md',
content: 'new content',
},
},
@@ -168,7 +171,7 @@ describe('test backend implementation', () => {
},
};
const backend = new TestBackend();
const backend = new TestBackend({});
await backend.deleteFile('posts/some-post.md');
expect(window.repoFiles).toEqual({
@@ -189,7 +192,7 @@ describe('test backend implementation', () => {
},
};
const backend = new TestBackend();
const backend = new TestBackend({});
await backend.deleteFile('posts/dir1/dir2/some-post.md');
expect(window.repoFiles).toEqual({
@@ -202,7 +205,7 @@ describe('test backend implementation', () => {
});
});
describe('getFolderEntries', () => {
describe('getFolderFiles', () => {
it('should get files by depth', () => {
const tree = {
pages: {
@@ -222,34 +225,34 @@ describe('test backend implementation', () => {
},
};
expect(getFolderEntries(tree, 'pages', 'md', 1)).toEqual([
expect(getFolderFiles(tree, 'pages', 'md', 1)).toEqual([
{
file: { path: 'pages/root-page.md', id: null },
data: 'root page content',
path: 'pages/root-page.md',
content: 'root page content',
},
]);
expect(getFolderEntries(tree, 'pages', 'md', 2)).toEqual([
expect(getFolderFiles(tree, 'pages', 'md', 2)).toEqual([
{
file: { path: 'pages/dir1/nested-page-1.md', id: null },
data: 'nested page 1 content',
path: 'pages/dir1/nested-page-1.md',
content: 'nested page 1 content',
},
{
file: { path: 'pages/root-page.md', id: null },
data: 'root page content',
path: 'pages/root-page.md',
content: 'root page content',
},
]);
expect(getFolderEntries(tree, 'pages', 'md', 3)).toEqual([
expect(getFolderFiles(tree, 'pages', 'md', 3)).toEqual([
{
file: { path: 'pages/dir1/dir2/nested-page-2.md', id: null },
data: 'nested page 2 content',
path: 'pages/dir1/dir2/nested-page-2.md',
content: 'nested page 2 content',
},
{
file: { path: 'pages/dir1/nested-page-1.md', id: null },
data: 'nested page 1 content',
path: 'pages/dir1/nested-page-1.md',
content: 'nested page 1 content',
},
{
file: { path: 'pages/root-page.md', id: null },
data: 'root page content',
path: 'pages/root-page.md',
content: 'root page content',
},
]);
});

View File

@@ -10,35 +10,65 @@ import {
ImplementationEntry,
AssetProxy,
PersistOptions,
ImplementationMediaFile,
User,
Config,
ImplementationFile,
} from 'netlify-cms-lib-util';
import { extname, dirname } from 'path';
import AuthenticationPage from './AuthenticationPage';
type RepoFile = { file?: { path: string }; content: string };
type RepoFile = { path: string; content: string | AssetProxy };
type RepoTree = { [key: string]: RepoFile | RepoTree };
type UnpublishedRepoEntry = {
slug: string;
collection: string;
status: string;
diffs: {
id: string;
originalPath?: string;
path: string;
newFile: boolean;
status: string;
content: string | AssetProxy;
}[];
updatedAt: string;
};
declare global {
interface Window {
repoFiles: RepoTree;
repoFilesUnpublished: ImplementationEntry[];
repoFilesUnpublished: { [key: string]: UnpublishedRepoEntry };
}
}
window.repoFiles = window.repoFiles || {};
window.repoFilesUnpublished = window.repoFilesUnpublished || [];
function getFile(path: string) {
function getFile(path: string, tree: RepoTree) {
const segments = path.split('/');
let obj: RepoTree = window.repoFiles;
let obj: RepoTree = tree;
while (obj && segments.length) {
obj = obj[segments.shift() as string] as RepoTree;
}
return ((obj as unknown) as RepoFile) || {};
}
function writeFile(path: string, content: string | AssetProxy, tree: RepoTree) {
const segments = path.split('/');
let obj = tree;
while (segments.length > 1) {
const segment = segments.shift() as string;
obj[segment] = obj[segment] || {};
obj = obj[segment] as RepoTree;
}
(obj[segments.shift() as string] as RepoFile) = { content, path };
}
function deleteFile(path: string, tree: RepoTree) {
unset(tree, path.split('/'));
}
const pageSize = 10;
const getCursor = (
@@ -60,12 +90,12 @@ const getCursor = (
});
};
export const getFolderEntries = (
export const getFolderFiles = (
tree: RepoTree,
folder: string,
extension: string,
depth: number,
files = [] as ImplementationEntry[],
files = [] as RepoFile[],
path = folder,
) => {
if (depth <= 0) {
@@ -73,15 +103,14 @@ export const getFolderEntries = (
}
Object.keys(tree[folder] || {}).forEach(key => {
if (key.endsWith(`.${extension}`)) {
if (extname(key)) {
const file = (tree[folder] as RepoTree)[key] as RepoFile;
files.unshift({
file: { path: `${path}/${key}`, id: null },
data: file.content,
});
if (!extension || key.endsWith(`.${extension}`)) {
files.unshift({ content: file.content, path: `${path}/${key}` });
}
} else {
const subTree = tree[folder] as RepoTree;
return getFolderEntries(subTree, key, extension, depth - 1, files, `${path}/${key}`);
return getFolderFiles(subTree, key, extension, depth - 1, files, `${path}/${key}`);
}
});
@@ -89,12 +118,12 @@ export const getFolderEntries = (
};
export default class TestBackend implements Implementation {
assets: ImplementationMediaFile[];
mediaFolder: string;
options: { initialWorkflowStatus?: string };
constructor(_config: Config, options = {}) {
this.assets = [];
constructor(config: Config, options = {}) {
this.options = options;
this.mediaFolder = config.media_folder;
}
isGitBackend() {
@@ -149,14 +178,22 @@ export default class TestBackend implements Implementation {
return 0;
})();
// TODO: stop assuming cursors are for collections
const allEntries = getFolderEntries(window.repoFiles, folder, extension, depth);
const allFiles = getFolderFiles(window.repoFiles, folder, extension, depth);
const allEntries = allFiles.map(f => ({
data: f.content as string,
file: { path: f.path, id: f.path },
}));
const entries = allEntries.slice(newIndex * pageSize, newIndex * pageSize + pageSize);
const newCursor = getCursor(folder, extension, allEntries, newIndex, depth);
return Promise.resolve({ entries, cursor: newCursor });
}
entriesByFolder(folder: string, extension: string, depth: number) {
const entries = folder ? getFolderEntries(window.repoFiles, folder, extension, depth) : [];
const files = folder ? getFolderFiles(window.repoFiles, folder, extension, depth) : [];
const entries = files.map(f => ({
data: f.content as string,
file: { path: f.path, id: f.path },
}));
const cursor = getCursor(folder, extension, entries, 0, depth);
const ret = take(entries, pageSize);
// eslint-disable-next-line @typescript-eslint/ban-ts-ignore
@@ -169,7 +206,7 @@ export default class TestBackend implements Implementation {
return Promise.all(
files.map(file => ({
file,
data: getFile(file.path).content,
data: getFile(file.path, window.repoFiles).content as string,
})),
);
}
@@ -177,133 +214,160 @@ export default class TestBackend implements Implementation {
getEntry(path: string) {
return Promise.resolve({
file: { path, id: null },
data: getFile(path).content,
data: getFile(path, window.repoFiles).content as string,
});
}
unpublishedEntries() {
return Promise.resolve(window.repoFilesUnpublished);
return Promise.resolve(Object.keys(window.repoFilesUnpublished));
}
getMediaFiles(entry: ImplementationEntry) {
const mediaFiles = entry.mediaFiles!.map(file => ({
...file,
...this.normalizeAsset(file),
file: file.file as File,
}));
return mediaFiles;
}
unpublishedEntry(collection: string, slug: string) {
const entry = window.repoFilesUnpublished.find(
e => e.metaData!.collection === collection && e.slug === slug,
);
unpublishedEntry({ id, collection, slug }: { id?: string; collection?: string; slug?: string }) {
if (id) {
const parts = id.split('/');
collection = parts[0];
slug = parts[1];
}
const entry = window.repoFilesUnpublished[`${collection}/${slug}`];
if (!entry) {
return Promise.reject(
new EditorialWorkflowError('content is not under editorial workflow', true),
);
}
entry.mediaFiles = this.getMediaFiles(entry);
return Promise.resolve(entry);
}
async unpublishedEntryDataFile(collection: string, slug: string, path: string) {
const entry = window.repoFilesUnpublished[`${collection}/${slug}`];
const file = entry.diffs.find(d => d.path === path);
return file?.content as string;
}
async unpublishedEntryMediaFile(collection: string, slug: string, path: string) {
const entry = window.repoFilesUnpublished[`${collection}/${slug}`];
const file = entry.diffs.find(d => d.path === path);
return this.normalizeAsset(file?.content as AssetProxy);
}
deleteUnpublishedEntry(collection: string, slug: string) {
const unpubStore = window.repoFilesUnpublished;
const existingEntryIndex = unpubStore.findIndex(
e => e.metaData!.collection === collection && e.slug === slug,
);
unpubStore.splice(existingEntryIndex, 1);
delete window.repoFilesUnpublished[`${collection}/${slug}`];
return Promise.resolve();
}
async addOrUpdateUnpublishedEntry(
key: string,
path: string,
newPath: string | undefined,
raw: string,
assetProxies: AssetProxy[],
slug: string,
collection: string,
status: string,
) {
const currentDataFile = window.repoFilesUnpublished[key]?.diffs.find(d => d.path === path);
const originalPath = currentDataFile ? currentDataFile.originalPath : path;
const diffs = [];
diffs.push({
originalPath,
id: newPath || path,
path: newPath || path,
newFile: isEmpty(getFile(originalPath as string, window.repoFiles)),
status: 'added',
content: raw,
});
assetProxies.forEach(a => {
const asset = this.normalizeAsset(a);
diffs.push({
id: asset.id,
path: asset.path,
newFile: true,
status: 'added',
content: asset,
});
});
window.repoFilesUnpublished[key] = {
slug,
collection,
status,
diffs,
updatedAt: new Date().toISOString(),
};
}
async persistEntry(
{ path, raw, slug }: Entry,
{ path, raw, slug, newPath }: Entry,
assetProxies: AssetProxy[],
options: PersistOptions,
) {
if (options.useWorkflow) {
const unpubStore = window.repoFilesUnpublished;
const existingEntryIndex = unpubStore.findIndex(e => e.file.path === path);
if (existingEntryIndex >= 0) {
const unpubEntry = {
...unpubStore[existingEntryIndex],
data: raw,
mediaFiles: assetProxies.map(this.normalizeAsset),
};
unpubStore.splice(existingEntryIndex, 1, unpubEntry);
} else {
const unpubEntry = {
data: raw,
file: {
path,
id: null,
},
metaData: {
collection: options.collectionName as string,
status: (options.status || this.options.initialWorkflowStatus) as string,
},
slug,
mediaFiles: assetProxies.map(this.normalizeAsset),
isModification: !isEmpty(getFile(path)),
};
unpubStore.push(unpubEntry);
}
const key = `${options.collectionName}/${slug}`;
const currentEntry = window.repoFilesUnpublished[key];
const status =
currentEntry?.status || options.status || (this.options.initialWorkflowStatus as string);
this.addOrUpdateUnpublishedEntry(
key,
path,
newPath,
raw,
assetProxies,
slug,
options.collectionName as string,
status,
);
return Promise.resolve();
}
const newEntry = options.newEntry || false;
const segments = path.split('/');
const entry = newEntry ? { content: raw } : { ...getFile(path), content: raw };
let obj = window.repoFiles;
while (segments.length > 1) {
const segment = segments.shift() as string;
obj[segment] = obj[segment] || {};
obj = obj[segment] as RepoTree;
}
(obj[segments.shift() as string] as RepoFile) = entry;
await Promise.all(assetProxies.map(file => this.persistMedia(file)));
writeFile(path, raw, window.repoFiles);
assetProxies.forEach(a => {
writeFile(a.path, raw, window.repoFiles);
});
return Promise.resolve();
}
updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
const unpubStore = window.repoFilesUnpublished;
const entryIndex = unpubStore.findIndex(
e => e.metaData!.collection === collection && e.slug === slug,
);
unpubStore[entryIndex]!.metaData!.status = newStatus;
window.repoFilesUnpublished[`${collection}/${slug}`].status = newStatus;
return Promise.resolve();
}
async publishUnpublishedEntry(collection: string, slug: string) {
const unpubStore = window.repoFilesUnpublished;
const unpubEntryIndex = unpubStore.findIndex(
e => e.metaData!.collection === collection && e.slug === slug,
);
const unpubEntry = unpubStore[unpubEntryIndex];
const entry = {
raw: unpubEntry.data,
slug: unpubEntry.slug as string,
path: unpubEntry.file.path,
};
unpubStore.splice(unpubEntryIndex, 1);
publishUnpublishedEntry(collection: string, slug: string) {
const key = `${collection}/${slug}`;
const unpubEntry = window.repoFilesUnpublished[key];
await this.persistEntry(entry, unpubEntry.mediaFiles!, { commitMessage: '' });
delete window.repoFilesUnpublished[key];
const tree = window.repoFiles;
unpubEntry.diffs.forEach(d => {
if (d.originalPath && !d.newFile) {
const originalPath = d.originalPath;
const sourceDir = dirname(originalPath);
const destDir = dirname(d.path);
const toMove = getFolderFiles(tree, originalPath.split('/')[0], '', 100).filter(f =>
f.path.startsWith(sourceDir),
);
toMove.forEach(f => {
deleteFile(f.path, tree);
writeFile(f.path.replace(sourceDir, destDir), f.content, tree);
});
}
writeFile(d.path, d.content, tree);
});
return Promise.resolve();
}
getMedia() {
return Promise.resolve(this.assets);
getMedia(mediaFolder = this.mediaFolder) {
const files = getFolderFiles(window.repoFiles, mediaFolder.split('/')[0], '', 100).filter(f =>
f.path.startsWith(mediaFolder),
);
const assets = files.map(f => this.normalizeAsset(f.content as AssetProxy));
return Promise.resolve(assets);
}
async getMediaFile(path: string) {
const asset = this.assets.find(asset => asset.path === path) as ImplementationMediaFile;
const asset = getFile(path, window.repoFiles).content as AssetProxy;
const url = asset.url as string;
const url = asset.toString();
const name = basename(path);
const blob = await fetch(url).then(res => res.blob());
const fileObj = new File([blob], name);
@@ -340,18 +404,13 @@ export default class TestBackend implements Implementation {
persistMedia(assetProxy: AssetProxy) {
const normalizedAsset = this.normalizeAsset(assetProxy);
this.assets.push(normalizedAsset);
writeFile(assetProxy.path, assetProxy, window.repoFiles);
return Promise.resolve(normalizedAsset);
}
deleteFile(path: string) {
const assetIndex = this.assets.findIndex(asset => asset.path === path);
if (assetIndex > -1) {
this.assets.splice(assetIndex, 1);
} else {
unset(window.repoFiles, path.split('/'));
}
deleteFile(path, window.repoFiles);
return Promise.resolve();
}